This module hosts functions that convert DataJoint table definitions into MySQL table definitions and declare the corresponding MySQL tables.
is_foreign_key(line)
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `line` | | a line from the table definition | required |

Returns:

| Type | Description |
| ---- | ----------- |
| | true if the line appears to be a foreign key definition |
Source code in datajoint/declare.py
```python
def is_foreign_key(line):
    """
    :param line: a line from the table definition
    :return: true if the line appears to be a foreign key definition
    """
    arrow_position = line.find("->")
    return arrow_position >= 0 and not any(c in line[:arrow_position] for c in "\"#'")
```
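For illustration, a few hypothetical definition lines and the result the logic above produces (the arrow must appear before any quote or comment character):

```python
from datajoint.declare import is_foreign_key

is_foreign_key("-> Session")                    # True: arrow with nothing quoted or commented before it
is_foreign_key("session_id : int")              # False: no arrow at all
is_foreign_key("note : varchar(80)  # -> ref")  # False: the arrow sits inside a comment
```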
compile_foreign_key(line, context, attributes, primary_key, attr_sql, foreign_key_sql, index_sql)
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `line` | | a line from a table definition | required |
| `context` | | namespace containing referenced objects | required |
| `attributes` | | list of attribute names already in the declaration -- to be updated by this function | required |
| `primary_key` | | None if the current foreign key is made from the dependent section; otherwise the list of primary key attributes thus far -- to be updated by this function | required |
| `attr_sql` | | list of SQL statements defining attributes -- to be updated by this function | required |
| `foreign_key_sql` | | list of SQL statements specifying foreign key constraints -- to be updated by this function | required |
| `index_sql` | | list of INDEX declaration statements; duplicate or redundant indexes are OK | required |
Source code in datajoint/declare.py
```python
def compile_foreign_key(
    line, context, attributes, primary_key, attr_sql, foreign_key_sql, index_sql
):
    """
    :param line: a line from a table definition
    :param context: namespace containing referenced objects
    :param attributes: list of attribute names already in the declaration -- to be updated by this function
    :param primary_key: None if the current foreign key is made from the dependent section. Otherwise it is the list
        of primary key attributes thus far -- to be updated by the function
    :param attr_sql: list of sql statements defining attributes -- to be updated by this function.
    :param foreign_key_sql: list of sql statements specifying foreign key constraints -- to be updated by this function.
    :param index_sql: list of INDEX declaration statements, duplicate or redundant indexes are ok.
    """
    # Parse and validate
    from .table import Table
    from .expression import QueryExpression

    try:
        result = foreign_key_parser.parseString(line)
    except pp.ParseException as err:
        raise DataJointError('Parsing error in line "%s". %s.' % (line, err))
    try:
        ref = eval(result.ref_table, context)
    except Exception:
        raise DataJointError(
            "Foreign key reference %s could not be resolved" % result.ref_table
        )
    options = [opt.upper() for opt in result.options]
    for opt in options:  # check for invalid options
        if opt not in {"NULLABLE", "UNIQUE"}:
            raise DataJointError('Invalid foreign key option "{opt}"'.format(opt=opt))
    is_nullable = "NULLABLE" in options
    is_unique = "UNIQUE" in options
    if is_nullable and primary_key is not None:
        raise DataJointError(
            'Primary dependencies cannot be nullable in line "{line}"'.format(line=line)
        )
    if isinstance(ref, type) and issubclass(ref, Table):
        ref = ref()
    # check that dependency is of a supported type
    if (
        not isinstance(ref, QueryExpression)
        or len(ref.restriction)
        or len(ref.support) != 1
        or not isinstance(ref.support[0], str)
    ):
        raise DataJointError(
            'Dependency "%s" is not supported (yet). Use a base table or its projection.'
            % result.ref_table
        )
    # declare new foreign key attributes
    for attr in ref.primary_key:
        if attr not in attributes:
            attributes.append(attr)
            if primary_key is not None:
                primary_key.append(attr)
            attr_sql.append(
                ref.heading[attr].sql.replace("NOT NULL ", "", int(is_nullable))
            )
    # declare the foreign key
    foreign_key_sql.append(
        "FOREIGN KEY (`{fk}`) REFERENCES {ref} (`{pk}`) ON UPDATE CASCADE ON DELETE RESTRICT".format(
            fk="`,`".join(ref.primary_key),
            pk="`,`".join(ref.heading[name].original_name for name in ref.primary_key),
            ref=ref.support[0],
        )
    )
    # declare unique index
    if is_unique:
        index_sql.append(
            "UNIQUE INDEX ({attrs})".format(
                attrs=",".join("`%s`" % attr for attr in ref.primary_key)
            )
        )
```
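This function is normally invoked internally during table declaration. As a rough usage sketch (not from the source), with a live connection and a declared table such as the hypothetical `Session` below, the output lists are populated in place:

```python
import datajoint as dj
from datajoint.declare import compile_foreign_key

schema = dj.Schema("my_database")  # hypothetical schema; requires a live connection

@schema
class Session(dj.Manual):
    definition = """
    session_id : int
    """

attributes, primary_key = [], []
attr_sql, foreign_key_sql, index_sql = [], [], []
compile_foreign_key(
    "-> Session",
    context={"Session": Session},
    attributes=attributes,
    primary_key=primary_key,   # non-None, so the referenced key joins the primary key
    attr_sql=attr_sql,
    foreign_key_sql=foreign_key_sql,
    index_sql=index_sql,
)
# attributes and primary_key now contain "session_id"; attr_sql holds its column
# definition; foreign_key_sql holds the FOREIGN KEY ... ON UPDATE CASCADE
# ON DELETE RESTRICT clause built above.
```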
declare(full_table_name, definition, context)
Parse declaration and generate the SQL CREATE TABLE code
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `full_table_name` | | full name of the table | required |
| `definition` | | DataJoint table definition | required |
| `context` | | dictionary of objects that might be referred to in the table | required |

Returns:

| Type | Description |
| ---- | ----------- |
| | SQL CREATE TABLE statement, list of external stores used |
Source code in datajoint/declare.py
```python
def declare(full_table_name, definition, context):
    """
    Parse declaration and generate the SQL CREATE TABLE code
    :param full_table_name: full name of the table
    :param definition: DataJoint table definition
    :param context: dictionary of objects that might be referred to in the table
    :return: SQL CREATE TABLE statement, list of external stores used
    """
    table_name = full_table_name.strip("`").split(".")[1]
    if len(table_name) > MAX_TABLE_NAME_LENGTH:
        raise DataJointError(
            "Table name `{name}` exceeds the max length of {max_length}".format(
                name=table_name, max_length=MAX_TABLE_NAME_LENGTH
            )
        )
    (
        table_comment,
        primary_key,
        attribute_sql,
        foreign_key_sql,
        index_sql,
        external_stores,
    ) = prepare_declare(definition, context)

    if config.get("add_hidden_timestamp", False):
        metadata_attr_sql = [
            "`_{full_table_name}_timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP"
        ]
        attribute_sql.extend(
            attr.format(
                full_table_name=sha1(
                    full_table_name.replace("`", "").encode("utf-8")
                ).hexdigest()
            )
            for attr in metadata_attr_sql
        )

    if not primary_key:
        raise DataJointError("Table must have a primary key")

    return (
        "CREATE TABLE IF NOT EXISTS %s (\n" % full_table_name
        + ",\n".join(
            attribute_sql
            + ["PRIMARY KEY (`" + "`,`".join(primary_key) + "`)"]
            + foreign_key_sql
            + index_sql
        )
        + '\n) ENGINE=InnoDB, COMMENT "%s"' % table_comment
    ), external_stores
```
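A minimal sketch of calling `declare` directly (the database and table names here are made up; the exact SQL also depends on `prepare_declare` and on configuration such as `add_hidden_timestamp`):

```python
from datajoint.declare import declare

definition = """
# experimental sessions
session_id : int          # session number
---
session_note = "" : varchar(255)
"""
sql, external_stores = declare("`my_database`.`session`", definition, context={})
print(sql)               # CREATE TABLE IF NOT EXISTS `my_database`.`session` ( ... ) ENGINE=InnoDB, ...
print(external_stores)   # [] -- no external stores referenced by this definition
```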
alter(definition, old_definition, context)
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `definition` | | the new table definition | required |
| `old_definition` | | the current table definition | required |
| `context` | | the context in which to evaluate foreign key definitions | required |

Returns:

| Type | Description |
| ---- | ----------- |
| | SQL ALTER command, list of new stores used for external storage |
Source code in datajoint/declare.py
```python
def alter(definition, old_definition, context):
    """
    :param definition: new table definition
    :param old_definition: current table definition
    :param context: the context in which to evaluate foreign key definitions
    :return: string SQL ALTER command, list of new stores used for external storage
    """
    (
        table_comment,
        primary_key,
        attribute_sql,
        foreign_key_sql,
        index_sql,
        external_stores,
    ) = prepare_declare(definition, context)
    (
        table_comment_,
        primary_key_,
        attribute_sql_,
        foreign_key_sql_,
        index_sql_,
        external_stores_,
    ) = prepare_declare(old_definition, context)
    # analyze differences between declarations
    sql = list()
    if primary_key != primary_key_:
        raise NotImplementedError("table.alter cannot alter the primary key (yet).")
    if foreign_key_sql != foreign_key_sql_:
        raise NotImplementedError("table.alter cannot alter foreign keys (yet).")
    if index_sql != index_sql_:
        raise NotImplementedError("table.alter cannot alter indexes (yet)")
    if attribute_sql != attribute_sql_:
        sql.extend(_make_attribute_alter(attribute_sql, attribute_sql_, primary_key))
    if table_comment != table_comment_:
        sql.append('COMMENT="%s"' % table_comment)
    return sql, [e for e in external_stores if e not in external_stores_]
```
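A sketch of the comparison it performs, using two made-up definitions that differ only in one attribute (plain types, so no `context` objects are needed):

```python
from datajoint.declare import alter

old_definition = """
# sessions
session_id : int
---
note : varchar(255)
"""
new_definition = """
# sessions
session_id : int
---
note : varchar(512)    # widened column
"""
sql, new_stores = alter(new_definition, old_definition, context={})
# sql        -> list of ALTER clauses (here, a change to the `note` column)
# new_stores -> external stores used by the new definition but not the old one (none here)
```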
substitute_special_type(match, category, foreign_key_sql, context)
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `match` | | dict with keys "type" and "comment" -- will be modified in place | required |
| `category` | | attribute type category from TYPE_PATTERN | required |
| `foreign_key_sql` | | list of foreign key declarations to add to | required |
| `context` | | context for looking up user-defined attribute_type adapters | required |
Source code in datajoint/declare.py
```python
def substitute_special_type(match, category, foreign_key_sql, context):
    """
    :param match: dict containing with keys "type" and "comment" -- will be modified in place
    :param category: attribute type category from TYPE_PATTERN
    :param foreign_key_sql: list of foreign key declarations to add to
    :param context: context for looking up user-defined attribute_type adapters
    """
    if category == "UUID":
        match["type"] = UUID_DATA_TYPE
    elif category == "INTERNAL_ATTACH":
        match["type"] = "LONGBLOB"
    elif category in EXTERNAL_TYPES:
        if category == "FILEPATH" and not _support_filepath_types():
            raise DataJointError(
                """
                The filepath data type is disabled until complete validation.
                To turn it on as experimental feature, set the environment variable
                {env} = TRUE or upgrade datajoint.
                """.format(
                    env=FILEPATH_FEATURE_SWITCH
                )
            )
        match["store"] = match["type"].split("@", 1)[1]
        match["type"] = UUID_DATA_TYPE
        foreign_key_sql.append(
            "FOREIGN KEY (`{name}`) REFERENCES `{{database}}`.`{external_table_root}_{store}` (`hash`) "
            "ON UPDATE RESTRICT ON DELETE RESTRICT".format(
                external_table_root=EXTERNAL_TABLE_ROOT, **match
            )
        )
    elif category == "ADAPTED":
        adapter = get_adapter(context, match["type"])
        match["type"] = adapter.attribute_type
        category = match_type(match["type"])
        if category in SPECIAL_TYPES:
            # recursive redefinition from user-defined datatypes.
            substitute_special_type(match, category, foreign_key_sql, context)
    else:
        assert False, "Unknown special type"
```
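A minimal sketch of the in-place substitution for a plain uuid attribute (assuming `match_type` maps "uuid" to the UUID category; no foreign keys are generated in this case):

```python
from datajoint.declare import substitute_special_type, match_type

match = {"type": "uuid", "comment": "globally unique identifier"}
foreign_key_sql = []
substitute_special_type(match, match_type(match["type"]), foreign_key_sql, context={})
# match["type"] has been replaced by UUID_DATA_TYPE (the MySQL storage type for uuid);
# foreign_key_sql stays empty because uuid is not an external, store-backed type.
```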
compile_attribute(line, in_key, foreign_key_sql, context)
Convert attribute definition from DataJoint format to SQL
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `line` | | an attribute definition line | required |
| `in_key` | | set to True if the attribute is in the primary key set | required |
| `foreign_key_sql` | | the list of foreign key declarations to add to | required |
| `context` | | context in which to look up user-defined attribute type adapters | required |

Returns:

| Type | Description |
| ---- | ----------- |
| | (name, sql, is_external) -- attribute name and SQL code for its declaration |
Source code in datajoint/declare.py
```python
def compile_attribute(line, in_key, foreign_key_sql, context):
    """
    Convert attribute definition from DataJoint format to SQL
    :param line: attribution line
    :param in_key: set to True if attribute is in primary key set
    :param foreign_key_sql: the list of foreign key declarations to add to
    :param context: context in which to look up user-defined attribute type adapterss
    :returns: (name, sql, is_external) -- attribute name and sql code for its declaration
    """
    try:
        match = attribute_parser.parseString(line + "#", parseAll=True)
    except pp.ParseException as err:
        raise DataJointError(
            "Declaration error in position {pos} in line:\n {line}\n{msg}".format(
                line=err.args[0], pos=err.args[1], msg=err.args[2]
            )
        )
    match["comment"] = match["comment"].rstrip("#")
    if "default" not in match:
        match["default"] = ""
    match = {k: v.strip() for k, v in match.items()}
    match["nullable"] = match["default"].lower() == "null"
    if match["nullable"]:
        if in_key:
            raise DataJointError(
                'Primary key attributes cannot be nullable in line "%s"' % line
            )
        match["default"] = "DEFAULT NULL"  # nullable attributes default to null
    else:
        if match["default"]:
            quote = (
                match["default"].split("(")[0].upper() not in CONSTANT_LITERALS
                and match["default"][0] not in "\"'"
            )
            match["default"] = (
                "NOT NULL DEFAULT " + ('"%s"' if quote else "%s") % match["default"]
            )
        else:
            match["default"] = "NOT NULL"
    match["comment"] = match["comment"].replace(
        '"', '\\"'
    )  # escape double quotes in comment
    if match["comment"].startswith(":"):
        raise DataJointError(
            'An attribute comment must not start with a colon in comment "{comment}"'.format(
                **match
            )
        )
    category = match_type(match["type"])
    if category in SPECIAL_TYPES:
        match["comment"] = ":{type}:{comment}".format(
            **match
        )  # insert custom type into comment
        substitute_special_type(match, category, foreign_key_sql, context)
    if category in SERIALIZED_TYPES and match["default"] not in {
        "DEFAULT NULL",
        "NOT NULL",
    }:
        raise DataJointError(
            "The default value for a blob or attachment attributes can only be NULL in:\n{line}".format(
                line=line
            )
        )
    sql = (
        "`{name}` {type} {default}"
        + (' COMMENT "{comment}"' if match["comment"] else "")
    ).format(**match)
    return match["name"], sql, match.get("store")
```
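A minimal sketch of converting a single attribute line (a plain type with a default value, so no foreign keys or external stores come into play):

```python
from datajoint.declare import compile_attribute

name, sql, store = compile_attribute(
    "temperature = 36.6 : float   # body temperature",
    in_key=False,
    foreign_key_sql=[],
    context={},
)
# name  -> "temperature"
# sql   -> the column definition, e.g. a NOT NULL DEFAULT plus the COMMENT clause
# store -> None, since float is not an external (store-backed) type
```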