datajoint 0.14.3__py3-none-any.whl → 0.14.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datajoint might be problematic. Click here for more details.

datajoint/__init__.py CHANGED
@@ -55,24 +55,23 @@ __all__ = [
55
55
  "cli",
56
56
  ]
57
57
 
58
- from .logging import logger
59
- from .version import __version__
60
- from .settings import config
61
- from .connection import conn, Connection
62
- from .schemas import Schema
63
- from .schemas import VirtualModule, list_schemas
64
- from .table import Table, FreeTable
65
- from .user_tables import Manual, Lookup, Imported, Computed, Part
66
- from .expression import Not, AndList, U, Top
67
- from .diagram import Diagram
68
- from .admin import set_password, kill
58
+ from . import errors
59
+ from .admin import kill, set_password
60
+ from .attribute_adapter import AttributeAdapter
69
61
  from .blob import MatCell, MatStruct
62
+ from .cli import cli
63
+ from .connection import Connection, conn
64
+ from .diagram import Diagram
65
+ from .errors import DataJointError
66
+ from .expression import AndList, Not, Top, U
70
67
  from .fetch import key
71
68
  from .hash import key_hash
72
- from .attribute_adapter import AttributeAdapter
73
- from . import errors
74
- from .errors import DataJointError
75
- from .cli import cli
69
+ from .logging import logger
70
+ from .schemas import Schema, VirtualModule, list_schemas
71
+ from .settings import config
72
+ from .table import FreeTable, Table
73
+ from .user_tables import Computed, Imported, Lookup, Manual, Part
74
+ from .version import __version__
76
75
 
77
76
  ERD = Di = Diagram # Aliases for Diagram
78
77
  schema = Schema # Aliases for Schema
datajoint/admin.py CHANGED
@@ -1,10 +1,12 @@
1
- import pymysql
1
+ import logging
2
2
  from getpass import getpass
3
+
4
+ import pymysql
3
5
  from packaging import version
6
+
4
7
  from .connection import conn
5
8
  from .settings import config
6
9
  from .utils import user_choice
7
- import logging
8
10
 
9
11
  logger = logging.getLogger(__name__.split(".")[0])
10
12
 
@@ -1,4 +1,5 @@
1
1
  import re
2
+
2
3
  from .errors import DataJointError, _support_adapted_types
3
4
  from .plugin import type_plugins
4
5
 
datajoint/autopopulate.py CHANGED
@@ -1,17 +1,20 @@
1
1
  """This module defines class dj.AutoPopulate"""
2
2
 
3
- import logging
3
+ import contextlib
4
4
  import datetime
5
- import traceback
6
- import random
7
5
  import inspect
6
+ import logging
7
+ import multiprocessing as mp
8
+ import random
9
+ import signal
10
+ import traceback
11
+
12
+ import deepdiff
8
13
  from tqdm import tqdm
9
- from .hash import key_hash
10
- from .expression import QueryExpression, AndList
14
+
11
15
  from .errors import DataJointError, LostConnectionError
12
- import signal
13
- import multiprocessing as mp
14
- import contextlib
16
+ from .expression import AndList, QueryExpression
17
+ from .hash import key_hash
15
18
 
16
19
  # noinspection PyExceptionInherit,PyCallingNonCallable
17
20
 
@@ -92,13 +95,76 @@ class AutoPopulate:
92
95
 
93
96
  def make(self, key):
94
97
  """
95
- Derived classes must implement method `make` that fetches data from tables
96
- above them in the dependency hierarchy, restricting by the given key,
97
- computes secondary attributes, and inserts the new tuples into self.
98
+ This method must be implemented by derived classes to perform automated computation.
99
+ The method must implement the following three steps:
100
+
101
+ 1. Fetch data from tables above in the dependency hierarchy, restricted by the given key.
102
+ 2. Compute secondary attributes based on the fetched data.
103
+ 3. Insert the new tuple(s) into the current table.
104
+
105
+ The method can be implemented either as:
106
+ (a) Regular method: All three steps are performed in a single database transaction.
107
+ The method must return None.
108
+ (b) Generator method:
109
+ The make method is split into three functions:
110
+ - `make_fetch`: Fetches data from the parent tables.
111
+ - `make_compute`: Computes secondary attributes based on the fetched data.
112
+ - `make_insert`: Inserts the computed data into the current table.
113
+
114
+ Then populate logic is executes as follows:
115
+
116
+ <pseudocode>
117
+ fetched_data1 = self.make_fetch(key)
118
+ computed_result = self.make_compute(key, *fetched_data1)
119
+ begin transaction:
120
+ fetched_data2 = self.make_fetch(key)
121
+ if fetched_data1 != fetched_data2:
122
+ cancel transaction
123
+ else:
124
+ self.make_insert(key, *computed_result)
125
+ commit_transaction
126
+ <pseudocode>
127
+
128
+ Importantly, the output of make_fetch is a tuple that serves as the input into `make_compute`.
129
+ The output of `make_compute` is a tuple that serves as the input into `make_insert`.
130
+
131
+ The functionality must be strictly divided between these three methods:
132
+ - All database queries must be completed in `make_fetch`.
133
+ - All computation must be completed in `make_compute`.
134
+ - All database inserts must be completed in `make_insert`.
135
+
136
+ DataJoint may programmatically enforce this separation in the future.
137
+
138
+ :param key: The primary key value used to restrict the data fetching.
139
+ :raises NotImplementedError: If the derived class does not implement the required methods.
98
140
  """
99
- raise NotImplementedError(
100
- "Subclasses of AutoPopulate must implement the method `make`"
101
- )
141
+
142
+ if not (
143
+ hasattr(self, "make_fetch")
144
+ and hasattr(self, "make_insert")
145
+ and hasattr(self, "make_compute")
146
+ ):
147
+ # user must implement `make`
148
+ raise NotImplementedError(
149
+ "Subclasses of AutoPopulate must implement the method `make` "
150
+ "or (`make_fetch` + `make_compute` + `make_insert`)"
151
+ )
152
+
153
+ # User has implemented `_fetch`, `_compute`, and `_insert` methods instead
154
+
155
+ # Step 1: Fetch data from parent tables
156
+ fetched_data = self.make_fetch(key) # fetched_data is a tuple
157
+ computed_result = yield fetched_data # passed as input into make_compute
158
+
159
+ # Step 2: If computed result is not passed in, compute the result
160
+ if computed_result is None:
161
+ # this is only executed in the first invocation
162
+ computed_result = self.make_compute(key, *fetched_data)
163
+ yield computed_result # this is passed to the second invocation of make
164
+
165
+ # Step 3: Insert the computed result into the current table.
166
+ self.make_insert(key, *computed_result)
167
+ yield
102
168
 
103
169
  @property
104
170
  def target(self):
@@ -200,9 +266,8 @@ class AutoPopulate:
200
266
  self.connection.schemas[self.target.database].jobs if reserve_jobs else None
201
267
  )
202
268
 
203
- # define and set up signal handler for SIGTERM:
204
269
  if reserve_jobs:
205
-
270
+ # Define a signal handler for SIGTERM
206
271
  def handler(signum, frame):
207
272
  logger.info("Populate terminated by SIGTERM")
208
273
  raise SystemExit("SIGTERM received")
@@ -262,13 +327,16 @@ class AutoPopulate:
262
327
  # spawn multiple processes
263
328
  self.connection.close() # disconnect parent process from MySQL server
264
329
  del self.connection._conn.ctx # SSLContext is not pickleable
265
- with mp.Pool(
266
- processes, _initialize_populate, (self, jobs, populate_kwargs)
267
- ) as pool, (
268
- tqdm(desc="Processes: ", total=nkeys)
269
- if display_progress
270
- else contextlib.nullcontext()
271
- ) as progress_bar:
330
+ with (
331
+ mp.Pool(
332
+ processes, _initialize_populate, (self, jobs, populate_kwargs)
333
+ ) as pool,
334
+ (
335
+ tqdm(desc="Processes: ", total=nkeys)
336
+ if display_progress
337
+ else contextlib.nullcontext()
338
+ ) as progress_bar,
339
+ ):
272
340
  for status in pool.imap(_call_populate1, keys, chunksize=1):
273
341
  if status is True:
274
342
  success_list.append(1)
@@ -309,17 +377,47 @@ class AutoPopulate:
309
377
  ):
310
378
  return False
311
379
 
312
- self.connection.start_transaction()
380
+ # if make is a generator, it transaction can be delayed until the final stage
381
+ is_generator = inspect.isgeneratorfunction(make)
382
+ if not is_generator:
383
+ self.connection.start_transaction()
384
+
313
385
  if key in self.target: # already populated
314
- self.connection.cancel_transaction()
386
+ if not is_generator:
387
+ self.connection.cancel_transaction()
315
388
  if jobs is not None:
316
389
  jobs.complete(self.target.table_name, self._job_key(key))
317
390
  return False
318
391
 
319
392
  logger.debug(f"Making {key} -> {self.target.full_table_name}")
320
393
  self.__class__._allow_insert = True
394
+
321
395
  try:
322
- make(dict(key), **(make_kwargs or {}))
396
+ if not is_generator:
397
+ make(dict(key), **(make_kwargs or {}))
398
+ else:
399
+ # tripartite make - transaction is delayed until the final stage
400
+ gen = make(dict(key), **(make_kwargs or {}))
401
+ fetched_data = next(gen)
402
+ fetch_hash = deepdiff.DeepHash(
403
+ fetched_data, ignore_iterable_order=False
404
+ )[fetched_data]
405
+ computed_result = next(gen) # perform the computation
406
+ # fetch and insert inside a transaction
407
+ self.connection.start_transaction()
408
+ gen = make(dict(key), **(make_kwargs or {})) # restart make
409
+ fetched_data = next(gen)
410
+ if (
411
+ fetch_hash
412
+ != deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[
413
+ fetched_data
414
+ ]
415
+ ): # raise error if fetched data has changed
416
+ raise DataJointError(
417
+ "Referential integrity failed! The `make_fetch` data has changed"
418
+ )
419
+ gen.send(computed_result) # insert
420
+
323
421
  except (KeyboardInterrupt, SystemExit, Exception) as error:
324
422
  try:
325
423
  self.connection.cancel_transaction()
datajoint/blob.py CHANGED
@@ -3,17 +3,18 @@
3
3
  compatibility with Matlab-based serialization implemented by mYm.
4
4
  """
5
5
 
6
- import zlib
7
- from itertools import repeat
8
6
  import collections
9
- from decimal import Decimal
10
7
  import datetime
11
8
  import uuid
9
+ import zlib
10
+ from decimal import Decimal
11
+ from itertools import repeat
12
+
12
13
  import numpy as np
14
+
13
15
  from .errors import DataJointError
14
16
  from .settings import config
15
17
 
16
-
17
18
  deserialize_lookup = {
18
19
  0: {"dtype": None, "scalar_type": "UNKNOWN"},
19
20
  1: {"dtype": None, "scalar_type": "CELL"},
@@ -139,7 +140,7 @@ class Blob:
139
140
  "S": self.read_struct, # matlab struct array
140
141
  "C": self.read_cell_array, # matlab cell array
141
142
  # basic data types
142
- "\xFF": self.read_none, # None
143
+ "\xff": self.read_none, # None
143
144
  "\x01": self.read_tuple, # a Sequence (e.g. tuple)
144
145
  "\x02": self.read_list, # a MutableSequence (e.g. list)
145
146
  "\x03": self.read_set, # a Set
@@ -204,7 +205,7 @@ class Blob:
204
205
  return self.pack_dict(obj)
205
206
  if isinstance(obj, str):
206
207
  return self.pack_string(obj)
207
- if isinstance(obj, collections.abc.ByteString):
208
+ if isinstance(obj, (bytes, bytearray)):
208
209
  return self.pack_bytes(obj)
209
210
  if isinstance(obj, collections.abc.MutableSequence):
210
211
  return self.pack_list(obj)
@@ -400,7 +401,7 @@ class Blob:
400
401
 
401
402
  @staticmethod
402
403
  def pack_none():
403
- return b"\xFF"
404
+ return b"\xff"
404
405
 
405
406
  def read_tuple(self):
406
407
  return tuple(
datajoint/cli.py CHANGED
@@ -1,6 +1,7 @@
1
1
  import argparse
2
2
  from code import interact
3
3
  from collections import ChainMap
4
+
4
5
  import datajoint as dj
5
6
 
6
7
 
datajoint/condition.py CHANGED
@@ -1,17 +1,19 @@
1
- """ methods for generating SQL WHERE clauses from datajoint restriction conditions """
1
+ """methods for generating SQL WHERE clauses from datajoint restriction conditions"""
2
2
 
3
- import inspect
4
3
  import collections
5
- import re
6
- import uuid
7
4
  import datetime
8
5
  import decimal
6
+ import inspect
7
+ import json
8
+ import re
9
+ import uuid
10
+ from dataclasses import dataclass
11
+ from typing import List, Union
12
+
9
13
  import numpy
10
14
  import pandas
11
- import json
15
+
12
16
  from .errors import DataJointError
13
- from typing import Union, List
14
- from dataclasses import dataclass
15
17
 
16
18
  JSON_PATTERN = re.compile(
17
19
  r"^(?P<attr>\w+)(\.(?P<path>[\w.*\[\]]+))?(:(?P<type>[\w(,\s)]+))?$"
@@ -143,7 +145,7 @@ def make_condition(query_expression, condition, columns):
143
145
  condition.
144
146
  :return: an SQL condition string or a boolean value.
145
147
  """
146
- from .expression import QueryExpression, Aggregation, U
148
+ from .expression import Aggregation, QueryExpression, U
147
149
 
148
150
  def prep_value(k, v):
149
151
  """prepare SQL condition"""
datajoint/connection.py CHANGED
@@ -3,20 +3,22 @@ This module contains the Connection class that manages the connection to the dat
3
3
  the ``conn`` function that provides access to a persistent connection in datajoint.
4
4
  """
5
5
 
6
+ import logging
7
+ import pathlib
8
+ import re
6
9
  import warnings
7
10
  from contextlib import contextmanager
8
- import pymysql as client
9
- import logging
10
11
  from getpass import getpass
11
- import re
12
- import pathlib
13
12
 
14
- from .settings import config
13
+ import pymysql as client
14
+
15
15
  from . import errors
16
- from .dependencies import Dependencies
17
16
  from .blob import pack, unpack
17
+ from .dependencies import Dependencies
18
18
  from .hash import uuid_from_buffer
19
19
  from .plugin import connection_plugins
20
+ from .settings import config
21
+ from .version import __version__
20
22
 
21
23
  logger = logging.getLogger(__name__.split(".")[0])
22
24
  query_log_max_length = 300
@@ -190,15 +192,20 @@ class Connection:
190
192
  self.conn_info["ssl_input"] = use_tls
191
193
  self.conn_info["host_input"] = host_input
192
194
  self.init_fun = init_fun
193
- logger.info("Connecting {user}@{host}:{port}".format(**self.conn_info))
194
195
  self._conn = None
195
196
  self._query_cache = None
196
197
  connect_host_hook(self)
197
198
  if self.is_connected:
198
- logger.info("Connected {user}@{host}:{port}".format(**self.conn_info))
199
+ logger.info(
200
+ "DataJoint {version} connected to {user}@{host}:{port}".format(
201
+ version=__version__, **self.conn_info
202
+ )
203
+ )
199
204
  self.connection_id = self.query("SELECT connection_id()").fetchone()[0]
200
205
  else:
201
- raise errors.LostConnectionError("Connection failed.")
206
+ raise errors.LostConnectionError(
207
+ "Connection failed {user}@{host}:{port}".format(**self.conn_info)
208
+ )
202
209
  self._in_transaction = False
203
210
  self.schemas = dict()
204
211
  self.dependencies = Dependencies(self)
@@ -344,7 +351,7 @@ class Connection:
344
351
  except errors.LostConnectionError:
345
352
  if not reconnect:
346
353
  raise
347
- logger.warning("MySQL server has gone away. Reconnecting to the server.")
354
+ logger.warning("Reconnecting to MySQL server.")
348
355
  connect_host_hook(self)
349
356
  if self._in_transaction:
350
357
  self.cancel_transaction()
datajoint/declare.py CHANGED
@@ -3,13 +3,15 @@ This module hosts functions to convert DataJoint table definitions into mysql ta
3
3
  declare the corresponding mysql tables.
4
4
  """
5
5
 
6
- import re
7
- import pyparsing as pp
8
6
  import logging
7
+ import re
9
8
  from hashlib import sha1
10
- from .errors import DataJointError, _support_filepath_types, FILEPATH_FEATURE_SWITCH
9
+
10
+ import pyparsing as pp
11
+
11
12
  from .attribute_adapter import get_adapter
12
13
  from .condition import translate_attribute
14
+ from .errors import FILEPATH_FEATURE_SWITCH, DataJointError, _support_filepath_types
13
15
  from .settings import config
14
16
 
15
17
  UUID_DATA_TYPE = "binary(16)"
@@ -163,8 +165,8 @@ def compile_foreign_key(
163
165
  :param index_sql: list of INDEX declaration statements, duplicate or redundant indexes are ok.
164
166
  """
165
167
  # Parse and validate
166
- from .table import Table
167
168
  from .expression import QueryExpression
169
+ from .table import Table
168
170
 
169
171
  try:
170
172
  result = foreign_key_parser.parseString(line)
datajoint/dependencies.py CHANGED
@@ -1,7 +1,9 @@
1
- import networkx as nx
2
1
  import itertools
3
2
  import re
4
3
  from collections import defaultdict
4
+
5
+ import networkx as nx
6
+
5
7
  from .errors import DataJointError
6
8
 
7
9
 
datajoint/diagram.py CHANGED
@@ -1,14 +1,14 @@
1
- import networkx as nx
2
1
  import functools
2
+ import inspect
3
3
  import io
4
4
  import logging
5
- import inspect
6
- from .table import Table
5
+
6
+ import networkx as nx
7
+
7
8
  from .dependencies import topo_sort
8
- from .user_tables import Manual, Imported, Computed, Lookup, Part, _get_tier, _AliasNode
9
9
  from .errors import DataJointError
10
- from .table import lookup_class_name
11
-
10
+ from .table import Table, lookup_class_name
11
+ from .user_tables import Computed, Imported, Lookup, Manual, Part, _AliasNode, _get_tier
12
12
 
13
13
  try:
14
14
  from matplotlib import pyplot as plt
@@ -35,7 +35,7 @@ if not diagram_active:
35
35
  Entity relationship diagram, currently disabled due to the lack of required packages: matplotlib and pygraphviz.
36
36
 
37
37
  To enable Diagram feature, please install both matplotlib and pygraphviz. For instructions on how to install
38
- these two packages, refer to https://datajoint.com/docs/core/datajoint-python/0.14/client/install/
38
+ these two packages, refer to https://docs.datajoint.com/core/datajoint-python/0.14/client/install/
39
39
  """
40
40
 
41
41
  def __init__(self, *args, **kwargs):
datajoint/expression.py CHANGED
@@ -1,23 +1,24 @@
1
- from itertools import count
2
- import logging
3
- import inspect
4
1
  import copy
2
+ import inspect
3
+ import logging
5
4
  import re
6
- from .settings import config
7
- from .errors import DataJointError
8
- from .fetch import Fetch, Fetch1
9
- from .preview import preview, repr_html
5
+ from itertools import count
6
+
10
7
  from .condition import (
11
8
  AndList,
12
- Top,
13
9
  Not,
14
- make_condition,
10
+ PromiscuousOperand,
11
+ Top,
15
12
  assert_join_compatibility,
16
13
  extract_column_names,
17
- PromiscuousOperand,
14
+ make_condition,
18
15
  translate_attribute,
19
16
  )
20
17
  from .declare import CONSTANT_LITERALS
18
+ from .errors import DataJointError
19
+ from .fetch import Fetch, Fetch1
20
+ from .preview import preview, repr_html
21
+ from .settings import config
21
22
 
22
23
  logger = logging.getLogger(__name__.split(".")[0])
23
24
 
datajoint/external.py CHANGED
@@ -1,15 +1,17 @@
1
- from pathlib import Path, PurePosixPath, PureWindowsPath
1
+ import logging
2
2
  from collections.abc import Mapping
3
+ from pathlib import Path, PurePosixPath, PureWindowsPath
4
+
3
5
  from tqdm import tqdm
4
- import logging
5
- from .settings import config
6
+
7
+ from . import errors, s3
8
+ from .declare import EXTERNAL_TABLE_ROOT
6
9
  from .errors import DataJointError, MissingExternalFile
7
10
  from .hash import uuid_from_buffer, uuid_from_file
8
- from .table import Table, FreeTable
9
11
  from .heading import Heading
10
- from .declare import EXTERNAL_TABLE_ROOT
11
- from . import s3, errors
12
- from .utils import safe_write, safe_copy
12
+ from .settings import config
13
+ from .table import FreeTable, Table
14
+ from .utils import safe_copy, safe_write
13
15
 
14
16
  logger = logging.getLogger(__name__.split(".")[0])
15
17
 
@@ -22,7 +24,7 @@ SUPPORT_MIGRATED_BLOBS = True # support blobs migrated from datajoint 0.11.*
22
24
 
23
25
  def subfold(name, folds):
24
26
  """
25
- subfolding for external storage: e.g. subfold('aBCdefg', (2, 3)) --> ['ab','cde']
27
+ subfolding for external storage: e.g. subfold('aBCdefg', (2, 3)) --> ['ab','cde']
26
28
  """
27
29
  return (
28
30
  (name[: folds[0]].lower(),) + subfold(name[folds[0] :], folds[1:])
@@ -278,7 +280,7 @@ class ExternalTable(Table):
278
280
 
279
281
  # check if the remote file already exists and verify that it matches
280
282
  check_hash = (self & {"hash": uuid}).fetch("contents_hash")
281
- if check_hash:
283
+ if check_hash.size:
282
284
  # the tracking entry exists, check that it's the same file as before
283
285
  if contents_hash != check_hash[0]:
284
286
  raise DataJointError(
datajoint/fetch.py CHANGED
@@ -1,13 +1,15 @@
1
- from functools import partial
2
- from pathlib import Path
3
- import pandas
4
1
  import itertools
5
2
  import json
6
- import numpy as np
7
- import uuid
8
3
  import numbers
4
+ import uuid
5
+ from functools import partial
6
+ from pathlib import Path
7
+
8
+ import numpy as np
9
+ import pandas
9
10
 
10
11
  from datajoint.condition import Top
12
+
11
13
  from . import blob, hash
12
14
  from .errors import DataJointError
13
15
  from .settings import config
datajoint/hash.py CHANGED
@@ -1,6 +1,6 @@
1
1
  import hashlib
2
- import uuid
3
2
  import io
3
+ import uuid
4
4
  from pathlib import Path
5
5
 
6
6
 
datajoint/heading.py CHANGED
@@ -1,18 +1,19 @@
1
- import numpy as np
2
- from collections import namedtuple, defaultdict
3
- from itertools import chain
4
- import re
5
1
  import logging
6
- from .errors import DataJointError, _support_filepath_types, FILEPATH_FEATURE_SWITCH
2
+ import re
3
+ from collections import defaultdict, namedtuple
4
+ from itertools import chain
5
+
6
+ import numpy as np
7
+
8
+ from .attribute_adapter import AttributeAdapter, get_adapter
7
9
  from .declare import (
8
- UUID_DATA_TYPE,
9
- SPECIAL_TYPES,
10
- TYPE_PATTERN,
11
10
  EXTERNAL_TYPES,
12
11
  NATIVE_TYPES,
12
+ SPECIAL_TYPES,
13
+ TYPE_PATTERN,
14
+ UUID_DATA_TYPE,
13
15
  )
14
- from .attribute_adapter import get_adapter, AttributeAdapter
15
-
16
+ from .errors import FILEPATH_FEATURE_SWITCH, DataJointError, _support_filepath_types
16
17
 
17
18
  logger = logging.getLogger(__name__.split(".")[0])
18
19
 
datajoint/jobs.py CHANGED
@@ -1,10 +1,11 @@
1
1
  import os
2
- from .hash import key_hash
3
2
  import platform
4
- from .table import Table
5
- from .settings import config
3
+
6
4
  from .errors import DuplicateError
5
+ from .hash import key_hash
7
6
  from .heading import Heading
7
+ from .settings import config
8
+ from .table import Table
8
9
 
9
10
  ERROR_MESSAGE_LENGTH = 2047
10
11
  TRUNCATION_APPENDIX = "...truncated"
datajoint/plugin.py CHANGED
@@ -1,9 +1,11 @@
1
- from .settings import config
2
- import pkg_resources
1
+ import logging
3
2
  from pathlib import Path
3
+
4
+ import pkg_resources
4
5
  from cryptography.exceptions import InvalidSignature
5
6
  from otumat import hash_pkg, verify
6
- import logging
7
+
8
+ from .settings import config
7
9
 
8
10
  logger = logging.getLogger(__name__.split(".")[0])
9
11
 
datajoint/preview.py CHANGED
@@ -1,4 +1,4 @@
1
- """ methods for generating previews of query expression results in python command line and Jupyter """
1
+ """methods for generating previews of query expression results in python command line and Jupyter"""
2
2
 
3
3
  from .settings import config
4
4
 
datajoint/s3.py CHANGED
@@ -2,12 +2,14 @@
2
2
  AWS S3 operations
3
3
  """
4
4
 
5
+ import logging
6
+ import uuid
5
7
  from io import BytesIO
8
+ from pathlib import Path
9
+
6
10
  import minio # https://docs.minio.io/docs/python-client-api-reference
7
11
  import urllib3
8
- import uuid
9
- import logging
10
- from pathlib import Path
12
+
11
13
  from . import errors
12
14
 
13
15
  logger = logging.getLogger(__name__.split(".")[0])
datajoint/schemas.py CHANGED
@@ -1,19 +1,20 @@
1
- import warnings
2
- import logging
3
- import inspect
4
- import re
5
1
  import collections
2
+ import inspect
6
3
  import itertools
4
+ import logging
5
+ import re
6
+ import types
7
+ import warnings
8
+
7
9
  from .connection import conn
8
- from .settings import config
9
- from .errors import DataJointError, AccessError
10
- from .jobs import JobTable
10
+ from .errors import AccessError, DataJointError
11
11
  from .external import ExternalMapping
12
12
  from .heading import Heading
13
- from .utils import user_choice, to_camel_case
14
- from .user_tables import Part, Computed, Imported, Manual, Lookup, _get_tier
15
- from .table import lookup_class_name, Log, FreeTable
16
- import types
13
+ from .jobs import JobTable
14
+ from .settings import config
15
+ from .table import FreeTable, Log, lookup_class_name
16
+ from .user_tables import Computed, Imported, Lookup, Manual, Part, _get_tier
17
+ from .utils import to_camel_case, user_choice
17
18
 
18
19
  logger = logging.getLogger(__name__.split(".")[0])
19
20
 
@@ -482,8 +483,8 @@ class Schema:
482
483
  return [
483
484
  t
484
485
  for d, t in (
485
- full_t.replace("`", "").split(".")
486
- for full_t in self.connection.dependencies.topo_sort()
486
+ table_name.replace("`", "").split(".")
487
+ for table_name in self.connection.dependencies.topo_sort()
487
488
  )
488
489
  if d == self.database
489
490
  ]
datajoint/settings.py CHANGED
@@ -1,14 +1,15 @@
1
1
  """
2
- Settings for DataJoint.
2
+ Settings for DataJoint
3
3
  """
4
4
 
5
- from contextlib import contextmanager
5
+ import collections
6
6
  import json
7
+ import logging
7
8
  import os
8
9
  import pprint
9
- import logging
10
- import collections
10
+ from contextlib import contextmanager
11
11
  from enum import Enum
12
+
12
13
  from .errors import DataJointError
13
14
 
14
15
  LOCALCONFIG = "dj_local_conf.json"
@@ -48,7 +49,8 @@ default = dict(
48
49
  "database.use_tls": None,
49
50
  "enable_python_native_blobs": True, # python-native/dj0 encoding support
50
51
  "add_hidden_timestamp": False,
51
- "filepath_checksum_size_limit": None, # file size limit for when to disable checksums
52
+ # file size limit for when to disable checksums
53
+ "filepath_checksum_size_limit": None,
52
54
  }
53
55
  )
54
56
 
@@ -117,6 +119,7 @@ class Config(collections.abc.MutableMapping):
117
119
  if filename is None:
118
120
  filename = LOCALCONFIG
119
121
  with open(filename, "r") as fid:
122
+ logger.info(f"DataJoint is configured from {os.path.abspath(filename)}")
120
123
  self._conf.update(json.load(fid))
121
124
 
122
125
  def save_local(self, verbose=False):
@@ -236,7 +239,8 @@ class Config(collections.abc.MutableMapping):
236
239
 
237
240
  def __init__(self, *args, **kwargs):
238
241
  self._conf = dict(default)
239
- self._conf.update(dict(*args, **kwargs)) # use the free update to set keys
242
+ # use the free update to set keys
243
+ self._conf.update(dict(*args, **kwargs))
240
244
 
241
245
  def __getitem__(self, key):
242
246
  return self._conf[key]
@@ -250,7 +254,9 @@ class Config(collections.abc.MutableMapping):
250
254
  valid_logging_levels = {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}
251
255
  if key == "loglevel":
252
256
  if value not in valid_logging_levels:
253
- raise ValueError(f"{'value'} is not a valid logging value")
257
+ raise ValueError(
258
+ f"'{value}' is not a valid logging value {tuple(valid_logging_levels)}"
259
+ )
254
260
  logger.setLevel(value)
255
261
 
256
262
 
@@ -260,11 +266,9 @@ config_files = (
260
266
  os.path.expanduser(n) for n in (LOCALCONFIG, os.path.join("~", GLOBALCONFIG))
261
267
  )
262
268
  try:
263
- config_file = next(n for n in config_files if os.path.exists(n))
269
+ config.load(next(n for n in config_files if os.path.exists(n)))
264
270
  except StopIteration:
265
- pass
266
- else:
267
- config.load(config_file)
271
+ logger.info("No config file was found.")
268
272
 
269
273
  # override login credentials with environment variables
270
274
  mapping = {
@@ -292,6 +296,8 @@ mapping = {
292
296
  )
293
297
  if v is not None
294
298
  }
295
- config.update(mapping)
299
+ if mapping:
300
+ logger.info(f"Overloaded settings {tuple(mapping)} from environment variables.")
301
+ config.update(mapping)
296
302
 
297
303
  logger.setLevel(log_levels[config["loglevel"]])
datajoint/table.py CHANGED
@@ -1,30 +1,32 @@
1
1
  import collections
2
- import itertools
2
+ import csv
3
3
  import inspect
4
- import platform
5
- import numpy as np
6
- import pandas
4
+ import itertools
5
+ import json
7
6
  import logging
8
- import uuid
9
- import csv
7
+ import platform
10
8
  import re
11
- import json
9
+ import uuid
12
10
  from pathlib import Path
13
- from .settings import config
14
- from .declare import declare, alter
15
- from .condition import make_condition
16
- from .expression import QueryExpression
11
+ from typing import Union
12
+
13
+ import numpy as np
14
+ import pandas
15
+
17
16
  from . import blob
18
- from .utils import user_choice, get_master, is_camel_case
19
- from .heading import Heading
17
+ from .condition import make_condition
18
+ from .declare import alter, declare
20
19
  from .errors import (
21
- DuplicateError,
22
20
  AccessError,
23
21
  DataJointError,
24
- UnknownAttributeError,
22
+ DuplicateError,
25
23
  IntegrityError,
24
+ UnknownAttributeError,
26
25
  )
27
- from typing import Union
26
+ from .expression import QueryExpression
27
+ from .heading import Heading
28
+ from .settings import config
29
+ from .utils import get_master, is_camel_case, user_choice
28
30
  from .version import __version__ as version
29
31
 
30
32
  logger = logging.getLogger(__name__.split(".")[0])
@@ -135,7 +137,7 @@ class Table(QueryExpression):
135
137
  sql, external_stores = alter(self.definition, old_definition, context)
136
138
  if not sql:
137
139
  if prompt:
138
- logger.warn("Nothing to alter.")
140
+ logger.warning("Nothing to alter.")
139
141
  else:
140
142
  sql = "ALTER TABLE {tab}\n\t".format(
141
143
  tab=self.full_table_name
@@ -518,7 +520,13 @@ class Table(QueryExpression):
518
520
  try:
519
521
  delete_count = table.delete_quick(get_count=True)
520
522
  except IntegrityError as error:
521
- match = foreign_key_error_regexp.match(error.args[0]).groupdict()
523
+ match = foreign_key_error_regexp.match(error.args[0])
524
+ if match is None:
525
+ raise DataJointError(
526
+ "Cascading deletes failed because the error message is missing foreign key information."
527
+ "Make sure you have REFERENCES privilege to all dependent tables."
528
+ ) from None
529
+ match = match.groupdict()
522
530
  # if schema name missing, use table
523
531
  if "`.`" not in match["child"]:
524
532
  match["child"] = "{}.{}".format(
@@ -641,7 +649,7 @@ class Table(QueryExpression):
641
649
  # Confirm and commit
642
650
  if delete_count == 0:
643
651
  if safemode:
644
- logger.warn("Nothing to delete.")
652
+ logger.warning("Nothing to delete.")
645
653
  if transaction:
646
654
  self.connection.cancel_transaction()
647
655
  elif not transaction:
@@ -651,12 +659,12 @@ class Table(QueryExpression):
651
659
  if transaction:
652
660
  self.connection.commit_transaction()
653
661
  if safemode:
654
- logger.info("Deletes committed.")
662
+ logger.info("Delete committed.")
655
663
  else:
656
664
  if transaction:
657
665
  self.connection.cancel_transaction()
658
666
  if safemode:
659
- logger.warn("Deletes cancelled")
667
+ logger.warning("Delete cancelled")
660
668
  return delete_count
661
669
 
662
670
  def drop_quick(self):
@@ -724,11 +732,6 @@ class Table(QueryExpression):
724
732
  ).fetchone()
725
733
  return ret["Data_length"] + ret["Index_length"]
726
734
 
727
- def show_definition(self):
728
- raise AttributeError(
729
- "show_definition is deprecated. Use the describe method instead."
730
- )
731
-
732
735
  def describe(self, context=None, printout=False):
733
736
  """
734
737
  :return: the definition string for the query using DataJoint DDL.
datajoint/user_tables.py CHANGED
@@ -3,10 +3,11 @@ Hosts the table tiers, user tables should be derived from.
3
3
  """
4
4
 
5
5
  import re
6
- from .table import Table
6
+
7
7
  from .autopopulate import AutoPopulate
8
- from .utils import from_camel_case, ClassProperty
9
8
  from .errors import DataJointError
9
+ from .table import Table
10
+ from .utils import ClassProperty, from_camel_case
10
11
 
11
12
  _base_regexp = r"[a-z][a-z0-9]*(_[a-z][a-z0-9]*)*"
12
13
 
datajoint/utils.py CHANGED
@@ -1,8 +1,9 @@
1
1
  """General-purpose utilities"""
2
2
 
3
3
  import re
4
- from pathlib import Path
5
4
  import shutil
5
+ from pathlib import Path
6
+
6
7
  from .errors import DataJointError
7
8
 
8
9
 
@@ -146,3 +147,5 @@ def parse_sql(filepath):
146
147
  if line.endswith(delimiter):
147
148
  yield " ".join(statement)
148
149
  statement = []
150
+ if statement:
151
+ yield " ".join(statement)
datajoint/version.py CHANGED
@@ -1,3 +1,6 @@
1
- __version__ = "0.14.3"
1
+ # version bump auto-managed by GitHub Actions:
2
+ # label_prs.yaml(prep), release.yaml(bump), post_release.yaml(edit)
3
+ # manually setting this version will eventually be overwritten by the above actions
4
+ __version__ = "0.14.5"
2
5
 
3
6
  assert len(__version__) <= 10 # The log table limits version to the 10 characters
@@ -1,11 +1,11 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: datajoint
3
- Version: 0.14.3
4
- Summary: A relational data pipeline framework.
3
+ Version: 0.14.5
4
+ Summary: DataJoint for Python is a framework for scientific workflow management based on relational principles. DataJoint is built on the foundation of the relational data model and prescribes a consistent method for organizing, populating, computing, and querying data.
5
5
  Author: Raphael Guzman, Edgar Walker
6
6
  Author-email: Dimitri Yatsenko <dimitri@datajoint.com>, DataJoint Contributors <support@datajoint.com>
7
7
  Maintainer-email: Dimitri Yatsenko <dimitri@datajoint.com>, DataJoint Contributors <support@datajoint.com>
8
- License: GNU LESSER GENERAL PUBLIC LICENSE
8
+ License: GNU LESSER GENERAL PUBLIC LICENSE
9
9
  Version 2.1, February 1999
10
10
 
11
11
  Copyright (C) 1991, 1999 Free Software Foundation, Inc.
@@ -510,55 +510,165 @@ License: GNU LESSER GENERAL PUBLIC LICENSE
510
510
 
511
511
  That's all there is to it!
512
512
 
513
- Project-URL: Homepage, https://datajoint.com/docs
514
- Project-URL: Documentation, https://datajoint.com/docs
513
+ Project-URL: Homepage, https://docs.datajoint.com/
514
+ Project-URL: Documentation, https://docs.datajoint.com/
515
515
  Project-URL: Repository, https://github.com/datajoint/datajoint-python
516
516
  Project-URL: Bug Tracker, https://github.com/datajoint/datajoint-python/issues
517
- Project-URL: Changelog, https://github.com/datajoint/datajoint-python/blob/master/CHANGELOG.md
518
- Keywords: database,data pipelines,scientific computing,automated research workflows
517
+ Project-URL: Release Notes, https://github.com/datajoint/datajoint-python/releases
518
+ Keywords: database,automated,automation,compute,data,pipeline,workflow,scientific,science,research,neuroscience,bioinformatics,bio-informatics,datajoint
519
519
  Classifier: Programming Language :: Python
520
- Requires-Python: <4.0,>=3.8
520
+ Classifier: Development Status :: 5 - Production/Stable
521
+ Classifier: Intended Audience :: Science/Research
522
+ Classifier: Intended Audience :: Healthcare Industry
523
+ Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)
524
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
525
+ Classifier: Topic :: Scientific/Engineering
526
+ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
527
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
528
+ Requires-Python: <4.0,>=3.9
521
529
  Description-Content-Type: text/markdown
522
530
  License-File: LICENSE.txt
523
531
  Requires-Dist: numpy
524
- Requires-Dist: pymysql >=0.7.2
532
+ Requires-Dist: pymysql>=0.7.2
533
+ Requires-Dist: deepdiff
525
534
  Requires-Dist: pyparsing
526
535
  Requires-Dist: ipython
527
536
  Requires-Dist: pandas
528
537
  Requires-Dist: tqdm
529
538
  Requires-Dist: networkx
530
539
  Requires-Dist: pydot
531
- Requires-Dist: minio >=7.0.0
540
+ Requires-Dist: minio>=7.0.0
532
541
  Requires-Dist: matplotlib
533
542
  Requires-Dist: otumat
534
543
  Requires-Dist: faker
535
544
  Requires-Dist: cryptography
536
545
  Requires-Dist: urllib3
546
+ Requires-Dist: setuptools
537
547
  Provides-Extra: test
538
- Requires-Dist: pytest ; extra == 'test'
539
- Requires-Dist: pytest-cov ; extra == 'test'
540
- Requires-Dist: black ==24.2.0 ; extra == 'test'
541
- Requires-Dist: flake8 ; extra == 'test'
542
-
543
- [![DOI](https://zenodo.org/badge/16774/datajoint/datajoint-python.svg)](https://zenodo.org/badge/latestdoi/16774/datajoint/datajoint-python)
544
- [![Coverage Status](https://coveralls.io/repos/datajoint/datajoint-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/datajoint/datajoint-python?branch=master)
545
- [![PyPI version](https://badge.fury.io/py/datajoint.svg)](http://badge.fury.io/py/datajoint)
546
- [![Slack](https://img.shields.io/badge/slack-chat-green.svg)](https://datajoint.slack.com/)
548
+ Requires-Dist: pytest; extra == "test"
549
+ Requires-Dist: pytest-cov; extra == "test"
550
+ Provides-Extra: dev
551
+ Requires-Dist: pre-commit; extra == "dev"
552
+ Requires-Dist: black==24.2.0; extra == "dev"
553
+ Requires-Dist: flake8; extra == "dev"
554
+ Requires-Dist: isort; extra == "dev"
555
+ Requires-Dist: codespell; extra == "dev"
556
+ Requires-Dist: pytest; extra == "dev"
557
+ Requires-Dist: pytest-cov; extra == "dev"
558
+ Dynamic: license-file
547
559
 
548
560
  # Welcome to DataJoint for Python!
549
561
 
550
- DataJoint for Python is a framework for scientific workflow management based on
551
- relational principles. DataJoint is built on the foundation of the relational data
552
- model and prescribes a consistent method for organizing, populating, computing, and
562
+ <table>
563
+ <!-- Thanks to Zarr for providing badge insights! -->
564
+ <!-- https://github.com/zarr-developers/zarr-python/blob/main/README.md -->
565
+ <tr>
566
+ <td>PyPI</td>
567
+ <td>
568
+ <a href="https://pypi.org/project/datajoint/">
569
+ <img src="https://img.shields.io/pypi/v/datajoint?color=blue" alt="pypi release" />
570
+ </a>
571
+ <br>
572
+ <a href="https://pypi.org/project/datajoint/">
573
+ <img src="https://img.shields.io/pypi/dm/datajoint?color=yellow" alt="pypi downloads" />
574
+ </a>
575
+ </td>
576
+ </tr>
577
+ <tr>
578
+ <td>Conda Forge</td>
579
+ <td>
580
+ <a href="https://anaconda.org/conda-forge/datajoint">
581
+ <img src="https://img.shields.io/conda/vn/conda-forge/datajoint?color=brightgreen" alt="conda-forge release" />
582
+ </a>
583
+ <br>
584
+ <a href="https://anaconda.org/conda-forge/datajoint">
585
+ <img src="https://img.shields.io/conda/dn/conda-forge/datajoint?color=brightgreen" alt="conda-forge downloads" />
586
+ </a>
587
+ </td>
588
+ </tr>
589
+ <tr>
590
+ <td>Since Release</td>
591
+ <td>
592
+ <a id="commit-since-release-link" href="https://github.com/datajoint/datajoint-python/compare/v0.14.5...master">
593
+ <img id="commit-since-release-img" src="https://img.shields.io/github/commits-since/datajoint/datajoint-python/v0.14.5?color=red" alt="commit since last release" />
594
+ </a>
595
+ </td>
596
+ </tr>
597
+ <tr>
598
+ <td>Test Status</td>
599
+ <td>
600
+ <a href="https://github.com/datajoint/datajoint-python/actions/workflows/test.yaml">
601
+ <img src="https://github.com/datajoint/datajoint-python/actions/workflows/test.yaml/badge.svg" alt="test status" />
602
+ </a>
603
+ </td>
604
+ </tr>
605
+ <tr>
606
+ <td>Release Status</td>
607
+ <td>
608
+ <a href="https://github.com/datajoint/datajoint-python/actions/workflows/post_draft_release_published.yaml">
609
+ <img src="https://github.com/datajoint/datajoint-python/actions/workflows/post_draft_release_published.yaml/badge.svg" alt="release status" />
610
+ </a>
611
+ </td>
612
+ </tr>
613
+ <tr>
614
+ <td>Doc Status</td>
615
+ <td>
616
+ <a href="https://docs.datajoint.com">
617
+ <img src="https://github.com/datajoint/datajoint-python/actions/workflows/pages/pages-build-deployment/badge.svg" alt="doc status" />
618
+ </a>
619
+ </td>
620
+ </tr>
621
+ <tr>
622
+ <td>Coverage</td>
623
+ <td>
624
+ <a href="https://coveralls.io/github/datajoint/datajoint-python?branch=master">
625
+ <img src="https://coveralls.io/repos/datajoint/datajoint-python/badge.svg?branch=master&service=github" alt="coverage"/>
626
+ </a>
627
+ </td>
628
+ </tr>
629
+ <tr>
630
+ <td>Developer Chat</td>
631
+ <td>
632
+ <a href="https://datajoint.slack.com/">
633
+ <img src="https://img.shields.io/badge/slack-datajoint-purple.svg" alt="datajoint slack"/>
634
+ </a>
635
+ </td>
636
+ </tr>
637
+ <tr>
638
+ <td>License</td>
639
+ <td>
640
+ <a href="https://github.com/datajoint/datajoint-python/blob/master/LICENSE.txt">
641
+ <img src="https://img.shields.io/github/license/datajoint/datajoint-python" alt="LGPL-2.1" />
642
+ </a>
643
+ </td>
644
+ </tr>
645
+ <tr>
646
+ <td>Citation</td>
647
+ <td>
648
+ <a href="https://doi.org/10.1101/031658">
649
+ <img src="https://img.shields.io/badge/DOI-10.1101/bioRxiv.031658-B31B1B.svg" alt="bioRxiv">
650
+ </a>
651
+ <br>
652
+ <a href="https://doi.org/10.5281/zenodo.6829062">
653
+ <img src="https://zenodo.org/badge/DOI/10.5281/zenodo.6829062.svg" alt="zenodo">
654
+ </a>
655
+ </td>
656
+ </tr>
657
+
658
+ </table>
659
+
660
+ DataJoint for Python is a framework for scientific workflow management based on
661
+ relational principles. DataJoint is built on the foundation of the relational data
662
+ model and prescribes a consistent method for organizing, populating, computing, and
553
663
  querying data.
554
664
 
555
- DataJoint was initially developed in 2009 by Dimitri Yatsenko in Andreas Tolias' Lab at
556
- Baylor College of Medicine for the distributed processing and management of large
557
- volumes of data streaming from regular experiments. Starting in 2011, DataJoint has
558
- been available as an open-source project adopted by other labs and improved through
665
+ DataJoint was initially developed in 2009 by Dimitri Yatsenko in Andreas Tolias' Lab at
666
+ Baylor College of Medicine for the distributed processing and management of large
667
+ volumes of data streaming from regular experiments. Starting in 2011, DataJoint has
668
+ been available as an open-source project adopted by other labs and improved through
559
669
  contributions from several developers.
560
- Presently, the primary developer of DataJoint open-source software is the company
561
- DataJoint (https://datajoint.com).
670
+ Presently, the primary developer of DataJoint open-source software is the company
671
+ DataJoint (<https://datajoint.com>).
562
672
 
563
673
  ## Data Pipeline Example
564
674
 
@@ -580,13 +690,13 @@ DataJoint (https://datajoint.com).
580
690
  pip install datajoint
581
691
  ```
582
692
 
583
- - [Documentation & Tutorials](https://datajoint.com/docs/core/datajoint-python/)
693
+ - [Documentation & Tutorials](https://docs.datajoint.com/core/datajoint-python/)
584
694
 
585
695
  - [Interactive Tutorials](https://github.com/datajoint/datajoint-tutorials) on GitHub Codespaces
586
696
 
587
- - [DataJoint Elements](https://datajoint.com/docs/elements/) - Catalog of example pipelines for neuroscience experiments
697
+ - [DataJoint Elements](https://docs.datajoint.com/elements/) - Catalog of example pipelines for neuroscience experiments
588
698
 
589
699
  - Contribute
590
- - [Development Environment](https://datajoint.com/docs/core/datajoint-python/latest/develop/)
700
+ - [Contribution Guidelines](https://docs.datajoint.com/about/contribute/)
591
701
 
592
- - [Guidelines](https://datajoint.com/docs/about/contribute/)
702
+ - [Developer Guide](https://docs.datajoint.com/core/datajoint-python/latest/develop/)
@@ -0,0 +1,34 @@
1
+ datajoint/__init__.py,sha256=uKCJiD2Vx-smjvfEPVBvxE3VXfm27WfHwKmF2s0r31w,2048
2
+ datajoint/admin.py,sha256=4FEQxrFtWz3Q9h9OCvytDsnx-W6HxXgsjF-TypHfRd8,4500
3
+ datajoint/attribute_adapter.py,sha256=bMcTyb0hIPb6CMVaevZ3FUMkWPbzMCoIj8gJvkQ8tI8,2333
4
+ datajoint/autopopulate.py,sha256=ZhWmIcuxpW9Dd_vYUMalFeAccd9M_y7PQ0w7RKeye0I,18681
5
+ datajoint/blob.py,sha256=0XK_UBesZja-UjDOPY0gNLVURaXcTCN-rDWTc2VbEXU,21968
6
+ datajoint/cli.py,sha256=4ZZAtAn3_7bqi2hMbn_I541RIWDEcwHOwqZyEvHbr0E,2065
7
+ datajoint/condition.py,sha256=zZhq2F0FslZkp2oiMvaCee0FGxxd9mlvZPVeDvM69SU,11848
8
+ datajoint/connection.py,sha256=zOh3Akvxlb7WeciTFQNuUBhWxhziwWReGCRVGDxZ1z8,15984
9
+ datajoint/declare.py,sha256=rXtH8riupNGXWIouxLvwta9Zwm4I2quTktBrP0fPZTA,21106
10
+ datajoint/dependencies.py,sha256=LDMVZ7tKZWrh7mbjECk73Jk1gXSxVOsLefwJSRXe3vg,8413
11
+ datajoint/diagram.py,sha256=3a5Wrz9cjbOp4YD7C-fRsvZY1zqGLw8g6qso16ml-4E,18027
12
+ datajoint/errors.py,sha256=FuD5QO-cIt4fNf8BSmjwypuM-k0P3qNYcjjuhPiE7jM,3337
13
+ datajoint/expression.py,sha256=HKS-Gt1HbOs1hgwFHci2d2MbQgecZWaKtJtE1HaPVSA,39098
14
+ datajoint/external.py,sha256=9p0M81zR8ifks2_Q4KMP9Rq14bqAAZJGHLXR5OiMjFU,19127
15
+ datajoint/fetch.py,sha256=LU8osYfDmEIDobep7I0xNT8DxuwrnfXvL8yAk_0oY5A,12991
16
+ datajoint/hash.py,sha256=v__rn57UNQaO2LVC7ls1mdNdEk4TP8rKQr3Z3n0nGII,1192
17
+ datajoint/heading.py,sha256=LJLIYMT2hwT2XgNbxOKBUcU3fcyuHpD2J5l9nWDI05k,19417
18
+ datajoint/jobs.py,sha256=T1m921P_QOo7keY9bm1RUE0hvZi2G4VWhb4zgTpMWmw,5630
19
+ datajoint/logging.py,sha256=n_dRV3CuTef7wYrVIbhSm9pAKDMWm_XD4oOJ1kMIen4,687
20
+ datajoint/plugin.py,sha256=VA0Wc8gxDMB1cy4MoBN40TbrkfWD-Cqi_j1JQN-HXYA,1555
21
+ datajoint/preview.py,sha256=woHXMVVkW1E9AkAoNjRQdcCkcaW4svB0oHrL4oEXCNE,4806
22
+ datajoint/s3.py,sha256=-BU65VYY3I1l9wD1JeIotfOr-PQ5BzT85fbFE6iYtIY,4083
23
+ datajoint/schemas.py,sha256=xRY5OMlBV9Qup22CFP5gwcvRCOnOUbzNSn7EGtGHe40,20063
24
+ datajoint/settings.py,sha256=fJ5A7HR8aTb0HvN4GQ4eLwtmQxdyB11l6sMiguaQoz8,9406
25
+ datajoint/table.py,sha256=vVzeec-z1tFKYFDWCh6ArJi5sysdNTJWaTteAavwIsE,45185
26
+ datajoint/user_tables.py,sha256=vcnO-8NET543LwlzvI6y8yTmw-UHMUe7VuMk1EKp-8I,6941
27
+ datajoint/utils.py,sha256=m-tnRa65dkwdCV6qqUG7kYHsIaFJGPSVr22QpHzeCZc,4576
28
+ datajoint/version.py,sha256=0J3S_RIdt9bVt0MteMQlEy0auezr5cWmUqFiP90qpzw,302
29
+ datajoint-0.14.5.dist-info/licenses/LICENSE.txt,sha256=7VXUyqv9idbgJNkr3OFy0vfuSqZa3JHmC2TpSavBpss,26444
30
+ datajoint-0.14.5.dist-info/METADATA,sha256=vnpbGSX4jahjFspaxbLoq972sKd5qKxi4hl_r_iU5UA,37822
31
+ datajoint-0.14.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
32
+ datajoint-0.14.5.dist-info/entry_points.txt,sha256=RLoyQD_Hz9HdlKG-ei0SlHDxuaTpasDJ6gZcvj9iZG0,71
33
+ datajoint-0.14.5.dist-info/top_level.txt,sha256=GLPeIbF_-lu3lpcSazCl0KUco1v2rr5uyUvieYAniLU,10
34
+ datajoint-0.14.5.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.1.0)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,34 +0,0 @@
1
- datajoint/__init__.py,sha256=BRWCpaIv96Usbn5-XasScBawMBl_1bbPAZoKMjAcmfE,2068
2
- datajoint/admin.py,sha256=Bymh3YEBAFWlWXsB0uKw4DAz5fY8-0eBxNWOt2_F_sU,4498
3
- datajoint/attribute_adapter.py,sha256=SvhBx4HumSYhpA-MqJd7Cuvr4t-oVk-ZYurLzEbU7qw,2332
4
- datajoint/autopopulate.py,sha256=k38W2FreoY-I6FhGNH-Ms_iClcS1WHTQjABnsWK1uXw,14307
5
- datajoint/blob.py,sha256=UOCcR-fbVRdtzgHWUlQYrwNjwp5-N6FDQkFffJNc_Lk,21975
6
- datajoint/cli.py,sha256=r5jD9f_BNW218XvRMd_p_xl1Wz6HJZr_JYx5Kt44170,2064
7
- datajoint/condition.py,sha256=KzeVfbCSfb0FUL04-pdbdsIvP_tt1R85W-wb8MmJzgs,11848
8
- datajoint/connection.py,sha256=CXQeGob99CI92ZSkhmu29-eGBTGxtNDD8Y-ZH6HOYJk,15869
9
- datajoint/declare.py,sha256=mxqATq_XDLD18y1hIKHRSdRxorm5xHIFLZdqfLjkdjk,21104
10
- datajoint/dependencies.py,sha256=ZpnKFRXQTlVzNgRe8ar9ppjalFIivjRdgPw3a2Q3XPM,8411
11
- datajoint/diagram.py,sha256=Va1Dc0ZomYwaABNQmmWJVtnfQ6tSk6CYIaDfLJdKXaU,18044
12
- datajoint/errors.py,sha256=FuD5QO-cIt4fNf8BSmjwypuM-k0P3qNYcjjuhPiE7jM,3337
13
- datajoint/expression.py,sha256=IJ83BWlLO_sptOatTXHET3myq1fSp8OjB8FFH_ciFcA,39097
14
- datajoint/external.py,sha256=Vi74g2Eyp8JGwxPcbsRo_IiW3PggsvyHEIoQjIk_yBw,19122
15
- datajoint/fetch.py,sha256=d6bbxrqK-muif8XNwnrf2u-PX7Diy0dE8pKO9GbJwIw,12989
16
- datajoint/hash.py,sha256=0ZkewC3-reEfm2wic85w6EIokuYq0jvLzrXfoNCXeiM,1192
17
- datajoint/heading.py,sha256=4BUL39e3sdu83IEIyJE8MpuAbZuxlGfltB7Zi6r1NgE,19416
18
- datajoint/jobs.py,sha256=25CPQ4G2JDbQdPp5oSfgTvJz8G6K1R221XB1b-xAZ4k,5629
19
- datajoint/logging.py,sha256=n_dRV3CuTef7wYrVIbhSm9pAKDMWm_XD4oOJ1kMIen4,687
20
- datajoint/plugin.py,sha256=G4VlrCSV64KBjn0XQH-_3UBZUnF4PCatweu7zwhqfpw,1553
21
- datajoint/preview.py,sha256=MXOEJaivLRZe48QtTKK4oNJGeZ5kQP_Srnm4_-s5HRk,4808
22
- datajoint/s3.py,sha256=F9nO86igtNlYevjpjC5NcnohAejMHmlrvIrUI-ayTag,4081
23
- datajoint/schemas.py,sha256=aM-S7w7qOj6bjJQowPb4fOb4xRegeCm7W52jc_tV6Fc,20054
24
- datajoint/settings.py,sha256=WpN-02M0-tjHvAG1JCNoDSP4nJT3Iw7TzC9jwibO73I,9126
25
- datajoint/table.py,sha256=Q65Kmtg3VqW0uH4eQru89rAf4PoEB0kOkYqAaipPypo,44960
26
- datajoint/user_tables.py,sha256=KbPgX45FX-i3ZmymdJZ99Q8K_ScZ-U3ehuZPmRjzBBM,6940
27
- datajoint/utils.py,sha256=QuyVmcRSu1yJb8sYm88l2rLg6HGEpazStyy6GXeWfFM,4515
28
- datajoint/version.py,sha256=wRuI_KwYiCBuiVu-Dh2r2T0Bvw8eUcRW7vsGAym-99A,107
29
- datajoint-0.14.3.dist-info/LICENSE.txt,sha256=7VXUyqv9idbgJNkr3OFy0vfuSqZa3JHmC2TpSavBpss,26444
30
- datajoint-0.14.3.dist-info/METADATA,sha256=epAqlaqqDJ1PvIUAHsHJC2Hq5lddQUZc1FieqGCoTxQ,34106
31
- datajoint-0.14.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
32
- datajoint-0.14.3.dist-info/entry_points.txt,sha256=RLoyQD_Hz9HdlKG-ei0SlHDxuaTpasDJ6gZcvj9iZG0,71
33
- datajoint-0.14.3.dist-info/top_level.txt,sha256=GLPeIbF_-lu3lpcSazCl0KUco1v2rr5uyUvieYAniLU,10
34
- datajoint-0.14.3.dist-info/RECORD,,