ONE-api 3.1.0__tar.gz → 3.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {one_api-3.1.0 → one_api-3.2.0/ONE_api.egg-info}/PKG-INFO +1 -1
  2. {one_api-3.1.0/ONE_api.egg-info → one_api-3.2.0}/PKG-INFO +1 -1
  3. {one_api-3.1.0 → one_api-3.2.0}/one/__init__.py +1 -1
  4. {one_api-3.1.0 → one_api-3.2.0}/one/alf/cache.py +12 -4
  5. {one_api-3.1.0 → one_api-3.2.0}/one/alf/path.py +210 -0
  6. {one_api-3.1.0 → one_api-3.2.0}/one/api.py +15 -8
  7. {one_api-3.1.0 → one_api-3.2.0}/one/webclient.py +1 -1
  8. {one_api-3.1.0 → one_api-3.2.0}/LICENSE +0 -0
  9. {one_api-3.1.0 → one_api-3.2.0}/MANIFEST.in +0 -0
  10. {one_api-3.1.0 → one_api-3.2.0}/ONE_api.egg-info/SOURCES.txt +0 -0
  11. {one_api-3.1.0 → one_api-3.2.0}/ONE_api.egg-info/dependency_links.txt +0 -0
  12. {one_api-3.1.0 → one_api-3.2.0}/ONE_api.egg-info/requires.txt +0 -0
  13. {one_api-3.1.0 → one_api-3.2.0}/ONE_api.egg-info/top_level.txt +0 -0
  14. {one_api-3.1.0 → one_api-3.2.0}/README.md +0 -0
  15. {one_api-3.1.0 → one_api-3.2.0}/one/alf/__init__.py +0 -0
  16. {one_api-3.1.0 → one_api-3.2.0}/one/alf/exceptions.py +0 -0
  17. {one_api-3.1.0 → one_api-3.2.0}/one/alf/io.py +0 -0
  18. {one_api-3.1.0 → one_api-3.2.0}/one/alf/spec.py +0 -0
  19. {one_api-3.1.0 → one_api-3.2.0}/one/converters.py +0 -0
  20. {one_api-3.1.0 → one_api-3.2.0}/one/params.py +0 -0
  21. {one_api-3.1.0 → one_api-3.2.0}/one/registration.py +0 -0
  22. {one_api-3.1.0 → one_api-3.2.0}/one/remote/__init__.py +0 -0
  23. {one_api-3.1.0 → one_api-3.2.0}/one/remote/aws.py +0 -0
  24. {one_api-3.1.0 → one_api-3.2.0}/one/remote/base.py +0 -0
  25. {one_api-3.1.0 → one_api-3.2.0}/one/remote/globus.py +0 -0
  26. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/datasets.pqt +0 -0
  27. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/params/.caches +0 -0
  28. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/params/.test.alyx.internationalbrainlab.org +0 -0
  29. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/1f187d80fd59677b395fcdb18e68e4401bfa1cc9 +0 -0
  30. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/3f51aa2e0baa42438467906f56a457c91a221898 +0 -0
  31. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/47893cf67c985e6361cdee009334963f49fb0746 +0 -0
  32. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/535d0e9a1e2c1efbdeba0d673b131e00361a2edb +0 -0
  33. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/5618bea3484a52cd893616f07903f0e49e023ba1 +0 -0
  34. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/6dc96f7e9bcc6ac2e7581489b9580a6cd3f28293 +0 -0
  35. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/db1731fb8df0208944ae85f76718430813a8bf50 +0 -0
  36. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/dcce48259bb929661f60a02a48563f70aa6185b3 +0 -0
  37. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/rest_responses/f530d6022f61cdc9e38cc66beb3cb71f3003c9a1 +0 -0
  38. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/sessions.pqt +0 -0
  39. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/test_dbs.json +0 -0
  40. {one_api-3.1.0 → one_api-3.2.0}/one/tests/fixtures/test_img.png +0 -0
  41. {one_api-3.1.0 → one_api-3.2.0}/one/util.py +0 -0
  42. {one_api-3.1.0 → one_api-3.2.0}/pyproject.toml +0 -0
  43. {one_api-3.1.0 → one_api-3.2.0}/requirements.txt +0 -0
  44. {one_api-3.1.0 → one_api-3.2.0}/setup.cfg +0 -0
{one_api-3.1.0 → one_api-3.2.0/ONE_api.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ONE-api
-Version: 3.1.0
+Version: 3.2.0
 Summary: Open Neurophysiology Environment
 Author: IBL Staff
 License: MIT
{one_api-3.1.0/ONE_api.egg-info → one_api-3.2.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ONE-api
-Version: 3.1.0
+Version: 3.2.0
 Summary: Open Neurophysiology Environment
 Author: IBL Staff
 License: MIT
{one_api-3.1.0 → one_api-3.2.0}/one/__init__.py
@@ -1,2 +1,2 @@
 """The Open Neurophysiology Environment (ONE) API."""
-__version__ = '3.1.0'
+__version__ = '3.2.0'
{one_api-3.1.0 → one_api-3.2.0}/one/alf/cache.py
@@ -22,6 +22,7 @@ from functools import partial
 from pathlib import Path
 import warnings
 import logging
+from copy import deepcopy
 
 import pandas as pd
 import numpy as np
@@ -275,6 +276,9 @@ def default_cache(origin=''):
 
     """
     table_meta = _metadata(origin)
+    # The origin is now a set, however we leave _metadata as Alyx relies on this and sets
+    # can't be serialized to JSON
+    table_meta['origin'] = set(filter(None, [origin]))
     return Bunch({
         'datasets': EMPTY_DATASETS_FRAME.copy(),
         'sessions': EMPTY_SESSIONS_FRAME.copy(),
@@ -283,7 +287,7 @@ def default_cache(origin=''):
             'loaded_time': None,
             'modified_time': None,
             'saved_time': None,
-            'raw': {k: table_meta.copy() for k in ('datasets', 'sessions')}}
+            'raw': {k: deepcopy(table_meta) for k in ('datasets', 'sessions')}}
    })
 
 
@@ -435,6 +439,10 @@ def load_tables(tables_dir, glob_pattern='*.pqt'):
        if not cache.index.is_monotonic_increasing:
            cache.sort_index(inplace=True)
 
+        # Ensure origin is a set (supports multiple origins)
+        meta['raw'][table]['origin'] = set(
+            filter(None, ensure_list(meta['raw'][table].get('origin', 'unknown'))))
+
        caches[table] = cache
 
    created = [datetime.datetime.fromisoformat(x['date_created'])
@@ -532,10 +540,10 @@ def merge_tables(cache, strict=False, origin=None, **kwargs):
        # Update the table metadata with the origin
        if origin is not None:
            table_meta = cache['_meta']['raw'].get(table, {})
-            if not table_meta.get('origin'):
-                table_meta['origin'] = origin
+            if 'origin' not in table_meta:
+                table_meta['origin'] = set(origin)
            else:
-                table_meta['origin'] = set((*ensure_list(table_meta['origin']), origin))
+                table_meta['origin'].add(origin)
            cache['_meta']['raw'][table] = table_meta
    cache['_meta']['modified_time'] = updated
    return updated
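
With these changes the 'origin' entry in each table's raw metadata is held as a set in memory (supporting tables merged from several databases) and only converted to a list when written to disk. A rough sketch of the resulting structure, with the key layout assumed from the diff rather than taken from the package docs:

    from one.alf.cache import default_cache

    cache = default_cache(origin='alyx')
    meta = cache['_meta']['raw']['datasets']  # assumed key layout
    meta['origin']                            # {'alyx'} -- a set while in memory
    sorted(meta['origin'])                    # ['alyx'] -- list form used when saving to parquet
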
{one_api-3.1.0 → one_api-3.2.0}/one/alf/path.py
@@ -859,6 +859,36 @@ class PureALFPath(pathlib.PurePath):  # py3.12 supports direct subclassing
         """tuple of str: the full ALF path parts, with empty strings for missing parts."""
         return tuple(p or '' for p in self.parse_alf_path(as_dict=False))
 
+    @property
+    def lab(self):
+        """str: The lab part of the ALF path, or an empty str if not present."""
+        return self.session_parts[0]
+
+    @property
+    def subject(self):
+        """str: The subject part of the ALF path, or an empty str if not present."""
+        return self.session_parts[1]
+
+    @property
+    def date(self):
+        """str: The date part of the ALF path, or an empty str if not present."""
+        return self.session_parts[2]
+
+    @property
+    def sequence(self):
+        """str: The number part of the ALF path, or an empty str if not present."""
+        return self.session_parts[3]
+
+    @property
+    def collection(self):
+        """str: The collection part of the ALF path, or an empty str if not present."""
+        return self.alf_parts[4]
+
+    @property
+    def revision(self):
+        """str: The revision part of the ALF path, or an empty str if not present."""
+        return self.alf_parts[5]
+
     @property
     def namespace(self):
         """str: The namespace part of the ALF name, or and empty str if not present."""
@@ -884,6 +914,134 @@ class PureALFPath(pathlib.PurePath):  # py3.12 supports direct subclassing
         """str: The extra part of the ALF name, or and empty str if not present."""
         return self.dataset_name_parts[4]
 
+    def with_lab(self, lab, strict=False):
+        """Return a new path with the ALF lab changed.
+
+        Parameters
+        ----------
+        lab : str
+            An ALF lab name part to use.
+        strict : bool, optional
+            If True, the lab part must be present in the path, otherwise the lab/Subjects/ part
+            is added if not present.
+
+        Returns
+        -------
+        PureALFPath
+            The same file path but with the lab part replaced with the input.
+
+        Raises
+        ------
+        ValueError
+            The lab name is invalid.
+        ALFInvalid
+            The path is not a valid ALF session path, or the lab part is not present in the path
+            when strict is True.
+
+        """
+        if not (lab and spec.regex('^{lab}$').match(lab)):
+            raise ValueError(f'Invalid lab name: {lab}')
+        if not self.subject or (strict and not self.lab):  # FIXME check logic
+            raise ALFInvalid(str(self))
+
+        pattern = spec.regex(SESSION_SPEC)
+        repl = fr'{lab}/Subjects/\g<subject>/\g<date>/\g<number>'
+        return self.__class__(pattern.sub(repl, self.as_posix(), count=1))
+
+    def with_subject(self, subject):
+        """Return a new path with the ALF subject changed.
+
+        Parameters
+        ----------
+        subject : str
+            An ALF subject name part to use.
+
+        Returns
+        -------
+        PureALFPath
+            The same file path but with the subject part replaced with the input.
+
+        Raises
+        ------
+        ValueError
+            The subject name is invalid.
+        ALFInvalid
+            The path is not a valid ALF session path.
+
+        """
+        if not (subject and spec.regex('^{subject}$').match(subject)):
+            raise ValueError(f'Invalid subject name: {subject}')
+        if not self.subject:
+            raise ALFInvalid(str(self))
+
+        pattern = spec.regex('{subject}/{date}/{number}')
+        repl = fr'{subject}/\g<date>/\g<number>'
+        return self.__class__(pattern.sub(repl, self.as_posix()), count=1)
+
+    def with_date(self, date):
+        """Return a new path with the ALF date changed.
+
+        Parameters
+        ----------
+        date : str, datetime.datetime, datetime.date
+            An ALF date part to use, in YYYY-MM-DD format.
+
+        Returns
+        -------
+        PureALFPath
+            The same file path but with the date part replaced with the input.
+
+        Raises
+        ------
+        ValueError
+            The date is not in YYYY-MM-DD format.
+        ALFInvalid
+            The path is not a valid ALF session path.
+
+        """
+        if date and not isinstance(date, str):
+            date = str(date)[:10]
+        if not (date and spec.regex('^{date}$').match(date)):
+            raise ValueError(f'Invalid date: {date}')
+        if not self.date:
+            raise ALFInvalid(str(self))
+
+        pattern = spec.regex('{subject}/{date}/{number}')
+        repl = fr'\g<subject>/{date}/\g<number>'
+        return self.__class__(pattern.sub(repl, self.as_posix()), count=1)
+
+    def with_sequence(self, number):
+        """Return a new path with the ALF number changed.
+
+        Parameters
+        ----------
+        number : str, int
+            An ALF number part to use, as a string or integer.
+
+        Returns
+        -------
+        PureALFPath
+            The same file path but with the number part replaced with the input.
+
+        Raises
+        ------
+        ValueError
+            The number is not a valid ALF number.
+        ALFInvalid
+            The path is not a valid ALF session path.
+
+        """
+        if isinstance(number, str):
+            number = int(number.strip())
+        if number is None or not spec.regex('^{number}$').match(str(number)):
+            raise ValueError(f'Invalid number: {number}')
+        if not self.sequence:
+            raise ALFInvalid(str(self))
+
+        pattern = spec.regex('{subject}/{date}/{number}')
+        repl = fr'\g<subject>/\g<date>/{number:03d}'
+        return self.__class__(pattern.sub(repl, self.as_posix()), count=1)
+
     def with_object(self, obj):
         """Return a new path with the ALF object changed.
 
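
These with_* methods validate the new part against the ALF spec regex and substitute it into the posix form of the path. A rough sketch of the intended use, with a hypothetical session path and behaviour inferred from the diff:

    p = PureALFPath('cortexlab/Subjects/KS005/2019-04-02/001')
    p.with_subject('KS023')    # swap the subject part
    p.with_date('2021-07-01')  # swap the date part
    p.with_sequence(2)         # swap the number part, zero-padded to '002'
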
@@ -1084,6 +1242,58 @@ class PureALFPath(pathlib.PurePath):  # py3.12 supports direct subclassing
         """
         return padded_sequence(path)
 
+    def with_collection(self, collection):
+        """Return a new path with the ALF collection part added/changed.
+
+        NB: The ALFPath must include the session parts (subject/date/number) for this to work.
+
+        Parameters
+        ----------
+        collection : str
+            An ALF collection part to use.
+
+        Returns
+        -------
+        PureALFPath
+            The same file path but with the collection part added or replaced with the input.
+
+        Raises
+        ------
+        ValueError
+            The collection name is invalid.
+            The path does not include the session parts (subject/date/number).
+        ALFInvalid
+            The path is not a valid ALF session path.
+
+        """
+        collection = pathlib.PurePath(collection or '').as_posix().strip('/')
+        if not (collection and spec.regex('^{collection}$').match(collection)):
+            raise ValueError(f'Invalid collection name: {collection}')
+        # Check path contains session parts
+        if not self.session_path():
+            raise ValueError(
+                'Cannot add collection to a path without session parts, e.g. subject/date/number'
+            )
+        # If the path is a session path, simply append the collection to it
+        if self.is_session_path():
+            return self.joinpath(collection)
+        # Otherwise substitute the collection with regex
+        string = self.as_posix()
+        if not self.is_dataset():
+            cpat = spec.regex(f'^{COLLECTION_SPEC}$')
+            if cpat.match(self.relative_to_session().as_posix() + '/'):
+                string += '/'  # ensure trailing slash for matching folder paths
+            else:
+                raise ALFInvalid(str(self))
+        # Replace the collection part in the path
+        # NB: We don't use SESSION_SPEC here to avoid handling optional lab part
+        pattern = spec.regex('{subject}/{date}/{number}/' + COLLECTION_SPEC)
+        match = pattern.search(string)
+        repl = fr'\g<subject>/\g<date>/\g<number>/{collection}/'
+        if match.groupdict()['revision']:
+            repl += r'#\g<revision>#/'
+        return self.__class__(pattern.sub(repl, string), count=1)
+
     def with_revision(self, revision):
         """Return a new path with the ALF revision part added/changed.
 
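
For a bare session path, with_collection simply joins the collection onto it; for deeper paths it substitutes any existing collection and preserves a #revision# folder. A short sketch, again with a hypothetical path and output inferred from the diff:

    session = PureALFPath('cortexlab/Subjects/KS005/2019-04-02/001')
    session.with_collection('alf/probe00')
    # -> cortexlab/Subjects/KS005/2019-04-02/001/alf/probe00
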
{one_api-3.1.0 → one_api-3.2.0}/one/api.py
@@ -6,7 +6,6 @@ import logging
 from weakref import WeakMethod
 from datetime import datetime, timedelta
 from functools import lru_cache, partial
-from itertools import chain
 from inspect import unwrap
 from pathlib import Path, PurePosixPath
 from typing import Any, Union, Optional, List
@@ -40,7 +39,7 @@ from one import util
 
 _logger = logging.getLogger(__name__)
 __all__ = ['ONE', 'One', 'OneAlyx']
-SAVE_ON_DELETE = (os.environ.get('ONE_SAVE_ON_DELETE') or '1').casefold() in ('true', '1')
+SAVE_ON_DELETE = (os.environ.get('ONE_SAVE_ON_DELETE') or '0').casefold() in ('true', '1')
 """bool: Whether to save modified cache tables on delete."""
 
 _logger.debug('ONE_SAVE_ON_DELETE: %s', SAVE_ON_DELETE)
@@ -135,20 +134,25 @@ class One(ConversionMixin):
         tables = tables or filter(lambda x: x[0] != '_', self._cache)
         return remove_table_files(self._tables_dir, tables)
 
-    def load_cache(self, tables_dir=None, **kwargs):
+    def load_cache(self, tables_dir=None, clobber=True, **kwargs):
         """Load parquet cache files from a local directory.
 
         Parameters
         ----------
         tables_dir : str, pathlib.Path
             An optional directory location of the parquet files, defaults to One._tables_dir.
+        clobber : bool
+            If true, the cache is loaded without merging with existing table files.
 
         Returns
        -------
         datetime.datetime
             A timestamp of when the cache was loaded.
         """
-        self._reset_cache()
+        if clobber:
+            self._reset_cache()
+        else:
+            raise NotImplementedError('clobber=False not implemented yet')
         self._tables_dir = Path(tables_dir or self._tables_dir or self.cache_dir)
         self._cache = load_tables(self._tables_dir)
 
@@ -161,7 +165,7 @@ class One(ConversionMixin):
         # prompt the user to delete them to improve load times
         raw_meta = self._cache['_meta'].get('raw', {}).values() or [{}]
         tagged = any(filter(None, flatten(x.get('database_tags') for x in raw_meta)))
-        origin = set(x['origin'] for x in raw_meta if 'origin' in x)
+        origin = set(filter(None, flatten(ensure_list(x.get('origin', [])) for x in raw_meta)))
         older = (self._cache['_meta']['created_time'] or datetime.now()) < datetime(2025, 2, 13)
         remote = not self.offline and self.mode == 'remote'
         if remote and origin == {'alyx'} and older and not self._web_client.silent and not tagged:
@@ -217,10 +221,12 @@ class One(ConversionMixin):
             created = created.isoformat(sep=' ', timespec='minutes')
             meta['raw'][table]['date_created'] = created
 
-        with FileLock(save_dir, log=_logger, timeout=TIMEOUT, timeout_action='delete'):
+        with FileLock(save_dir / '.ONE', log=_logger, timeout=TIMEOUT, timeout_action='delete'):
             _logger.info('Saving cache tables...')
             for table in filter(lambda x: not x[0] == '_', caches.keys()):
                 metadata = meta['raw'].get(table, {})
+                if isinstance(metadata.get('origin'), set):
+                    metadata['origin'] = list(metadata['origin'])
                 metadata['date_modified'] = modified.isoformat(sep=' ', timespec='minutes')
                 filename = save_dir.joinpath(f'{table}.pqt')
                 # Cast indices to str before saving
@@ -1696,7 +1702,8 @@ class OneAlyx(One):
             tag = tag or current_tags[0]  # For refreshes take the current tag as default
             different_tag = any(x != tag for x in current_tags)
             if not (clobber or different_tag):
-                super(OneAlyx, self).load_cache(tables_dir)  # Load any present cache
+                # Load any present cache
+                super(OneAlyx, self).load_cache(tables_dir, clobber=True)
                 cache_meta = self._cache.get('_meta', {})
                 raw_meta = cache_meta.get('raw', {}).values() or [{}]
 
@@ -1719,7 +1726,7 @@ class OneAlyx(One):
             # contain the same tag or origin, we need to download the remote one.
             origin = cache_info.get('origin', 'unknown')
             local_origin = (x.get('origin', []) for x in raw_meta)
-            local_origin = set(chain.from_iterable(map(ensure_list, local_origin)))
+            local_origin = set(flatten(map(ensure_list, local_origin)))
             different_origin = origin not in local_origin
             if fresh and not (different_tag or different_origin):
                 _logger.info('No newer cache available')
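
Two behavioural notes on these hunks: One.load_cache gains a clobber argument (only clobber=True is currently implemented), and the ONE_SAVE_ON_DELETE environment variable now defaults to off rather than on. A hedged sketch of opting back in; the variable name comes from the diff, and it must be set before one.api is imported because the flag is read at module import time:

    import os
    os.environ['ONE_SAVE_ON_DELETE'] = '1'  # re-enable saving modified cache tables on delete

    from one.api import ONE  # SAVE_ON_DELETE is evaluated here
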
{one_api-3.1.0 → one_api-3.2.0}/one/webclient.py
@@ -77,7 +77,7 @@ class _JSONEncoder(json.JSONEncoder):
 
 
 def _cache_response(method):
-    """Decorator for the generic request method for caching REST reponses.
+    """Decorator for the generic request method for caching REST responses.
 
     Caches the result of the query and on subsequent calls, returns cache instead of hitting the
     database.