sl-shared-assets 1.0.0rc16__tar.gz → 1.0.0rc18__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sl-shared-assets has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (32)
  1. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/PKG-INFO +3 -1
  2. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/pyproject.toml +4 -2
  3. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/data_classes/session_data.py +88 -23
  4. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/.gitignore +0 -0
  5. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/LICENSE +0 -0
  6. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/README.md +0 -0
  7. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/Makefile +0 -0
  8. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/make.bat +0 -0
  9. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/source/api.rst +0 -0
  10. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/source/conf.py +0 -0
  11. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/source/index.rst +0 -0
  12. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/docs/source/welcome.rst +0 -0
  13. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/envs/slsa_dev_lin.yml +0 -0
  14. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/envs/slsa_dev_lin_spec.txt +0 -0
  15. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/__init__.py +0 -0
  16. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/cli.py +0 -0
  17. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/data_classes/__init__.py +0 -0
  18. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/data_classes/configuration_data.py +0 -0
  19. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/data_classes/runtime_data.py +0 -0
  20. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/data_classes/surgery_data.py +0 -0
  21. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/py.typed +0 -0
  22. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/server/__init__.py +0 -0
  23. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/server/job.py +0 -0
  24. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/server/server.py +0 -0
  25. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/suite2p/__init__.py +0 -0
  26. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/suite2p/multi_day.py +0 -0
  27. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/suite2p/single_day.py +0 -0
  28. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/tools/__init__.py +0 -0
  29. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/tools/ascension_tools.py +0 -0
  30. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/tools/packaging_tools.py +0 -0
  31. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/src/sl_shared_assets/tools/transfer_tools.py +0 -0
  32. {sl_shared_assets-1.0.0rc16 → sl_shared_assets-1.0.0rc18}/tox.ini +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sl-shared-assets
3
- Version: 1.0.0rc16
3
+ Version: 1.0.0rc18
4
4
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
5
5
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
6
6
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -695,6 +695,7 @@ Requires-Dist: ataraxis-base-utilities<4,>=3
695
695
  Requires-Dist: ataraxis-data-structures<4,>=3.1.1
696
696
  Requires-Dist: ataraxis-time<4,>=3
697
697
  Requires-Dist: click<9,>=8
698
+ Requires-Dist: dacite<2,>=1
698
699
  Requires-Dist: paramiko<4,>=3.5.1
699
700
  Requires-Dist: simple-slurm<1,>=0
700
701
  Requires-Dist: tqdm<5,>=4
@@ -716,6 +717,7 @@ Requires-Dist: types-tqdm<5,>=4; extra == 'conda'
716
717
  Provides-Extra: condarun
717
718
  Requires-Dist: appdirs<2,>=1; extra == 'condarun'
718
719
  Requires-Dist: click<9,>=8; extra == 'condarun'
720
+ Requires-Dist: dacite<2,>=1; extra == 'condarun'
719
721
  Requires-Dist: paramiko<4,>=3.5.1; extra == 'condarun'
720
722
  Requires-Dist: tqdm<5,>=4; extra == 'condarun'
721
723
  Provides-Extra: dev
@@ -8,7 +8,7 @@ build-backend = "hatchling.build"
8
8
  # Project metdata section. Provides the genral ID information about the project.
9
9
  [project]
10
10
  name = "sl-shared-assets"
11
- version = "1.0.0rc16"
11
+ version = "1.0.0rc18"
12
12
  description = "Stores assets shared between multiple Sun (NeuroAI) lab data pipelines."
13
13
  readme = "README.md"
14
14
  license = { file = "LICENSE" }
@@ -50,6 +50,7 @@ dependencies = [
50
50
  "paramiko>=3.5.1, <4",
51
51
  "simple-slurm>=0,<1",
52
52
  "click>=8,<9",
53
+ "dacite>=1,<2",
53
54
  ]
54
55
 
55
56
  [project.urls]
@@ -68,6 +69,7 @@ condarun = [
68
69
  "tqdm>=4,<5",
69
70
  "paramiko>=3.5.1,<4",
70
71
  "click>=8,<9",
72
+ "dacite>=1,<2",
71
73
  ]
72
74
 
73
75
  # Dependencies known to be installable with conda for all development platforms (OSX ARM64, WIN AMD64, LIN AMD64).
@@ -93,7 +95,7 @@ conda = [
93
95
  # Types:
94
96
  "types-appdirs>=1,<2",
95
97
  "types-tqdm>=4,<5",
96
- "types-paramiko>=3,<4"
98
+ "types-paramiko>=3,<4",
97
99
  ]
98
100
 
99
101
  # Dependencies known to not be installable with conda for at least one supported development platform
@@ -15,6 +15,7 @@ import appdirs
15
15
  from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
16
16
  from ataraxis_data_structures import YamlConfig
17
17
  from ataraxis_time.time_helpers import get_timestamp
18
+ import dacite
18
19
 
19
20
  from .configuration_data import ExperimentConfiguration
20
21
 
@@ -1111,6 +1112,7 @@ class SessionData(YamlConfig):
1111
1112
  cls,
1112
1113
  session_path: Path,
1113
1114
  on_server: bool,
1115
+ make_directories: bool = True,
1114
1116
  ) -> "SessionData":
1115
1117
  """Loads the SessionData instance from the target session's session_data.yaml file.
1116
1118
 
@@ -1131,6 +1133,10 @@ class SessionData(YamlConfig):
1131
1133
  a non-server machine. Note, non-server runtimes use the same 'root' directory to store raw_data and
1132
1134
  processed_data subfolders. BioHPC server runtimes use different volumes (drives) to store these
1133
1135
  subfolders.
1136
+ make_directories: Determines whether to attempt creating any missing directories. Generally, this option
1137
+ is safe to be True for all destinations other than some specific BioHPC server runtimes, where some
1138
+ data is 'owned' by a general lab account and not the user account. These cases are only present for the
1139
+ sl-forgery library and are resolved by that library.
1134
1140
 
1135
1141
  Returns:
1136
1142
  An initialized SessionData instance for the session whose data is stored at the provided path.
@@ -1151,7 +1157,7 @@ class SessionData(YamlConfig):
1151
1157
  console.error(message=message, error=FileNotFoundError)
1152
1158
 
1153
1159
  # Loads class data from .yaml file
1154
- instance: SessionData = cls.from_yaml(file_path=session_data_path) # type: ignore
1160
+ instance: SessionData = cls._safe_load(path=session_data_path)
1155
1161
 
1156
1162
  # The method assumes that the 'donor' .yaml file is always stored inside the raw_data directory of the session
1157
1163
  # to be processed. Since the directory itself might have moved (between or even within the same PC) relative to
@@ -1162,7 +1168,6 @@ class SessionData(YamlConfig):
1162
1168
  # RAW DATA
1163
1169
  new_root = local_root.joinpath(instance.project_name, instance.animal_id, instance.session_name, "raw_data")
1164
1170
  instance.raw_data.resolve_paths(root_directory_path=new_root)
1165
- instance.raw_data.make_directories()
1166
1171
 
1167
1172
  # Uses the adjusted raw_data section to load the ProjectConfiguration instance. This is used below to resolve
1168
1173
  # all other SessionData sections, as it stores various required root directories.
@@ -1179,12 +1184,10 @@ class SessionData(YamlConfig):
1179
1184
  root_directory_path=new_root,
1180
1185
  experiment_name=instance.experiment_name,
1181
1186
  )
1182
- instance.configuration_data.make_directories()
1183
1187
 
1184
1188
  # DEEPLABCUT
1185
1189
  new_root = local_root.joinpath(instance.project_name, "deeplabcut")
1186
1190
  instance.deeplabcut_data.resolve_paths(root_directory_path=new_root)
1187
- instance.deeplabcut_data.make_directories()
1188
1191
 
1189
1192
  # Resolves the roots for all VRPC-specific sections that use the data from the ProjectConfiguration instance:
1190
1193
 
@@ -1242,25 +1245,26 @@ class SessionData(YamlConfig):
1242
1245
  instance.project_name, instance.animal_id, instance.session_name, "processed_data"
1243
1246
  )
1244
1247
  )
1245
- instance.processed_data.make_directories()
1246
-
1247
- # Ensures that project configuration and session data classes are present in both raw_data and processed_data
1248
- # directories. This ensures that all data of the session can always be traced to the parent project, animal,
1249
- # and session.
1250
- sh.copy2(
1251
- src=instance.raw_data.session_data_path,
1252
- dst=instance.processed_data.session_data_path,
1253
- )
1254
- sh.copy2(
1255
- src=instance.raw_data.project_configuration_path,
1256
- dst=instance.processed_data.project_configuration_path,
1257
- )
1258
1248
 
1259
1249
  # Generates data directory hierarchies that may be missing on the local machine
1260
- instance.raw_data.make_directories()
1261
- instance.configuration_data.make_directories()
1262
- instance.deeplabcut_data.make_directories()
1263
- instance.processed_data.make_directories()
1250
+ if make_directories:
1251
+ instance.raw_data.make_directories()
1252
+ instance.configuration_data.make_directories()
1253
+ instance.deeplabcut_data.make_directories()
1254
+ instance.processed_data.make_directories()
1255
+ instance.processed_data.make_directories()
1256
+
1257
+ # Ensures that project configuration and session data classes are present in both raw_data and
1258
+ # processed_data directories. This ensures that all data of the session can always be traced to the parent
1259
+ # project, animal, and session.
1260
+ sh.copy2(
1261
+ src=instance.raw_data.session_data_path,
1262
+ dst=instance.processed_data.session_data_path,
1263
+ )
1264
+ sh.copy2(
1265
+ src=instance.raw_data.project_configuration_path,
1266
+ dst=instance.processed_data.project_configuration_path,
1267
+ )
1264
1268
 
1265
1269
  # Returns the initialized SessionData instance to caller
1266
1270
  return instance
@@ -1274,6 +1278,67 @@ class SessionData(YamlConfig):
1274
1278
  create() method runtime.
1275
1279
  """
1276
1280
 
1281
+ # Generates a copy of the original class to avoid modifying the instance that will be used for further
1282
+ # processing
1283
+ origin = copy.deepcopy(self)
1284
+
1285
+ # Resets all path fields to null. These fields are not loaded from disk when the instance is loaded, so setting
1286
+ # them to null has no negative consequences. Conversely, keeping these fields with Path objects prevents the
1287
+ # SessionData instance from being loaded from disk.
1288
+ origin.raw_data = None # type: ignore
1289
+ origin.processed_data = None # type: ignore
1290
+ origin.configuration_data = None # type: ignore
1291
+ origin.deeplabcut_data = None # type: ignore
1292
+ origin.vrpc_persistent_data = None # type: ignore
1293
+ origin.scanimagepc_persistent_data = None # type: ignore
1294
+ origin.mesoscope_data = None # type: ignore
1295
+ origin.destinations = None # type: ignore
1296
+
1277
1297
  # Saves instance data as a .YAML file
1278
- self.to_yaml(file_path=self.raw_data.session_data_path)
1279
- self.to_yaml(file_path=self.processed_data.session_data_path)
1298
+ origin.to_yaml(file_path=self.raw_data.session_data_path)
1299
+ origin.to_yaml(file_path=self.processed_data.session_data_path)
1300
+
1301
+ @classmethod
1302
+ def _safe_load(cls, path: Path) -> "SessionData":
1303
+ """Loads a SessionData class instance into memory in a way that avoids collisions with outdated SessionData
1304
+ formats.
1305
+
1306
+ This method is used instead of the default method inherited from the YamlConfig class. Primarily, this is used
1307
+ to avoid errors with old SessionData class formats that contain some data that is either no longer present or
1308
+ cannot be loaded from YAML. Using this custom method ensures we can load any SessionData class, provided it
1309
+ contains the required header fields.
1310
+
1311
+ Returns:
1312
+ The SessionData instance initialized using the resolved header data.
1313
+ """
1314
+
1315
+ # Reads the file content without using the YAML parsing methods.
1316
+ with open(path, 'r') as f:
1317
+ content = f.read()
1318
+
1319
+ # Extracts the necessary fields using regex
1320
+ fields_to_keep = {}
1321
+
1322
+ # Defines the field patterns for each field to extract
1323
+ patterns = {
1324
+ "project_name": r'project_name:\s*(.+?)(?=\n\w|\n$)',
1325
+ "animal_id": r'animal_id:\s*(.+?)(?=\n\w|\n$)',
1326
+ "session_name": r'session_name:\s*(.+?)(?=\n\w|\n$)',
1327
+ "session_type": r'session_type:\s*(.+?)(?=\n\w|\n$)',
1328
+ "experiment_name": r'experiment_name:\s*(.+?)(?=\n\w|\n$)'
1329
+ }
1330
+
1331
+ # Extracts each field
1332
+ for key, pattern in patterns.items():
1333
+ match = re.search(pattern, content)
1334
+ if match:
1335
+ fields_to_keep[key] = match.group(1).strip()
1336
+ else:
1337
+ if key == "experiment_name":
1338
+ fields_to_keep[key] = "null" # Default for experiment_name
1339
+ else:
1340
+ fields_to_keep[key] = "" # Default for other fields
1341
+
1342
+ # Returns the data to caller
1343
+ return dacite.from_dict(data_class=cls, data=fields_to_keep) # type: ignore
1344
+