sl-shared-assets 5.0.0.tar.gz → 5.0.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of sl-shared-assets might be problematic.
Files changed (36)
  1. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/PKG-INFO +2 -2
  2. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/README.md +1 -1
  3. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/pyproject.toml +1 -1
  4. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/__init__.py +4 -0
  5. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/server/pipeline.py +22 -7
  6. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/.gitignore +0 -0
  7. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/LICENSE +0 -0
  8. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/Makefile +0 -0
  9. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/make.bat +0 -0
  10. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/source/api.rst +0 -0
  11. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/source/conf.py +0 -0
  12. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/source/index.rst +0 -0
  13. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/docs/source/welcome.rst +0 -0
  14. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_lin.yml +0 -0
  15. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_lin_spec.txt +0 -0
  16. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_osx.yml +0 -0
  17. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_osx_spec.txt +0 -0
  18. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_win.yml +0 -0
  19. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/envs/slsa_dev_win_spec.txt +0 -0
  20. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/command_line_interfaces/__init__.py +0 -0
  21. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/command_line_interfaces/configure.py +0 -0
  22. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/command_line_interfaces/manage.py +0 -0
  23. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/data_classes/__init__.py +0 -0
  24. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/data_classes/configuration_data.py +0 -0
  25. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/data_classes/runtime_data.py +0 -0
  26. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/data_classes/session_data.py +0 -0
  27. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/data_classes/surgery_data.py +0 -0
  28. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/py.typed +0 -0
  29. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/server/__init__.py +0 -0
  30. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/server/job.py +0 -0
  31. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/server/server.py +0 -0
  32. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/tools/__init__.py +0 -0
  33. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/tools/packaging_tools.py +0 -0
  34. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/tools/project_management_tools.py +0 -0
  35. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/tools/transfer_tools.py +0 -0
  36. {sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/tox.ini +0 -0
{sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 5.0.0
+ Version: 5.0.1
  Summary: Provides data acquisition and processing assets shared between Sun (NeuroAI) lab libraries.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -755,7 +755,7 @@ manage the data at all stages of Sun lab data workflow: acquisition, processing,
  - [Authors](#authors)
  - [License](#license)
  - [Acknowledgements](#Acknowledgments)
- -
+
  ___

  ## Dependencies
{sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/README.md

@@ -34,7 +34,7 @@ manage the data at all stages of Sun lab data workflow: acquisition, processing,
  - [Authors](#authors)
  - [License](#license)
  - [Acknowledgements](#Acknowledgments)
- -
+
  ___

  ## Dependencies
{sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/pyproject.toml

@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
  # Project metdata section. Provides the genral ID information about the project.
  [project]
  name = "sl-shared-assets"
- version = "5.0.0"
+ version = "5.0.1"
  description = "Provides data acquisition and processing assets shared between Sun (NeuroAI) lab libraries."
  readme = "README.md"
  license = { file = "LICENSE" }
{sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/__init__.py

@@ -11,6 +11,7 @@ from .tools import (
  ProjectManifest,
  delete_directory,
  transfer_directory,
+ generate_project_manifest,
  calculate_directory_checksum,
  )
  from .server import (
@@ -30,6 +31,7 @@ from .data_classes import (
  DrugData,
  ImplantData,
  SessionData,
+ SessionLock,
  SubjectData,
  SurgeryData,
  SessionTypes,
@@ -89,6 +91,7 @@ __all__ = [
  "Server",
  "ServerCredentials",
  "SessionData",
+ "SessionLock",
  "SessionTypes",
  "SubjectData",
  "SurgeryData",
@@ -98,6 +101,7 @@ __all__ = [
  "calculate_directory_checksum",
  "delete_directory",
  "generate_manager_id",
+ "generate_project_manifest",
  "get_credentials_file_path",
  "get_system_configuration_data",
  "get_working_directory",
{sl_shared_assets-5.0.0 → sl_shared_assets-5.0.1}/src/sl_shared_assets/server/pipeline.py

@@ -169,6 +169,12 @@ class ProcessingTracker(YamlConfig):
  _lock_path: str = field(init=False)
  """Stores the path to the .lock file used to ensure that only a single process can simultaneously access the data
  stored inside the tracker file."""
+ _job_count: int = 1
+ """Stores the total number of jobs to be executed as part of the tracked pipeline. This is used to
+ determine when the tracked pipeline is fully complete when tracking intermediate job outcomes."""
+ _completed_jobs: int = 0
+ """Stores the total number of jobs completed by the tracked pipeline. This is used together with the '_job_count'
+ field to determine when the tracked pipeline is fully complete."""

  def __post_init__(self) -> None:
  # Generates the .lock file path for the target tracker .yaml file.
@@ -210,7 +216,7 @@ class ProcessingTracker(YamlConfig):
  original._lock_path = None # type: ignore
  original.to_yaml(file_path=self.file_path)

- def start(self, manager_id: int) -> None:
+ def start(self, manager_id: int, job_count: int = 1) -> None:
  """Configures the tracker file to indicate that a manager process is currently executing the tracked processing
  runtime.

@@ -222,6 +228,10 @@ class ProcessingTracker(YamlConfig):
  Args:
  manager_id: The unique xxHash-64 hash identifier of the manager process which attempts to start the runtime
  tracked by this tracker file.
+ job_count: The total number of jobs to be executed as part of the tracked pipeline. This is used to make
+ the stop() method properly track the end of the pipeline as a whole, rather than the end of intermediate
+ jobs. Primarily, this is used by multi-job pipelines where all jobs are submitted as part of a single
+ phase and the job completion order cannot be known in-advance.

  Raises:
  TimeoutError: If the .lock file for the target .YAML file cannot be acquired within the timeout period.
@@ -253,6 +263,7 @@ class ProcessingTracker(YamlConfig):
  self._manager_id = manager_id
  self._complete = False
  self._encountered_error = False
+ self._job_count = job_count
  self._save_state()

  def error(self, manager_id: int) -> None:
@@ -331,12 +342,16 @@ class ProcessingTracker(YamlConfig):
  console.error(message=message, error=RuntimeError)
  raise RuntimeError(message) # Fallback to appease mypy, should not be reachable

- # Otherwise, marks the runtime as complete (stopped)
- self._running = False
- self._manager_id = -1
- self._complete = True
- self._encountered_error = False
- self._save_state()
+ # Increments completed job tracker
+ self._completed_jobs += 1
+
+ # If the pipeline has completed all required jobs, marks the runtime as complete (stopped)
+ if self._completed_jobs >= self._job_count:
+     self._running = False
+     self._manager_id = -1
+     self._complete = True
+     self._encountered_error = False
+     self._save_state()