sl-shared-assets 1.0.2__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


sl_shared_assets/cli.py CHANGED
@@ -10,6 +10,7 @@ from .server import generate_server_credentials
 from .data_classes import (
     SessionData,
     ExperimentState,
+    ProcessingTracker,
     ProjectConfiguration,
     MesoscopeSystemConfiguration,
     MesoscopeExperimentConfiguration,
@@ -65,6 +66,7 @@ def verify_session_integrity(session_path: str, create_processed_directories: bo
     processed session. This use case is fully automated and should not be triggered manually by the user.
     """
     session = Path(session_path)
+    session_data = SessionData.load(session_path=session)
 
     # Runs the verification process
     verify_session_checksum(
@@ -72,10 +74,12 @@ def verify_session_integrity(session_path: str, create_processed_directories: bo
     )
 
     # Checks the outcome of the verification process
-    session_data = SessionData.load(session_path=session)
-    if session_data.raw_data.verified_bin_path.exists():
+    tracker = ProcessingTracker(file_path=session_data.raw_data.integrity_verification_tracker_path)
+    if tracker.is_complete:
+        # noinspection PyTypeChecker
         console.echo(message=f"Session {session.stem} raw data integrity: Verified.", level=LogLevel.SUCCESS)
     else:
+        # noinspection PyTypeChecker
         console.echo(message=f"Session {session.stem} raw data integrity: Compromised!", level=LogLevel.ERROR)
 
 
@@ -120,6 +124,7 @@ def generate_project_manifest_file(
         output_directory=Path(output_directory),
         processed_project_directory=Path(project_processed_path) if project_processed_path else None,
     )
+    # noinspection PyTypeChecker
    console.echo(message=f"Project {Path(project_path).stem} data manifest file: generated.", level=LogLevel.SUCCESS)
 
 
@@ -177,6 +182,7 @@ def generate_system_configuration_file(output_directory: str, acquisition_system
             f"Mesoscope-VR system configuration file: generated. Edit the configuration parameters stored inside the "
             f"{file_name} file to match the state of the acquisition system and use context."
         )
+        # noinspection PyTypeChecker
         console.echo(message=message, level=LogLevel.SUCCESS)
 
     # For unsupported system types, raises an error message
@@ -234,6 +240,7 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
         f"Server access credentials file: generated. If necessary, remember to edit the data acquisition system "
         f"configuration file to include the path to the credentials file generated via this CLI."
     )
+    # noinspection PyTypeChecker
    console.echo(message=message, level=LogLevel.SUCCESS)
 
 
@@ -284,6 +291,7 @@ def generate_project_configuration_file(project: str, surgery_log_id: str, water
         project_name=project, surgery_sheet_id=surgery_log_id, water_log_sheet_id=water_restriction_log_id
     )
     configuration.save(path=file_path.joinpath())
+    # noinspection PyTypeChecker
    console.echo(message=f"Project {project} data structure and configuration file: generated.", level=LogLevel.SUCCESS)
 
 
@@ -360,6 +368,7 @@ def generate_experiment_configuration_file(project: str, experiment: str, state_
     raise ValueError(message)  # Fall-back to appease mypy, should not be reachable
 
     experiment_configuration.to_yaml(file_path=file_path)
+    # noinspection PyTypeChecker
    console.echo(message=f"Experiment {experiment} configuration file: generated.", level=LogLevel.SUCCESS)
 
 
sl_shared_assets/cli.pyi CHANGED
@@ -9,6 +9,7 @@ from .server import generate_server_credentials as generate_server_credentials
 from .data_classes import (
     SessionData as SessionData,
     ExperimentState as ExperimentState,
+    ProcessingTracker as ProcessingTracker,
     ProjectConfiguration as ProjectConfiguration,
     MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
     MesoscopeExperimentConfiguration as MesoscopeExperimentConfiguration,
sl_shared_assets/data_classes/__init__.py CHANGED
@@ -15,6 +15,7 @@ from .session_data import (
     RawData,
     SessionData,
     ProcessedData,
+    ProcessingTracker,
     ProjectConfiguration,
 )
 from .surgery_data import (
@@ -63,4 +64,5 @@ __all__ = [
     "MesoscopeCameras",
     "MesoscopeMicroControllers",
     "MesoscopeAdditionalFirmware",
+    "ProcessingTracker",
 ]
sl_shared_assets/data_classes/__init__.pyi CHANGED
@@ -10,6 +10,7 @@ from .session_data import (
     RawData as RawData,
     SessionData as SessionData,
     ProcessedData as ProcessedData,
+    ProcessingTracker as ProcessingTracker,
     ProjectConfiguration as ProjectConfiguration,
 )
 from .surgery_data import (
@@ -58,4 +59,5 @@ __all__ = [
     "MesoscopeCameras",
     "MesoscopeMicroControllers",
     "MesoscopeAdditionalFirmware",
+    "ProcessingTracker",
 ]
sl_shared_assets/data_classes/session_data.py CHANGED
@@ -11,6 +11,7 @@ import shutil as sh
 from pathlib import Path
 from dataclasses import field, dataclass
 
+from filelock import Timeout, FileLock
 from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
 from ataraxis_data_structures import YamlConfig
 from ataraxis_time.time_helpers import get_timestamp
@@ -217,7 +218,7 @@ class RawData:
     the long-term storage destinations (NAS and Server) and the integrity of the moved data is verified on at least one
     destination. During 'purge' sl-experiment runtimes, the library discovers and removes all session data marked with
     'ubiquitin.bin' files from the machine that runs the code."""
-    verified_bin_path: Path = Path()
+    integrity_verification_tracker_path: Path = Path()
     """Stores the path to the integrity_verification_tracker.yaml file. This tracker file is created (or updated) by
     the sl-shared-assets 'verify-session' CLI command to indicate whether the session data inside the folder marked
     by the file has been verified for integrity. Primarily, this is used when the data is moved to the long-term
     storage destination (BioHPC
@@ -253,7 +254,7 @@ class RawData:
         self.system_configuration_path = self.raw_data_path.joinpath("system_configuration.yaml")
         self.telomere_path = self.raw_data_path.joinpath("telomere.bin")
         self.ubiquitin_path = self.raw_data_path.joinpath("ubiquitin.bin")
-        self.verified_bin_path = self.raw_data_path.joinpath("verified.bin")
+        self.integrity_verification_tracker_path = self.raw_data_path.joinpath("integrity_verification_tracker.yaml")
 
     def make_directories(self) -> None:
         """Ensures that all major subdirectories and the root directory exist, creating any missing directories."""
@@ -290,20 +291,20 @@ class ProcessedData:
     server-side data processing pipeline runtimes. This directory is primarily used when running data processing jobs
     on the remote server. However, it is possible to configure local runtimes to also redirect log data to files
     stored in this directory (by editing ataraxis-base-utilities 'console' variable)."""
-    single_day_suite2p_bin_path: Path = Path()
+    suite2p_processing_tracker_path: Path = Path()
     """Stores the path to the suite2p_processing_tracker.yaml file. This tracker file is used by our single-day
     suite2p data processing pipeline to mark sessions that have been successfully processed with the single-day
     sl-suite2p library pipeline. Note, the tracker is reset at the beginning of the suite2p pipeline, so a 'complete'
     state always indicates successful processing runtime completion."""
-    multi_day_suite2p_bin_path: Path = Path()
+    dataset_formation_tracker_path: Path = Path()
     """Same as suite2p_processing_tracker_path, but tracks whether the session's data has been successfully
     integrated into a dataset."""
-    behavior_bin_path: Path = Path()
+    behavior_processing_tracker_path: Path = Path()
     """Stores the path to the behavior_processing_tracker.yaml file. This tracker file is used by our behavior data
     extraction pipeline to mark sessions that have been successfully processed with the sl-behavior library pipeline.
     Note, the tracker is reset at the beginning of the behavior data extraction pipeline, so a 'complete' state
     always indicates successful processing runtime completion."""
-    dlc_bin_path: Path = Path()
+    video_processing_tracker_path: Path = Path()
     """Stores the path to the video_processing_tracker.yaml file. This tracker file is used by our DeepLabCut-based
     pose tracking pipeline to mark sessions that have been successfully processed with the sl-dlc library pipeline.
     Note, the tracker is reset at the beginning of the DeepLabCut pipeline, so a 'complete' state always indicates
     successful processing
@@ -326,10 +327,10 @@ class ProcessedData:
         self.mesoscope_data_path = self.processed_data_path.joinpath("mesoscope_data")
         self.behavior_data_path = self.processed_data_path.joinpath("behavior_data")
         self.job_logs_path = self.processed_data_path.joinpath("job_logs")
-        self.single_day_suite2p_bin_path = self.processed_data_path.joinpath("single_day_suite2p.bin")
-        self.multi_day_suite2p_bin_path = self.processed_data_path.joinpath("multi_day_suite2p.bin")
-        self.behavior_bin_path = self.processed_data_path.joinpath("behavior.bin")
-        self.dlc_bin_path = self.processed_data_path.joinpath("dlc.bin")
+        self.suite2p_processing_tracker_path = self.processed_data_path.joinpath("suite2p_processing_tracker.yaml")
+        self.dataset_formation_tracker_path = self.processed_data_path.joinpath("dataset_formation_tracker.yaml")
+        self.behavior_processing_tracker_path = self.processed_data_path.joinpath("behavior_processing_tracker.yaml")
+        self.video_processing_tracker_path = self.processed_data_path.joinpath("video_processing_tracker.yaml")
 
     def make_directories(self) -> None:
         """Ensures that all major subdirectories and the root directory exist, creating any missing directories."""
@@ -477,6 +478,7 @@ class SessionData(YamlConfig):
             f"already exists. The newly created session directory uses a '_{counter}' postfix to distinguish "
             f"itself from the already existing session directory."
         )
+        # noinspection PyTypeChecker
         console.echo(message=message, level=LogLevel.ERROR)
 
     # Generates subclasses stored inside the main class instance based on the data resolved above.
@@ -490,6 +492,7 @@ class SessionData(YamlConfig):
     processed_data.resolve_paths(root_directory_path=session_path.joinpath("processed_data"))
 
     # Packages the sections generated above into a SessionData instance
+    # noinspection PyArgumentList
     instance = SessionData(
         project_name=project_name,
         animal_id=animal_id,
@@ -623,3 +626,252 @@
 
         # Saves instance data as a .YAML file
         origin.to_yaml(file_path=self.raw_data.session_data_path)
+
+
+@dataclass()
+class ProcessingTracker(YamlConfig):
+    """Wraps the .yaml file that tracks the state of a data processing runtime and provides tools for communicating
+    the state between multiple processes in a thread-safe manner.
+
+    Primarily, this tracker class is used by all remote data processing pipelines in the lab to prevent race
+    conditions and make it impossible to run multiple processing runtimes at the same time.
+    """
+
+    file_path: Path
+    """Stores the path to the .yaml file used to save the tracker data between runtimes. The class instance functions
+    as a wrapper around the data stored inside the specified .yaml file."""
+    _is_complete: bool = False
+    """Tracks whether the processing runtime managed by this tracker has been successfully carried out for the
+    session that calls the tracker."""
+    _encountered_error: bool = False
+    """Tracks whether the processing runtime managed by this tracker has encountered an error while running for the
+    session that calls the tracker."""
+    _is_running: bool = False
+    """Tracks whether the processing runtime managed by this tracker is currently running for the session that calls
+    the tracker."""
+    _lock_path: str = field(init=False)
+    """Stores the path to the .lock file for the target tracker .yaml file. This file is used to ensure that only one
+    process can simultaneously read from or write to the wrapped .yaml file."""
+
+    def __post_init__(self) -> None:
+        # Generates the lock file path for the target .yaml file.
+        if self.file_path is not None:
+            self._lock_path = str(self.file_path.with_suffix(self.file_path.suffix + ".lock"))
+        else:
+            self._lock_path = ""
+
+    def _load_state(self) -> None:
+        """Reads the current processing state from the wrapped .YAML file."""
+        if self.file_path.exists():
+            # Loads the data for the state values, but does not replace the file path or lock attributes.
+            instance: ProcessingTracker = self.from_yaml(self.file_path)  # type: ignore
+            self._is_complete = instance._is_complete
+            self._encountered_error = instance._encountered_error
+            self._is_running = instance._is_running
+        else:
+            # Otherwise, if the tracker file does not exist, generates a new .yaml file using default instance
+            # values.
+            self._save_state()
+
+    def _save_state(self) -> None:
+        """Saves the current processing state stored inside instance attributes to the specified .YAML file."""
+        # Resets the file_path and _lock_path attributes to None on a deep copy of the instance before dumping the
+        # data to .YAML to avoid issues with loading it back.
+        original = copy.deepcopy(self)
+        original.file_path = None  # type: ignore
+        original._lock_path = None  # type: ignore
+        original.to_yaml(file_path=self.file_path)
+
+    def start(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime is currently running.
+
+        All further attempts to start the same processing runtime for the same session's data will automatically
+        abort with an error.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+
+                # If the runtime is already running, aborts with an error
+                if self._is_running:
+                    message = (
+                        f"Unable to start the processing runtime. The {self.file_path.name} tracker file indicates "
+                        f"that the runtime is currently running from a different process. Only a single runtime "
+                        f"instance is allowed to run at the same time."
+                    )
+                    console.error(message=message, error=RuntimeError)
+                    raise RuntimeError(message)  # Fallback to appease mypy, should not be reachable
+
+                # Otherwise, marks the runtime as running and saves the state back to the .yaml file.
+                self._is_running = True
+                self._is_complete = False
+                self._encountered_error = False
+                self._save_state()
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
+
+    def error(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime encountered an error and
+        failed to complete.
+
+        This method will only work for an active runtime. When called for an active runtime, it expects the runtime
+        to be aborted with an error after the method returns. It configures the target tracker to allow other
+        processes to restart the runtime at any point after this method returns, so it is UNSAFE to do any further
+        processing from the process that calls this method.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+
+                # If the runtime is not running, aborts with an error
+                if not self._is_running:
+                    message = (
+                        f"Unable to report that the processing runtime encountered an error. The "
+                        f"{self.file_path.name} tracker file indicates that the runtime is currently NOT running. "
+                        f"A runtime has to be actively running to set the tracker to an error state."
+                    )
+                    console.error(message=message, error=RuntimeError)
+                    raise RuntimeError(message)  # Fallback to appease mypy, should not be reachable
+
+                # Otherwise, indicates that the runtime aborted with an error
+                self._is_running = False
+                self._is_complete = False
+                self._encountered_error = True
+                self._save_state()
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
+
+    def stop(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime has completed successfully.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+
+                # If the runtime is not running, aborts with an error
+                if not self._is_running:
+                    message = (
+                        f"Unable to stop (complete) the processing runtime. The {self.file_path.name} tracker file "
+                        f"indicates that the runtime is currently NOT running. A runtime has to be actively running "
+                        f"to mark it as complete and stop the runtime."
+                    )
+                    console.error(message=message, error=RuntimeError)
+                    raise RuntimeError(message)  # Fallback to appease mypy, should not be reachable
+
+                # Otherwise, marks the runtime as complete (stopped)
+                self._is_running = False
+                self._is_complete = True
+                self._encountered_error = False
+                self._save_state()
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
+
+    @property
+    def is_complete(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime has been
+        completed successfully and False otherwise."""
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+                return self._is_complete
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
+
+    @property
+    def encountered_error(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime aborted due to
+        encountering an error and False otherwise."""
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+                return self._encountered_error
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
+
+    @property
+    def is_running(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime is currently
+        running and False otherwise."""
+        try:
+            # Acquires the lock
+            lock = FileLock(self._lock_path)
+            with lock.acquire(timeout=10.0):
+                # Loads tracker state from the .yaml file
+                self._load_state()
+                return self._is_running
+
+        # If lock acquisition fails for any reason, aborts with an error
+        except Timeout:
+            message = (
+                f"Unable to interface with the ProcessingTracker instance data cached inside the target .yaml file "
+                f"{self.file_path.stem}. Specifically, unable to acquire the file lock before the timeout duration "
+                f"of 10 seconds has passed."
+            )
+            console.error(message=message, error=Timeout)
+            raise Timeout(message)  # Fallback to appease mypy, should not be reachable
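
The new ProcessingTracker is a lock-guarded state machine with three mutually exclusive states (running, complete, errored) persisted in a .yaml file. A minimal sketch of how a pipeline entry point might drive it, using only the API added above (the session path and run_pipeline are hypothetical placeholders):

from pathlib import Path

from sl_shared_assets.data_classes import ProcessingTracker

def run_pipeline() -> None:
    """Hypothetical stand-in for the processing step guarded by the tracker."""

tracker = ProcessingTracker(file_path=Path("/data/session_1/processed_data/suite2p_processing_tracker.yaml"))
tracker.start()  # Raises RuntimeError if another process already runs this pipeline for the session.
try:
    run_pipeline()
    tracker.stop()  # Marks the runtime as successfully completed.
finally:
    # A runtime that raised before stop() leaves the tracker in the 'running' state; flipping it to the
    # error state lets other processes retry later instead of deadlocking on the 'already running' check.
    if tracker.is_running:
        tracker.error()

verify_session_checksum in project_management_tools.py (further down in this diff) follows the same start / stop / error-in-finally pattern.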
sl_shared_assets/data_classes/session_data.pyi CHANGED
@@ -96,7 +96,7 @@ class RawData:
     checksum_path: Path = ...
     telomere_path: Path = ...
     ubiquitin_path: Path = ...
-    verified_bin_path: Path = ...
+    integrity_verification_tracker_path: Path = ...
     def resolve_paths(self, root_directory_path: Path) -> None:
         """Resolves all paths managed by the class instance based on the input root directory path.
 
@@ -125,10 +125,10 @@ class ProcessedData:
     mesoscope_data_path: Path = ...
     behavior_data_path: Path = ...
     job_logs_path: Path = ...
-    single_day_suite2p_bin_path: Path = ...
-    multi_day_suite2p_bin_path: Path = ...
-    behavior_bin_path: Path = ...
-    dlc_bin_path: Path = ...
+    suite2p_processing_tracker_path: Path = ...
+    dataset_formation_tracker_path: Path = ...
+    behavior_processing_tracker_path: Path = ...
+    video_processing_tracker_path: Path = ...
     def resolve_paths(self, root_directory_path: Path) -> None:
         """Resolves all paths managed by the class instance based on the input root directory path.
 
@@ -250,3 +250,62 @@ class SessionData(YamlConfig):
     data processing. The method is intended to only be used by the SessionData instance itself during its
     create() method runtime.
     """
+
+@dataclass()
+class ProcessingTracker(YamlConfig):
+    """Wraps the .yaml file that tracks the state of a data processing runtime and provides tools for communicating
+    the state between multiple processes in a thread-safe manner.
+
+    Primarily, this tracker class is used by all remote data processing pipelines in the lab to prevent race
+    conditions and make it impossible to run multiple processing runtimes at the same time.
+    """
+
+    file_path: Path
+    _is_complete: bool = ...
+    _encountered_error: bool = ...
+    _is_running: bool = ...
+    _lock_path: str = field(init=False)
+    def __post_init__(self) -> None: ...
+    def _load_state(self) -> None:
+        """Reads the current processing state from the wrapped .YAML file."""
+    def _save_state(self) -> None:
+        """Saves the current processing state stored inside instance attributes to the specified .YAML file."""
+    def start(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime is currently running.
+
+        All further attempts to start the same processing runtime for the same session's data will automatically
+        abort with an error.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+    def error(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime encountered an error and
+        failed to complete.
+
+        This method will only work for an active runtime. When called for an active runtime, it expects the runtime
+        to be aborted with an error after the method returns. It configures the target tracker to allow other
+        processes to restart the runtime at any point after this method returns, so it is UNSAFE to do any further
+        processing from the process that calls this method.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+    def stop(self) -> None:
+        """Configures the tracker file to indicate that the tracked processing runtime has completed successfully.
+
+        Raises:
+            TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
+        """
+    @property
+    def is_complete(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime has been
+        completed successfully and False otherwise."""
+    @property
+    def encountered_error(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime aborted due to
+        encountering an error and False otherwise."""
+    @property
+    def is_running(self) -> bool:
+        """Returns True if the tracker wrapped by the instance indicates that the processing runtime is currently
+        running and False otherwise."""
sl_shared_assets/server/server.py CHANGED
@@ -204,6 +204,42 @@ class Server:
         else:
             return False
 
+    def pull_file(self, local_file_path: Path, remote_file_path: Path) -> None:
+        """Copies the specified file from the remote server to the local machine.
+
+        Args:
+            local_file_path: The path to the local instance of the file (where to copy the file).
+            remote_file_path: The path to the target file on the remote server (the file to be copied).
+        """
+        sftp = self._client.open_sftp()
+        sftp.get(localpath=local_file_path, remotepath=str(remote_file_path))
+        sftp.close()
+
+    def push_file(self, local_file_path: Path, remote_file_path: Path) -> None:
+        """Copies the specified file from the local machine to the remote server.
+
+        Args:
+            local_file_path: The path to the file that needs to be copied to the remote server.
+            remote_file_path: The path to the file on the remote server (where to copy the file).
+        """
+        sftp = self._client.open_sftp()
+        sftp.put(localpath=local_file_path, remotepath=str(remote_file_path))
+        sftp.close()
+
+    def remove(self, remote_path: Path, is_dir: bool) -> None:
+        """Removes the specified file or directory from the remote server.
+
+        Args:
+            remote_path: The path to the file or directory on the remote server to be removed.
+            is_dir: Determines whether the input path represents a directory or a file.
+        """
+        sftp = self._client.open_sftp()
+        if is_dir:
+            sftp.rmdir(path=str(remote_path))
+        else:
+            sftp.unlink(path=str(remote_path))
+        sftp.close()
+
     def close(self) -> None:
         """Closes the SSH connection to the server.
 
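
Each helper opens and closes its own short-lived SFTP channel on the existing SSH client. A usage sketch, assuming an already-connected Server instance (its construction is outside this diff) and illustrative paths; the marker file names are taken from elsewhere in this package:

from pathlib import Path

# 'server' is assumed to be a connected Server instance; how it is built is not shown in this diff.
server.pull_file(
    local_file_path=Path("/local/session_1/raw_data/ax_checksum.txt"),
    remote_file_path=Path("/server/session_1/raw_data/ax_checksum.txt"),
)
server.push_file(
    local_file_path=Path("/local/session_1/raw_data/telomere.bin"),
    remote_file_path=Path("/server/session_1/raw_data/telomere.bin"),
)
server.remove(remote_path=Path("/server/session_1/raw_data/ubiquitin.bin"), is_dir=False)
server.close()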
sl_shared_assets/server/server.pyi CHANGED
@@ -88,6 +88,27 @@ class Server:
         ValueError: If the input Job object does not contain a valid job_id, suggesting that it has not been
             submitted to the server.
         """
+    def pull_file(self, local_file_path: Path, remote_file_path: Path) -> None:
+        """Copies the specified file from the remote server to the local machine.
+
+        Args:
+            local_file_path: The path to the local instance of the file (where to copy the file).
+            remote_file_path: The path to the target file on the remote server (the file to be copied).
+        """
+    def push_file(self, local_file_path: Path, remote_file_path: Path) -> None:
+        """Copies the specified file from the local machine to the remote server.
+
+        Args:
+            local_file_path: The path to the file that needs to be copied to the remote server.
+            remote_file_path: The path to the file on the remote server (where to copy the file).
+        """
+    def remove(self, remote_path: Path, is_dir: bool) -> None:
+        """Removes the specified file or directory from the remote server.
+
+        Args:
+            remote_path: The path to the file or directory on the remote server to be removed.
+            is_dir: Determines whether the input path represents a directory or a file.
+        """
     def close(self) -> None:
         """Closes the SSH connection to the server.
 
sl_shared_assets/tools/packaging_tools.py CHANGED
@@ -17,11 +17,16 @@ _excluded_files = {
     "ax_checksum.txt",
     "ubiquitin.bin",
     "telomere.bin",
-    "single_day_suite2p.bin",
-    "multi_day_suite2p.bin",
-    "behavior.bin",
-    "dlc.bin",
-    "verified.bin",
+    "suite2p_processing_tracker.yaml",
+    "dataset_formation_tracker.yaml",
+    "behavior_processing_tracker.yaml",
+    "video_processing_tracker.yaml",
+    "integrity_verification_tracker.yaml",
+    "suite2p_processing_tracker.yaml.lock",
+    "dataset_formation_tracker.yaml.lock",
+    "behavior_processing_tracker.yaml.lock",
+    "video_processing_tracker.yaml.lock",
+    "integrity_verification_tracker.yaml.lock",
 }
 
 
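The update swaps the five single-shot .bin markers for the new .yaml trackers and also excludes the companion .lock files that filelock creates next to each tracker. Assuming calculate_directory_checksum consults this set while walking the directory (the walk itself is outside this hunk), the filtering step amounts to a sketch like:

from pathlib import Path

def _checksum_candidates(directory: Path) -> list[Path]:
    # Hypothetical helper: collects the files to hash, skipping tracker, marker, and lock files so
    # that updating a tracker after a transfer does not change the directory's recorded checksum.
    return sorted(path for path in directory.rglob("*") if path.is_file() and path.name not in _excluded_files)
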
sl_shared_assets/tools/project_management_tools.py CHANGED
@@ -7,7 +7,7 @@ from pathlib import Path
 import polars as pl
 from ataraxis_base_utilities import console
 
-from ..data_classes import SessionData
+from ..data_classes import SessionData, ProcessingTracker
 from .packaging_tools import calculate_directory_checksum
 
 
@@ -60,12 +60,15 @@ def generate_project_manifest(
         "type": [],  # Type of the session (e.g., Experiment, Training, etc.).
         "raw_data": [],  # Server-side raw_data folder path.
         "processed_data": [],  # Server-side processed_data folder path.
-        "complete": [],  # Determines if the session data is complete. Incomplete sessions are excluded from processing.
-        "verified": [],  # Determines if the session data integrity has been verified upon transfer to storage machine.
-        "single_day_suite2p": [],  # Determines whether the session has been processed with the single-day s2p pipeline.
-        "multi_day_suite2p": [],  # Determines whether the session has been processed with the multi-day s2p pipeline.
-        "behavior": [],  # Determines whether the session has been processed with the behavior extraction pipeline.
-        "dlc": [],  # Determines whether the session has been processed with the DeepLabCut pipeline.
+        # Determines whether the session data is complete. Incomplete sessions are excluded from processing.
+        "complete": [],
+        # Determines whether the session data integrity has been verified upon transfer to the storage machine.
+        "integrity_verification": [],
+        "suite2p_processing": [],  # Determines whether the session has been processed with the single-day s2p pipeline.
+        "dataset_formation": [],  # Determines whether the session's data has been integrated into a dataset.
+        # Determines whether the session has been processed with the behavior extraction pipeline.
+        "behavior_processing": [],
+        "video_processing": [],  # Determines whether the session has been processed with the DeepLabCut pipeline.
     }
 
     # Loops over each session of every animal in the project and extracts session ID information and information
@@ -88,34 +91,35 @@ def generate_project_manifest(
         # If the session raw_data folder contains the telomere.bin file, marks the session as complete.
         manifest["complete"].append(session_data.raw_data.telomere_path.exists())
 
-        # If the session raw_data folder contains the verified.bin file, marks the session as verified.
-        manifest["verified"].append(session_data.raw_data.verified_bin_path.exists())
+        # Data verification status
+        tracker = ProcessingTracker(file_path=session_data.raw_data.integrity_verification_tracker_path)
+        manifest["integrity_verification"].append(tracker.is_complete)
 
         # If the session is incomplete or unverified, marks all processing steps as FALSE, as automatic processing
         # is disabled for incomplete sessions. If the session is unverified, the case is even more severe, as its
         # data may be corrupted.
         if not manifest["complete"][-1] or not manifest["integrity_verification"][-1]:
-            manifest["single_day_suite2p"].append(False)
-            manifest["multi_day_suite2p"].append(False)
-            manifest["behavior"].append(False)
-            manifest["dlc"].append(False)
+            manifest["suite2p_processing"].append(False)
+            manifest["dataset_formation"].append(False)
+            manifest["behavior_processing"].append(False)
+            manifest["video_processing"].append(False)
             continue  # Cycles to the next session
 
-        # If the session processed_data folder contains the single-day suite2p.bin file, marks the single-day
-        # suite2p processing step as complete.
-        manifest["single_day_suite2p"].append(session_data.processed_data.single_day_suite2p_bin_path.exists())
+        # Suite2p (single-day) processing status
+        tracker = ProcessingTracker(file_path=session_data.processed_data.suite2p_processing_tracker_path)
+        manifest["suite2p_processing"].append(tracker.is_complete)
 
-        # If the session processed_data folder contains the multi-day suite2p.bin file, marks the multi-day suite2p
-        # processing step as complete.
-        manifest["multi_day_suite2p"].append(session_data.processed_data.multi_day_suite2p_bin_path.exists())
+        # Dataset formation (integration) status. Tracks whether the session has been added to any dataset(s).
+        tracker = ProcessingTracker(file_path=session_data.processed_data.dataset_formation_tracker_path)
+        manifest["dataset_formation"].append(tracker.is_complete)
 
-        # If the session processed_data folder contains the behavior.bin file, marks the behavior processing step
-        # as complete.
-        manifest["behavior"].append(session_data.processed_data.behavior_data_path.exists())
+        # Behavior extraction processing status.
+        tracker = ProcessingTracker(file_path=session_data.processed_data.behavior_processing_tracker_path)
+        manifest["behavior_processing"].append(tracker.is_complete)
 
-        # If the session processed_data folder contains the dlc.bin file, marks the dlc processing step as
-        # complete.
-        manifest["dlc"].append(session_data.processed_data.dlc_bin_path.exists())
+        # DeepLabCut (video) processing status.
+        tracker = ProcessingTracker(file_path=session_data.processed_data.video_processing_tracker_path)
+        manifest["video_processing"].append(tracker.is_complete)
 
     # Converts the manifest dictionary to a Polars DataFrame
     schema = {
@@ -125,11 +129,11 @@ def generate_project_manifest(
         "processed_data": pl.String,
         "type": pl.String,
         "complete": pl.Boolean,
-        "verified": pl.Boolean,
-        "single_day_suite2p": pl.Boolean,
-        "multi_day_suite2p": pl.Boolean,
-        "behavior": pl.Boolean,
-        "dlc": pl.Boolean,
+        "integrity_verification": pl.Boolean,
+        "suite2p_processing": pl.Boolean,
+        "dataset_formation": pl.Boolean,
+        "behavior_processing": pl.Boolean,
+        "video_processing": pl.Boolean,
     }
     df = pl.DataFrame(manifest, schema=schema)
 
@@ -176,26 +180,38 @@ def verify_session_checksum(
         make_processed_data_directory=create_processed_data_directory,
     )
 
-    # Unlinks the verified.bin marker if it exists. The presence or absence of the marker is used as the primary
-    # heuristic for determining if the session data passed verification. Unlinking it early helps in the case the
-    # verification procedure aborts unexpectedly for any reason.
-    session_data.raw_data.verified_bin_path.unlink(missing_ok=True)
+    # Initializes the ProcessingTracker instance for the verification tracker file
+    tracker = ProcessingTracker(file_path=session_data.raw_data.integrity_verification_tracker_path)
 
-    # Re-calculates the checksum for the raw_data directory
-    calculated_checksum = calculate_directory_checksum(
-        directory=session_data.raw_data.raw_data_path, batch=False, save_checksum=False
-    )
-
-    # Loads the checksum stored inside the ax_checksum.txt file
-    with open(session_data.raw_data.checksum_path, "r") as f:
-        stored_checksum = f.read().strip()
+    # Updates the tracker data to communicate that the verification process has started. This automatically clears
+    # the previous 'completed' status.
+    tracker.start()
 
-    # If the two checksums do not match, this likely indicates data corruption.
-    if stored_checksum != calculated_checksum:
-        # If the telomere.bin file exists, removes this file. This automatically marks the session as incomplete for
-        # all other Sun lab runtimes.
-        session_data.raw_data.telomere_path.unlink(missing_ok=True)
+    # The 'try' block enables proper error-driven termination of the tracker if the runtime cannot acquire the file
+    # lock for a long time, or if another runtime is already underway.
+    try:
+        # Re-calculates the checksum for the raw_data directory
+        calculated_checksum = calculate_directory_checksum(
+            directory=session_data.raw_data.raw_data_path, batch=False, save_checksum=False
+        )
 
-    # Otherwise, ensures that the session is marked with the verified.bin marker file.
-    else:
-        session_data.raw_data.verified_bin_path.touch(exist_ok=True)
+        # Loads the checksum stored inside the ax_checksum.txt file
+        with open(session_data.raw_data.checksum_path, "r") as f:
+            stored_checksum = f.read().strip()
+
+        # If the two checksums do not match, this likely indicates data corruption.
+        if stored_checksum != calculated_checksum:
+            # If the telomere.bin file exists, removes this file. This automatically marks the session as incomplete
+            # for all other Sun lab runtimes.
+            session_data.raw_data.telomere_path.unlink(missing_ok=True)
+
+        else:
+            # Sets the tracker to indicate that the verification runtime completed successfully.
+            tracker.stop()
+
+    finally:
+        # If the code reaches this section while the tracker indicates that the processing is still running, the
+        # verification runtime encountered an error. Configures the tracker to indicate that this runtime finished
+        # with an error to prevent deadlocking the runtime.
+        if tracker.is_running:
+            tracker.error()
sl_shared_assets/tools/project_management_tools.pyi CHANGED
@@ -1,6 +1,9 @@
 from pathlib import Path
 
-from ..data_classes import SessionData as SessionData
+from ..data_classes import (
+    SessionData as SessionData,
+    ProcessingTracker as ProcessingTracker,
+)
 from .packaging_tools import calculate_directory_checksum as calculate_directory_checksum
 
 def generate_project_manifest(
sl_shared_assets-1.0.2.dist-info/METADATA → sl_shared_assets-1.1.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sl-shared-assets
-Version: 1.0.2
+Version: 1.1.1
 Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
 Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
 Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -685,7 +685,7 @@ Keywords: acquisition,assets,data,processing,sunlab
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
-Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
@@ -695,6 +695,7 @@ Requires-Dist: ataraxis-base-utilities<4,>=3
 Requires-Dist: ataraxis-data-structures<4,>=3.1.1
 Requires-Dist: ataraxis-time<4,>=3
 Requires-Dist: click<9,>=8
+Requires-Dist: filelock<4,>=3
 Requires-Dist: natsort<9,>=8
 Requires-Dist: paramiko<4,>=3.5.1
 Requires-Dist: polars<2,>=1
@@ -714,18 +715,20 @@ Requires-Dist: sphinx-rtd-theme<4,>=3; extra == 'conda'
 Requires-Dist: sphinx<9,>=8; extra == 'conda'
 Requires-Dist: twine<7,>=6; extra == 'conda'
 Requires-Dist: types-appdirs<2,>=1; extra == 'conda'
+Requires-Dist: types-filelock<4,>=3; extra == 'conda'
 Requires-Dist: types-paramiko<4,>=3; extra == 'conda'
 Requires-Dist: types-tqdm<5,>=4; extra == 'conda'
 Provides-Extra: condarun
 Requires-Dist: appdirs<2,>=1; extra == 'condarun'
 Requires-Dist: click<9,>=8; extra == 'condarun'
+Requires-Dist: filelock<4,>=3; extra == 'condarun'
 Requires-Dist: natsort<9,>=8; extra == 'condarun'
 Requires-Dist: paramiko<4,>=3.5.1; extra == 'condarun'
 Requires-Dist: polars<2,>=1; extra == 'condarun'
 Requires-Dist: pyarrow<21,>=20; extra == 'condarun'
 Requires-Dist: tqdm<5,>=4; extra == 'condarun'
 Provides-Extra: dev
-Requires-Dist: ataraxis-automation<5,>=4; extra == 'dev'
+Requires-Dist: ataraxis-automation<6,>=5; extra == 'dev'
 Requires-Dist: build<2,>=1; extra == 'dev'
 Requires-Dist: grayskull<3,>=2; extra == 'dev'
 Requires-Dist: hatchling<2,>=1; extra == 'dev'
@@ -741,11 +744,12 @@ Requires-Dist: tox-uv<2,>=1; extra == 'dev'
 Requires-Dist: tox<5,>=4; extra == 'dev'
 Requires-Dist: twine<7,>=6; extra == 'dev'
 Requires-Dist: types-appdirs<2,>=1; extra == 'dev'
+Requires-Dist: types-filelock<4,>=3; extra == 'dev'
 Requires-Dist: types-paramiko<4,>=3; extra == 'dev'
 Requires-Dist: types-tqdm<5,>=4; extra == 'dev'
 Requires-Dist: uv<1,>=0; extra == 'dev'
 Provides-Extra: noconda
-Requires-Dist: ataraxis-automation<5,>=4; extra == 'noconda'
+Requires-Dist: ataraxis-automation<6,>=5; extra == 'noconda'
 Requires-Dist: build<2,>=1; extra == 'noconda'
 Requires-Dist: sphinx-rtd-dark-mode<2,>=1; extra == 'noconda'
 Requires-Dist: tox-uv<2,>=1; extra == 'noconda'
sl_shared_assets-1.0.2.dist-info/RECORD → sl_shared_assets-1.1.1.dist-info/RECORD CHANGED
@@ -1,36 +1,36 @@
 sl_shared_assets/__init__.py,sha256=rscR353jiyUQ-wpguTLOM-A5Lqr1ftQtuTan1D0AtR0,2196
 sl_shared_assets/__init__.pyi,sha256=Ye6eY_y_l9CTqwbCf-OxXOphxXKfn9UJnuw09DdLEtc,2405
-sl_shared_assets/cli.py,sha256=Rfc0W0yUgFi3ohL7Jmx6kh_V-h3qsqPYSpxjqNYtnEE,17433
-sl_shared_assets/cli.pyi,sha256=p1tUflODySXx1_2IrLZ_uqixopbQPTearuJvqJ4jKI4,6075
+sl_shared_assets/cli.py,sha256=2HAgnD7hHnFp3R7_tJAfWBI_jRbhSuyDBFK3TGIHYsw,17771
+sl_shared_assets/cli.pyi,sha256=Fh8GZBSQzII_Iz6k5nLQOsVMbp7q1R5mp4KNZjdGflY,6119
 sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sl_shared_assets/data_classes/__init__.py,sha256=b0BwTAJCD1zbwjd2UdpXheq61q1sgBuYhtAL-GHA2h4,1835
-sl_shared_assets/data_classes/__init__.pyi,sha256=e2082sm5pSw3bxNZGqwcRhuKLH8T7hcEo6kWtOZNDBU,1968
+sl_shared_assets/data_classes/__init__.py,sha256=S1-3mYop1MhI7HRusPE5sfu30hmIgH9BfmB5-r6Bbm8,1883
+sl_shared_assets/data_classes/__init__.pyi,sha256=jB5Y9QQ-XL_auk7Ot0DA87lIbfn1j3AuWOk2LcOyxvk,2037
 sl_shared_assets/data_classes/configuration_data.py,sha256=ZVk1ynk25CfVuQfMofoH90BUaOPqk7zW8ukY6ls_Pp0,30360
 sl_shared_assets/data_classes/configuration_data.pyi,sha256=h7AV3z73SC2ITXWcnsShczuezC1Is7L4WSMnEWGSLPQ,9617
 sl_shared_assets/data_classes/runtime_data.py,sha256=Q7Ykf9hgrw1jYKXa53mn_LW8G2cPmLLuxgGkP6qQcc4,15483
 sl_shared_assets/data_classes/runtime_data.pyi,sha256=PxaCbeF9COR4ri91pdzh7zVrqaz2KEDYB1EoLhZQC_c,6618
-sl_shared_assets/data_classes/session_data.py,sha256=Rd3KFKWTHCUgMrMZ0HHfZq6Bc-9tZSzvIX6KFHrDJwk,38331
-sl_shared_assets/data_classes/session_data.pyi,sha256=eLemAagVD6TQ-9pzMIc0eXDgnh-quJwEidKHYitvsjQ,13243
+sl_shared_assets/data_classes/session_data.py,sha256=RWqHHaJU1pnkxVXCuFjAYbWN50HEO7oEjEsT9mmKQic,51112
+sl_shared_assets/data_classes/session_data.pyi,sha256=mVy1UJyZE6sh-hV-qFDyw8UiWGA_UCYNR2iCNbNeh4o,16190
 sl_shared_assets/data_classes/surgery_data.py,sha256=qsMj3NkjhylAT9b_wHBY-1XwTu2xsZcZatdECmkA7Bs,7437
 sl_shared_assets/data_classes/surgery_data.pyi,sha256=rf59lJ3tGSYKHQlEGXg75MnjajBwl0DYhL4TClAO4SM,2605
 sl_shared_assets/server/__init__.py,sha256=nyX6-9ACcrQeRQOCNvBVrWSTHGjRPANIG_u0aq7HPTg,426
 sl_shared_assets/server/__init__.pyi,sha256=7o99f8uf6NuBjMZjNAM1FX69Qbu5uBluRSAyaUWbXOU,263
 sl_shared_assets/server/job.py,sha256=GB31yYPEqXR6MgwNmnQrSQuHRJqUHFXcd6p7hb38q_c,7928
 sl_shared_assets/server/job.pyi,sha256=cxgHMpuwHsJGf_ZcTSSa2tZNzeR_GxqlICOsYGV_oy0,5655
-sl_shared_assets/server/server.py,sha256=SkFJA8eYlZrm2dag7sF_CyPiBphFgoq32zV10kqnedc,9414
-sl_shared_assets/server/server.pyi,sha256=SoTRqSPJ1GQ5i5gV3N57rDT9xzeT47RjMRH1FYKsOkg,4143
+sl_shared_assets/server/server.py,sha256=s2lOrOxcBGQsELKrWu9yCX4Ga5olyLNmWLSCOFyyC44,11002
+sl_shared_assets/server/server.pyi,sha256=h8wI9rMcEuGLrJulndUjASM7E_nU4G6gXnjPge6mWHg,5263
 sl_shared_assets/tools/__init__.py,sha256=N95ZPMz-_HdNPrbVieCFza-QSVS6BV2KRB4K1OLRttc,636
 sl_shared_assets/tools/__init__.pyi,sha256=xeDF8itMc0JRgLrO_IN_9gW7cp_Ld-Gf-rjtrgWvQ2I,551
 sl_shared_assets/tools/ascension_tools.py,sha256=kIqYGX9F8lRao_LaVOacIiT8J9SypTvarb9mgaI9ZPs,15387
 sl_shared_assets/tools/ascension_tools.pyi,sha256=tQCDdWZ20ZjUjpMs8aGIN0yBg5ff3j6spi62b3Han4o,3755
-sl_shared_assets/tools/packaging_tools.py,sha256=2yaIysL11g5m-j0i4I2MRVzHA6_37WdyLYqXMl2FPAc,7289
+sl_shared_assets/tools/packaging_tools.py,sha256=c9U0bKB6Btj7sfyeU7xx2Jiqv930qTnXbm0ZbNR-o2k,7594
 sl_shared_assets/tools/packaging_tools.pyi,sha256=vgGbAQCExwg-0A5F72MzEhzHxu97Nqg1yuz-5P89ycU,3118
-sl_shared_assets/tools/project_management_tools.py,sha256=aMOCq3nCR2xOTDMQpXbXEh90IuKOvSbIfeKh2VaZIXw,11215
-sl_shared_assets/tools/project_management_tools.pyi,sha256=HiU1qqumscmXtAXNX6eaSrkwanGHEvk-lromwZ2ijD8,3445
+sl_shared_assets/tools/project_management_tools.py,sha256=DgMKd6i3iLG4lwVgcCgQeO8jZEfVoWFKU6882JrDvL4,11993
+sl_shared_assets/tools/project_management_tools.pyi,sha256=f_3O8UjnfHRMEe2iZpQxKK9Vb0_lJB2yI1WcJPUqGEU,3498
 sl_shared_assets/tools/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
 sl_shared_assets/tools/transfer_tools.pyi,sha256=FoH7eYZe7guGHfPr0MK5ggO62uXKwD2aJ7h1Bu7PaEE,3294
-sl_shared_assets-1.0.2.dist-info/METADATA,sha256=fMc6X-sMqNzyERNvE6DrkRKLgeubNLmk9KVh8KuFD8Y,48890
-sl_shared_assets-1.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-sl_shared_assets-1.0.2.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
-sl_shared_assets-1.0.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-sl_shared_assets-1.0.2.dist-info/RECORD,,
+sl_shared_assets-1.1.1.dist-info/METADATA,sha256=jVoN45NK064KTfK8vSFMYt8iv_4p2XH7kXM1dL2f94c,49077
+sl_shared_assets-1.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sl_shared_assets-1.1.1.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
+sl_shared_assets-1.1.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+sl_shared_assets-1.1.1.dist-info/RECORD,,