sl-shared-assets 1.2.0rc1__tar.gz → 1.2.0rc3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sl-shared-assets might be problematic. Click here for more details.

Files changed (45) hide show
  1. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/PKG-INFO +22 -22
  2. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/pyproject.toml +22 -22
  3. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/runtime_data.py +3 -0
  4. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/runtime_data.pyi +1 -0
  5. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/session_data.py +0 -1
  6. sl_shared_assets-1.2.0rc1/src/sl_shared_assets/data_classes/session_data.pyi +0 -319
  7. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/.gitignore +0 -0
  8. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/LICENSE +0 -0
  9. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/README.md +0 -0
  10. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/Makefile +0 -0
  11. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/make.bat +0 -0
  12. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/source/api.rst +0 -0
  13. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/source/conf.py +0 -0
  14. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/source/index.rst +0 -0
  15. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/docs/source/welcome.rst +0 -0
  16. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/envs/slsa_dev_lin.yml +0 -0
  17. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/envs/slsa_dev_lin_spec.txt +0 -0
  18. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/__init__.py +0 -0
  19. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/__init__.pyi +0 -0
  20. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/cli.py +0 -0
  21. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/cli.pyi +0 -0
  22. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/__init__.py +0 -0
  23. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/__init__.pyi +0 -0
  24. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/configuration_data.py +0 -0
  25. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/configuration_data.pyi +0 -0
  26. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/surgery_data.py +0 -0
  27. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/data_classes/surgery_data.pyi +0 -0
  28. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/py.typed +0 -0
  29. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/__init__.py +0 -0
  30. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/__init__.pyi +0 -0
  31. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/job.py +0 -0
  32. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/job.pyi +0 -0
  33. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/server.py +0 -0
  34. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/server/server.pyi +0 -0
  35. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/__init__.py +0 -0
  36. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/__init__.pyi +0 -0
  37. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/ascension_tools.py +0 -0
  38. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/ascension_tools.pyi +0 -0
  39. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/packaging_tools.py +0 -0
  40. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/packaging_tools.pyi +0 -0
  41. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/project_management_tools.py +0 -0
  42. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/project_management_tools.pyi +0 -0
  43. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/transfer_tools.py +0 -0
  44. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/src/sl_shared_assets/tools/transfer_tools.pyi +0 -0
  45. {sl_shared_assets-1.2.0rc1 → sl_shared_assets-1.2.0rc3}/tox.ini +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sl-shared-assets
3
- Version: 1.2.0rc1
3
+ Version: 1.2.0rc3
4
4
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
5
5
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
6
6
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -690,19 +690,19 @@ Classifier: Programming Language :: Python :: 3.11
690
690
  Classifier: Programming Language :: Python :: 3.12
691
691
  Classifier: Programming Language :: Python :: 3.13
692
692
  Requires-Python: >=3.11
693
- Requires-Dist: appdirs<2,>=1
694
- Requires-Dist: ataraxis-base-utilities<4,>=3
695
- Requires-Dist: ataraxis-data-structures<4,>=3.1.1
696
- Requires-Dist: ataraxis-time<4,>=3
697
- Requires-Dist: click<9,>=8
698
- Requires-Dist: filelock<4,>=3
699
- Requires-Dist: natsort<9,>=8
700
- Requires-Dist: paramiko<4,>=3.5.1
701
- Requires-Dist: polars<2,>=1
702
- Requires-Dist: pyarrow<21,>=20
703
- Requires-Dist: simple-slurm<1,>=0
704
- Requires-Dist: tqdm<5,>=4
705
- Requires-Dist: xxhash<4,>=3
693
+ Requires-Dist: appdirs==1.4.4
694
+ Requires-Dist: ataraxis-base-utilities==3.0.1
695
+ Requires-Dist: ataraxis-data-structures==3.1.1
696
+ Requires-Dist: ataraxis-time==3.0.0
697
+ Requires-Dist: click==8.2.1
698
+ Requires-Dist: filelock==3.18.0
699
+ Requires-Dist: natsort==8.4.0
700
+ Requires-Dist: paramiko==3.5.1
701
+ Requires-Dist: polars==1.31.0
702
+ Requires-Dist: pyarrow==20.0.0
703
+ Requires-Dist: simple-slurm==0.3.6
704
+ Requires-Dist: tqdm==4.67.1
705
+ Requires-Dist: xxhash==3.5.0
706
706
  Provides-Extra: conda
707
707
  Requires-Dist: grayskull<3,>=2; extra == 'conda'
708
708
  Requires-Dist: hatchling<2,>=1; extra == 'conda'
@@ -719,14 +719,14 @@ Requires-Dist: types-filelock<4,>=3; extra == 'conda'
719
719
  Requires-Dist: types-paramiko<4,>=3; extra == 'conda'
720
720
  Requires-Dist: types-tqdm<5,>=4; extra == 'conda'
721
721
  Provides-Extra: condarun
722
- Requires-Dist: appdirs<2,>=1; extra == 'condarun'
723
- Requires-Dist: click<9,>=8; extra == 'condarun'
724
- Requires-Dist: filelock<4,>=3; extra == 'condarun'
725
- Requires-Dist: natsort<9,>=8; extra == 'condarun'
726
- Requires-Dist: paramiko<4,>=3.5.1; extra == 'condarun'
727
- Requires-Dist: polars<2,>=1; extra == 'condarun'
728
- Requires-Dist: pyarrow<21,>=20; extra == 'condarun'
729
- Requires-Dist: tqdm<5,>=4; extra == 'condarun'
722
+ Requires-Dist: appdirs==1.4.4; extra == 'condarun'
723
+ Requires-Dist: click==8.2.1; extra == 'condarun'
724
+ Requires-Dist: filelock==3.18.0; extra == 'condarun'
725
+ Requires-Dist: natsort==8.4.0; extra == 'condarun'
726
+ Requires-Dist: paramiko==3.5.1; extra == 'condarun'
727
+ Requires-Dist: polars==1.31.0; extra == 'condarun'
728
+ Requires-Dist: pyarrow==20.0.0; extra == 'condarun'
729
+ Requires-Dist: tqdm==4.67.1; extra == 'condarun'
730
730
  Provides-Extra: dev
731
731
  Requires-Dist: ataraxis-automation<6,>=5; extra == 'dev'
732
732
  Requires-Dist: build<2,>=1; extra == 'dev'
@@ -8,7 +8,7 @@ build-backend = "hatchling.build"
8
8
  # Project metadata section. Provides the general ID information about the project.
9
9
  [project]
10
10
  name = "sl-shared-assets"
11
- version = "1.2.0rc1"
11
+ version = "1.2.0rc3"
12
12
  description = "Stores assets shared between multiple Sun (NeuroAI) lab data pipelines."
13
13
  readme = "README.md"
14
14
  license = { file = "LICENSE" }
@@ -42,19 +42,19 @@ classifiers = [
42
42
 
43
43
  # Runtime project dependencies. This overlaps with the 'condarun' optional list.
44
44
  dependencies = [
45
- "ataraxis-time>=3,<4",
46
- "ataraxis-base-utilities>=3,<4",
47
- "ataraxis-data-structures>=3.1.1,<4",
48
- "appdirs>=1,<2",
49
- "tqdm>=4,<5",
50
- "xxhash>=3,<4",
51
- "paramiko>=3.5.1, <4",
52
- "simple-slurm>=0,<1",
53
- "click>=8,<9",
54
- "natsort>=8,<9",
55
- "polars>=1,<2",
56
- "pyarrow>=20,<21",
57
- "filelock>=3,<4",
45
+ "ataraxis-time==3.0.0",
46
+ "ataraxis-base-utilities==3.0.1",
47
+ "ataraxis-data-structures==3.1.1",
48
+ "appdirs==1.4.4",
49
+ "tqdm==4.67.1",
50
+ "xxhash==3.5.0",
51
+ "paramiko==3.5.1",
52
+ "simple-slurm==0.3.6",
53
+ "click==8.2.1",
54
+ "natsort==8.4.0",
55
+ "polars==1.31.0",
56
+ "pyarrow==20.0.0",
57
+ "filelock==3.18.0",
58
58
  ]
59
59
 
60
60
  [project.urls]
@@ -69,14 +69,14 @@ Documentation = "https://sl-shared-assets-api-docs.netlify.app/"
69
69
  # (OSX ARM64, WIN AMD64, LIN AMD64). This specification is used by ataraxis-automation to resolve as many
70
70
  # project dependencies with conda over pip as possible.
71
71
  condarun = [
72
- "appdirs>=1,<2",
73
- "tqdm>=4,<5",
74
- "paramiko>=3.5.1,<4",
75
- "click>=8,<9",
76
- "natsort>=8,<9",
77
- "polars>=1,<2",
78
- "pyarrow>=20,<21",
79
- "filelock>=3,<4",
72
+ "appdirs==1.4.4",
73
+ "tqdm==4.67.1",
74
+ "paramiko==3.5.1",
75
+ "click==8.2.1",
76
+ "natsort==8.4.0",
77
+ "polars==1.31.0",
78
+ "pyarrow==20.0.0",
79
+ "filelock==3.18.0",
80
80
  ]
81
81
 
82
82
  # Dependencies known to be installable with conda for all development platforms (OSX ARM64, WIN AMD64, LIN AMD64).
@@ -171,6 +171,9 @@ class MesoscopeExperimentDescriptor(YamlConfig):
171
171
  """The weight of the animal, in grams, at the beginning of the session."""
172
172
  dispensed_water_volume_ml: float
173
173
  """Stores the total water volume, in milliliters, dispensed during runtime."""
174
+ is_guided: bool = False
175
+ """Determines whether the animal has to lick in the reward zone to receive water rewards. If this is set to False,
176
+ the system automatically dispenses water when the animal enters the reward zone."""
174
177
  experimenter_notes: str = "Replace this with your notes."
175
178
  """This field is not set during runtime. It is expected that each experimenter will replace this field with their
176
179
  notes made during runtime."""
@@ -88,6 +88,7 @@ class MesoscopeExperimentDescriptor(YamlConfig):
88
88
  experimenter: str
89
89
  mouse_weight_g: float
90
90
  dispensed_water_volume_ml: float
91
+ is_guided: bool = ...
91
92
  experimenter_notes: str = ...
92
93
  experimenter_given_water_volume_ml: float = ...
93
94
  incomplete: bool = ...
@@ -690,7 +690,6 @@ class ProcessingTracker(YamlConfig):
690
690
  """Saves the current processing state stored inside instance attributes to the specified .YAML file."""
691
691
  # Resets the _lock and file_path to None before dumping the data to .YAML to avoid issues with loading it
692
692
  # back.
693
- console.echo(message=f"{self.file_path}")
694
693
  original = copy.deepcopy(self)
695
694
  original.file_path = None # type: ignore
696
695
  original._lock_path = None # type: ignore
@@ -1,319 +0,0 @@
1
- from pathlib import Path
2
- from dataclasses import field, dataclass
3
-
4
- from _typeshed import Incomplete
5
- from ataraxis_data_structures import YamlConfig
6
-
7
- from .configuration_data import get_system_configuration_data as get_system_configuration_data
8
-
9
- _valid_session_types: Incomplete
10
-
11
- @dataclass()
12
- class VersionData(YamlConfig):
13
- """Stores information about the versions of important Sun lab libraries used to acquire the session's data."""
14
-
15
- python_version: str = ...
16
- sl_experiment_version: str = ...
17
-
18
- @dataclass()
19
- class ProjectConfiguration(YamlConfig):
20
- """Stores the project-specific configuration parameters that do not change between different animals and runtime
21
- sessions.
22
-
23
- An instance of this class is generated and saved as a .yaml file in the 'configuration' directory of each project
24
- when it is created. After that, the stored data is reused for every runtime (training or experiment session) carried
25
- out for each animal of the project. Additionally, a copy of the most recent configuration file is saved inside each
26
- runtime session's 'raw_data' folder, providing seamless integration between the managed data and various Sun lab
27
- (sl-) libraries.
28
-
29
- Notes:
30
- Together with SessionData, this class forms the entry point for all interactions with the data acquired in the
31
- Sun lab. The fields of this class are used to flexibly configure the runtime behavior of major data acquisition
32
- (sl-experiment) and processing (sl-forgery) libraries, adapting them for any project in the lab.
33
- """
34
-
35
- project_name: str = ...
36
- surgery_sheet_id: str = ...
37
- water_log_sheet_id: str = ...
38
- @classmethod
39
- def load(cls, configuration_path: Path) -> ProjectConfiguration:
40
- """Loads the project configuration parameters from the specified project_configuration.yaml file.
41
-
42
- This method is called during each interaction with any runtime session's data, including the creation of a new
43
- session.
44
-
45
- Args:
46
- configuration_path: The path to the project_configuration.yaml file from which to load the data.
47
-
48
- Returns:
49
- The initialized ProjectConfiguration instance that stores the configuration data for the target project.
50
-
51
- Raises:
52
- FileNotFoundError: If the specified configuration file does not exist or is not a valid YAML file.
53
- """
54
- def save(self, path: Path) -> None:
55
- """Saves class instance data to disk as a project_configuration.yaml file.
56
-
57
- This method is automatically called from the 'sl_experiment' library when a new project is created. After this
58
- method's runtime, all future project initialization calls will use the load() method to reuse configuration data
59
- saved to the .yaml file created by this method.
60
-
61
- Args:
62
- path: The path to the .yaml file to save the data to.
63
- """
64
- def _verify_data(self) -> None:
65
- """Verifies the user-modified data loaded from the project_configuration.yaml file.
66
-
67
- Since this class is explicitly designed to be modified by the user, this verification step is carried out to
68
- ensure that the loaded data matches expectations. This reduces the potential for user errors to impact the
69
- runtime behavior of the libraries using this class. This internal method is automatically called by the load()
70
- method.
71
-
72
- Raises:
73
- ValueError: If the loaded data does not match expected formats or values.
74
- """
75
-
76
- @dataclass()
77
- class RawData:
78
- """Stores the paths to the directories and files that make up the 'raw_data' session-specific directory.
79
-
80
- The raw_data directory stores the data acquired during the session runtime before and after preprocessing. Since
81
- preprocessing does not alter the data, any data in that folder is considered 'raw'.
82
-
83
- Notes:
84
- Sun lab data management strategy primarily relies on keeping multiple redundant copies of the raw_data for
85
- each acquired session. Typically, one copy is stored on the lab's processing server and the other is stored on
86
- the NAS.
87
- """
88
-
89
- raw_data_path: Path = ...
90
- camera_data_path: Path = ...
91
- mesoscope_data_path: Path = ...
92
- behavior_data_path: Path = ...
93
- zaber_positions_path: Path = ...
94
- session_descriptor_path: Path = ...
95
- hardware_state_path: Path = ...
96
- surgery_metadata_path: Path = ...
97
- project_configuration_path: Path = ...
98
- session_data_path: Path = ...
99
- experiment_configuration_path: Path = ...
100
- mesoscope_positions_path: Path = ...
101
- window_screenshot_path: Path = ...
102
- system_configuration_path: Path = ...
103
- checksum_path: Path = ...
104
- telomere_path: Path = ...
105
- ubiquitin_path: Path = ...
106
- integrity_verification_tracker_path: Path = ...
107
- version_data_path: Path = ...
108
- def resolve_paths(self, root_directory_path: Path) -> None:
109
- """Resolves all paths managed by the class instance based on the input root directory path.
110
-
111
- This method is called each time the class is instantiated to regenerate the managed path hierarchy on any
112
- machine that instantiates the class.
113
-
114
- Args:
115
- root_directory_path: The path to the top-level directory of the local hierarchy. Depending on the managed
116
- hierarchy, this has to point to a directory under the main /session, /animal, or /project directory of
117
- the managed session.
118
- """
119
- def make_directories(self) -> None:
120
- """Ensures that all major subdirectories and the root directory exist, creating any missing directories."""
121
-
122
- @dataclass()
123
- class ProcessedData:
124
- """Stores the paths to the directories and files that make up the 'processed_data' session-specific directory.
125
-
126
- The processed_data directory stores the data generated by various processing pipelines from the raw data (contents
127
- of the raw_data directory). Processed data represents an intermediate step between raw data and the dataset used in
128
- the data analysis, but is not itself designed to be analyzed.
129
- """
130
-
131
- processed_data_path: Path = ...
132
- camera_data_path: Path = ...
133
- mesoscope_data_path: Path = ...
134
- behavior_data_path: Path = ...
135
- job_logs_path: Path = ...
136
- suite2p_processing_tracker_path: Path = ...
137
- dataset_formation_tracker_path: Path = ...
138
- behavior_processing_tracker_path: Path = ...
139
- video_processing_tracker_path: Path = ...
140
- def resolve_paths(self, root_directory_path: Path) -> None:
141
- """Resolves all paths managed by the class instance based on the input root directory path.
142
-
143
- This method is called each time the class is instantiated to regenerate the managed path hierarchy on any
144
- machine that instantiates the class.
145
-
146
- Args:
147
- root_directory_path: The path to the top-level directory of the local hierarchy. Depending on the managed
148
- hierarchy, this has to point to a directory under the main /session, /animal, or /project directory of
149
- the managed session.
150
- """
151
- def make_directories(self) -> None:
152
- """Ensures that all major subdirectories and the root directory exist, creating any missing directories."""
153
-
154
- @dataclass
155
- class SessionData(YamlConfig):
156
- """Stores and manages the data layout of a single training or experiment session acquired in the Sun lab.
157
-
158
- The primary purpose of this class is to maintain the session data structure across all supported destinations and
159
- during all processing stages. It generates the paths used by all other classes from all Sun lab libraries that
160
- interact with the session's data from the point of its creation and until the data is integrated into an
161
- analysis dataset.
162
-
163
- When necessary, the class can be used to either generate a new session or load the layout of an already existing
164
- session. When the class is used to create a new session, it generates the new session's name using the current
165
- UTC timestamp, accurate to microseconds. This ensures that each session name is unique and preserves the overall
166
- session order.
167
-
168
- Notes:
169
- This class is specifically designed for working with the data from a single session, performed by a single
170
- animal under the specific experiment. The class is used to manage both raw and processed data. It follows the
171
- data through acquisition, preprocessing and processing stages of the Sun lab data workflow. Together with
172
- ProjectConfiguration class, this class serves as an entry point for all interactions with the managed session's
173
- data.
174
- """
175
-
176
- project_name: str
177
- animal_id: str
178
- session_name: str
179
- session_type: str
180
- acquisition_system: str
181
- experiment_name: str | None
182
- raw_data: RawData = field(default_factory=Incomplete)
183
- processed_data: ProcessedData = field(default_factory=Incomplete)
184
- def __post_init__(self) -> None:
185
- """Ensures raw_data and processed_data are always instances of RawData and ProcessedData."""
186
- @classmethod
187
- def create(
188
- cls,
189
- project_name: str,
190
- animal_id: str,
191
- session_type: str,
192
- experiment_name: str | None = None,
193
- session_name: str | None = None,
194
- ) -> SessionData:
195
- """Creates a new SessionData object and generates the new session's data structure on the local PC.
196
-
197
- This method is intended to be called exclusively by the sl-experiment library to create new training or
198
- experiment sessions and generate the session data directory tree.
199
-
200
- Notes:
201
- To load an already existing session data structure, use the load() method instead.
202
-
203
- This method automatically dumps the data of the created SessionData instance into the session_data.yaml file
204
- inside the root raw_data directory of the created hierarchy. It also finds and dumps other configuration
205
- files, such as project_configuration.yaml, experiment_configuration.yaml, and system_configuration.yaml into
206
- the same raw_data directory. This ensures that if the session's runtime is interrupted unexpectedly, the
207
- acquired data can still be processed.
208
-
209
- Args:
210
- project_name: The name of the project for which the data is acquired.
211
- animal_id: The ID code of the animal for which the data is acquired.
212
- session_type: The type of the session. Primarily, this determines how to read the session_descriptor.yaml
213
- file. Valid options are 'Lick training', 'Run training', 'Window checking', or 'Experiment'.
214
- experiment_name: The name of the experiment executed during managed session. This optional argument is only
215
- used for 'Experiment' session types. It is used to find the experiment configuration .YAML file.
216
- session_name: An optional session_name override. Generally, this argument should not be provided for most
217
- sessions. When provided, the method uses this name instead of generating a new timestamp-based name.
218
- This is only used during the 'ascension' runtime to convert old data structures to the modern
219
- lab standards.
220
-
221
- Returns:
222
- An initialized SessionData instance that stores the layout of the newly created session's data.
223
- """
224
- @classmethod
225
- def load(
226
- cls, session_path: Path, processed_data_root: Path | None = None, make_processed_data_directory: bool = False
227
- ) -> SessionData:
228
- """Loads the SessionData instance from the target session's session_data.yaml file.
229
-
230
- This method is used to load the data layout information of an already existing session. Primarily, this is used
231
- when preprocessing or processing session data. Due to how SessionData is stored and used in the lab, this
232
- method always loads the data layout from the session_data.yaml file stored inside the raw_data session
233
- subfolder. Currently, all interactions with Sun lab data require access to the 'raw_data' folder.
234
-
235
- Notes:
236
- To create a new session, use the create() method instead.
237
-
238
- Args:
239
- session_path: The path to the root directory of an existing session, e.g.: root/project/animal/session.
240
- processed_data_root: If processed data is kept on a drive different from the one that stores raw data,
241
- provide the path to the root project directory (directory that stores all Sun lab projects) on that
242
- drive. The method will automatically resolve the project/animal/session/processed_data hierarchy using
243
- this root path. If raw and processed data are kept on the same drive, keep this set to None.
244
- make_processed_data_directory: Determines whether this method should create processed_data directory if it
245
- does not exist.
246
-
247
- Returns:
248
- An initialized SessionData instance for the session whose data is stored at the provided path.
249
-
250
- Raises:
251
- FileNotFoundError: If the 'session_data.yaml' file is not found under the session_path/raw_data/ subfolder.
252
-
253
- """
254
- def _save(self) -> None:
255
- """Saves the instance data to the 'raw_data' directory of the managed session as a 'session_data.yaml' file.
256
-
257
- This is used to save the data stored in the instance to disk, so that it can be reused during preprocessing or
258
- data processing. The method is intended to only be used by the SessionData instance itself during its
259
- create() method runtime.
260
- """
261
-
262
- @dataclass()
263
- class ProcessingTracker(YamlConfig):
264
- """Wraps the .yaml file that tracks the state of a data processing runtime and provides tools for communicating the
265
- state between multiple processes in a thread-safe manner.
266
-
267
- Primarily, this tracker class is used by all remote data processing pipelines in the lab to prevent race conditions
268
- and make it impossible to run multiple processing runtimes at the same time.
269
- """
270
-
271
- file_path: Path
272
- _is_complete: bool = ...
273
- _encountered_error: bool = ...
274
- _is_running: bool = ...
275
- _lock_path: str = field(init=False)
276
- def __post_init__(self) -> None: ...
277
- def _load_state(self) -> None:
278
- """Reads the current processing state from the wrapped .YAML file."""
279
- def _save_state(self) -> None:
280
- """Saves the current processing state stored inside instance attributes to the specified .YAML file."""
281
- def start(self) -> None:
282
- """Configures the tracker file to indicate that the tracked processing runtime is currently running.
283
-
284
- All further attempts to start the same processing runtime for the same session's data will automatically abort
285
- with an error.
286
-
287
- Raises:
288
- TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
289
- """
290
- def error(self) -> None:
291
- """Configures the tracker file to indicate that the tracked processing runtime encountered an error and failed
292
- to complete.
293
-
294
- This method will only work for an active runtime. When called for an active runtime, it expects the runtime to
295
- be aborted with an error after the method returns. It configures the target tracker to allow other processes
296
- to restart the runtime at any point after this method returns, so it is UNSAFE to do any further processing
297
- from the process that calls this method.
298
-
299
- Raises:
300
- TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
301
- """
302
- def stop(self) -> None:
303
- """Mark processing as started.
304
-
305
- Raises:
306
- TimeoutError: If the file lock for the target .YAML file cannot be acquired within the timeout period.
307
- """
308
- @property
309
- def is_complete(self) -> bool:
310
- """Returns True if the tracker wrapped by the instance indicates that the processing runtime has been completed
311
- successfully and False otherwise."""
312
- @property
313
- def encountered_error(self) -> bool:
314
- """Returns True if the tracker wrapped by the instance indicates that the processing runtime aborted due to
315
- encountering an error and False otherwise."""
316
- @property
317
- def is_running(self) -> bool:
318
- """Returns True if the tracker wrapped by the instance indicates that the processing runtime is currently
319
- running and False otherwise."""