sl-shared-assets 1.0.0rc21__py3-none-any.whl → 1.0.0rc23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sl-shared-assets might be problematic. Click here for more details.

sl_shared_assets/cli.py CHANGED
@@ -25,7 +25,32 @@ from .data_classes import (
25
25
  required=True,
26
26
  help="The absolute path to the session whose raw data needs to be verified for potential corruption.",
27
27
  )
28
- def verify_session_integrity(session_path: str) -> None:
28
+ @click.option(
29
+ "-c",
30
+ "--create_processed_directories",
31
+ is_flag=True,
32
+ show_default=True,
33
+ default=False,
34
+ help=(
35
+ "Determines whether to create the processed data hierarchy. This flag should be disabled for most runtimes. "
36
+ "Primarily, it is used by lab acquisition system code to generate processed data directories on the remote "
37
+ "compute servers as part of the data preprocessing pipeline."
38
+ ),
39
+ )
40
+ @click.option(
41
+ "-pdr",
42
+ "--processed_data_root",
43
+ type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
44
+ required=False,
45
+ help=(
46
+ "The absolute path to the directory where processed data from all projects is stored on the machine that runs "
47
+ "this command. This argument is used when calling the CLI on the BioHPC server, which uses different data "
48
+ "volumes for raw and processed data. Note, the input path must point to the root directory, as it will be "
49
+ "automatically modified to include the project name, the animal id, and the session ID. This argument is only "
50
+ "used if 'create_processed_directories' flag is True."
51
+ ),
52
+ )
53
+ def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
29
54
  """Checks the integrity of the target session's raw data (contents of the raw_data directory).
30
55
 
31
56
  This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -33,9 +58,14 @@ def verify_session_integrity(session_path: str) -> None:
33
58
  always verifies the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
34
59
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
35
60
  'incomplete' and automatically excluding it from all further automated processing runtimes.
61
+
62
+ The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
63
+ processed session. This use case is fully automated and should not be triggered manually by the user.
36
64
  """
37
65
  session = Path(session_path)
38
- if verify_session_checksum(session):
66
+ if verify_session_checksum(
67
+ session, create_processed_data_directory=create_processed_directories, processed_data_root=processed_data_root
68
+ ):
39
69
  console.echo(message=f"Session {session.stem} raw data integrity: verified.", level=LogLevel.SUCCESS)
40
70
  else:
41
71
  console.echo(message=f"Session {session.stem} raw data integrity: compromised!", level=LogLevel.ERROR)
@@ -288,6 +318,15 @@ def generate_experiment_configuration_file(project: str, experiment: str, state_
288
318
  acquisition_system = get_system_configuration_data()
289
319
  file_path = acquisition_system.paths.root_directory.joinpath(project, "configuration", f"{experiment}.yaml")
290
320
 
321
+ if not acquisition_system.paths.root_directory.joinpath(project).exists():
322
+ message = (
323
+ f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
324
+ f"The target project does not exist on the local machine (PC). Use the "
325
+ f"'sl-create-project' CLI command to create the project before creating new experiment configuration(s). "
326
+ )
327
+ console.error(message=message, error=ValueError)
328
+ raise ValueError(message) # Fall-back to appease mypy, should not be reachable
329
+
291
330
  # Loops over the number of requested states and, for each, generates a precursor experiment state field inside the
292
331
  # 'states' dictionary.
293
332
  states = {}
sl_shared_assets/cli.pyi CHANGED
@@ -1,3 +1,5 @@
1
+ from pathlib import Path
2
+
1
3
  from .tools import (
2
4
  ascend_tyche_data as ascend_tyche_data,
3
5
  verify_session_checksum as verify_session_checksum,
@@ -13,7 +15,7 @@ from .data_classes import (
13
15
  set_system_configuration_file as set_system_configuration_file,
14
16
  )
15
17
 
16
- def verify_session_integrity(session_path: str) -> None:
18
+ def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
17
19
  """Checks the integrity of the target session's raw data (contents of the raw_data directory).
18
20
 
19
21
  This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -21,6 +23,9 @@ def verify_session_integrity(session_path: str) -> None:
21
23
  always verifies the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
22
24
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
23
25
  'incomplete' and automatically excluding it from all further automated processing runtimes.
26
+
27
+ The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
28
+ processed session. This use case is fully automated and should not be triggered manually by the user.
24
29
  """
25
30
 
26
31
  def generate_project_manifest_file(
@@ -111,6 +111,9 @@ class MesoscopePaths:
111
111
  sharing protocol, such as SMB."""
112
112
  harvesters_cti_path: Path = Path("/opt/mvIMPACT_Acquire/lib/x86_64/mvGenTLProducer.cti")
113
113
  """The path to the GeniCam CTI file used to connect to Harvesters-managed cameras."""
114
+ server_processed_data_root: Path = Path("/workdir/sun_data")
115
+ """The absolute path to the BioHPC server directory used to store the processed data from all Sun lab projects.
116
+ This path is relative to the server root and is only used when submitting remote jobs to the server."""
114
117
 
115
118
 
116
119
  @dataclass()
@@ -125,10 +128,17 @@ class MesoscopeCameras:
125
128
  right_camera_index: int = 2
126
129
  """The index of the right body camera (from animal's perspective) in the list of all available OpenCV-managed
127
130
  cameras."""
128
- quantization_parameter: int = 15
129
- """The quantization parameter used by all cameras to encode acquired frames as video files. This controls how much
130
- data is discarded when encoding each video frame, directly contributing to the encoding speed, resultant video file
131
- size and video quality."""
131
+ face_camera_quantization_parameter: int = 15
132
+ """The quantization parameter used by the face camera to encode acquired frames as video files. This controls how
133
+ much data is discarded when encoding each video frame, directly contributing to the encoding speed, resultant video
134
+ file size and video quality."""
135
+ body_camera_quantization_parameter: int = 15
136
+ """The quantization parameter used by the left and right body cameras to encode acquired frames as video files.
137
+ See 'face_camera_quantization_parameter' field for more information on what this parameter does."""
138
+ display_face_camera_frames: bool = True
139
+ """Determines whether to display the frames grabbed from the face camera during runtime."""
140
+ display_body_camera_frames: bool = True
141
+ """Determines whether to display the frames grabbed from the left and right body cameras during runtime."""
132
142
 
133
143
 
134
144
  @dataclass()
@@ -141,14 +151,13 @@ class MesoscopeMicroControllers:
141
151
  """The USB port used by the Sensor Microcontroller."""
142
152
  encoder_port: str = "/dev/ttyACM2"
143
153
  """The USB port used by the Encoder Microcontroller."""
144
- mesoscope_start_ttl_module_id: int = 1
145
- """The unique byte-code ID of the TTL module instance used to send mesoscope frame acquisition start trigger
146
- signals to the ScanImagePC."""
147
- mesoscope_stop_ttl_module_id: int = 2
148
- """The unique byte-code ID of the TTL module instance used to send mesoscope frame acquisition stop trigger
149
- signals to the ScanImagePC."""
154
+ debug: bool = False
155
+ """Determines whether to run the managed acquisition system in the 'debug mode'. This mode should be disabled
156
+ during most runtimes. It is used during initial system calibration and testing and prints a lot of generally
157
+ redundant information into the terminal."""
150
158
  mesoscope_ttl_pulse_duration_ms: int = 10
151
- """The duration of the HIGH phase of all outgoing mesoscope TTL pulses, in milliseconds."""
159
+ """The duration of the HIGH phase of all outgoing TTL pulses that target the Mesoscope (enable or disable mesoscope
160
+ frame acquisition), in milliseconds."""
152
161
  minimum_break_strength_g_cm: float = 43.2047
153
162
  """The minimum torque applied by the running wheel break in gram centimeter. This is the torque the break delivers
154
163
  at minimum voltage (break is disabled)."""
@@ -240,10 +249,11 @@ class MesoscopeAdditionalFirmware:
240
249
  """Stores the configuration parameters for all firmware and hardware components not assembled in the Sun lab."""
241
250
 
242
251
  headbar_port: str = "/dev/ttyUSB0"
243
- """The USB port used by the HeadBar Zaber motor controllers (devices). Note, this motor group also includes the
244
- running wheel x-axis motor."""
252
+ """The USB port used by the HeadBar Zaber motor controllers (devices)."""
245
253
  lickport_port: str = "/dev/ttyUSB1"
246
254
  """The USB port used by the LickPort Zaber motor controllers (devices)."""
255
+ wheel_port: str = "/dev/ttyUSB2"
256
+ """The USB port used by the (running) Wheel Zaber motor controllers (devices)."""
247
257
  unity_ip: str = "127.0.0.1"
248
258
  """The IP address of the MQTT broker used to communicate with the Unity game engine."""
249
259
  unity_port: int = 1883
@@ -291,6 +301,7 @@ class MesoscopeSystemConfiguration(YamlConfig):
291
301
  self.paths.nas_directory = Path(self.paths.nas_directory)
292
302
  self.paths.mesoscope_directory = Path(self.paths.mesoscope_directory)
293
303
  self.paths.harvesters_cti_path = Path(self.paths.harvesters_cti_path)
304
+ self.paths.server_processed_data_root = Path(self.paths.server_processed_data_root)
294
305
 
295
306
  # Converts valve_calibration data from dictionary to a tuple of tuples format
296
307
  if not isinstance(self.microcontrollers.valve_calibration_data, tuple):
@@ -298,6 +309,23 @@ class MesoscopeSystemConfiguration(YamlConfig):
298
309
  (k, v) for k, v in self.microcontrollers.valve_calibration_data.items()
299
310
  )
300
311
 
312
+ # Verifies the contents of the valve calibration data loaded from the config file.
313
+ valve_calibration_data = self.microcontrollers.valve_calibration_data
314
+ if not all(
315
+ isinstance(item, tuple)
316
+ and len(item) == 2
317
+ and isinstance(item[0], (int, float))
318
+ and isinstance(item[1], (int, float))
319
+ for item in valve_calibration_data
320
+ ):
321
+ message = (
322
+ f"Unable to initialize the MesoscopeSystemConfiguration class. Expected each item under the "
323
+ f"'valve_calibration_data' field of the Mesoscope-VR acquisition system configuration .yaml file to be "
324
+ f"a tuple of two integer or float values, but instead encountered {valve_calibration_data} with at "
325
+ f"least one incompatible element."
326
+ )
327
+ console.error(message=message, error=TypeError)
328
+
301
329
  def save(self, path: Path) -> None:
302
330
  """Saves class instance data to disk as a 'mesoscope_system_configuration.yaml' file.
303
331
 
@@ -322,6 +350,7 @@ class MesoscopeSystemConfiguration(YamlConfig):
322
350
  original.paths.nas_directory = str(original.paths.nas_directory) # type: ignore
323
351
  original.paths.mesoscope_directory = str(original.paths.mesoscope_directory) # type: ignore
324
352
  original.paths.harvesters_cti_path = str(original.paths.harvesters_cti_path) # type: ignore
353
+ original.paths.server_processed_data_root = str(original.paths.server_processed_data_root) # type: ignore
325
354
 
326
355
  # Converts valve calibration data into dictionary format
327
356
  if isinstance(original.microcontrollers.valve_calibration_data, tuple):
@@ -422,7 +451,7 @@ def get_system_configuration_data() -> MesoscopeSystemConfiguration:
422
451
  message = (
423
452
  "Unable to resolve the path to the local system configuration file, as local machine does not have the "
424
453
  "Sun lab data directory. Generate the local configuration file and Sun lab data directory by calling the "
425
- "'sl-config' CLI command and rerun the command that produced this error."
454
+ "'sl-create-system-config' CLI command and rerun the command that produced this error."
426
455
  )
427
456
  console.error(message=message, error=FileNotFoundError)
428
457
 
@@ -435,7 +464,7 @@ def get_system_configuration_data() -> MesoscopeSystemConfiguration:
435
464
  message = (
436
465
  "Unable to resolve the path to the local system configuration file, as the file pointed by the path stored "
437
466
  "in Sun lab data directory does not exist. Generate a new local configuration file by calling the "
438
- "'sl-config' CLI command and rerun the command that produced this error."
467
+ "'sl-create-system-config' CLI command and rerun the command that produced this error."
439
468
  )
440
469
  console.error(message=message, error=FileNotFoundError)
441
470
 
@@ -58,6 +58,7 @@ class MesoscopePaths:
58
58
  nas_directory: Path = ...
59
59
  mesoscope_directory: Path = ...
60
60
  harvesters_cti_path: Path = ...
61
+ server_processed_data_root: Path = ...
61
62
 
62
63
  @dataclass()
63
64
  class MesoscopeCameras:
@@ -66,7 +67,10 @@ class MesoscopeCameras:
66
67
  face_camera_index: int = ...
67
68
  left_camera_index: int = ...
68
69
  right_camera_index: int = ...
69
- quantization_parameter: int = ...
70
+ face_camera_quantization_parameter: int = ...
71
+ body_camera_quantization_parameter: int = ...
72
+ display_face_camera_frames: bool = ...
73
+ display_body_camera_frames: bool = ...
70
74
 
71
75
  @dataclass()
72
76
  class MesoscopeMicroControllers:
@@ -75,8 +79,7 @@ class MesoscopeMicroControllers:
75
79
  actor_port: str = ...
76
80
  sensor_port: str = ...
77
81
  encoder_port: str = ...
78
- mesoscope_start_ttl_module_id: int = ...
79
- mesoscope_stop_ttl_module_id: int = ...
82
+ debug: bool = ...
80
83
  mesoscope_ttl_pulse_duration_ms: int = ...
81
84
  minimum_break_strength_g_cm: float = ...
82
85
  maximum_break_strength_g_cm: float = ...
@@ -111,6 +114,7 @@ class MesoscopeAdditionalFirmware:
111
114
 
112
115
  headbar_port: str = ...
113
116
  lickport_port: str = ...
117
+ wheel_port: str = ...
114
118
  unity_ip: str = ...
115
119
  unity_port: int = ...
116
120
 
@@ -438,6 +438,16 @@ class SessionData(YamlConfig):
438
438
  # Constructs the root session directory path
439
439
  session_path = acquisition_system.paths.root_directory.joinpath(project_name, animal_id, session_name)
440
440
 
441
+ # Prevents creating new sessions for non-existent projects.
442
+ if not acquisition_system.paths.root_directory.joinpath(project_name).exists():
443
+ message = (
444
+ f"Unable to create the session directory hierarchy for the session {session_name} of the animal "
445
+ f"'{animal_id}' and project '{project_name}'. The project does not exist on the local machine (PC). "
446
+ f"Use the 'sl-create-project' CLI command to create the project on the local machine before creating "
447
+ f"new sessions."
448
+ )
449
+ console.error(message=message, error=FileNotFoundError)
450
+
441
451
  # Handles potential session name conflicts
442
452
  counter = 0
443
453
  while session_path.exists():
@@ -5,6 +5,7 @@ up a given project."""
5
5
  from pathlib import Path
6
6
 
7
7
  import polars as pl
8
+ from ataraxis_base_utilities import console
8
9
 
9
10
  from ..data_classes import SessionData
10
11
  from .packaging_tools import calculate_directory_checksum
@@ -33,9 +34,25 @@ def generate_project_manifest(
33
34
  is different from the 'raw_project_directory'. Typically, this would be the case on remote compute server(s)
34
35
  and not on local machines.
35
36
  """
37
+
38
+ if not raw_project_directory.exists():
39
+ message = (
40
+ f"Unable to generate the project manifest file for the requested project {raw_project_directory.stem}. The "
41
+ f"specified project directory does not exist."
42
+ )
43
+ console.error(message=message, error=FileNotFoundError)
44
+
36
45
  # Finds all raw data directories
37
46
  session_directories = [directory.parent for directory in raw_project_directory.rglob("raw_data")]
38
47
 
48
+ if len(session_directories) == 0:
49
+ message = (
50
+ f"Unable to generate the project manifest file for the requested project {raw_project_directory.stem}. The "
51
+ f"project does not contain any raw session data. To generate the manifest file, the project must contain "
52
+ f"at least one valid experiment or training session."
53
+ )
54
+ console.error(message=message, error=FileNotFoundError)
55
+
39
56
  # Precreates the 'manifest' dictionary structure
40
57
  manifest: dict[str, list[str | bool]] = {
41
58
  "animal": [], # Animal IDs.
@@ -120,7 +137,9 @@ def generate_project_manifest(
120
137
  )
121
138
 
122
139
 
123
- def verify_session_checksum(session_path: Path) -> bool:
140
+ def verify_session_checksum(
141
+ session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
142
+ ) -> bool:
124
143
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
125
144
  comparing it against the checksum stored in the ax_checksum.txt file.
126
145
 
@@ -132,16 +151,27 @@ def verify_session_checksum(session_path: Path) -> bool:
132
151
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
133
152
  it from all further automatic processing.
134
153
 
154
+ This function is also used to create the processed data hierarchy on the BioHPC server, when it is called as
155
+ part of the data preprocessing runtime performed by a data acquisition system.
156
+
135
157
  Args:
136
158
  session_path: The path to the session directory to be verified. Note, the input session directory must contain
137
159
  the 'raw_data' subdirectory.
160
+ create_processed_data_directory: Determines whether to create the processed data hierarchy during runtime.
161
+ processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
162
+ the root directory where to store the processed data from all projects, and it will be automatically
163
+ modified to include the project name, the animal name, and the session ID.
138
164
 
139
165
  Returns:
140
166
  True if the checksum matches, False otherwise.
141
167
  """
142
168
 
143
- # Loads session data layout
144
- session_data = SessionData.load(session_path=session_path)
169
+ # Loads session data layout. If configured to do so, also creates the processed data hierarchy
170
+ session_data = SessionData.load(
171
+ session_path=session_path,
172
+ processed_data_root=processed_data_root,
173
+ make_processed_data_directory=create_processed_data_directory,
174
+ )
145
175
 
146
176
  # Re-calculates the checksum for the raw_data directory
147
177
  calculated_checksum = calculate_directory_checksum(
@@ -27,7 +27,9 @@ def generate_project_manifest(
27
27
  and not on local machines.
28
28
  """
29
29
 
30
- def verify_session_checksum(session_path: Path) -> bool:
30
+ def verify_session_checksum(
31
+ session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
32
+ ) -> bool:
31
33
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
32
34
  comparing it against the checksum stored in the ax_checksum.txt file.
33
35
 
@@ -39,9 +41,16 @@ def verify_session_checksum(session_path: Path) -> bool:
39
41
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
40
42
  it from all further automatic processing.
41
43
 
44
+ This function is also used to create the processed data hierarchy on the BioHPC server, when it is called as
45
+ part of the data preprocessing runtime performed by a data acquisition system.
46
+
42
47
  Args:
43
48
  session_path: The path to the session directory to be verified. Note, the input session directory must contain
44
49
  the 'raw_data' subdirectory.
50
+ create_processed_data_directory: Determines whether to create the processed data hierarchy during runtime.
51
+ processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
52
+ the root directory where to store the processed data from all projects, and it will be automatically
53
+ modified to include the project name, the animal name, and the session ID.
45
54
 
46
55
  Returns:
47
56
  True if the checksum matches, False otherwise.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sl-shared-assets
3
- Version: 1.0.0rc21
3
+ Version: 1.0.0rc23
4
4
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
5
5
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
6
6
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -1,15 +1,15 @@
1
1
  sl_shared_assets/__init__.py,sha256=rscR353jiyUQ-wpguTLOM-A5Lqr1ftQtuTan1D0AtR0,2196
2
2
  sl_shared_assets/__init__.pyi,sha256=Ye6eY_y_l9CTqwbCf-OxXOphxXKfn9UJnuw09DdLEtc,2405
3
- sl_shared_assets/cli.py,sha256=FRc452bUQCDMtMLVxIRr178pbuCVTYWplJAzumy0Ruo,14971
4
- sl_shared_assets/cli.pyi,sha256=dmkg30arGgA3pLipiuQFb8jq2IPy53srE6CdFAaCdwc,5620
3
+ sl_shared_assets/cli.py,sha256=-w9RAbHEcMspp6UDiXHM0fYSUwPSjJpZZcX3T9hLzUc,17103
4
+ sl_shared_assets/cli.pyi,sha256=DuhJhqAXB7e5uOdzCI2c29FwsVwpQefR2eLUfW0gWyQ,5931
5
5
  sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  sl_shared_assets/data_classes/__init__.py,sha256=b0BwTAJCD1zbwjd2UdpXheq61q1sgBuYhtAL-GHA2h4,1835
7
7
  sl_shared_assets/data_classes/__init__.pyi,sha256=e2082sm5pSw3bxNZGqwcRhuKLH8T7hcEo6kWtOZNDBU,1968
8
- sl_shared_assets/data_classes/configuration_data.py,sha256=qCLQfG22qWQGX0Uc2WyWAsNsc4kkonMSmV0CRM7QfhQ,27826
9
- sl_shared_assets/data_classes/configuration_data.pyi,sha256=dTgLSwgqmnIVpoDxN1mETX-cwwlBZ9PJ1jlFaktitbs,9413
8
+ sl_shared_assets/data_classes/configuration_data.py,sha256=Ik-6qgdc3DoWIrO0QxxalK92PI83vnkVmn3aZqskfbk,29882
9
+ sl_shared_assets/data_classes/configuration_data.pyi,sha256=CCnU0DjAdwB7Hdi1N7aKeSLixkpr1aXtTQ0QsxWtHlM,9563
10
10
  sl_shared_assets/data_classes/runtime_data.py,sha256=Q7Ykf9hgrw1jYKXa53mn_LW8G2cPmLLuxgGkP6qQcc4,15483
11
11
  sl_shared_assets/data_classes/runtime_data.pyi,sha256=PxaCbeF9COR4ri91pdzh7zVrqaz2KEDYB1EoLhZQC_c,6618
12
- sl_shared_assets/data_classes/session_data.py,sha256=tQMt1i9Bloj5K9kYS1gkCdOtksd_8_nHFMZQHLaKCoE,36754
12
+ sl_shared_assets/data_classes/session_data.py,sha256=Jf-WPX1f7sPxWOrU2oq79YZhwZZlLOSVjN7YntZdO80,37401
13
13
  sl_shared_assets/data_classes/session_data.pyi,sha256=a3nPC42mQniUPk2HsAM1_DWUa-BfhzDljQfDQh2rSus,13071
14
14
  sl_shared_assets/data_classes/surgery_data.py,sha256=qsMj3NkjhylAT9b_wHBY-1XwTu2xsZcZatdECmkA7Bs,7437
15
15
  sl_shared_assets/data_classes/surgery_data.pyi,sha256=rf59lJ3tGSYKHQlEGXg75MnjajBwl0DYhL4TClAO4SM,2605
@@ -25,12 +25,12 @@ sl_shared_assets/tools/ascension_tools.py,sha256=kIqYGX9F8lRao_LaVOacIiT8J9SypTv
25
25
  sl_shared_assets/tools/ascension_tools.pyi,sha256=tQCDdWZ20ZjUjpMs8aGIN0yBg5ff3j6spi62b3Han4o,3755
26
26
  sl_shared_assets/tools/packaging_tools.py,sha256=oY-EWuTiMfWobYllVZy0piGlVnTHCpPY1GF-WmqQdj4,7269
27
27
  sl_shared_assets/tools/packaging_tools.pyi,sha256=vgGbAQCExwg-0A5F72MzEhzHxu97Nqg1yuz-5P89ycU,3118
28
- sl_shared_assets/tools/project_management_tools.py,sha256=IV4xAfy3_SpV50Xvt7EdVPVLojp9di1rEmO4KoAgFtY,8740
29
- sl_shared_assets/tools/project_management_tools.pyi,sha256=pQY2--Kn3pKSADsArDRmbwGR7JqHD_2qdB0LJBbW_xo,2735
28
+ sl_shared_assets/tools/project_management_tools.py,sha256=E50xhjJZlq6zQXnj8DHm1KkVcTDK7lr61m1ge-u25TY,10476
29
+ sl_shared_assets/tools/project_management_tools.pyi,sha256=bRwohpGa98LDIy1ntLAPSKEGRB4S7ZmnIfyDy097c94,3467
30
30
  sl_shared_assets/tools/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
31
31
  sl_shared_assets/tools/transfer_tools.pyi,sha256=FoH7eYZe7guGHfPr0MK5ggO62uXKwD2aJ7h1Bu7PaEE,3294
32
- sl_shared_assets-1.0.0rc21.dist-info/METADATA,sha256=jPQkQ9cED7lsKrqOjYWL7iWzqDZbZ4PJz6IvJx9BoEM,48613
33
- sl_shared_assets-1.0.0rc21.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
34
- sl_shared_assets-1.0.0rc21.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
35
- sl_shared_assets-1.0.0rc21.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
36
- sl_shared_assets-1.0.0rc21.dist-info/RECORD,,
32
+ sl_shared_assets-1.0.0rc23.dist-info/METADATA,sha256=EZHEpyWMHkhYkd9pOuCfmbyWHVMXAK_K-Yl0AVv129E,48613
33
+ sl_shared_assets-1.0.0rc23.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
34
+ sl_shared_assets-1.0.0rc23.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
35
+ sl_shared_assets-1.0.0rc23.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
36
+ sl_shared_assets-1.0.0rc23.dist-info/RECORD,,