sl-shared-assets 1.0.0rc27__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.

Potentially problematic release.


This version of sl-shared-assets might be problematic.

sl_shared_assets/cli.py CHANGED
@@ -8,6 +8,7 @@ from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
  from .tools import ascend_tyche_data, verify_session_checksum, generate_project_manifest
  from .server import generate_server_credentials
  from .data_classes import (
+ SessionData,
  ExperimentState,
  ProjectConfiguration,
  MesoscopeSystemConfiguration,
@@ -57,18 +58,25 @@ def verify_session_integrity(session_path: str, create_processed_directories: bo
  that stores the data checksum generated before transferring the data to long-term storage destination. This function
  always verified the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
- 'incomplete' and automatically excluding it from all further automated processing runtimes.
+ 'incomplete' and automatically excluding it from all further automated processing runtimes. if the session data
+ is intact, generates a 'verified.bin' marker file inside the session's raw_data folder.

  The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
  processed session. This use case is fully automated and should not be triggered manually by the user.
  """
  session = Path(session_path)
- if verify_session_checksum(
+
+ # Runs the verification process
+ verify_session_checksum(
  session, create_processed_data_directory=create_processed_directories, processed_data_root=processed_data_root
- ):
- console.echo(message=f"Session {session.stem} raw data integrity: verified.", level=LogLevel.SUCCESS)
+ )
+
+ # Checks the outcome of the verification process
+ session_data = SessionData.load(session_path=session)
+ if session_data.raw_data.verified_bin_path.exists():
+ console.echo(message=f"Session {session.stem} raw data integrity: Verified.", level=LogLevel.SUCCESS)
  else:
- console.echo(message=f"Session {session.stem} raw data integrity: compromised!", level=LogLevel.ERROR)
+ console.echo(message=f"Session {session.stem} raw data integrity: Compromised!", level=LogLevel.ERROR)


  @click.command()
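The net effect of this change is that the verification outcome is no longer returned as a boolean; callers infer it from the 'verified.bin' marker. A minimal sketch of the same pattern from calling code, assuming the import paths mirror the submodules shown in this diff; the helper name `session_is_verified` is hypothetical:

```python
from pathlib import Path

from sl_shared_assets.data_classes import SessionData
from sl_shared_assets.tools import verify_session_checksum


def session_is_verified(session_path: Path) -> bool:
    """Illustrative helper: re-verifies a session and reports the outcome via the marker file."""
    # Recomputes the raw_data checksum. On a match this creates raw_data/verified.bin;
    # on a mismatch it removes telomere.bin (and verified.bin) instead of returning a value.
    verify_session_checksum(session_path, create_processed_data_directory=False)

    # The marker file is now the only signal of the verification outcome.
    session_data = SessionData.load(session_path=session_path)
    return session_data.raw_data.verified_bin_path.exists()
```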
sl_shared_assets/cli.pyi CHANGED
@@ -7,6 +7,7 @@ from .tools import (
  )
  from .server import generate_server_credentials as generate_server_credentials
  from .data_classes import (
+ SessionData as SessionData,
  ExperimentState as ExperimentState,
  ProjectConfiguration as ProjectConfiguration,
  MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
@@ -22,7 +23,8 @@ def verify_session_integrity(session_path: str, create_processed_directories: bo
  that stores the data checksum generated before transferring the data to long-term storage destination. This function
  always verified the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
- 'incomplete' and automatically excluding it from all further automated processing runtimes.
+ 'incomplete' and automatically excluding it from all further automated processing runtimes. if the session data
+ is intact, generates a 'verified.bin' marker file inside the session's raw_data folder.

  The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
  processed session. This use case is fully automated and should not be triggered manually by the user.
sl_shared_assets/data_classes/configuration_data.py CHANGED
@@ -207,7 +207,7 @@ class MesoscopeMicroControllers:
  PC, conserving communication bandwidth."""
  torque_averaging_pool_size: int = 10
  """The number of torque sensor readouts to average together to produce the final torque sensor readout value."""
- wheel_encoder_ppr = 8192
+ wheel_encoder_ppr: int = 8192
  """The resolution of the managed quadrature encoder, in Pulses Per Revolution (PPR). This is the number of
  quadrature pulses the encoder emits per full 360-degree rotation."""
  wheel_encoder_report_cw: bool = False
@@ -219,9 +219,9 @@ class MesoscopeMicroControllers:
  wheel_encoder_delta_threshold_pulse: int = 15
  """The minimum difference, in encoder pulse counts, between two encoder readouts for the change to be reported to
  the PC. This is used to prevent reporting idle readouts and filter out sub-threshold noise."""
- wheel_encoder_polling_delay_us = 500
+ wheel_encoder_polling_delay_us: int = 500
  """The delay, in microseconds, between any two successive encoder state readouts."""
- cm_per_unity_unit = 10.0
+ cm_per_unity_unit: float = 10.0
  """The length of each Unity 'unit' in real-world centimeters recorded by the running wheel encoder."""
  screen_trigger_pulse_duration_ms: int = 500
  """The duration of the HIGH phase of the TTL pulse used to toggle the VR screens between ON and OFF states."""
sl_shared_assets/data_classes/configuration_data.pyi CHANGED
@@ -97,12 +97,12 @@ class MesoscopeMicroControllers:
  torque_signal_threshold_adc: int = ...
  torque_delta_threshold_adc: int = ...
  torque_averaging_pool_size: int = ...
- wheel_encoder_ppr = ...
+ wheel_encoder_ppr: int = ...
  wheel_encoder_report_cw: bool = ...
  wheel_encoder_report_ccw: bool = ...
  wheel_encoder_delta_threshold_pulse: int = ...
- wheel_encoder_polling_delay_us = ...
- cm_per_unity_unit = ...
+ wheel_encoder_polling_delay_us: int = ...
+ cm_per_unity_unit: float = ...
  screen_trigger_pulse_duration_ms: int = ...
  auditory_tone_duration_ms: int = ...
  valve_calibration_pulse_count: int = ...
sl_shared_assets/data_classes/session_data.py CHANGED
@@ -217,6 +217,11 @@ class RawData:
  the long-term storage destinations (NAS and Server) and the integrity of the moved data is verified on at least one
  destination. During 'purge' sl-experiment runtimes, the library discovers and removes all session data marked with
  'ubiquitin.bin' files from the machine that runs the code."""
+ verified_bin_path: Path = Path()
+ """Stores the path to the verified.bin file. This marker file is created (or removed) by the sl-shared-assets
+ 'verify-session' CLI command to indicate whether the session data inside the folder marked by the file has been
+ verified for integrity. Primarily, this is used when the data is moved to the long-term storage destination (BioHPC
+ server) to ensure it is safe to remove any local copies of the data stored on the acquisition system machine(s)."""

  def resolve_paths(self, root_directory_path: Path) -> None:
  """Resolves all paths managed by the class instance based on the input root directory path.
@@ -248,6 +253,7 @@ class RawData:
  self.system_configuration_path = self.raw_data_path.joinpath("system_configuration.yaml")
  self.telomere_path = self.raw_data_path.joinpath("telomere.bin")
  self.ubiquitin_path = self.raw_data_path.joinpath("ubiquitin.bin")
+ self.verified_bin_path = self.raw_data_path.joinpath("verified.bin")

  def make_directories(self) -> None:
  """Ensures that all major subdirectories and the root directory exist, creating any missing directories."""
sl_shared_assets/data_classes/session_data.pyi CHANGED
@@ -96,6 +96,7 @@ class RawData:
  checksum_path: Path = ...
  telomere_path: Path = ...
  ubiquitin_path: Path = ...
+ verified_bin_path: Path = ...
  def resolve_paths(self, root_directory_path: Path) -> None:
  """Resolves all paths managed by the class instance based on the input root directory path.

sl_shared_assets/server/server.py CHANGED
@@ -75,7 +75,7 @@ class Server:
  """

  def __init__(self, credentials_path: Path) -> None:
- # Tracker used to prevent __del__ from classing stop() for a partially initialized class.
+ # Tracker used to prevent __del__ from calling stop() for a partially initialized class.
  self._open: bool = False

  # Loads the credentials from the provided .yaml file
@@ -95,6 +95,7 @@ class Server:
  self._credentials.host, username=self._credentials.username, password=self._credentials.password
  )
  console.echo(f"Connected to {self._credentials.host}", level=LogLevel.SUCCESS)
+ self._open = True
  break
  except paramiko.AuthenticationException:
  message = (
@@ -131,7 +132,7 @@ class Server:
  job: The Job object that contains all job data.

  Returns:
- The job object whose 'job_id' attribute had been modified with the job ID, if the job was successfully
+ The job object whose 'job_id' attribute had been modified with the job ID if the job was successfully
  submitted.

  Raises:
sl_shared_assets/server/server.pyi CHANGED
@@ -69,7 +69,7 @@ class Server:
  job: The Job object that contains all job data.

  Returns:
- The job object whose 'job_id' attribute had been modified with the job ID, if the job was successfully
+ The job object whose 'job_id' attribute had been modified with the job ID if the job was successfully
  submitted.

  Raises:
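The added `self._open = True` line complements the tracker initialized at the top of `__init__`: teardown only runs for instances that actually connected. A minimal, self-contained sketch of that guard pattern (the class and method bodies here are illustrative stand-ins, not the library's implementation):

```python
class _GuardedResource:
    """Illustrative stand-in for the guard pattern used by Server above."""

    def __init__(self) -> None:
        # Tracker used to prevent __del__ from calling stop() for a partially initialized instance.
        self._open: bool = False
        self._connection = self._connect()  # may raise before the flag is flipped
        self._open = True  # only reached after a successful connection

    def _connect(self) -> object:
        # Placeholder for establishing the remote connection.
        return object()

    def stop(self) -> None:
        # Placeholder for releasing the connection.
        self._open = False

    def __del__(self) -> None:
        # Skips teardown when __init__ failed before the connection was established.
        if self._open:
            self.stop()
```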
sl_shared_assets/tools/packaging_tools.py CHANGED
@@ -21,6 +21,7 @@ _excluded_files = {
  "multi_day_suite2p.bin",
  "behavior.bin",
  "dlc.bin",
+ "verified.bin",
  }


sl_shared_assets/tools/project_management_tools.py CHANGED
@@ -61,6 +61,7 @@ def generate_project_manifest(
  "raw_data": [], # Server-side raw_data folder path.
  "processed_data": [], # Server-side processed_data folder path.
  "complete": [], # Determines if the session data is complete. Incomplete sessions are excluded from processing.
+ "verified": [], # Determines if the session data integrity has been verified upon transfer to storage machine.
  "single_day_suite2p": [], # Determines whether the session has been processed with the single-day s2p pipeline.
  "multi_day_suite2p": [], # Determines whether the session has been processed with the multi-day s2p pipeline.
  "behavior": [], # Determines whether the session has been processed with the behavior extraction pipeline.
@@ -87,9 +88,13 @@ def generate_project_manifest(
  # If the session raw_data folder contains the telomere.bin file, marks the session as complete.
  manifest["complete"].append(session_data.raw_data.telomere_path.exists())

- # If the session is incomplete, marks all processing steps as FALSE, as automatic processing is disabled for
- # incomplete sessions.
- if not manifest["complete"][-1]:
+ # If the session raw_data folder contains the verified.bin file, marks the session as verified.
+ manifest["verified"].append(session_data.raw_data.verified_bin_path.exists())
+
+ # If the session is incomplete or unverified, marks all processing steps as FALSE, as automatic processing is
+ # disabled for incomplete sessions. If the session unverified, the case is even more severe, as its data may be
+ # corrupted.
+ if not manifest["complete"][-1] or not not manifest["verified"][-1]:
  manifest["single_day_suite2p"].append(False)
  manifest["multi_day_suite2p"].append(False)
  manifest["behavior"].append(False)
@@ -120,6 +125,7 @@ def generate_project_manifest(
  "processed_data": pl.String,
  "type": pl.String,
  "complete": pl.Boolean,
+ "verified": pl.Boolean,
  "single_day_suite2p": pl.Boolean,
  "multi_day_suite2p": pl.Boolean,
  "behavior": pl.Boolean,
@@ -139,13 +145,13 @@

  def verify_session_checksum(
  session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
- ) -> bool:
+ ) -> None:
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
  comparing it against the checksum stored in the ax_checksum.txt file.

  Primarily, this function is used to verify data integrity after transferring it from a local PC to the remote
- server for long-term storage. This function is designed to do nothing if the checksum matches and to remove the
- 'telomere.bin' marker file if it does not.
+ server for long-term storage. This function is designed to create the 'verified.bin' marker file if the checksum
+ matches and to remove the 'telomere.bin' and 'verified.bin' marker files if it does not.

  Notes:
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
@@ -161,9 +167,6 @@ def verify_session_checksum(
  processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
  the root directory where to store the processed data from all projects, and it will be automatically
  modified to include the project name, the animal name, and the session ID.
-
- Returns:
- True if the checksum matches, False otherwise.
  """

  # Loads session data layout. If configured to do so, also creates the processed data hierarchy
@@ -173,6 +176,11 @@ def verify_session_checksum(
  make_processed_data_directory=create_processed_data_directory,
  )

+ # Unlinks the verified.bin marker if it exists. The presence or absence of the marker is used as the
+ # primary heuristic for determining if the session data passed verification. Unlinking it early helps in the case
+ # the verification procedure aborts unexpectedly for any reason.
+ session_data.raw_data.verified_bin_path.unlink(missing_ok=True)
+
  # Re-calculates the checksum for the raw_data directory
  calculated_checksum = calculate_directory_checksum(
  directory=session_data.raw_data.raw_data_path, batch=False, save_checksum=False
@@ -185,10 +193,9 @@
  # If the two checksums do not match, this likely indicates data corruption.
  if stored_checksum != calculated_checksum:
  # If the telomere.bin file exists, removes this file. This automatically marks the session as incomplete for
- # all other Sun lab runtimes. The presence of the telomere.bin file after integrity verification is used as a
- # heuristic for determining whether the session has passed the verification process.
- if session_data.raw_data.telomere_path.exists():
- session_data.raw_data.telomere_path.unlink()
- return False
+ # all other Sun lab runtimes.
+ session_data.raw_data.telomere_path.unlink(missing_ok=True)

- return True
+ # Otherwise, ensures that the session is marked with the verified.bin marker file.
+ else:
+ session_data.raw_data.verified_bin_path.touch(exist_ok=True)
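After this change, the pair of marker files fully encodes the verification outcome that the CLI command and the manifest generator above both consume. A minimal sketch that classifies a session's state from the two markers (the helper and its return strings are illustrative, not part of the library):

```python
from pathlib import Path


def classify_session(raw_data_dir: Path) -> str:
    """Illustrative classification based on the marker-file semantics described above."""
    telomere = raw_data_dir / "telomere.bin"
    verified = raw_data_dir / "verified.bin"

    if verified.exists():
        # verify_session_checksum() touches verified.bin only when the checksums match.
        return "verified"
    if not telomere.exists():
        # A missing telomere.bin marks the session as incomplete/corrupted and excludes
        # it from automated processing runtimes.
        return "incomplete-or-corrupted"
    # telomere.bin present but no verified.bin: complete, but not yet (re)verified.
    return "complete-unverified"
```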
sl_shared_assets/tools/project_management_tools.pyi CHANGED
@@ -29,13 +29,13 @@ def generate_project_manifest(

  def verify_session_checksum(
  session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
- ) -> bool:
+ ) -> None:
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
  comparing it against the checksum stored in the ax_checksum.txt file.

  Primarily, this function is used to verify data integrity after transferring it from a local PC to the remote
- server for long-term storage. This function is designed to do nothing if the checksum matches and to remove the
- 'telomere.bin' marker file if it does not.
+ server for long-term storage. This function is designed to create the 'verified.bin' marker file if the checksum
+ matches and to remove the 'telomere.bin' and 'verified.bin' marker files if it does not.

  Notes:
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
@@ -51,7 +51,4 @@ def verify_session_checksum(
  processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
  the root directory where to store the processed data from all projects, and it will be automatically
  modified to include the project name, the animal name, and the session ID.
-
- Returns:
- True if the checksum matches, False otherwise.
  """
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 1.0.0rc27
+ Version: 1.0.1
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -768,16 +768,16 @@ ___

  ## Detailed Description

- Primarily, this library is designed to make the two main Sun lab libraries used in data acquisition
+ Primarily, this library is designed to make the two main Sun lab libraries used for data acquisition
  ([sl-experiment](https://github.com/Sun-Lab-NBB/sl-experiment)) and processing
  ([sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery)) independent of each other. This is beneficial, as both
- libraries feature an extensive and potentially incompatible set of runtime dependencies. Moreover, having a shared
+ libraries feature an extensive and largely incompatible set of runtime dependencies. Moreover, having a shared
  repository of classes and tools reused across Sun lab pipelines streamlines the maintenance of these tools.

  The library broadly stores two types of assets. First, it stores various dataclasses used to save the data acquired
- during experiments in the lab. Moreover, it also stores the dataclasses used to configure data acquisition and
- processing runtimes. Secondly, it stores the tools used to safely move the data between the computers used in the data
- acquisition and processing and provides the API for accessing the lab’s main computation server.
+ during experiments in the lab and the dataclasses used to configure data acquisition and processing runtimes. Secondly,
+ it stores the tools used to safely move the data between the machines (computers) used in the data acquisition and
+ processing, and provides the API for running various data processing jobs on remote compute servers.

  ---

@@ -819,7 +819,10 @@ Use the following command to install the library using pip: ```pip install sl-sh
  ## Usage

  All library components are intended to be used via other Sun lab libraries. Developers should study the API and CLI
- documentation below to learn how to use library components in other Sun lab libraries.
+ documentation below to learn how to use library components in other Sun lab libraries. For notes on using shared
+ assets for data acquisition, see the [sl-experiment](https://github.com/Sun-Lab-NBB/sl-experiment) library ReadMe.
+ For notes on using shared assets for data processing, see the [sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery)
+ library ReadMe.

  ---

@@ -1,36 +1,36 @@
  sl_shared_assets/__init__.py,sha256=rscR353jiyUQ-wpguTLOM-A5Lqr1ftQtuTan1D0AtR0,2196
  sl_shared_assets/__init__.pyi,sha256=Ye6eY_y_l9CTqwbCf-OxXOphxXKfn9UJnuw09DdLEtc,2405
- sl_shared_assets/cli.py,sha256=-w9RAbHEcMspp6UDiXHM0fYSUwPSjJpZZcX3T9hLzUc,17103
- sl_shared_assets/cli.pyi,sha256=DuhJhqAXB7e5uOdzCI2c29FwsVwpQefR2eLUfW0gWyQ,5931
+ sl_shared_assets/cli.py,sha256=9PXEfRgKb9-6xb1icmtr_ijdWur-AIsdMwvgA0aAeto,17434
+ sl_shared_assets/cli.pyi,sha256=p1tUflODySXx1_2IrLZ_uqixopbQPTearuJvqJ4jKI4,6075
  sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sl_shared_assets/data_classes/__init__.py,sha256=b0BwTAJCD1zbwjd2UdpXheq61q1sgBuYhtAL-GHA2h4,1835
  sl_shared_assets/data_classes/__init__.pyi,sha256=e2082sm5pSw3bxNZGqwcRhuKLH8T7hcEo6kWtOZNDBU,1968
- sl_shared_assets/data_classes/configuration_data.py,sha256=wuPFz3zx1DN12KimPhH-fU1geU3pFzRuRB53JEJ2T6I,30343
- sl_shared_assets/data_classes/configuration_data.pyi,sha256=FWQG6LlF6gSDV5bU0KvMvIE6wf0Jfrz5wRbuHwyCKHw,9600
+ sl_shared_assets/data_classes/configuration_data.py,sha256=ZVk1ynk25CfVuQfMofoH90BUaOPqk7zW8ukY6ls_Pp0,30360
+ sl_shared_assets/data_classes/configuration_data.pyi,sha256=h7AV3z73SC2ITXWcnsShczuezC1Is7L4WSMnEWGSLPQ,9617
  sl_shared_assets/data_classes/runtime_data.py,sha256=Q7Ykf9hgrw1jYKXa53mn_LW8G2cPmLLuxgGkP6qQcc4,15483
  sl_shared_assets/data_classes/runtime_data.pyi,sha256=PxaCbeF9COR4ri91pdzh7zVrqaz2KEDYB1EoLhZQC_c,6618
- sl_shared_assets/data_classes/session_data.py,sha256=YJCZi10amiRZxuGHv2UYB4NaRnorywBUNxZnRHlQsMs,37743
- sl_shared_assets/data_classes/session_data.pyi,sha256=FtqkYDOufLmdPZWzz4J8U5KGOkuTSC1cjg05q-QEopM,13209
+ sl_shared_assets/data_classes/session_data.py,sha256=oAiTNqNlQi6NQHv5WsaQb3KglBrDGAmBv1EVPPuM6Y4,38329
+ sl_shared_assets/data_classes/session_data.pyi,sha256=eLemAagVD6TQ-9pzMIc0eXDgnh-quJwEidKHYitvsjQ,13243
  sl_shared_assets/data_classes/surgery_data.py,sha256=qsMj3NkjhylAT9b_wHBY-1XwTu2xsZcZatdECmkA7Bs,7437
  sl_shared_assets/data_classes/surgery_data.pyi,sha256=rf59lJ3tGSYKHQlEGXg75MnjajBwl0DYhL4TClAO4SM,2605
  sl_shared_assets/server/__init__.py,sha256=nyX6-9ACcrQeRQOCNvBVrWSTHGjRPANIG_u0aq7HPTg,426
  sl_shared_assets/server/__init__.pyi,sha256=7o99f8uf6NuBjMZjNAM1FX69Qbu5uBluRSAyaUWbXOU,263
  sl_shared_assets/server/job.py,sha256=GB31yYPEqXR6MgwNmnQrSQuHRJqUHFXcd6p7hb38q_c,7928
  sl_shared_assets/server/job.pyi,sha256=cxgHMpuwHsJGf_ZcTSSa2tZNzeR_GxqlICOsYGV_oy0,5655
- sl_shared_assets/server/server.py,sha256=lgHkg0_pHo0CJA6A6FGb22FC8PKVaKakeaJn4tfWk6I,9382
- sl_shared_assets/server/server.pyi,sha256=uu-bs5LBuOF-AL7qroYFbGc1aWgBalm-uHUXzZNxheU,4144
+ sl_shared_assets/server/server.py,sha256=SkFJA8eYlZrm2dag7sF_CyPiBphFgoq32zV10kqnedc,9414
+ sl_shared_assets/server/server.pyi,sha256=SoTRqSPJ1GQ5i5gV3N57rDT9xzeT47RjMRH1FYKsOkg,4143
  sl_shared_assets/tools/__init__.py,sha256=N95ZPMz-_HdNPrbVieCFza-QSVS6BV2KRB4K1OLRttc,636
  sl_shared_assets/tools/__init__.pyi,sha256=xeDF8itMc0JRgLrO_IN_9gW7cp_Ld-Gf-rjtrgWvQ2I,551
  sl_shared_assets/tools/ascension_tools.py,sha256=kIqYGX9F8lRao_LaVOacIiT8J9SypTvarb9mgaI9ZPs,15387
  sl_shared_assets/tools/ascension_tools.pyi,sha256=tQCDdWZ20ZjUjpMs8aGIN0yBg5ff3j6spi62b3Han4o,3755
- sl_shared_assets/tools/packaging_tools.py,sha256=oY-EWuTiMfWobYllVZy0piGlVnTHCpPY1GF-WmqQdj4,7269
+ sl_shared_assets/tools/packaging_tools.py,sha256=2yaIysL11g5m-j0i4I2MRVzHA6_37WdyLYqXMl2FPAc,7289
  sl_shared_assets/tools/packaging_tools.pyi,sha256=vgGbAQCExwg-0A5F72MzEhzHxu97Nqg1yuz-5P89ycU,3118
- sl_shared_assets/tools/project_management_tools.py,sha256=E50xhjJZlq6zQXnj8DHm1KkVcTDK7lr61m1ge-u25TY,10476
- sl_shared_assets/tools/project_management_tools.pyi,sha256=bRwohpGa98LDIy1ntLAPSKEGRB4S7ZmnIfyDy097c94,3467
+ sl_shared_assets/tools/project_management_tools.py,sha256=aMOCq3nCR2xOTDMQpXbXEh90IuKOvSbIfeKh2VaZIXw,11215
+ sl_shared_assets/tools/project_management_tools.pyi,sha256=HiU1qqumscmXtAXNX6eaSrkwanGHEvk-lromwZ2ijD8,3445
  sl_shared_assets/tools/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
  sl_shared_assets/tools/transfer_tools.pyi,sha256=FoH7eYZe7guGHfPr0MK5ggO62uXKwD2aJ7h1Bu7PaEE,3294
- sl_shared_assets-1.0.0rc27.dist-info/METADATA,sha256=IZzfPVKqyJ5xTV9ziAtWG2COXNLUtJCnXH-vpQYQ1xo,48613
- sl_shared_assets-1.0.0rc27.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- sl_shared_assets-1.0.0rc27.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
- sl_shared_assets-1.0.0rc27.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- sl_shared_assets-1.0.0rc27.dist-info/RECORD,,
+ sl_shared_assets-1.0.1.dist-info/METADATA,sha256=eGEvjbrv_aOueU-LBq4Z-fC6A_PEDK7pTkiilWZTaOw,48890
+ sl_shared_assets-1.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ sl_shared_assets-1.0.1.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
+ sl_shared_assets-1.0.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ sl_shared_assets-1.0.1.dist-info/RECORD,,