sl-shared-assets 1.2.0rc4__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

sl_shared_assets/cli.py CHANGED
@@ -3,20 +3,11 @@
  from pathlib import Path
 
  import click
- from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
+ from ataraxis_base_utilities import LogLevel, console
 
  from .tools import ascend_tyche_data, verify_session_checksum, generate_project_manifest
  from .server import generate_server_credentials
- from .data_classes import (
-     SessionData,
-     ExperimentState,
-     ProcessingTracker,
-     ProjectConfiguration,
-     MesoscopeSystemConfiguration,
-     MesoscopeExperimentConfiguration,
-     get_system_configuration_data,
-     set_system_configuration_file,
- )
+ from .data_classes import SessionData, ProcessingTracker
 
 
  @click.command()
@@ -128,73 +119,6 @@ def generate_project_manifest_file(
      console.echo(message=f"Project {Path(project_path).stem} data manifest file: generated.", level=LogLevel.SUCCESS)
 
 
- @click.command()
- @click.option(
-     "-od",
-     "--output_directory",
-     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
-     required=True,
-     help="The absolute path to the directory where to store the generated system configuration file.",
- )
- @click.option(
-     "-as",
-     "--acquisition_system",
-     type=str,
-     show_default=True,
-     required=True,
-     default="mesoscope-vr",
-     help=(
-         "The type (name) of the data acquisition system for which to generate the configuration file. Note, currently, "
-         "only the following types are supported: mesoscope-vr."
-     ),
- )
- def generate_system_configuration_file(output_directory: str, acquisition_system: str) -> None:
-     """Generates a precursor system configuration file for the target acquisition system and configures all local
-     Sun lab libraries to use that file to load the acquisition system configuration data.
-
-     This command is typically used when setting up a new data acquisition system in the lab. The system configuration
-     only needs to be specified on the machine (PC) that runs the sl-experiment library and manages the acquisition
-     runtime if the system uses multiple machines (PCs). Once the system configuration .yaml file is created via this
-     command, editing the configuration parameters in the file will automatically take effect during all following
-     runtimes.
-     """
-
-     # Verifies that the input path is a valid directory path and, if necessary, creates the directory specified by the
-     # path.
-     path = Path(output_directory)
-     if not path.is_dir():
-         message = (
-             f"Unable to generate the system configuration file for the system '{acquisition_system}'. The path to "
-             f"the output directory ({path}) is not a valid directory path."
-         )
-         console.error(message=message, error=ValueError)
-     else:
-         ensure_directory_exists(path)
-
-     # Mesoscope
-     if acquisition_system.lower() == "mesoscope-vr":
-         file_name = "mesoscope_system_configuration.yaml"
-         file_path = path.joinpath(file_name)
-         system_configuration = MesoscopeSystemConfiguration()
-         system_configuration.save(file_path)
-         set_system_configuration_file(file_path)
-         message = (
-             f"Mesoscope-VR system configuration file: generated. Edit the configuration parameters stored inside the "
-             f"{file_name} file to match the state of the acquisition system and use context."
-         )
-         # noinspection PyTypeChecker
-         console.echo(message=message, level=LogLevel.SUCCESS)
-
-     # For unsupported system types, raises an error message
-     else:
-         message = (
-             f"Unable to generate the system configuration file for the system '{acquisition_system}'. The input "
-             f"acquisition system is not supported (not recognized). Currently, only the following acquisition "
-             f"systems are supported: mesoscope-vr."
-         )
-         console.error(message=message, error=ValueError)
-
-
  @click.command()
  @click.option(
      "-od",
@@ -264,134 +188,6 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
      console.echo(message=message, level=LogLevel.SUCCESS)
 
 
- @click.command()
- @click.option(
-     "-p",
-     "--project",
-     type=str,
-     required=True,
-     help="The name of the project to be created.",
- )
- @click.option(
-     "-sli",
-     "--surgery_log_id",
-     type=str,
-     required=True,
-     help="The 44-symbol alpha-numeric ID code used by the project's surgery log Google sheet.",
- )
- @click.option(
-     "-wli",
-     "--water_restriction_log_id",
-     type=str,
-     required=True,
-     help="The 44-symbol alpha-numeric ID code used by the project's water restriction log Google sheet.",
- )
- def generate_project_configuration_file(project: str, surgery_log_id: str, water_restriction_log_id: str) -> None:
-     """Generates a new project directory hierarchy and writes its configuration as a project_configuration.yaml file.
-
-     This command creates new Sun lab projects. Until a project is created in this fashion, all data-acquisition and
-     data-processing commands from sl-experiment and sl-forgery libraries targeting the project will not work. This
-     command is intended to be called on the main computer of the data-acquisition system(s) used by the project. Note,
-     this command assumes that the local machine (PC) is the main PC of the data acquisition system and has a valid
-     acquisition system configuration .yaml file.
-     """
-
-     # Queries the data acquisition configuration data. Specifically, this is used to get the path to the root
-     # directory where all projects are stored on the local machine.
-     system_configuration = get_system_configuration_data()
-     file_path = system_configuration.paths.root_directory.joinpath(
-         project, "configuration", "project_configuration.yaml"
-     )
-
-     # Generates the initial project directory hierarchy
-     ensure_directory_exists(file_path)
-
-     # Saves project configuration data as a .yaml file to the 'configuration' directory of the created project
-     configuration = ProjectConfiguration(
-         project_name=project, surgery_sheet_id=surgery_log_id, water_log_sheet_id=water_restriction_log_id
-     )
-     configuration.save(path=file_path.joinpath())
-     # noinspection PyTypeChecker
-     console.echo(message=f"Project {project} data structure and configuration file: generated.", level=LogLevel.SUCCESS)
-
-
- @click.command()
- @click.option(
-     "-p",
-     "--project",
-     type=str,
-     required=True,
-     help="The name of the project for which to generate the new experiment configuration file.",
- )
- @click.option(
-     "-e",
-     "--experiment",
-     type=str,
-     required=True,
-     help="The name of the experiment. Note, the generated experiment configuration file will also use this name.",
- )
- @click.option(
-     "-sc",
-     "--state_count",
-     type=int,
-     required=True,
-     help="The total number of experiment and acquisition system state combinations in the experiment.",
- )
- def generate_experiment_configuration_file(project: str, experiment: str, state_count: int) -> None:
-     """Generates a precursor experiment configuration .yaml file for the target experiment inside the project's
-     configuration folder.
-
-     This command assists users in creating new experiment configurations, by statically resolving the structure (layout)
-     of the appropriate experiment configuration file for the acquisition system of the local machine (PC). Specifically,
-     the generated precursor will contain the correct number of experiment state entries initialized to nonsensical
-     default value. The user needs to manually edit the configuration file to properly specify their experiment runtime
-     parameters and state transitions before running the experiment. In a sense, this command acts as an 'experiment
-     template' generator.
-     """
-
-     # Resolves the acquisition system configuration. Uses the path to the local project directory and the project name
-     # to determine where to save the experiment configuration file
-     acquisition_system = get_system_configuration_data()
-     file_path = acquisition_system.paths.root_directory.joinpath(project, "configuration", f"{experiment}.yaml")
-
-     if not acquisition_system.paths.root_directory.joinpath(project).exists():
-         message = (
-             f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
-             f"The target project does not exist on the local machine (PC). Use the "
-             f"'sl-create-project' CLI command to create the project before creating new experiment configuration(s). "
-         )
-         console.error(message=message, error=ValueError)
-         raise ValueError(message)  # Fall-back to appease mypy, should not be reachable
-
-     # Loops over the number of requested states and, for each, generates a precursor experiment state field inside the
-     # 'states' dictionary.
-     states = {}
-     for state in range(state_count):
-         states[f"state_{state + 1}"] = ExperimentState(
-             experiment_state_code=state + 1,  # Assumes experiment state sequences are 1-based
-             system_state_code=0,
-             state_duration_s=60,
-         )
-
-     # Depending on the acquisition system, packs state data into the appropriate experiment configuration class and
-     # saves it to the project's configuration folder as a .yaml file.
-     if acquisition_system.name == "mesoscope-vr":
-         experiment_configuration = MesoscopeExperimentConfiguration(experiment_states=states)
-
-     else:
-         message = (
-             f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
-             f"The data acquisition system of the local machine (PC) is not supported (not recognized). Currently, only "
-             f"the following acquisition systems are supported: mesoscope-vr."
-         )
-         console.error(message=message, error=ValueError)
-         raise ValueError(message)  # Fall-back to appease mypy, should not be reachable
-
-     experiment_configuration.to_yaml(file_path=file_path)
-     # noinspection PyTypeChecker
-     console.echo(message=f"Experiment {experiment} configuration file: generated.", level=LogLevel.SUCCESS)
-
-
  @click.command()
  @click.option(
      "-id",
sl_shared_assets/cli.pyi CHANGED
@@ -8,13 +8,7 @@ from .tools import (
  from .server import generate_server_credentials as generate_server_credentials
  from .data_classes import (
      SessionData as SessionData,
-     ExperimentState as ExperimentState,
      ProcessingTracker as ProcessingTracker,
-     ProjectConfiguration as ProjectConfiguration,
-     MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
-     MesoscopeExperimentConfiguration as MesoscopeExperimentConfiguration,
-     get_system_configuration_data as get_system_configuration_data,
-     set_system_configuration_file as set_system_configuration_file,
  )
 
  def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
@@ -42,17 +36,6 @@ def generate_project_manifest_file(
      between machines (as it is cached in a file).
      """
 
- def generate_system_configuration_file(output_directory: str, acquisition_system: str) -> None:
-     """Generates a precursor system configuration file for the target acquisition system and configures all local
-     Sun lab libraries to use that file to load the acquisition system configuration data.
-
-     This command is typically used when setting up a new data acquisition system in the lab. The system configuration
-     only needs to be specified on the machine (PC) that runs the sl-experiment library and manages the acquisition
-     runtime if the system uses multiple machines (PCs). Once the system configuration .yaml file is created via this
-     command, editing the configuration parameters in the file will automatically take effect during all following
-     runtimes.
-     """
-
 
  def generate_server_credentials_file(output_directory: str, host: str, username: str, password: str) -> None:
      """Generates a new server_credentials.yaml file under the specified directory, using input information.
@@ -61,28 +44,6 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
      lab data processing libraries.
      """
 
- def generate_project_configuration_file(project: str, surgery_log_id: str, water_restriction_log_id: str) -> None:
-     """Generates a new project directory hierarchy and writes its configuration as a project_configuration.yaml file.
-
-     This command creates new Sun lab projects. Until a project is created in this fashion, all data-acquisition and
-     data-processing commands from sl-experiment and sl-forgery libraries targeting the project will not work. This
-     command is intended to be called on the main computer of the data-acquisition system(s) used by the project. Note,
-     this command assumes that the local machine (PC) is the main PC of the data acquisition system and has a valid
-     acquisition system configuration .yaml file.
-     """
-
- def generate_experiment_configuration_file(project: str, experiment: str, state_count: int) -> None:
-     """Generates a precursor experiment configuration .yaml file for the target experiment inside the project's
-     configuration folder.
-
-     This command assists users in creating new experiment configurations, by statically resolving the structure (layout)
-     of the appropriate experiment configuration file for the acquisition system of the local machine (PC). Specifically,
-     the generated precursor will contain the correct number of experiment state entries initialized to nonsensical
-     default value. The user needs to manually edit the configuration file to properly specify their experiment runtime
-     parameters and state transitions before running the experiment. In a sense, this command acts as an 'experiment
-     template' generator.
-     """
-
 
  def ascend_tyche_directory(input_directory: str) -> None:
      """Restructures old Tyche project data to use the modern Sun lab data structure and uploads them to the processing
@@ -171,6 +171,10 @@ class MesoscopeExperimentDescriptor(YamlConfig):
      """The weight of the animal, in grams, at the beginning of the session."""
      dispensed_water_volume_ml: float
      """Stores the total water volume, in milliliters, dispensed during runtime."""
+     maximum_unconsumed_rewards: int = 1
+     """Stores the maximum number of consecutive rewards that can be delivered without the animal consuming them. If
+     the animal receives this many rewards without licking (consuming) them, reward delivery is paused until the animal
+     consumes the rewards."""
      experimenter_notes: str = "Replace this with your notes."
      """This field is not set during runtime. It is expected that each experimenter will replace this field with their
      notes made during runtime."""
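
The new maximum_unconsumed_rewards field only records the configured limit; the enforcement itself happens in the acquisition code (sl-experiment), which is not part of this package. A minimal, hypothetical sketch of the rule the docstring describes (names below are illustrative, not the real API):

# Hypothetical sketch of the pause-on-unconsumed-rewards rule described above.
# The real enforcement lives in the acquisition runtime and may differ.
class RewardGate:
    def __init__(self, maximum_unconsumed_rewards: int = 1) -> None:
        self.maximum_unconsumed_rewards = maximum_unconsumed_rewards
        self._unconsumed = 0  # Consecutive rewards delivered but not yet licked.

    def can_deliver(self) -> bool:
        # Delivery pauses once the unconsumed count reaches the configured cap.
        return self._unconsumed < self.maximum_unconsumed_rewards

    def register_delivery(self) -> None:
        self._unconsumed += 1

    def register_lick(self) -> None:
        # Consuming the rewards resets the counter and resumes delivery.
        self._unconsumed = 0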
@@ -88,6 +88,7 @@ class MesoscopeExperimentDescriptor(YamlConfig):
      experimenter: str
      mouse_weight_g: float
      dispensed_water_volume_ml: float
+     maximum_unconsumed_rewards: int = ...
      experimenter_notes: str = ...
      experimenter_given_water_volume_ml: float = ...
      incomplete: bool = ...
@@ -229,10 +229,9 @@ class RawData:
      destination. During 'purge' sl-experiment runtimes, the library discovers and removes all session data marked with
      'ubiquitin.bin' files from the machine that runs the code."""
      integrity_verification_tracker_path: Path = Path()
-     """Stores the path to the verified.bin file. This marker file is created (or removed) by the sl-shared-assets
-     'verify-session' CLI command to indicate whether the session data inside the folder marked by the file has been
-     verified for integrity. Primarily, this is used when the data is moved to the long-term storage destination (BioHPC
-     server) to ensure it is safe to remove any local copies of the data stored on the acquisition system machine(s)."""
+     """Stores the path to the integrity_verification.yaml tracker file. This file stores the current state of the data
+     integrity verification pipeline. It prevents more than one instance of the pipeline from working with the data
+     at a given time and communicates the outcome (success or failure) of the most recent pipeline runtime."""
      version_data_path: Path = Path()
      """Stores the path to the version_data.yaml file. This file contains the snapshot of Python and sl-experiment
      library versions that were used when the data was acquired."""
@@ -306,23 +305,17 @@ class ProcessedData:
      on the remote server. However, it is possible to configure local runtimes to also redirect log data to files
      stored in this directory (by editing ataraxis-base-utilities 'console' variable)."""
      suite2p_processing_tracker_path: Path = Path()
-     """Stores the path to the single_day_suite2p.bin file. This file is created by our single-day suite2p data
-     processing pipeline to mark sessions that have been successfully processed with the single-day sl-suite2p library
-     pipeline. Note, the file is removed at the beginning of the suite2p pipeline, so its presence always indicates
-     successful processing runtime completion."""
+     """Stores the path to the suite2p_processing_tracker.yaml tracker file. This file stores the current state of the
+     sl-suite2p single-day data processing pipeline."""
      dataset_formation_tracker_path: Path = Path()
-     """Same as single_day_suite2p_bin_path, but tracks whether the session has been successfully processed with the
-     multi-day suite2p pipeline."""
+     """Same as suite2p_processing_tracker_path, but stores the current state of the dataset formation process that
+     includes this session (communicates whether the session has been successfully added to any dataset(s))."""
      behavior_processing_tracker_path: Path = Path()
-     """Stores the path to the behavior.bin file. This file is created by our behavior data extraction pipeline
-     to mark sessions that have been successfully processed with the sl-behavior library pipeline. Note, the
-     file is removed at the beginning of the behavior data extraction pipeline, so its presence always indicates
-     successful processing runtime completion."""
+     """Stores the path to the behavior_processing_tracker.yaml file. This file stores the current state of the
+     behavior (log) data processing pipeline."""
      video_processing_tracker_path: Path = Path()
-     """Stores the path to the dlc.bin file. This file is created by our DeepLabCut-based pose tracking pipeline
-     to mark sessions that have been successfully processed with the sl-dlc library pipeline. Note, the
-     file is removed at the beginning of the DeepLabCut pipeline, so its presence always indicates successful processing
-     runtime completion."""
+     """Stores the path to the video_processing_tracker.yaml file. This file stores the current state of the video
+     tracking (DeepLabCut) processing pipeline."""
 
      def resolve_paths(self, root_directory_path: Path) -> None:
          """Resolves all paths managed by the class instance based on the input root directory path.
@@ -192,21 +192,23 @@ class ProjectManifest:
          sessions = data.select("session").sort("session").to_series().to_list()
          return tuple(sessions)
 
-     def get_session_info(self, animal: str | int, session: str) -> pl.DataFrame:
-         """Returns a Polars DataFrame that stores detailed information for the specified session and animal combination.
+     def get_session_info(self, session: str) -> pl.DataFrame:
+         """Returns a Polars DataFrame that stores detailed information for the specified session.
+
+         Since session IDs are unique, it is expected that filtering by session ID is enough to get the requested
+         information.
 
          Args:
-             animal: The ID of the animal for which to retrieve the data.
              session: The ID of the session for which to retrieve the data.
+
+         Returns:
+             A Polars DataFrame with the following columns: 'animal', 'date', 'notes', 'session', 'type', 'complete',
+             'intensity_verification', 'suite2p_processing', 'behavior_processing', 'video_processing',
+             'dataset_formation'.
          """
-         # Ensures that the 'animal' argument has the same type as the data inside the DataFrame.
-         if self._animal_string:
-             animal = str(animal)
-         else:
-             animal = int(animal)
 
          df = self._data
-         df = df.filter(pl.col("animal").eq(animal) & pl.col("session").eq(session))
+         df = df.filter(pl.col("session").eq(session))
          return df
 
 
@@ -82,12 +82,19 @@ class ProjectManifest:
          Raises:
              ValueError: If the specified animal is not found in the manifest file.
          """
-     def get_session_info(self, animal: str | int, session: str) -> pl.DataFrame:
-         """Returns a Polars DataFrame that stores detailed information for the specified session and animal combination.
+     def get_session_info(self, session: str) -> pl.DataFrame:
+         """Returns a Polars DataFrame that stores detailed information for the specified session.
+
+         Since session IDs are unique, it is expected that filtering by session ID is enough to get the requested
+         information.
 
          Args:
-             animal: The ID of the animal for which to retrieve the data.
              session: The ID of the session for which to retrieve the data.
+
+         Returns:
+             A Polars DataFrame with the following columns: 'animal', 'date', 'notes', 'session', 'type', 'complete',
+             'intensity_verification', 'suite2p_processing', 'behavior_processing', 'video_processing',
+             'dataset_formation'.
          """
 
  def generate_project_manifest(
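
Since get_session_info() now keys on the session ID alone, retrieving session details reduces to a single Polars filter. A self-contained sketch of that pattern with made-up rows (not the real manifest schema or data):

# Minimal illustration of filtering a manifest-like Polars DataFrame by session ID.
import polars as pl

manifest = pl.DataFrame(
    {
        "animal": ["m101", "m102"],
        "session": ["2024-05-01-10-00-00", "2024-05-02-11-30-00"],
        "type": ["experiment", "training"],
        "complete": [True, False],
    }
)

# Session IDs are unique, so filtering on the session column alone is sufficient.
info = manifest.filter(pl.col("session").eq("2024-05-01-10-00-00"))
print(info)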
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 1.2.0rc4
+ Version: 2.0.0
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -697,7 +697,7 @@ Requires-Dist: ataraxis-time==3.0.0
  Requires-Dist: click==8.2.1
  Requires-Dist: filelock==3.18.0
  Requires-Dist: natsort==8.4.0
- Requires-Dist: numpy==2.2.6
+ Requires-Dist: numpy<2.3.0,>=2.0.2
  Requires-Dist: paramiko==3.5.1
  Requires-Dist: polars==1.31.0
  Requires-Dist: pyarrow==20.0.0
@@ -725,7 +725,7 @@ Requires-Dist: appdirs==1.4.4; extra == 'condarun'
  Requires-Dist: click==8.2.1; extra == 'condarun'
  Requires-Dist: filelock==3.18.0; extra == 'condarun'
  Requires-Dist: natsort==8.4.0; extra == 'condarun'
- Requires-Dist: numpy==2.2.6; extra == 'condarun'
+ Requires-Dist: numpy<2.3.0,>=2.0.2; extra == 'condarun'
  Requires-Dist: paramiko==3.5.1; extra == 'condarun'
  Requires-Dist: polars==1.31.0; extra == 'condarun'
  Requires-Dist: pyarrow==20.0.0; extra == 'condarun'
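
The hard numpy==2.2.6 pin is relaxed to a version range in both dependency groups. A quick way to check which NumPy releases satisfy the new specifier (uses the third-party 'packaging' library; the candidate versions below are just examples):

# Check candidate NumPy versions against the relaxed specifier.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=2.0.2,<2.3.0")
for candidate in ("2.0.1", "2.0.2", "2.2.6", "2.3.0"):
    print(candidate, candidate in spec)
# Prints False, True, True, False: only releases from 2.0.2 up to (but excluding) 2.3.0 match.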
@@ -1,15 +1,15 @@
  sl_shared_assets/__init__.py,sha256=_AOpxu9K_0px_xS07H8mqZeYlBS9aD75XBS0dofJzqw,2280
  sl_shared_assets/__init__.pyi,sha256=H1kPervb1A2BjG5EOLsLFQGUWFS_aHWy4cpL4_W71Fs,2525
- sl_shared_assets/cli.py,sha256=SrzbcYbVQQ_fCz29t7JwOdY_ZSUJLHOuH4fJaIdDd1I,18395
- sl_shared_assets/cli.pyi,sha256=Fh8GZBSQzII_Iz6k5nLQOsVMbp7q1R5mp4KNZjdGflY,6119
+ sl_shared_assets/cli.py,sha256=Rk6Axt0v-274O9mvhPChRj6sQBbHOnkd1Lkc3adgvao,9090
+ sl_shared_assets/cli.pyi,sha256=2zTqKYIJNV5cJkzOHTU2h4Hj0uUm-CiP-27vujgUaFI,3342
  sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sl_shared_assets/data_classes/__init__.py,sha256=ixn972b-3URCinVLRPjMfDXpO2w24_NkEUUjdqByFrA,1890
  sl_shared_assets/data_classes/__init__.pyi,sha256=bDBLkyhlosB4t09GxHBNKH0kaVBhHSY_j-i3MD2iKVo,2088
  sl_shared_assets/data_classes/configuration_data.py,sha256=eL8eGl1EF2Sl8J4W6qB78L5r092qnnbEjiApxyK6lCw,29402
  sl_shared_assets/data_classes/configuration_data.pyi,sha256=U-snwWQqYT5-zcd8s3ZV8xX27BEpgy9vKlXvie3NKSE,9537
- sl_shared_assets/data_classes/runtime_data.py,sha256=Q7Ykf9hgrw1jYKXa53mn_LW8G2cPmLLuxgGkP6qQcc4,15483
- sl_shared_assets/data_classes/runtime_data.pyi,sha256=PxaCbeF9COR4ri91pdzh7zVrqaz2KEDYB1EoLhZQC_c,6618
- sl_shared_assets/data_classes/session_data.py,sha256=DHfjGXvdMRsOl1fTgNFrF3u9THAQFtTruDU0tsd0y8c,51767
+ sl_shared_assets/data_classes/runtime_data.py,sha256=TpqT2lRNq6fJyOgwXEqqI1gfTinEpCVTp8RKlvfqgkk,15789
+ sl_shared_assets/data_classes/runtime_data.pyi,sha256=o_eLLHWoAp_w58C2YEIeSHCD36ZYsTsCllRUEIxusSo,6660
+ sl_shared_assets/data_classes/session_data.py,sha256=Ob-Lv7mcegHX38o_xIiF37OUBQ6eZ5LeI0weoOGX6mk,51073
  sl_shared_assets/data_classes/session_data.pyi,sha256=ajVrNwGpk9TQj79WURVYpQ2Bhy-XZsau8VABBgtOzrY,16452
  sl_shared_assets/data_classes/surgery_data.py,sha256=qsMj3NkjhylAT9b_wHBY-1XwTu2xsZcZatdECmkA7Bs,7437
  sl_shared_assets/data_classes/surgery_data.pyi,sha256=rf59lJ3tGSYKHQlEGXg75MnjajBwl0DYhL4TClAO4SM,2605
@@ -25,12 +25,12 @@ sl_shared_assets/tools/ascension_tools.py,sha256=kIqYGX9F8lRao_LaVOacIiT8J9SypTv
  sl_shared_assets/tools/ascension_tools.pyi,sha256=tQCDdWZ20ZjUjpMs8aGIN0yBg5ff3j6spi62b3Han4o,3755
  sl_shared_assets/tools/packaging_tools.py,sha256=c9U0bKB6Btj7sfyeU7xx2Jiqv930qTnXbm0ZbNR-o2k,7594
  sl_shared_assets/tools/packaging_tools.pyi,sha256=vgGbAQCExwg-0A5F72MzEhzHxu97Nqg1yuz-5P89ycU,3118
- sl_shared_assets/tools/project_management_tools.py,sha256=UzvDFvJ8ZohUQlsZya0GdrtUlUQnOMFJEJY5CUXIW3U,22706
- sl_shared_assets/tools/project_management_tools.pyi,sha256=sxjhQzeZ4vIcNwIDtFXYxN9jbTQb-PbCOPZL5P71xa8,7440
+ sl_shared_assets/tools/project_management_tools.py,sha256=HatI5QmzUbUMI6MmKiqzylEp_CzQ-JpEyO5V5TJ40Sk,22758
+ sl_shared_assets/tools/project_management_tools.pyi,sha256=kteTbOom4KVm9Eem1WhmUZw54MOheUTkFQzOPIvV0vw,7731
  sl_shared_assets/tools/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
  sl_shared_assets/tools/transfer_tools.pyi,sha256=FoH7eYZe7guGHfPr0MK5ggO62uXKwD2aJ7h1Bu7PaEE,3294
- sl_shared_assets-1.2.0rc4.dist-info/METADATA,sha256=IO6x6Y5-KymLtssNsRRlyGjHxI181b9zBpJF7qwDSlA,49345
- sl_shared_assets-1.2.0rc4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- sl_shared_assets-1.2.0rc4.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
- sl_shared_assets-1.2.0rc4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- sl_shared_assets-1.2.0rc4.dist-info/RECORD,,
+ sl_shared_assets-2.0.0.dist-info/METADATA,sha256=j83QXlX_oIZ7npkT9zpZUfrrGEGXnJui3R2RJvMz09o,49356
+ sl_shared_assets-2.0.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ sl_shared_assets-2.0.0.dist-info/entry_points.txt,sha256=356d5zNg2v3hil8K7VGen6nDHggdDP-LY7zKJmB8ExI,305
+ sl_shared_assets-2.0.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ sl_shared_assets-2.0.0.dist-info/RECORD,,
@@ -1,8 +1,5 @@
  [console_scripts]
  sl-ascend-tyche = sl_shared_assets.cli:ascend_tyche_directory
- sl-create-experiment = sl_shared_assets.cli:generate_experiment_configuration_file
- sl-create-project = sl_shared_assets.cli:generate_project_configuration_file
  sl-create-server-credentials = sl_shared_assets.cli:generate_server_credentials_file
- sl-create-system-config = sl_shared_assets.cli:generate_system_configuration_file
  sl-project-manifest = sl_shared_assets.cli:generate_project_manifest_file
  sl-verify-session = sl_shared_assets.cli:verify_session_integrity