np_codeocean 0.3.5__py3-none-any.whl → 0.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -1,331 +1,333 @@
- """Convenience functions for:
- - Adding neuropixels rig to dynamic routing session directory.
- - Updating neuropixels rig from dynamic routing session directory.
- """
-
- import datetime
- import logging
- import pathlib
- import typing
-
- from aind_data_schema.core import rig, session
-
- from np_codeocean.metadata import common, np, rigs, storage, update, utils
- from np_codeocean.metadata.model_templates import behavior_box, neuropixels_rig
-
- logger = logging.getLogger(__name__)
-
-
- SESSION_MODEL_GLOB_PATTERN = "*session.json"
-
-
- def scrape_session_model_path(session_directory: pathlib.Path) -> pathlib.Path:
-     """Scrapes aind-metadata session json from dynamic routing session
-     directory.
-     """
-     matches = list(session_directory.glob(SESSION_MODEL_GLOB_PATTERN))
-     logger.debug("Scraped session model paths: %s" % matches)
-     return matches[0]
-
-
- def update_session_from_rig(
-     session_source: pathlib.Path,
-     rig_source: pathlib.Path,
-     output_path: pathlib.Path,
- ) -> pathlib.Path:
-     """Convenience function that updates the `rig_id` of a session model at
-     `session_source`. Uses the `rig_id` of `rig_source`.
-
-     Notes
-     -----
-     - Overwrites the session model at `output_path`.
-     """
-     session_model = session.Session.model_validate_json(session_source.read_text())
-     rig_model = rig.Rig.model_validate_json(rig_source.read_text().replace('NP.2','NP2'))
-     session_model.rig_id = rig_model.rig_id
-     return utils.save_aind_model(session_model, output_path)
-
-
- def copy_or_init_rig(
-     storage_directory: pathlib.Path,
-     extracted_session_datetime: datetime.datetime,
-     extracted_rig_name: str,
-     output_path: pathlib.Path,
- ) -> pathlib.Path:
-     try:
-         rig_model_path = storage.get_item(
-             storage_directory, extracted_session_datetime, extracted_rig_name
-         )
-         # validate that existing model is of the correct current
-         # aind-data-schema metadata format
-         assert rig_model_path is not None
-         rig.Rig.model_validate_json(rig_model_path.read_text())
-
-         return rigs.copy_rig(
-             extracted_rig_name,
-             output_path,
-             extracted_session_datetime,
-             storage_directory,
-         )
-     except Exception:
-         logger.error("Failed to copy rig.", exc_info=True)
-         rig_model = neuropixels_rig.init(
-             extracted_rig_name, # type: ignore
-             modification_date=datetime.date.today(),
-         )
-         rig_model.write_standard_file(output_path.parent)
-         return output_path
-
-
- def add_np_rig_to_session_dir(
-     session_dir: pathlib.Path,
-     session_datetime: datetime.datetime,
-     rig_model_dir: typing.Optional[pathlib.Path] = None,
- ) -> None:
-     """Direct support for the dynamic routing task. Adds an `aind-data-schema`
-     `rig.json` to a dynamic routing session directory. The `aind-data-schema`
-     `session.json` in `session_dir` will be updated with the `rig_id` of the
-     added `rig.json`.
-
-     Notes
-     -----
-     - An aind metadata session json must exist and be ending with filename
-     session.json (pattern: `*session.json`) in `session_dir`.
-     - If `rig_model_dir` is not provided, will attempt to get default from
-     np-config. You will need to be onprem for `np-config` to work.
-     """
-     scraped_session_model_path = scrape_session_model_path(session_dir)
-     logger.debug("Scraped session model path: %s" % scraped_session_model_path)
-     scraped_session = session.Session.model_validate_json(
-         scraped_session_model_path.read_text()
-     )
-     scraped_rig_id = scraped_session.rig_id
-     logger.info("Scraped rig id: %s" % scraped_rig_id)
-     _, rig_name, _ = scraped_rig_id.split("_")
-     logger.info("Parsed rig name: %s" % rig_name)
-     rig_model_path = session_dir / "rig.json"
-
-     if not rig_model_dir:
-         logger.debug("Getting storage directory from np-config.")
-         rig_model_dir = np.get_rig_storage_directory()
-
-     current_model_path = copy_or_init_rig(
-         rig_model_dir,
-         session_datetime,
-         rig_name,
-         rig_model_path,
-     )
-
-     logger.info("Current model path: %s" % current_model_path)
-     settings_sources = list(session_dir.glob("**/settings.xml"))
-     logger.info("Scraped open ephys settings: %s" % settings_sources)
-
-     updated_model_path = update.update_rig(
-         rig_model_path,
-         modification_date=session_datetime.date(),
-         open_ephys_settings_sources=settings_sources,
-         output_path=rig_model_path,
-     )
-
-     update_session_from_rig(
-         scraped_session_model_path,
-         updated_model_path,
-         scraped_session_model_path,
-     )
-
-     storage.update_item(
-         rig_model_dir,
-         updated_model_path,
-         session_datetime,
-         rig_name,
-     )
-
-
- def update_neuropixels_rig_from_dynamic_routing_session_dir(
-     rig_source: pathlib.Path,
-     session_dir: pathlib.Path,
-     output_path: pathlib.Path = pathlib.Path("rig.json"),
-     modification_date: typing.Optional[datetime.date] = None,
-     mvr_mapping: dict[str, str] = common.DEFAULT_MVR_MAPPING,
- ) -> pathlib.Path:
-     """Scrapes dynamic routing session directory for various rig
-     configuration/settings and updates `rig_source`.
-
-     Notes
-     -----
-     - Will likely be depreciated in the future.
-     """
-     try:
-         task_source = next(session_dir.glob("**/Dynamic*.hdf5"))
-         logger.debug("Scraped task source: %s" % task_source)
-     except StopIteration:
-         task_source = None
-
-     # sync
-     try:
-         sync_source = next(session_dir.glob("**/sync.yml"))
-         logger.debug("Scraped sync source: %s" % sync_source)
-     except StopIteration:
-         sync_source = None
-
-     # mvr
-     try:
-         mvr_source = next(session_dir.glob("**/mvr.ini"))
-         logger.debug("Scraped mvr source: %s" % mvr_source)
-     except StopIteration:
-         mvr_source = None
-
-     # open ephys
-     settings_sources = list(session_dir.glob("**/settings.xml"))
-     logger.debug("Scraped open ephys settings: %s" % settings_sources)
-
-     return update.update_rig(
-         rig_source,
-         task_source=task_source,
-         sync_source=sync_source,
-         mvr_source=mvr_source,
-         mvr_mapping=mvr_mapping,
-         open_ephys_settings_sources=settings_sources,
-         output_path=output_path,
-         modification_date=modification_date,
-         reward_calibration_date=modification_date,
-         sound_calibration_date=modification_date,
-     )
-
-
- def extract_rig_name(task_source: pathlib.Path) -> str | None:
-     """Extracts rig_name from task_source.
-
-     >>> extract_rig_name(
-     ... pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
-     ... )
-     'NP2'
-     >>> extract_rig_name(
-     ... pathlib.Path("examples") / "behavior-box-task-0.hdf5"
-     ... )
-     'D6'
-     >>> extract_rig_name(
-     ... pathlib.Path("examples") / "behavior-box-task-1.hdf5"
-     ... )
-     'B2'
-
-     Notes
-     -----
-     - If extracted `computerName` is not found or is not bytes, will use
-     `rigName`.
-     """
-     computer_name = utils.extract_hdf5_value(
-         utils.load_hdf5(task_source),
-         [
-             "computerName",
-         ],
-     )
-     logger.debug("Extracted computerName: %s" % computer_name)
-     rig_name = utils.extract_hdf5_value(
-         utils.load_hdf5(task_source),
-         [
-             "rigName",
-         ],
-     )
-     logger.debug("Extracted rigName: %s" % rig_name)
-
-     if isinstance(computer_name, bytes):
-         decoded = computer_name.decode("utf8")
-         if decoded.lower().startswith("beh"):
-             postfixed = decoded.split(".")[1]
-             split = postfixed.split("-")
-             return split[0] + split[1][-1]
-         else:
-             return decoded
-
-     if isinstance(rig_name, bytes):
-         return rig_name.decode("utf-8")
-
-     return None
-
-
- def extract_session_datetime(
-     task_source: pathlib.Path,
- ) -> datetime.datetime:
-     """
-     >>> extract_session_datetime(
-     ... pathlib.Path("examples") / "behavior-box-task-0.hdf5"
-     ... )
-     datetime.datetime(2024, 5, 1, 0, 0)
-     >>> extract_session_datetime(
-     ... pathlib.Path("examples") / "behavior-box-task-1.hdf5"
-     ... )
-     datetime.datetime(2023, 9, 8, 0, 0)
-     """
-     start_time_str = utils.extract_hdf5_value(
-         utils.load_hdf5(task_source),
-         [
-             "startTime",
-         ],
-     )
-     if not start_time_str:
-         raise Exception("Could not extract start time from task source.")
-
-     logger.debug("Extracted start time bytes: %s" % start_time_str)
-     date_str, _ = start_time_str.decode("utf8").split("_")
-     logger.debug("Date string: %s" % date_str)
-     return datetime.datetime.strptime(date_str, "%Y%m%d")
-
-
- def copy_task_rig(
-     task_source: pathlib.Path,
-     output_path: pathlib.Path,
-     storage_directory: typing.Optional[pathlib.Path] = None,
- ) -> pathlib.Path | None:
-     """Extracts rig_name from task_source and copies the associated `rig.json`
-     to output_path.
-
-     >>> storage_directory = pathlib.Path("examples") / "rig-directory"
-     >>> task_source = pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
-     >>> copy_task_rig(
-     ... task_source,
-     ... pathlib.Path("rig.json"),
-     ... storage_directory,
-     ... )
-     PosixPath('rig.json')
-
-     >>> task_source = pathlib.Path("examples") / "behavior-box-task-0.hdf5"
-     >>> copy_task_rig(
-     ... task_source,
-     ... pathlib.Path("rig.json"),
-     ... storage_directory,
-     ... )
-     PosixPath('rig.json')
-
-     Notes
-     -----
-     - If `storage_directory` is not provided, will attempt to get default from
-     np-config.
-     """
-     # storage_directory optional is legacy behavior
-     # TODO: remove the optional so we can remove this safeguard
-     if not storage_directory:
-         raise Exception("Storage directory must be provided.")
-
-     extracted_rig_name = extract_rig_name(task_source)
-     logger.debug("Extracted rig name: %s" % extracted_rig_name)
-     if not extracted_rig_name:
-         raise Exception("Could not extract rig name from task source: %s" % task_source)
-
-     if rigs.is_behavior_box(extracted_rig_name):
-         rig_model = behavior_box.init(
-             extracted_rig_name,
-             modification_date=datetime.date.today(),
-         )
-         rig_model.write_standard_file(output_path.parent)
-         return output_path
-
-     extracted_session_datetime = extract_session_datetime(task_source)
-
-     # if grabbing latest rig model fails, return a new one
-     return copy_or_init_rig(
-         storage_directory,
-         extracted_session_datetime,
-         extracted_rig_name,
-         output_path,
-     )
+ """Convenience functions for:
+ - Adding neuropixels rig to dynamic routing session directory.
+ - Updating neuropixels rig from dynamic routing session directory.
+ """
+
+ import datetime
+ import logging
+ import pathlib
+ import typing
+
+ from aind_data_schema.core import rig, session
+
+ from np_codeocean.metadata import common, np, rigs, storage, update, utils
+ from np_codeocean.metadata.model_templates import behavior_box, neuropixels_rig
+
+ logger = logging.getLogger(__name__)
+
+
+ SESSION_MODEL_GLOB_PATTERN = "*session.json"
+
+
+ def scrape_session_model_path(session_directory: pathlib.Path) -> pathlib.Path:
+     """Scrapes aind-metadata session json from dynamic routing session
+     directory.
+     """
+     matches = list(session_directory.glob(SESSION_MODEL_GLOB_PATTERN))
+     logger.debug("Scraped session model paths: %s" % matches)
+     return matches[0]
+
+
+ def update_session_from_rig(
+     session_source: pathlib.Path,
+     rig_source: pathlib.Path,
+     output_path: pathlib.Path,
+ ) -> pathlib.Path:
+     """Convenience function that updates the `rig_id` of a session model at
+     `session_source`. Uses the `rig_id` of `rig_source`.
+
+     Notes
+     -----
+     - Overwrites the session model at `output_path`.
+     """
+     session_model = session.Session.model_validate_json(session_source.read_text())
+     rig_model = rig.Rig.model_validate_json(
+         rig_source.read_text().replace("NP.2", "NP2")
+     )
+     session_model.rig_id = rig_model.rig_id
+     return utils.save_aind_model(session_model, output_path)
+
+
+ def copy_or_init_rig(
+     storage_directory: pathlib.Path,
+     extracted_session_datetime: datetime.datetime,
+     extracted_rig_name: str,
+     output_path: pathlib.Path,
+ ) -> pathlib.Path:
+     try:
+         rig_model_path = storage.get_item(
+             storage_directory, extracted_session_datetime, extracted_rig_name
+         )
+         # validate that existing model is of the correct current
+         # aind-data-schema metadata format
+         assert rig_model_path is not None
+         rig.Rig.model_validate_json(rig_model_path.read_text())
+
+         return rigs.copy_rig(
+             extracted_rig_name,
+             output_path,
+             extracted_session_datetime,
+             storage_directory,
+         )
+     except Exception:
+         logger.error("Failed to copy rig.", exc_info=True)
+         rig_model = neuropixels_rig.init(
+             extracted_rig_name, # type: ignore
+             modification_date=datetime.date.today(),
+         )
+         rig_model.write_standard_file(output_path.parent)
+         return output_path
+
+
+ def add_np_rig_to_session_dir(
+     session_dir: pathlib.Path,
+     session_datetime: datetime.datetime,
+     rig_model_dir: typing.Optional[pathlib.Path] = None,
+ ) -> None:
+     """Direct support for the dynamic routing task. Adds an `aind-data-schema`
+     `rig.json` to a dynamic routing session directory. The `aind-data-schema`
+     `session.json` in `session_dir` will be updated with the `rig_id` of the
+     added `rig.json`.
+
+     Notes
+     -----
+     - An aind metadata session json must exist and be ending with filename
+     session.json (pattern: `*session.json`) in `session_dir`.
+     - If `rig_model_dir` is not provided, will attempt to get default from
+     np-config. You will need to be onprem for `np-config` to work.
+     """
+     scraped_session_model_path = scrape_session_model_path(session_dir)
+     logger.debug("Scraped session model path: %s" % scraped_session_model_path)
+     scraped_session = session.Session.model_validate_json(
+         scraped_session_model_path.read_text()
+     )
+     scraped_rig_id = scraped_session.rig_id
+     logger.info("Scraped rig id: %s" % scraped_rig_id)
+     _, rig_name, _ = scraped_rig_id.split("_")
+     logger.info("Parsed rig name: %s" % rig_name)
+     rig_model_path = session_dir / "rig.json"
+
+     if not rig_model_dir:
+         logger.debug("Getting storage directory from np-config.")
+         rig_model_dir = np.get_rig_storage_directory()
+
+     current_model_path = copy_or_init_rig(
+         rig_model_dir,
+         session_datetime,
+         rig_name,
+         rig_model_path,
+     )
+
+     logger.info("Current model path: %s" % current_model_path)
+     settings_sources = list(session_dir.glob("**/settings.xml"))
+     logger.info("Scraped open ephys settings: %s" % settings_sources)
+
+     updated_model_path = update.update_rig(
+         rig_model_path,
+         modification_date=session_datetime.date(),
+         open_ephys_settings_sources=settings_sources,
+         output_path=rig_model_path,
+     )
+
+     update_session_from_rig(
+         scraped_session_model_path,
+         updated_model_path,
+         scraped_session_model_path,
+     )
+
+     storage.update_item(
+         rig_model_dir,
+         updated_model_path,
+         session_datetime,
+         rig_name,
+     )
+
+
+ def update_neuropixels_rig_from_dynamic_routing_session_dir(
+     rig_source: pathlib.Path,
+     session_dir: pathlib.Path,
+     output_path: pathlib.Path = pathlib.Path("rig.json"),
+     modification_date: typing.Optional[datetime.date] = None,
+     mvr_mapping: dict[str, str] = common.DEFAULT_MVR_MAPPING,
+ ) -> pathlib.Path:
+     """Scrapes dynamic routing session directory for various rig
+     configuration/settings and updates `rig_source`.
+
+     Notes
+     -----
+     - Will likely be depreciated in the future.
+     """
+     try:
+         task_source = next(session_dir.glob("**/Dynamic*.hdf5"))
+         logger.debug("Scraped task source: %s" % task_source)
+     except StopIteration:
+         task_source = None
+
+     # sync
+     try:
+         sync_source = next(session_dir.glob("**/sync.yml"))
+         logger.debug("Scraped sync source: %s" % sync_source)
+     except StopIteration:
+         sync_source = None
+
+     # mvr
+     try:
+         mvr_source = next(session_dir.glob("**/mvr.ini"))
+         logger.debug("Scraped mvr source: %s" % mvr_source)
+     except StopIteration:
+         mvr_source = None
+
+     # open ephys
+     settings_sources = list(session_dir.glob("**/settings.xml"))
+     logger.debug("Scraped open ephys settings: %s" % settings_sources)
+
+     return update.update_rig(
+         rig_source,
+         task_source=task_source,
+         sync_source=sync_source,
+         mvr_source=mvr_source,
+         mvr_mapping=mvr_mapping,
+         open_ephys_settings_sources=settings_sources,
+         output_path=output_path,
+         modification_date=modification_date,
+         reward_calibration_date=modification_date,
+         sound_calibration_date=modification_date,
+     )
+
+
+ def extract_rig_name(task_source: pathlib.Path) -> str | None:
+     """Extracts rig_name from task_source.
+
+     >>> extract_rig_name(
+     ... pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
+     ... )
+     'NP2'
+     >>> extract_rig_name(
+     ... pathlib.Path("examples") / "behavior-box-task-0.hdf5"
+     ... )
+     'D6'
+     >>> extract_rig_name(
+     ... pathlib.Path("examples") / "behavior-box-task-1.hdf5"
+     ... )
+     'B2'
+
+     Notes
+     -----
+     - If extracted `computerName` is not found or is not bytes, will use
+     `rigName`.
+     """
+     computer_name = utils.extract_hdf5_value(
+         utils.load_hdf5(task_source),
+         [
+             "computerName",
+         ],
+     )
+     logger.debug("Extracted computerName: %s" % computer_name)
+     rig_name = utils.extract_hdf5_value(
+         utils.load_hdf5(task_source),
+         [
+             "rigName",
+         ],
+     )
+     logger.debug("Extracted rigName: %s" % rig_name)
+
+     if isinstance(computer_name, bytes):
+         decoded = computer_name.decode("utf8")
+         if decoded.lower().startswith("beh"):
+             postfixed = decoded.split(".")[1]
+             split = postfixed.split("-")
+             return split[0] + split[1][-1]
+         else:
+             return decoded
+
+     if isinstance(rig_name, bytes):
+         return rig_name.decode("utf-8")
+
+     return None
+
+
+ def extract_session_datetime(
+     task_source: pathlib.Path,
+ ) -> datetime.datetime:
+     """
+     >>> extract_session_datetime(
+     ... pathlib.Path("examples") / "behavior-box-task-0.hdf5"
+     ... )
+     datetime.datetime(2024, 5, 1, 0, 0)
+     >>> extract_session_datetime(
+     ... pathlib.Path("examples") / "behavior-box-task-1.hdf5"
+     ... )
+     datetime.datetime(2023, 9, 8, 0, 0)
+     """
+     start_time_str = utils.extract_hdf5_value(
+         utils.load_hdf5(task_source),
+         [
+             "startTime",
+         ],
+     )
+     if not start_time_str:
+         raise Exception("Could not extract start time from task source.")
+
+     logger.debug("Extracted start time bytes: %s" % start_time_str)
+     date_str, _ = start_time_str.decode("utf8").split("_")
+     logger.debug("Date string: %s" % date_str)
+     return datetime.datetime.strptime(date_str, "%Y%m%d")
+
+
+ def copy_task_rig(
+     task_source: pathlib.Path,
+     output_path: pathlib.Path,
+     storage_directory: typing.Optional[pathlib.Path] = None,
+ ) -> pathlib.Path | None:
+     """Extracts rig_name from task_source and copies the associated `rig.json`
+     to output_path.
+
+     >>> storage_directory = pathlib.Path("examples") / "rig-directory"
+     >>> task_source = pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
+     >>> copy_task_rig(
+     ... task_source,
+     ... pathlib.Path("rig.json"),
+     ... storage_directory,
+     ... )
+     PosixPath('rig.json')
+
+     >>> task_source = pathlib.Path("examples") / "behavior-box-task-0.hdf5"
+     >>> copy_task_rig(
+     ... task_source,
+     ... pathlib.Path("rig.json"),
+     ... storage_directory,
+     ... )
+     PosixPath('rig.json')
+
+     Notes
+     -----
+     - If `storage_directory` is not provided, will attempt to get default from
+     np-config.
+     """
+     # storage_directory optional is legacy behavior
+     # TODO: remove the optional so we can remove this safeguard
+     if not storage_directory:
+         raise Exception("Storage directory must be provided.")
+
+     extracted_rig_name = extract_rig_name(task_source)
+     logger.debug("Extracted rig name: %s" % extracted_rig_name)
+     if not extracted_rig_name:
+         raise Exception("Could not extract rig name from task source: %s" % task_source)
+
+     if rigs.is_behavior_box(extracted_rig_name):
+         rig_model = behavior_box.init(
+             extracted_rig_name,
+             modification_date=datetime.date.today(),
+         )
+         rig_model.write_standard_file(output_path.parent)
+         return output_path
+
+     extracted_session_datetime = extract_session_datetime(task_source)
+
+     # if grabbing latest rig model fails, return a new one
+     return copy_or_init_rig(
+         storage_directory,
+         extracted_session_datetime,
+         extracted_rig_name,
+         output_path,
+     )
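
For orientation, below is a minimal usage sketch of the module shown in this diff, based only on the function signatures visible above. The diff does not reveal the file's import path, so the module name used here (np_codeocean.metadata.dynamic_routing_task) and the example directories are hypothetical placeholders; substitute the module's real location and your own session and rig-storage paths.

# Minimal usage sketch; module path and directory locations are assumptions,
# only the function signatures come from the diff above.
import pathlib

from np_codeocean.metadata import dynamic_routing_task as drt  # hypothetical module name

session_dir = pathlib.Path("examples") / "session-directory"  # hypothetical session directory
rig_storage = pathlib.Path("examples") / "rig-directory"      # hypothetical rig.json storage directory

# Locate the dynamic routing task HDF5 file for the session.
task_source = next(session_dir.glob("**/Dynamic*.hdf5"))

# Entry point 1: derive a rig.json from the task HDF5 file alone
# (copies the stored rig model, or initializes a new one on failure).
drt.copy_task_rig(task_source, pathlib.Path("rig.json"), storage_directory=rig_storage)

# Entry point 2: attach/refresh rig metadata for a session directory that
# already contains an aind-data-schema *session.json, syncing its rig_id.
drt.add_np_rig_to_session_dir(
    session_dir,
    drt.extract_session_datetime(task_source),
    rig_model_dir=rig_storage,
)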