np_codeocean 0.3.5__py3-none-any.whl → 0.3.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- np_codeocean/__init__.py +1 -1
- np_codeocean/metadata/__init__.py +1 -1
- np_codeocean/metadata/common.py +1 -3
- np_codeocean/metadata/core.py +333 -331
- np_codeocean/metadata/dynamic_routing_task_etl.py +1 -1
- np_codeocean/metadata/model_templates/behavior_box.py +115 -115
- np_codeocean/metadata/model_templates/neuropixels_rig.py +544 -544
- np_codeocean/metadata/np.py +1 -1
- np_codeocean/metadata/rigs.py +1 -1
- np_codeocean/metadata/storage.py +78 -78
- np_codeocean/metadata/update.py +1 -2
- np_codeocean/metadata/utils.py +1 -1
- np_codeocean/np_session_utils.py +462 -385
- np_codeocean/scripts/upload_dynamic_routing_behavior.py +483 -413
- np_codeocean/scripts/upload_dynamic_routing_ecephys.py +279 -217
- np_codeocean/scripts/upload_split_recordings_example.py +39 -33
- np_codeocean/utils.py +671 -563
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/METADATA +13 -6
- np_codeocean-0.3.6.dist-info/RECORD +23 -0
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/WHEEL +2 -1
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/entry_points.txt +0 -3
- np_codeocean-0.3.6.dist-info/top_level.txt +1 -0
- np_codeocean-0.3.5.dist-info/RECORD +0 -22
np_codeocean/metadata/core.py
CHANGED
|
@@ -1,331 +1,333 @@
|
|
|
1
|
-
"""Convenience functions for:
|
|
2
|
-
- Adding neuropixels rig to dynamic routing session directory.
|
|
3
|
-
- Updating neuropixels rig from dynamic routing session directory.
|
|
4
|
-
"""
|
|
5
|
-
|
|
6
|
-
import datetime
|
|
7
|
-
import logging
|
|
8
|
-
import pathlib
|
|
9
|
-
import typing
|
|
10
|
-
|
|
11
|
-
from aind_data_schema.core import rig, session
|
|
12
|
-
|
|
13
|
-
from np_codeocean.metadata import common, np, rigs, storage, update, utils
|
|
14
|
-
from np_codeocean.metadata.model_templates import behavior_box, neuropixels_rig
|
|
15
|
-
|
|
16
|
-
logger = logging.getLogger(__name__)
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
SESSION_MODEL_GLOB_PATTERN = "*session.json"
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
def scrape_session_model_path(session_directory: pathlib.Path) -> pathlib.Path:
|
|
23
|
-
"""Scrapes aind-metadata session json from dynamic routing session
|
|
24
|
-
directory.
|
|
25
|
-
"""
|
|
26
|
-
matches = list(session_directory.glob(SESSION_MODEL_GLOB_PATTERN))
|
|
27
|
-
logger.debug("Scraped session model paths: %s" % matches)
|
|
28
|
-
return matches[0]
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
def update_session_from_rig(
|
|
32
|
-
session_source: pathlib.Path,
|
|
33
|
-
rig_source: pathlib.Path,
|
|
34
|
-
output_path: pathlib.Path,
|
|
35
|
-
) -> pathlib.Path:
|
|
36
|
-
"""Convenience function that updates the `rig_id` of a session model at
|
|
37
|
-
`session_source`. Uses the `rig_id` of `rig_source`.
|
|
38
|
-
|
|
39
|
-
Notes
|
|
40
|
-
-----
|
|
41
|
-
- Overwrites the session model at `output_path`.
|
|
42
|
-
"""
|
|
43
|
-
session_model = session.Session.model_validate_json(session_source.read_text())
|
|
44
|
-
rig_model = rig.Rig.model_validate_json(
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
logger.info("
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
logger.info("
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
scraped_session_model_path,
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
return
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
logger.debug("
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
>>>
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
...
|
|
289
|
-
...
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
...
|
|
297
|
-
...
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
1
|
+
"""Convenience functions for:
|
|
2
|
+
- Adding neuropixels rig to dynamic routing session directory.
|
|
3
|
+
- Updating neuropixels rig from dynamic routing session directory.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import datetime
|
|
7
|
+
import logging
|
|
8
|
+
import pathlib
|
|
9
|
+
import typing
|
|
10
|
+
|
|
11
|
+
from aind_data_schema.core import rig, session
|
|
12
|
+
|
|
13
|
+
from np_codeocean.metadata import common, np, rigs, storage, update, utils
|
|
14
|
+
from np_codeocean.metadata.model_templates import behavior_box, neuropixels_rig
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
SESSION_MODEL_GLOB_PATTERN = "*session.json"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def scrape_session_model_path(session_directory: pathlib.Path) -> pathlib.Path:
    """Scrapes aind-metadata session json from dynamic routing session
    directory.

    Parameters
    ----------
    session_directory: Directory searched (non-recursively) for a file
        matching `SESSION_MODEL_GLOB_PATTERN` (`*session.json`).

    Returns
    -------
    Path to the first matching session model file.

    Raises
    ------
    FileNotFoundError
        If no file matching the pattern exists in `session_directory`.
        (Previously an unhelpful `IndexError` from `matches[0]`.)
    """
    matches = list(session_directory.glob(SESSION_MODEL_GLOB_PATTERN))
    logger.debug("Scraped session model paths: %s", matches)
    if not matches:
        raise FileNotFoundError(
            f"No file matching {SESSION_MODEL_GLOB_PATTERN!r} in {session_directory}"
        )
    # NOTE: if multiple files match, the first glob hit wins — glob order is
    # filesystem-dependent.
    return matches[0]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def update_session_from_rig(
    session_source: pathlib.Path,
    rig_source: pathlib.Path,
    output_path: pathlib.Path,
) -> pathlib.Path:
    """Copy the `rig_id` of the rig model at `rig_source` onto the session
    model at `session_source`, then save the session model.

    Notes
    -----
    - Overwrites the session model at `output_path`.
    """
    # NOTE(review): the "NP.2" -> "NP2" rewrite presumably works around a rig
    # id that fails validation as-is — confirm against aind-data-schema.
    normalized_rig_json = rig_source.read_text().replace("NP.2", "NP2")
    validated_rig = rig.Rig.model_validate_json(normalized_rig_json)
    validated_session = session.Session.model_validate_json(
        session_source.read_text()
    )
    validated_session.rig_id = validated_rig.rig_id
    return utils.save_aind_model(validated_session, output_path)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def copy_or_init_rig(
    storage_directory: pathlib.Path,
    extracted_session_datetime: datetime.datetime,
    extracted_rig_name: str,
    output_path: pathlib.Path,
) -> pathlib.Path:
    """Copy the stored rig model for `extracted_rig_name` to `output_path`,
    falling back to a freshly initialized neuropixels rig model if the stored
    model is missing, invalid, or cannot be copied.

    Parameters
    ----------
    storage_directory: Root of the shared rig-model storage.
    extracted_session_datetime: Session datetime used to pick the stored model.
    extracted_rig_name: Rig name used to pick the stored model.
    output_path: Destination for the copied/initialized `rig.json`.

    Returns
    -------
    `output_path` (the fallback always returns it; the copy path returns
    whatever `rigs.copy_rig` returns).
    """
    try:
        rig_model_path = storage.get_item(
            storage_directory, extracted_session_datetime, extracted_rig_name
        )
        # An `assert` here would be stripped under `python -O`; raise
        # explicitly so the missing-model case reliably takes the fallback.
        if rig_model_path is None:
            raise FileNotFoundError(
                "No stored rig model for %s" % extracted_rig_name
            )
        # validate that existing model is of the correct current
        # aind-data-schema metadata format
        rig.Rig.model_validate_json(rig_model_path.read_text())

        return rigs.copy_rig(
            extracted_rig_name,
            output_path,
            extracted_session_datetime,
            storage_directory,
        )
    except Exception:
        # Deliberate broad catch: any failure above (lookup, validation,
        # copy) falls back to initializing a brand-new rig model.
        logger.error("Failed to copy rig.", exc_info=True)
        rig_model = neuropixels_rig.init(
            extracted_rig_name,  # type: ignore
            modification_date=datetime.date.today(),
        )
        # write_standard_file writes into the parent directory; assumes the
        # standard filename matches `output_path.name` — TODO confirm.
        rig_model.write_standard_file(output_path.parent)
        return output_path
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def add_np_rig_to_session_dir(
    session_dir: pathlib.Path,
    session_datetime: datetime.datetime,
    rig_model_dir: typing.Optional[pathlib.Path] = None,
) -> None:
    """Direct support for the dynamic routing task. Adds an `aind-data-schema`
    `rig.json` to a dynamic routing session directory. The `aind-data-schema`
    `session.json` in `session_dir` will be updated with the `rig_id` of the
    added `rig.json`.

    Notes
    -----
    - An aind metadata session json must exist and be ending with filename
    session.json (pattern: `*session.json`) in `session_dir`.
    - If `rig_model_dir` is not provided, will attempt to get default from
    np-config. You will need to be onprem for `np-config` to work.
    """
    scraped_session_model_path = scrape_session_model_path(session_dir)
    logger.debug("Scraped session model path: %s" % scraped_session_model_path)
    scraped_session = session.Session.model_validate_json(
        scraped_session_model_path.read_text()
    )
    scraped_rig_id = scraped_session.rig_id
    logger.info("Scraped rig id: %s" % scraped_rig_id)
    # rig_id is assumed to be exactly three "_"-separated fields with the rig
    # name in the middle; raises ValueError otherwise — TODO confirm format.
    _, rig_name, _ = scraped_rig_id.split("_")
    logger.info("Parsed rig name: %s" % rig_name)
    rig_model_path = session_dir / "rig.json"

    if not rig_model_dir:
        logger.debug("Getting storage directory from np-config.")
        rig_model_dir = np.get_rig_storage_directory()

    # Copy the stored rig model for this rig/date, or fall back to a freshly
    # initialized one (see copy_or_init_rig).
    current_model_path = copy_or_init_rig(
        rig_model_dir,
        session_datetime,
        rig_name,
        rig_model_path,
    )

    logger.info("Current model path: %s" % current_model_path)
    settings_sources = list(session_dir.glob("**/settings.xml"))
    logger.info("Scraped open ephys settings: %s" % settings_sources)

    # Refresh the rig model in place (output_path == input path) with the
    # scraped open ephys settings.
    updated_model_path = update.update_rig(
        rig_model_path,
        modification_date=session_datetime.date(),
        open_ephys_settings_sources=settings_sources,
        output_path=rig_model_path,
    )

    # Overwrite the scraped session.json with the updated rig's rig_id.
    update_session_from_rig(
        scraped_session_model_path,
        updated_model_path,
        scraped_session_model_path,
    )

    # Persist the updated rig model back to shared storage.
    storage.update_item(
        rig_model_dir,
        updated_model_path,
        session_datetime,
        rig_name,
    )
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def update_neuropixels_rig_from_dynamic_routing_session_dir(
    rig_source: pathlib.Path,
    session_dir: pathlib.Path,
    output_path: pathlib.Path = pathlib.Path("rig.json"),
    modification_date: typing.Optional[datetime.date] = None,
    mvr_mapping: dict[str, str] = common.DEFAULT_MVR_MAPPING,
) -> pathlib.Path:
    """Scrapes dynamic routing session directory for various rig
    configuration/settings and updates `rig_source`.

    Parameters
    ----------
    rig_source: Existing rig model to update.
    session_dir: Directory scraped recursively for task hdf5, sync yaml,
        mvr ini, and open ephys settings.xml sources.
    output_path: Where the updated rig model is written.
    modification_date: Also reused as the reward/sound calibration dates.
    mvr_mapping: NOTE: defaults to a module-level shared mapping — callers
        and `update_rig` must not mutate it.

    Notes
    -----
    - Will likely be deprecated in the future.
    """
    # Each scraped source is optional: `next(iterator, None)` yields the
    # first match or None, replacing three try/except StopIteration blocks.
    # task
    task_source = next(session_dir.glob("**/Dynamic*.hdf5"), None)
    logger.debug("Scraped task source: %s" % task_source)

    # sync
    sync_source = next(session_dir.glob("**/sync.yml"), None)
    logger.debug("Scraped sync source: %s" % sync_source)

    # mvr
    mvr_source = next(session_dir.glob("**/mvr.ini"), None)
    logger.debug("Scraped mvr source: %s" % mvr_source)

    # open ephys
    settings_sources = list(session_dir.glob("**/settings.xml"))
    logger.debug("Scraped open ephys settings: %s" % settings_sources)

    return update.update_rig(
        rig_source,
        task_source=task_source,
        sync_source=sync_source,
        mvr_source=mvr_source,
        mvr_mapping=mvr_mapping,
        open_ephys_settings_sources=settings_sources,
        output_path=output_path,
        modification_date=modification_date,
        reward_calibration_date=modification_date,
        sound_calibration_date=modification_date,
    )
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def extract_rig_name(task_source: pathlib.Path) -> str | None:
    """Extracts rig_name from task_source.

    >>> extract_rig_name(
    ...     pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
    ... )
    'NP2'
    >>> extract_rig_name(
    ...     pathlib.Path("examples") / "behavior-box-task-0.hdf5"
    ... )
    'D6'
    >>> extract_rig_name(
    ...     pathlib.Path("examples") / "behavior-box-task-1.hdf5"
    ... )
    'B2'

    Notes
    -----
    - If extracted `computerName` is not found or is not bytes, will use
    `rigName`.
    """
    # NOTE(review): the task file is loaded once per extracted key; hoisting
    # a single load looks possible but depends on utils.load_hdf5 handle
    # semantics — confirm before changing.
    raw_computer_name = utils.extract_hdf5_value(
        utils.load_hdf5(task_source), ["computerName"]
    )
    logger.debug("Extracted computerName: %s" % raw_computer_name)
    raw_rig_name = utils.extract_hdf5_value(
        utils.load_hdf5(task_source), ["rigName"]
    )
    logger.debug("Extracted rigName: %s" % raw_rig_name)

    if isinstance(raw_computer_name, bytes):
        decoded_name = raw_computer_name.decode("utf8")
        if not decoded_name.lower().startswith("beh"):
            return decoded_name
        # Behavior-box computer name: take the segment after the first ".",
        # split on "-", and keep the first part plus the last character of
        # the second (matches the doctest outputs 'D6' / 'B2' above).
        parts = decoded_name.split(".")[1].split("-")
        return parts[0] + parts[1][-1]

    if isinstance(raw_rig_name, bytes):
        return raw_rig_name.decode("utf-8")

    return None
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def extract_session_datetime(
    task_source: pathlib.Path,
) -> datetime.datetime:
    """Extract the session start date from a task hdf5 as a midnight datetime.

    >>> extract_session_datetime(
    ...     pathlib.Path("examples") / "behavior-box-task-0.hdf5"
    ... )
    datetime.datetime(2024, 5, 1, 0, 0)
    >>> extract_session_datetime(
    ...     pathlib.Path("examples") / "behavior-box-task-1.hdf5"
    ... )
    datetime.datetime(2023, 9, 8, 0, 0)
    """
    raw_start_time = utils.extract_hdf5_value(
        utils.load_hdf5(task_source), ["startTime"]
    )
    if not raw_start_time:
        raise Exception("Could not extract start time from task source.")

    logger.debug("Extracted start time bytes: %s" % raw_start_time)
    # startTime is bytes of the form b"YYYYMMDD_<time>"; only the date part
    # before the underscore is kept (time-of-day is discarded).
    session_date, _ = raw_start_time.decode("utf8").split("_")
    logger.debug("Date string: %s" % session_date)
    return datetime.datetime.strptime(session_date, "%Y%m%d")
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def copy_task_rig(
    task_source: pathlib.Path,
    output_path: pathlib.Path,
    storage_directory: typing.Optional[pathlib.Path] = None,
) -> pathlib.Path | None:
    """Extracts rig_name from task_source and copies the associated `rig.json`
    to output_path.

    >>> storage_directory = pathlib.Path("examples") / "rig-directory"
    >>> task_source = pathlib.Path("examples") / "neuropixels-rig-task.hdf5"
    >>> copy_task_rig(
    ...     task_source,
    ...     pathlib.Path("rig.json"),
    ...     storage_directory,
    ... )
    PosixPath('rig.json')

    >>> task_source = pathlib.Path("examples") / "behavior-box-task-0.hdf5"
    >>> copy_task_rig(
    ...     task_source,
    ...     pathlib.Path("rig.json"),
    ...     storage_directory,
    ... )
    PosixPath('rig.json')

    Notes
    -----
    - `storage_directory` is effectively required: omitting it raises. It is
    typed Optional only for backward compatibility (see TODO below).
    """
    # storage_directory optional is legacy behavior
    # TODO: remove the optional so we can remove this safeguard
    if not storage_directory:
        raise Exception("Storage directory must be provided.")

    extracted_rig_name = extract_rig_name(task_source)
    logger.debug("Extracted rig name: %s" % extracted_rig_name)
    if not extracted_rig_name:
        raise Exception("Could not extract rig name from task source: %s" % task_source)

    # Behavior boxes always get a freshly initialized rig model (dated today)
    # rather than a copy from storage.
    if rigs.is_behavior_box(extracted_rig_name):
        rig_model = behavior_box.init(
            extracted_rig_name,
            modification_date=datetime.date.today(),
        )
        rig_model.write_standard_file(output_path.parent)
        return output_path

    extracted_session_datetime = extract_session_datetime(task_source)

    # if grabbing latest rig model fails, return a new one
    return copy_or_init_rig(
        storage_directory,
        extracted_session_datetime,
        extracted_rig_name,
        output_path,
    )