np_codeocean-0.3.5-py3-none-any.whl → np_codeocean-0.3.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- np_codeocean/__init__.py +1 -1
- np_codeocean/metadata/__init__.py +1 -1
- np_codeocean/metadata/common.py +1 -3
- np_codeocean/metadata/core.py +333 -331
- np_codeocean/metadata/dynamic_routing_task_etl.py +1 -1
- np_codeocean/metadata/model_templates/behavior_box.py +115 -115
- np_codeocean/metadata/model_templates/neuropixels_rig.py +544 -544
- np_codeocean/metadata/np.py +1 -1
- np_codeocean/metadata/rigs.py +1 -1
- np_codeocean/metadata/storage.py +78 -78
- np_codeocean/metadata/update.py +1 -2
- np_codeocean/metadata/utils.py +1 -1
- np_codeocean/np_session_utils.py +462 -385
- np_codeocean/scripts/upload_dynamic_routing_behavior.py +483 -413
- np_codeocean/scripts/upload_dynamic_routing_ecephys.py +279 -217
- np_codeocean/scripts/upload_split_recordings_example.py +39 -33
- np_codeocean/utils.py +671 -563
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/METADATA +13 -6
- np_codeocean-0.3.6.dist-info/RECORD +23 -0
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/WHEEL +2 -1
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/entry_points.txt +0 -3
- np_codeocean-0.3.6.dist-info/top_level.txt +1 -0
- np_codeocean-0.3.5.dist-info/RECORD +0 -22
np_codeocean/utils.py
CHANGED
@@ -1,563 +1,671 @@
(The previous 563-line version of np_codeocean/utils.py was replaced in full; the removed left-hand column is not preserved in this view. The new 671-line contents of the file follow.)
from __future__ import annotations

import contextlib
import csv
import datetime
import functools
import itertools
import json
import logging
import os
import pathlib
import re
from collections.abc import Generator, Iterable
from typing import Any, Literal

import np_config
import np_tools
import npc_ephys
import npc_session
import npc_sync
import numpy as np
import polars as pl
import requests
import typing_extensions
from aind_codeocean_pipeline_monitor.models import PipelineMonitorSettings
from aind_data_schema_models.modalities import Modality
from aind_data_schema_models.platforms import Platform
from aind_data_transfer_service.models.core import (
    SubmitJobRequestV2,
    Task,
    UploadJobConfigsV2,
)
from aind_slurm_rest_v2.models.v0040_job_desc_msg import (
    V0040JobDescMsg,
)

logger = logging.getLogger(__name__)

AINDPlatform = Literal["ecephys", "behavior"]

AIND_DATA_TRANSFER_SERVICE = "http://aind-data-transfer-service"
DEV_SERVICE = "http://aind-data-transfer-service-dev"
HPC_UPLOAD_JOB_EMAIL = "ben.hardcastle@alleninstitute.org"
ACQ_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"

AIND_METADATA_NAMES: tuple[str, ...] = (
    "session",
    "data_description",
    "procedures",
    "processing",
    "rig",
    "subject",
)

# In the future, default slurm settings can be stored in a job_type in AWS Param Store
# see http://aind-data-transfer-service/job_params for current job_types
_DEFAULT_EPHYS_SLURM_SETTINGS_JSON = {
    "memory_per_cpu": {"set": True, "number": 8000},
    "minimum_cpus_per_node": 12,  # 6 probes * (lfp + ap)
    "partition": "aind",
    "tasks": 1,
    "time_limit": {"set": True, "number": 15 * 60},
    "environment": [
        "PATH=/bin:/usr/bin/:/usr/local/bin/",
        "LD_LIBRARY_PATH=/lib/:/lib64/:/usr/local/lib",
    ],
    "maximum_nodes": 1,
    "minimum_nodes": 1,
    "current_working_directory": ".",
}
"""Increased timelimit and cpus for running ephys compression on the hpc"""
DEFAULT_EPHYS_SLURM_SETTINGS = V0040JobDescMsg.model_validate(
    {
        **_DEFAULT_EPHYS_SLURM_SETTINGS_JSON,
        "qos": "production",
        "standard_error": "/allen/aind/scratch/svc_aind_airflow/prod/logs/%x_%j_error.out",
        "standard_output": "/allen/aind/scratch/svc_aind_airflow/prod/logs/%x_%j.out",
    }
)
DEFAULT_EPHYS_SLURM_SETTINGS_DEV = V0040JobDescMsg.model_validate(
    {
        **_DEFAULT_EPHYS_SLURM_SETTINGS_JSON,
        "qos": "dev",
        "standard_error": "/allen/aind/scratch/svc_aind_airflow/dev/logs/%x_%j_error.out",
        "standard_output": "/allen/aind/scratch/svc_aind_airflow/dev/logs/%x_%j.out",
    }
)
DEFAULT_EPHYS_IMAGE = {
    "image": "ghcr.io/allenneuraldynamics/aind-ephys-transformation",
    "image_version": "0.2.1",
    "command_script": "#!/bin/bash \nsingularity exec --cleanenv docker://%IMAGE:%IMAGE_VERSION python -m aind_ephys_transformation.ephys_job --job-settings ' %JOB_SETTINGS '",
}


class SyncFileNotFoundError(FileNotFoundError):
    pass


@functools.cache
def get_project_config() -> dict[str, Any]:
    """Config for this project"""
    return np_config.fetch("/projects/np_codeocean")


def set_npc_lims_credentials() -> None:
    creds = np_config.fetch("/projects/np_codeocean/npc_lims")
    for k, v in creds.items():
        os.environ.setdefault(k, v)


def get_home() -> pathlib.Path:
    if os.name == "nt":
        return pathlib.Path(os.environ["USERPROFILE"])
    return pathlib.Path(os.environ["HOME"])


def is_behavior_video_file(path: pathlib.Path) -> bool:
    if path.is_dir() or path.suffix not in (".mp4", ".avi", ".json"):
        return False
    with contextlib.suppress(ValueError):
        _ = npc_session.extract_mvr_camera_name(path.as_posix())
        return True
    return False


def is_surface_channel_recording(path_name: str) -> bool:
    """
    >>> import np_session
    >>> session = np_session.Session("//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_690706_20231129_surface_channels")
    >>> is_surface_channel_recording(session.npexp_path.as_posix())
    True
    """
    return "surface_channels" in path_name.lower()


def cleanup_ephys_symlinks(toplevel_dir: pathlib.Path) -> None:
    """After creating symlinks to the ephys data, run this to make any necessary
    modifications prior to upload.

    Provided dir path should be a directory containing all ephys data in
    subfolders (e.g. directory containing "Record Node 10x" folders)

    Only deletes symlinks or writes new files in place of symlinks - does not
    modify original data.

    Rules:
    - if any continuous.dat files are unreadable: remove them and their containing folders
    - if any probes were recorded on multiple record nodes: just keep the first
    - if continuous.dat files are missing (ie. excluded because probes weren't
      inserted, or we removed symlinks in previous steps): update metadata files
    """
    remove_unreadable_ephys_data(toplevel_dir)
    remove_duplicate_ephys_data(toplevel_dir)
    cleanup_ephys_metadata(toplevel_dir)


def remove_unreadable_ephys_data(toplevel_dir: pathlib.Path) -> None:

    for continuous_dir in ephys_continuous_dir_generator(toplevel_dir):
        events_dir = (
            continuous_dir.parent.parent / "events" / continuous_dir.name / "TTL"
        )
        filenames = ("continuous.dat", "timestamps.npy", "sample_numbers.npy")
        dirs = (continuous_dir,) + ((events_dir,) if events_dir.exists() else ())
        mark_for_removal = False
        for d in dirs:
            if not d.exists():
                continue
            for filename in filenames:
                if filename == "continuous.dat" and d.name == "TTL":
                    continue  # no continuous.dat expected in TTL events
                file = d / filename
                if not (file.is_symlink() or file.exists()):
                    logger.warning(
                        f"Critical file not found {file}, insufficient data for processing"
                    )
                    mark_for_removal = True
                    break
                try:
                    data = np.memmap(
                        decode_symlink_path(file),
                        dtype="int16" if "timestamps" not in file.name else "float64",
                        mode="r",
                    )
                except Exception as exc:
                    logger.warning(f"Failed to read {file}: {exc!r}")
                    mark_for_removal = True
                    break
                if data.size == 0:
                    logger.warning(f"Empty file {file}")
                    mark_for_removal = True
                    break
                logger.debug(f"Found readable, non-empty data in {file}")
            if mark_for_removal:
                break
        if mark_for_removal:
            logger.warning(f"Removing {continuous_dir} and its contents")
            remove_folder_of_symlinks(continuous_dir)
            logger.warning(f"Removing {events_dir.parent} and its contents")
            remove_folder_of_symlinks(events_dir.parent)


def remove_duplicate_ephys_data(toplevel_dir: pathlib.Path) -> None:
    logger.info("Checking for duplicate ephys data...")
    paths = sorted(ephys_continuous_dir_generator(toplevel_dir))
    experiments = set(
        re.findall(r"/experiment(\d+)/", path.as_posix())[0] for path in paths
    )
    logger.debug(f"Found {len(experiments)} experiments")
    for experiment in experiments:
        exp_paths = sorted(
            path for path in paths if f"experiment{experiment}" in path.as_posix()
        )
        recordings = set(
            re.findall(r"/recording(\d+)/", path.as_posix())[0] for path in exp_paths
        )
        logger.debug(f"Found {len(recordings)} recordings in experiment{experiment}")
        for recording in recordings:
            recording_paths = sorted(
                path for path in exp_paths if f"recording{recording}" in path.as_posix()
            )
            probes = []
            # import pdb; pdb.set_trace()
            for continuous_dir in recording_paths:
                try:
                    probe = npc_session.ProbeRecord(continuous_dir.name)
                except ValueError:
                    continue
                suffix = continuous_dir.name.split("-")[-1]
                assert suffix in ("AP", "LFP")
                recording_name = f"{probe}-{suffix}"
                if recording_name in probes:
                    logger.info(
                        f"Duplicate {recording_name = } found in {continuous_dir.parent.parent} - removing"
                    )
                    remove_folder_of_symlinks(continuous_dir)
                else:
                    probes.append(recording_name)


def remove_folder_of_symlinks(folder: pathlib.Path) -> None:
    """Recursive deletion of all files in dir tree, with a check that each is a
    symlink."""
    for path in folder.rglob("*"):
        if path.is_dir():
            remove_folder_of_symlinks(path)
        else:
            assert path.is_symlink(), f"Expected {path} to be a symlink"
            path.unlink(missing_ok=True)
    with contextlib.suppress(FileNotFoundError):
        folder.rmdir()


def ephys_recording_dir_generator(
    toplevel_dir: pathlib.Path,
) -> Generator[pathlib.Path, None, None]:
    for recording_dir in toplevel_dir.rglob("recording[0-9]*"):
        if recording_dir.is_dir():
            yield recording_dir


def ephys_continuous_dir_generator(
    toplevel_dir: pathlib.Path,
) -> Generator[pathlib.Path, None, None]:
    for recording_dir in ephys_recording_dir_generator(toplevel_dir):
        parent = recording_dir / "continuous"
        if not parent.exists():
            continue
        for continuous_dir in parent.iterdir():
            if continuous_dir.is_dir():
                yield continuous_dir


def ephys_structure_oebin_generator(
    toplevel_dir: pathlib.Path,
) -> Generator[pathlib.Path, None, None]:
    for recording_dir in ephys_recording_dir_generator(toplevel_dir):
        oebin_path = recording_dir / "structure.oebin"
        if not (oebin_path.is_symlink() or oebin_path.exists()):
            # symlinks that are created for the hpc use posix paths, and aren't
            # readable on windows, so .exists() returns False: use .is_symlink() instead
            logger.warning(f"No structure.oebin found in {recording_dir}")
            continue
        yield oebin_path


def cleanup_ephys_metadata(toplevel_dir: pathlib.Path) -> None:
    logger.debug("Checking structure.oebin for missing folders...")
    for oebin_path in ephys_structure_oebin_generator(toplevel_dir):
        oebin_obj = np_tools.read_oebin(decode_symlink_path(oebin_path))
        logger.debug(f"Checking {oebin_path} against actual folders...")
        any_removed = False
        for subdir_name in ("events", "continuous"):
            subdir = oebin_path.parent / subdir_name
            # iterate over copy of list so as to not disrupt iteration when elements are removed
            for device in [device for device in oebin_obj[subdir_name]]:
                if not (subdir / device["folder_name"]).exists():
                    logger.info(
                        f'{device["folder_name"]} not found in {subdir}, removing from structure.oebin'
                    )
                    oebin_obj[subdir_name].remove(device)
                    any_removed = True
        if any_removed:
            oebin_path.unlink()
            oebin_path.write_text(json.dumps(oebin_obj, indent=4))
            logger.debug(
                "Overwrote symlink to structure.oebin with corrected structure.oebin"
            )


def write_corrected_ephys_timestamps(
    ephys_dir: pathlib.Path,
    behavior_dir: pathlib.Path,
) -> None:
    for path in itertools.chain(behavior_dir.glob("*.h5"), behavior_dir.glob("*.sync")):
        with contextlib.suppress(Exception):
            sync_dataset = npc_sync.SyncDataset(path)
            _ = sync_dataset.line_labels
            logger.info(f"Found valid sync file {path.as_posix()}")
            break
    else:
        raise SyncFileNotFoundError(
            f"No valid sync file found in {behavior_dir.as_posix()}"
        )

    timing_on_pxi = (
        timing
        for timing in npc_ephys.get_ephys_timing_on_pxi(
            ephys_dir.glob("**/experiment*/recording*"),
        )
    )
    timing_on_sync = npc_ephys.get_ephys_timing_on_sync(
        sync=sync_dataset,
        devices=timing_on_pxi,
    )
    npc_ephys.overwrite_timestamps(timing_on_sync)
    logger.info(f"Corrected timestamps in {ephys_dir}")


def decode_symlink_path(oebin_path: pathlib.Path) -> pathlib.Path:
    if not oebin_path.is_symlink():
        return oebin_path
    return np_config.normalize_path(oebin_path.readlink())


def is_csv_in_hpc_upload_queue(
    csv_path: pathlib.Path, upload_service_url: str = AIND_DATA_TRANSFER_SERVICE
) -> bool:
    """Check if an upload job has been submitted to the hpc upload queue.

    - currently assumes one job per csv
    - does not check status (job may be FINISHED rather than RUNNING)

    >>> is_csv_in_hpc_upload_queue("//allen/programs/mindscope/workgroups/np-exp/codeocean/DRpilot_664851_20231114/upload.csv")
    False
    """
    # get subject-id, acq-datetime from csv
    df = pl.read_csv(csv_path, eol_char="\r")
    for col in df.get_columns():
        if col.name.startswith("subject") and col.name.endswith("id"):
            subject = npc_session.SubjectRecord(col[0])
            continue
        if col.name.startswith("acq") and "datetime" in col.name.lower():
            dt = npc_session.DatetimeRecord(col[0])
            continue
        if col.name == "platform":
            platform = col[0]
            continue
    return is_session_in_hpc_queue(
        subject=subject,
        acq_datetime=dt.dt,
        platform=platform,
        upload_service_url=upload_service_url,
    )


def is_session_in_hpc_queue(
    subject: int | str,
    acq_datetime: str | datetime.datetime,
    platform: str | None = None,
    upload_service_url: str = AIND_DATA_TRANSFER_SERVICE,
) -> bool:
    """
    >>> is_session_in_hpc_queue(366122, datetime.datetime(2023, 11, 14, 0, 0, 0))
    False
    >>> is_session_in_hpc_queue(702136, datetime.datetime(2024, 3, 4, 13, 21, 35))
    True
    """
    if not isinstance(acq_datetime, datetime.datetime):
        acq_datetime = datetime.datetime.strptime(acq_datetime, ACQ_DATETIME_FORMAT)
    partial_session_id = f"{subject}_{acq_datetime.strftime(ACQ_DATETIME_FORMAT).replace(' ', '_').replace(':', '-')}"
    if platform:
        partial_session_id = f"{platform}_{partial_session_id}"

    jobs_response = requests.get(f"{upload_service_url}/jobs")
    jobs_response.raise_for_status()
    return partial_session_id in jobs_response.content.decode()


def is_job_in_hpc_upload_queue(
    job: UploadJobConfigsV2, upload_service_url: str = AIND_DATA_TRANSFER_SERVICE
) -> bool:
    return is_session_in_hpc_queue(
        job.subject_id, job.acq_datetime, job.platform.abbreviation, upload_service_url
    )


def write_upload_csv(
    content: dict[str, Any],
    output_path: pathlib.Path,
) -> pathlib.Path:
    logger.info(f"Creating upload job file {output_path}")
    with open(output_path, "w") as f:
        w = csv.writer(f, lineterminator="")
        w.writerow(content.keys())
        w.writerow("\n")
        w.writerow(content.values())
    return output_path


def create_upload_job_configs_v2(
    project_name: str,
    platform: str,
    subject_id: str,
    force_cloud_sync: bool,
    modalities: dict[str, str],
    acq_datetime: datetime.datetime,
    user_email: str = HPC_UPLOAD_JOB_EMAIL,
    job_type: str = "default",
    metadata_dir: str | None = None,
    codeocean_pipeline_settings: dict[str, PipelineMonitorSettings] | None = None,
    check_timestamps: bool = True,  # default in transfer service is True: checks timestamps have been corrected via flag file
    test: bool = False,
    **extra_UploadJobConfigsV2_params: Any,
) -> UploadJobConfigsV2:
    """Create a UploadJobConfigsV2 model. Modalities should be provided in format
    {modality_abbr: input_source}. job_type refers to the default or custom
    presets used for compression and Code Ocean pipelines.
    """
    # Each task in airflow can be configured individually
    # force_cloud_sync
    check_s3_folder_exists_task = Task(skip_task=True) if force_cloud_sync else None
    # metadata_dir
    gather_preliminary_metadata_task = (
        Task(job_settings={"metadata_dir": metadata_dir})
        if metadata_dir is not None
        else None
    )
    # modality transformation settings
    modality_transformation_settings_tasks = dict()  # {modality_abbr: Task}
    if "modalities" in extra_UploadJobConfigsV2_params:
        raise ValueError(
            "modalities should not be passed as a parameter in extra_BasicUploadJobConfigs_params"
        )
    for modality_abbr, input_source in modalities.items():
        job_settings: dict[str, Any] = {
            "input_source": input_source,
            "output_directory": "%OUTPUT_LOCATION",
        }
        # Ecephys compression settings are currently hardcoded
        # In the future, these can be stored in AWS param store as part of a "job_type"
        if modality_abbr == Modality.ECEPHYS.abbreviation:
            if not check_timestamps:
                job_settings["check_timestamps"] = False
            image_resources = (
                DEFAULT_EPHYS_SLURM_SETTINGS_DEV
                if test
                else DEFAULT_EPHYS_SLURM_SETTINGS
            ).model_dump(mode="json", exclude_none=True)
            modality_task = Task(
                skip_task=False,
                job_settings=job_settings,
                image_resources=image_resources,
                **DEFAULT_EPHYS_IMAGE,
            )
        else:
            modality_task = Task(
                job_settings=job_settings,
            )
        modality_transformation_settings_tasks[modality_abbr] = modality_task

    # Code Ocean pipeline settings
    # You can manually specify up to one pipeline conf per modality.
    # These will override any pipelines defined by the job_type.
    # In the future, these can be stored in AWS param store as part of a "job_type"
    codeocean_pipeline_settings_tasks = dict()  # {modality_abbr: Task}
    if codeocean_pipeline_settings is not None:
        codeocean_pipeline_settings_tasks = {
            k: Task(
                job_settings={
                    "pipeline_monitor_settings": v.model_dump(
                        mode="json", exclude_none=True
                    )
                }
            )
            for k, v in codeocean_pipeline_settings.items()
        }
    tasks = {
        "check_s3_folder_exists": check_s3_folder_exists_task,
        "gather_preliminary_metadata": gather_preliminary_metadata_task,
        "modality_transformation_settings": modality_transformation_settings_tasks,
        "codeocean_pipeline_settings": codeocean_pipeline_settings_tasks,
    }
    return UploadJobConfigsV2(
        job_type=job_type,
        platform=Platform.from_abbreviation(platform),
        project_name=project_name,
        subject_id=subject_id,
        acq_datetime=acq_datetime,
        modalities=[
            Modality.from_abbreviation(m)
            for m in modality_transformation_settings_tasks.keys()
        ],
        tasks={k: v for k, v in tasks.items() if v is not None},
        user_email=user_email,
        **extra_UploadJobConfigsV2_params,
    )


def put_jobs_for_hpc_upload(
    upload_jobs: UploadJobConfigsV2 | Iterable[UploadJobConfigsV2],
    upload_service_url: str = AIND_DATA_TRANSFER_SERVICE,
    user_email: str = HPC_UPLOAD_JOB_EMAIL,
    email_notification_types: Iterable[
        Literal["begin", "end", "fail", "retry", "all"]
    ] = ("fail",),
    dry_run: bool = False,
    save_path: pathlib.Path | None = None,
    **extra_model_kwargs: Any,
) -> None:
    """Submit one or more jobs to the aind-data-transfer-service, for
    upload to S3 on the hpc.

    - accepts one or more aind_data_transfer_service UploadJobConfigsV2 models
    - assembles a SubmitJobRequestV2 model
    - excludes jobs for sessions that are already in the upload queue
    - accepts additional parameters for SubmitJobRequestV2 as kwargs
    - submits json via http request
    - optionally saves the json file as a record
    """
    if isinstance(upload_jobs, UploadJobConfigsV2):
        upload_jobs = (upload_jobs,)
    submit_request = SubmitJobRequestV2(
        upload_jobs=[job for job in upload_jobs if not is_job_in_hpc_upload_queue(job)],
        user_email=user_email,
        email_notification_types=email_notification_types,
        **extra_model_kwargs,
    )
    post_request_content = submit_request.model_dump(mode="json", exclude_none=True)
    if save_path:
        save_path.write_text(
            submit_request.model_dump_json(
                round_trip=True, exclude_none=True, indent=4
            ),
            errors="ignore",
        )
    if dry_run:
        logger.warning(
            f"Dry run: not submitting {len(upload_jobs)} upload job(s) to {upload_service_url}"
        )
        return

    # Uncomment to perform a validation check:
    # validate_json_response: requests.Response = requests.post(
    #     url=f"{upload_service_url}/api/v2/validate_json",
    #     json=post_request_content,
    # )
    # validate_json_response.raise_for_status()

    # Submit the jobs
    post_json_response: requests.Response = requests.post(
        url=f"{upload_service_url}/api/v2/submit_jobs",
        json=post_request_content,
    )
    logger.info(f"Submitted {len(upload_jobs)} upload job(s) to {upload_service_url}")
    post_json_response.raise_for_status()


@typing_extensions.deprecated(
    "Uses old, pre-v1 endpoints: use put_jobs_for_hpc_upload in combination with create_upload_job_configs_v2"
)
def put_csv_for_hpc_upload(
    csv_path: pathlib.Path,
    upload_service_url: str = AIND_DATA_TRANSFER_SERVICE,
    hpc_upload_job_email: str = HPC_UPLOAD_JOB_EMAIL,
    dry_run: bool = False,
) -> None:
    """Submit a single job upload csv to the aind-data-transfer-service, for
    upload to S3 on the hpc.

    - gets validated version of csv
    - checks session is not already being uploaded
    - submits csv via http request
    """

    def _raise_for_status(response: requests.Response) -> None:
        """pydantic validation errors are returned as strings that can be eval'd
        to get the real error class + message."""
        if response.status_code != 200:
            try:
                response.json()["data"]["errors"]
            except (
                KeyError,
                IndexError,
                requests.exceptions.JSONDecodeError,
                SyntaxError,
            ) as exc1:
                try:
                    response.raise_for_status()
                except requests.exceptions.HTTPError as exc2:
                    raise exc2 from exc1

    with open(csv_path, "rb") as f:
        validate_csv_response = requests.post(
            url=f"{upload_service_url}/api/validate_csv",
            files=dict(file=f),
        )
    _raise_for_status(validate_csv_response)
    logger.debug(f"Validated response: {validate_csv_response.json()}")
    if is_csv_in_hpc_upload_queue(csv_path, upload_service_url):
        logger.warning(f"Job already submitted for {csv_path}")
        return
    if dry_run:
        logger.info(
            f"Dry run: not submitting {csv_path} to hpc upload queue at {upload_service_url}."
        )
        return
    post_csv_response = requests.post(
        url=f"{upload_service_url}/api/submit_hpc_jobs",
        json=dict(
            jobs=[
                dict(
                    hpc_settings=json.dumps(
                        {"time_limit": 60 * 15, "mail_user": hpc_upload_job_email}
                    ),
                    upload_job_settings=validate_csv_response.json()["data"]["jobs"][0],
                    script="",
                )
            ]
        ),
    )
    logger.info(f"Submitted {csv_path} to hpc upload queue at {upload_service_url}")
    _raise_for_status(post_csv_response)


def ensure_posix(path: str | pathlib.Path) -> str:
    posix = pathlib.Path(path).as_posix()
    if posix.startswith("//"):
        posix = posix[1:]
    return posix


def convert_symlinks_to_posix(toplevel_dir: str | pathlib.Path) -> None:
    """Convert all symlinks in `root_dir` (recursively) to POSIX paths. This is a
    necessary last step before submitting uploads to run on the HPC.
    """
    for path in pathlib.Path(toplevel_dir).rglob("*"):
        if path.is_symlink():
            posix_target = path.readlink().as_posix().removeprefix("//?/UNC")
            path.unlink()
            np_tools.symlink(src=ensure_posix(posix_target), dest=path)


if __name__ == "__main__":
    import doctest

    doctest.testmod(
        optionflags=doctest.ELLIPSIS
        | doctest.NORMALIZE_WHITESPACE
        | doctest.IGNORE_EXCEPTION_DETAIL
    )
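For reference, a minimal sketch of how the new v2 helpers in this file fit together (create_upload_job_configs_v2 followed by put_jobs_for_hpc_upload), which is the path the now-deprecated put_csv_for_hpc_upload points users toward. The project name, subject ID, acquisition datetime, and input paths below are hypothetical placeholders, and the import path assumes the package layout shown in the file list above.

import datetime

from np_codeocean.utils import (
    create_upload_job_configs_v2,
    put_jobs_for_hpc_upload,
)

# Hypothetical session values - substitute real metadata for an actual upload.
job = create_upload_job_configs_v2(
    project_name="Dynamic Routing",  # example project name (assumption)
    platform="ecephys",  # one of the AINDPlatform literals defined above
    subject_id="366122",
    force_cloud_sync=False,
    modalities={
        # {modality_abbr: input_source}, per the function's docstring
        "ecephys": "//allen/path/to/ephys",  # placeholder paths
        "behavior": "//allen/path/to/behavior",
    },
    acq_datetime=datetime.datetime(2024, 1, 1, 12, 0, 0),
)

# dry_run=True assembles the SubmitJobRequestV2 and logs it without posting
# to the aind-data-transfer-service; drop it to actually submit.
put_jobs_for_hpc_upload(job, dry_run=True)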