np_codeocean 0.3.2__py3-none-any.whl → 0.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- np_codeocean/np_session_utils.py +13 -1
- np_codeocean/scripts/upload_dynamic_routing_ecephys.py +6 -3
- np_codeocean/utils.py +7 -3
- {np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/METADATA +1 -1
- {np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/RECORD +7 -7
- {np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/WHEEL +1 -1
- {np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/entry_points.txt +0 -0
np_codeocean/np_session_utils.py
CHANGED
@@ -6,6 +6,8 @@ import doctest
 import pathlib
 from collections.abc import Iterable
 import shutil
+import time
+from typing import Any
 
 import aind_data_transfer_models.core
 import np_config
@@ -302,6 +304,7 @@ def upload_session(
     regenerate_symlinks: bool = True,
     adjust_ephys_timestamps: bool = True,
     codeocean_configs: aind_data_transfer_models.core.CodeOceanPipelineMonitorConfigs | None = None,
+    extra_BasicUploadJobConfigs_params: dict[str, Any] | None = None,
 ) -> None:
     codeocean_root = np_session.NPEXP_PATH / ('codeocean-dev' if test else 'codeocean')
     logger.debug(f'{codeocean_root = }')
@@ -322,6 +325,7 @@ def upload_session(
         create_behavior_symlinks(upload.session, upload.behavior)
     if upload.behavior_videos:
         create_behavior_videos_symlinks(upload.session, upload.behavior_videos)
+    timestamps_adjusted = False
     if adjust_ephys_timestamps and upload.ephys:
         if not upload.behavior:  # includes surface channel recordings
             logger.warning(f"Cannot adjust ephys timestamps for {upload.session} - no behavior folder supplied for upload")
@@ -336,14 +340,22 @@ def upload_session(
                     "`adjust_ephys_timestamps=False` or `--no-sync` flag in CLI"
                 )
             ) from None
+        else:
+            timestamps_adjusted = True
     for path in (upload.ephys, upload.behavior, upload.behavior_videos, upload.aind_metadata):
         if path is not None and path.exists():
             utils.convert_symlinks_to_posix(path)
     csv_content: dict = get_upload_csv_for_session(upload)
     utils.write_upload_csv(csv_content, upload.job)
     np_logging.web('np_codeocean').info(f'Submitting {upload.session} to hpc upload queue')
+    if extra_BasicUploadJobConfigs_params is None:
+        extra_BasicUploadJobConfigs_params = {}
+    if codeocean_configs is not None:
+        if 'codeocean_configs' in extra_BasicUploadJobConfigs_params:
+            raise ValueError("Cannot pass `codeocean_configs` as a parameter to `extra_BasicUploadJobConfigs_params`")
+        extra_BasicUploadJobConfigs_params['codeocean_configs'] = codeocean_configs
     utils.put_jobs_for_hpc_upload(
-        utils.get_job_models_from_csv(upload.job,
+        utils.get_job_models_from_csv(upload.job, check_timestamps=timestamps_adjusted, **extra_BasicUploadJobConfigs_params),
         upload_service_url=utils.DEV_SERVICE if test else utils.AIND_DATA_TRANSFER_SERVICE,
         user_email=hpc_upload_job_email,
         dry_run=dry_run,
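For orientation, a minimal usage sketch of the new `extra_BasicUploadJobConfigs_params` keyword added above. This is not taken from the package itself: the session folder name, email address, and the `force_cloud_sync` key are placeholders/assumptions. Extra keys are forwarded verbatim to `utils.get_job_models_from_csv` and must be valid `BasicUploadJobConfigs` fields, and `codeocean_configs` must not be duplicated there or `upload_session` raises `ValueError`.

import aind_data_transfer_models.core
import np_codeocean

# Pipeline-monitor configs are still passed via their dedicated keyword,
# mirroring the constructor usage shown in the ecephys script below.
codeocean_configs = aind_data_transfer_models.core.CodeOceanPipelineMonitorConfigs(
    pipeline_monitor_capsule_settings=[],   # empty for illustration
)

np_codeocean.upload_session(
    'DRpilot_000000_20240101',                           # placeholder session folder name
    hpc_upload_job_email='someone@alleninstitute.org',   # placeholder address
    codeocean_configs=codeocean_configs,
    extra_BasicUploadJobConfigs_params={
        'force_cloud_sync': True,   # assumed BasicUploadJobConfigs field, for illustration only
    },
)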
np_codeocean/scripts/upload_dynamic_routing_ecephys.py
CHANGED
@@ -153,6 +153,7 @@ def write_metadata_and_upload(
         ignore_errors=True,
         skip_existing=not regenerate_metadata,
     )
+
     pipelines = [
         aind_codeocean_pipeline_monitor.models.PipelineMonitorSettings(
             run_params=codeocean.computation.RunParams(
@@ -182,6 +183,7 @@ def write_metadata_and_upload(
     codeocean_configs = aind_data_transfer_models.core.CodeOceanPipelineMonitorConfigs(
         pipeline_monitor_capsule_settings=pipelines,
     )
+
     return np_codeocean.upload_session(
         session_path_or_folder_name,
         recording_dirs=recording_dirs,
@@ -191,7 +193,9 @@ def write_metadata_and_upload(
         hpc_upload_job_email=hpc_upload_job_email,
         regenerate_symlinks=regenerate_symlinks,
         adjust_ephys_timestamps=adjust_ephys_timestamps,
-
+        extra_BasicUploadJobConfigs_params={
+            'codeocean_configs': codeocean_configs,
+        },
     )
 
 def parse_args() -> argparse.Namespace:
@@ -204,13 +208,12 @@ def parse_args() -> argparse.Namespace:
     parser.add_argument('--dry-run', action='store_true', help="Create upload job but do not submit to hpc upload queue.")
     parser.add_argument('--preserve-symlinks', dest='regenerate_symlinks', action='store_false', help="Existing symlink folders will not be deleted and regenerated - may result in additional data being uploaded")
     parser.add_argument('--regenerate-metadata', action='store_true', help="Regenerate metadata files (session.json and rig.json) even if they already exist")
-    parser.add_argument('--
+    parser.add_argument('--sync', dest="adjust_ephys_timestamps", action='store_true', help="Adjust ephys timestamps.npy prior to upload using sync data (if available)")
     return parser.parse_args()
 
 def main() -> None:
     args = parse_args()
     kwargs = vars(args)
-    kwargs |= {'adjust_ephys_timestamps': False} # unnecessary while we have machinery in place for adjusting in npc_sessions (adds 5 GB of timestamps files for each upload)
     np_codeocean.utils.set_npc_lims_credentials()
     write_metadata_and_upload(**kwargs)
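With the forced `kwargs |= {'adjust_ephys_timestamps': False}` override removed, timestamp adjustment is now opt-in via `--sync`. A minimal, self-contained sketch of that argparse wiring (the flag definition is copied from the diff above; the program name is an assumption):

import argparse

parser = argparse.ArgumentParser(prog='upload_dynamic_routing_ecephys')  # prog name assumed
parser.add_argument('--sync', dest="adjust_ephys_timestamps", action='store_true',
                    help="Adjust ephys timestamps.npy prior to upload using sync data (if available)")

args = parser.parse_args(['--sync'])
print(vars(args))   # {'adjust_ephys_timestamps': True}; omitting --sync leaves it False

# main() passes vars(args) straight through as **kwargs to
# write_metadata_and_upload, so the flag now controls the behaviour that
# 0.3.2 hard-coded to False.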
np_codeocean/utils.py
CHANGED
@@ -307,6 +307,7 @@ def write_upload_csv(
 def get_job_models_from_csv(
     path: pathlib.Path,
     ephys_slurm_settings: aind_slurm_rest.models.V0036JobProperties = DEFAULT_EPHYS_SLURM_SETTINGS,
+    check_timestamps: bool = True,  # default in transfer service is True: checks timestamps have been corrected via flag file
     user_email: str = HPC_UPLOAD_JOB_EMAIL,
     **extra_BasicUploadJobConfigs_params: Any,
 ) -> tuple[aind_data_transfer_models.core.BasicUploadJobConfigs, ...]:
@@ -317,15 +318,18 @@ def get_job_models_from_csv(
     models = []
     for job in jobs.copy():
         modalities = []
+        if 'modalities' in extra_BasicUploadJobConfigs_params:
+            raise ValueError('modalities should not be passed as a parameter in extra_BasicUploadJobConfigs_params')
         for modality_column in (k for k in job.keys() if k.startswith('modality') and ".source" not in k):
             modality_name = job[modality_column]
             modalities.append(
                 aind_data_transfer_models.core.ModalityConfigs(
                     modality=modality_name,
                     source=job[f"{modality_column}.source"],
-                    slurm_settings
-
-                )
+                    slurm_settings=ephys_slurm_settings if modality_name == 'ecephys' else None,
+                    job_settings={'check_timestamps': False} if modality_name == 'ecephys' and not check_timestamps else None,
+                ),
+            )
         for k in (k for k in job.copy().keys() if k.startswith('modality')):
             del job[k]
         for k, v in job.items():
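A hypothetical call sketch for the updated `get_job_models_from_csv`, showing how `check_timestamps=False` translates into `job_settings={'check_timestamps': False}` on ecephys modalities (this is what `upload_session` above does by passing `check_timestamps=timestamps_adjusted`). The csv path and email address are placeholders; the keyword names are taken from the diff.

import pathlib

from np_codeocean import utils

job_models = utils.get_job_models_from_csv(
    pathlib.Path('path/to/upload/job/upload.csv'),  # placeholder path to a generated upload.csv
    check_timestamps=False,  # ecephys modalities get job_settings={'check_timestamps': False}
    # Any further keywords are forwarded to BasicUploadJobConfigs; 'modalities'
    # is rejected because it is built from the modality* columns of the csv.
)
utils.put_jobs_for_hpc_upload(
    job_models,
    upload_service_url=utils.DEV_SERVICE,  # or utils.AIND_DATA_TRANSFER_SERVICE for production
    user_email='someone@alleninstitute.org',  # placeholder
    dry_run=True,
)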
{np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: np_codeocean
-Version: 0.3.2
+Version: 0.3.4
 Summary: Tools for uploading and interacting with Mindscope Neuropixels experiments on Code Ocean
 Author-Email: Ben Hardcastle <ben.hardcastle@alleninstitute.org>, Chris Mochizuki <chrism@alleninstitute.org>, Arjun Sridhar <arjun.sridhar@alleninstitute.org>
 License: MIT
{np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
-np_codeocean-0.3.
-np_codeocean-0.3.
-np_codeocean-0.3.
+np_codeocean-0.3.4.dist-info/METADATA,sha256=cgyWIHnxtfuDzsm7FSp7DMjKCChhq95V7-QXYplZRW8,3159
+np_codeocean-0.3.4.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+np_codeocean-0.3.4.dist-info/entry_points.txt,sha256=p32aRkIjrFa4KtUbq2E6ZMYBVNRUw3U8ZIarvwNkK1E,250
 np_codeocean/__init__.py,sha256=ED7YOu-3AIQIEML9jPT9bQ690lfhYZNiOB4QhnJ8r8U,76
 np_codeocean/metadata/__init__.py,sha256=jayhwvrb4fZYODtiVkJBXEYYkLJ_5SNNQ5WW21bsx2s,72
 np_codeocean/metadata/common.py,sha256=ZOAe8MJJwNtBt9Voqm4MsA-xZCOrzCKkc_rku5A2yNA,584
@@ -14,7 +14,7 @@ np_codeocean/metadata/rigs.py,sha256=Xq3BWvz0pxbkVA1UN9EFprbHA4hYaiSAFk82pXUfdLQ
 np_codeocean/metadata/storage.py,sha256=b7vivpO1E42xoqfLdomAOU-FER4VB3-cUmjTwUQNxNE,2123
 np_codeocean/metadata/update.py,sha256=x2hploIM7AXmX5DaapaNsojJ7E8s_UmTDfVuGRWiyHY,5211
 np_codeocean/metadata/utils.py,sha256=X6CVxgemeCaXiGnt35AHqvSCz0p5keJHHGkIgvYDq5E,4318
-np_codeocean/np_session_utils.py,sha256=
+np_codeocean/np_session_utils.py,sha256=yfLiTUElkgA7M8EFMIu7T6sTP-gB8X0ZVaGHHngjVwg,17318
 np_codeocean/scripts/.mypy_cache/.gitignore,sha256=_7s5RkPBEvI1a7XzGE6V4z_XMhRokvTu6tEml1Vw_zk,36
 np_codeocean/scripts/.mypy_cache/3.11/@plugins_snapshot.json,sha256=RBNvo1WzZ4oRRq0W9-hknpT7T8If536DEMBg9hyq_4o,2
 np_codeocean/scripts/.mypy_cache/3.11/IPython/__init__.data.json,sha256=XIcapuTlKIvgnYu5wffxS8aovyHcg02vUtjEfA_eBDw,5427
@@ -3395,7 +3395,7 @@ np_codeocean/scripts/.mypy_cache/3.11/zoneinfo/__init__.data.json,sha256=g3oqeS6
 np_codeocean/scripts/.mypy_cache/3.11/zoneinfo/__init__.meta.json,sha256=AvLhG4cE-92NWoQ-6ZIX546e4YT57YVtwE2rTyRA7rg,1826
 np_codeocean/scripts/.mypy_cache/CACHEDIR.TAG,sha256=cOu1T6y3b32rMFgmHu-0OiMV0o0Q_PkKhVOMBacrjR4,193
 np_codeocean/scripts/upload_dynamic_routing_behavior.py,sha256=A7FapFuVECdPZeRmeaOtH708DzWh44IkXHTMHSVzHys,17169
-np_codeocean/scripts/upload_dynamic_routing_ecephys.py,sha256=
+np_codeocean/scripts/upload_dynamic_routing_ecephys.py,sha256=6dzumt57OrDl4hsqqEY2EZbGzuxX-jZdbsqVBBfw82o,10528
 np_codeocean/scripts/upload_split_recordings_example.py,sha256=1_aqoBxAkB_VpRKYqyPsEQBDGvgyAHXAkIJA0ZT2Vb0,1490
-np_codeocean/utils.py,sha256=
-np_codeocean-0.3.
+np_codeocean/utils.py,sha256=xOlDmZhF0m57t0bOS6S9niIakg2EDuz3gjvKZIfUPDk,21075
+np_codeocean-0.3.4.dist-info/RECORD,,
{np_codeocean-0.3.2.dist-info → np_codeocean-0.3.4.dist-info}/entry_points.txt
File without changes