np_codeocean 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
np_codeocean/utils.py CHANGED
@@ -1,94 +1,406 @@
- from __future__ import annotations
-
- import configparser
- import json
- import os
- import pathlib
- from typing import Literal
-
- import np_config
-
- CONFIG = np_config.fetch('/projects/np_codeocean')
- """Config for this project"""
-
- AWS_CONFIG: dict[Literal['aws_access_key_id', 'aws_secret_access_key'], str] = np_config.fetch('/projects/np_codeocean/aws')['config']
- """Config for connecting to AWS/S3 via awscli/boto3"""
-
- AWS_CREDENTIALS: dict[Literal['domain', 'token'], str] = np_config.fetch('/projects/np_codeocean/aws')['credentials']
- """Config for connecting to AWS/S3 via awscli/boto3"""
-
- CODEOCEAN_CONFIG: dict[Literal['region'], str] = np_config.fetch('/projects/np_codeocean/codeocean')['credentials']
- """Config for connecting to CodeOcean via http API"""
-
-
- def get_home() -> pathlib.Path:
-     if os.name == 'nt':
-         return pathlib.Path(os.environ['USERPROFILE'])
-     return pathlib.Path(os.environ['HOME'])
-
- def get_aws_files() -> dict[Literal['config', 'credentials'], pathlib.Path]:
-     return {
-         'config': get_home() / '.aws' / 'config',
-         'credentials': get_home() / '.aws' / 'credentials',
-     }
-
- def get_codeocean_files() -> dict[Literal['credentials'], pathlib.Path]:
-     return {
-         'credentials': get_home() / '.codeocean' / 'credentials.json',
-     }
-
- def verify_ini_config(path: pathlib.Path, contents: dict, profile: str = 'default') -> None:
-     config = configparser.ConfigParser()
-     if path.exists():
-         config.read(path)
-     if not all(k in config[profile] for k in contents):
-         raise ValueError(f'Profile {profile} in {path} exists but is missing some keys required for codeocean or s3 access.')
-
- def write_or_verify_ini_config(path: pathlib.Path, contents: dict, profile: str = 'default') -> None:
-     config = configparser.ConfigParser()
-     if path.exists():
-         config.read(path)
-     try:
-         verify_ini_config(path, contents, profile)
-     except ValueError:
-         pass
-     else:
-         return
-     config[profile] = contents
-     path.parent.mkdir(parents=True, exist_ok=True)
-     path.touch(exist_ok=True)
-     with path.open('w') as f:
-         config.write(f)
-     verify_ini_config(path, contents, profile)
-
- def verify_json_config(path: pathlib.Path, contents: dict) -> None:
-     config = json.loads(path.read_text())
-     if not all(k in config for k in contents):
-         raise ValueError(f'{path} exists but is missing some keys required for codeocean or s3 access.')
-
- def write_or_verify_json_config(path: pathlib.Path, contents: dict) -> None:
-     if path.exists():
-         try:
-             verify_json_config(path, contents)
-         except ValueError:
-             contents = np_config.merge(json.loads(path.read_text()), contents)
-         else:
-             return
-     path.parent.mkdir(parents=True, exist_ok=True)
-     path.touch(exist_ok=True)
-     path.write_text(json.dumps(contents, indent=4))
-
- def ensure_credentials() -> None:
-     for file, contents in (
-         (get_aws_files()['config'], AWS_CONFIG),
-         (get_aws_files()['credentials'], AWS_CREDENTIALS),
-     ):
-         write_or_verify_ini_config(file, contents, profile='default')
-
-     for file, contents in (
-         (get_codeocean_files()['credentials'], CODEOCEAN_CONFIG),
-     ):
-         write_or_verify_json_config(file, contents)
-
- if __name__ == '__main__':
-     ensure_credentials()
+ from __future__ import annotations
+
+ import contextlib
+ import csv
+ import datetime
+ import functools
+ import json
+ import logging
+ import os
+ import pathlib
+ from typing import Any, Generator, Iterable, Literal
+ import typing_extensions
+
+ import aind_data_transfer_models.core
+ import aind_slurm_rest.models
+ import np_config
+ import np_tools
+ import npc_session
+ import numpy as np
+ import polars as pl
+ import requests
+
+ logger = logging.getLogger(__name__)
+
+ AINDPlatform = Literal['ecephys', 'behavior']
+
+ AIND_DATA_TRANSFER_SERVICE = "http://aind-data-transfer-service"
+ DEV_SERVICE = "http://aind-data-transfer-service-dev"
+ HPC_UPLOAD_JOB_EMAIL = "ben.hardcastle@alleninstitute.org"
+ ACQ_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+ AIND_METADATA_NAMES: tuple[str, ...] = ('session', 'data_description', 'procedures', 'processing', 'rig', 'subject')
+
+ DEFAULT_EPHYS_SLURM_SETTINGS = aind_slurm_rest.models.V0036JobProperties(
+     environment=dict(), # JonY: set this to an empty dictionary
+     time_limit = 15 * 60,
+     minimum_cpus_per_node=12, # 6 probes * (lfp + ap)
+ )
+ """Increased timelimit and cpus for running ephys compression on the hpc"""
+
+ @functools.cache
+ def get_project_config() -> dict[str, Any]:
+     """Config for this project"""
+     return np_config.fetch('/projects/np_codeocean')
+
+ def set_npc_lims_credentials() -> None:
+     creds = np_config.fetch('/projects/np_codeocean/npc_lims')
+     for k, v in creds.items():
+         os.environ.setdefault(k, v)
+
+ def get_home() -> pathlib.Path:
+     if os.name == 'nt':
+         return pathlib.Path(os.environ['USERPROFILE'])
+     return pathlib.Path(os.environ['HOME'])
+
+ def is_behavior_video_file(path: pathlib.Path) -> bool:
+     if path.is_dir() or path.suffix not in ('.mp4', '.avi', '.json'):
+         return False
+     with contextlib.suppress(ValueError):
+         _ = npc_session.extract_mvr_camera_name(path.as_posix())
+         return True
+     return False
+
+ def is_surface_channel_recording(path_name: str) -> bool:
+     """
+     >>> import np_session
+     >>> session = np_session.Session("//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_690706_20231129_surface_channels")
+     >>> is_surface_channel_recording(session.npexp_path.as_posix())
+     True
+     """
+     return 'surface_channels' in path_name.lower()
+
+ def cleanup_ephys_symlinks(toplevel_dir: pathlib.Path) -> None:
+     """After creating symlinks to the ephys data, run this to make any necessary
+     modifications prior to upload.
+
+     Provided dir path should be a directory containing all ephys data in
+     subfolders (e.g. directory containing "Record Node 10x" folders)
+
+     Only deletes symlinks or writes new files in place of symlinks - does not
+     modify original data.
+
+     Rules:
+     - if any continuous.dat files are unreadable: remove them and their containing folders
+     - if any probes were recorded on multiple record nodes: just keep the first
+     - if continuous.dat files are missing (ie. excluded because probes weren't
+       inserted, or we removed symlinks in previous steps): update metadata files
+     """
+     remove_unreadable_ephys_data(toplevel_dir)
+     remove_duplicate_ephys_data(toplevel_dir)
+     cleanup_ephys_metadata(toplevel_dir)
+
+ def remove_unreadable_ephys_data(toplevel_dir: pathlib.Path) -> None:
+
+     for continuous_dir in ephys_continuous_dir_generator(toplevel_dir):
+         events_dir = continuous_dir.parent.parent / 'events' / continuous_dir.name / 'TTL'
+         filenames = ('continuous.dat', 'timestamps.npy', 'sample_numbers.npy')
+         dirs = (continuous_dir, ) + ((events_dir,) if events_dir.exists() else ())
+         mark_for_removal = False
+         for d in dirs:
+             if not d.exists():
+                 continue
+             for filename in filenames:
+                 if filename == 'continuous.dat' and d.name == 'TTL':
+                     continue # no continuous.dat expected in TTL events
+                 file = d / filename
+                 if not (file.is_symlink() or file.exists()):
+                     logger.warning(f'Critical file not found {file}, insufficient data for processing')
+                     mark_for_removal = True
+                     break
+                 try:
+                     data = np.memmap(decode_symlink_path(file), dtype="int16" if 'timestamps' not in file.name else "float64", mode="r")
+                 except Exception as exc:
+                     logger.warning(f'Failed to read {file}: {exc!r}')
+                     mark_for_removal = True
+                     break
+                 if data.size == 0:
+                     logger.warning(f'Empty file {file}')
+                     mark_for_removal = True
+                     break
+                 logger.debug(f'Found readable, non-empty data in {file}')
+             if mark_for_removal:
+                 break
+         if mark_for_removal:
+             logger.warning(f'Removing {continuous_dir} and its contents')
+             remove_folder_of_symlinks(continuous_dir)
+             logger.warning(f'Removing {events_dir.parent} and its contents')
+             remove_folder_of_symlinks(events_dir.parent)
+
+ def remove_duplicate_ephys_data(toplevel_dir: pathlib.Path) -> None:
+     previous_recording_name = ''
+     for continuous_dir in ephys_continuous_dir_generator(toplevel_dir):
+         recording_name = continuous_dir.parent.parent.name
+         if recording_name != previous_recording_name:
+             # reset probes list for each new recording
+             probes = []
+         try:
+             probe = npc_session.ProbeRecord(continuous_dir.name)
+         except ValueError:
+             continue
+         suffix = continuous_dir.name.split('-')[-1]
+         assert suffix in ('AP', 'LFP')
+         recording_name = f"{probe}-{suffix}"
+         if recording_name in probes:
+             logger.info(f'Duplicate {recording_name = } found in {continuous_dir.parent.parent} - removing')
+             remove_folder_of_symlinks(continuous_dir)
+         else:
+             probes.append(recording_name)
+
+ def remove_folder_of_symlinks(folder: pathlib.Path) -> None:
+     """Recursive deletion of all files in dir tree, with a check that each is a
+     symlink."""
+     for path in folder.rglob('*'):
+         if path.is_dir():
+             remove_folder_of_symlinks(path)
+         else:
+             assert path.is_symlink(), f'Expected {path} to be a symlink'
+             path.unlink(missing_ok=True)
+     with contextlib.suppress(FileNotFoundError):
+         folder.rmdir()
+
+ def ephys_recording_dir_generator(toplevel_dir: pathlib.Path) -> Generator[pathlib.Path, None, None]:
+     for recording_dir in toplevel_dir.rglob('recording[0-9]*'):
+         if recording_dir.is_dir():
+             yield recording_dir
+
+ def ephys_continuous_dir_generator(toplevel_dir: pathlib.Path) -> Generator[pathlib.Path, None, None]:
+     for recording_dir in ephys_recording_dir_generator(toplevel_dir):
+         parent = recording_dir / 'continuous'
+         if not parent.exists():
+             continue
+         for continuous_dir in parent.iterdir():
+             if continuous_dir.is_dir():
+                 yield continuous_dir
+
+ def ephys_structure_oebin_generator(toplevel_dir: pathlib.Path) -> Generator[pathlib.Path, None, None]:
+     for recording_dir in ephys_recording_dir_generator(toplevel_dir):
+         oebin_path = recording_dir / 'structure.oebin'
+         if not (oebin_path.is_symlink() or oebin_path.exists()):
+             # symlinks that are created for the hpc use posix paths, and aren't
+             # readable on windows, so .exists() returns False: use .is_symlink() instead
+             logger.warning(f'No structure.oebin found in {recording_dir}')
+             continue
+         yield oebin_path
+
+ def cleanup_ephys_metadata(toplevel_dir: pathlib.Path) -> None:
+     logger.debug('Checking structure.oebin for missing folders...')
+     for oebin_path in ephys_structure_oebin_generator(toplevel_dir):
+         oebin_obj = np_tools.read_oebin(decode_symlink_path(oebin_path))
+         logger.debug(f'Checking {oebin_path} against actual folders...')
+         any_removed = False
+         for subdir_name in ('events', 'continuous'):
+             subdir = oebin_path.parent / subdir_name
+             # iterate over copy of list so as to not disrupt iteration when elements are removed
+             for device in [device for device in oebin_obj[subdir_name]]:
+                 if not (subdir / device['folder_name']).exists():
+                     logger.info(f'{device["folder_name"]} not found in {subdir}, removing from structure.oebin')
+                     oebin_obj[subdir_name].remove(device)
+                     any_removed = True
+         if any_removed:
+             oebin_path.unlink()
+             oebin_path.write_text(json.dumps(oebin_obj, indent=4))
+             logger.debug('Overwrote symlink to structure.oebin with corrected structure.oebin')
+
+ def decode_symlink_path(oebin_path: pathlib.Path) -> pathlib.Path:
+     if not oebin_path.is_symlink():
+         return oebin_path
+     return np_config.normalize_path(oebin_path.readlink())
+
+ def is_csv_in_hpc_upload_queue(csv_path: pathlib.Path, upload_service_url: str = AIND_DATA_TRANSFER_SERVICE) -> bool:
+     """Check if an upload job has been submitted to the hpc upload queue.
+
+     - currently assumes one job per csv
+     - does not check status (job may be FINISHED rather than RUNNING)
+
+     >>> is_csv_in_hpc_upload_queue("//allen/programs/mindscope/workgroups/np-exp/codeocean/DRpilot_664851_20231114/upload.csv")
+     False
+     """
+     # get subject-id, acq-datetime from csv
+     df = pl.read_csv(csv_path, eol_char='\r')
+     for col in df.get_columns():
+         if col.name.startswith('subject') and col.name.endswith('id'):
+             subject = npc_session.SubjectRecord(col[0])
+             continue
+         if col.name.startswith('acq') and 'datetime' in col.name.lower():
+             dt = npc_session.DatetimeRecord(col[0])
+             continue
+         if col.name == 'platform':
+             platform = col[0]
+             continue
+     return is_session_in_hpc_queue(subject=subject, acq_datetime=dt.dt, platform=platform, upload_service_url=upload_service_url)
+
+ def is_session_in_hpc_queue(subject: int | str, acq_datetime: str | datetime.datetime, platform: str | None = None, upload_service_url: str = AIND_DATA_TRANSFER_SERVICE) -> bool:
+     """
+     >>> is_session_in_hpc_queue(366122, datetime.datetime(2023, 11, 14, 0, 0, 0))
+     False
+     >>> is_session_in_hpc_queue(702136, datetime.datetime(2024, 3, 4, 13, 21, 35))
+     True
+     """
+     if not isinstance(acq_datetime, datetime.datetime):
+         acq_datetime = datetime.datetime.strptime(acq_datetime, ACQ_DATETIME_FORMAT)
+     partial_session_id = f"{subject}_{acq_datetime.strftime(ACQ_DATETIME_FORMAT).replace(' ', '_').replace(':', '-')}"
+     if platform:
+         partial_session_id = f"{platform}_{partial_session_id}"
+
+     jobs_response = requests.get(f"{upload_service_url}/jobs")
+     jobs_response.raise_for_status()
+     return partial_session_id in jobs_response.content.decode()
+
+ def is_job_in_hpc_upload_queue(job: aind_data_transfer_models.core.BasicUploadJobConfigs, upload_service_url: str = AIND_DATA_TRANSFER_SERVICE) -> bool:
+     return is_session_in_hpc_queue(job.subject_id, job.acq_datetime, job.platform, upload_service_url)
+
+ def write_upload_csv(
+     content: dict[str, Any],
+     output_path: pathlib.Path,
+ ) -> pathlib.Path:
+     logger.info(f'Creating upload job file {output_path}')
+     with open(output_path, 'w') as f:
+         w = csv.writer(f, lineterminator='')
+         w.writerow(content.keys())
+         w.writerow('\n')
+         w.writerow(content.values())
+     return output_path
+
+ def get_job_models_from_csv(
+     path: pathlib.Path,
+     ephys_slurm_settings: aind_slurm_rest.models.V0036JobProperties = DEFAULT_EPHYS_SLURM_SETTINGS,
+     user_email: str = HPC_UPLOAD_JOB_EMAIL,
+ ) -> tuple[aind_data_transfer_models.core.BasicUploadJobConfigs, ...]:
+     jobs = pl.read_csv(path, eol_char='\r').with_columns(
+         pl.col('subject-id').cast(str),
+     ).to_dicts()
+     jobs = jobs
+     models = []
+     for job in jobs.copy():
+         modalities = []
+         for modality_column in (k for k in job.keys() if k.startswith('modality') and ".source" not in k):
+             modality_name = job[modality_column]
+             modalities.append(
+                 aind_data_transfer_models.core.ModalityConfigs(
+                     modality=modality_name,
+                     source=job[f"{modality_column}.source"],
+                     slurm_settings = ephys_slurm_settings if modality_name == 'ecephys' else None,
+                 ),
+             )
+         for k in (k for k in job.copy().keys() if k.startswith('modality')):
+             del job[k]
+         for k, v in job.items():
+             if isinstance(v, str) and '\n' in v:
+                 job[k] = v.replace('\n', '')
+         models.append(
+             aind_data_transfer_models.core.BasicUploadJobConfigs(
+                 **{k.replace('-', '_'): v for k,v in job.items()},
+                 modalities=modalities,
+                 user_email=user_email,
+             )
+         )
+     return tuple(models)
+
+ def put_jobs_for_hpc_upload(
+     upload_jobs: aind_data_transfer_models.core.BasicUploadJobConfigs | Iterable[aind_data_transfer_models.core.BasicUploadJobConfigs],
+     upload_service_url: str = AIND_DATA_TRANSFER_SERVICE,
+     user_email: str = HPC_UPLOAD_JOB_EMAIL,
+     email_notification_types: Iterable[str | aind_data_transfer_models.core.EmailNotificationType] = ('fail',),
+     dry_run: bool = False,
+     save_path: pathlib.Path | None = None,
+     **extra_model_kwargs: Any,
+ ) -> None:
+     """Submit one or more jobs to the aind-data-transfer-service, for
+     upload to S3 on the hpc.
+
+     - accepts one or more aind_data_schema BasicUploadJobConfigs models
+     - assembles a SubmitJobRequest model
+     - excludes jobs for sessions that are already in the upload queue
+     - accepts additional parameters for SubmitHpcJobRequest as kwargs
+     - submits json via http request
+     - optionally saves the json file as a record
+     """
+     if not isinstance(upload_jobs, Iterable):
+         upload_jobs = (upload_jobs, )
+     submit_request = aind_data_transfer_models.core.SubmitJobRequest(
+         upload_jobs=[job for job in upload_jobs if not is_job_in_hpc_upload_queue(job)],
+         user_email=user_email,
+         email_notification_types=email_notification_types,
+         **extra_model_kwargs,
+     )
+     post_request_content = json.loads(
+         submit_request.model_dump_json(round_trip=True, exclude_none=True)
+     ) #! round_trip required for s3 bucket suffix to work correctly
+     if save_path:
+         save_path.write_text(submit_request.model_dump_json(round_trip=True, indent=4), errors='ignore')
+     if dry_run:
+         logger.warning(f'Dry run: not submitting {len(upload_jobs)} upload job(s) to {upload_service_url}')
+         return
+     post_json_response: requests.Response = requests.post(
+         url=f"{upload_service_url}/api/v1/submit_jobs",
+         json=post_request_content,
+     )
+     logger.info(f"Submitted {len(upload_jobs)} upload job(s) to {upload_service_url}")
+     post_json_response.raise_for_status()
+
+ @typing_extensions.deprecated("Uses old, pre-v1 endpoints: use put_jobs_for_hpc_upload in combination with get_job_models_from_csv")
+ def put_csv_for_hpc_upload(
+     csv_path: pathlib.Path,
+     upload_service_url: str = AIND_DATA_TRANSFER_SERVICE,
+     hpc_upload_job_email: str = HPC_UPLOAD_JOB_EMAIL,
+     dry_run: bool = False,
+ ) -> None:
+     """Submit a single job upload csv to the aind-data-transfer-service, for
+     upload to S3 on the hpc.
+
+     - gets validated version of csv
+     - checks session is not already being uploaded
+     - submits csv via http request
+     """
+     def _raise_for_status(response: requests.Response) -> None:
+         """pydantic validation errors are returned as strings that can be eval'd
+         to get the real error class + message."""
+         if response.status_code != 200:
+             try:
+                 response.json()['data']['errors']
+             except (KeyError, IndexError, requests.exceptions.JSONDecodeError, SyntaxError) as exc1:
+                 try:
+                     response.raise_for_status()
+                 except requests.exceptions.HTTPError as exc2:
+                     raise exc2 from exc1
+
+     with open(csv_path, 'rb') as f:
+         validate_csv_response = requests.post(
+             url=f"{upload_service_url}/api/validate_csv",
+             files=dict(file=f),
+         )
+     _raise_for_status(validate_csv_response)
+     logger.debug(f"Validated response: {validate_csv_response.json()}")
+     if is_csv_in_hpc_upload_queue(csv_path, upload_service_url):
+         logger.warning(f"Job already submitted for {csv_path}")
+         return
+     if dry_run:
+         logger.info(f'Dry run: not submitting {csv_path} to hpc upload queue at {upload_service_url}.')
+         return
+     post_csv_response = requests.post(
+         url=f"{upload_service_url}/api/submit_hpc_jobs",
+         json=dict(
+             jobs=[
+                 dict(
+                     hpc_settings=json.dumps({"time_limit": 60 * 15, "mail_user": hpc_upload_job_email}),
+                     upload_job_settings=validate_csv_response.json()["data"]["jobs"][0],
+                     script="",
+                 )
+             ]
+         ),
+     )
+     logger.info(f"Submitted {csv_path} to hpc upload queue at {upload_service_url}")
+     _raise_for_status(post_csv_response)
+
+
+ def ensure_posix(path: str | pathlib.Path) -> str:
+     posix = pathlib.Path(path).as_posix()
+     if posix.startswith('//'):
+         posix = posix[1:]
+     return posix
+
+
+ if __name__ == '__main__':
+     import doctest
+     doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL)
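
Taken together, the new utils.py drops the credential-file helpers and instead builds an upload pipeline around aind-data-transfer-models: a job csv is turned into BasicUploadJobConfigs models, the upload queue is checked, and a SubmitJobRequest is posted to the aind-data-transfer-service. Below is a minimal sketch of how these helpers compose, using only names defined in the file above; the session directory and csv path are hypothetical, and dry_run=True means nothing is actually submitted.

    import pathlib

    from np_codeocean import utils

    # Hypothetical session folder and upload csv - paths are illustrative only.
    session_dir = pathlib.Path('//allen/programs/mindscope/workgroups/np-exp/codeocean/example_session')
    csv_path = session_dir / 'upload.csv'

    # Tidy symlinked ephys data before upload: removes unreadable or duplicate
    # recordings and rewrites structure.oebin to match the folders that remain.
    utils.cleanup_ephys_symlinks(session_dir)

    # Build job models from the csv and submit them; jobs already in the hpc
    # queue are filtered out, and dry_run=True logs the request and skips the
    # final POST to the service.
    jobs = utils.get_job_models_from_csv(csv_path)
    utils.put_jobs_for_hpc_upload(jobs, upload_service_url=utils.DEV_SERVICE, dry_run=True)
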
np_codeocean-0.2.1.dist-info/METADATA CHANGED
@@ -1,8 +1,8 @@
  Metadata-Version: 2.1
  Name: np_codeocean
- Version: 0.2.0
+ Version: 0.2.1
  Summary: Tools for uploading and interacting with Mindscope Neuropixels experiments on Code Ocean
- Author-Email: Ben Hardcastle <ben.hardcastle@alleninstitute.org>
+ Author-Email: Ben Hardcastle <ben.hardcastle@alleninstitute.org>, Chris Mochizuki <chrism@alleninstitute.org>, Arjun Sridhar <arjun.sridhar@alleninstitute.org>
  License: MIT
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
@@ -13,16 +13,22 @@ Classifier: Operating System :: Microsoft :: Windows
  Classifier: Operating System :: POSIX :: Linux
  Project-URL: Source, https://github.com/AllenInstitute/np_codeocean
  Project-URL: Issues, https://github.com/AllenInstitute/np_codeocean/issues
- Requires-Python: >=3.9
- Requires-Dist: np_session>=0.6.4
- Requires-Dist: np-tools>=0.1.21
- Requires-Dist: np-config>=0.4.24
+ Requires-Python: >=3.10
+ Requires-Dist: np_session>=0.6.44
+ Requires-Dist: np-tools>=0.1.23
+ Requires-Dist: np-config>=0.4.33
  Requires-Dist: requests>=2.31.0
  Requires-Dist: npc-session>=0.1.34
  Requires-Dist: polars>=0.20.16
+ Requires-Dist: aind-data-transfer-models>=0.5.1
+ Requires-Dist: npc-lims>=0.1.168
  Requires-Dist: bump>=1.3.2; extra == "dev"
  Requires-Dist: pdm>=2.4.9; extra == "dev"
+ Requires-Dist: np-aind-metadata==0.1.16; extra == "dynamic-routing-metadata"
+ Requires-Dist: npc-lims>=0.1.154; extra == "dynamic-routing-metadata"
+ Requires-Dist: npc-sessions>=0.0.226; extra == "dynamic-routing-metadata"
  Provides-Extra: dev
+ Provides-Extra: dynamic-routing-metadata
  Description-Content-Type: text/markdown

  # np_codeocean
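
In packaging terms, 0.2.1 raises the minimum Python to 3.10, bumps the np_session, np-tools and np-config floors, adds aind-data-transfer-models and npc-lims as required dependencies, and introduces a "dynamic-routing-metadata" optional extra. Assuming standard pip extras syntax, that optional group would be installed with: pip install "np_codeocean[dynamic-routing-metadata]"
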
np_codeocean-0.2.1.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
+ np_codeocean-0.2.1.dist-info/METADATA,sha256=Ue-GZ2t2lLUM_4_nKBbmGifYOjgsX02pIm07A1PFM4U,2928
+ np_codeocean-0.2.1.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
+ np_codeocean-0.2.1.dist-info/entry_points.txt,sha256=6v5bsd0v058zsOBJ7GAWC5GpDiEEOt8tDXarddpZhvk,235
+ np_codeocean/__init__.py,sha256=ED7YOu-3AIQIEML9jPT9bQ690lfhYZNiOB4QhnJ8r8U,76
+ np_codeocean/np_session_utils.py,sha256=a4PLmcvf7LIrtrCpu3mOItkuTrkOf5Q5kI7lM5ucNQA,15515
+ np_codeocean/scripts/fix_ephys_data_on_s3.py,sha256=c5jHZmFLjMCRGb3YSmefCZRO_telZ7dB-mXGIG5ncYk,560
+ np_codeocean/scripts/upload_dynamic_routing_behavior.py,sha256=FSEV_0hlkYaw-xUK7aIZp2wdes_XMxtsAlGOdHtAEJA,17114
+ np_codeocean/scripts/upload_dynamic_routing_ecephys.py,sha256=R6MqVylhxXsbCUqUvTtwSXTUK9B4L6y_Qzup8eAREio,8311
+ np_codeocean/scripts/upload_ethan_analysis_files.py,sha256=MaJRVk0CfzEMkwMmmXRmnRCqYpo6mGNWtROfZLavgGw,1019
+ np_codeocean/scripts/upload_split_recordings_example.py,sha256=1_aqoBxAkB_VpRKYqyPsEQBDGvgyAHXAkIJA0ZT2Vb0,1490
+ np_codeocean/utils.py,sha256=82Gvyd0mNTpw4603tZrIcGqDnmG36Rv9eUubz7bz2rA,18248
+ np_codeocean-0.2.1.dist-info/RECORD,,
np_codeocean-0.2.1.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: pdm-backend (2.1.8)
+ Generator: pdm-backend (2.3.3)
  Root-Is-Purelib: true
  Tag: py3-none-any
np_codeocean-0.2.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,5 @@
+ [console_scripts]
+ upload_dr_behavior = np_codeocean.scripts.upload_dynamic_routing_behavior:main
+ upload_dr_ecephys = np_codeocean.scripts.upload_dynamic_routing_ecephys:main
+ upload_sessions = np_codeocean.scripts.upload_sessions:main
+
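
The new entry_points.txt exposes the upload scripts as console commands. A generated console script simply imports the referenced callable and invokes it, so running upload_dr_behavior is roughly equivalent to the sketch below (assuming, as the entry point implies, that the script's main() takes no arguments):

    # Rough equivalent of the "upload_dr_behavior" console script installed by this wheel.
    from np_codeocean.scripts.upload_dynamic_routing_behavior import main

    main()
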