np_codeocean 0.3.5__py3-none-any.whl → 0.3.6__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1,217 +1,279 @@
- import argparse
- import contextlib
- import datetime
- import logging
- import pathlib
- import time
- import typing
- import warnings
-
- import np_config
- import npc_session
- import npc_sessions
- from aind_data_schema.core.rig import Rig
- from aind_data_schema.core.session import Session as AindSession
- import aind_codeocean_pipeline_monitor.models
- import codeocean.capsule
- import codeocean.data_asset
- import codeocean.computation
- import np_codeocean
- from np_codeocean.metadata import core as metadata_core
- from aind_data_schema_models.modalities import Modality
-
- # Disable divide by zero or NaN warnings
- warnings.filterwarnings("ignore", category=RuntimeWarning)
-
- logging.basicConfig(
-     filename=f"//allen/programs/mindscope/workgroups/np-exp/codeocean-logs/{pathlib.Path(__file__).stem}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log",
-     level=logging.DEBUG,
-     format="%(asctime)s | %(name)s | %(levelname)s | %(message)s",
- datefmt="%Y-%d-%m %H:%M:%S",
- )
- logger = logging.getLogger(__name__)
-
- CONFIG = np_config.fetch('/rigs/room_numbers')
-
-
- def reformat_rig_model_rig_id(rig_id: str, modification_date: datetime.date) -> str:
-     rig_record = npc_session.RigRecord(rig_id)
-     if not rig_record.is_neuro_pixels_rig:
-         raise Exception(
-             f"Rig is not a neuropixels rig. Only neuropixels rigs are supported. rig_id={rig_id}")
-     room_number = CONFIG.get(rig_record, "UNKNOWN")
-     return rig_record.as_aind_data_schema_rig_id(str(room_number), modification_date).replace('.','')
-
-
- def extract_modification_date(rig: Rig) -> datetime.date:
-     _, _, date_str = rig.rig_id.split("_")
-     if len(date_str) == 6:
-         return datetime.datetime.strptime(date_str, "%y%m%d").date()
-     elif len(date_str) == 8:
-         return datetime.datetime.strptime(date_str, "%Y%m%d").date()
-     else:
-         raise Exception(f"Unsupported date format: {date_str}")
-
-
- def add_metadata(
-     session_directory: str | pathlib.Path,
-     session_datetime: datetime.datetime,
-     rig_storage_directory: pathlib.Path,
-     ignore_errors: bool = True,
-     skip_existing: bool = True,
- ) -> None:
- """Adds rig and sessions metadata to a session directory.
64
- """
-     normalized_session_dir = np_config.normalize_path(session_directory)
-     logger.debug(f"{normalized_session_dir = }")
-     logger.debug(f"{rig_storage_directory = }")
-     session_json = normalized_session_dir / "session.json"
-     if not skip_existing or not (session_json.is_symlink() or session_json.exists()):
-         logger.debug("Attempting to create session.json")
-         npc_sessions.DynamicRoutingSession(normalized_session_dir)._aind_session_metadata.write_standard_file(normalized_session_dir)
-         if session_json.exists():
-             logger.debug("Created session.json")
-         else:
-             raise FileNotFoundError("Failed to find created session.json, but no error occurred during creation: may be in unexpected location")
-     _ = AindSession.model_validate_json(session_json.read_text())
-
-     rig_model_path = normalized_session_dir / "rig.json"
-     if not skip_existing or not (rig_model_path.is_symlink() or rig_model_path.exists()):
-         if not (session_json.is_symlink() or session_json.exists()):
-             logger.warning("session.json is currently required for the rig.json to be created, so we can't continue with metadata creation")
-             return None
-         metadata_core.add_np_rig_to_session_dir(
-             normalized_session_dir,
-             session_datetime,
-             rig_storage_directory,
-         )
-         if rig_model_path.exists():
-             logger.debug("Created rig.json")
-         else:
-             raise FileNotFoundError("Failed to find created rig.json, but no error occurred during creation: may be in unexpected location")
-     if not (rig_model_path.is_symlink() or rig_model_path.exists()):
-         return None
-
-     rig_metadata = Rig.model_validate_json(rig_model_path.read_text())
-     modification_date = extract_modification_date(rig_metadata)
-     rig_metadata.rig_id = reformat_rig_model_rig_id(rig_metadata.rig_id, modification_date)
-     rig_metadata.write_standard_file(normalized_session_dir) # assumes this will work out to dest/rig.json
-     session_model_path = metadata_core.scrape_session_model_path(
-         normalized_session_dir,
-     )
-     metadata_core.update_session_from_rig(
-         session_model_path,
-         rig_model_path,
-         session_model_path,
-     )
-
-     return None
-
-
- def write_metadata_and_upload(
-     session_path_or_folder_name: str,
-     recording_dirs: typing.Iterable[str] | None = None,
-     force: bool = False,
-     dry_run: bool = False,
-     test: bool = False,
-     hpc_upload_job_email: str = np_codeocean.HPC_UPLOAD_JOB_EMAIL,
-     regenerate_metadata: bool = False,
-     regenerate_symlinks: bool = True,
-     adjust_ephys_timestamps: bool = False,
- ) -> None:
-     """Writes and updates aind-data-schema to the session directory
-     associated with the `session`. The aind-data-schema session model is
-     updated to reflect the `rig_id` of the rig model added to the session
-     directory.
-
-     Only handles ecephys platform uploads (i.e. sessions with a folder of data; not
-     behavior box sessions, which have a single hdf5 file only)
-     """
-     # session = np_session.Session(session) #! this doesn't work for surface_channels
-     session = np_codeocean.get_np_session(session_path_or_folder_name)
-
-     add_metadata(
-         session_directory=session.npexp_path,
-         session_datetime=(
-             session.start
-             if not np_codeocean.is_surface_channel_recording(session.npexp_path.name)
-             else np_codeocean.get_surface_channel_start_time(session)
-         ),
-         rig_storage_directory=pathlib.Path(np_codeocean.get_project_config()["rig_metadata_dir"]),
-         ignore_errors=True,
-         skip_existing=not regenerate_metadata,
-     )
-
-     # Optional codeocean_pipeline_settings as {modality_abbr: PipelineMonitorSettings}
-     # You can specify up to one pipeline conf per modality
-     # In the future, these can be stored in AWS param store as part of a "job_type"
-     codeocean_pipeline_settings = {
-         Modality.ECEPHYS.abbreviation: aind_codeocean_pipeline_monitor.models.PipelineMonitorSettings(
-             run_params=codeocean.computation.RunParams(
-                 capsule_id="287db808-74ce-4e44-b14b-fde1471eba45",
-                 data_assets=[
-                     codeocean.data_asset.DataAsset(
-                         name="",
-                         id="", # ID of new raw data asset will be inserted here by airflow
-                         mount="ecephys",
-                         created=time.time(),
-                         state=codeocean.data_asset.DataAssetState.Draft,
-                         type=codeocean.data_asset.DataAssetType.Dataset,
-                         last_used=time.time(),
-                     ),
-                 ],
-             ),
-             computation_polling_interval=15 * 60,
-             computation_timeout=48 * 3600,
-             capture_settings=aind_codeocean_pipeline_monitor.models.CaptureSettings(
-                 tags=[str(session.mouse), 'derived', 'ecephys'],
-                 custom_metadata={'data level': 'derived', 'experiment type': 'ecephys', 'subject id': str(session.mouse)},
-                 process_name_suffix="sorted",
-                 process_name_suffix_tz="US/Pacific",
-             ),
-         ),
-     }
-
-     return np_codeocean.upload_session(
-         session_path_or_folder_name,
-         recording_dirs=recording_dirs,
-         force=force,
-         dry_run=dry_run,
-         test=test,
-         hpc_upload_job_email=hpc_upload_job_email,
-         regenerate_symlinks=regenerate_symlinks,
-         adjust_ephys_timestamps=adjust_ephys_timestamps,
-         codeocean_pipeline_settings=codeocean_pipeline_settings,
-     )
-
- def parse_args() -> argparse.Namespace:
-     parser = argparse.ArgumentParser(description="Upload a session to CodeOcean")
-     parser.add_argument('session_path_or_folder_name', help="session ID (lims or np-exp foldername) or path to session folder")
-     parser.add_argument('recording_dirs', nargs='*', help="[optional] specific names of recording directories to upload - for use with split recordings only.")
-     parser.add_argument('--email', dest='hpc_upload_job_email', type=str, help=f"[optional] specify email address for hpc upload job updates. Default is {np_codeocean.HPC_UPLOAD_JOB_EMAIL}")
-     parser.add_argument('--force', action='store_true', help="enable `force_cloud_sync` option, re-uploading and re-making raw asset even if data exists on S3")
-     parser.add_argument('--test', action='store_true', help="use the test-upload service, uploading to the test CodeOcean server instead of the production server")
-     parser.add_argument('--dry-run', action='store_true', help="Create upload job but do not submit to hpc upload queue.")
-     parser.add_argument('--preserve-symlinks', dest='regenerate_symlinks', action='store_false', help="Existing symlink folders will not be deleted and regenerated - may result in additional data being uploaded")
-     parser.add_argument('--regenerate-metadata', action='store_true', help="Regenerate metadata files (session.json and rig.json) even if they already exist")
-     parser.add_argument('--sync', dest="adjust_ephys_timestamps", action='store_true', help="Adjust ephys timestamps.npy prior to upload using sync data (if available)")
-     return parser.parse_args()
-
- def main() -> None:
-     args = parse_args()
-     kwargs = vars(args)
-     np_codeocean.utils.set_npc_lims_credentials()
-     write_metadata_and_upload(**kwargs)
-
-
- if __name__ == '__main__':
-     main()
- # write_metadata_and_upload(
- # 'DRpilot_744740_20241113_surface_channels',
- # force=False,
- # regenerate_metadata=False,
- # regenerate_symlinks=False,
- # )
- # upload_dr_ecephys DRpilot_712141_20240606 --regenerate-metadata
- # upload_dr_ecephys DRpilot_712141_20240611 recording1 recording2 --regenerate-metadata --force
- # upload_dr_ecephys DRpilot_712141_20240605 --regenerate-metadata
+ import argparse
+ import datetime
+ import logging
+ import pathlib
+ import time
+ import typing
+ import warnings
+
+ import aind_codeocean_pipeline_monitor.models
+ import codeocean.capsule
+ import codeocean.computation
+ import codeocean.data_asset
+ import np_config
+ import npc_session
+ import npc_sessions
+ from aind_data_schema.core.rig import Rig
+ from aind_data_schema.core.session import Session as AindSession
+ from aind_data_schema_models.modalities import Modality
+
+ import np_codeocean
+ from np_codeocean.metadata import core as metadata_core
+
+ # Disable divide by zero or NaN warnings
+ warnings.filterwarnings("ignore", category=RuntimeWarning)
+
+ logging.basicConfig(
+     filename=f"//allen/programs/mindscope/workgroups/np-exp/codeocean-logs/{pathlib.Path(__file__).stem}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log",
+     level=logging.DEBUG,
+     format="%(asctime)s | %(name)s | %(levelname)s | %(message)s",
+ datefmt="%Y-%d-%m %H:%M:%S",
+ )
+ logger = logging.getLogger(__name__)
+
+ CONFIG = np_config.fetch("/rigs/room_numbers")
+
+
+ def reformat_rig_model_rig_id(rig_id: str, modification_date: datetime.date) -> str:
+     rig_record = npc_session.RigRecord(rig_id)
+     if not rig_record.is_neuro_pixels_rig:
+         raise Exception(
+             f"Rig is not a neuropixels rig. Only neuropixels rigs are supported. rig_id={rig_id}"
+         )
+     room_number = CONFIG.get(rig_record, "UNKNOWN")
+     return rig_record.as_aind_data_schema_rig_id(
+         str(room_number), modification_date
+     ).replace(".", "")
+
+
+ def extract_modification_date(rig: Rig) -> datetime.date:
+     _, _, date_str = rig.rig_id.split("_")
+     if len(date_str) == 6:
+         return datetime.datetime.strptime(date_str, "%y%m%d").date()
+     elif len(date_str) == 8:
+         return datetime.datetime.strptime(date_str, "%Y%m%d").date()
+     else:
+         raise Exception(f"Unsupported date format: {date_str}")
+
+
+ def add_metadata(
+     session_directory: str | pathlib.Path,
+     session_datetime: datetime.datetime,
+     rig_storage_directory: pathlib.Path,
+     ignore_errors: bool = True,
+     skip_existing: bool = True,
+ ) -> None:
+ """Adds rig and sessions metadata to a session directory."""
+     normalized_session_dir = np_config.normalize_path(session_directory)
+     logger.debug(f"{normalized_session_dir = }")
+     logger.debug(f"{rig_storage_directory = }")
+     session_json = normalized_session_dir / "session.json"
+     if not skip_existing or not (session_json.is_symlink() or session_json.exists()):
+         logger.debug("Attempting to create session.json")
+         npc_sessions.DynamicRoutingSession(
+             normalized_session_dir
+         )._aind_session_metadata.write_standard_file(normalized_session_dir)
+         if session_json.exists():
+             logger.debug("Created session.json")
+         else:
+             raise FileNotFoundError(
+                 "Failed to find created session.json, but no error occurred during creation: may be in unexpected location"
+             )
+     _ = AindSession.model_validate_json(session_json.read_text())
+
+     rig_model_path = normalized_session_dir / "rig.json"
+     if not skip_existing or not (
+         rig_model_path.is_symlink() or rig_model_path.exists()
+     ):
+         if not (session_json.is_symlink() or session_json.exists()):
+             logger.warning(
+                 "session.json is currently required for the rig.json to be created, so we can't continue with metadata creation"
+             )
+             return None
+         metadata_core.add_np_rig_to_session_dir(
+             normalized_session_dir,
+             session_datetime,
+             rig_storage_directory,
+         )
+         if rig_model_path.exists():
+             logger.debug("Created rig.json")
+         else:
+             raise FileNotFoundError(
+                 "Failed to find created rig.json, but no error occurred during creation: may be in unexpected location"
+             )
+     if not (rig_model_path.is_symlink() or rig_model_path.exists()):
+         return None
+
+     rig_metadata = Rig.model_validate_json(rig_model_path.read_text())
+     modification_date = extract_modification_date(rig_metadata)
+     rig_metadata.rig_id = reformat_rig_model_rig_id(
+         rig_metadata.rig_id, modification_date
+     )
+     rig_metadata.write_standard_file(
+         normalized_session_dir
+     )  # assumes this will work out to dest/rig.json
+     session_model_path = metadata_core.scrape_session_model_path(
+         normalized_session_dir,
+     )
+     metadata_core.update_session_from_rig(
+         session_model_path,
+         rig_model_path,
+         session_model_path,
+     )
+
+     return None
+
+
+ def write_metadata_and_upload(
+     session_path_or_folder_name: str,
+     recording_dirs: typing.Iterable[str] | None = None,
+     force: bool = False,
+     dry_run: bool = False,
+     test: bool = False,
+     hpc_upload_job_email: str = np_codeocean.HPC_UPLOAD_JOB_EMAIL,
+     regenerate_metadata: bool = False,
+     regenerate_symlinks: bool = True,
+     adjust_ephys_timestamps: bool = False,
+ ) -> None:
+     """Writes and updates aind-data-schema to the session directory
+     associated with the `session`. The aind-data-schema session model is
+     updated to reflect the `rig_id` of the rig model added to the session
+     directory.
+
+     Only handles ecephys platform uploads (i.e. sessions with a folder of data; not
+     behavior box sessions, which have a single hdf5 file only)
+     """
+     # session = np_session.Session(session) #! this doesn't work for surface_channels
+     session = np_codeocean.get_np_session(session_path_or_folder_name)
+
+     add_metadata(
+         session_directory=session.npexp_path,
+         session_datetime=(
+             session.start
+             if not np_codeocean.is_surface_channel_recording(session.npexp_path.name)
+             else np_codeocean.get_surface_channel_start_time(session)
+         ),
+         rig_storage_directory=pathlib.Path(
+             np_codeocean.get_project_config()["rig_metadata_dir"]
+         ),
+         ignore_errors=True,
+         skip_existing=not regenerate_metadata,
+     )
+
+     # Optional codeocean_pipeline_settings as {modality_abbr: PipelineMonitorSettings}
+     # You can specify up to one pipeline conf per modality
+     # In the future, these can be stored in AWS param store as part of a "job_type"
+     codeocean_pipeline_settings = {
+         Modality.ECEPHYS.abbreviation: aind_codeocean_pipeline_monitor.models.PipelineMonitorSettings(
+             run_params=codeocean.computation.RunParams(
+                 capsule_id="287db808-74ce-4e44-b14b-fde1471eba45",
+                 data_assets=[
+                     codeocean.data_asset.DataAsset(
+                         name="",
+                         id="",  # ID of new raw data asset will be inserted here by airflow
+                         mount="ecephys",
+                         created=time.time(),
+                         state=codeocean.data_asset.DataAssetState.Draft,
+                         type=codeocean.data_asset.DataAssetType.Dataset,
+                         last_used=time.time(),
+                     ),
+                 ],
+             ),
+             computation_polling_interval=15 * 60,
+             computation_timeout=48 * 3600,
+             capture_settings=aind_codeocean_pipeline_monitor.models.CaptureSettings(
+                 tags=[str(session.mouse), "derived", "ecephys"],
+                 custom_metadata={
+                     "data level": "derived",
+                     "experiment type": "ecephys",
+                     "subject id": str(session.mouse),
+                 },
+                 process_name_suffix="sorted",
+                 process_name_suffix_tz="US/Pacific",
+             ),
+         ),
+     }
+
+     return np_codeocean.upload_session(
+         session_path_or_folder_name,
+         recording_dirs=recording_dirs,
+         force=force,
+         dry_run=dry_run,
+         test=test,
+         hpc_upload_job_email=hpc_upload_job_email,
+         regenerate_symlinks=regenerate_symlinks,
+         adjust_ephys_timestamps=adjust_ephys_timestamps,
+         codeocean_pipeline_settings=codeocean_pipeline_settings,
+     )
+
+
+ def parse_args() -> argparse.Namespace:
+     parser = argparse.ArgumentParser(description="Upload a session to CodeOcean")
+     parser.add_argument(
+         "session_path_or_folder_name",
+         help="session ID (lims or np-exp foldername) or path to session folder",
+     )
+     parser.add_argument(
+         "recording_dirs",
+         nargs="*",
+         help="[optional] specific names of recording directories to upload - for use with split recordings only.",
+     )
+     parser.add_argument(
+         "--email",
+         dest="hpc_upload_job_email",
+         type=str,
+         help=f"[optional] specify email address for hpc upload job updates. Default is {np_codeocean.HPC_UPLOAD_JOB_EMAIL}",
+     )
+     parser.add_argument(
+         "--force",
+         action="store_true",
+         help="enable `force_cloud_sync` option, re-uploading and re-making raw asset even if data exists on S3",
+     )
+     parser.add_argument(
+         "--test",
+         action="store_true",
+         help="use the test-upload service, uploading to the test CodeOcean server instead of the production server",
+     )
+     parser.add_argument(
+         "--dry-run",
+         action="store_true",
+         help="Create upload job but do not submit to hpc upload queue.",
+     )
+     parser.add_argument(
+         "--preserve-symlinks",
+         dest="regenerate_symlinks",
+         action="store_false",
+         help="Existing symlink folders will not be deleted and regenerated - may result in additional data being uploaded",
+     )
+     parser.add_argument(
+         "--regenerate-metadata",
+         action="store_true",
+         help="Regenerate metadata files (session.json and rig.json) even if they already exist",
+     )
+     parser.add_argument(
+         "--sync",
+         dest="adjust_ephys_timestamps",
+         action="store_true",
+         help="Adjust ephys timestamps.npy prior to upload using sync data (if available)",
+     )
+     return parser.parse_args()
+
+
+ def main() -> None:
+     args = parse_args()
+     kwargs = vars(args)
+     np_codeocean.utils.set_npc_lims_credentials()
+     write_metadata_and_upload(**kwargs)
+
+
+ if __name__ == "__main__":
+     main()
+ # write_metadata_and_upload(
+ # 'DRpilot_744740_20241113_surface_channels',
+ # force=False,
+ # regenerate_metadata=False,
+ # regenerate_symlinks=False,
+ # )
+ # upload_dr_ecephys DRpilot_712141_20240606 --regenerate-metadata
+ # upload_dr_ecephys DRpilot_712141_20240611 recording1 recording2 --regenerate-metadata --force
+ # upload_dr_ecephys DRpilot_712141_20240605 --regenerate-metadata
@@ -1,33 +1,39 @@
- """
- Linked to a .exe file in the virtual env, which can be run as admin to get around
- symlink-creation permissions issues.
-
- - just edit this file, then run the `upload_sessions.exe` as admin (~/.venv/scripts/upload_sessions.exe)
- """
-
- import np_codeocean
-
- split_recordings: dict[str, tuple[str, ...]] = {
-     "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_13-21-35_628801": (),
-     "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_14-10-18_628801": (),
-     "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-20_12-21-41_670181": (),
-     "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-25_09-47-29_670180": (),
-     "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_681532_20231019": ('recording1', 'recording2'),
-     "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_686176_20231206": ('recording1', 'recording2'),
- }
- split_recording_folders: set[str] = set(split_recordings.keys())
-
- session_folders_to_upload: set[str] = set([
-
- ])
-
- def main() -> None:
-     for session_folder in session_folders_to_upload - split_recording_folders:
-         np_codeocean.upload_session(session_folder)
-
-     for session_folder, recording_dir_names in split_recordings.items():
-         if recording_dir_names:
-             np_codeocean.upload_session(session_folder, recording_dir_names)
-
- if __name__ == '__main__':
-     main()
+ """
+ Linked to a .exe file in the virtual env, which can be run as admin to get around
+ symlink-creation permissions issues.
+
+ - just edit this file, then run the `upload_sessions.exe` as admin (~/.venv/scripts/upload_sessions.exe)
+ """
+
+ import np_codeocean
+
+ split_recordings: dict[str, tuple[str, ...]] = {
+     "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_13-21-35_628801": (),
+     "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_14-10-18_628801": (),
+     "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-20_12-21-41_670181": (),
+     "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-25_09-47-29_670180": (),
+     "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_681532_20231019": (
+         "recording1",
+         "recording2",
+     ),
+     "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_686176_20231206": (
+         "recording1",
+         "recording2",
+     ),
+ }
+ split_recording_folders: set[str] = set(split_recordings.keys())
+
+ session_folders_to_upload: set[str] = set([])
+
+
+ def main() -> None:
+     for session_folder in session_folders_to_upload - split_recording_folders:
+         np_codeocean.upload_session(session_folder)
+
+     for session_folder, recording_dir_names in split_recordings.items():
+         if recording_dir_names:
+             np_codeocean.upload_session(session_folder, recording_dir_names)
+
+
+ if __name__ == "__main__":
+     main()