np_codeocean 0.3.5__py3-none-any.whl → 0.3.6__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- np_codeocean/__init__.py +1 -1
- np_codeocean/metadata/__init__.py +1 -1
- np_codeocean/metadata/common.py +1 -3
- np_codeocean/metadata/core.py +333 -331
- np_codeocean/metadata/dynamic_routing_task_etl.py +1 -1
- np_codeocean/metadata/model_templates/behavior_box.py +115 -115
- np_codeocean/metadata/model_templates/neuropixels_rig.py +544 -544
- np_codeocean/metadata/np.py +1 -1
- np_codeocean/metadata/rigs.py +1 -1
- np_codeocean/metadata/storage.py +78 -78
- np_codeocean/metadata/update.py +1 -2
- np_codeocean/metadata/utils.py +1 -1
- np_codeocean/np_session_utils.py +462 -385
- np_codeocean/scripts/upload_dynamic_routing_behavior.py +483 -413
- np_codeocean/scripts/upload_dynamic_routing_ecephys.py +279 -217
- np_codeocean/scripts/upload_split_recordings_example.py +39 -33
- np_codeocean/utils.py +671 -563
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/METADATA +13 -6
- np_codeocean-0.3.6.dist-info/RECORD +23 -0
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/WHEEL +2 -1
- {np_codeocean-0.3.5.dist-info → np_codeocean-0.3.6.dist-info}/entry_points.txt +0 -3
- np_codeocean-0.3.6.dist-info/top_level.txt +1 -0
- np_codeocean-0.3.5.dist-info/RECORD +0 -22
--- a/np_codeocean/scripts/upload_dynamic_routing_ecephys.py
+++ b/np_codeocean/scripts/upload_dynamic_routing_ecephys.py
@@ -1,217 +1,279 @@
-import argparse
-[removed lines 2-22, truncated in extraction: a block of import statements]
-# Disable divide by zero or NaN warnings
-warnings.filterwarnings("ignore", category=RuntimeWarning)
-
-logging.basicConfig(
-    filename=f"//allen/programs/mindscope/workgroups/np-exp/codeocean-logs/{pathlib.Path(__file__).stem}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log",
-    level=logging.DEBUG,
-    format="%(asctime)s | %(name)s | %(levelname)s | %(message)s",
-    datefmt="%Y-%d-%m %H:%M:%S",
-)
-logger = logging.getLogger(__name__)
-
-CONFIG = np_config.fetch(
-[fetch argument truncated in extraction]
-
-def reformat_rig_model_rig_id(rig_id: str, modification_date: datetime.date) -> str:
-    rig_record = npc_session.RigRecord(rig_id)
-    if not rig_record.is_neuro_pixels_rig:
-        raise Exception(
-            f"Rig is not a neuropixels rig. Only behavior cluster rigs are supported. rig_id={rig_id}"
-        )
-[removed lines 42-217, truncated in extraction; only stray fragments survive]
+import argparse
+import datetime
+import logging
+import pathlib
+import time
+import typing
+import warnings
+
+import aind_codeocean_pipeline_monitor.models
+import codeocean.capsule
+import codeocean.computation
+import codeocean.data_asset
+import np_config
+import npc_session
+import npc_sessions
+from aind_data_schema.core.rig import Rig
+from aind_data_schema.core.session import Session as AindSession
+from aind_data_schema_models.modalities import Modality
+
+import np_codeocean
+from np_codeocean.metadata import core as metadata_core
+
+# Disable divide by zero or NaN warnings
+warnings.filterwarnings("ignore", category=RuntimeWarning)
+
+logging.basicConfig(
+    filename=f"//allen/programs/mindscope/workgroups/np-exp/codeocean-logs/{pathlib.Path(__file__).stem}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log",
+    level=logging.DEBUG,
+    format="%(asctime)s | %(name)s | %(levelname)s | %(message)s",
+    datefmt="%Y-%d-%m %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+CONFIG = np_config.fetch("/rigs/room_numbers")
+
+
+def reformat_rig_model_rig_id(rig_id: str, modification_date: datetime.date) -> str:
+    rig_record = npc_session.RigRecord(rig_id)
+    if not rig_record.is_neuro_pixels_rig:
+        raise Exception(
+            f"Rig is not a neuropixels rig. Only behavior cluster rigs are supported. rig_id={rig_id}"
+        )
+    room_number = CONFIG.get(rig_record, "UNKNOWN")
+    return rig_record.as_aind_data_schema_rig_id(
+        str(room_number), modification_date
+    ).replace(".", "")
+
+
+def extract_modification_date(rig: Rig) -> datetime.date:
+    _, _, date_str = rig.rig_id.split("_")
+    if len(date_str) == 6:
+        return datetime.datetime.strptime(date_str, "%y%m%d").date()
+    elif len(date_str) == 8:
+        return datetime.datetime.strptime(date_str, "%Y%m%d").date()
+    else:
+        raise Exception(f"Unsupported date format: {date_str}")
+
+
+def add_metadata(
+    session_directory: str | pathlib.Path,
+    session_datetime: datetime.datetime,
+    rig_storage_directory: pathlib.Path,
+    ignore_errors: bool = True,
+    skip_existing: bool = True,
+) -> None:
+    """Adds rig and sessions metadata to a session directory."""
+    normalized_session_dir = np_config.normalize_path(session_directory)
+    logger.debug(f"{normalized_session_dir = }")
+    logger.debug(f"{rig_storage_directory = }")
+    session_json = normalized_session_dir / "session.json"
+    if not skip_existing or not (session_json.is_symlink() or session_json.exists()):
+        logger.debug("Attempting to create session.json")
+        npc_sessions.DynamicRoutingSession(
+            normalized_session_dir
+        )._aind_session_metadata.write_standard_file(normalized_session_dir)
+        if session_json.exists():
+            logger.debug("Created session.json")
+        else:
+            raise FileNotFoundError(
+                "Failed to find created session.json, but no error occurred during creation: may be in unexpected location"
+            )
+        _ = AindSession.model_validate_json(session_json.read_text())
+
+    rig_model_path = normalized_session_dir / "rig.json"
+    if not skip_existing or not (
+        rig_model_path.is_symlink() or rig_model_path.exists()
+    ):
+        if not (session_json.is_symlink() or session_json.exists()):
+            logger.warning(
+                "session.json is currently required for the rig.json to be created, so we can't continue with metadata creation"
+            )
+            return None
+        metadata_core.add_np_rig_to_session_dir(
+            normalized_session_dir,
+            session_datetime,
+            rig_storage_directory,
+        )
+        if rig_model_path.exists():
+            logger.debug("Created rig.json")
+        else:
+            raise FileNotFoundError(
+                "Failed to find created rig.json, but no error occurred during creation: may be in unexpected location"
+            )
+    if not (rig_model_path.is_symlink() or rig_model_path.exists()):
+        return None
+
+    rig_metadata = Rig.model_validate_json(rig_model_path.read_text())
+    modification_date = extract_modification_date(rig_metadata)
+    rig_metadata.rig_id = reformat_rig_model_rig_id(
+        rig_metadata.rig_id, modification_date
+    )
+    rig_metadata.write_standard_file(
+        normalized_session_dir
+    )  # assumes this will work out to dest/rig.json
+    session_model_path = metadata_core.scrape_session_model_path(
+        normalized_session_dir,
+    )
+    metadata_core.update_session_from_rig(
+        session_model_path,
+        rig_model_path,
+        session_model_path,
+    )
+
+    return None
+
+
+def write_metadata_and_upload(
+    session_path_or_folder_name: str,
+    recording_dirs: typing.Iterable[str] | None = None,
+    force: bool = False,
+    dry_run: bool = False,
+    test: bool = False,
+    hpc_upload_job_email: str = np_codeocean.HPC_UPLOAD_JOB_EMAIL,
+    regenerate_metadata: bool = False,
+    regenerate_symlinks: bool = True,
+    adjust_ephys_timestamps: bool = False,
+) -> None:
+    """Writes and updates aind-data-schema to the session directory
+    associated with the `session`. The aind-data-schema session model is
+    updated to reflect the `rig_id` of the rig model added to the session
+    directory.
+
+    Only handles ecephys platform uploads (ie sessions with a folder of data; not
+    behavior box sessions, which have a single hdf5 file only)
+    """
+    # session = np_session.Session(session) #! this doesn't work for surface_channels
+    session = np_codeocean.get_np_session(session_path_or_folder_name)
+
+    add_metadata(
+        session_directory=session.npexp_path,
+        session_datetime=(
+            session.start
+            if not np_codeocean.is_surface_channel_recording(session.npexp_path.name)
+            else np_codeocean.get_surface_channel_start_time(session)
+        ),
+        rig_storage_directory=pathlib.Path(
+            np_codeocean.get_project_config()["rig_metadata_dir"]
+        ),
+        ignore_errors=True,
+        skip_existing=not regenerate_metadata,
+    )
+
+    # Optional codeocean_pipeline_settings as {modality_abbr: PipelineMonitorSettings}
+    # You can specify up to one pipeline conf per modality
+    # In the future, these can be stored in AWS param store as part of a "job_type"
+    codeocean_pipeline_settings = {
+        Modality.ECEPHYS.abbreviation: aind_codeocean_pipeline_monitor.models.PipelineMonitorSettings(
+            run_params=codeocean.computation.RunParams(
+                capsule_id="287db808-74ce-4e44-b14b-fde1471eba45",
+                data_assets=[
+                    codeocean.data_asset.DataAsset(
+                        name="",
+                        id="",  # ID of new raw data asset will be inserted here by airflow
+                        mount="ecephys",
+                        created=time.time(),
+                        state=codeocean.data_asset.DataAssetState.Draft,
+                        type=codeocean.data_asset.DataAssetType.Dataset,
+                        last_used=time.time(),
+                    ),
+                ],
+            ),
+            computation_polling_interval=15 * 60,
+            computation_timeout=48 * 3600,
+            capture_settings=aind_codeocean_pipeline_monitor.models.CaptureSettings(
+                tags=[str(session.mouse), "derived", "ecephys"],
+                custom_metadata={
+                    "data level": "derived",
+                    "experiment type": "ecephys",
+                    "subject id": str(session.mouse),
+                },
+                process_name_suffix="sorted",
+                process_name_suffix_tz="US/Pacific",
+            ),
+        ),
+    }
+
+    return np_codeocean.upload_session(
+        session_path_or_folder_name,
+        recording_dirs=recording_dirs,
+        force=force,
+        dry_run=dry_run,
+        test=test,
+        hpc_upload_job_email=hpc_upload_job_email,
+        regenerate_symlinks=regenerate_symlinks,
+        adjust_ephys_timestamps=adjust_ephys_timestamps,
+        codeocean_pipeline_settings=codeocean_pipeline_settings,
+    )
+
+
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="Upload a session to CodeOcean")
+    parser.add_argument(
+        "session_path_or_folder_name",
+        help="session ID (lims or np-exp foldername) or path to session folder",
+    )
+    parser.add_argument(
+        "recording_dirs",
+        nargs="*",
+        help="[optional] specific names of recording directories to upload - for use with split recordings only.",
+    )
+    parser.add_argument(
+        "--email",
+        dest="hpc_upload_job_email",
+        type=str,
+        help=f"[optional] specify email address for hpc upload job updates. Default is {np_codeocean.HPC_UPLOAD_JOB_EMAIL}",
+    )
+    parser.add_argument(
+        "--force",
+        action="store_true",
+        help="enable `force_cloud_sync` option, re-uploading and re-making raw asset even if data exists on S3",
+    )
+    parser.add_argument(
+        "--test",
+        action="store_true",
+        help="use the test-upload service, uploading to the test CodeOcean server instead of the production server",
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Create upload job but do not submit to hpc upload queue.",
+    )
+    parser.add_argument(
+        "--preserve-symlinks",
+        dest="regenerate_symlinks",
+        action="store_false",
+        help="Existing symlink folders will not be deleted and regenerated - may result in additional data being uploaded",
+    )
+    parser.add_argument(
+        "--regenerate-metadata",
+        action="store_true",
+        help="Regenerate metadata files (session.json and rig.json) even if they already exist",
+    )
+    parser.add_argument(
+        "--sync",
+        dest="adjust_ephys_timestamps",
+        action="store_true",
+        help="Adjust ephys timestamps.npy prior to upload using sync data (if available)",
+    )
+    return parser.parse_args()
+
+
+def main() -> None:
+    args = parse_args()
+    kwargs = vars(args)
+    np_codeocean.utils.set_npc_lims_credentials()
+    write_metadata_and_upload(**kwargs)
+
+
+if __name__ == "__main__":
+    main()
+    # write_metadata_and_upload(
+    #     'DRpilot_744740_20241113_surface_channels',
+    #     force=False,
+    #     regenerate_metadata=False,
+    #     regenerate_symlinks=False,
+    # )
+    # upload_dr_ecephys DRpilot_712141_20240606 --regenerate-metadata
+    # upload_dr_ecephys DRpilot_712141_20240611 recording1 recording2 --regenerate-metadata --force
+    # upload_dr_ecephys DRpilot_712141_20240605 --regenerate-metadata
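The new `extract_modification_date` above takes the third underscore-separated field of `rig_id` and accepts either a 6-digit (`%y%m%d`) or 8-digit (`%Y%m%d`) date. A minimal standalone sketch of that parsing logic, with a hypothetical rig_id (the `NP2_327` prefix is illustrative only, not from the package):

```python
import datetime

def parse_rig_modification_date(rig_id: str) -> datetime.date:
    # mirrors extract_modification_date: the date is the third "_"-separated field
    _, _, date_str = rig_id.split("_")
    if len(date_str) == 6:
        return datetime.datetime.strptime(date_str, "%y%m%d").date()
    if len(date_str) == 8:
        return datetime.datetime.strptime(date_str, "%Y%m%d").date()
    raise ValueError(f"Unsupported date format: {date_str}")

# both forms resolve to the same date
assert parse_rig_modification_date("NP2_327_240606") == datetime.date(2024, 6, 6)
assert parse_rig_modification_date("NP2_327_20240606") == datetime.date(2024, 6, 6)
```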
--- a/np_codeocean/scripts/upload_split_recordings_example.py
+++ b/np_codeocean/scripts/upload_split_recordings_example.py
@@ -1,33 +1,39 @@
-"""
-Linked to a .exe file in the virtual env, which can be run as admin to get around
-sylink-creation permissions issues.
-
-- just edit this file, then run the `upload_sessions.exe` as admin (~/.venv/scripts/upload_sessions.exe)
-"""
-
-import np_codeocean
-
-split_recordings: dict[str, tuple[str, ...]] = {
-    "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_13-21-35_628801": (),
-    "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_14-10-18_628801": (),
-    "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-20_12-21-41_670181": (),
-    "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-25_09-47-29_670180": (),
-    "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_681532_20231019": (
-[removed lines 16-33, truncated in extraction]
+"""
+Linked to a .exe file in the virtual env, which can be run as admin to get around
+sylink-creation permissions issues.
+
+- just edit this file, then run the `upload_sessions.exe` as admin (~/.venv/scripts/upload_sessions.exe)
+"""
+
+import np_codeocean
+
+split_recordings: dict[str, tuple[str, ...]] = {
+    "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_13-21-35_628801": (),
+    "//allen/programs/mindscope/workgroups/templeton/TTOC/2022-09-20_14-10-18_628801": (),
+    "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-20_12-21-41_670181": (),
+    "//allen/programs/mindscope/workgroups/templeton/TTOC/2023-07-25_09-47-29_670180": (),
+    "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_681532_20231019": (
+        "recording1",
+        "recording2",
+    ),
+    "//allen/programs/mindscope/workgroups/dynamicrouting/PilotEphys/Task 2 pilot/DRpilot_686176_20231206": (
+        "recording1",
+        "recording2",
+    ),
+}
+split_recording_folders: set[str] = set(split_recordings.keys())
+
+session_folders_to_upload: set[str] = set([])
+
+
+def main() -> None:
+    for session_folder in session_folders_to_upload - split_recording_folders:
+        np_codeocean.upload_session(session_folder)
+
+    for session_folder, recording_dir_names in split_recordings.items():
+        if recording_dir_names:
+            np_codeocean.upload_session(session_folder, recording_dir_names)
+
+
+if __name__ == "__main__":
+    main()
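Note the dispatch in `main()` above: folders listed in `split_recordings` are excluded from the whole-session loop, and only entries that actually name recording directories are uploaded by the second loop, so an entry with an empty tuple is effectively skipped. A minimal sketch of that dispatch, with hypothetical folder paths (not from the package):

```python
import np_codeocean

# hypothetical entries mirroring the example script above
split_recordings: dict[str, tuple[str, ...]] = {
    "//allen/example/session_A": (),  # listed but skipped: no recording dirs named
    "//allen/example/session_B": ("recording1", "recording2"),
}
session_folders_to_upload: set[str] = {"//allen/example/session_C"}

# whole-session uploads, excluding anything registered as a split recording
for folder in session_folders_to_upload - set(split_recordings):
    np_codeocean.upload_session(folder)

# split uploads: only entries that name specific recording directories
for folder, recording_dir_names in split_recordings.items():
    if recording_dir_names:
        np_codeocean.upload_session(folder, recording_dir_names)
```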