eegdash 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of eegdash might be problematic. Click here for more details.
- eegdash/SignalStore/__init__.py +0 -0
- eegdash/SignalStore/signalstore/__init__.py +3 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/abstract_read_adapter.py +13 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/domain_modeling/schema_read_adapter.py +16 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/domain_modeling/vocabulary_read_adapter.py +19 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/handmade_records/excel_study_organizer_read_adapter.py +114 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/axona/axona_read_adapter.py +912 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/ReadIntanSpikeFile.py +140 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/intan_read_adapter.py +29 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/__init__.py +0 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/data_to_result.py +62 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/get_bytes_per_data_block.py +36 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/notch_filter.py +50 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/qstring.py +41 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/read_header.py +135 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/read_one_data_block.py +45 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/load_intan_rhd_format.py +204 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/__init__.py +0 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/data_to_result.py +60 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/get_bytes_per_data_block.py +37 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/notch_filter.py +50 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/qstring.py +41 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/read_header.py +153 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/read_one_data_block.py +47 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/load_intan_rhs_format.py +213 -0
- eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/neurodata_without_borders/neurodata_without_borders_read_adapter.py +14 -0
- eegdash/SignalStore/signalstore/operations/__init__.py +4 -0
- eegdash/SignalStore/signalstore/operations/handler_executor.py +22 -0
- eegdash/SignalStore/signalstore/operations/handler_factory.py +41 -0
- eegdash/SignalStore/signalstore/operations/handlers/base_handler.py +44 -0
- eegdash/SignalStore/signalstore/operations/handlers/domain/property_model_handlers.py +79 -0
- eegdash/SignalStore/signalstore/operations/handlers/domain/schema_handlers.py +3 -0
- eegdash/SignalStore/signalstore/operations/helpers/abstract_helper.py +17 -0
- eegdash/SignalStore/signalstore/operations/helpers/neuroscikit_extractor.py +33 -0
- eegdash/SignalStore/signalstore/operations/helpers/neuroscikit_rawio.py +165 -0
- eegdash/SignalStore/signalstore/operations/helpers/spikeinterface_helper.py +100 -0
- eegdash/SignalStore/signalstore/operations/helpers/wrappers/neo_wrappers.py +21 -0
- eegdash/SignalStore/signalstore/operations/helpers/wrappers/nwb_wrappers.py +27 -0
- eegdash/SignalStore/signalstore/store/__init__.py +8 -0
- eegdash/SignalStore/signalstore/store/data_access_objects.py +1181 -0
- eegdash/SignalStore/signalstore/store/datafile_adapters.py +131 -0
- eegdash/SignalStore/signalstore/store/repositories.py +928 -0
- eegdash/SignalStore/signalstore/store/store_errors.py +68 -0
- eegdash/SignalStore/signalstore/store/unit_of_work.py +97 -0
- eegdash/SignalStore/signalstore/store/unit_of_work_provider.py +67 -0
- eegdash/SignalStore/signalstore/utilities/data_adapters/spike_interface_adapters/si_recording.py +1 -0
- eegdash/SignalStore/signalstore/utilities/data_adapters/spike_interface_adapters/si_sorter.py +1 -0
- eegdash/SignalStore/signalstore/utilities/testing/data_mocks.py +513 -0
- eegdash/SignalStore/signalstore/utilities/tools/dataarrays.py +49 -0
- eegdash/SignalStore/signalstore/utilities/tools/mongo_records.py +25 -0
- eegdash/SignalStore/signalstore/utilities/tools/operation_response.py +78 -0
- eegdash/SignalStore/signalstore/utilities/tools/purge_orchestration_response.py +21 -0
- eegdash/SignalStore/signalstore/utilities/tools/quantities.py +15 -0
- eegdash/SignalStore/signalstore/utilities/tools/strings.py +38 -0
- eegdash/SignalStore/signalstore/utilities/tools/time.py +17 -0
- eegdash/SignalStore/tests/conftest.py +799 -0
- eegdash/SignalStore/tests/data/valid_data/data_arrays/make_fake_data.py +59 -0
- eegdash/SignalStore/tests/unit/store/conftest.py +0 -0
- eegdash/SignalStore/tests/unit/store/test_data_access_objects.py +1235 -0
- eegdash/SignalStore/tests/unit/store/test_repositories.py +1309 -0
- eegdash/SignalStore/tests/unit/store/test_unit_of_work.py +7 -0
- eegdash/SignalStore/tests/unit/test_ci_cd.py +8 -0
- eegdash/__init__.py +1 -0
- eegdash/aws_ingest.py +29 -0
- eegdash/data_utils.py +213 -0
- eegdash/main.py +17 -0
- eegdash/signalstore_data_utils.py +280 -0
- eegdash-0.0.1.dist-info/LICENSE +20 -0
- eegdash-0.0.1.dist-info/METADATA +72 -0
- eegdash-0.0.1.dist-info/RECORD +72 -0
- eegdash-0.0.1.dist-info/WHEEL +5 -0
- eegdash-0.0.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,513 @@
|
|
|
1
|
+
import xarray as xr
|
|
2
|
+
import numpy as np
|
|
3
|
+
import copy
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from signalstore.utilities.testing.helper_mocks import *
|
|
6
|
+
|
|
7
|
+
def record_with_timestamps(record_dict):
    """Return a deep copy of *record_dict* with mock timestamp fields added.

    The copy gets ``time_of_save`` set to the module-level ``time_old``
    fixture timestamp and ``time_of_removal`` set to ``None``.  The input
    dictionary is left untouched.
    """
    stamped = copy.deepcopy(record_dict)
    stamped["time_of_save"] = time_old.timestamp()
    stamped["time_of_removal"] = None
    return stamped
|
|
11
|
+
|
|
12
|
+
# DataArrays
|
|
13
|
+
|
|
14
|
+
spike_waveforms_dataarray = xr.DataArray(
|
|
15
|
+
name = "test1",
|
|
16
|
+
data = np.zeros(shape=(100,3,5)),
|
|
17
|
+
dims = ["time", "probe", "channel"],
|
|
18
|
+
coords = {
|
|
19
|
+
"time": np.array(range(100)),
|
|
20
|
+
"probe": np.array(range(3)),
|
|
21
|
+
"channel": np.array(range(5))
|
|
22
|
+
},
|
|
23
|
+
attrs = {
|
|
24
|
+
"name": "test1",
|
|
25
|
+
"schema_ref": "spike_waveforms",
|
|
26
|
+
"animal_data_ref": "A10",
|
|
27
|
+
"session_data_ref": "20230810-A10-box-0"
|
|
28
|
+
}
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
spike_labels_dataarray = xr.DataArray(
|
|
32
|
+
name = "test2",
|
|
33
|
+
data = np.zeros(shape=(100,1)),
|
|
34
|
+
dims = ["time", "label"],
|
|
35
|
+
coords = {
|
|
36
|
+
"time": np.array(range(100)),
|
|
37
|
+
},
|
|
38
|
+
attrs = {
|
|
39
|
+
"name": "test2",
|
|
40
|
+
"schema_ref": "spike_labels",
|
|
41
|
+
"animal_data_ref": "A10",
|
|
42
|
+
"session_data_ref": "20230810-A10-box-0"
|
|
43
|
+
}
|
|
44
|
+
)
|
|
45
|
+
dataarrays = [spike_waveforms_dataarray, spike_labels_dataarray]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
# Datasets
|
|
49
|
+
|
|
50
|
+
spike_waveforms_dataset = xr.Dataset(
|
|
51
|
+
data_vars = {
|
|
52
|
+
"test1arr": spike_waveforms_dataarray,
|
|
53
|
+
"test2arr": spike_labels_dataarray
|
|
54
|
+
}
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
# Records
|
|
58
|
+
|
|
59
|
+
session_record = {
|
|
60
|
+
"name": "test0",
|
|
61
|
+
"schema_ref": "session",
|
|
62
|
+
"has_file": False,
|
|
63
|
+
"animal_data_ref": "A10",
|
|
64
|
+
"session_date": "2023-08-10",
|
|
65
|
+
"session_time": "12:00:00",
|
|
66
|
+
"session_duration": "00:30:00",
|
|
67
|
+
"session_notes": "This is a test session"
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
spike_waveforms_record = {
|
|
71
|
+
"name": "test1",
|
|
72
|
+
"schema_ref": "spike_waveforms",
|
|
73
|
+
"has_file": True,
|
|
74
|
+
"data_dimensions": ["time", "probe", "channel"],
|
|
75
|
+
"coordinates": ["time", "probe", "channel"],
|
|
76
|
+
"shape": [100, 3, 5],
|
|
77
|
+
"dtype": "float64",
|
|
78
|
+
"unit_of_measure": "microvolts",
|
|
79
|
+
"dimension_of_measure": "charge",
|
|
80
|
+
"animal_data_ref": "A10",
|
|
81
|
+
"session_data_ref": "20230810-A10-box-0"
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
spike_labels_record = {
|
|
85
|
+
"name": "test2",
|
|
86
|
+
"schema_ref": "spike_labels",
|
|
87
|
+
"has_file": True,
|
|
88
|
+
"data_dimensions": ["time", "label"],
|
|
89
|
+
"coordinates": ["time"],
|
|
90
|
+
"shape": [100, 1],
|
|
91
|
+
"dtype": "int64",
|
|
92
|
+
"unit_of_measure": "neurons",
|
|
93
|
+
"dimension_of_measure": "nominal",
|
|
94
|
+
"animal_data_ref": "A10",
|
|
95
|
+
"session_data_ref": "20230810-A10-box-0"
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
records = [spike_waveforms_record, spike_labels_record, session_record]
|
|
99
|
+
|
|
100
|
+
# Schemata
|
|
101
|
+
|
|
102
|
+
xarray_schema = {
|
|
103
|
+
"schema_ref": "xarray",
|
|
104
|
+
"schema_description": "A record of an xarray DataArray object",
|
|
105
|
+
"json_schema": {
|
|
106
|
+
"type": "object",
|
|
107
|
+
"properties": {
|
|
108
|
+
"data_dimensions": {
|
|
109
|
+
"type": "array"
|
|
110
|
+
},
|
|
111
|
+
"coordinates": {
|
|
112
|
+
"type": "array"
|
|
113
|
+
}
|
|
114
|
+
},
|
|
115
|
+
"required": [
|
|
116
|
+
"data_dimensions"
|
|
117
|
+
]
|
|
118
|
+
},
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
units_schema = {
|
|
122
|
+
"schema_ref": "units",
|
|
123
|
+
"schema_description": "A record of an xarray DataArray object",
|
|
124
|
+
"json_schema": {
|
|
125
|
+
"type": "object",
|
|
126
|
+
"properties": {
|
|
127
|
+
"unit_of_measure": {
|
|
128
|
+
"type": "string",
|
|
129
|
+
},
|
|
130
|
+
"dimension_of_measure": {
|
|
131
|
+
"type": "array",
|
|
132
|
+
"items": {
|
|
133
|
+
"type": "string",
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
},
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
base_record_schema = {
|
|
142
|
+
"schema_ref": "base_record",
|
|
143
|
+
"schema_description": "A record of a measurement",
|
|
144
|
+
"json_schema": {
|
|
145
|
+
"type": "object",
|
|
146
|
+
"properties": {
|
|
147
|
+
"name": {
|
|
148
|
+
"type": "string",
|
|
149
|
+
},
|
|
150
|
+
"schema_ref": {
|
|
151
|
+
"type": "string",
|
|
152
|
+
},
|
|
153
|
+
"has_file": {
|
|
154
|
+
"type": "boolean"
|
|
155
|
+
}
|
|
156
|
+
},
|
|
157
|
+
"required": ["name", "schema_ref"]
|
|
158
|
+
},
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
data_record_schema = {
|
|
162
|
+
"schema_ref": "data_record",
|
|
163
|
+
"schema_description": "A record of a measurement",
|
|
164
|
+
"json_schema": {
|
|
165
|
+
"type": "object",
|
|
166
|
+
"allOf": [
|
|
167
|
+
{
|
|
168
|
+
"_schema_ref": "base_record"
|
|
169
|
+
},
|
|
170
|
+
{
|
|
171
|
+
"_schema_ref": "xarray"
|
|
172
|
+
},
|
|
173
|
+
{
|
|
174
|
+
"_schema_ref": "units"
|
|
175
|
+
},
|
|
176
|
+
{
|
|
177
|
+
"oneOf": [
|
|
178
|
+
{
|
|
179
|
+
"type": "object",
|
|
180
|
+
"properties": {
|
|
181
|
+
"is_acquisition": { "const": True },
|
|
182
|
+
"acquisition_description": { "type": "string" },
|
|
183
|
+
"acquisition_brand": { "type": "string" },
|
|
184
|
+
"acquisition_settings_name": { "type": "string" }
|
|
185
|
+
},
|
|
186
|
+
"required": ["is_acquisition", "acquisition_description", "acquisition_brand", "acquisition_settings_name"],
|
|
187
|
+
"additionalProperties": True
|
|
188
|
+
},
|
|
189
|
+
{
|
|
190
|
+
"type": "object",
|
|
191
|
+
"properties": {
|
|
192
|
+
"is_acquisition": { "const": False },
|
|
193
|
+
"creation_report_key": { "type": "string" }
|
|
194
|
+
},
|
|
195
|
+
"required": ["is_acquisition", "creation_report"],
|
|
196
|
+
"additionalProperties": True
|
|
197
|
+
}
|
|
198
|
+
]
|
|
199
|
+
}
|
|
200
|
+
]
|
|
201
|
+
},
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
pure_record_schema = {
|
|
205
|
+
"schema_ref": "pure_record",
|
|
206
|
+
"schema_description": "A record of a measurement",
|
|
207
|
+
"json_schema": {
|
|
208
|
+
"type": "object",
|
|
209
|
+
"allOf": [
|
|
210
|
+
{
|
|
211
|
+
"_schema_ref": "base_record"
|
|
212
|
+
},
|
|
213
|
+
{
|
|
214
|
+
"type": "object",
|
|
215
|
+
"properties": {
|
|
216
|
+
"has_file": { "const": False },
|
|
217
|
+
},
|
|
218
|
+
"required": ["has_file"]
|
|
219
|
+
}
|
|
220
|
+
]
|
|
221
|
+
},
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
session_schema = {
|
|
225
|
+
"schema_ref": "session",
|
|
226
|
+
"schema_description": "A record of a recording session, where multiple data recordings may have been taken. A session includes metadata about the session such as the date, time, duration, foriegn keys to records of things used and notes.",
|
|
227
|
+
"json_schema": {
|
|
228
|
+
"allOf": [
|
|
229
|
+
{'schema_ref': 'pure_record'},
|
|
230
|
+
{
|
|
231
|
+
"type": "object",
|
|
232
|
+
"properties": {
|
|
233
|
+
"animal_data_ref": {
|
|
234
|
+
"type": "string",
|
|
235
|
+
},
|
|
236
|
+
"session_date": {
|
|
237
|
+
"type": "string",
|
|
238
|
+
},
|
|
239
|
+
"start_time": {
|
|
240
|
+
"type": "string",
|
|
241
|
+
},
|
|
242
|
+
"session_duration": {
|
|
243
|
+
"type": "string",
|
|
244
|
+
},
|
|
245
|
+
"session_notes": {
|
|
246
|
+
"type": "string",
|
|
247
|
+
}
|
|
248
|
+
},
|
|
249
|
+
"required": ["session_date", "start_time", "session_duration"]
|
|
250
|
+
}
|
|
251
|
+
]
|
|
252
|
+
},
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
animal_schema = {
|
|
256
|
+
"schema_ref": "animal",
|
|
257
|
+
"schema_description": "A record of an animal",
|
|
258
|
+
"json_schema": {
|
|
259
|
+
"allOf": [
|
|
260
|
+
{'schema_ref': 'pure_record'},
|
|
261
|
+
{
|
|
262
|
+
"type": "object",
|
|
263
|
+
"properties": {
|
|
264
|
+
"sex": {
|
|
265
|
+
"type": "string",
|
|
266
|
+
},
|
|
267
|
+
"species": {
|
|
268
|
+
"type": "string",
|
|
269
|
+
},
|
|
270
|
+
"strain": {
|
|
271
|
+
"type": "string",
|
|
272
|
+
},
|
|
273
|
+
"genotype": {
|
|
274
|
+
"type": "string",
|
|
275
|
+
},
|
|
276
|
+
"age": {
|
|
277
|
+
"type": "numeric",
|
|
278
|
+
},
|
|
279
|
+
"age_unit": {
|
|
280
|
+
"type": "string",
|
|
281
|
+
},
|
|
282
|
+
"weight": {
|
|
283
|
+
"type": "numeric",
|
|
284
|
+
},
|
|
285
|
+
"weight_unit": {
|
|
286
|
+
"type": "string",
|
|
287
|
+
},
|
|
288
|
+
"animal_notes": {
|
|
289
|
+
"type": "string",
|
|
290
|
+
}
|
|
291
|
+
},
|
|
292
|
+
# species and strain are required
|
|
293
|
+
"required": ["species", "strain"],
|
|
294
|
+
# if age is present, age_unit must be present
|
|
295
|
+
# if weight is present, weight_unit must be present
|
|
296
|
+
"if": {
|
|
297
|
+
"oneOf": [
|
|
298
|
+
{"properties": {"age": {"type": "numeric"}}},
|
|
299
|
+
{"properties": {"age_unit": {"type": "string"}}}
|
|
300
|
+
]
|
|
301
|
+
},
|
|
302
|
+
"then": {
|
|
303
|
+
"required": ["age", "age_unit"]
|
|
304
|
+
},
|
|
305
|
+
"if": {
|
|
306
|
+
"oneOf": [
|
|
307
|
+
{"properties": {"weight": {"type": "numeric"}}},
|
|
308
|
+
{"properties": {"weight_unit": {"type": "string"}}}
|
|
309
|
+
]
|
|
310
|
+
},
|
|
311
|
+
"then": {
|
|
312
|
+
"required": ["weight", "weight_unit"]
|
|
313
|
+
}
|
|
314
|
+
}
|
|
315
|
+
]
|
|
316
|
+
},
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
spike_waveforms_schema = {
|
|
322
|
+
"schema_ref": "spike_waveforms",
|
|
323
|
+
"schema_description": "A record of an xarray DataArray object",
|
|
324
|
+
"json_schema": {
|
|
325
|
+
"type": "object",
|
|
326
|
+
"allOf": [
|
|
327
|
+
{"_schema_ref": "data_record"},
|
|
328
|
+
{"type": "object",
|
|
329
|
+
"properties": {
|
|
330
|
+
# dimensions should also have a specific order to them
|
|
331
|
+
"data_dimensions": {
|
|
332
|
+
"type": "array",
|
|
333
|
+
"items": {
|
|
334
|
+
"type": "string",
|
|
335
|
+
"enum": ["time", "probe", "channel"]
|
|
336
|
+
},
|
|
337
|
+
"minItems": 3,
|
|
338
|
+
"maxItems": 3,
|
|
339
|
+
"uniqueItems": True
|
|
340
|
+
},
|
|
341
|
+
"coordinates": {
|
|
342
|
+
"type": "array",
|
|
343
|
+
"items": {
|
|
344
|
+
"type": "string",
|
|
345
|
+
"enum": ["time", "probe", "channel"]
|
|
346
|
+
},
|
|
347
|
+
"uniqueItems": True
|
|
348
|
+
},
|
|
349
|
+
"unit": {
|
|
350
|
+
"type": "string"
|
|
351
|
+
},
|
|
352
|
+
"dimension_of_measure": {
|
|
353
|
+
"type": "string",
|
|
354
|
+
"const": "charge"
|
|
355
|
+
},
|
|
356
|
+
"animal_data_ref": {
|
|
357
|
+
"type": "string"
|
|
358
|
+
},
|
|
359
|
+
"session_data_ref": {
|
|
360
|
+
"type": "string"
|
|
361
|
+
}
|
|
362
|
+
},
|
|
363
|
+
"required": ["data_dimensions", "coordinates", "unit", "dimension_of_measure", "animal_data_ref", "session_data_ref"]
|
|
364
|
+
},
|
|
365
|
+
]
|
|
366
|
+
},
|
|
367
|
+
}
|
|
368
|
+
schemata = [spike_waveforms_schema, xarray_schema, units_schema, base_record_schema, data_record_schema, pure_record_schema, session_schema, animal_schema]
|
|
369
|
+
|
|
370
|
+
# Vocabulary terms
|
|
371
|
+
name_term = {
|
|
372
|
+
"name": "name",
|
|
373
|
+
"title": "Human Readable Identifier",
|
|
374
|
+
"description": "A unique record or object identifier that is intended to be human readable."
|
|
375
|
+
}
|
|
376
|
+
schema_name_term = {
|
|
377
|
+
"name": "schema_ref",
|
|
378
|
+
"title": "Schema Name",
|
|
379
|
+
"description": "The name of a schema used to identify the type of record or data object. Also used as a unique identifier for schemas when they are loaded into the database and queried."
|
|
380
|
+
}
|
|
381
|
+
json_schema_term = {
|
|
382
|
+
"name": "json_schema",
|
|
383
|
+
"title": "Schema Body",
|
|
384
|
+
"description": "The body of a schema. This is a JSON Schema that describes the structure of a record or data object."
|
|
385
|
+
}
|
|
386
|
+
schema_description_term = {
|
|
387
|
+
"name": "schema_description",
|
|
388
|
+
"title": "Schema Description",
|
|
389
|
+
"description": "A description of a schema. This is a string that describes the purpose of a schema."
|
|
390
|
+
}
|
|
391
|
+
has_file_term = {
|
|
392
|
+
"name": "has_file",
|
|
393
|
+
"title": "Has Data",
|
|
394
|
+
"description": "A boolean term (true or false) that says whether or not a record has data associated with it. If a record has data, then it is a data record. If a record does not have data, then it is a metadata record."
|
|
395
|
+
}
|
|
396
|
+
unit_of_measure_term = {
|
|
397
|
+
"name": "unit_of_measure",
|
|
398
|
+
"title": "Unit of Measure",
|
|
399
|
+
"description": "A unit of measure says what a quantity is counting or measuring. Most units of measure are SI Units such as centimeters, volts, ect. However, in the context of this domain, there may be units of measure such as neurons (nominal scale), number of spikes (integer scale) or any other number of specific unit types."
|
|
400
|
+
}
|
|
401
|
+
dimension_of_measure_term = {
|
|
402
|
+
"name": "dimension_of_measure",
|
|
403
|
+
"title": "Dimension of Measure",
|
|
404
|
+
"description": "A Dimension of Measure refers to the dimensional property of a Unit of Measure. For example, if my unit of measure is centimeters then my dimension of measure is length. Likewise for seconds and time. More exotic examples exist as well. Spikes have the dimension of measure count and neurons have the dimension of measure nominal (nominal refers to a category or label)."
|
|
405
|
+
}
|
|
406
|
+
acquisition_term = {
|
|
407
|
+
"name": "acquisition",
|
|
408
|
+
"title": "Acquisition",
|
|
409
|
+
"description": "An acquisition is a boolearn term (true or false) that says whether or not a data object was acquired from a source outside of the data analysis process. All of the data objects read from external data are flagged as being acquisitions. Readers never do preprocessing so that they may reflect the exact numerical values from the original data source."
|
|
410
|
+
}
|
|
411
|
+
acquisition_date_term = {
|
|
412
|
+
"name": "acquisition_date",
|
|
413
|
+
"title": "Acquisition Date",
|
|
414
|
+
"description": "The date when an acquisition was originally produced (usually taken from the metadata of a recording file.)"
|
|
415
|
+
}
|
|
416
|
+
import_date_term = {
|
|
417
|
+
"name": "import_date",
|
|
418
|
+
"title": "Import Date",
|
|
419
|
+
"description": "The date when an acquisition (an imported data set) was imported."
|
|
420
|
+
}
|
|
421
|
+
acquisition_notes_term = {
|
|
422
|
+
"name": "acquisition_notes",
|
|
423
|
+
"title": "Acquisition Notes",
|
|
424
|
+
"description": "Notes on the progeny of an acquisition. This field is usually automatically populated with an explanation of what the acquisition is by an import adapter within signalstore."
|
|
425
|
+
}
|
|
426
|
+
dimensions_term = {
|
|
427
|
+
"name": "data_dimensions",
|
|
428
|
+
"title": "data_dimensions",
|
|
429
|
+
"description": "The named dimensions of a data object. This is a list of strings. It lists the dimension names that would go into the .dims attribute of an xarray DataArray."
|
|
430
|
+
}
|
|
431
|
+
coordinates_term = {
|
|
432
|
+
"name": "coordinates",
|
|
433
|
+
"title": "Coordinates",
|
|
434
|
+
"description": "The named coordinates of a data object. This is a list of strings that is a subset or equal to the dimensions of the data object. It lists the coordinate names that would go into the .coords attribute of an xarray DataArray."
|
|
435
|
+
}
|
|
436
|
+
shape_term = {
|
|
437
|
+
"name": "shape",
|
|
438
|
+
"title": "Shape",
|
|
439
|
+
"description": "The shape of a data object. This is a list of integers that is equal to the shape of the data object. It lists the shape that would go into the .shape attribute of an xarray DataArray."
|
|
440
|
+
}
|
|
441
|
+
dtype_term = {
|
|
442
|
+
"name": "dtype",
|
|
443
|
+
"title": "Data Type",
|
|
444
|
+
"description": "The data type of a data object. This is a string that is equal to the dtype of the data object. It lists the dtype that would go into the .dtype attribute of an xarray DataArray."
|
|
445
|
+
}
|
|
446
|
+
session_date = {
|
|
447
|
+
"name": "session_date",
|
|
448
|
+
"title": "Session Date",
|
|
449
|
+
"description": "The date when a session was taken."
|
|
450
|
+
}
|
|
451
|
+
session_time = {
|
|
452
|
+
"name": "session_time",
|
|
453
|
+
"title": "Session Time",
|
|
454
|
+
"description": "The time when a session was started."
|
|
455
|
+
}
|
|
456
|
+
session_duration = {
|
|
457
|
+
"name": "session_duration",
|
|
458
|
+
"title": "Session Duration",
|
|
459
|
+
"description": "The duration of a session."
|
|
460
|
+
}
|
|
461
|
+
session_notes = {
|
|
462
|
+
"name": "session_notes",
|
|
463
|
+
"title": "Session Notes",
|
|
464
|
+
"description": "Notes about a session."
|
|
465
|
+
}
|
|
466
|
+
time_of_removal = {
|
|
467
|
+
"name": "time_of_removal",
|
|
468
|
+
"title": "Time of Deletion",
|
|
469
|
+
"description": "The timestamp at which the record was deleted."
|
|
470
|
+
}
|
|
471
|
+
time_of_save = {
|
|
472
|
+
"name": "time_of_save",
|
|
473
|
+
"title": "Time of Creation",
|
|
474
|
+
"description": "The timestamp at which the record was created."
|
|
475
|
+
}
|
|
476
|
+
vocabulary_terms = [name_term, schema_name_term, json_schema_term, schema_description_term, has_file_term, unit_of_measure_term, dimension_of_measure_term, acquisition_term, acquisition_date_term, import_date_term, acquisition_notes_term, dimensions_term, coordinates_term, shape_term, dtype_term, session_date, session_time, session_duration, session_notes, time_of_save, time_of_removal]
|
|
477
|
+
|
|
478
|
+
# Time
|
|
479
|
+
|
|
480
|
+
class MockDatetime:
    """Drop-in stand-in for the `datetime` module, for use in tests.

    `now` always returns the fixed moment supplied at construction, giving
    tests a fully predictable clock.
    """

    def __init__(self, now_time):
        # The frozen instant every call to now() will report.
        self._frozen_now = now_time

    def now(self, *args, **kwargs):
        # Accepts (and ignores) whatever arguments the real datetime.now takes.
        return self._frozen_now
|
|
491
|
+
|
|
492
|
+
time_newest = datetime(2023, 5, 1, 1, 1, 1, 1)
|
|
493
|
+
time_default = datetime(2023, 1, 1, 1, 1, 1, 1)
|
|
494
|
+
time_old = datetime(1990, 1, 1, 1, 1, 1, 1)
|
|
495
|
+
time_older = datetime(1975, 1, 1, 1, 1, 1, 1)
|
|
496
|
+
|
|
497
|
+
# Handlers
|
|
498
|
+
|
|
499
|
+
class MockHandler:
    """No-op handler used to stub out real operation handlers in tests."""

    def execute(self, *args, **kwargs):
        """Accept any arguments, do nothing, and return None."""
        return None
|
|
502
|
+
|
|
503
|
+
class MockDataHandlerFactory:
    """Factory stub that hands back a MockHandler regardless of the request."""

    def create(self, handler_name, service_bundle=None, **kwargs):
        # All arguments are accepted for interface compatibility and ignored.
        return MockHandler()
|
|
507
|
+
|
|
508
|
+
class MockDomainHandlerFactory:
    """Factory stub mirroring MockDataHandlerFactory for domain handlers."""

    def create(self, handler_name, service_bundle=None, **kwargs):
        # All arguments are accepted for interface compatibility and ignored.
        return MockHandler()
|
|
512
|
+
|
|
513
|
+
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
def dataarray_isequal(dataarray1, dataarray2):
    """Compare two xarray-like DataArrays field by field.

    Checks name, attrs, dims, coords and data values.  Returns True when every
    component matches; on any mismatch it prints the per-field assertion map
    (a test-debugging aid) and returns False.
    """
    assertions = {}
    assertions["name"] = dataarray1.name == dataarray2.name
    # Attributes: an attr missing from either side counts as a mismatch.
    # Bug fix: the original indexed dataarray2.attrs[attr] unconditionally,
    # raising KeyError when dataarray1 had an attr dataarray2 lacked.
    assertions["attrs"] = {}
    for attr in dataarray1.attrs:
        assertions["attrs"][attr] = (
            attr in dataarray2.attrs
            and dataarray1.attrs[attr] == dataarray2.attrs[attr]
        )
    # check for attributes in dataarray2 that are not in dataarray1
    for attr in dataarray2.attrs:
        if attr not in dataarray1.attrs:
            assertions["attrs"][attr] = False
    # Dimensions are compared as sets (order-insensitive, as in the original).
    assertions["dims"] = set(dataarray1.dims) == set(dataarray2.dims)
    if assertions["dims"] is False:
        # Expand into a per-dimension map for the diagnostic printout:
        # shared dims are True, dims on only one side are False.
        assertions["dims"] = {}
        for dim in set(dataarray1.dims) & set(dataarray2.dims):
            assertions["dims"][dim] = True
        for dim in set(dataarray1.dims) ^ set(dataarray2.dims):
            assertions["dims"][dim] = False
    # Coordinates: compare over the union of coordinate names.
    assertions["coords"] = {}
    for coord in set(dataarray1.coords) | set(dataarray2.coords):
        if coord not in dataarray1.coords or coord not in dataarray2.coords:
            assertions["coords"][coord] = False
        elif dataarray1.coords[coord].shape != dataarray2.coords[coord].shape:
            # Shapes must match before an elementwise comparison is valid.
            assertions["coords"][coord] = False
        else:
            assertions["coords"][coord] = all(dataarray1.coords[coord] == dataarray2.coords[coord])
    # Data payload: shape check first, then elementwise equality.
    if dataarray1.data.shape != dataarray2.data.shape:
        assertions["data"] = False
    else:
        assertions["data"] = (dataarray1.data == dataarray2.data).all()
    # Aggregate nested assertion maps into one boolean per top-level key.
    agg_assertions = {}
    for key, value in assertions.items():
        agg_assertions[key] = all(value.values()) if isinstance(value, dict) else value
    result = all(agg_assertions.values())
    if not result:
        print(assertions)
    return result
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import copy
|
|
2
|
+
from datetime import datetime, timezone
|
|
3
|
+
|
|
4
|
+
def init_timestamps(struct):
    """Return a copy of *struct* with fresh timestamp bookkeeping fields.

    ``time_of_save`` is set to the current time as a timezone-aware UTC
    datetime and ``time_of_removal`` is initialized to ``None``.  Non-dict
    inputs are returned unchanged.
    """
    # init_timestamps should produce timestamps that are timezone aware
    # and in UTC.
    # https://docs.python.org/3/library/datetime.html#datetime.datetime.now
    # https://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow
    # https://docs.python.org/3/library/datetime.html#datetime.timezone
    if not isinstance(struct, dict):
        return struct
    ret = copy.deepcopy(struct)
    # Bug fix: datetime.now().astimezone() yields the *local* timezone; the
    # stated contract above is timezone-aware UTC, so request UTC explicitly.
    ret['time_of_save'] = datetime.now(timezone.utc)
    ret['time_of_removal'] = None
    return ret
|
|
16
|
+
|
|
17
|
+
def remove_timestamps(struct):
    """Return a copy of *struct* stripped of timestamp bookkeeping keys.

    Removes ``time_of_save`` and ``time_of_removal`` when present.  Non-dict
    inputs are returned unchanged.
    """
    if not isinstance(struct, dict):
        return struct
    cleaned = copy.deepcopy(struct)
    cleaned.pop('time_of_save', None)
    cleaned.pop('time_of_removal', None)
    return cleaned
|
|
25
|
+
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
from datetime import datetime, timezone
|
|
3
|
+
from signalstore.utilities.tools.time import timenow_millis
|
|
4
|
+
|
|
5
|
+
def get_current_kwargs(frame_offset=0):
    """
    This function inspects the stack frames to retrieve the args passed to the calling function, in
    kwargs form.

    You can pass a `frame_offset` to go higher up the stack than just 1 level.
    """
    # Frame 0 is this function itself; index 1 is the immediate caller, plus
    # any extra offset requested.
    outerframe = inspect.getouterframes(inspect.currentframe())[1+frame_offset]
    argspec=inspect.getargvalues(outerframe.frame)
    # argspec.args[1:] skips the first positional arg — presumably `self`,
    # i.e. callers are expected to be bound methods. TODO confirm callers.
    in_kwargs = {arg_name: argspec.locals[arg_name] for arg_name in argspec.args[1:] }
    return in_kwargs
|
|
16
|
+
|
|
17
|
+
def get_current_func_name(frame_offset=0):
    """
    This function inspects the stack frames to retrieve the name of the current function.

    You can pass a `frame_offset` to go higher up the stack than just 1 level.
    """
    # Index 0 is this frame; index 1 (+ offset) is the caller whose name we want.
    caller_record = inspect.getouterframes(inspect.currentframe())[frame_offset + 1]
    return caller_record.function
|
|
25
|
+
|
|
26
|
+
def get_current_class_name(frame_offset=0):
    """
    This function inspects the stack frames to retrieve the name of the current class.

    You can pass a `frame_offset` to go higher up the stack than just 1 level.
    """
    # Frame 0 is this function itself; index 1 is the immediate caller, plus
    # any extra offset requested.
    outerframe = inspect.getouterframes(inspect.currentframe())[1+frame_offset]
    # Assumes the calling frame belongs to a bound method: reads `self` from
    # its locals. Raises KeyError if the caller has no `self` — TODO confirm
    # all call sites are instance methods.
    return outerframe.frame.f_locals["self"].__class__.__name__
|
|
34
|
+
|
|
35
|
+
class OperationResponse:
    """This class encapsulates information about repository operations (add, delete, etc).
    This information allows us to replay / roll back operations when they fail.
    """

    def __init__(self, class_name: str = None, operation: str = None, kwargs: dict = None, timestamp=None, datetime_override=None, result=None):
        # We pass `frame_offset=1` because we need to go up an extra level to get out of __init__.
        self._class_name = class_name or get_current_class_name(frame_offset=1)
        self._operation_name = operation or get_current_func_name(frame_offset=1)
        self._kwargs = kwargs or get_current_kwargs(frame_offset=1)
        # Allows tests to substitute a fake datetime module (e.g. MockDatetime).
        self._datetime = datetime_override or datetime
        self._timestamp = timestamp or timenow_millis(self._datetime)
        # Bug fix: `result` was accepted but never stored, so the `result`
        # property raised AttributeError on every instance.
        self._result = result

    @property
    def class_name(self):
        # Name of the class that performed the operation.
        return self._class_name

    @property
    def operation_name(self):
        # Name of the method that performed the operation.
        return self._operation_name

    @property
    def kwargs(self):
        # Keyword arguments the operation was invoked with.
        return self._kwargs

    @property
    def timestamp(self):
        # Timestamp recorded (or supplied) at construction time.
        return self._timestamp

    @property
    def result(self):
        # Optional operation result supplied at construction (may be None).
        return self._result

    @property
    def dict(self):
        """Serializable summary of the operation.

        Bug fix: the original built this dictionary but fell off the end of
        the property without returning it, so `dict` was always None.
        """
        return {
            "class": self.class_name,
            "method": self.operation_name,
            "kwargs": self.kwargs,
            "timestamp": self.timestamp,
        }
|
|
76
|
+
|
|
77
|
+
def operation_response_factory(class_name: str = None, operation: str = None, kwargs: dict = None, timestamp=None, datetime_override=None):
    """Build an OperationResponse and return its `dict` representation."""
    response = OperationResponse(class_name, operation, kwargs, timestamp, datetime_override)
    return response.dict
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from signalstore.utilities.tools.operation_response import OperationResponse
|
|
2
|
+
|
|
3
|
+
class PurgeOrchestrationResponse:
    """Outcome wrapper for a single purge-orchestration operation.

    Bundles a success flag with either the operation's response (on success)
    or the exception that aborted it (on failure), each exposed through a
    read-only property.
    """

    def __init__(self, op_success: bool, op_response: OperationResponse = None, op_exception: Exception = None):
        self._op_success = op_success
        self._op_response = op_response
        self._op_exception = op_exception

    @property
    def op_success(self):
        # True when the operation completed without raising.
        return self._op_success

    @property
    def op_response(self):
        # The OperationResponse produced by the operation, if any.
        return self._op_response

    @property
    def op_exception(self):
        # The exception captured from a failed operation, if any.
        return self._op_exception
|
|
21
|
+
|