eegdash 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of eegdash might be problematic. Click here for more details.

Files changed (72)
  1. eegdash/SignalStore/__init__.py +0 -0
  2. eegdash/SignalStore/signalstore/__init__.py +3 -0
  3. eegdash/SignalStore/signalstore/adapters/read_adapters/abstract_read_adapter.py +13 -0
  4. eegdash/SignalStore/signalstore/adapters/read_adapters/domain_modeling/schema_read_adapter.py +16 -0
  5. eegdash/SignalStore/signalstore/adapters/read_adapters/domain_modeling/vocabulary_read_adapter.py +19 -0
  6. eegdash/SignalStore/signalstore/adapters/read_adapters/handmade_records/excel_study_organizer_read_adapter.py +114 -0
  7. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/axona/axona_read_adapter.py +912 -0
  8. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/ReadIntanSpikeFile.py +140 -0
  9. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/intan_read_adapter.py +29 -0
  10. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/__init__.py +0 -0
  11. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/data_to_result.py +62 -0
  12. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/get_bytes_per_data_block.py +36 -0
  13. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/notch_filter.py +50 -0
  14. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/qstring.py +41 -0
  15. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/read_header.py +135 -0
  16. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/intanutil/read_one_data_block.py +45 -0
  17. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhd_format/load_intan_rhd_format.py +204 -0
  18. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/__init__.py +0 -0
  19. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/data_to_result.py +60 -0
  20. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/get_bytes_per_data_block.py +37 -0
  21. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/notch_filter.py +50 -0
  22. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/qstring.py +41 -0
  23. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/read_header.py +153 -0
  24. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/intanutil/read_one_data_block.py +47 -0
  25. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/intan/load_intan_rhs_format/load_intan_rhs_format.py +213 -0
  26. eegdash/SignalStore/signalstore/adapters/read_adapters/recording_acquisitions/neurodata_without_borders/neurodata_without_borders_read_adapter.py +14 -0
  27. eegdash/SignalStore/signalstore/operations/__init__.py +4 -0
  28. eegdash/SignalStore/signalstore/operations/handler_executor.py +22 -0
  29. eegdash/SignalStore/signalstore/operations/handler_factory.py +41 -0
  30. eegdash/SignalStore/signalstore/operations/handlers/base_handler.py +44 -0
  31. eegdash/SignalStore/signalstore/operations/handlers/domain/property_model_handlers.py +79 -0
  32. eegdash/SignalStore/signalstore/operations/handlers/domain/schema_handlers.py +3 -0
  33. eegdash/SignalStore/signalstore/operations/helpers/abstract_helper.py +17 -0
  34. eegdash/SignalStore/signalstore/operations/helpers/neuroscikit_extractor.py +33 -0
  35. eegdash/SignalStore/signalstore/operations/helpers/neuroscikit_rawio.py +165 -0
  36. eegdash/SignalStore/signalstore/operations/helpers/spikeinterface_helper.py +100 -0
  37. eegdash/SignalStore/signalstore/operations/helpers/wrappers/neo_wrappers.py +21 -0
  38. eegdash/SignalStore/signalstore/operations/helpers/wrappers/nwb_wrappers.py +27 -0
  39. eegdash/SignalStore/signalstore/store/__init__.py +8 -0
  40. eegdash/SignalStore/signalstore/store/data_access_objects.py +1181 -0
  41. eegdash/SignalStore/signalstore/store/datafile_adapters.py +131 -0
  42. eegdash/SignalStore/signalstore/store/repositories.py +928 -0
  43. eegdash/SignalStore/signalstore/store/store_errors.py +68 -0
  44. eegdash/SignalStore/signalstore/store/unit_of_work.py +97 -0
  45. eegdash/SignalStore/signalstore/store/unit_of_work_provider.py +67 -0
  46. eegdash/SignalStore/signalstore/utilities/data_adapters/spike_interface_adapters/si_recording.py +1 -0
  47. eegdash/SignalStore/signalstore/utilities/data_adapters/spike_interface_adapters/si_sorter.py +1 -0
  48. eegdash/SignalStore/signalstore/utilities/testing/data_mocks.py +513 -0
  49. eegdash/SignalStore/signalstore/utilities/tools/dataarrays.py +49 -0
  50. eegdash/SignalStore/signalstore/utilities/tools/mongo_records.py +25 -0
  51. eegdash/SignalStore/signalstore/utilities/tools/operation_response.py +78 -0
  52. eegdash/SignalStore/signalstore/utilities/tools/purge_orchestration_response.py +21 -0
  53. eegdash/SignalStore/signalstore/utilities/tools/quantities.py +15 -0
  54. eegdash/SignalStore/signalstore/utilities/tools/strings.py +38 -0
  55. eegdash/SignalStore/signalstore/utilities/tools/time.py +17 -0
  56. eegdash/SignalStore/tests/conftest.py +799 -0
  57. eegdash/SignalStore/tests/data/valid_data/data_arrays/make_fake_data.py +59 -0
  58. eegdash/SignalStore/tests/unit/store/conftest.py +0 -0
  59. eegdash/SignalStore/tests/unit/store/test_data_access_objects.py +1235 -0
  60. eegdash/SignalStore/tests/unit/store/test_repositories.py +1309 -0
  61. eegdash/SignalStore/tests/unit/store/test_unit_of_work.py +7 -0
  62. eegdash/SignalStore/tests/unit/test_ci_cd.py +8 -0
  63. eegdash/__init__.py +1 -0
  64. eegdash/aws_ingest.py +29 -0
  65. eegdash/data_utils.py +213 -0
  66. eegdash/main.py +17 -0
  67. eegdash/signalstore_data_utils.py +280 -0
  68. eegdash-0.0.1.dist-info/LICENSE +20 -0
  69. eegdash-0.0.1.dist-info/METADATA +72 -0
  70. eegdash-0.0.1.dist-info/RECORD +72 -0
  71. eegdash-0.0.1.dist-info/WHEEL +5 -0
  72. eegdash-0.0.1.dist-info/top_level.txt +1 -0
File without changes
@@ -0,0 +1,3 @@
1
+ from eegdash.SignalStore.signalstore.store.unit_of_work_provider import UnitOfWorkProvider
2
+
3
+ __all__ = ['UnitOfWorkProvider']
from abc import ABC, abstractmethod

class AbstractReadAdapter(ABC):
    """Base class for adapters that read records from some data source.

    Subclasses implement :meth:`read`, which returns (or yields) an
    iterable of records.  Instances can then be consumed directly with a
    ``for`` loop or with repeated calls to :func:`next`.
    """

    def __iter__(self):
        # Each new iteration starts a fresh read of the underlying source.
        return iter(self.read())

    def __next__(self):
        # Bug fix: the original implementation called self.read() on every
        # __next__ call, restarting the generator each time, so next()
        # always returned the first record and never advanced.  Cache a
        # single iterator so successive next() calls walk the records and
        # StopIteration is eventually raised.
        if getattr(self, '_read_iter', None) is None:
            self._read_iter = iter(self.read())
        return next(self._read_iter)

    @abstractmethod
    def read(self):
        """Return an iterable of records; must be overridden."""
        raise NotImplementedError('AbstractReadAdapter.read() not implemented.')
from signalstore.adapters.read_adapters.abstract_read_adapter import AbstractReadAdapter
import json
from upath import UPath

class SchemaReadAdapter(AbstractReadAdapter):
    """Reads schema records from the JSON files in a directory."""

    def __init__(self, directory):
        self.dir = UPath(directory)

    def read(self):
        """Yield one dict per ``*.json`` file in the directory.

        Each file is expected to conform to the Neuroscikit data model
        schemata.
        """
        for json_path in self.dir.glob('*.json'):
            with open(json_path) as handle:
                yield dict(json.load(handle))
from signalstore.adapters.read_adapters.abstract_read_adapter import AbstractReadAdapter

import yaml

class VocabularyReadAdapter(AbstractReadAdapter):
    """Reads vocabulary records from a single YAML file."""

    def __init__(self, filepath):
        self.filepath = filepath

    def read(self):
        """Yield one record per top-level YAML entry.

        Each top-level mapping entry ``key: value`` becomes a dict whose
        ``name`` field is the key, merged with the entry's own attributes,
        matching the Neuroscikit data model record shape.
        """
        with open(self.filepath) as handle:
            entries = yaml.load(handle, Loader=yaml.FullLoader)
        for name, attributes in entries.items():
            yield {"name": name, **attributes}
from signalstore.operations.importers.adapters.abstract_read_adapter import AbstractReadAdapter

import openpyxl as xl

class ExcelStudyOrganizerReadAdapter(AbstractReadAdapter):
    """Reads records from an Excel workbook used as a study organizer.

    Each worksheet ("table") is either a *record* table (one record per
    row, unique names in column A) or an *attribute* table (rows of
    name/attribute/value triples folded into one record per name).
    """

    def __init__(self, path):
        self.path = path
        self.wb = xl.load_workbook(path)
        self.ws = self.wb.active
        self.tables = [str(table) for table in self.wb.sheetnames]

    def read(self):
        """Yield every record from every worksheet in the workbook."""
        for table in self.tables:
            yield from self._get_table_records(table)

    def read_records(self):
        """Return all records from all worksheets as a single flat list."""
        records = []
        for table in self.tables:
            records.extend(self._get_table_records(table))
        return records

    def read_records_by_table(self):
        """Return a dict mapping lower-cased table name -> list of records."""
        return {
            str(table).lower(): list(self._get_table_records(table))
            for table in self.tables
        }

    def _classify_table(self, table):
        """Classify a worksheet as ``'record'`` or ``'attribute'``.

        Raises:
            StudyOrganizerKeyError: if the worksheet matches neither
                format, or ambiguously matches both.
        """
        # Unique names in column A indicate a record table.
        has_unique_keys = self._has_unique_keys(table)
        # Exactly the columns name/attribute/value indicate an attribute table.
        is_attr_value_format = self._is_attr_value_format(table)

        if not has_unique_keys and is_attr_value_format:
            return 'attribute'
        elif has_unique_keys and not is_attr_value_format:
            return 'record'
        else:
            error_string = f'Could not classify table {table}.'
            if not has_unique_keys:
                error_string += '\nTable does not have unique keys.'
            if not is_attr_value_format:
                error_string += '\nTable is not in attribute-value format.'
            if has_unique_keys and is_attr_value_format:
                # Bug fix: this ambiguous case previously raised with no
                # explanation appended at all.
                error_string += ('\nTable matches both the record format and '
                                 'the attribute-value format; it is ambiguous.')
            raise StudyOrganizerKeyError(error_string)

    def _has_unique_keys(self, table):
        # True when column A holds no duplicate (case-insensitive) names.
        ws = self.wb[table]
        keys = [str(cell.value).lower() for cell in ws['A'] if cell.value is not None]
        return len(keys) == len(set(keys))

    def _is_attr_value_format(self, table):
        # True when the header row is exactly name/attribute/value.
        # Bug fix: the original indexed columns[1] and columns[2] before
        # checking the column count, raising IndexError on sheets with
        # fewer than three header cells.
        ws = self.wb[table]
        columns = [str(cell.value).lower() for cell in ws[1]]
        return columns == ['name', 'attribute', 'value']

    def _get_table_records(self, table):
        """Yield records from *table* using the reader for its format."""
        table_type = self._classify_table(table)
        readers = {'record': self._get_simple_table_records,
                   'attribute': self._get_attribute_table_records}
        yield from readers[table_type](table)

    def _get_simple_table_records(self, table):
        """Yield one record per non-empty row; the header row names the keys."""
        self.ws = self.wb[table]
        columns = [str(cell.value).lower() for cell in self.ws[1]]
        self._validate_columns(columns, table)
        for row in self.ws.iter_rows(min_row=2):
            record = {column: cell.value
                      for column, cell in zip(columns, row)
                      if cell.value is not None}
            if record:
                # Tag every record with its source worksheet.
                record['type'] = table
                yield record

    def _get_attribute_table_records(self, table):
        """Fold name/attribute/value rows into one record per name.

        Rows whose name cell is empty are skipped.  Bug fix: the original
        raised KeyError when such a row carried a non-None value, because
        no record had been created for the missing name.
        """
        self.ws = self.wb[table]
        columns = [str(cell.value).lower() for cell in self.ws[1]]
        self._validate_columns(columns, table)
        records = {}
        for row in self.ws.iter_rows(min_row=2):
            attr_record = {column: cell.value for column, cell in zip(columns, row)}
            rkey = attr_record.get('name')
            if rkey is None:
                continue
            if rkey not in records:
                records[rkey] = {'name': rkey, 'type': table}
            if attr_record['value'] is not None:
                records[rkey][attr_record['attribute']] = attr_record['value']
        yield from records.values()

    def _validate_columns(self, columns, table_name):
        """Ensure the header row starts with a ``name`` column.

        Robustness: an empty header row previously surfaced as a bare
        IndexError; report it as a StudyOrganizerKeyError instead.
        """
        if not columns:
            raise StudyOrganizerKeyError(f'Table {table_name} has no header columns.')
        if not self._first_column_is_key(columns, table_name):
            raise StudyOrganizerKeyError(
                f'First column must be a "name" column, but is {columns[0]}.')

    def _first_column_is_key(self, columns, table_name):
        # The first header cell must literally be "name" (f-prefix on the
        # original literal was pointless and has been dropped).
        return bool(columns) and str(columns[0]) == 'name'
class StudyOrganizerKeyError(KeyError):
    """Raised when a study-organizer worksheet has malformed keys or columns."""