seabirdfilehandler 0.4.2__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of seabirdfilehandler may warrant closer review.
- seabirdfilehandler/__init__.py +4 -2
- seabirdfilehandler/bottlefile.py +185 -0
- seabirdfilehandler/bottlelogfile.py +155 -0
- seabirdfilehandler/cnvfile.py +283 -0
- seabirdfilehandler/datafiles.py +259 -0
- seabirdfilehandler/file_collection.py +28 -40
- seabirdfilehandler/parameter.py +29 -3
- seabirdfilehandler/utils.py +53 -0
- seabirdfilehandler/xmlfiles.py +54 -0
- {seabirdfilehandler-0.4.2.dist-info → seabirdfilehandler-0.5.0.dist-info}/METADATA +1 -1
- seabirdfilehandler-0.5.0.dist-info/RECORD +16 -0
- {seabirdfilehandler-0.4.2.dist-info → seabirdfilehandler-0.5.0.dist-info}/WHEEL +1 -1
- seabirdfilehandler/datatablefiles.py +0 -930
- seabirdfilehandler/seabirdfiles.py +0 -210
- seabirdfilehandler-0.4.2.dist-info/RECORD +0 -13
- {seabirdfilehandler-0.4.2.dist-info → seabirdfilehandler-0.5.0.dist-info}/LICENSE +0 -0
seabirdfilehandler/datafiles.py
ADDED
@@ -0,0 +1,259 @@
+from pathlib import Path
+import xmltodict
+import pandas as pd
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class DataFile:
+    """Collection of methods for the SeaBird files that feature some kind of
+    data table that is represented in a pandas dataframe.
+
+    Parameters
+    ----------
+
+    Returns
+    -------
+
+    """
+
+    def __init__(
+        self,
+        path_to_file: Path | str,
+        only_header: bool = False,
+    ):
+        self.path_to_file = Path(path_to_file)
+        self.file_name = self.path_to_file.stem
+        self.file_dir = self.path_to_file.parent
+        self.only_header = only_header
+        self.raw_file_data = []  # the text file input
+        self.header = []  # the full file header
+        self.sbe9_data = []  # device-specific information
+        self.metadata = {}  # non-SeaBird metadata
+        self.metadata_list = []  # unstructured metadata for easier export
+        self.data_table_description = []  # the column names and other info
+        self.sensor_data = []
+        self.sensors = {}  # xml-parsed sensor data
+        self.processing_info = []  # everything after the sensor data
+        self.data = []  # the data table
+        self.file_data = self.raw_file_data  # variable file information
+        self.read_file()
+        self.metadata = self.structure_metadata(self.metadata_list)
+        if len(self.sensor_data) > 0:
+            self.sensors = self.sensor_xml_to_flattened_dict(
+                "".join(self.sensor_data)
+            )
+
+    def __str__(self) -> str:
+        return "\n".join(self.file_data)
+
+    def __repr__(self) -> str:
+        return str(self.path_to_file.absolute())
+
+    def __eq__(self, other) -> bool:
+        return self.file_data == other.file_data
+
+    def read_file(self):
+        """Reads and structures all the different information present in the
+        file. Lists and dictionaries are the data structures of choice. Uses
+        basic prefix checking to distinguish the different header information.
+
+        Parameters
+        ----------
+
+        Returns
+        -------
+
+        """
+        past_sensors = False
+        with self.path_to_file.open("r", encoding="latin-1") as file:
+            for line in file:
+                self.raw_file_data.append(line)
+                line_prefix = line[:2]
+                if line_prefix == "* ":
+                    self.header.append(line)
+                    self.sbe9_data.append(line[2:])
+                elif line_prefix == "**":
+                    self.header.append(line)
+                    self.metadata_list.append(line[3:])
+                elif line_prefix == "# ":
+                    self.header.append(line)
+                    if line[2:].strip()[0] == "<":
+                        self.sensor_data.append(line[2:])
+                        past_sensors = True
+                    else:
+                        if past_sensors:
+                            self.processing_info.append(line[2:])
+                        else:
+                            self.data_table_description.append(line[2:])
+                else:
+                    if line.startswith("*END*"):
+                        self.header.append(line)
+                        if self.only_header:
+                            break
+                    else:
+                        self.data.append(line)
+
+    def sensor_xml_to_flattened_dict(
+        self, sensor_data: str
+    ) -> list[dict] | dict:
+        """Reads the pure xml sensor input and creates a multilevel
+        dictionary, dropping the first two dictionaries, as they are
+        single-entry only.
+
+        Parameters
+        ----------
+
+        Returns
+        -------
+
+        """
+        full_sensor_dict = xmltodict.parse(sensor_data, process_comments=True)
+        try:
+            sensors = full_sensor_dict["Sensors"]["sensor"]
+        except KeyError as error:
+            logger.error(f"XML is not formatted as expected: {error}")
+            return full_sensor_dict
+        else:
+            # create a tidied version of the xml-parsed sensor dict
+            tidied_sensor_list = []
+            for entry in sensors:
+                # use the comment value as type descriptor
+                comment = entry["#comment"]
+                split_comment = comment.split(",")
+                new_entry = split_comment[1].strip()
+                if split_comment[-1] == " 2":
+                    new_entry += " 2"
+                # remove the second-level dict
+                calibration_info = list(entry.values())[-1]
+                try:
+                    new_dict = {
+                        "Channel": entry["@Channel"],
+                        "SensorName": new_entry,
+                        **calibration_info,
+                    }
+                except TypeError:
+                    new_dict = {
+                        "Channel": entry["@Channel"],
+                        "SensorName": new_entry,
+                        "Info": calibration_info,
+                    }
+                tidied_sensor_list.append(new_dict)
+            return tidied_sensor_list
+
+    def structure_metadata(self, metadata_list: list) -> dict:
+        """Creates a dictionary to store the metadata that is added by using
+        werum's dship API.
+
+        Parameters
+        ----------
+        metadata_list: list :
+            a list of the individual lines of metadata found in the file
+
+        Returns
+        -------
+        a dictionary of the lines of metadata divided into key-value pairs
+        """
+        out_dict = {}
+        for line in metadata_list:
+            try:
+                (key, val) = line.split("=")
+            except ValueError:
+                out_dict["text"] = line
+            else:
+                out_dict[key.strip()] = val.strip()
+        return out_dict
+
+    def define_output_path(
+        self,
+        file_path: Path | str | None = None,
+        file_name: str | None = None,
+        file_type: str = ".csv",
+    ) -> Path:
+        """Creates a Path object holding the desired output path.
+
+        Parameters
+        ----------
+        file_path : Path :
+            directory the file sits in (Default value = self.file_dir)
+        file_name : str :
+            the original file name (Default value = self.file_name)
+        file_type : str :
+            the output file type (Default = '.csv')
+
+        Returns
+        -------
+        a Path object consisting of the full path of the new file
+
+        """
+        file_path = self.file_dir if file_path is None else file_path
+        file_name = self.file_name if file_name is None else file_name
+        if file_type[0] != ".":
+            file_type = "." + file_type
+        return Path(file_path).joinpath(file_name).with_suffix(file_type)
+
+    def to_csv(
+        self,
+        selected_columns: list | None = None,
+        with_header: bool = True,
+        output_file_path: Path | str | None = None,
+        output_file_name: str | None = None,
+    ):
+        """Writes a csv from the current dataframe. Takes a list of columns to
+        use, a boolean for writing the header and the output file parameters.
+
+        Parameters
+        ----------
+        selected_columns : list :
+            a list of columns to include in the csv
+            (Default value = self.df.columns)
+        with_header : bool :
+            indicates whether the header shall appear in the output
+            (Default value = True)
+        output_file_path : Path :
+            file directory (Default value = None)
+        output_file_name : str :
+            original file name (Default value = None)
+
+        Returns
+        -------
+
+        """
+        selected_columns = (
+            self.df.columns if selected_columns is None else selected_columns
+        )
+        df = self.df[selected_columns].reset_index(drop=True)
+        new_file_path = self.define_output_path(
+            output_file_path, output_file_name
+        )
+        if with_header:
+            with open(new_file_path, "w") as file:
+                for line in self.header:
+                    file.write(line)
+            df.to_csv(new_file_path, index=False, mode="a")
+        else:
+            df.to_csv(new_file_path, index=False, mode="w")
+        logger.info(f"Wrote file {self.path_to_file} to {new_file_path}.")
+
+    def selecting_columns(
+        self,
+        list_of_columns: list | str,
+        df: pd.DataFrame | None = None,
+    ):
+        """Alters the dataframe to only hold the given columns.
+
+        Parameters
+        ----------
+        list_of_columns : list or str : a collection of columns
+        df : pandas.DataFrame :
+            Dataframe (Default value = None)
+
+        Returns
+        -------
+
+        """
+        df = self.df if df is None else df
+        # ensure that the input is a list, so that isin() can do its job
+        if isinstance(list_of_columns, str):
+            list_of_columns = [list_of_columns]
+        if isinstance(df, pd.DataFrame):
+            self.df = df[list_of_columns].reset_index(drop=True)
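The read_file parser above leans entirely on Sea-Bird's line-prefix conventions: "* " lines hold SBE 9 device output, "**" lines hold externally added metadata, "# " lines hold either the data table description, the embedded sensor XML (first character "<"), or the post-sensor processing history, and everything after "*END*" belongs to the data table. A minimal sketch of that behavior; the file name and header content are invented for illustration:

from pathlib import Path
from seabirdfilehandler.datafiles import DataFile

# a toy header following the prefix conventions that read_file checks for
Path("toy.cnv").write_text(
    "* Sea-Bird SBE 9 Data File:\n"
    "** Station = 42\n"
    "# name 0 = prDM: Pressure, Digiquartz [db]\n"
    "*END*\n"
    "      1.0\n",
    encoding="latin-1",
)

toy = DataFile("toy.cnv")
assert toy.metadata == {"Station": "42"}   # parsed from the '**' line
assert len(toy.sbe9_data) == 1             # the '* ' device line
assert len(toy.data) == 1                  # everything after *END*

Note that DataFile itself never sets self.df; to_csv and selecting_columns assume a subclass (such as CnvFile) has built the dataframe first.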
seabirdfilehandler/file_collection.py
CHANGED
@@ -1,11 +1,16 @@
 from pathlib import Path
 import logging
 from collections import UserList
-from typing import Type
+from typing import Callable, Type
 import pandas as pd
 import numpy as np
-from seabirdfilehandler import
-
+from seabirdfilehandler import (
+    CnvFile,
+    BottleFile,
+    BottleLogFile,
+)
+from seabirdfilehandler import DataFile
+from seabirdfilehandler.utils import get_unique_sensor_data
 
 logger = logging.getLogger(__name__)
 
@@ -28,28 +33,26 @@ class FileCollection(UserList):
         self,
         path_to_files: str | Path,
         file_suffix: str,
-        pattern: str | None = None,
         only_metadata: bool = False,
+        sorting_key: Callable | None = None,
     ):
         super().__init__()
         self.path_to_files = Path(path_to_files)
         self.file_suffix = file_suffix.strip(".")
-        self.file_type: Type[
+        self.file_type: Type[DataFile]
         self.extract_file_type()
         self.individual_file_paths = []
-        self.collect_files()
-
-        # TODO: implement pattern handling
-        self.pattern = pattern
-        else:
-            self.load_files(only_metadata)
+        self.collect_files(sorting_key=sorting_key)
+        self.load_files(only_metadata)
         if not only_metadata:
-            if self.file_type ==
+            if self.file_type == DataFile:
                 self.df_list = self.get_dataframes()
                 self.df = self.get_collection_dataframe(self.df_list)
             if self.file_type == CnvFile:
                 self.data_meta_info = self.get_data_table_meta_info()
-                self.sensor_data =
+                self.sensor_data = get_unique_sensor_data(
+                    [file.sensors for file in self.data]
+                )
 
     def __str__(self):
         return "\n".join(self.data)
@@ -66,13 +69,19 @@ class FileCollection(UserList):
             self.file_type = value
             break
         else:
-            self.file_type =
+            self.file_type = DataFile
 
-    def collect_files(
+    def collect_files(
+        self,
+        sorting_key: Callable | None = lambda file: int(
+            file.stem.split("_")[4]
+        ),
+    ):
         """ """
-
-        self.
-
+        self.individual_file_paths = sorted(
+            self.path_to_files.rglob(f"*{self.file_suffix}"),
+            key=sorting_key,
+        )
 
     def load_files(self, only_metadata: bool = False):
         """ """
@@ -246,25 +255,4 @@ class FileCollection(UserList):
 
     def get_data_table_meta_info(self) -> list[list[dict]]:
         """ """
-        return [file.
-
-    def get_sensor_data(self) -> list[tuple[list[dict]]]:
-        """ """
-        unique = []
-        last_unique = None
-        for file in [file for file in self.data]:
-            cast_sensors = file.sensors
-            if last_unique is None:
-                unique.append((file.file_name, cast_sensors))
-            else:
-                differing_dicts = [
-                    current_dict
-                    for last_dict, current_dict in zip(
-                        last_unique, cast_sensors
-                    )
-                    if current_dict != last_dict
-                ]
-                if differing_dicts:
-                    unique.append((file.file_name, differing_dicts))
-                last_unique = cast_sensors
-        return unique
+        return [file.parameters.metadata for file in self.data]
seabirdfilehandler/parameter.py
CHANGED
@@ -113,6 +113,20 @@ class Parameters(UserDict):
         )
         return parameter_dict
 
+    def _form_data_table_info(self) -> list:
+        """Recreates the data table descriptions, like column names and spans,
+        from the structured dictionaries these values were stored in."""
+        new_table_info = []
+        for key, value in self.data_table_stats.items():
+            new_table_info.append(f"{key} = {value}\n")
+        for index, (name, _) in enumerate(self.data_table_names_and_spans):
+            new_table_info.append(f"name {index} = {name}\n")
+        for index, (_, span) in enumerate(self.data_table_names_and_spans):
+            new_table_info.append(f"span {index} = {span}\n")
+        for key, value in self.data_table_misc.items():
+            new_table_info.append(f"{key} = {value}\n")
+        return new_table_info
+
     def differentiate_table_description(self):
         """
         The original method that structures data table metadata.
@@ -144,7 +158,10 @@ class Parameters(UserDict):
             (name, span)
             for name, span in zip(column_names, column_value_spans)
         ]
-        self.data_table_misc =
+        self.data_table_misc = {
+            line.split("=")[0].strip(): line.split("=")[1].strip()
+            for line in post
+        }
@@ -201,7 +218,6 @@ class Parameters(UserDict):
             data = np.full(
                 fill_value=data,
                 shape=self.full_data_array.shape[0],
-                dtype=type(data),
             )
             parameter = Parameter(data=data, metadata=metadata)
             self.add_parameter(parameter)
@@ -263,7 +279,17 @@ class Parameters(UserDict):
         ).T
         columns = [parameter.name for parameter in self.get_parameter_list()]
         assert data.shape[1] == len(columns)
-
+        df = pd.DataFrame(data=data, columns=columns)
+        for column in df.columns:
+            if column.lower() not in [
+                "latitude",
+                "longitude",
+                "event",
+                "cast",
+                "flag",
+            ]:
+                df[column] = df[column].astype("float64")
+        return df
 
     def with_name_type(self, name_type: str = "shortname"):
         """
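The data_table_misc comprehension above assumes exactly one "=" per description line, as in the usual .cnv trailer entries; a quick illustrative check:

line = "bad_flag = -9.990e-29"
key, value = line.split("=")[0].strip(), line.split("=")[1].strip()
assert (key, value) == ("bad_flag", "-9.990e-29")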
seabirdfilehandler/utils.py
ADDED
@@ -0,0 +1,53 @@
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def get_unique_sensor_data(
+    sensor_data: list[list[dict]],
+) -> list[tuple[list[dict]]]:
+    """
+    Returns all the unique sensors and their configuration used in the given
+    collection of sensor data. These will typically be parsed from xml inside
+    .cnv or .xmlcon files.
+    If, for example, the first oxygen sensor has been replaced after the 8th
+    cast, this shows up in the output structure as a second tuple, holding the
+    number 8 and the individual sensor information of the new oxygen sensor.
+
+    Parameters
+    ----------
+    sensor_data:
+        The structure of xml-parsed dicts inside two organizing lists.
+
+    Returns
+    -------
+    The input structure stripped down to unique sensor data, with each entry
+    paired with the index at which the new sensor first appeared.
+
+    """
+    unique = []
+    last_unique = None
+    for index, individual_sensor_data in enumerate(sensor_data):
+        if last_unique is None:
+            unique.append((index, individual_sensor_data))
+        else:
+            differing_dicts = [
+                current_dict
+                for last_dict, current_dict in zip(
+                    last_unique, individual_sensor_data
+                )
+                if current_dict != last_dict
+            ]
+            if differing_dicts:
+                unique.append((index, differing_dicts))
+        last_unique = individual_sensor_data
+    return unique
+
+
+class UnexpectedFileFormat(Exception):
+    def __init__(self, file_type: str, error: str) -> None:
+        message = f"{file_type} is not formatted as expected: {error}"
+        logger.error(message)
+        super().__init__(message)
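get_unique_sensor_data compares each cast's sensor list against the previously seen one and only records the entries that changed, together with the cast index. A small hand-made illustration:

from seabirdfilehandler.utils import get_unique_sensor_data

# invented sensor dicts: the serial number changes from cast index 2 onwards
cast_a = [{"Channel": "1", "SensorName": "Oxygen", "SerialNumber": "0815"}]
cast_b = [{"Channel": "1", "SensorName": "Oxygen", "SerialNumber": "0816"}]

unique = get_unique_sensor_data([cast_a, cast_a, cast_b])
# -> [(0, cast_a), (2, [{'Channel': '1', ..., 'SerialNumber': '0816'}])]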
seabirdfilehandler/xmlfiles.py
CHANGED
@@ -4,6 +4,8 @@ import xml.etree.ElementTree as ET
 import json
 import xmltodict
 
+from seabirdfilehandler.utils import UnexpectedFileFormat
+
 
 class XMLFile(UserDict):
     """
@@ -78,6 +80,58 @@ class XMLCONFile(XMLFile):
 
     def __init__(self, path_to_file):
         super().__init__(path_to_file)
+        self.sensor_info = self.get_sensor_info()
+
+    def get_sensor_info(self) -> list[dict]:
+        """
+        Creates a multilevel dictionary, dropping the first four dictionaries,
+        to retrieve pure sensor information.
+
+        Returns
+        -------
+        A list of the individual sensor information, stored in dictionaries
+
+        """
+        try:
+            sensors = self.data["SBE_InstrumentConfiguration"]["Instrument"][
+                "SensorArray"
+            ]["Sensor"]
+        except KeyError as error:
+            raise UnexpectedFileFormat("XMLCON", error)
+        else:
+            # create a tidied version of the xml-parsed sensor dict
+            sensor_names = []
+            tidied_sensor_list = []
+            for entry in sensors:
+                sensor_key = list(entry.keys())[-1]
+                if not sensor_key.endswith(("Sensor", "Meter")):
+                    continue
+                sensor_name = sensor_key.removesuffix("Sensor")
+                # the wetlab sensors feature a suffix _Sensor
+                sensor_name = sensor_name.removesuffix("_")
+                # assuming that the first sensor in the xmlcon is also on the
+                # first sensor strand, the second occurrence of the name is
+                # suffixed with '2'
+                if sensor_name in sensor_names:
+                    sensor_name += "2"
+                sensor_names.append(sensor_name)
+                # move the calibration info one dictionary level up
+                calibration_info = entry[sensor_key]
+                # build the new dictionary
+                try:
+                    new_dict = {
+                        "Channel": str(int(entry["@index"]) + 1),
+                        "SensorName": sensor_name,
+                        **calibration_info,
+                    }
+                except TypeError:
+                    new_dict = {
+                        "Channel": entry["@Channel"],
+                        "SensorName": sensor_name,
+                        "Info": calibration_info,
+                    }
+                tidied_sensor_list.append(new_dict)
+            return tidied_sensor_list
 
 
 class PsaFile(XMLFile):
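Each entry of sensor_info ends up as a flat dict with Channel, SensorName, and the calibration fields promoted one level up. A minimal sketch, assuming a local configuration file named "station.xmlcon":

from seabirdfilehandler.xmlfiles import XMLCONFile

xmlcon = XMLCONFile("station.xmlcon")  # hypothetical file name
for sensor in xmlcon.sensor_info:
    # e.g. "1 Temperature", "2 Conductivity", ...
    print(sensor["Channel"], sensor["SensorName"])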
seabirdfilehandler-0.5.0.dist-info/RECORD
ADDED
@@ -0,0 +1,16 @@
+seabirdfilehandler/__init__.py,sha256=5JTzYE3oRdrxkC9_etAnFQ1cy10PHtpmesdR6n5PoPQ,192
+seabirdfilehandler/bottlefile.py,sha256=QBUqtKhD-dUPbgc_sz8NOGEFFkAjL3g5r1oAsqQnUcQ,6063
+seabirdfilehandler/bottlelogfile.py,sha256=CjBeITQS27Ar80bfxguoAnVkYxI1ioAiVTnlvwevw7E,4448
+seabirdfilehandler/cnvfile.py,sha256=-mCuQX76uuWIETljem1DjzDbZ5eNIUucjoHejTQu_sU,9685
+seabirdfilehandler/datafiles.py,sha256=z27PZJBvEbl1aFXpDQ0QTM_yR5NTaNK-HC9_z3t7zuM,8991
+seabirdfilehandler/dataframe_meta_accessor.py,sha256=x4mSEN49us6Ezzjdt41fl5Ry8IJR09ORrZ1roOIJbyc,6439
+seabirdfilehandler/file_collection.py,sha256=qkEdlI-hcoyuOdmgTr8wdAr1mXXkVuKkF9J4j2-v3kY,6882
+seabirdfilehandler/logging.yaml,sha256=mXxbhJPio3OGaukTpc3rLGA8Ywq1DNqp0Vn5YCbH6jY,459
+seabirdfilehandler/parameter.py,sha256=UuwFzege94sqPt0kOjEqtMGGol4hjuFjj2_EH7o0pzA,14374
+seabirdfilehandler/utils.py,sha256=5KXdB8Hdv65dv5tPyXxNMct1mCEOyA3S8XP54AFAnx0,1745
+seabirdfilehandler/validation_modules.py,sha256=eZ6x0giftUtlxnRMOnK_vCkgccdwUXPrDjajFa-E6n0,4698
+seabirdfilehandler/xmlfiles.py,sha256=L_puQf8eg0ojv85AyEMID4jnwkOlV_fgZP3W5yeSUBY,4668
+seabirdfilehandler-0.5.0.dist-info/LICENSE,sha256=Ifd1VPmYv32oJd2QVh3wIQP9X05vYJlcY6kONz360ws,34603
+seabirdfilehandler-0.5.0.dist-info/METADATA,sha256=jPHzHpxz9OY48QHifHT9crI9a5pxYae1aCw6jAhqtYM,1289
+seabirdfilehandler-0.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+seabirdfilehandler-0.5.0.dist-info/RECORD,,