seabirdfilehandler 0.4.3__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of seabirdfilehandler might be problematic. Click here for more details.
- seabirdfilehandler/__init__.py +4 -2
- seabirdfilehandler/bottlefile.py +185 -0
- seabirdfilehandler/bottlelogfile.py +155 -0
- seabirdfilehandler/cnvfile.py +283 -0
- seabirdfilehandler/datafiles.py +259 -0
- seabirdfilehandler/file_collection.py +19 -18
- seabirdfilehandler/parameter.py +29 -3
- {seabirdfilehandler-0.4.3.dist-info → seabirdfilehandler-0.5.0.dist-info}/METADATA +1 -1
- seabirdfilehandler-0.5.0.dist-info/RECORD +16 -0
- {seabirdfilehandler-0.4.3.dist-info → seabirdfilehandler-0.5.0.dist-info}/WHEEL +1 -1
- seabirdfilehandler/datatablefiles.py +0 -930
- seabirdfilehandler/seabirdfiles.py +0 -210
- seabirdfilehandler-0.4.3.dist-info/RECORD +0 -14
- {seabirdfilehandler-0.4.3.dist-info → seabirdfilehandler-0.5.0.dist-info}/LICENSE +0 -0
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
import xmltodict
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import logging
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(__name__)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class DataFile:
|
|
10
|
+
"""Collection of methods for the SeaBird files that feature some kind of
|
|
11
|
+
data table that is represented in a pandas dataframe.
|
|
12
|
+
|
|
13
|
+
Parameters
|
|
14
|
+
----------
|
|
15
|
+
|
|
16
|
+
Returns
|
|
17
|
+
-------
|
|
18
|
+
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
def __init__(
|
|
22
|
+
self,
|
|
23
|
+
path_to_file: Path | str,
|
|
24
|
+
only_header: bool = False,
|
|
25
|
+
):
|
|
26
|
+
self.path_to_file = Path(path_to_file)
|
|
27
|
+
self.file_name = self.path_to_file.stem
|
|
28
|
+
self.file_dir = self.path_to_file.parent
|
|
29
|
+
self.only_header = only_header
|
|
30
|
+
self.raw_file_data = [] # the text file input
|
|
31
|
+
self.header = [] # the full file header
|
|
32
|
+
self.sbe9_data = [] # device specific information
|
|
33
|
+
self.metadata = {} # non-SeaBird metadata
|
|
34
|
+
self.metadata_list = [] # unstructured metadata for easier export
|
|
35
|
+
self.data_table_description = [] # the column names and other info
|
|
36
|
+
self.sensor_data = []
|
|
37
|
+
self.sensors = {} # xml-parsed sensor data
|
|
38
|
+
self.processing_info = [] # everything after the sensor data
|
|
39
|
+
self.data = [] # the data table
|
|
40
|
+
self.file_data = self.raw_file_data # variable file information
|
|
41
|
+
self.read_file()
|
|
42
|
+
self.metadata = self.structure_metadata(self.metadata_list)
|
|
43
|
+
if len(self.sensor_data) > 0:
|
|
44
|
+
self.sensors = self.sensor_xml_to_flattened_dict(
|
|
45
|
+
"".join(self.sensor_data)
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
def __str__(self) -> str:
|
|
49
|
+
return "/n".join(self.file_data)
|
|
50
|
+
|
|
51
|
+
def __repr__(self) -> str:
|
|
52
|
+
return str(self.path_to_file.absolute())
|
|
53
|
+
|
|
54
|
+
def __eq__(self, other) -> bool:
|
|
55
|
+
return self.file_data == other.file_data
|
|
56
|
+
|
|
57
|
+
def read_file(self):
|
|
58
|
+
"""Reads and structures all the different information present in the
|
|
59
|
+
file. Lists and Dictionaries are the data structures of choice. Uses
|
|
60
|
+
basic prefix checking to distinguish different header information.
|
|
61
|
+
|
|
62
|
+
Parameters
|
|
63
|
+
----------
|
|
64
|
+
|
|
65
|
+
Returns
|
|
66
|
+
-------
|
|
67
|
+
|
|
68
|
+
"""
|
|
69
|
+
past_sensors = False
|
|
70
|
+
with self.path_to_file.open("r", encoding="latin-1") as file:
|
|
71
|
+
for line in file:
|
|
72
|
+
self.raw_file_data.append(line)
|
|
73
|
+
line_prefix = line[:2]
|
|
74
|
+
if line_prefix == "* ":
|
|
75
|
+
self.header.append(line)
|
|
76
|
+
self.sbe9_data.append(line[2:])
|
|
77
|
+
elif line_prefix == "**":
|
|
78
|
+
self.header.append(line)
|
|
79
|
+
self.metadata_list.append(line[3:])
|
|
80
|
+
elif line_prefix == "# ":
|
|
81
|
+
self.header.append(line)
|
|
82
|
+
if line[2:].strip()[0] == "<":
|
|
83
|
+
self.sensor_data.append(line[2:])
|
|
84
|
+
past_sensors = True
|
|
85
|
+
else:
|
|
86
|
+
if past_sensors:
|
|
87
|
+
self.processing_info.append(line[2:])
|
|
88
|
+
else:
|
|
89
|
+
self.data_table_description.append(line[2:])
|
|
90
|
+
else:
|
|
91
|
+
if line.startswith("*END*"):
|
|
92
|
+
self.header.append(line)
|
|
93
|
+
if self.only_header:
|
|
94
|
+
break
|
|
95
|
+
else:
|
|
96
|
+
self.data.append(line)
|
|
97
|
+
|
|
98
|
+
def sensor_xml_to_flattened_dict(
|
|
99
|
+
self, sensor_data: str
|
|
100
|
+
) -> list[dict] | dict:
|
|
101
|
+
"""Reads the pure xml sensor input and creates a multilevel dictionary,
|
|
102
|
+
dropping the first two dictionaries, as they are single entry only
|
|
103
|
+
|
|
104
|
+
Parameters
|
|
105
|
+
----------
|
|
106
|
+
|
|
107
|
+
Returns
|
|
108
|
+
-------
|
|
109
|
+
|
|
110
|
+
"""
|
|
111
|
+
full_sensor_dict = xmltodict.parse(sensor_data, process_comments=True)
|
|
112
|
+
try:
|
|
113
|
+
sensors = full_sensor_dict["Sensors"]["sensor"]
|
|
114
|
+
except KeyError as error:
|
|
115
|
+
logger.error(f"XML is not formatted as expected: {error}")
|
|
116
|
+
return full_sensor_dict
|
|
117
|
+
else:
|
|
118
|
+
# create a tidied version of the xml-parsed sensor dict
|
|
119
|
+
tidied_sensor_list = []
|
|
120
|
+
for entry in sensors:
|
|
121
|
+
# use comment value as type descriptor
|
|
122
|
+
comment = entry["#comment"]
|
|
123
|
+
split_comment = comment.split(",")
|
|
124
|
+
new_entry = split_comment[1].strip()
|
|
125
|
+
if split_comment[-1] == " 2":
|
|
126
|
+
new_entry += " 2"
|
|
127
|
+
# remove second-level dict
|
|
128
|
+
calibration_info = list(entry.values())[-1]
|
|
129
|
+
try:
|
|
130
|
+
new_dict = {
|
|
131
|
+
"Channel": entry["@Channel"],
|
|
132
|
+
"SensorName": new_entry,
|
|
133
|
+
**calibration_info,
|
|
134
|
+
}
|
|
135
|
+
except TypeError:
|
|
136
|
+
new_dict = {
|
|
137
|
+
"Channel": entry["@Channel"],
|
|
138
|
+
"SensorName": new_entry,
|
|
139
|
+
"Info": calibration_info,
|
|
140
|
+
}
|
|
141
|
+
tidied_sensor_list.append(new_dict)
|
|
142
|
+
return tidied_sensor_list
|
|
143
|
+
|
|
144
|
+
def structure_metadata(self, metadata_list: list) -> dict:
|
|
145
|
+
"""Creates a dictionary to store the metadata that is added by using
|
|
146
|
+
werums dship API.
|
|
147
|
+
|
|
148
|
+
Parameters
|
|
149
|
+
----------
|
|
150
|
+
metadata_list: list :
|
|
151
|
+
a list of the individual lines of metadata found in the file
|
|
152
|
+
|
|
153
|
+
Returns
|
|
154
|
+
-------
|
|
155
|
+
a dictionary of the lines of metadata divided into key-value pairs
|
|
156
|
+
"""
|
|
157
|
+
out_dict = {}
|
|
158
|
+
for line in metadata_list:
|
|
159
|
+
try:
|
|
160
|
+
(key, val) = line.split("=")
|
|
161
|
+
except ValueError:
|
|
162
|
+
out_dict["text"] = line
|
|
163
|
+
else:
|
|
164
|
+
out_dict[key.strip()] = val.strip()
|
|
165
|
+
return out_dict
|
|
166
|
+
|
|
167
|
+
def define_output_path(
|
|
168
|
+
self,
|
|
169
|
+
file_path: Path | str | None = None,
|
|
170
|
+
file_name: str | None = None,
|
|
171
|
+
file_type: str = ".csv",
|
|
172
|
+
) -> Path:
|
|
173
|
+
"""Creates a Path object holding the desired output path.
|
|
174
|
+
|
|
175
|
+
Parameters
|
|
176
|
+
----------
|
|
177
|
+
file_path : Path :
|
|
178
|
+
directory the file sits in (Default value = self.file_dir)
|
|
179
|
+
file_name : str :
|
|
180
|
+
the original file name (Default value = self.file_name)
|
|
181
|
+
file_type : str :
|
|
182
|
+
the output file type (Default = '.csv')
|
|
183
|
+
Returns
|
|
184
|
+
-------
|
|
185
|
+
a Path object consisting of the full path of the new file
|
|
186
|
+
|
|
187
|
+
"""
|
|
188
|
+
file_path = self.file_dir if file_path is None else file_path
|
|
189
|
+
file_name = self.file_name if file_name is None else file_name
|
|
190
|
+
if file_type[0] != ".":
|
|
191
|
+
file_type = "." + file_type
|
|
192
|
+
return Path(file_path).joinpath(file_name).with_suffix(file_type)
|
|
193
|
+
|
|
194
|
+
def to_csv(
|
|
195
|
+
self,
|
|
196
|
+
selected_columns: list | None = None,
|
|
197
|
+
with_header: bool = True,
|
|
198
|
+
output_file_path: Path | str | None = None,
|
|
199
|
+
output_file_name: str | None = None,
|
|
200
|
+
):
|
|
201
|
+
"""Writes a csv from the current dataframe. Takes a list of columns to
|
|
202
|
+
use, a boolean for writing the header and the output file parameters.
|
|
203
|
+
|
|
204
|
+
Parameters
|
|
205
|
+
----------
|
|
206
|
+
selected_columns : list :
|
|
207
|
+
a list of columns to include in the csv
|
|
208
|
+
(Default value = self.df.columns)
|
|
209
|
+
with_header : boolean :
|
|
210
|
+
indicating whether the header shall appear in the output
|
|
211
|
+
(Default value = True)
|
|
212
|
+
output_file_path : Path :
|
|
213
|
+
file directory (Default value = None)
|
|
214
|
+
output_file_name : str :
|
|
215
|
+
original file name (Default value = None)
|
|
216
|
+
|
|
217
|
+
Returns
|
|
218
|
+
-------
|
|
219
|
+
|
|
220
|
+
"""
|
|
221
|
+
selected_columns = (
|
|
222
|
+
self.df.columns if selected_columns is None else selected_columns
|
|
223
|
+
)
|
|
224
|
+
df = self.df[selected_columns].reset_index(drop=True)
|
|
225
|
+
new_file_path = self.define_output_path(
|
|
226
|
+
output_file_path, output_file_name
|
|
227
|
+
)
|
|
228
|
+
if with_header:
|
|
229
|
+
with open(new_file_path, "w") as file:
|
|
230
|
+
for line in self.header:
|
|
231
|
+
file.write(line)
|
|
232
|
+
df.to_csv(new_file_path, index=False, mode="a")
|
|
233
|
+
else:
|
|
234
|
+
df.to_csv(new_file_path, index=False, mode="w")
|
|
235
|
+
logger.info(f"Wrote file {self.path_to_file} to {new_file_path}.")
|
|
236
|
+
|
|
237
|
+
def selecting_columns(
|
|
238
|
+
self,
|
|
239
|
+
list_of_columns: list | str,
|
|
240
|
+
df: pd.DataFrame | None = None,
|
|
241
|
+
):
|
|
242
|
+
"""Alters the dataframe to only hold the given columns.
|
|
243
|
+
|
|
244
|
+
Parameters
|
|
245
|
+
----------
|
|
246
|
+
list_of_columns: list or str : a collection of columns
|
|
247
|
+
df : pandas.Dataframe :
|
|
248
|
+
Dataframe (Default value = None)
|
|
249
|
+
|
|
250
|
+
Returns
|
|
251
|
+
-------
|
|
252
|
+
|
|
253
|
+
"""
|
|
254
|
+
df = self.df if df is None else df
|
|
255
|
+
# ensure that the input is a list, so that isin() can do its job
|
|
256
|
+
if isinstance(list_of_columns, str):
|
|
257
|
+
list_of_columns = [list_of_columns]
|
|
258
|
+
if isinstance(df, pd.DataFrame):
|
|
259
|
+
self.df = df[list_of_columns].reset_index(drop=True)
|
|
@@ -1,16 +1,15 @@
|
|
|
1
1
|
from pathlib import Path
|
|
2
2
|
import logging
|
|
3
3
|
from collections import UserList
|
|
4
|
-
from typing import Type
|
|
4
|
+
from typing import Callable, Type
|
|
5
5
|
import pandas as pd
|
|
6
6
|
import numpy as np
|
|
7
7
|
from seabirdfilehandler import (
|
|
8
|
-
SeaBirdFile,
|
|
9
8
|
CnvFile,
|
|
10
9
|
BottleFile,
|
|
11
10
|
BottleLogFile,
|
|
12
11
|
)
|
|
13
|
-
from seabirdfilehandler
|
|
12
|
+
from seabirdfilehandler import DataFile
|
|
14
13
|
from seabirdfilehandler.utils import get_unique_sensor_data
|
|
15
14
|
|
|
16
15
|
logger = logging.getLogger(__name__)
|
|
@@ -34,23 +33,19 @@ class FileCollection(UserList):
|
|
|
34
33
|
self,
|
|
35
34
|
path_to_files: str | Path,
|
|
36
35
|
file_suffix: str,
|
|
37
|
-
pattern: str | None = None,
|
|
38
36
|
only_metadata: bool = False,
|
|
37
|
+
sorting_key: Callable | None = None,
|
|
39
38
|
):
|
|
40
39
|
super().__init__()
|
|
41
40
|
self.path_to_files = Path(path_to_files)
|
|
42
41
|
self.file_suffix = file_suffix.strip(".")
|
|
43
|
-
self.file_type: Type[
|
|
42
|
+
self.file_type: Type[DataFile]
|
|
44
43
|
self.extract_file_type()
|
|
45
44
|
self.individual_file_paths = []
|
|
46
|
-
self.collect_files()
|
|
47
|
-
|
|
48
|
-
# TODO: implement pattern handling
|
|
49
|
-
self.pattern = pattern
|
|
50
|
-
else:
|
|
51
|
-
self.load_files(only_metadata)
|
|
45
|
+
self.collect_files(sorting_key=sorting_key)
|
|
46
|
+
self.load_files(only_metadata)
|
|
52
47
|
if not only_metadata:
|
|
53
|
-
if self.file_type ==
|
|
48
|
+
if self.file_type == DataFile:
|
|
54
49
|
self.df_list = self.get_dataframes()
|
|
55
50
|
self.df = self.get_collection_dataframe(self.df_list)
|
|
56
51
|
if self.file_type == CnvFile:
|
|
@@ -74,13 +69,19 @@ class FileCollection(UserList):
|
|
|
74
69
|
self.file_type = value
|
|
75
70
|
break
|
|
76
71
|
else:
|
|
77
|
-
self.file_type =
|
|
72
|
+
self.file_type = DataFile
|
|
78
73
|
|
|
79
|
-
def collect_files(
|
|
74
|
+
def collect_files(
|
|
75
|
+
self,
|
|
76
|
+
sorting_key: Callable | None = lambda file: int(
|
|
77
|
+
file.stem.split("_")[4]
|
|
78
|
+
),
|
|
79
|
+
):
|
|
80
80
|
""" """
|
|
81
|
-
|
|
82
|
-
self.
|
|
83
|
-
|
|
81
|
+
self.individual_file_paths = sorted(
|
|
82
|
+
self.path_to_files.rglob(f"*{self.file_suffix}"),
|
|
83
|
+
key=sorting_key,
|
|
84
|
+
)
|
|
84
85
|
|
|
85
86
|
def load_files(self, only_metadata: bool = False):
|
|
86
87
|
""" """
|
|
@@ -254,4 +255,4 @@ class FileCollection(UserList):
|
|
|
254
255
|
|
|
255
256
|
def get_data_table_meta_info(self) -> list[list[dict]]:
    """Collects the parameter metadata dictionaries of every file held in
    this collection, in collection order."""
    # 'entry' instead of 'file' to avoid shadowing the builtin
    return [entry.parameters.metadata for entry in self.data]
|
seabirdfilehandler/parameter.py
CHANGED
|
@@ -113,6 +113,20 @@ class Parameters(UserDict):
|
|
|
113
113
|
)
|
|
114
114
|
return parameter_dict
|
|
115
115
|
|
|
116
|
+
def _form_data_table_info(self) -> list:
|
|
117
|
+
"""Recreates the data table descriptions, like column names and spans
|
|
118
|
+
from the structured dictionaries these values were stored in."""
|
|
119
|
+
new_table_info = []
|
|
120
|
+
for key, value in self.data_table_stats.items():
|
|
121
|
+
new_table_info.append(f"{key} = {value}\n")
|
|
122
|
+
for index, (name, _) in enumerate(self.data_table_names_and_spans):
|
|
123
|
+
new_table_info.append(f"name {index} = {name}\n")
|
|
124
|
+
for index, (_, span) in enumerate(self.data_table_names_and_spans):
|
|
125
|
+
new_table_info.append(f"span {index} = {span}\n")
|
|
126
|
+
for key, value in self.data_table_misc.items():
|
|
127
|
+
new_table_info.append(f"{key} = {value}\n")
|
|
128
|
+
return new_table_info
|
|
129
|
+
|
|
116
130
|
def differentiate_table_description(self):
|
|
117
131
|
"""
|
|
118
132
|
The original method that structures data table metadata.
|
|
@@ -144,7 +158,10 @@ class Parameters(UserDict):
|
|
|
144
158
|
(name, span)
|
|
145
159
|
for name, span in zip(column_names, column_value_spans)
|
|
146
160
|
]
|
|
147
|
-
self.data_table_misc =
|
|
161
|
+
self.data_table_misc = {
|
|
162
|
+
line.split("=")[0].strip(): line.split("=")[1].strip()
|
|
163
|
+
for line in post
|
|
164
|
+
}
|
|
148
165
|
|
|
149
166
|
def add_parameter(self, parameter: Parameter):
|
|
150
167
|
"""
|
|
@@ -201,7 +218,6 @@ class Parameters(UserDict):
|
|
|
201
218
|
data = np.full(
|
|
202
219
|
fill_value=data,
|
|
203
220
|
shape=self.full_data_array.shape[0],
|
|
204
|
-
dtype=type(data),
|
|
205
221
|
)
|
|
206
222
|
parameter = Parameter(data=data, metadata=metadata)
|
|
207
223
|
self.add_parameter(parameter)
|
|
@@ -263,7 +279,17 @@ class Parameters(UserDict):
|
|
|
263
279
|
).T
|
|
264
280
|
columns = [parameter.name for parameter in self.get_parameter_list()]
|
|
265
281
|
assert data.shape[1] == len(columns)
|
|
266
|
-
|
|
282
|
+
df = pd.DataFrame(data=data, columns=columns)
|
|
283
|
+
for column in df.columns:
|
|
284
|
+
if column.lower() not in [
|
|
285
|
+
"latitude",
|
|
286
|
+
"longitude",
|
|
287
|
+
"event",
|
|
288
|
+
"cast",
|
|
289
|
+
"flag",
|
|
290
|
+
]:
|
|
291
|
+
df[column].astype("float64")
|
|
292
|
+
return df
|
|
267
293
|
|
|
268
294
|
def with_name_type(self, name_type: str = "shortname"):
|
|
269
295
|
"""
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
seabirdfilehandler/__init__.py,sha256=5JTzYE3oRdrxkC9_etAnFQ1cy10PHtpmesdR6n5PoPQ,192
|
|
2
|
+
seabirdfilehandler/bottlefile.py,sha256=QBUqtKhD-dUPbgc_sz8NOGEFFkAjL3g5r1oAsqQnUcQ,6063
|
|
3
|
+
seabirdfilehandler/bottlelogfile.py,sha256=CjBeITQS27Ar80bfxguoAnVkYxI1ioAiVTnlvwevw7E,4448
|
|
4
|
+
seabirdfilehandler/cnvfile.py,sha256=-mCuQX76uuWIETljem1DjzDbZ5eNIUucjoHejTQu_sU,9685
|
|
5
|
+
seabirdfilehandler/datafiles.py,sha256=z27PZJBvEbl1aFXpDQ0QTM_yR5NTaNK-HC9_z3t7zuM,8991
|
|
6
|
+
seabirdfilehandler/dataframe_meta_accessor.py,sha256=x4mSEN49us6Ezzjdt41fl5Ry8IJR09ORrZ1roOIJbyc,6439
|
|
7
|
+
seabirdfilehandler/file_collection.py,sha256=qkEdlI-hcoyuOdmgTr8wdAr1mXXkVuKkF9J4j2-v3kY,6882
|
|
8
|
+
seabirdfilehandler/logging.yaml,sha256=mXxbhJPio3OGaukTpc3rLGA8Ywq1DNqp0Vn5YCbH6jY,459
|
|
9
|
+
seabirdfilehandler/parameter.py,sha256=UuwFzege94sqPt0kOjEqtMGGol4hjuFjj2_EH7o0pzA,14374
|
|
10
|
+
seabirdfilehandler/utils.py,sha256=5KXdB8Hdv65dv5tPyXxNMct1mCEOyA3S8XP54AFAnx0,1745
|
|
11
|
+
seabirdfilehandler/validation_modules.py,sha256=eZ6x0giftUtlxnRMOnK_vCkgccdwUXPrDjajFa-E6n0,4698
|
|
12
|
+
seabirdfilehandler/xmlfiles.py,sha256=L_puQf8eg0ojv85AyEMID4jnwkOlV_fgZP3W5yeSUBY,4668
|
|
13
|
+
seabirdfilehandler-0.5.0.dist-info/LICENSE,sha256=Ifd1VPmYv32oJd2QVh3wIQP9X05vYJlcY6kONz360ws,34603
|
|
14
|
+
seabirdfilehandler-0.5.0.dist-info/METADATA,sha256=jPHzHpxz9OY48QHifHT9crI9a5pxYae1aCw6jAhqtYM,1289
|
|
15
|
+
seabirdfilehandler-0.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
|
|
16
|
+
seabirdfilehandler-0.5.0.dist-info/RECORD,,
|