disdrodb 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- disdrodb/__init__.py +4 -0
- disdrodb/_version.py +2 -2
- disdrodb/api/checks.py +70 -47
- disdrodb/api/configs.py +0 -2
- disdrodb/api/create_directories.py +0 -2
- disdrodb/api/info.py +3 -3
- disdrodb/api/io.py +48 -8
- disdrodb/api/path.py +116 -133
- disdrodb/api/search.py +12 -3
- disdrodb/cli/disdrodb_create_summary.py +113 -0
- disdrodb/cli/disdrodb_create_summary_station.py +11 -1
- disdrodb/cli/disdrodb_run_l0a_station.py +1 -1
- disdrodb/cli/disdrodb_run_l0b_station.py +2 -2
- disdrodb/cli/disdrodb_run_l0c_station.py +2 -2
- disdrodb/cli/disdrodb_run_l1_station.py +2 -2
- disdrodb/cli/disdrodb_run_l2e_station.py +2 -2
- disdrodb/cli/disdrodb_run_l2m_station.py +2 -2
- disdrodb/constants.py +1 -1
- disdrodb/data_transfer/download_data.py +123 -7
- disdrodb/etc/products/L1/global.yaml +1 -1
- disdrodb/etc/products/L2E/5MIN.yaml +1 -0
- disdrodb/etc/products/L2E/global.yaml +1 -1
- disdrodb/etc/products/L2M/GAMMA_GS_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/GAMMA_ML.yaml +1 -1
- disdrodb/etc/products/L2M/LOGNORMAL_GS_LOG_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/LOGNORMAL_GS_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/LOGNORMAL_ML.yaml +8 -0
- disdrodb/etc/products/L2M/global.yaml +11 -3
- disdrodb/issue/writer.py +2 -0
- disdrodb/l0/check_configs.py +49 -16
- disdrodb/l0/configs/LPM/l0a_encodings.yml +2 -2
- disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +2 -2
- disdrodb/l0/configs/LPM/l0b_encodings.yml +2 -2
- disdrodb/l0/configs/LPM/raw_data_format.yml +2 -2
- disdrodb/l0/configs/PWS100/l0b_encodings.yml +1 -0
- disdrodb/l0/configs/SWS250/bins_diameter.yml +108 -0
- disdrodb/l0/configs/SWS250/bins_velocity.yml +83 -0
- disdrodb/l0/configs/SWS250/l0a_encodings.yml +18 -0
- disdrodb/l0/configs/SWS250/l0b_cf_attrs.yml +72 -0
- disdrodb/l0/configs/SWS250/l0b_encodings.yml +155 -0
- disdrodb/l0/configs/SWS250/raw_data_format.yml +148 -0
- disdrodb/l0/l0a_processing.py +10 -5
- disdrodb/l0/l0b_nc_processing.py +10 -6
- disdrodb/l0/l0b_processing.py +92 -72
- disdrodb/l0/l0c_processing.py +369 -251
- disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +8 -1
- disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +2 -2
- disdrodb/l0/readers/LPM/BELGIUM/ULIEGE.py +256 -0
- disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +2 -2
- disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +2 -2
- disdrodb/l0/readers/LPM/GERMANY/DWD.py +491 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +2 -2
- disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
- disdrodb/l0/readers/LPM/KIT/CHWALA.py +2 -2
- disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +107 -12
- disdrodb/l0/readers/LPM/SLOVENIA/UL.py +3 -3
- disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +2 -2
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +5 -14
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +5 -14
- disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL.py +117 -8
- disdrodb/l0/readers/PARSIVEL2/ARM/ARM_PARSIVEL2.py +4 -0
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +10 -14
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +10 -14
- disdrodb/l0/readers/PARSIVEL2/CANADA/UQAM_NC.py +69 -0
- disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +8 -14
- disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +382 -0
- disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +4 -0
- disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +1 -1
- disdrodb/l0/readers/PARSIVEL2/GREECE/NOA.py +127 -0
- disdrodb/l0/readers/PARSIVEL2/ITALY/HYDROX.py +239 -0
- disdrodb/l0/readers/PARSIVEL2/MPI/BCO_PARSIVEL2.py +136 -0
- disdrodb/l0/readers/PARSIVEL2/MPI/BOWTIE.py +220 -0
- disdrodb/l0/readers/PARSIVEL2/NASA/LPVEX.py +109 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +5 -11
- disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +4 -17
- disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +5 -14
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +10 -13
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +10 -13
- disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +3 -0
- disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/PANGASA.py +232 -0
- disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +6 -18
- disdrodb/l0/readers/PARSIVEL2/SPAIN/GRANADA.py +120 -0
- disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +7 -25
- disdrodb/l0/readers/PWS100/AUSTRIA/HOAL.py +321 -0
- disdrodb/l0/readers/SW250/BELGIUM/KMI.py +239 -0
- disdrodb/l1/beard_model.py +31 -129
- disdrodb/l1/fall_velocity.py +156 -57
- disdrodb/l1/filters.py +25 -28
- disdrodb/l1/processing.py +12 -14
- disdrodb/l1_env/routines.py +46 -17
- disdrodb/l2/empirical_dsd.py +6 -0
- disdrodb/l2/processing.py +3 -3
- disdrodb/metadata/checks.py +132 -125
- disdrodb/metadata/geolocation.py +0 -2
- disdrodb/psd/fitting.py +180 -210
- disdrodb/psd/models.py +1 -1
- disdrodb/routines/__init__.py +54 -0
- disdrodb/{l0/routines.py → routines/l0.py} +288 -418
- disdrodb/{l1/routines.py → routines/l1.py} +60 -92
- disdrodb/{l2/routines.py → routines/l2.py} +284 -485
- disdrodb/{routines.py → routines/wrappers.py} +100 -7
- disdrodb/scattering/axis_ratio.py +95 -85
- disdrodb/scattering/permittivity.py +24 -0
- disdrodb/scattering/routines.py +56 -36
- disdrodb/summary/routines.py +147 -45
- disdrodb/utils/archiving.py +434 -0
- disdrodb/utils/attrs.py +2 -0
- disdrodb/utils/cli.py +5 -5
- disdrodb/utils/dask.py +62 -1
- disdrodb/utils/decorators.py +31 -0
- disdrodb/utils/encoding.py +10 -1
- disdrodb/{l2 → utils}/event.py +1 -66
- disdrodb/utils/logger.py +1 -1
- disdrodb/utils/manipulations.py +22 -12
- disdrodb/utils/routines.py +166 -0
- disdrodb/utils/time.py +5 -293
- disdrodb/utils/xarray.py +3 -0
- disdrodb/viz/plots.py +109 -15
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/METADATA +3 -2
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/RECORD +124 -96
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/entry_points.txt +1 -0
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/WHEEL +0 -0
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/top_level.txt +0 -0
@@ -30,6 +30,13 @@ def reader(
     #### Open the netCDF
     ds = open_raw_netcdf_file(filepath=filepath, logger=logger)
 
+    ##------------------------------------------------------------------------.
+    # Check correct dimensions
+    if ds.sizes["particle_diameter"] != 22 or ds.sizes["particle_fall_velocity"] != 20:
+        raise ValueError(
+            f"Dimensions of {filepath} {ds.sizes} do not match the expected dimensions for LPM sensor.",
+        )
+
     ##------------------------------------------------------------------------.
     #### Adapt the dataframe to adhere to DISDRODB L0 standards
     # Define dictionary mapping dataset variables to select and rename
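The added guard above rejects raw netCDF files whose drop spectrum does not have the 22 diameter x 20 fall-velocity bins expected for the LPM sensor. For illustration only, a minimal sketch of the same check on a synthetic xarray Dataset (not DISDRODB code; the variable name is hypothetical):

import numpy as np
import xarray as xr

# Synthetic dataset with the LPM spectrum dimensions (22 diameter x 20 velocity bins)
ds = xr.Dataset(
    {
        "raw_drop_number": (
            ("time", "particle_diameter", "particle_fall_velocity"),
            np.zeros((1, 22, 20)),
        ),
    },
)

# Same guard as in the hunk above: reject unexpected spectrum shapes
if ds.sizes["particle_diameter"] != 22 or ds.sizes["particle_fall_velocity"] != 20:
    raise ValueError(f"Unexpected LPM spectrum dimensions: {dict(ds.sizes)}")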
@@ -62,7 +69,7 @@ def reader(
         "quality_measurement": "quality_index",
         "max_diameter_hail": "max_hail_diameter",
         "laser_status": "laser_status",
-        "
+        "static_signal_status": "static_signal_status",
         "interior_temperature": "temperature_interior",
         "laser_temperature": "laser_temperature",
         "laser_temperature_analog_status": "laser_temperature_analog_status",
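The dictionary above maps the sensor's native variable names to DISDRODB names. As a hedged illustration (hypothetical variables, not the reader's actual code), a mapping of this kind is typically applied by subsetting and renaming an xarray Dataset:

import numpy as np
import xarray as xr

# Hypothetical raw dataset using the sensor's native variable names
ds = xr.Dataset(
    {
        "quality_measurement": ("time", np.array([100, 98])),
        "max_diameter_hail": ("time", np.array([0.0, 0.0])),
    },
)

# Subset and rename to DISDRODB L0 variable names
dict_names = {
    "quality_measurement": "quality_index",
    "max_diameter_hail": "max_hail_diameter",
}
ds = ds[list(dict_names)].rename(dict_names)
print(list(ds.data_vars))  # ['quality_index', 'max_hail_diameter']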
@@ -137,7 +137,7 @@ def reader(
         "quality_index",
         "max_hail_diameter",
         "laser_status",
-        "
+        "static_signal_status",
         "laser_temperature_analog_status",
         "laser_temperature_digital_status",
         "laser_current_analog_status",
@@ -151,7 +151,7 @@ def reader(
         "current_heating_heads_status",
         "current_heating_carriers_status",
         "control_output_laser_power_status",
-        "
+        "reserved_status",
         "temperature_interior",
         "laser_temperature",
         "laser_current_average",
@@ -0,0 +1,256 @@
+#!/usr/bin/env python3
+
+# -----------------------------------------------------------------------------.
+# Copyright (c) 2021-2023 DISDRODB developers
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+# -----------------------------------------------------------------------------.
+"""DISDRODB reader for ULIEGE LPM stations."""
+
+import numpy as np
+import pandas as pd
+
+from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
+from disdrodb.l0.l0a_processing import read_raw_text_file
+from disdrodb.utils.logger import log_error, log_warning
+
+
+def read_txt_file(file, filename, logger):
+    """Parse ULIEGE LPM hourly file."""
+    #### - Define raw data headers
+    column_names = ["TO_PARSE"]
+
+    ##------------------------------------------------------------------------.
+    #### Define reader options
+    # - For more info: https://pandas.pydata.org/docs/reference/api/pandas.read_csv.html
+    reader_kwargs = {}
+
+    # - Define delimiter
+    reader_kwargs["delimiter"] = "\\n"
+
+    # - Avoid first column to become df index !!!
+    reader_kwargs["index_col"] = False
+
+    # - Since column names are passed explicitly, header is set to None
+    reader_kwargs["header"] = None
+
+    # - Number of rows to be skipped at the beginning of the file
+    reader_kwargs["skiprows"] = None
+
+    # - Define behaviour when encountering bad lines
+    reader_kwargs["on_bad_lines"] = "skip"
+
+    # - Define reader engine
+    #   - C engine is faster
+    #   - Python engine is more feature-complete
+    reader_kwargs["engine"] = "python"
+
+    # - Define on-the-fly decompression of on-disk data
+    #   - Available: gzip, bz2, zip
+    reader_kwargs["compression"] = "infer"
+
+    # - Strings to recognize as NA/NaN and replace with standard NA flags
+    #   - Already included: '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN',
+    #     '-NaN', '-nan', '1.#IND', '1.#QNAN', '<NA>', 'N/A',
+    #     'NA', 'NULL', 'NaN', 'n/a', 'nan', 'null'
+    reader_kwargs["na_values"] = ["na", "", "error"]
+
+    ##------------------------------------------------------------------------.
+    #### Read the data
+    df = read_raw_text_file(
+        filepath=file,
+        column_names=column_names,
+        reader_kwargs=reader_kwargs,
+        logger=logger,
+    )
+
+    ##------------------------------------------------------------------------.
+    #### Adapt the dataframe to adhere to DISDRODB L0 standards
+    # Count number of delimiters to identify valid rows
+    df = df[df["TO_PARSE"].str.count(";") == 442]
+
+    # Check there are still valid rows
+    if len(df) == 0:
+        raise ValueError(f"No valid rows in {filename}.")
+
+    # Split by ; delimiter (before raw drop number)
+    df = df["TO_PARSE"].str.split(";", expand=True, n=43)
+
+    # Assign column names
+    column_names = [
+        "id",
+        "sample_interval",
+        "weather_code_synop_4677_5min",  # or "weather_code_synop_4680_5min",
+        "weather_code_metar_4678_5min",
+        "precipitation_rate_5min",
+        "weather_code_synop_4677",  # or "weather_code_synop_4680",
+        "weather_code_metar_4678",
+        "precipitation_rate",
+        "precipitation_accumulated",
+        "sensor_time",
+        # "mor_visibility",
+        # "reflectivity",
+        # "quality_index",
+        # "max_hail_diameter",
+        # "laser_status",
+        "dummy1",
+        "dummy2",
+        # "laser_temperature",
+        "laser_current_average",
+        "control_voltage",
+        "optical_control_voltage_output",
+        # "current_heating_house",
+        # "current_heating_heads",
+        # "current_heating_carriers",
+        "number_particles",
+        "number_particles_internal_data",
+        "number_particles_min_speed",
+        "number_particles_min_speed_internal_data",
+        "number_particles_max_speed",
+        "number_particles_max_speed_internal_data",
+        "number_particles_min_diameter",
+        "number_particles_min_diameter_internal_data",
+        "number_particles_no_hydrometeor",
+        "number_particles_no_hydrometeor_internal_data",
+        # "number_particles_unknown_classification",  # ????
+        # "number_particles_unknown_classification_internal_data",
+        "number_particles_class_1",
+        "number_particles_class_1_internal_data",
+        "number_particles_class_2",
+        "number_particles_class_2_internal_data",
+        "number_particles_class_3",
+        "number_particles_class_3_internal_data",
+        "number_particles_class_4",
+        "number_particles_class_4_internal_data",
+        "number_particles_class_5",
+        "number_particles_class_5_internal_data",
+        "number_particles_class_6",
+        "number_particles_class_6_internal_data",
+        "number_particles_class_7",
+        "number_particles_class_7_internal_data",
+        "number_particles_class_8",
+        "number_particles_class_8_internal_data",
+        "number_particles_class_9",
+        "number_particles_class_9_internal_data",
+        "raw_drop_number",
+    ]
+    df.columns = column_names
+
+    # Deal with case if there are 61 timesteps
+    # - Occurs sometimes when the previous hourly file misses timesteps
+    if len(df) == 61:
+        log_warning(logger=logger, msg=f"{filename} contains 61 timesteps. Dropping the first.")
+        df = df.iloc[1:]
+
+    # Raise error if more than 60 timesteps/rows
+    n_rows = len(df)
+    if n_rows > 60:
+        raise ValueError(f"The hourly file contains {n_rows} timesteps.")
+
+    # Infer and define "time" column
+    start_time_str = filename.split(".")[0]  # '2024020200.txt'
+    start_time = pd.to_datetime(start_time_str, format="%Y%m%d%H")
+
+    # - Define timedelta based on sensor_time
+    dt = pd.to_timedelta(df["sensor_time"] + ":00").to_numpy().astype("m8[s]")
+    dt = dt - dt[0]
+
+    # - Define approximate time
+    df["time"] = start_time + dt
+
+    # - Keep rows where time increment is between 00 and 59 minutes
+    valid_rows = dt <= np.timedelta64(3540, "s")
+    df = df[valid_rows]
+
+    # Drop rows where sample interval is not 60 seconds
+    df = df[df["sample_interval"] == "000060"]
+
+    # Drop rows with invalid raw_drop_number
+    # --> 440 values (22x20 spectrum)
+    # --> 400 values here (20x20 spectrum)
+    df = df[df["raw_drop_number"].astype(str).str.len() == 1599]
+
+    # Deal with old LPM version 20x20 spectrum
+    # - Add 000 in first two velocity bins
+    df["raw_drop_number"] = df["raw_drop_number"] + ";" + ";".join(["000"] * 40)
+
+    # Drop columns not agreeing with DISDRODB L0 standards
+    columns_to_drop = [
+        "sample_interval",
+        "sensor_time",
+        "dummy1",
+        "dummy2",
+        "id",
+    ]
+    df = df.drop(columns=columns_to_drop)
+    return df
+
+
+@is_documented_by(reader_generic_docstring)
+def reader(
+    filepath,
+    logger=None,
+):
+    """Reader."""
+    import zipfile
+
+    ##------------------------------------------------------------------------.
+    # filename = os.path.basename(filepath)
+    # return read_txt_file(file=filepath, filename=filename, logger=logger)
+
+    # ---------------------------------------------------------------------.
+    #### Iterate over all files (aka timesteps) in the daily zip archive
+    # - Each file contains a single timestep!
+    # list_df = []
+    # with tempfile.TemporaryDirectory() as temp_dir:
+    #     # Extract all files
+    #     unzip_file_on_terminal(filepath, temp_dir)
+
+    #     # Walk through extracted files
+    #     for root, _, files in os.walk(temp_dir):
+    #         for filename in sorted(files):
+    #             if filename.endswith(".txt"):
+    #                 full_path = os.path.join(root, filename)
+    #                 try:
+    #                     df = read_txt_file(file=full_path, filename=filename, logger=logger)
+    #                     if df is not None:
+    #                         list_df.append(df)
+    #                 except Exception as e:
+    #                     msg = f"An error occurred while reading {filename}: {e}"
+    #                     log_error(logger=logger, msg=msg, verbose=True)
+
+    list_df = []
+    with zipfile.ZipFile(filepath, "r") as zip_ref:
+        filenames = sorted(zip_ref.namelist())
+        for filename in filenames:
+            if filename.endswith(".txt"):
+                # Open file
+                with zip_ref.open(filename) as file:
+                    try:
+                        df = read_txt_file(file=file, filename=filename, logger=logger)
+                        if df is not None:
+                            list_df.append(df)
+                    except Exception as e:
+                        msg = f"An error occurred while reading {filename}. The error is: {e}"
+                        log_error(logger=logger, msg=msg, verbose=True)
+
+    # Check the zip file contains at least some non-empty files
+    if len(list_df) == 0:
+        raise ValueError(f"{filepath} contains only empty files!")
+
+    # Concatenate all dataframes into a single one
+    df = pd.concat(list_df)
+
+    # ---------------------------------------------------------------------.
+    return df
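In the new ULIEGE reader above, timestamps are rebuilt from the hourly filename (e.g. '2024020200.txt') plus the per-record sensor_time offsets. A small standalone sketch of that logic, using made-up sensor_time values and assuming the "HH:MM" format implied by the appended ":00":

import pandas as pd

# Hypothetical sensor_time values ("HH:MM") from an hourly file named '2024020200.txt'
sensor_time = pd.Series(["00:00", "00:01", "00:02"])
start_time = pd.to_datetime("2024020200", format="%Y%m%d%H")

# Offsets relative to the first record, as in read_txt_file above
dt = pd.to_timedelta(sensor_time + ":00").to_numpy().astype("m8[s]")
dt = dt - dt[0]

print(start_time + dt)  # 2024-02-02 00:00:00, 00:01:00, 00:02:00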
@@ -96,7 +96,7 @@ def reader(
         "quality_index",
         "max_hail_diameter",
         "laser_status",
-        "
+        "static_signal_status",
         "laser_temperature_analog_status",
         "laser_temperature_digital_status",
         "laser_current_analog_status",
@@ -110,7 +110,7 @@ def reader(
         "current_heating_heads_status",
         "current_heating_carriers_status",
         "control_output_laser_power_status",
-        "
+        "reserved_status",
         "temperature_interior",
         "laser_temperature",
         "laser_current_average",
@@ -96,7 +96,7 @@ def reader(
         "quality_index",
         "max_hail_diameter",
         "laser_status",
-        "
+        "static_signal_status",
         "laser_temperature_analog_status",
         "laser_temperature_digital_status",
         "laser_current_analog_status",
@@ -110,7 +110,7 @@ def reader(
         "current_heating_heads_status",
         "current_heating_carriers_status",
         "control_output_laser_power_status",
-        "
+        "reserved_status",
         "temperature_interior",
         "laser_temperature",
         "laser_current_average",