disdrodb 0.1.2__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. disdrodb/__init__.py +64 -34
  2. disdrodb/_config.py +5 -4
  3. disdrodb/_version.py +16 -3
  4. disdrodb/accessor/__init__.py +20 -0
  5. disdrodb/accessor/methods.py +125 -0
  6. disdrodb/api/checks.py +139 -9
  7. disdrodb/api/configs.py +4 -2
  8. disdrodb/api/info.py +10 -10
  9. disdrodb/api/io.py +237 -18
  10. disdrodb/api/path.py +81 -75
  11. disdrodb/api/search.py +6 -6
  12. disdrodb/cli/disdrodb_create_summary_station.py +91 -0
  13. disdrodb/cli/disdrodb_run_l0.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0_station.py +1 -1
  15. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  16. disdrodb/cli/disdrodb_run_l0b_station.py +1 -1
  17. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  18. disdrodb/cli/disdrodb_run_l0c_station.py +1 -1
  19. disdrodb/cli/disdrodb_run_l2e_station.py +1 -1
  20. disdrodb/configs.py +149 -4
  21. disdrodb/constants.py +61 -0
  22. disdrodb/data_transfer/download_data.py +5 -5
  23. disdrodb/etc/configs/attributes.yaml +339 -0
  24. disdrodb/etc/configs/encodings.yaml +473 -0
  25. disdrodb/etc/products/L1/global.yaml +13 -0
  26. disdrodb/etc/products/L2E/10MIN.yaml +12 -0
  27. disdrodb/etc/products/L2E/1MIN.yaml +1 -0
  28. disdrodb/etc/products/L2E/global.yaml +22 -0
  29. disdrodb/etc/products/L2M/10MIN.yaml +12 -0
  30. disdrodb/etc/products/L2M/GAMMA_ML.yaml +8 -0
  31. disdrodb/etc/products/L2M/NGAMMA_GS_LOG_ND_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/NGAMMA_GS_ND_MAE.yaml +6 -0
  33. disdrodb/etc/products/L2M/NGAMMA_GS_Z_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/global.yaml +26 -0
  35. disdrodb/l0/__init__.py +13 -0
  36. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +4 -4
  37. disdrodb/l0/configs/PARSIVEL/l0b_cf_attrs.yml +1 -1
  38. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +3 -3
  39. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +1 -1
  40. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +5 -5
  41. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +3 -3
  42. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +1 -1
  43. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +4 -4
  44. disdrodb/l0/configs/PWS100/raw_data_format.yml +1 -1
  45. disdrodb/l0/l0a_processing.py +30 -30
  46. disdrodb/l0/l0b_nc_processing.py +108 -2
  47. disdrodb/l0/l0b_processing.py +4 -4
  48. disdrodb/l0/l0c_processing.py +5 -13
  49. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_LPM_NC.py +66 -0
  50. disdrodb/l0/readers/LPM/SLOVENIA/{CRNI_VRH.py → UL.py} +3 -0
  51. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +195 -0
  52. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +0 -2
  53. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +4 -1
  54. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +1 -1
  55. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +1 -1
  56. disdrodb/l0/readers/PARSIVEL2/BELGIUM/ILVO.py +168 -0
  57. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +165 -0
  58. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +69 -0
  59. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +255 -134
  60. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +525 -0
  61. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +1 -1
  62. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +9 -7
  63. disdrodb/l0/readers/PARSIVEL2/KIT/BURKINA_FASO.py +1 -1
  64. disdrodb/l0/readers/PARSIVEL2/KIT/TEAMX.py +123 -0
  65. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +120 -0
  66. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +1 -0
  67. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +1 -1
  68. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +126 -0
  69. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_PIPS.py +165 -0
  70. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +1 -1
  71. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +20 -12
  72. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +2 -0
  73. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +144 -0
  74. disdrodb/l0/readers/PARSIVEL2/SPAIN/CR1000DL.py +201 -0
  75. disdrodb/l0/readers/PARSIVEL2/SPAIN/LIAISE.py +137 -0
  76. disdrodb/l0/readers/PARSIVEL2/{NETHERLANDS/DELFT.py → USA/C3WE.py} +65 -85
  77. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +105 -99
  78. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py +151 -0
  79. disdrodb/l0/routines.py +105 -14
  80. disdrodb/l1/__init__.py +5 -0
  81. disdrodb/l1/filters.py +34 -20
  82. disdrodb/l1/processing.py +45 -44
  83. disdrodb/l1/resampling.py +77 -66
  84. disdrodb/l1/routines.py +35 -43
  85. disdrodb/l1_env/routines.py +18 -3
  86. disdrodb/l2/__init__.py +7 -0
  87. disdrodb/l2/empirical_dsd.py +58 -10
  88. disdrodb/l2/event.py +27 -120
  89. disdrodb/l2/processing.py +267 -116
  90. disdrodb/l2/routines.py +618 -254
  91. disdrodb/metadata/standards.py +3 -1
  92. disdrodb/psd/fitting.py +463 -144
  93. disdrodb/psd/models.py +8 -5
  94. disdrodb/routines.py +3 -3
  95. disdrodb/scattering/__init__.py +16 -4
  96. disdrodb/scattering/axis_ratio.py +56 -36
  97. disdrodb/scattering/permittivity.py +486 -0
  98. disdrodb/scattering/routines.py +701 -159
  99. disdrodb/summary/__init__.py +17 -0
  100. disdrodb/summary/routines.py +4120 -0
  101. disdrodb/utils/attrs.py +68 -125
  102. disdrodb/utils/compression.py +30 -1
  103. disdrodb/utils/dask.py +59 -8
  104. disdrodb/utils/dataframe.py +61 -7
  105. disdrodb/utils/directories.py +35 -15
  106. disdrodb/utils/encoding.py +33 -19
  107. disdrodb/utils/logger.py +13 -6
  108. disdrodb/utils/manipulations.py +71 -0
  109. disdrodb/utils/subsetting.py +214 -0
  110. disdrodb/utils/time.py +165 -19
  111. disdrodb/utils/writer.py +20 -7
  112. disdrodb/utils/xarray.py +2 -4
  113. disdrodb/viz/__init__.py +13 -0
  114. disdrodb/viz/plots.py +327 -0
  115. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/METADATA +3 -2
  116. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/RECORD +121 -88
  117. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/entry_points.txt +1 -0
  118. disdrodb/l1/encoding_attrs.py +0 -642
  119. disdrodb/l2/processing_options.py +0 -213
  120. /disdrodb/l0/readers/PARSIVEL/SLOVENIA/{UL_FGG.py → UL.py} +0 -0
  121. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/WHEEL +0 -0
  122. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/licenses/LICENSE +0 -0
  123. {disdrodb-0.1.2.dist-info → disdrodb-0.1.3.dist-info}/top_level.txt +0 -0
disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py (modified)

@@ -17,12 +17,112 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  # -----------------------------------------------------------------------------.
  """DISDRODB reader for ENPC PWS100 raw text data."""
- import zipfile
-
  import pandas as pd

  from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
- from disdrodb.utils.logger import log_error, log_warning
+ from disdrodb.utils.logger import log_error  # , log_warning
+
+
+ def read_txt_file(file, filename, logger):
+     r"""Parse a single txt file within the daily zip file.
+
+     The file starts with \\x020 and ends with \\x03\\r\\n.
+     """
+     # Read file
+     try:
+         txt = file.readline().decode("utf-8")
+     except Exception:
+         log_error(logger=logger, msg=f"{filename} is corrupted", verbose=False)
+         return None
+
+     # Check file is not empty
+     if txt == "":
+         log_error(logger=logger, msg=f"{filename} is empty", verbose=False)
+         return None
+
+     # Remove everything before \x020
+     # - At start it can appear: \x013 0 \x02PSU voltage too low 13.3\x03\r\n\
+     txt = txt.split("\x020 ", 1)[-1]
+
+     # Remove everything after \\x03
+     txt = txt.split("\x03", 1)[0]
+
+     # if "PSU voltage too low" in txt or "volt" in txt:
+     #     log_warning(logger=logger, msg=f"PSU voltage too low in {filename}", verbose=False)
+     #     return None
+
+     # if "Error - message" in txt:
+     #     log_warning(logger=logger, msg=f"Error message in {filename}", verbose=False)
+     #     return None
+
+     # Clean up the line
+     txt = txt.replace(" 00 ", " 0 0 ")
+     txt = txt.replace("  ", " 0 ")
+
+     # Split the cleaned line
+     buf = txt.split(" ")
+
+     # Helper to convert list of floats to comma-separated string
+     def int_list_to_str(lst):
+         return ",".join(f"{int(i)}" for i in lst)
+
+     # Try to get the drop_size distribution:
+     try:
+         drop_size_distribution = int_list_to_str(buf[29:329])  # Drop size distribution (message field 42)
+     except Exception:
+         log_error(logger, msg=f"Corrupted drop_size_distribution field in {filename}", verbose=False)
+         return None
+
+     # Try to get peak_to_pedestal_hist
+     try:
+         peak_to_pedestal_hist = int_list_to_str(buf[1498:1548])
+     except Exception:
+         log_error(
+             logger,
+             msg=f"Corrupted raw_drop_number or peak_to_pedestal_hist field in {filename}",
+             verbose=False,
+         )
+         return None
+
+     # Parse fields
+     data = {
+         "mor_visibility": float(buf[1]),  # Visibility Range (message field 20)
+         "weather_code_synop_4680": float(buf[2]),  # Present Weather Code (WMO) (message field 21)
+         "weather_code_metar_4678": buf[3],  # Present Weather Code (METAR) (message field 22)
+         "weather_code_nws": buf[4],  # Present Weather Code (NWS) (message field 23)
+         "alarms": int_list_to_str(buf[5:21]),  # Alarms (message field (24))
+         "sensor_status": buf[21],  # Fault status of PWS100 (message field 25)
+         "air_temperature": float(buf[22]),  # Temperature (°C) (message field 30)
+         "relative_humidity": float(buf[23]),  # Sampled relative humidity (%) (message field 30)
+         "wetbulb_temperature": float(buf[24]),  # Average wetbulb temperature (°C)(message field 30)
+         "air_temperature_max": float(buf[25]),  # Maximum temperature (°C)(message field 31)
+         "air_temperature_min": float(buf[26]),  # Minimum temperature (°C)(message field 31)
+         "rainfall_rate": float(buf[27]),  # Precipitation rate (mm/h)(message field 40)
+         "rainfall_accumulated": float(buf[28]),  # Precipitation accumulation (mm/h)(message field 41)
+         "drop_size_distribution": drop_size_distribution,  # Drop size distribution (message field 42)
+         "average_drop_velocity": float(buf[329]),  # Average velocity (mm/s)(message field 43)
+         "average_drop_size": float(buf[330]),  # Average size (mm/h)(message field 43)
+         "type_distribution": int_list_to_str(buf[331:342]),  # Type distribution (message field 44)
+         "raw_drop_number": int_list_to_str(buf[342:1498]),  # Size/velocity spectrum (34*34) (message field 47)
+         "peak_to_pedestal_hist": (
+             peak_to_pedestal_hist  # Peak to pedestal ratio distribution histogram (message field 48)
+         ),
+     }
+
+     # Convert to single-row DataFrame
+     df = pd.DataFrame([data])
+
+     # Define datetime "time" column from filename
+     datetime_str = " ".join(filename.replace(".txt", "").split("_")[-6:])
+     df["time"] = pd.to_datetime(datetime_str, format="%Y %m %d %H %M %S")
+
+     # # Drop columns not agreeing with DISDRODB L0 standards
+     # columns_to_drop = [
+     #     "peak_to_pedestal_hist",
+     #     "type_distribution",
+     # ]
+     # df = df.drop(columns=columns_to_drop)
+     return df


  @is_documented_by(reader_generic_docstring)

@@ -31,102 +131,9 @@ def reader(
      logger=None,
  ):
      """Reader."""
+     import zipfile

-     ##------------------------------------------------------------------------.
-     #### Define function to read each txt file inside each daily zip file
-     def read_txt_file(file, filename, logger):  # noqa PLR0911
-         """Parse a single txt file within the daily zip file."""
-         # Read file
-         try:
-             txt = file.readline().decode("utf-8")
-         except Exception:
-             log_warning(logger=logger, msg=f"{filename} is corrupted", verbose=False)
-             return None
-
-         # Check file is not empty
-         if txt == "":
-             log_warning(logger=logger, msg=f"{filename} is empty", verbose=False)
-             return None
-
-         if "PSU voltage too low" in txt or "volt" in txt:
-             log_warning(logger=logger, msg=f"PSU voltage too low in {filename}", verbose=False)
-             return None
-
-         if "Error - message" in txt:
-             log_warning(logger=logger, msg=f"Error message in {filename}", verbose=False)
-             return None
-
-         # Clean up the line
-         txt = txt.replace(" 00 ", " 0 0 ")
-         txt = txt.replace("  ", " 0 ")
-         txt = txt[1:-8]
-
-         # Split the cleaned line
-         buf = txt.split(" ")
-
-         # Helper to convert list of floats to comma-separated string
-         def int_list_to_str(lst):
-             return ",".join(f"{int(i)}" for i in lst)
-
-         # Try to get the drop_size distribution:
-         try:
-             drop_size_distribution = int_list_to_str(buf[30:330])  # Drop size distribution (message field 42)
-         except Exception:
-             log_warning(logger, msg=f"Corrupted drop_size_distribution field in {filename}", verbose=False)
-             return None
-
-         # Try to get peak_to_pedestal_hist
-         try:
-             peak_to_pedestal_hist = int_list_to_str(buf[1499:1549])
-         except Exception:
-             log_warning(
-                 logger,
-                 msg=f"Corrupted raw_drop_number or peak_to_pedestal_hist field in {filename}",
-                 verbose=False,
-             )
-             return None
-         # Parse fields
-         data = {
-             "mor_visibility": float(buf[2]),  # Visibility Range (message field 20)
-             "weather_code_synop_4680": float(buf[3]),  # Present Weather Code (WMO) (message field 21)
-             "weather_code_metar_4678": buf[4],  # Present Weather Code (METAR) (message field 22)
-             "weather_code_nws": buf[5],  # Present Weather Code (NWS) (message field 23)
-             "alarms": int_list_to_str(buf[6:22]),  # Alarms (message field (24))
-             "sensor_status": buf[22],  # Fault status of PWS100 (message field 25)
-             "air_temperature": float(buf[23]),  # Temperature (°C) (message field 30)
-             "relative_humidity": float(buf[24]),  # Sampled relative humidity (%) (message field 30)
-             "wetbulb_temperature": float(buf[25]),  # Average wetbulb temperature (°C)(message field 30)
-             "air_temperature_max": float(buf[26]),  # Maximum temperature (°C)(message field 31)
-             "air_temperature_min": float(buf[27]),  # Minimum temperature (°C)(message field 31)
-             "rainfall_rate": float(buf[28]),  # Precipitation rate (mm/h)(message field 40)
-             "rainfall_accumulated": float(buf[29]),  # Precipitation accumulation (mm/h)(message field 41)
-             "drop_size_distribution": drop_size_distribution,  # Drop size distribution (message field 42)
-             "average_drop_velocity": float(buf[330]),  # Average velocity (mm/s)(message field 43)
-             "average_drop_size": float(buf[331]),  # Average size (mm/h)(message field 43)
-             "type_distribution": int_list_to_str(buf[332:343]),  # Type distribution (message field 44)
-             "raw_drop_number": int_list_to_str(buf[343:1499]),  # Size/velocity spectrum (34*34) (message field 47)
-             "peak_to_pedestal_hist": (
-                 peak_to_pedestal_hist  # Peak to pedestal ratio distribution histogram (message field 48)
-             ),
-         }
-
-         # Convert to single-row DataFrame
-         df = pd.DataFrame([data])
-
-         # Define datetime "time" column from filename
-         datetime_str = " ".join(filename.replace(".txt", "").split("_")[-6:])
-         df["time"] = pd.to_datetime(datetime_str, format="%Y %m %d %H %M %S")
-
-         # # Drop columns not agreeing with DISDRODB L0 standards
-         # columns_to_drop = [
-         #     "peak_to_pedestal_hist",
-         #     "type_distribution",
-         # ]
-         # df = df.drop(columns=columns_to_drop)
-         return df
-
-     # ---------------------------------------------------------------------.
-     #### Iterate over all files (aka timesteps) in the daily zip archive
+     # Iterate over all files (aka timesteps) in the daily zip archive
      # - Each file contain a single timestep !
      list_df = []
      with zipfile.ZipFile(filepath, "r") as zip_ref:

@@ -146,5 +153,4 @@
      # Concatenate all dataframes into a single one
      df = pd.concat(list_df)

-     # ---------------------------------------------------------------------.
      return df
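In short, 0.1.3 lifts read_txt_file to module level, reports unreadable records with log_error instead of log_warning, no longer discards a record just because a PSU-voltage or error message is present, and replaces the txt[1:-8] trimming with explicit splits on the "\x020" header and the "\x03" terminator. Because that split also removes the leading message-id token, every buffer index shifts down by one (buf[2] becomes buf[1], buf[30:330] becomes buf[29:329], and so on). The following minimal sketch illustrates only that index shift; the token values and the 8-character tail are made up for illustration and are not taken from real PWS100 data.

# Hypothetical PWS100-style record: STX + "0 " header, a few space-separated tokens,
# then an assumed 8-character tail (fake checksum "ABCD", ETX, CR, LF).
line = "\x020 43 19995 57.21 ABCD\x03\r\n"

# 0.1.2 cleaning: strip the leading STX and the last 8 characters;
# the "0" header token stays in the buffer, so data starts at buf[2].
old_buf = line[1:-8].split(" ")

# 0.1.3 cleaning: cut everything before "\x020 " and after "\x03";
# the header token is gone, so the same data starts at buf[1].
new_buf = line.split("\x020 ", 1)[-1].split("\x03", 1)[0].split(" ")

# Both versions recover the same token (here standing in for mor_visibility).
assert old_buf[2] == new_buf[1] == "19995"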
disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py (added)

@@ -0,0 +1,151 @@
+ # -----------------------------------------------------------------------------.
+ # Copyright (c) 2021-2023 DISDRODB developers
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
+ # -----------------------------------------------------------------------------.
+ """DISDRODB reader for ENPC PWS100 raw text data."""
+ import zipfile
+
+ import pandas as pd
+
+ from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
+ from disdrodb.utils.logger import log_error  # , log_warning
+
+
+ @is_documented_by(reader_generic_docstring)
+ def reader(
+     filepath,
+     logger=None,
+ ):
+     """Reader."""
+
+     ##------------------------------------------------------------------------.
+     #### Define function to read each txt file inside each daily zip file
+     def read_txt_file(file, filename, logger):
+         r"""Parse a single txt file within the daily zip file.
+
+         The file starts with \x020 and ends with \x03\r\r\n.
+         """
+         # Read file
+         try:
+             txt = file.readline().decode("utf-8")
+         except Exception:
+             log_error(logger=logger, msg=f"{filename} is corrupted", verbose=False)
+             return None
+
+         # Check file is not empty
+         if txt == "":
+             log_error(logger=logger, msg=f"{filename} is empty", verbose=False)
+             return None
+
+         # if "PSU voltage too low" in txt or "volt" in txt:
+         #     log_warning(logger=logger, msg=f"PSU voltage too low in {filename}", verbose=False)
+         #     return None
+
+         # if "Error - message" in txt:
+         #     log_warning(logger=logger, msg=f"Error message in {filename}", verbose=False)
+         #     return None
+
+         # Clean up the line
+         txt = txt.replace(" 00 ", " 0 0 ")
+         txt = txt.replace("  ", " 0 ")
+         txt = txt[1:-8]
+
+         # Split the cleaned line
+         buf = txt.split(" ")
+
+         # Helper to convert list of floats to comma-separated string
+         def int_list_to_str(lst):
+             return ",".join(f"{int(i)}" for i in lst)
+
+         # Try to get the drop_size distribution:
+         try:
+             drop_size_distribution = int_list_to_str(buf[30:330])  # Drop size distribution (message field 42)
+         except Exception:
+             log_error(logger, msg=f"Corrupted drop_size_distribution field in {filename}", verbose=False)
+             return None
+
+         # Try to get peak_to_pedestal_hist
+         try:
+             peak_to_pedestal_hist = int_list_to_str(buf[1499:1549])
+         except Exception:
+             log_error(
+                 logger,
+                 msg=f"Corrupted raw_drop_number or peak_to_pedestal_hist field in {filename}",
+                 verbose=False,
+             )
+             return None
+         # Parse fields
+         data = {
+             "mor_visibility": float(buf[2]),  # Visibility Range (message field 20)
+             "weather_code_synop_4680": float(buf[3]),  # Present Weather Code (WMO) (message field 21)
+             "weather_code_metar_4678": buf[4],  # Present Weather Code (METAR) (message field 22)
+             "weather_code_nws": buf[5],  # Present Weather Code (NWS) (message field 23)
+             "alarms": int_list_to_str(buf[6:22]),  # Alarms (message field (24))
+             "sensor_status": buf[22],  # Fault status of PWS100 (message field 25)
+             "air_temperature": float(buf[23]),  # Temperature (°C) (message field 30)
+             "relative_humidity": float(buf[24]),  # Sampled relative humidity (%) (message field 30)
+             "wetbulb_temperature": float(buf[25]),  # Average wetbulb temperature (°C)(message field 30)
+             "air_temperature_max": float(buf[26]),  # Maximum temperature (°C)(message field 31)
+             "air_temperature_min": float(buf[27]),  # Minimum temperature (°C)(message field 31)
+             "rainfall_rate": float(buf[28]),  # Precipitation rate (mm/h)(message field 40)
+             "rainfall_accumulated": float(buf[29]),  # Precipitation accumulation (mm/h)(message field 41)
+             "drop_size_distribution": drop_size_distribution,  # Drop size distribution (message field 42)
+             "average_drop_velocity": float(buf[330]),  # Average velocity (mm/s)(message field 43)
+             "average_drop_size": float(buf[331]),  # Average size (mm/h)(message field 43)
+             "type_distribution": int_list_to_str(buf[332:343]),  # Type distribution (message field 44)
+             "raw_drop_number": int_list_to_str(buf[343:1499]),  # Size/velocity spectrum (34*34) (message field 47)
+             "peak_to_pedestal_hist": (
+                 peak_to_pedestal_hist  # Peak to pedestal ratio distribution histogram (message field 48)
+             ),
+         }
+
+         # Convert to single-row DataFrame
+         df = pd.DataFrame([data])
+
+         # Define datetime "time" column from filename
+         datetime_str = " ".join(filename.replace(".txt", "").split("_")[-6:])
+         df["time"] = pd.to_datetime(datetime_str, format="%Y %m %d %H %M %S")
+
+         # # Drop columns not agreeing with DISDRODB L0 standards
+         # columns_to_drop = [
+         #     "peak_to_pedestal_hist",
+         #     "type_distribution",
+         # ]
+         # df = df.drop(columns=columns_to_drop)
+         return df
+
+     # ---------------------------------------------------------------------.
+     #### Iterate over all files (aka timesteps) in the daily zip archive
+     # - Each file contain a single timestep !
+     list_df = []
+     with zipfile.ZipFile(filepath, "r") as zip_ref:
+         filenames = sorted(zip_ref.namelist())
+         for filename in filenames:
+             if filename.endswith(".txt"):
+                 # Open file
+                 with zip_ref.open(filename) as f:
+                     try:
+                         df = read_txt_file(file=f, filename=filename, logger=logger)
+                         if df is not None:
+                             list_df.append(df)
+                     except Exception as e:
+                         msg = f"An error occurred while reading {filename}. The error is: {e}."
+                         log_error(logger=logger, msg=msg, verbose=True)
+
+     # Concatenate all dataframes into a single one
+     df = pd.concat(list_df)
+
+     # ---------------------------------------------------------------------.
+     return df
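The new ENPC_PWS100_SIRTA reader keeps the previous structure of ENPC_PWS100 (nested read_txt_file, txt[1:-8] trimming, original field offsets) while adopting log_error and dropping the PSU-voltage and error-message early returns. Like the other DISDRODB L0 readers, it takes the path of a daily zip archive of one-record .txt files and returns a single pandas DataFrame with one row per timestep. A minimal sketch of calling it directly is shown below; the archive path is hypothetical, the direct import assumes the reader module is importable as a package path, and in normal use readers are invoked by the DISDRODB L0 processing chain via the station metadata rather than by hand.

from disdrodb.l0.readers.PWS100.FRANCE.ENPC_PWS100_SIRTA import reader

# Hypothetical daily archive containing one .txt file per timestep (path is illustrative only)
df = reader(filepath="/data/SIRTA/pws100_2024_01_15.zip", logger=None)

# Columns come from read_txt_file, plus the "time" column parsed from each inner filename
print(df[["time", "mor_visibility", "rainfall_rate", "raw_drop_number"]].head())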