disdrodb 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in that public registry.
Files changed (129)
  1. disdrodb/__init__.py +64 -34
  2. disdrodb/_config.py +5 -4
  3. disdrodb/_version.py +16 -3
  4. disdrodb/accessor/__init__.py +20 -0
  5. disdrodb/accessor/methods.py +125 -0
  6. disdrodb/api/checks.py +139 -9
  7. disdrodb/api/configs.py +4 -2
  8. disdrodb/api/info.py +10 -10
  9. disdrodb/api/io.py +237 -18
  10. disdrodb/api/path.py +81 -75
  11. disdrodb/api/search.py +6 -6
  12. disdrodb/cli/disdrodb_create_summary_station.py +91 -0
  13. disdrodb/cli/disdrodb_run_l0.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0_station.py +1 -1
  15. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  16. disdrodb/cli/disdrodb_run_l0b_station.py +1 -1
  17. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  18. disdrodb/cli/disdrodb_run_l0c_station.py +1 -1
  19. disdrodb/cli/disdrodb_run_l2e_station.py +1 -1
  20. disdrodb/configs.py +149 -4
  21. disdrodb/constants.py +61 -0
  22. disdrodb/data_transfer/download_data.py +145 -14
  23. disdrodb/etc/configs/attributes.yaml +339 -0
  24. disdrodb/etc/configs/encodings.yaml +473 -0
  25. disdrodb/etc/products/L1/global.yaml +13 -0
  26. disdrodb/etc/products/L2E/10MIN.yaml +12 -0
  27. disdrodb/etc/products/L2E/1MIN.yaml +1 -0
  28. disdrodb/etc/products/L2E/global.yaml +22 -0
  29. disdrodb/etc/products/L2M/10MIN.yaml +12 -0
  30. disdrodb/etc/products/L2M/GAMMA_ML.yaml +8 -0
  31. disdrodb/etc/products/L2M/NGAMMA_GS_LOG_ND_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/NGAMMA_GS_ND_MAE.yaml +6 -0
  33. disdrodb/etc/products/L2M/NGAMMA_GS_Z_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/global.yaml +26 -0
  35. disdrodb/l0/__init__.py +13 -0
  36. disdrodb/l0/configs/LPM/bins_diameter.yml +3 -3
  37. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +4 -4
  38. disdrodb/l0/configs/PARSIVEL/l0b_cf_attrs.yml +1 -1
  39. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +3 -3
  40. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +1 -1
  41. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +4 -0
  42. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +20 -4
  43. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +44 -3
  44. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +41 -1
  45. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +4 -4
  46. disdrodb/l0/configs/PWS100/raw_data_format.yml +1 -1
  47. disdrodb/l0/l0a_processing.py +30 -30
  48. disdrodb/l0/l0b_nc_processing.py +108 -2
  49. disdrodb/l0/l0b_processing.py +4 -4
  50. disdrodb/l0/l0c_processing.py +5 -13
  51. disdrodb/l0/manuals/SWS250.pdf +0 -0
  52. disdrodb/l0/manuals/VPF730.pdf +0 -0
  53. disdrodb/l0/manuals/VPF750.pdf +0 -0
  54. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_LPM_NC.py +66 -0
  55. disdrodb/l0/readers/LPM/SLOVENIA/{CRNI_VRH.py → UL.py} +3 -0
  56. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +195 -0
  57. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +105 -0
  58. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +128 -0
  59. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +1 -1
  60. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +1 -1
  61. disdrodb/l0/readers/PARSIVEL2/BELGIUM/ILVO.py +168 -0
  62. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +165 -0
  63. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +69 -0
  64. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +255 -134
  65. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +525 -0
  66. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +1 -1
  67. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +9 -7
  68. disdrodb/l0/readers/{PARSIVEL → PARSIVEL2}/KIT/BURKINA_FASO.py +1 -1
  69. disdrodb/l0/readers/PARSIVEL2/KIT/TEAMX.py +123 -0
  70. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +120 -0
  71. disdrodb/l0/readers/PARSIVEL2/{NETHERLANDS/DELFT.py → NCAR/FARM_PARSIVEL2.py} +43 -70
  72. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +1 -1
  73. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +126 -0
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_PIPS.py +165 -0
  75. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +1 -1
  76. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +29 -12
  77. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +69 -0
  78. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +144 -0
  79. disdrodb/l0/readers/PARSIVEL2/SPAIN/CR1000DL.py +201 -0
  80. disdrodb/l0/readers/PARSIVEL2/SPAIN/LIAISE.py +137 -0
  81. disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +146 -0
  82. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +105 -99
  83. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py +151 -0
  84. disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +31 -14
  85. disdrodb/l0/routines.py +105 -14
  86. disdrodb/l1/__init__.py +5 -0
  87. disdrodb/l1/filters.py +34 -20
  88. disdrodb/l1/processing.py +45 -44
  89. disdrodb/l1/resampling.py +77 -66
  90. disdrodb/l1/routines.py +35 -42
  91. disdrodb/l1_env/routines.py +18 -3
  92. disdrodb/l2/__init__.py +7 -0
  93. disdrodb/l2/empirical_dsd.py +58 -10
  94. disdrodb/l2/event.py +27 -120
  95. disdrodb/l2/processing.py +267 -116
  96. disdrodb/l2/routines.py +618 -254
  97. disdrodb/metadata/standards.py +3 -1
  98. disdrodb/psd/fitting.py +463 -144
  99. disdrodb/psd/models.py +8 -5
  100. disdrodb/routines.py +3 -3
  101. disdrodb/scattering/__init__.py +16 -4
  102. disdrodb/scattering/axis_ratio.py +56 -36
  103. disdrodb/scattering/permittivity.py +486 -0
  104. disdrodb/scattering/routines.py +701 -159
  105. disdrodb/summary/__init__.py +17 -0
  106. disdrodb/summary/routines.py +4120 -0
  107. disdrodb/utils/attrs.py +68 -125
  108. disdrodb/utils/compression.py +30 -1
  109. disdrodb/utils/dask.py +59 -8
  110. disdrodb/utils/dataframe.py +63 -9
  111. disdrodb/utils/directories.py +49 -17
  112. disdrodb/utils/encoding.py +33 -19
  113. disdrodb/utils/logger.py +13 -6
  114. disdrodb/utils/manipulations.py +71 -0
  115. disdrodb/utils/subsetting.py +214 -0
  116. disdrodb/utils/time.py +165 -19
  117. disdrodb/utils/writer.py +20 -7
  118. disdrodb/utils/xarray.py +85 -4
  119. disdrodb/viz/__init__.py +13 -0
  120. disdrodb/viz/plots.py +327 -0
  121. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/METADATA +3 -2
  122. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/RECORD +127 -87
  123. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/entry_points.txt +1 -0
  124. disdrodb/l1/encoding_attrs.py +0 -635
  125. disdrodb/l2/processing_options.py +0 -213
  126. /disdrodb/l0/readers/PARSIVEL/SLOVENIA/{UL_FGG.py → UL.py} +0 -0
  127. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/WHEEL +0 -0
  128. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/licenses/LICENSE +0 -0
  129. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/top_level.txt +0 -0
disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py CHANGED
@@ -18,12 +18,19 @@
 # -----------------------------------------------------------------------------.
 """DISDRODB Reader for NOAA PSL RD80 stations."""
 import os
+import re
 
+# Convert ParserWarning into an error
+import warnings
+
+import numpy as np
 import pandas as pd
 
 from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
 from disdrodb.l0.l0a_processing import read_raw_text_file
 
+warnings.simplefilter("error", pd.errors.ParserWarning)
+
 
 def read_new_format(filepath, logger):
     """Read new format."""
@@ -101,6 +108,14 @@ def read_new_format(filepath, logger):
 
     ##------------------------------------------------------------------------.
     #### Adapt the dataframe to adhere to DISDRODB L0 standards
+    # Retrieve date and hour information
+    with open(filepath) as f:
+        date_header = f.readline().strip()
+    match = re.search(r":\s*(\d+)\s*UTC", date_header)
+    if match:
+        date_hour_str = match.group(1)
+    else:
+        raise ValueError("Date information not found.")
 
     # Replace -99.9900 values with NaN
     columns_to_replace = ["Dmax", "RI", "RA", "Wg", "Z", "EF", "N0", "slope"]
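
The header parsing added above replaces the filename-based date extraction removed in the next hunk. A short sketch of how the regex and the `%y%j%H` format (two-digit year, day of year, hour) fit together, using a hypothetical header line:

    import re

    import pandas as pd

    # Hypothetical RD80 header line: the digits before "UTC" encode
    # year (25), day of year (081), and hour (16).
    date_header = "NOAA PSL RD80 record start: 2508116 UTC"
    match = re.search(r":\s*(\d+)\s*UTC", date_header)
    if not match:
        raise ValueError("Date information not found.")
    print(pd.to_datetime(match.group(1), format="%y%j%H"))
    # 2025-03-22 16:00:00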
@@ -116,28 +131,30 @@
 
     # - Convert start/end MM:SS:SSS to timedelta
     def parse_time(t):
-        minutes, seconds, milliseconds = map(int, t.split(":"))
-        return pd.Timedelta(minutes=minutes, seconds=seconds, milliseconds=milliseconds)
+        try:
+            minutes, seconds, milliseconds = map(int, t.split(":"))
+            timedelta = pd.Timedelta(minutes=minutes, seconds=seconds, milliseconds=milliseconds)
+        except Exception:
+            timedelta = pd.Timedelta("NaT")
+        return timedelta
 
     df_time["start"] = df_time["start"].apply(parse_time)
     df_time["end"] = df_time["end"].apply(parse_time)
-    # - Wrap end time if it's less than start time (i.e., crosses 60:00 boundary)
-    # --> 00:00 --> 60:00
-    df_time.loc[df_time["end"] < df_time["start"], "end"] += pd.Timedelta(minutes=60)
-
-    # Compute sample_interval in seconds as integer
-    df["sample_interval"] = (df_time["end"] - df_time["start"]).dt.total_seconds().astype(int)
 
     # Define time
-    # - Extract date-hour
-    filename = os.path.basename(filepath)
-    if filename.startswith("lab") or filename.startswith("bao0") or filename.startswith("mdt0"):
-        date_hour_str = filename[4:11]
-    else:
-        date_hour_str = filename[3:10]
     date_hour = pd.to_datetime(date_hour_str, format="%y%j%H")
     df["time"] = date_hour + df_time["start"]
 
+    # Drop invalid timesteps
+    df_time = df_time[~np.isnan(df["time"])]
+    df = df[~np.isnan(df["time"])]
+
+    # Compute sample_interval in seconds as integer
+    # - Wrap end time if it's less than start time (i.e., crosses 60:00 boundary)
+    # --> 00:00 --> 60:00
+    df_time.loc[df_time["end"] < df_time["start"], "end"] += pd.Timedelta(minutes=60)
+    df["sample_interval"] = (df_time["end"] - df_time["start"]).dt.total_seconds().astype(int)
+
     # Create raw_drop_number column
     bin_columns = ["n" + str(i) for i in range(1, 21)]
     df_arr = df[bin_columns]
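
The reworked `parse_time` degrades malformed `MM:SS:SSS` tokens to `NaT` instead of raising, and the 60-minute wrap now runs only after invalid timesteps are dropped. A quick illustration of both behaviours (values are made up):

    import pandas as pd

    def parse_time(t):
        # Parse "MM:SS:SSS" into a Timedelta; malformed tokens become NaT.
        try:
            minutes, seconds, milliseconds = map(int, t.split(":"))
            return pd.Timedelta(minutes=minutes, seconds=seconds, milliseconds=milliseconds)
        except Exception:
            return pd.Timedelta("NaT")

    print(parse_time("xx:yy:zzz"))  # NaT, instead of a ValueError

    # Interval crossing the 60:00 boundary: wrap the end time forward.
    start, end = parse_time("59:30:000"), parse_time("01:30:000")
    if end < start:
        end += pd.Timedelta(minutes=60)
    print((end - start).total_seconds())  # 120.0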
disdrodb/l0/routines.py CHANGED
@@ -21,14 +21,13 @@
 import datetime
 import logging
 import os
+import shutil
 import time
 from typing import Optional
 
 import dask
 
-from disdrodb.api.checks import check_sensor_name
-
-# Directory
+from disdrodb.api.checks import check_sensor_name, check_station_inputs
 from disdrodb.api.create_directories import (
     create_l0_directory_structure,
     create_logs_directory,
@@ -41,6 +40,7 @@ from disdrodb.api.path import (
     define_l0b_filename,
     define_l0c_filename,
     define_metadata_filepath,
+    define_partitioning_tree,
 )
 from disdrodb.api.search import get_required_product
 from disdrodb.configs import get_data_archive_dir, get_folder_partitioning, get_metadata_archive_dir
@@ -53,7 +53,7 @@ from disdrodb.l0.l0a_processing import (
 )
 from disdrodb.l0.l0b_nc_processing import sanitize_ds
 from disdrodb.l0.l0b_processing import (
-    create_l0b_from_l0a,
+    generate_l0b,
     set_l0b_encodings,
     write_l0b,
 )
@@ -63,6 +63,7 @@ from disdrodb.l0.l0c_processing import (
     retrieve_possible_measurement_intervals,
 )
 from disdrodb.metadata import read_station_metadata
+from disdrodb.utils.attrs import set_disdrodb_attrs
 from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
 
 # Logger
@@ -73,8 +74,6 @@ from disdrodb.utils.logger import (
     log_error,
     log_info,
 )
-
-# log_warning,
 from disdrodb.utils.writer import write_product
 from disdrodb.utils.yaml import read_yaml
 
@@ -124,7 +123,7 @@ def _generate_l0a(
     # Log start processing
     msg = f"{product} processing of {filename} has started."
     log_info(logger=logger, msg=msg, verbose=verbose)
-
+    success_flag = False
     ##------------------------------------------------------------------------.
     ### - Read raw file into a dataframe and sanitize for L0A format
     try:
@@ -144,6 +143,12 @@
         filepath = os.path.join(folder_path, filename)
         write_l0a(df=df, filepath=filepath, force=force, logger=logger, verbose=verbose)
 
+        ##--------------------------------------------------------------------.
+        #### - Define logger file final directory
+        if folder_partitioning != "":
+            log_dst_dir = define_file_folder_path(df, data_dir=logs_dir, folder_partitioning=folder_partitioning)
+            os.makedirs(log_dst_dir, exist_ok=True)
+
         ##--------------------------------------------------------------------.
         # Clean environment
         del df
@@ -151,6 +156,7 @@
         # Log end processing
         msg = f"{product} processing of {filename} has ended."
         log_info(logger=logger, msg=msg, verbose=verbose)
+        success_flag = True
 
     # Otherwise log the error
     except Exception as e:
@@ -161,6 +167,13 @@
     # Close the file logger
     close_logger(logger)
 
+    # Move logger file to correct partitioning directory
+    if success_flag and folder_partitioning != "" and logger_filepath is not None:
+        # Move logger file to correct partitioning directory
+        dst_filepath = os.path.join(log_dst_dir, os.path.basename(logger_filepath))
+        shutil.move(logger_filepath, dst_filepath)
+        logger_filepath = dst_filepath
+
     # Return the logger file path
     return logger_filepath
 
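The success-flag / log-relocation pattern introduced here recurs in `_generate_l0b`, `_generate_l0b_from_nc`, and `_generate_l0c` below. A condensed sketch of the idea (the helper name and arguments are illustrative, not disdrodb API):

    import os
    import shutil

    def relocate_log(logger_filepath, log_dst_dir, success_flag):
        # Only successful runs move their (already closed) log file into the
        # time-partitioned directory; failed runs leave the log where it was
        # created, which keeps errors easy to locate.
        if not success_flag or logger_filepath is None:
            return logger_filepath
        os.makedirs(log_dst_dir, exist_ok=True)
        dst_filepath = os.path.join(log_dst_dir, os.path.basename(logger_filepath))
        shutil.move(logger_filepath, dst_filepath)
        return dst_filepath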
@@ -200,6 +213,7 @@
     # Log start processing
     msg = f"{product} processing of {filename} has started."
     log_info(logger=logger, msg=msg, verbose=verbose)
+    success_flag = False
 
     ##------------------------------------------------------------------------.
     # Retrieve sensor name
@@ -209,11 +223,11 @@
     ##------------------------------------------------------------------------.
     try:
         # Read L0A Apache Parquet file
-        df = read_l0a_dataframe(filepath, logger=logger, verbose=verbose, debugging_mode=debugging_mode)
+        df = read_l0a_dataframe(filepath, debugging_mode=debugging_mode)
 
         # -----------------------------------------------------------------.
         # Create xarray Dataset
-        ds = create_l0b_from_l0a(df=df, metadata=metadata, logger=logger, verbose=verbose)
+        ds = generate_l0b(df=df, metadata=metadata, logger=logger, verbose=verbose)
         # -----------------------------------------------------------------.
         # Write L0B netCDF4 dataset
 
@@ -222,6 +236,12 @@
         filepath = os.path.join(folder_path, filename)
         write_l0b(ds, filepath=filepath, force=force)
 
+        ##--------------------------------------------------------------------.
+        #### - Define logger file final directory
+        if folder_partitioning != "":
+            log_dst_dir = define_file_folder_path(ds, data_dir=logs_dir, folder_partitioning=folder_partitioning)
+            os.makedirs(log_dst_dir, exist_ok=True)
+
         ##--------------------------------------------------------------------.
         # Clean environment
         del ds, df
@@ -229,6 +249,7 @@
         # Log end processing
         msg = f"{product} processing of {filename} has ended."
         log_info(logger=logger, msg=msg, verbose=verbose)
+        success_flag = True
 
     # Otherwise log the error
     except Exception as e:
@@ -239,10 +260,19 @@
     # Close the file logger
     close_logger(logger)
 
+    # Move logger file to correct partitioning directory
+    if success_flag and folder_partitioning != "" and logger_filepath is not None:
+        # Move logger file to correct partitioning directory
+        dst_filepath = os.path.join(log_dst_dir, os.path.basename(logger_filepath))
+        shutil.move(logger_filepath, dst_filepath)
+        logger_filepath = dst_filepath
+
     # Return the logger file path
     return logger_filepath
 
 
+@delayed_if_parallel
+@single_threaded_if_parallel
 def _generate_l0b_from_nc(
     filepath,
     data_dir,
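
`_generate_l0b_from_nc` now carries the same `@delayed_if_parallel` / `@single_threaded_if_parallel` decorators as the other generators. A rough sketch of the delayed-if-parallel pattern (an illustration of the concept, not the disdrodb implementation):

    import functools

    import dask

    def delayed_if_parallel(function):
        # When the caller requests parallel execution, wrap the call in
        # dask.delayed so it becomes a lazy task to be run later with
        # dask.compute(); otherwise execute immediately.
        @functools.wraps(function)
        def wrapper(*args, parallel=False, **kwargs):
            if parallel:
                return dask.delayed(function)(*args, **kwargs)
            return function(*args, **kwargs)
        return wrapper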
@@ -282,6 +312,7 @@ def _generate_l0b_from_nc(
     # Log start processing
     msg = f"{product} processing of {filename} has started."
     log_info(logger=logger, msg=msg, verbose=verbose)
+    success_flag = False
 
     ##------------------------------------------------------------------------.
     ### - Read raw netCDF and sanitize for L0B format
@@ -303,6 +334,12 @@
         filepath = os.path.join(folder_path, filename)
         write_l0b(ds, filepath=filepath, force=force)
 
+        ##--------------------------------------------------------------------.
+        #### - Define logger file final directory
+        if folder_partitioning != "":
+            log_dst_dir = define_file_folder_path(ds, data_dir=logs_dir, folder_partitioning=folder_partitioning)
+            os.makedirs(log_dst_dir, exist_ok=True)
+
         ##--------------------------------------------------------------------.
         # Clean environment
         del ds
@@ -310,6 +347,7 @@
         # Log end processing
         msg = f"L0B processing of {filename} has ended."
         log_info(logger=logger, msg=msg, verbose=verbose)
+        success_flag = True
 
     # Otherwise log the error
     except Exception as e:
@@ -320,6 +358,13 @@
     # Close the file logger
     close_logger(logger)
 
+    # Move logger file to correct partitioning directory
+    if success_flag and folder_partitioning != "" and logger_filepath is not None:
+        # Move logger file to correct partitioning directory
+        dst_filepath = os.path.join(log_dst_dir, os.path.basename(logger_filepath))
+        shutil.move(logger_filepath, dst_filepath)
+        logger_filepath = dst_filepath
+
     # Return the logger file path
     return logger_filepath
 
@@ -358,6 +403,7 @@
     # Log start processing
     msg = f"{product} processing for {day} has started."
     log_info(logger=logger, msg=msg, verbose=verbose)
+    success_flag = False
 
     ##------------------------------------------------------------------------.
     ### Core computation
@@ -388,21 +434,35 @@
 
         # Set encodings
         ds = set_l0b_encodings(ds=ds, sensor_name=sensor_name)
+        # Update global attributes
+        ds = set_disdrodb_attrs(ds, product=product)
 
-        # Define filepath
+        # Define product filepath
         filename = define_l0c_filename(ds, campaign_name=campaign_name, station_name=station_name)
         folder_path = define_file_folder_path(ds, data_dir=data_dir, folder_partitioning=folder_partitioning)
         filepath = os.path.join(folder_path, filename)
 
         # Write to disk
-        write_product(ds, product=product, filepath=filepath, force=force)
+        write_product(ds, filepath=filepath, force=force)
 
         # Clean environment
         del ds
 
+        ##--------------------------------------------------------------------.
+        #### - Define logger file final directory
+        if folder_partitioning != "":
+            print(day)
+            dirtree = define_partitioning_tree(
+                time=datetime.datetime.strptime("2022-03-22", "%Y-%m-%d"),
+                folder_partitioning=folder_partitioning,
+            )
+            log_dst_dir = os.path.join(logs_dir, dirtree)
+            os.makedirs(log_dst_dir, exist_ok=True)
+
         # Log end processing
         msg = f"{product} processing for {day} has ended."
         log_info(logger=logger, msg=msg, verbose=verbose)
+        success_flag = True
 
         ##--------------------------------------------------------------------.
     # Otherwise log the error
@@ -414,6 +474,13 @@
     # Close the file logger
     close_logger(logger)
 
+    # Move logger file to correct partitioning directory
+    if success_flag and folder_partitioning != "" and logger_filepath is not None:
+        # Move logger file to correct partitioning directory
+        dst_filepath = os.path.join(log_dst_dir, os.path.basename(logger_filepath))
+        shutil.move(logger_filepath, dst_filepath)
+        logger_filepath = dst_filepath
+
     # Return the logger file path
     return logger_filepath
 
@@ -474,6 +541,15 @@ def run_l0a_station(
     data_archive_dir = get_data_archive_dir(data_archive_dir)
     metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
 
+    # Check valid data_source, campaign_name, and station_name
+    check_station_inputs(
+        metadata_archive_dir=metadata_archive_dir,
+        data_source=data_source,
+        campaign_name=campaign_name,
+        station_name=station_name,
+    )
+
+    # ------------------------------------------------------------------------.
     # Read metadata
     metadata = read_station_metadata(
         metadata_archive_dir=metadata_archive_dir,
@@ -652,7 +728,7 @@ def run_l0b_station(
         and multi-threading will be automatically exploited to speed up I/O tasks.
     debugging_mode : bool, optional
         If ``True``, the amount of data processed will be reduced.
-        Only the first 100 rows of 3 L0A files will be processed. The default value is ``False``.
+        Only 100 rows sampled from 3 L0A files will be processed. The default value is ``False``.
     remove_l0a: bool, optional
         Whether to remove the processed L0A files. The default value is ``False``.
     data_archive_dir : str, optional
@@ -669,6 +745,13 @@
     # Retrieve DISDRODB Metadata Archive directory
     metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
 
+    # Check valid data_source, campaign_name, and station_name
+    check_station_inputs(
+        metadata_archive_dir=metadata_archive_dir,
+        data_source=data_source,
+        campaign_name=campaign_name,
+        station_name=station_name,
+    )
     # -----------------------------------------------------------------.
     # Retrieve metadata
     metadata = read_station_metadata(
@@ -731,7 +814,7 @@
     # If no data available, print error message and return None
     if flag_not_available_data:
         msg = (
-            f"{product} processing of {data_source} {campaign_name} {station_name}"
+            f"{product} processing of {data_source} {campaign_name} {station_name} "
             + f"has not been launched because of missing {required_product} data."
         )
         print(msg)
@@ -899,6 +982,14 @@ def run_l0c_station(
     # Retrieve DISDRODB Metadata Archive directory
     metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
 
+    # Check valid data_source, campaign_name, and station_name
+    check_station_inputs(
+        metadata_archive_dir=metadata_archive_dir,
+        data_source=data_source,
+        campaign_name=campaign_name,
+        station_name=station_name,
+    )
+
     # ------------------------------------------------------------------------.
     # Start processing
     t_i = time.time()
@@ -957,7 +1048,7 @@
     # If no data available, print error message and return None
     if flag_not_available_data:
         msg = (
-            f"{product} processing of {data_source} {campaign_name} {station_name}"
+            f"{product} processing of {data_source} {campaign_name} {station_name} "
            + f"has not been launched because of missing {required_product} data."
         )
         print(msg)
disdrodb/l1/__init__.py CHANGED
@@ -15,3 +15,8 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 # -----------------------------------------------------------------------------.
 """DISDRODB L1 module."""
+from disdrodb.l1.processing import generate_l1
+
+__all__ = [
+    "generate_l1",
+]
disdrodb/l1/filters.py CHANGED
@@ -19,6 +19,8 @@
 import numpy as np
 import xarray as xr
 
+from disdrodb.constants import DIAMETER_DIMENSION, VELOCITY_DIMENSION
+
 
 def filter_diameter_bins(ds, minimum_diameter=None, maximum_diameter=None):
     """
@@ -29,10 +31,10 @@ def filter_diameter_bins(ds, minimum_diameter=None, maximum_diameter=None):
     ds : xarray.Dataset
         The dataset containing diameter bin data.
     minimum_diameter : float, optional
-        The minimum diameter to include in the filter, in millimeters.
+        The minimum diameter to be included, in millimeters.
         Defaults to the minimum value in `ds["diameter_bin_lower"]`.
     maximum_diameter : float, optional
-        The maximum diameter to include in the filter, in millimeters.
+        The maximum diameter to be included, in millimeters.
         Defaults to the maximum value in `ds["diameter_bin_upper"]`.
 
     Returns
@@ -40,22 +42,28 @@
     xarray.Dataset
         The filtered dataset containing only the specified diameter bins.
     """
+    # Put data into memory
+    ds["diameter_bin_lower"] = ds["diameter_bin_lower"].compute()
+    ds["diameter_bin_upper"] = ds["diameter_bin_upper"].compute()
+
     # Initialize default arguments
     if minimum_diameter is None:
         minimum_diameter = ds["diameter_bin_lower"].min().item()
     if maximum_diameter is None:
         maximum_diameter = ds["diameter_bin_upper"].max().item()
-    # Select valid bins
+
+    # Select bins which overlap the specified diameters
     valid_indices = np.logical_and(
-        ds["diameter_bin_lower"] >= minimum_diameter,
-        ds["diameter_bin_upper"] <= maximum_diameter,
-    )
-    ds = ds.isel({"diameter_bin_center": valid_indices})
-    # Update history
-    history = ds.attrs.get("history", "")
-    ds.attrs["history"] = (
-        history + f" Selected drops with diameters between {minimum_diameter} and {maximum_diameter} mm \n"
+        ds["diameter_bin_upper"] > minimum_diameter,
+        ds["diameter_bin_lower"] < maximum_diameter,
     )
+
+    # Select bins with diameter values entirely inside the specified min/max values
+    # valid_indices = np.logical_and(
+    #     ds["diameter_bin_lower"] >= minimum_diameter,
+    #     ds["diameter_bin_upper"] <= maximum_diameter,
+    # )
+    ds = ds.isel({DIAMETER_DIMENSION: valid_indices})
     return ds
 
 
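The replacement above relaxes bin selection from strict containment to overlap, so bins straddling a requested limit are now kept rather than dropped (the old criterion survives as a comment). A toy numpy comparison of the two criteria:

    import numpy as np

    # Four toy diameter bins: [0.0-0.5), [0.5-1.0), [1.0-2.0), [2.0-3.0) mm
    lower = np.array([0.0, 0.5, 1.0, 2.0])
    upper = np.array([0.5, 1.0, 2.0, 3.0])
    minimum_diameter, maximum_diameter = 0.6, 2.5

    # Overlap criterion (new): keep any bin intersecting [0.6, 2.5] mm
    print((upper > minimum_diameter) & (lower < maximum_diameter))
    # [False  True  True  True]

    # Containment criterion (old, now commented out): only bins fully inside
    print((lower >= minimum_diameter) & (upper <= maximum_diameter))
    # [False False  True False]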
@@ -79,22 +87,28 @@ def filter_velocity_bins(ds, minimum_velocity=0, maximum_velocity=12):
     xarray.Dataset
         The filtered dataset containing only the specified velocity bins.
     """
+    # Put data into memory
+    ds["velocity_bin_lower"] = ds["velocity_bin_lower"].compute()
+    ds["velocity_bin_upper"] = ds["velocity_bin_upper"].compute()
+
     # Initialize default arguments
     if minimum_velocity is None:
         minimum_velocity = ds["velocity_bin_lower"].min().item()
     if maximum_velocity is None:
         maximum_velocity = ds["velocity_bin_upper"].max().item()
-    # Select valid bins
+
+    # Select bins which overlap the specified velocities
     valid_indices = np.logical_and(
-        ds["velocity_bin_lower"] >= minimum_velocity,
-        ds["velocity_bin_upper"] <= maximum_velocity,
-    )
-    ds = ds.isel({"velocity_bin_center": valid_indices})
-    # Update history
-    history = ds.attrs.get("history", "")
-    ds.attrs["history"] = (
-        history + f" Selected drops with fall velocity between {minimum_velocity} and {maximum_velocity} m/s \n"
+        ds["velocity_bin_upper"] > minimum_velocity,
+        ds["velocity_bin_lower"] < maximum_velocity,
     )
+
+    # Select bins with velocity values entirely inside the specified min/max values
+    # valid_indices = np.logical_and(
+    #     ds["velocity_bin_lower"] >= minimum_velocity,
+    #     ds["velocity_bin_upper"] <= maximum_velocity,
+    # )
+    ds = ds.isel({VELOCITY_DIMENSION: valid_indices})
    return ds
 
 
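
A short usage sketch of the new overlap behaviour on a toy dataset (bin values are invented; the dimension name assumes `VELOCITY_DIMENSION == "velocity_bin_center"`, consistent with the `ds.isel` call removed above):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset(
        {
            "velocity_bin_lower": ("velocity_bin_center", [0.0, 1.0, 5.0, 12.0]),
            "velocity_bin_upper": ("velocity_bin_center", [1.0, 5.0, 12.0, 20.0]),
        },
        coords={"velocity_bin_center": [0.5, 3.0, 8.5, 16.0]},
    )

    # Same selection as filter_velocity_bins with its defaults (0 and 12 m/s):
    valid = np.logical_and(ds["velocity_bin_upper"] > 0, ds["velocity_bin_lower"] < 12)
    print(ds.isel({"velocity_bin_center": valid})["velocity_bin_center"].values)
    # [0.5 3.  8.5] -> the 12-20 m/s bin no longer overlaps and is dropped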