disdrodb 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (125)
  1. disdrodb/__init__.py +1 -5
  2. disdrodb/_version.py +2 -2
  3. disdrodb/accessor/methods.py +22 -4
  4. disdrodb/api/checks.py +10 -0
  5. disdrodb/api/io.py +20 -18
  6. disdrodb/api/path.py +42 -77
  7. disdrodb/api/search.py +89 -23
  8. disdrodb/cli/disdrodb_create_summary.py +1 -1
  9. disdrodb/cli/disdrodb_run_l0.py +1 -1
  10. disdrodb/cli/disdrodb_run_l0a.py +1 -1
  11. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  12. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  13. disdrodb/cli/disdrodb_run_l1.py +1 -1
  14. disdrodb/cli/disdrodb_run_l2e.py +1 -1
  15. disdrodb/cli/disdrodb_run_l2m.py +1 -1
  16. disdrodb/configs.py +30 -83
  17. disdrodb/constants.py +4 -3
  18. disdrodb/data_transfer/download_data.py +4 -2
  19. disdrodb/docs.py +2 -2
  20. disdrodb/etc/products/L1/1MIN.yaml +13 -0
  21. disdrodb/etc/products/L1/LPM/1MIN.yaml +13 -0
  22. disdrodb/etc/products/L1/LPM_V0/1MIN.yaml +13 -0
  23. disdrodb/etc/products/L1/PARSIVEL/1MIN.yaml +13 -0
  24. disdrodb/etc/products/L1/PARSIVEL2/1MIN.yaml +13 -0
  25. disdrodb/etc/products/L1/PWS100/1MIN.yaml +13 -0
  26. disdrodb/etc/products/L1/RD80/1MIN.yaml +13 -0
  27. disdrodb/etc/products/L1/SWS250/1MIN.yaml +13 -0
  28. disdrodb/etc/products/L1/global.yaml +6 -0
  29. disdrodb/etc/products/L2E/10MIN.yaml +1 -12
  30. disdrodb/etc/products/L2E/global.yaml +1 -1
  31. disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_R_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/global.yaml +1 -1
  33. disdrodb/issue/checks.py +2 -2
  34. disdrodb/l0/check_configs.py +1 -1
  35. disdrodb/l0/configs/LPM/l0a_encodings.yml +0 -1
  36. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +0 -4
  37. disdrodb/l0/configs/LPM/l0b_encodings.yml +9 -9
  38. disdrodb/l0/configs/LPM/raw_data_format.yml +11 -11
  39. disdrodb/l0/configs/LPM_V0/bins_diameter.yml +103 -0
  40. disdrodb/l0/configs/LPM_V0/bins_velocity.yml +103 -0
  41. disdrodb/l0/configs/LPM_V0/l0a_encodings.yml +45 -0
  42. disdrodb/l0/configs/LPM_V0/l0b_cf_attrs.yml +180 -0
  43. disdrodb/l0/configs/LPM_V0/l0b_encodings.yml +410 -0
  44. disdrodb/l0/configs/LPM_V0/raw_data_format.yml +474 -0
  45. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +1 -1
  46. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +8 -8
  47. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +9 -9
  48. disdrodb/l0/l0_reader.py +2 -2
  49. disdrodb/l0/l0a_processing.py +6 -2
  50. disdrodb/l0/l0b_processing.py +26 -19
  51. disdrodb/l0/l0c_processing.py +17 -3
  52. disdrodb/l0/manuals/LPM_V0.pdf +0 -0
  53. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +15 -7
  54. disdrodb/l0/readers/LPM/ITALY/GID_LPM_PI.py +279 -0
  55. disdrodb/l0/readers/LPM/ITALY/GID_LPM_T.py +276 -0
  56. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
  57. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_RWANDA_LPM_NC.py +103 -0
  58. disdrodb/l0/readers/LPM/NORWAY/HAUKELISETER_LPM.py +216 -0
  59. disdrodb/l0/readers/LPM/NORWAY/NMBU_LPM.py +208 -0
  60. disdrodb/l0/readers/LPM/UK/WITHWORTH_LPM.py +219 -0
  61. disdrodb/l0/readers/LPM/USA/CHARLESTON.py +229 -0
  62. disdrodb/l0/readers/{LPM → LPM_V0}/BELGIUM/ULIEGE.py +33 -49
  63. disdrodb/l0/readers/LPM_V0/ITALY/GID_LPM_V0.py +240 -0
  64. disdrodb/l0/readers/PARSIVEL/BASQUECOUNTRY/EUSKALMET_OTT.py +227 -0
  65. disdrodb/l0/readers/{PARSIVEL2 → PARSIVEL}/NASA/LPVEX.py +16 -28
  66. disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/MC3E.py +1 -1
  67. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +3 -3
  68. disdrodb/l0/readers/PARSIVEL2/BASQUECOUNTRY/EUSKALMET_OTT2.py +232 -0
  69. disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +1 -1
  70. disdrodb/l0/readers/PARSIVEL2/JAPAN/PRECIP.py +155 -0
  71. disdrodb/l0/readers/PARSIVEL2/MPI/BCO_PARSIVEL2.py +14 -7
  72. disdrodb/l0/readers/PARSIVEL2/MPI/BOWTIE.py +8 -3
  73. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +28 -5
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +1 -1
  75. disdrodb/l0/readers/PARSIVEL2/{GPM/GCPEX.py → NORWAY/UIB.py} +54 -29
  76. disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/{PANGASA.py → PAGASA.py} +6 -3
  77. disdrodb/l0/readers/PARSIVEL2/SPAIN/GRANADA.py +1 -1
  78. disdrodb/l0/readers/PARSIVEL2/SWEDEN/SMHI.py +189 -0
  79. disdrodb/l0/readers/{PARSIVEL/GPM/PIERS.py → PARSIVEL2/USA/CSU.py} +62 -29
  80. disdrodb/l0/readers/PARSIVEL2/USA/{C3WE.py → CW3E.py} +51 -24
  81. disdrodb/l0/readers/{PARSIVEL/GPM/IFLOODS.py → RD80/BRAZIL/ATTO_RD80.py} +50 -34
  82. disdrodb/l0/readers/{SW250 → SWS250}/BELGIUM/KMI.py +1 -1
  83. disdrodb/l1/beard_model.py +45 -1
  84. disdrodb/l1/fall_velocity.py +1 -6
  85. disdrodb/l1/filters.py +2 -0
  86. disdrodb/l1/processing.py +6 -5
  87. disdrodb/l1/resampling.py +101 -38
  88. disdrodb/l2/empirical_dsd.py +12 -8
  89. disdrodb/l2/processing.py +4 -3
  90. disdrodb/metadata/search.py +3 -4
  91. disdrodb/routines/l0.py +4 -4
  92. disdrodb/routines/l1.py +173 -60
  93. disdrodb/routines/l2.py +121 -269
  94. disdrodb/routines/options.py +347 -0
  95. disdrodb/routines/wrappers.py +9 -1
  96. disdrodb/scattering/axis_ratio.py +3 -0
  97. disdrodb/scattering/routines.py +1 -1
  98. disdrodb/summary/routines.py +765 -724
  99. disdrodb/utils/archiving.py +51 -44
  100. disdrodb/utils/attrs.py +1 -1
  101. disdrodb/utils/compression.py +4 -2
  102. disdrodb/utils/dask.py +35 -15
  103. disdrodb/utils/dict.py +33 -0
  104. disdrodb/utils/encoding.py +1 -1
  105. disdrodb/utils/manipulations.py +7 -1
  106. disdrodb/utils/routines.py +9 -8
  107. disdrodb/utils/time.py +9 -1
  108. disdrodb/viz/__init__.py +0 -13
  109. disdrodb/viz/plots.py +209 -0
  110. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/METADATA +1 -1
  111. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/RECORD +124 -95
  112. disdrodb/l0/readers/PARSIVEL/GPM/LPVEX.py +0 -85
  113. /disdrodb/etc/products/L2M/{GAMMA_GS_ND_MAE.yaml → MODELS/GAMMA_GS_ND_MAE.yaml} +0 -0
  114. /disdrodb/etc/products/L2M/{GAMMA_ML.yaml → MODELS/GAMMA_ML.yaml} +0 -0
  115. /disdrodb/etc/products/L2M/{LOGNORMAL_GS_LOG_ND_MAE.yaml → MODELS/LOGNORMAL_GS_LOG_ND_MAE.yaml} +0 -0
  116. /disdrodb/etc/products/L2M/{LOGNORMAL_GS_ND_MAE.yaml → MODELS/LOGNORMAL_GS_ND_MAE.yaml} +0 -0
  117. /disdrodb/etc/products/L2M/{LOGNORMAL_ML.yaml → MODELS/LOGNORMAL_ML.yaml} +0 -0
  118. /disdrodb/etc/products/L2M/{NGAMMA_GS_LOG_ND_MAE.yaml → MODELS/NGAMMA_GS_LOG_ND_MAE.yaml} +0 -0
  119. /disdrodb/etc/products/L2M/{NGAMMA_GS_ND_MAE.yaml → MODELS/NGAMMA_GS_ND_MAE.yaml} +0 -0
  120. /disdrodb/etc/products/L2M/{NGAMMA_GS_Z_MAE.yaml → MODELS/NGAMMA_GS_Z_MAE.yaml} +0 -0
  121. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/NSSTC.py +0 -0
  122. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/WHEEL +0 -0
  123. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/entry_points.txt +0 -0
  124. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/licenses/LICENSE +0 -0
  125. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/top_level.txt +0 -0
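The headline refactor of this release is visible in the `disdrodb/routines/l2.py` diff below: the L2 `ProcessingOptions` helper moves into the new `disdrodb/routines/options.py` module as `L2ProcessingOptions` (one instance per temporal resolution, exposing plain attributes instead of getter methods), and every `accumulation_interval`/`rolling` keyword pair collapses into a single `temporal_resolution` string. A minimal before/after sketch of caller code follows; class, argument, and attribute names are taken from the diff, while the `filepaths` list and the "10MIN" label are illustrative placeholders.

# disdrodb 0.1.5: one instance covered all resolutions, accessed via getters
from disdrodb.routines.l2 import ProcessingOptions  # class lived in l2.py

filepaths = ["<required product netCDF files>"]  # placeholder
opts = ProcessingOptions(product="L2E", filepaths=filepaths, parallel=False)
for temporal_resolution in opts.temporal_resolutions:
    files_partitions = opts.get_files_partitions(temporal_resolution)
    product_options = opts.get_product_options(temporal_resolution)

# disdrodb 0.2.1: one instance per temporal resolution, accessed via attributes
from disdrodb.routines.options import L2ProcessingOptions

opts = L2ProcessingOptions(
    product="L2E",
    temporal_resolution="10MIN",
    filepaths=filepaths,
    parallel=False,
)
files_partitions = opts.files_partitions
product_options = opts.product_options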
disdrodb/routines/l2.py CHANGED
@@ -18,7 +18,6 @@
 
 import copy
 import datetime
-import json
 import logging
 import os
 import time
@@ -31,195 +30,51 @@ from disdrodb.api.create_directories import (
     create_logs_directory,
     create_product_directory,
 )
-from disdrodb.api.info import group_filepaths
 from disdrodb.api.io import open_netcdf_files
 from disdrodb.api.path import (
     define_file_folder_path,
     define_l2e_filename,
     define_l2m_filename,
-    define_temporal_resolution,
 )
 from disdrodb.api.search import get_required_product
 from disdrodb.configs import (
     get_data_archive_dir,
     get_metadata_archive_dir,
-    get_model_options,
-    get_product_options,
-    get_product_temporal_resolutions,
 )
-from disdrodb.l1.resampling import resample_dataset
 from disdrodb.l2.processing import (
     generate_l2_radar,
     generate_l2e,
     generate_l2m,
 )
 from disdrodb.metadata import read_station_metadata
+from disdrodb.routines.options import (
+    L2ProcessingOptions,
+    get_model_options,
+    get_product_temporal_resolutions,
+    is_possible_product,
+)
 from disdrodb.scattering.routines import precompute_scattering_tables
-from disdrodb.utils.archiving import define_temporal_partitions, get_files_partitions
 from disdrodb.utils.dask import execute_tasks_safely
 from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
-from disdrodb.utils.list import flatten_list
-
-# Logger
 from disdrodb.utils.logger import (
     create_product_logs,
     log_info,
 )
 from disdrodb.utils.routines import (
-    is_possible_product,
     run_product_generation,
     try_get_required_filepaths,
 )
-from disdrodb.utils.time import (
-    ensure_sample_interval_in_seconds,
-    get_sampling_information,
-)
 from disdrodb.utils.writer import write_product
 
 logger = logging.getLogger(__name__)
 
 
-####----------------------------------------------------------------------------.
-
-
-class ProcessingOptions:
-    """Define L2 products processing options."""
-
-    # TODO: TO MOVE ELSEWHERE (AFTER L1 REFACTORING !)
-
-    def __init__(self, product, filepaths, parallel, temporal_resolutions=None):
-        """Define L2 products processing options."""
-        import disdrodb
-
-        # ---------------------------------------------------------------------.
-        # Define temporal resolutions for which to retrieve processing options
-        if temporal_resolutions is None:
-            temporal_resolutions = get_product_temporal_resolutions(product)
-        elif isinstance(temporal_resolutions, str):
-            temporal_resolutions = [temporal_resolutions]
-
-        # ---------------------------------------------------------------------.
-        # Get product options at various temporal resolutions
-        dict_product_options = {
-            temporal_resolution: get_product_options(product, temporal_resolution=temporal_resolution)
-            for temporal_resolution in temporal_resolutions
-        }
-
-        # ---------------------------------------------------------------------.
-        # Group filepaths by source sample intervals
-        # - Typically the sample interval is fixed and is just one
-        # - Some stations might change the sample interval along the years
-        # - For each sample interval, separated processing take place here after !
-        dict_filepaths = group_filepaths(filepaths, groups="sample_interval")
-
-        # ---------------------------------------------------------------------.
-        # Retrieve processing information for each temporal resolution
-        dict_folder_partitioning = {}
-        dict_files_partitions = {}
-        _cache_dict_list_partitions: dict[str, dict] = {}
-        for temporal_resolution in temporal_resolutions:
-
-            # -------------------------------------------------------------------------.
-            # Retrieve product options
-            product_options = dict_product_options[temporal_resolution].copy()
-
-            # Retrieve accumulation_interval and rolling option
-            accumulation_interval, rolling = get_sampling_information(temporal_resolution)
-
-            # Extract processing options
-            archive_options = product_options.pop("archive_options")
-
-            dict_product_options[temporal_resolution] = product_options
-            # -------------------------------------------------------------------------.
-            # Define folder partitioning
-            if "folder_partitioning" not in archive_options:
-                dict_folder_partitioning[temporal_resolution] = disdrodb.config.get("folder_partitioning")
-            else:
-                dict_folder_partitioning[temporal_resolution] = archive_options.pop("folder_partitioning")
-
-            # -------------------------------------------------------------------------.
-            # Define list of temporal partitions
-            # - [{start_time: np.datetime64, end_time: np.datetime64}, ....]
-            # - Either strategy: "event" or "time_block" or save_by_time_block"
-            # - "event" requires loading data into memory to identify events
-            #   --> Does some data filtering on what to process !
-            # - "time_block" does not require loading data into memory
-            #   --> Does not do data filtering on what to process !
-            # --> Here we cache dict_list_partitions so that we don't need to recompute
-            #     stuffs if processing options are the same
-            key = json.dumps(archive_options, sort_keys=True)
-            if key not in _cache_dict_list_partitions:
-                _cache_dict_list_partitions[key] = {
-                    sample_interval: define_temporal_partitions(filepaths, parallel=parallel, **archive_options)
-                    for sample_interval, filepaths in dict_filepaths.items()
-                }
-            dict_list_partitions = _cache_dict_list_partitions[key].copy()  # To avoid in-place replacement
-
-            # ------------------------------------------------------------------.
-            # Group filepaths by temporal partitions
-            # - This is done separately for each possible source sample interval
-            # - It groups filepaths by start_time and end_time provided by list_partitions
-            # - Here 'events' can also simply be period of times ('day', 'months', ...)
-            # - When aggregating/resampling/accumulating data, we need to load also
-            #   some data after the actual event end_time to ensure that the resampled dataset
-            #   contains the event_end_time
-            #   --> get_files_partitions adjust the event end_time to accounts for the required "border" data.
-            # - ATTENTION: get_files_partitions returns start_time and end_time as datetime objects !
-            files_partitions = [
-                get_files_partitions(
-                    list_partitions=list_partitions,
-                    filepaths=dict_filepaths[sample_interval],
-                    sample_interval=sample_interval,
-                    accumulation_interval=accumulation_interval,
-                    rolling=rolling,
-                )
-                for sample_interval, list_partitions in dict_list_partitions.items()
-                if product != "L2E"
-                or is_possible_product(
-                    accumulation_interval=accumulation_interval,
-                    sample_interval=sample_interval,
-                    rolling=rolling,
-                )
-            ]
-            files_partitions = flatten_list(files_partitions)
-            dict_files_partitions[temporal_resolution] = files_partitions
-
-        # ------------------------------------------------------------------.
-        # Keep only temporal_resolutions for which events could be defined
-        # - Remove e.g when not compatible accumulation_interval with source sample_interval
-        temporal_resolutions = [
-            temporal_resolution
-            for temporal_resolution in temporal_resolutions
-            if len(dict_files_partitions[temporal_resolution]) > 0
-        ]
-        # ------------------------------------------------------------------.
-        # Add attributes
-        self.temporal_resolutions = temporal_resolutions
-        self.dict_files_partitions = dict_files_partitions
-        self.dict_product_options = dict_product_options
-        self.dict_folder_partitioning = dict_folder_partitioning
-
-    def get_files_partitions(self, temporal_resolution):
-        """Return files partitions dictionary for a specific L2E product."""
-        return self.dict_files_partitions[temporal_resolution]
-
-    def get_product_options(self, temporal_resolution):
-        """Return product options dictionary for a specific L2E product."""
-        return self.dict_product_options[temporal_resolution]
-
-    def get_folder_partitioning(self, temporal_resolution):
-        """Return the folder partitioning for a specific L2E product."""
-        # to be used for logs and files !
-        return self.dict_folder_partitioning[temporal_resolution]
-
-
 ####----------------------------------------------------------------------------.
 #### L2E
 
 
-def define_l2e_logs_filename(campaign_name, station_name, start_time, end_time, accumulation_interval, rolling):
+def define_l2e_logs_filename(campaign_name, station_name, start_time, end_time, temporal_resolution):
     """Define L2E logs filename."""
-    temporal_resolution = define_temporal_resolution(seconds=accumulation_interval, rolling=rolling)
     starting_time = pd.to_datetime(start_time).strftime("%Y%m%d%H%M%S")
     ending_time = pd.to_datetime(end_time).strftime("%Y%m%d%H%M%S")
     logs_filename = f"L2E.{temporal_resolution}.{campaign_name}.{station_name}.s{starting_time}.e{ending_time}"
@@ -239,8 +94,7 @@ def _generate_l2e(
     campaign_name,
     station_name,
     # L2E options
-    accumulation_interval,
-    rolling,
+    temporal_resolution,
     product_options,
     # Processing options
     force,
@@ -254,42 +108,28 @@ def _generate_l2e(
     # Define product processing function
     def core(
         filepaths,
+        start_time,
+        end_time,
         campaign_name,
         station_name,
-        product_options,
         # Processing options
         logger,
         parallel,
        verbose,
         force,
-        # Resampling arguments
-        start_time,
-        end_time,
-        accumulation_interval,
-        rolling,
+        # Product options
+        temporal_resolution,
+        product_options,
         # Archiving arguments
         data_dir,
         folder_partitioning,
     ):
-        """Define L1 product processing."""
+        """Define L2E product processing."""
         # Copy to avoid in-place replacement (outside this function)
         product_options = product_options.copy()
 
         # Open the dataset over the period of interest
-        ds = open_netcdf_files(filepaths, start_time=start_time, end_time=end_time, parallel=False)
-        ds = ds.load()
-        ds.close()
-
-        # Resample dataset # TODO: in future to perform in L1
-        # - Define sample interval in seconds
-        sample_interval = ensure_sample_interval_in_seconds(ds["sample_interval"]).to_numpy().item()
-        # - Resample dataset
-        ds = resample_dataset(
-            ds=ds,
-            sample_interval=sample_interval,
-            accumulation_interval=accumulation_interval,
-            rolling=rolling,
-        )
+        ds = open_netcdf_files(filepaths, start_time=start_time, end_time=end_time, parallel=False, compute=True)
 
         # Extract L2E processing options
         l2e_options = product_options.get("product_options")
@@ -320,8 +160,7 @@ def _generate_l2e(
             ds,
             campaign_name=campaign_name,
             station_name=station_name,
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
         )
         folder_path = define_file_folder_path(ds, dir_path=data_dir, folder_partitioning=folder_partitioning)
         filepath = os.path.join(folder_path, filename)
@@ -333,14 +172,14 @@ def _generate_l2e(
     # Define product processing function kwargs
     core_func_kwargs = dict(  # noqa: C408
         filepaths=filepaths,
+        start_time=start_time,
+        end_time=end_time,
+        # Station info
         campaign_name=campaign_name,
         station_name=station_name,
+        # Product options
+        temporal_resolution=temporal_resolution,
         product_options=product_options,
-        # Resampling arguments
-        start_time=start_time,
-        end_time=end_time,
-        accumulation_interval=accumulation_interval,
-        rolling=rolling,
         # Archiving arguments
         data_dir=data_dir,
         folder_partitioning=folder_partitioning,
@@ -449,57 +288,85 @@ def run_l2e_station(
     msg = f"{product} processing of station {station_name} has started."
     log_info(logger=logger, msg=msg, verbose=verbose)
 
-    # -------------------------------------------------------------------------.
-    # List files to process
-    # - If no data available, print error message and return None
-    required_product = get_required_product(product)
-    filepaths = try_get_required_filepaths(
-        data_archive_dir=data_archive_dir,
+    # ---------------------------------------------------------------------.
+    # Retrieve source sampling interval
+    # - If a station has varying measurement interval over time, choose the smallest one !
+    metadata = read_station_metadata(
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
-        product=required_product,
-        # Processing options
-        debugging_mode=debugging_mode,
     )
-    if filepaths is None:
-        return
-
-    # -------------------------------------------------------------------------.
-    # Retrieve L2E processing options
-    l2e_processing_options = ProcessingOptions(product="L2E", filepaths=filepaths, parallel=parallel)
+    sample_interval = metadata["measurement_interval"]
+    if isinstance(sample_interval, list):
+        sample_interval = min(sample_interval)
 
-    # -------------------------------------------------------------------------.
+    # ---------------------------------------------------------------------.
     # Generate products for each temporal resolution
-    # rolling = False
-    # accumulation_interval = 60
+    # temporal_resolution = "1MIN"
     # temporal_resolution = "10MIN"
-    # folder_partitioning = ""
-    # product_options = l2e_processing_options.get_product_options(temporal_resolution)
+    temporal_resolutions = get_product_temporal_resolutions(product)
+    for temporal_resolution in temporal_resolutions:
 
-    for temporal_resolution in l2e_processing_options.temporal_resolutions:
-        # Print progress message
-        msg = f"Production of {product} {temporal_resolution} has started."
-        log_info(logger=logger, msg=msg, verbose=verbose)
+        # ------------------------------------------------------------------.
+        # Check if the product can be generated
+        if not is_possible_product(
+            temporal_resolution=temporal_resolution,
+            sample_interval=sample_interval,
+        ):
+            continue
 
-        # Retrieve event info
-        files_partitions = l2e_processing_options.get_files_partitions(temporal_resolution)
+        # ---------------------------------------------------------------------.
+        # List files to process
+        # - If no data available, print error message and try with other L2E accumulation intervals
+        required_product = get_required_product(product)
+        filepaths = try_get_required_filepaths(
+            data_archive_dir=data_archive_dir,
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            product=required_product,
+            # Processing options
+            debugging_mode=debugging_mode,
+            # Product options
+            temporal_resolution=temporal_resolution,
+        )
+        if filepaths is None:
+            continue
+
+        # ---------------------------------------------------------------------.
+        # Retrieve L2E processing options
+        l2e_processing_options = L2ProcessingOptions(
+            product=product,
+            temporal_resolution=temporal_resolution,
+            filepaths=filepaths,
+            parallel=parallel,
+        )
+
+        # ---------------------------------------------------------------------.
+        # Retrieve files temporal partitions
+        files_partitions = l2e_processing_options.files_partitions
+
+        if len(files_partitions) == 0:
+            msg = (
+                f"{product} processing of {data_source} {campaign_name} {station_name} "
+                + f"has not been launched because of missing {required_product} {temporal_resolution} data."
+            )
+            log_info(logger=logger, msg=msg, verbose=verbose)
+            continue
 
         # Retrieve folder partitioning (for files and logs)
-        folder_partitioning = l2e_processing_options.get_folder_partitioning(temporal_resolution)
+        folder_partitioning = l2e_processing_options.folder_partitioning
 
         # Retrieve product options
-        product_options = l2e_processing_options.get_product_options(temporal_resolution)
-
-        # Retrieve accumulation_interval and rolling option
-        accumulation_interval, rolling = get_sampling_information(temporal_resolution)
+        product_options = l2e_processing_options.product_options
 
         # Precompute required scattering tables
         if product_options["radar_enabled"]:
             radar_options = product_options["radar_options"]
             precompute_scattering_tables(verbose=verbose, **radar_options)
 
-        # ------------------------------------------------------------------.
+        # ---------------------------------------------------------------------.
        # Create product directory
         data_dir = create_product_directory(
             data_archive_dir=data_archive_dir,
@@ -510,8 +377,7 @@ def run_l2e_station(
             product=product,
             force=force,
             # Option for L2E
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
         )
 
         # Define logs directory
@@ -522,11 +388,10 @@ def run_l2e_station(
             campaign_name=campaign_name,
             station_name=station_name,
             # Option for L2E
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
         )
 
-        # ------------------------------------------------------------------.
+        # ---------------------------------------------------------------------.
         # Generate files
         # - L2E product generation is optionally parallelized over events
         # - If parallel=True, it does that in parallel using dask.delayed
@@ -542,15 +407,13 @@ def run_l2e_station(
                     station_name=station_name,
                     start_time=event_info["start_time"],
                     end_time=event_info["end_time"],
-                    rolling=rolling,
-                    accumulation_interval=accumulation_interval,
+                    temporal_resolution=temporal_resolution,
                 ),
                 folder_partitioning=folder_partitioning,
                 campaign_name=campaign_name,
                 station_name=station_name,
                 # L2E options
-                rolling=rolling,
-                accumulation_interval=accumulation_interval,
+                temporal_resolution=temporal_resolution,
                 product_options=product_options,
                 # Processing options
                 force=force,
@@ -570,8 +433,7 @@ def run_l2e_station(
             station_name=station_name,
             data_archive_dir=data_archive_dir,
             # Product options
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
             # Logs list
             list_logs=list_logs,
         )
@@ -586,9 +448,8 @@
 
 ####----------------------------------------------------------------------------.
 #### L2M
-def define_l2m_logs_filename(campaign_name, station_name, start_time, end_time, model_name, sample_interval, rolling):
+def define_l2m_logs_filename(campaign_name, station_name, start_time, end_time, model_name, temporal_resolution):
     """Define L2M logs filename."""
-    temporal_resolution = define_temporal_resolution(seconds=sample_interval, rolling=rolling)
     starting_time = pd.to_datetime(start_time).strftime("%Y%m%d%H%M%S")
     ending_time = pd.to_datetime(end_time).strftime("%Y%m%d%H%M%S")
     logs_filename = (
@@ -610,8 +471,7 @@ def _generate_l2m(
     campaign_name,
     station_name,
     # L2M options
-    sample_interval,
-    rolling,
+    temporal_resolution,
     model_name,
     product_options,
     # Processing options
@@ -636,14 +496,13 @@ def _generate_l2m(
         force,
         # Product options
         product_options,
-        sample_interval,
-        rolling,
+        temporal_resolution,
         model_name,
         # Archiving arguments
         data_dir,
         folder_partitioning,
     ):
-        """Define L1 product processing."""
+        """Define L2M product processing."""
         # Copy to avoid in-place replacement (outside this function)
         product_options = product_options.copy()
 
@@ -676,9 +535,14 @@ def _generate_l2m(
 
         ##------------------------------------------------------------------------.
         # Open the netCDF files
-        ds = open_netcdf_files(filepaths, start_time=start_time, end_time=end_time, variables=variables)
-        ds = ds.load()
-        ds.close()
+        ds = open_netcdf_files(
+            filepaths,
+            start_time=start_time,
+            end_time=end_time,
+            variables=variables,
+            parallel=False,
+            compute=True,
+        )
 
         # Produce L2M dataset
         ds = generate_l2m(
@@ -702,8 +566,7 @@ def _generate_l2m(
             ds,
             campaign_name=campaign_name,
             station_name=station_name,
-            sample_interval=sample_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
             model_name=model_name,
         )
         folder_path = define_file_folder_path(ds, dir_path=data_dir, folder_partitioning=folder_partitioning)
@@ -725,8 +588,7 @@ def _generate_l2m(
         force=force,
         # Product options
         product_options=product_options,
-        sample_interval=sample_interval,
-        rolling=rolling,
+        temporal_resolution=temporal_resolution,
        model_name=model_name,
         # Archiving arguments
         data_dir=data_dir,
@@ -838,19 +700,15 @@ def run_l2m_station(
     # Loop
     # temporal_resolution = "1MIN"
     # temporal_resolution = "10MIN"
-    temporal_resolutions = get_product_temporal_resolutions("L2M")
+    temporal_resolutions = get_product_temporal_resolutions(product)
     for temporal_resolution in temporal_resolutions:
 
-        # Retrieve accumulation_interval and rolling option
-        accumulation_interval, rolling = get_sampling_information(temporal_resolution)
-
         # ------------------------------------------------------------------.
-        # Avoid generation of rolling products for source sample interval !
-        if rolling and accumulation_interval == sample_interval:
-            continue
-
-        # Avoid product generation if the accumulation_interval is less than the sample interval
-        if accumulation_interval < sample_interval:
+        # Check if the product can be generated
+        if not is_possible_product(
+            temporal_resolution=temporal_resolution,
+            sample_interval=sample_interval,
+        ):
             continue
 
         # -----------------------------------------------------------------.
@@ -866,29 +724,28 @@ def run_l2m_station(
             # Processing options
            debugging_mode=debugging_mode,
             # Product options
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
         )
         if filepaths is None:
             continue
 
         # -------------------------------------------------------------------------.
         # Retrieve L2M processing options
-        l2m_processing_options = ProcessingOptions(
-            product="L2M",
-            temporal_resolutions=temporal_resolution,
+        l2m_processing_options = L2ProcessingOptions(
+            product=product,
+            temporal_resolution=temporal_resolution,
             filepaths=filepaths,
             parallel=parallel,
         )
 
         # Retrieve folder partitioning (for files and logs)
-        folder_partitioning = l2m_processing_options.get_folder_partitioning(temporal_resolution)
+        folder_partitioning = l2m_processing_options.folder_partitioning
 
         # Retrieve product options
-        global_product_options = l2m_processing_options.get_product_options(temporal_resolution)
+        global_product_options = l2m_processing_options.product_options
 
         # Retrieve files temporal partitions
-        files_partitions = l2m_processing_options.get_files_partitions(temporal_resolution)
+        files_partitions = l2m_processing_options.files_partitions
 
         if len(files_partitions) == 0:
             msg = (
@@ -921,7 +778,7 @@ def run_l2m_station(
                 precompute_scattering_tables(verbose=verbose, **radar_options)
 
             # -----------------------------------------------------------------.
-            msg = f"Production of L2M_{model_name} for sample interval {accumulation_interval} s has started."
+            msg = f"Production of L2M_{model_name} for {temporal_resolution} has started."
             log_info(logger=logger, msg=msg, verbose=verbose)
             msg = f"Estimating {psd_model} parameters using {optimization}."
             log_info(logger=logger, msg=msg, verbose=verbose)
@@ -941,14 +798,13 @@ def run_l2m_station(
                     product=product,
                     force=force,
                     # Option for L2E
-                    sample_interval=accumulation_interval,
-                    rolling=rolling,
+                    temporal_resolution=temporal_resolution,
                     # Option for L2M
                     model_name=model_name,
                 )
             except Exception:
                 msg = (
-                    f"Production of L2M_{model_name} for sample interval {accumulation_interval} s has been "
+                    f"Production of L2M_{model_name} for {temporal_resolution} data has been "
                     + "skipped because the product already exists and force=False."
                 )
                 log_info(logger=logger, msg=msg, verbose=verbose)
@@ -963,8 +819,7 @@ def run_l2m_station(
                 campaign_name=campaign_name,
                 station_name=station_name,
                 # Option for L2E
-                sample_interval=accumulation_interval,
-                rolling=rolling,
+                temporal_resolution=temporal_resolution,
                 # Option for L2M
                 model_name=model_name,
             )
@@ -985,15 +840,13 @@ def run_l2m_station(
                     start_time=event_info["start_time"],
                     end_time=event_info["end_time"],
                     model_name=model_name,
-                    sample_interval=accumulation_interval,
-                    rolling=rolling,
+                    temporal_resolution=temporal_resolution,
                 ),
                 folder_partitioning=folder_partitioning,
                 campaign_name=campaign_name,
                 station_name=station_name,
                 # L2M options
-                sample_interval=accumulation_interval,
-                rolling=rolling,
+                temporal_resolution=temporal_resolution,
                 model_name=model_name,
                 product_options=product_options,
                 # Processing options
@@ -1017,8 +870,7 @@ def run_l2m_station(
             data_archive_dir=data_archive_dir,
             # Product options
            model_name=model_name,
-            sample_interval=accumulation_interval,
-            rolling=rolling,
+            temporal_resolution=temporal_resolution,
            # Logs list
            list_logs=list_logs,
        )
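The same consolidation runs through the helper signatures above (`define_l2e_logs_filename`, `define_l2m_logs_filename`, `create_product_directory`, `create_logs_directory`, `create_product_logs`): 0.1.5 derived the label internally via `define_temporal_resolution(seconds=..., rolling=...)`, while 0.2.1 expects callers to pass the label directly. A hypothetical migration shim for downstream code still holding the old keyword pair is sketched below; the "MIN"/"S" suffixes mirror the "1MIN"/"10MIN" labels seen in this diff, but the "ROLL" prefix for rolling windows is a guess, not confirmed here.

def to_temporal_resolution(accumulation_interval: int, rolling: bool) -> str:
    """Hypothetical helper (not part of disdrodb): map 0.1.5-style kwargs
    to a 0.2.1-style temporal-resolution label."""
    minutes, seconds = divmod(accumulation_interval, 60)
    # Whole minutes render as e.g. "10MIN"; otherwise fall back to raw seconds ("90S").
    label = f"{minutes}MIN" if seconds == 0 else f"{accumulation_interval}S"
    # Guessed convention for rolling (moving-window) aggregations.
    return f"ROLL{label}" if rolling else label


# 0.1.5 call style:
#   define_l2e_logs_filename(..., accumulation_interval=600, rolling=False)
# 0.2.1 call style:
#   define_l2e_logs_filename(..., temporal_resolution=to_temporal_resolution(600, False))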