disdrodb 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142)
  1. disdrodb/__init__.py +68 -34
  2. disdrodb/_config.py +5 -4
  3. disdrodb/_version.py +16 -3
  4. disdrodb/accessor/__init__.py +20 -0
  5. disdrodb/accessor/methods.py +125 -0
  6. disdrodb/api/checks.py +177 -24
  7. disdrodb/api/configs.py +3 -3
  8. disdrodb/api/info.py +13 -13
  9. disdrodb/api/io.py +281 -22
  10. disdrodb/api/path.py +184 -195
  11. disdrodb/api/search.py +18 -9
  12. disdrodb/cli/disdrodb_create_summary.py +103 -0
  13. disdrodb/cli/disdrodb_create_summary_station.py +91 -0
  14. disdrodb/cli/disdrodb_run_l0.py +1 -1
  15. disdrodb/cli/disdrodb_run_l0_station.py +1 -1
  16. disdrodb/cli/disdrodb_run_l0a_station.py +1 -1
  17. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  18. disdrodb/cli/disdrodb_run_l0b_station.py +3 -3
  19. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  20. disdrodb/cli/disdrodb_run_l0c_station.py +3 -3
  21. disdrodb/cli/disdrodb_run_l1_station.py +2 -2
  22. disdrodb/cli/disdrodb_run_l2e_station.py +2 -2
  23. disdrodb/cli/disdrodb_run_l2m_station.py +2 -2
  24. disdrodb/configs.py +149 -4
  25. disdrodb/constants.py +61 -0
  26. disdrodb/data_transfer/download_data.py +127 -11
  27. disdrodb/etc/configs/attributes.yaml +339 -0
  28. disdrodb/etc/configs/encodings.yaml +473 -0
  29. disdrodb/etc/products/L1/global.yaml +13 -0
  30. disdrodb/etc/products/L2E/10MIN.yaml +12 -0
  31. disdrodb/etc/products/L2E/1MIN.yaml +1 -0
  32. disdrodb/etc/products/L2E/global.yaml +22 -0
  33. disdrodb/etc/products/L2M/10MIN.yaml +12 -0
  34. disdrodb/etc/products/L2M/GAMMA_ML.yaml +8 -0
  35. disdrodb/etc/products/L2M/NGAMMA_GS_LOG_ND_MAE.yaml +6 -0
  36. disdrodb/etc/products/L2M/NGAMMA_GS_ND_MAE.yaml +6 -0
  37. disdrodb/etc/products/L2M/NGAMMA_GS_Z_MAE.yaml +6 -0
  38. disdrodb/etc/products/L2M/global.yaml +26 -0
  39. disdrodb/issue/writer.py +2 -0
  40. disdrodb/l0/__init__.py +13 -0
  41. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +4 -4
  42. disdrodb/l0/configs/PARSIVEL/l0b_cf_attrs.yml +1 -1
  43. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +3 -3
  44. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +1 -1
  45. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +5 -5
  46. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +3 -3
  47. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +1 -1
  48. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +4 -4
  49. disdrodb/l0/configs/PWS100/raw_data_format.yml +1 -1
  50. disdrodb/l0/l0a_processing.py +37 -32
  51. disdrodb/l0/l0b_nc_processing.py +118 -8
  52. disdrodb/l0/l0b_processing.py +30 -65
  53. disdrodb/l0/l0c_processing.py +369 -259
  54. disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +7 -0
  55. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_LPM_NC.py +66 -0
  56. disdrodb/l0/readers/LPM/SLOVENIA/{CRNI_VRH.py → UL.py} +3 -0
  57. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +195 -0
  58. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +0 -2
  59. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +4 -1
  60. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +1 -1
  61. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +1 -1
  62. disdrodb/l0/readers/PARSIVEL2/ARM/ARM_PARSIVEL2.py +4 -0
  63. disdrodb/l0/readers/PARSIVEL2/BELGIUM/ILVO.py +168 -0
  64. disdrodb/l0/readers/PARSIVEL2/CANADA/UQAM_NC.py +69 -0
  65. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +165 -0
  66. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +69 -0
  67. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +255 -134
  68. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +525 -0
  69. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +1 -1
  70. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +9 -7
  71. disdrodb/l0/readers/PARSIVEL2/KIT/BURKINA_FASO.py +1 -1
  72. disdrodb/l0/readers/PARSIVEL2/KIT/TEAMX.py +123 -0
  73. disdrodb/l0/readers/PARSIVEL2/{NETHERLANDS/DELFT.py → MPI/BCO_PARSIVEL2.py} +41 -71
  74. disdrodb/l0/readers/PARSIVEL2/MPI/BOWTIE.py +220 -0
  75. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +120 -0
  76. disdrodb/l0/readers/PARSIVEL2/NASA/LPVEX.py +109 -0
  77. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +1 -0
  78. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +1 -1
  79. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +126 -0
  80. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_PIPS.py +165 -0
  81. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +1 -1
  82. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +20 -12
  83. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +5 -0
  84. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +144 -0
  85. disdrodb/l0/readers/PARSIVEL2/SPAIN/CR1000DL.py +201 -0
  86. disdrodb/l0/readers/PARSIVEL2/SPAIN/LIAISE.py +137 -0
  87. disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +146 -0
  88. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +105 -99
  89. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py +151 -0
  90. disdrodb/l1/__init__.py +5 -0
  91. disdrodb/l1/fall_velocity.py +46 -0
  92. disdrodb/l1/filters.py +34 -20
  93. disdrodb/l1/processing.py +46 -45
  94. disdrodb/l1/resampling.py +77 -66
  95. disdrodb/l1_env/routines.py +18 -3
  96. disdrodb/l2/__init__.py +7 -0
  97. disdrodb/l2/empirical_dsd.py +58 -10
  98. disdrodb/l2/processing.py +268 -117
  99. disdrodb/metadata/checks.py +132 -125
  100. disdrodb/metadata/standards.py +3 -1
  101. disdrodb/psd/fitting.py +631 -345
  102. disdrodb/psd/models.py +9 -6
  103. disdrodb/routines/__init__.py +54 -0
  104. disdrodb/{l0/routines.py → routines/l0.py} +316 -355
  105. disdrodb/{l1/routines.py → routines/l1.py} +76 -116
  106. disdrodb/routines/l2.py +1019 -0
  107. disdrodb/{routines.py → routines/wrappers.py} +98 -10
  108. disdrodb/scattering/__init__.py +16 -4
  109. disdrodb/scattering/axis_ratio.py +61 -37
  110. disdrodb/scattering/permittivity.py +504 -0
  111. disdrodb/scattering/routines.py +746 -184
  112. disdrodb/summary/__init__.py +17 -0
  113. disdrodb/summary/routines.py +4196 -0
  114. disdrodb/utils/archiving.py +434 -0
  115. disdrodb/utils/attrs.py +68 -125
  116. disdrodb/utils/cli.py +5 -5
  117. disdrodb/utils/compression.py +30 -1
  118. disdrodb/utils/dask.py +121 -9
  119. disdrodb/utils/dataframe.py +61 -7
  120. disdrodb/utils/decorators.py +31 -0
  121. disdrodb/utils/directories.py +35 -15
  122. disdrodb/utils/encoding.py +37 -19
  123. disdrodb/{l2 → utils}/event.py +15 -173
  124. disdrodb/utils/logger.py +14 -7
  125. disdrodb/utils/manipulations.py +81 -0
  126. disdrodb/utils/routines.py +166 -0
  127. disdrodb/utils/subsetting.py +214 -0
  128. disdrodb/utils/time.py +35 -177
  129. disdrodb/utils/writer.py +20 -7
  130. disdrodb/utils/xarray.py +5 -4
  131. disdrodb/viz/__init__.py +13 -0
  132. disdrodb/viz/plots.py +398 -0
  133. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/METADATA +4 -3
  134. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/RECORD +139 -98
  135. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/entry_points.txt +2 -0
  136. disdrodb/l1/encoding_attrs.py +0 -642
  137. disdrodb/l2/processing_options.py +0 -213
  138. disdrodb/l2/routines.py +0 -868
  139. /disdrodb/l0/readers/PARSIVEL/SLOVENIA/{UL_FGG.py → UL.py} +0 -0
  140. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/WHEEL +0 -0
  141. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/licenses/LICENSE +0 -0
  142. {disdrodb-0.1.2.dist-info → disdrodb-0.1.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1019 @@
+ # -----------------------------------------------------------------------------.
+ # Copyright (c) 2021-2023 DISDRODB developers
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
+ # -----------------------------------------------------------------------------.
+ """Implements routines for DISDRODB L2 processing."""
+
+ import copy
+ import datetime
+ import json
+ import logging
+ import os
+ import time
+ from typing import Optional
+
+ import pandas as pd
+
+ from disdrodb.api.checks import check_station_inputs
+ from disdrodb.api.create_directories import (
+     create_logs_directory,
+     create_product_directory,
+ )
+ from disdrodb.api.info import group_filepaths
+ from disdrodb.api.io import open_netcdf_files
+ from disdrodb.api.path import (
+     define_file_folder_path,
+     define_l2e_filename,
+     define_l2m_filename,
+     define_temporal_resolution,
+ )
+ from disdrodb.api.search import get_required_product
+ from disdrodb.configs import (
+     get_data_archive_dir,
+     get_metadata_archive_dir,
+     get_model_options,
+     get_product_options,
+     get_product_temporal_resolutions,
+ )
+ from disdrodb.l1.resampling import resample_dataset
+ from disdrodb.l2.processing import (
+     generate_l2_radar,
+     generate_l2e,
+     generate_l2m,
+ )
+ from disdrodb.metadata import read_station_metadata
+ from disdrodb.scattering.routines import precompute_scattering_tables
+ from disdrodb.utils.archiving import define_temporal_partitions, get_files_partitions
+ from disdrodb.utils.dask import execute_tasks_safely
+ from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
+ from disdrodb.utils.list import flatten_list
+
+ # Logger
+ from disdrodb.utils.logger import (
+     create_product_logs,
+     log_info,
+ )
+ from disdrodb.utils.routines import (
+     is_possible_product,
+     run_product_generation,
+     try_get_required_filepaths,
+ )
+ from disdrodb.utils.time import (
+     ensure_sample_interval_in_seconds,
+     get_resampling_information,
+ )
+ from disdrodb.utils.writer import write_product
+
+ logger = logging.getLogger(__name__)
+
+
+ ####----------------------------------------------------------------------------.
+
+
+ class ProcessingOptions:
+     """Define L2 products processing options."""
+
+     # TODO: TO MOVE ELSEWHERE (AFTER L1 REFACTORING !)
+
+     def __init__(self, product, filepaths, parallel, temporal_resolutions=None):
+         """Define L2 products processing options."""
+         import disdrodb
+
+         # ---------------------------------------------------------------------.
+         # Define temporal resolutions for which to retrieve processing options
+         if temporal_resolutions is None:
+             temporal_resolutions = get_product_temporal_resolutions(product)
+         elif isinstance(temporal_resolutions, str):
+             temporal_resolutions = [temporal_resolutions]
+
+         # ---------------------------------------------------------------------.
+         # Get product options at various temporal resolutions
+         dict_product_options = {
+             temporal_resolution: get_product_options(product, temporal_resolution=temporal_resolution)
+             for temporal_resolution in temporal_resolutions
+         }
+
+         # ---------------------------------------------------------------------.
+         # Group filepaths by source sample intervals
+         # - Typically the sample interval is fixed and is just one
+         # - Some stations might change the sample interval along the years
+         # - For each sample interval, separate processing takes place hereafter !
+         dict_filepaths = group_filepaths(filepaths, groups="sample_interval")
+
+         # ---------------------------------------------------------------------.
+         # Retrieve processing information for each temporal resolution
+         dict_folder_partitioning = {}
+         dict_files_partitions = {}
+         _cache_dict_list_partitions: dict[str, dict] = {}
+         for temporal_resolution in temporal_resolutions:
+
+             # -------------------------------------------------------------------------.
+             # Retrieve product options
+             product_options = dict_product_options[temporal_resolution].copy()
+
+             # Retrieve accumulation_interval and rolling option
+             accumulation_interval, rolling = get_resampling_information(temporal_resolution)
+
+             # Extract processing options
+             archive_options = product_options.pop("archive_options")
+
+             dict_product_options[temporal_resolution] = product_options
+             # -------------------------------------------------------------------------.
+             # Define folder partitioning
+             if "folder_partitioning" not in archive_options:
+                 dict_folder_partitioning[temporal_resolution] = disdrodb.config.get("folder_partitioning")
+             else:
+                 dict_folder_partitioning[temporal_resolution] = archive_options.pop("folder_partitioning")
+
+             # -------------------------------------------------------------------------.
+             # Define list of temporal partitions
+             # - [{start_time: np.datetime64, end_time: np.datetime64}, ....]
+             # - Either strategy: "event", "time_block" or "save_by_time_block"
+             # - "event" requires loading data into memory to identify events
+             #   --> Does some data filtering on what to process !
+             # - "time_block" does not require loading data into memory
+             #   --> Does not do data filtering on what to process !
+             # --> Here we cache dict_list_partitions so that we don't need to recompute
+             #     results if the processing options are the same
+             key = json.dumps(archive_options, sort_keys=True)
+             if key not in _cache_dict_list_partitions:
+                 _cache_dict_list_partitions[key] = {
+                     sample_interval: define_temporal_partitions(filepaths, parallel=parallel, **archive_options)
+                     for sample_interval, filepaths in dict_filepaths.items()
+                 }
+             dict_list_partitions = _cache_dict_list_partitions[key].copy()  # To avoid in-place replacement
+
+             # ------------------------------------------------------------------.
+             # Group filepaths by temporal partitions
+             # - This is done separately for each possible source sample interval
+             # - It groups filepaths by the start_time and end_time provided by list_partitions
+             # - Here 'events' can also simply be periods of time ('day', 'months', ...)
+             # - When aggregating/resampling/accumulating data, we also need to load some
+             #   data after the actual event end_time to ensure that the resampled dataset
+             #   contains the event_end_time
+             #   --> get_files_partitions adjusts the event end_time to account for the required "border" data.
+             # - ATTENTION: get_files_partitions returns start_time and end_time as datetime objects !
+             files_partitions = [
+                 get_files_partitions(
+                     list_partitions=list_partitions,
+                     filepaths=dict_filepaths[sample_interval],
+                     sample_interval=sample_interval,
+                     accumulation_interval=accumulation_interval,
+                     rolling=rolling,
+                 )
+                 for sample_interval, list_partitions in dict_list_partitions.items()
+                 if product != "L2E"
+                 or is_possible_product(
+                     accumulation_interval=accumulation_interval,
+                     sample_interval=sample_interval,
+                     rolling=rolling,
+                 )
+             ]
+             files_partitions = flatten_list(files_partitions)
+             dict_files_partitions[temporal_resolution] = files_partitions
+
+         # ------------------------------------------------------------------.
+         # Keep only temporal_resolutions for which events could be defined
+         # - Remove e.g. those whose accumulation_interval is not compatible with the source sample_interval
+         temporal_resolutions = [
+             temporal_resolution
+             for temporal_resolution in temporal_resolutions
+             if len(dict_files_partitions[temporal_resolution]) > 0
+         ]
+         # ------------------------------------------------------------------.
+         # Add attributes
+         self.temporal_resolutions = temporal_resolutions
+         self.dict_files_partitions = dict_files_partitions
+         self.dict_product_options = dict_product_options
+         self.dict_folder_partitioning = dict_folder_partitioning
+
+     def get_files_partitions(self, temporal_resolution):
+         """Return the files partitions dictionary for a specific temporal resolution."""
+         return self.dict_files_partitions[temporal_resolution]
+
+     def get_product_options(self, temporal_resolution):
+         """Return the product options dictionary for a specific temporal resolution."""
+         return self.dict_product_options[temporal_resolution]
+
+     def get_folder_partitioning(self, temporal_resolution):
+         """Return the folder partitioning for a specific temporal resolution."""
+         # to be used for logs and files !
+         return self.dict_folder_partitioning[temporal_resolution]
+
+
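For orientation: the `run_l2e_station` and `run_l2m_station` loops below consume each entry of `files_partitions` as a mapping with `start_time`, `end_time` and `filepaths` keys, with times returned as datetime objects per the ATTENTION note above. A minimal sketch of the expected shape, with illustrative values only:

    import datetime

    # One temporal partition ("event" or time block) and the source files overlapping it.
    event_info = {
        "start_time": datetime.datetime(2021, 1, 1, 0, 0, 0),
        "end_time": datetime.datetime(2021, 1, 2, 0, 0, 0),
        "filepaths": ["/path/to/L1/file_1.nc", "/path/to/L1/file_2.nc"],
    }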
+ ####----------------------------------------------------------------------------.
+ #### L2E
+
+
+ def define_l2e_logs_filename(campaign_name, station_name, start_time, end_time, accumulation_interval, rolling):
+     """Define L2E logs filename."""
+     temporal_resolution = define_temporal_resolution(seconds=accumulation_interval, rolling=rolling)
+     starting_time = pd.to_datetime(start_time).strftime("%Y%m%d%H%M%S")
+     ending_time = pd.to_datetime(end_time).strftime("%Y%m%d%H%M%S")
+     logs_filename = f"L2E.{temporal_resolution}.{campaign_name}.{station_name}.s{starting_time}.e{ending_time}"
+     return logs_filename
+
+
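Following the f-string above, and assuming define_temporal_resolution(seconds=600, rolling=False) returns "10MIN" (consistent with the L2E/10MIN.yaml config listed in this release), the filename resolves like this; the station identifiers are placeholders:

    define_l2e_logs_filename(
        campaign_name="CAMPAIGN_NAME",
        station_name="STATION_NAME",
        start_time="2021-01-01 00:00:00",
        end_time="2021-01-02 00:00:00",
        accumulation_interval=600,
        rolling=False,
    )
    # -> "L2E.10MIN.CAMPAIGN_NAME.STATION_NAME.s20210101000000.e20210102000000"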
+ @delayed_if_parallel
+ @single_threaded_if_parallel
+ def _generate_l2e(
+     start_time,
+     end_time,
+     filepaths,
+     data_dir,
+     logs_dir,
+     logs_filename,
+     folder_partitioning,
+     campaign_name,
+     station_name,
+     # L2E options
+     accumulation_interval,
+     rolling,
+     product_options,
+     # Processing options
+     force,
+     verbose,
+     parallel,  # used by the decorators and to correctly initialize the logger !
+ ):
+     """Generate the L2E product from the DISDRODB L1 netCDF file."""
+     # Define product
+     product = "L2E"
+
+     # Define product processing function
+     def core(
+         filepaths,
+         campaign_name,
+         station_name,
+         product_options,
+         # Processing options
+         logger,
+         parallel,
+         verbose,
+         force,
+         # Resampling arguments
+         start_time,
+         end_time,
+         accumulation_interval,
+         rolling,
+         # Archiving arguments
+         data_dir,
+         folder_partitioning,
+     ):
+         """Define L2E product processing."""
+         # Copy to avoid in-place replacement (outside this function)
+         product_options = product_options.copy()
+
+         # Open the dataset over the period of interest
+         ds = open_netcdf_files(filepaths, start_time=start_time, end_time=end_time, parallel=False)
+         ds = ds.load()
+         ds.close()
+
+         # Resample dataset  # TODO: in future to perform in L1
+         # - Define sample interval in seconds
+         sample_interval = ensure_sample_interval_in_seconds(ds["sample_interval"]).to_numpy().item()
+         # - Resample dataset
+         ds = resample_dataset(
+             ds=ds,
+             sample_interval=sample_interval,
+             accumulation_interval=accumulation_interval,
+             rolling=rolling,
+         )
+
+         # Extract L2E processing options
+         l2e_options = product_options.get("product_options")
+         radar_enabled = product_options.get("radar_enabled")
+         radar_options = product_options.get("radar_options")
+
+         # Ensure at least 2 timesteps are available
+         if ds["time"].size < 2:
+             log_info(logger=logger, msg="File not created. Less than two timesteps available.", verbose=verbose)
+             return None
+
+         # Compute L2E variables
+         ds = generate_l2e(ds=ds, **l2e_options)
+
+         # Ensure at least 2 timesteps are available
+         if ds["time"].size < 2:
+             log_info(logger=logger, msg="File not created. Less than two timesteps available.", verbose=verbose)
+             return None
+
+         # Simulate radar variables if requested
+         if radar_enabled:
+             ds_radar = generate_l2_radar(ds, parallel=not parallel, **radar_options)
+             ds.update(ds_radar)
+             ds.attrs = ds_radar.attrs.copy()
+
+         # Write L2E netCDF4 dataset
+         filename = define_l2e_filename(
+             ds,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             sample_interval=accumulation_interval,
+             rolling=rolling,
+         )
+         folder_path = define_file_folder_path(ds, dir_path=data_dir, folder_partitioning=folder_partitioning)
+         filepath = os.path.join(folder_path, filename)
+         write_product(ds, filepath=filepath, force=force)
+
+         # Return L2E dataset
+         return ds
+
+     # Define product processing function kwargs
+     core_func_kwargs = dict(  # noqa: C408
+         filepaths=filepaths,
+         campaign_name=campaign_name,
+         station_name=station_name,
+         product_options=product_options,
+         # Resampling arguments
+         start_time=start_time,
+         end_time=end_time,
+         accumulation_interval=accumulation_interval,
+         rolling=rolling,
+         # Archiving arguments
+         data_dir=data_dir,
+         folder_partitioning=folder_partitioning,
+         # Processing options
+         parallel=parallel,
+         verbose=verbose,
+         force=force,
+     )
+     # Run product generation
+     logger_filepath = run_product_generation(
+         product=product,
+         logs_dir=logs_dir,
+         logs_filename=logs_filename,
+         parallel=parallel,
+         verbose=verbose,
+         folder_partitioning=folder_partitioning,
+         core_func=core,
+         core_func_kwargs=core_func_kwargs,
+         pass_logger=True,
+     )
+     # Return the logger file path
+     return logger_filepath
+
+
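The `delayed_if_parallel` and `single_threaded_if_parallel` decorators come from `disdrodb.utils.decorators` (also touched in this release) and their implementation is not shown in this diff. A plausible minimal sketch of the first one, assuming it simply wraps the call in `dask.delayed` when `parallel=True`, as the "it does that in parallel using dask.delayed" comments below suggest:

    import functools

    import dask


    def delayed_if_parallel(function):
        """Sketch only, not the actual implementation: return a lazy dask task when parallel=True."""

        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            if kwargs.get("parallel", False):
                # Build a lazy task; execution happens later (e.g. in execute_tasks_safely)
                return dask.delayed(function)(*args, **kwargs)
            return function(*args, **kwargs)

        return wrapper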
+ def run_l2e_station(
+     # Station arguments
+     data_source,
+     campaign_name,
+     station_name,
+     # Processing options
+     force: bool = False,
+     verbose: bool = True,
+     parallel: bool = True,
+     debugging_mode: bool = False,
+     # DISDRODB root directories
+     data_archive_dir: Optional[str] = None,
+     metadata_archive_dir: Optional[str] = None,
+ ):
+     """
+     Generate the L2E product of a specific DISDRODB station when invoked from the terminal.
+
+     This function is intended to be called through the ``disdrodb_run_l2e_station``
+     command-line interface.
+
+     This routine generates L2E files.
+     Files are defined based on the DISDRODB archive settings.
+     The DISDRODB archive settings allow producing L2E files either
+     per custom time block (i.e. day/month/year) or per block of (rainy) events.
+
+     For stations with varying measurement intervals, DISDRODB defines a separate list of partitions
+     for each measurement interval option. In other words, DISDRODB does not
+     mix files with data acquired at different sample intervals when resampling the data.
+
+     L0C product generation ensures the creation of files with a unique sample interval.
+
+     Parameters
+     ----------
+     data_source : str
+         The name of the institution (for campaigns spanning multiple countries) or
+         the name of the country (for campaigns or sensor networks within a single country).
+         Must be provided in UPPER CASE.
+     campaign_name : str
+         The name of the campaign. Must be provided in UPPER CASE.
+     station_name : str
+         The name of the station.
+     force : bool, optional
+         If ``True``, existing data in the destination directories will be overwritten.
+         If ``False`` (default), an error will be raised if data already exists in the destination directories.
+     verbose : bool, optional
+         If ``True`` (default), detailed processing information will be printed to the terminal.
+         If ``False``, less information will be displayed.
+     parallel : bool, optional
+         If ``True`` (default), files will be processed in multiple processes simultaneously,
+         with each process using a single thread to avoid issues with the HDF/netCDF library.
+         If ``False``, files will be processed sequentially in a single process,
+         and multi-threading will be automatically exploited to speed up I/O tasks.
+     debugging_mode : bool, optional
+         If ``True``, the amount of data processed will be reduced.
+         Only the first 3 files will be processed. The default value is ``False``.
+     data_archive_dir : str, optional
+         The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
+         If not specified, the path specified in the DISDRODB active configuration will be used.
+
+     """
+     # Define product
+     product = "L2E"
+
+     # Define base directory
+     data_archive_dir = get_data_archive_dir(data_archive_dir)
+
+     # Retrieve DISDRODB Metadata Archive directory
+     metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir=metadata_archive_dir)
+
+     # Check valid data_source, campaign_name, and station_name
+     check_station_inputs(
+         metadata_archive_dir=metadata_archive_dir,
+         data_source=data_source,
+         campaign_name=campaign_name,
+         station_name=station_name,
+     )
+
+     # ------------------------------------------------------------------------.
+     # Start processing
+     if verbose:
+         t_i = time.time()
+         msg = f"{product} processing of station {station_name} has started."
+         log_info(logger=logger, msg=msg, verbose=verbose)
+
+     # -------------------------------------------------------------------------.
+     # List files to process
+     # - If no data available, print error message and return None
+     required_product = get_required_product(product)
+     filepaths = try_get_required_filepaths(
+         data_archive_dir=data_archive_dir,
+         data_source=data_source,
+         campaign_name=campaign_name,
+         station_name=station_name,
+         product=required_product,
+         # Processing options
+         debugging_mode=debugging_mode,
+     )
+     if filepaths is None:
+         return
+
+     # -------------------------------------------------------------------------.
+     # Retrieve L2E processing options
+     l2e_processing_options = ProcessingOptions(product="L2E", filepaths=filepaths, parallel=parallel)
+
+     # -------------------------------------------------------------------------.
+     # Generate products for each temporal resolution
+     # rolling = False
+     # accumulation_interval = 60
+     # temporal_resolution = "10MIN"
+     # folder_partitioning = ""
+     # product_options = l2e_processing_options.get_product_options(temporal_resolution)
+
+     for temporal_resolution in l2e_processing_options.temporal_resolutions:
+         # Print progress message
+         msg = f"Production of {product} {temporal_resolution} has started."
+         log_info(logger=logger, msg=msg, verbose=verbose)
+
+         # Retrieve event info
+         files_partitions = l2e_processing_options.get_files_partitions(temporal_resolution)
+
+         # Retrieve folder partitioning (for files and logs)
+         folder_partitioning = l2e_processing_options.get_folder_partitioning(temporal_resolution)
+
+         # Retrieve product options
+         product_options = l2e_processing_options.get_product_options(temporal_resolution)
+
+         # Retrieve accumulation_interval and rolling option
+         accumulation_interval, rolling = get_resampling_information(temporal_resolution)
+
+         # Precompute required scattering tables
+         if product_options["radar_enabled"]:
+             radar_options = product_options["radar_options"]
+             precompute_scattering_tables(verbose=verbose, **radar_options)
+
+         # ------------------------------------------------------------------.
+         # Create product directory
+         data_dir = create_product_directory(
+             data_archive_dir=data_archive_dir,
+             metadata_archive_dir=metadata_archive_dir,
+             data_source=data_source,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             product=product,
+             force=force,
+             # Option for L2E
+             sample_interval=accumulation_interval,
+             rolling=rolling,
+         )
+
+         # Define logs directory
+         logs_dir = create_logs_directory(
+             product=product,
+             data_archive_dir=data_archive_dir,
+             data_source=data_source,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             # Option for L2E
+             sample_interval=accumulation_interval,
+             rolling=rolling,
+         )
+
+         # ------------------------------------------------------------------.
+         # Generate files
+         # - L2E product generation is optionally parallelized over events
+         # - If parallel=True, it does that in parallel using dask.delayed
+         list_tasks = [
+             _generate_l2e(
+                 start_time=event_info["start_time"],
+                 end_time=event_info["end_time"],
+                 filepaths=event_info["filepaths"],
+                 data_dir=data_dir,
+                 logs_dir=logs_dir,
+                 logs_filename=define_l2e_logs_filename(
+                     campaign_name=campaign_name,
+                     station_name=station_name,
+                     start_time=event_info["start_time"],
+                     end_time=event_info["end_time"],
+                     rolling=rolling,
+                     accumulation_interval=accumulation_interval,
+                 ),
+                 folder_partitioning=folder_partitioning,
+                 campaign_name=campaign_name,
+                 station_name=station_name,
+                 # L2E options
+                 rolling=rolling,
+                 accumulation_interval=accumulation_interval,
+                 product_options=product_options,
+                 # Processing options
+                 force=force,
+                 verbose=verbose,
+                 parallel=parallel,
+             )
+             for event_info in files_partitions
+         ]
+         list_logs = execute_tasks_safely(list_tasks=list_tasks, parallel=parallel, logs_dir=logs_dir)
+
+         # -----------------------------------------------------------------.
+         # Define product summary logs
+         create_product_logs(
+             product=product,
+             data_source=data_source,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             data_archive_dir=data_archive_dir,
+             # Product options
+             sample_interval=accumulation_interval,
+             rolling=rolling,
+             # Logs list
+             list_logs=list_logs,
+         )
+
+     # ---------------------------------------------------------------------.
+     # End product processing
+     if verbose:
+         timedelta_str = str(datetime.timedelta(seconds=round(time.time() - t_i)))
+         msg = f"{product} processing of station {station_name} completed in {timedelta_str}"
+         log_info(logger=logger, msg=msg, verbose=verbose)
+
+
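For reference, the routine can also be driven directly from Python instead of the ``disdrodb_run_l2e_station`` CLI; a minimal sketch with placeholder station identifiers (the import path follows the new disdrodb/routines/l2.py module location):

    from disdrodb.routines.l2 import run_l2e_station

    run_l2e_station(
        data_source="DATA_SOURCE",      # placeholders: UPPER CASE source/campaign
        campaign_name="CAMPAIGN_NAME",
        station_name="STATION_NAME",
        parallel=False,                 # sequential, single process
        debugging_mode=True,            # process only the first 3 files
    )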
+ ####----------------------------------------------------------------------------.
+ #### L2M
+ def define_l2m_logs_filename(campaign_name, station_name, start_time, end_time, model_name, sample_interval, rolling):
+     """Define L2M logs filename."""
+     temporal_resolution = define_temporal_resolution(seconds=sample_interval, rolling=rolling)
+     starting_time = pd.to_datetime(start_time).strftime("%Y%m%d%H%M%S")
+     ending_time = pd.to_datetime(end_time).strftime("%Y%m%d%H%M%S")
+     logs_filename = (
+         f"L2M_{model_name}.{temporal_resolution}.{campaign_name}.{station_name}.s{starting_time}.e{ending_time}"
+     )
+     return logs_filename
+
+
+ @delayed_if_parallel
+ @single_threaded_if_parallel
+ def _generate_l2m(
+     start_time,
+     end_time,
+     filepaths,
+     data_dir,
+     logs_dir,
+     logs_filename,
+     folder_partitioning,
+     campaign_name,
+     station_name,
+     # L2M options
+     sample_interval,
+     rolling,
+     model_name,
+     product_options,
+     # Processing options
+     force,
+     verbose,
+     parallel,  # used only to initialize the correct logger !
+ ):
+     """Generate the L2M product from a DISDRODB L2E netCDF file."""
+     # Define product
+     product = "L2M"
+
+     # Define product processing function
+     def core(
+         start_time,
+         end_time,
+         filepaths,
+         campaign_name,
+         station_name,
+         # Processing options
+         logger,
+         verbose,
+         force,
+         # Product options
+         product_options,
+         sample_interval,
+         rolling,
+         model_name,
+         # Archiving arguments
+         data_dir,
+         folder_partitioning,
+     ):
+         """Define L2M product processing."""
+         # Copy to avoid in-place replacement (outside this function)
+         product_options = product_options.copy()
+
+         ##------------------------------------------------------------------------.
+         # Extract L2M processing options
+         l2m_options = product_options.get("product_options")
+         radar_enabled = product_options.get("radar_enabled")
+         radar_options = product_options.get("radar_options")
+
+         # Define variables to load
+         optimization_kwargs = l2m_options["optimization_kwargs"]
+         if "init_method" in optimization_kwargs:
+             init_method = optimization_kwargs["init_method"]
+             moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
+         else:
+             moments = ["M1"]
+
+         variables = [
+             "drop_number_concentration",
+             "fall_velocity",
+             "D50",
+             "Nw",
+             "Nt",
+             "N",
+             *moments,
+         ]
+
+         ##------------------------------------------------------------------------.
+         # Open the netCDF files
+         ds = open_netcdf_files(filepaths, start_time=start_time, end_time=end_time, variables=variables)
+         ds = ds.load()
+         ds.close()
+
+         # Produce L2M dataset
+         ds = generate_l2m(
+             ds=ds,
+             **l2m_options,
+         )
+
+         # Simulate L2M-based radar variables if requested
+         if radar_enabled:
+             ds_radar = generate_l2_radar(ds, parallel=not parallel, **radar_options)
+             ds.update(ds_radar)
+             ds.attrs = ds_radar.attrs.copy()  # ds_radar already contains all L2M attrs
+
+         # Ensure at least 2 timesteps are available
+         if ds["time"].size < 2:
+             log_info(logger=logger, msg="File not created. Less than two timesteps available.", verbose=verbose)
+             return None
+
+         # Write L2M netCDF4 dataset
+         filename = define_l2m_filename(
+             ds,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             sample_interval=sample_interval,
+             rolling=rolling,
+             model_name=model_name,
+         )
+         folder_path = define_file_folder_path(ds, dir_path=data_dir, folder_partitioning=folder_partitioning)
+         filepath = os.path.join(folder_path, filename)
+         write_product(ds, filepath=filepath, force=force)
+
+         # Return L2M dataset
+         return ds
+
+     # Define product processing function kwargs
+     core_func_kwargs = dict(  # noqa: C408
+         filepaths=filepaths,
+         start_time=start_time,
+         end_time=end_time,
+         campaign_name=campaign_name,
+         station_name=station_name,
+         # Processing options
+         verbose=verbose,
+         force=force,
+         # Product options
+         product_options=product_options,
+         sample_interval=sample_interval,
+         rolling=rolling,
+         model_name=model_name,
+         # Archiving arguments
+         data_dir=data_dir,
+         folder_partitioning=folder_partitioning,
+     )
+     # Run product generation
+     logger_filepath = run_product_generation(
+         product=product,
+         logs_dir=logs_dir,
+         logs_filename=logs_filename,
+         parallel=parallel,
+         verbose=verbose,
+         folder_partitioning=folder_partitioning,
+         core_func=core,
+         core_func_kwargs=core_func_kwargs,
+         pass_logger=True,
+     )
+     # Return the logger file path
+     return logger_filepath
+
+
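The `moments` list built in `core` above expands an `init_method` string into moment variable names by stripping the "M" characters and iterating over the remaining digits. A worked example of that comprehension, with an illustrative `init_method` value (the actual option strings are defined by the L2M model configs, not shown in this diff):

    init_method = "M234"  # illustrative value only
    moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
    print(moments)  # ['M2', 'M3', 'M4', 'M1']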
+ def run_l2m_station(
+     # Station arguments
+     data_source,
+     campaign_name,
+     station_name,
+     # Processing options
+     force: bool = False,
+     verbose: bool = True,
+     parallel: bool = True,
+     debugging_mode: bool = False,
+     # DISDRODB root directories
+     data_archive_dir: Optional[str] = None,
+     metadata_archive_dir: Optional[str] = None,
+ ):
+     """
+     Run the L2M processing of a specific DISDRODB station when invoked from the terminal.
+
+     This function is intended to be called through the ``disdrodb_run_l2m_station``
+     command-line interface.
+
+     Parameters
+     ----------
+     data_source : str
+         The name of the institution (for campaigns spanning multiple countries) or
+         the name of the country (for campaigns or sensor networks within a single country).
+         Must be provided in UPPER CASE.
+     campaign_name : str
+         The name of the campaign. Must be provided in UPPER CASE.
+     station_name : str
+         The name of the station.
+     force : bool, optional
+         If ``True``, existing data in the destination directories will be overwritten.
+         If ``False`` (default), an error will be raised if data already exists in the destination directories.
+     verbose : bool, optional
+         If ``True`` (default), detailed processing information will be printed to the terminal.
+         If ``False``, less information will be displayed.
+     parallel : bool, optional
+         If ``True`` (default), files will be processed in multiple processes simultaneously,
+         with each process using a single thread to avoid issues with the HDF/netCDF library.
+         If ``False``, files will be processed sequentially in a single process,
+         and multi-threading will be automatically exploited to speed up I/O tasks.
+     debugging_mode : bool, optional
+         If ``True``, the amount of data processed will be reduced.
+         Only the first 3 files will be processed. The default value is ``False``.
+     data_archive_dir : str, optional
+         The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
+         If not specified, the path specified in the DISDRODB active configuration will be used.
+
+     """
+     # Define product
+     product = "L2M"
+
+     # Define base directory
+     data_archive_dir = get_data_archive_dir(data_archive_dir)
+
+     # Retrieve DISDRODB Metadata Archive directory
+     metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
+
+     # Check valid data_source, campaign_name, and station_name
+     check_station_inputs(
+         metadata_archive_dir=metadata_archive_dir,
+         data_source=data_source,
+         campaign_name=campaign_name,
+         station_name=station_name,
+     )
+
+     # ------------------------------------------------------------------------.
+     # Start processing
+     if verbose:
+         t_i = time.time()
+         msg = f"{product} processing of station {station_name} has started."
+         log_info(logger=logger, msg=msg, verbose=verbose)
+
+     # ---------------------------------------------------------------------.
+     # Retrieve source sampling interval
+     # - If a station has a varying measurement interval over time, choose the smallest one !
+     metadata = read_station_metadata(
+         metadata_archive_dir=metadata_archive_dir,
+         data_source=data_source,
+         campaign_name=campaign_name,
+         station_name=station_name,
+     )
+     sample_interval = metadata["measurement_interval"]
+     if isinstance(sample_interval, list):
+         sample_interval = min(sample_interval)
+
+     # ---------------------------------------------------------------------.
+     # Loop
+     # temporal_resolution = "1MIN"
+     # temporal_resolution = "10MIN"
+     temporal_resolutions = get_product_temporal_resolutions("L2M")
+     for temporal_resolution in temporal_resolutions:
+
+         # Retrieve accumulation_interval and rolling option
+         accumulation_interval, rolling = get_resampling_information(temporal_resolution)
+
+         # ------------------------------------------------------------------.
+         # Avoid generation of rolling products at the source sample interval !
+         if rolling and accumulation_interval == sample_interval:
+             continue
+
+         # Avoid product generation if the accumulation_interval is less than the sample interval
+         if accumulation_interval < sample_interval:
+             continue
+
+         # -----------------------------------------------------------------.
+         # List files to process
+         # - If no data available, print error message and try with other L2E accumulation intervals
+         required_product = get_required_product(product)
+         filepaths = try_get_required_filepaths(
+             data_archive_dir=data_archive_dir,
+             data_source=data_source,
+             campaign_name=campaign_name,
+             station_name=station_name,
+             product=required_product,
+             # Processing options
+             debugging_mode=debugging_mode,
+             # Product options
+             sample_interval=accumulation_interval,
+             rolling=rolling,
+         )
+         if filepaths is None:
+             continue
+
+         # -------------------------------------------------------------------------.
+         # Retrieve L2M processing options
+         l2m_processing_options = ProcessingOptions(
+             product="L2M",
+             temporal_resolutions=temporal_resolution,
+             filepaths=filepaths,
+             parallel=parallel,
+         )
+
+         # Retrieve folder partitioning (for files and logs)
+         folder_partitioning = l2m_processing_options.get_folder_partitioning(temporal_resolution)
+
+         # Retrieve product options
+         global_product_options = l2m_processing_options.get_product_options(temporal_resolution)
+
+         # Retrieve files temporal partitions
+         files_partitions = l2m_processing_options.get_files_partitions(temporal_resolution)
+
+         if len(files_partitions) == 0:
+             msg = (
+                 f"{product} processing of {data_source} {campaign_name} {station_name} "
+                 + f"has not been launched because of missing {required_product} {temporal_resolution} data."
+             )
+             log_info(logger=logger, msg=msg, verbose=verbose)
+             continue
+
+         # -----------------------------------------------------------------.
+         # Loop over distributions to fit
+         # model_name = "GAMMA_ML"
+         # model_options = l2m_options["models"][model_name]
+         # Retrieve list of models to fit
+         models = global_product_options.pop("models")
+         for model_name in models:
+             # -----------------------------------------------------------------.
+             # Retrieve product-model options
+             product_options = copy.deepcopy(global_product_options)
+             model_options = get_model_options(product="L2M", model_name=model_name)
+             product_options["product_options"].update(model_options)
+
+             psd_model = model_options["psd_model"]
+             optimization = model_options["optimization"]
+
+             # Precompute required scattering tables
+             if product_options["radar_enabled"]:
+                 radar_options = product_options["radar_options"]
+                 precompute_scattering_tables(verbose=verbose, **radar_options)
+
+             # -----------------------------------------------------------------.
+             msg = f"Production of L2M_{model_name} for sample interval {accumulation_interval} s has started."
+             log_info(logger=logger, msg=msg, verbose=verbose)
+             msg = f"Estimating {psd_model} parameters using {optimization}."
+             log_info(logger=logger, msg=msg, verbose=verbose)
+
+             # -------------------------------------------------------------.
+             # Create product directory
+             data_dir = create_product_directory(
+                 # DISDRODB root directories
+                 data_archive_dir=data_archive_dir,
+                 metadata_archive_dir=metadata_archive_dir,
+                 # Station arguments
+                 data_source=data_source,
+                 campaign_name=campaign_name,
+                 station_name=station_name,
+                 # Processing options
+                 product=product,
+                 force=force,
+                 # Option for L2E
+                 sample_interval=accumulation_interval,
+                 rolling=rolling,
+                 # Option for L2M
+                 model_name=model_name,
+             )
+
+             # Define logs directory
+             logs_dir = create_logs_directory(
+                 product=product,
+                 data_archive_dir=data_archive_dir,
+                 # Station arguments
+                 data_source=data_source,
+                 campaign_name=campaign_name,
+                 station_name=station_name,
+                 # Option for L2E
+                 sample_interval=accumulation_interval,
+                 rolling=rolling,
+                 # Option for L2M
+                 model_name=model_name,
+             )
+
+             # Generate L2M files
+             # - Loop over the L2E netCDF files and generate L2M files.
+             # - If parallel=True, it does that in parallel using dask.delayed
+             list_tasks = [
+                 _generate_l2m(
+                     start_time=event_info["start_time"],
+                     end_time=event_info["end_time"],
+                     filepaths=event_info["filepaths"],
+                     data_dir=data_dir,
+                     logs_dir=logs_dir,
+                     logs_filename=define_l2m_logs_filename(
+                         campaign_name=campaign_name,
+                         station_name=station_name,
+                         start_time=event_info["start_time"],
+                         end_time=event_info["end_time"],
+                         model_name=model_name,
+                         sample_interval=accumulation_interval,
+                         rolling=rolling,
+                     ),
+                     folder_partitioning=folder_partitioning,
+                     campaign_name=campaign_name,
+                     station_name=station_name,
+                     # L2M options
+                     sample_interval=accumulation_interval,
+                     rolling=rolling,
+                     model_name=model_name,
+                     product_options=product_options,
+                     # Processing options
+                     force=force,
+                     verbose=verbose,
+                     parallel=parallel,
+                 )
+                 for event_info in files_partitions
+             ]
+             list_logs = execute_tasks_safely(list_tasks=list_tasks, parallel=parallel, logs_dir=logs_dir)
+
+             # -----------------------------------------------------------------.
+             # Define L2M summary logs
+             create_product_logs(
+                 product=product,
+                 # Station arguments
+                 data_source=data_source,
+                 campaign_name=campaign_name,
+                 station_name=station_name,
+                 # DISDRODB root directory
+                 data_archive_dir=data_archive_dir,
+                 # Product options
+                 model_name=model_name,
+                 sample_interval=accumulation_interval,
+                 rolling=rolling,
+                 # Logs list
+                 list_logs=list_logs,
+             )
+
+     # ---------------------------------------------------------------------.
+     # End L2M processing
+     if verbose:
+         timedelta_str = str(datetime.timedelta(seconds=round(time.time() - t_i)))
+         msg = f"{product} processing of station {station_name} completed in {timedelta_str}"
+         log_info(logger=logger, msg=msg, verbose=verbose)
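The L2M entry point can be driven the same way as the L2E one; a minimal sketch with placeholder identifiers (L2E files for the station must already exist, since the docstring above states L2M is generated from the L2E netCDF files):

    from disdrodb.routines.l2 import run_l2m_station

    run_l2m_station(
        data_source="DATA_SOURCE",      # placeholders: UPPER CASE source/campaign
        campaign_name="CAMPAIGN_NAME",
        station_name="STATION_NAME",
        parallel=False,
    )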