disdrodb 0.1.4__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. disdrodb/__init__.py +1 -5
  2. disdrodb/_version.py +2 -2
  3. disdrodb/accessor/methods.py +14 -3
  4. disdrodb/api/checks.py +10 -0
  5. disdrodb/api/create_directories.py +0 -2
  6. disdrodb/api/io.py +14 -17
  7. disdrodb/api/path.py +42 -77
  8. disdrodb/api/search.py +89 -23
  9. disdrodb/cli/disdrodb_create_summary.py +11 -1
  10. disdrodb/cli/disdrodb_create_summary_station.py +10 -0
  11. disdrodb/cli/disdrodb_run_l0.py +1 -1
  12. disdrodb/cli/disdrodb_run_l0a.py +1 -1
  13. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  15. disdrodb/cli/disdrodb_run_l1.py +1 -1
  16. disdrodb/cli/disdrodb_run_l2e.py +1 -1
  17. disdrodb/cli/disdrodb_run_l2m.py +1 -1
  18. disdrodb/configs.py +30 -83
  19. disdrodb/constants.py +4 -3
  20. disdrodb/data_transfer/download_data.py +4 -2
  21. disdrodb/docs.py +2 -2
  22. disdrodb/etc/products/L1/1MIN.yaml +13 -0
  23. disdrodb/etc/products/L1/LPM/1MIN.yaml +13 -0
  24. disdrodb/etc/products/L1/PARSIVEL/1MIN.yaml +13 -0
  25. disdrodb/etc/products/L1/PARSIVEL2/1MIN.yaml +13 -0
  26. disdrodb/etc/products/L1/PWS100/1MIN.yaml +13 -0
  27. disdrodb/etc/products/L1/RD80/1MIN.yaml +13 -0
  28. disdrodb/etc/products/L1/SWS250/1MIN.yaml +13 -0
  29. disdrodb/etc/products/L1/global.yaml +7 -1
  30. disdrodb/etc/products/L2E/10MIN.yaml +1 -12
  31. disdrodb/etc/products/L2E/5MIN.yaml +1 -0
  32. disdrodb/etc/products/L2E/global.yaml +1 -1
  33. disdrodb/etc/products/L2M/MODELS/GAMMA_GS_ND_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/{GAMMA_ML.yaml → MODELS/GAMMA_ML.yaml} +1 -1
  35. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_LOG_ND_MAE.yaml +6 -0
  36. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_ND_MAE.yaml +6 -0
  37. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_ML.yaml +8 -0
  38. disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_R_MAE.yaml +6 -0
  39. disdrodb/etc/products/L2M/global.yaml +11 -3
  40. disdrodb/l0/check_configs.py +49 -16
  41. disdrodb/l0/configs/LPM/l0a_encodings.yml +2 -2
  42. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +2 -2
  43. disdrodb/l0/configs/LPM/l0b_encodings.yml +2 -2
  44. disdrodb/l0/configs/LPM/raw_data_format.yml +2 -2
  45. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +1 -1
  46. disdrodb/l0/configs/PWS100/l0b_encodings.yml +1 -0
  47. disdrodb/l0/configs/SWS250/bins_diameter.yml +108 -0
  48. disdrodb/l0/configs/SWS250/bins_velocity.yml +83 -0
  49. disdrodb/l0/configs/SWS250/l0a_encodings.yml +18 -0
  50. disdrodb/l0/configs/SWS250/l0b_cf_attrs.yml +72 -0
  51. disdrodb/l0/configs/SWS250/l0b_encodings.yml +155 -0
  52. disdrodb/l0/configs/SWS250/raw_data_format.yml +148 -0
  53. disdrodb/l0/l0_reader.py +2 -2
  54. disdrodb/l0/l0b_processing.py +70 -15
  55. disdrodb/l0/l0c_processing.py +7 -3
  56. disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +1 -1
  57. disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +2 -2
  58. disdrodb/l0/readers/LPM/BELGIUM/ULIEGE.py +256 -0
  59. disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +2 -2
  60. disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +2 -2
  61. disdrodb/l0/readers/LPM/GERMANY/DWD.py +491 -0
  62. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +2 -2
  63. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
  64. disdrodb/l0/readers/LPM/KIT/CHWALA.py +2 -2
  65. disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +107 -12
  66. disdrodb/l0/readers/LPM/SLOVENIA/UL.py +3 -3
  67. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +2 -2
  68. disdrodb/l0/readers/PARSIVEL/BASQUECOUNTRY/EUSKALMET_OTT.py +227 -0
  69. disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/LPVEX.py +1 -1
  70. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +5 -14
  71. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +8 -17
  72. disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL.py +117 -8
  73. disdrodb/l0/readers/PARSIVEL2/BASQUECOUNTRY/EUSKALMET_OTT2.py +232 -0
  74. disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +10 -14
  75. disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +10 -14
  76. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +8 -14
  77. disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +382 -0
  78. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +4 -0
  79. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +1 -1
  80. disdrodb/l0/readers/PARSIVEL2/GREECE/NOA.py +127 -0
  81. disdrodb/l0/readers/PARSIVEL2/ITALY/HYDROX.py +239 -0
  82. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +5 -11
  83. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +4 -17
  84. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +5 -14
  85. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +10 -13
  86. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +10 -13
  87. disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/PAGASA.py +232 -0
  88. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +6 -18
  89. disdrodb/l0/readers/PARSIVEL2/{NASA/LPVEX.py → SPAIN/GRANADA.py} +46 -35
  90. disdrodb/l0/readers/PARSIVEL2/SWEDEN/SMHI.py +189 -0
  91. disdrodb/l0/readers/PARSIVEL2/USA/{C3WE.py → CW3E.py} +10 -28
  92. disdrodb/l0/readers/PWS100/AUSTRIA/HOAL.py +321 -0
  93. disdrodb/l0/readers/SW250/BELGIUM/KMI.py +239 -0
  94. disdrodb/l1/beard_model.py +31 -129
  95. disdrodb/l1/fall_velocity.py +136 -83
  96. disdrodb/l1/filters.py +25 -28
  97. disdrodb/l1/processing.py +16 -17
  98. disdrodb/l1/resampling.py +101 -38
  99. disdrodb/l1_env/routines.py +46 -17
  100. disdrodb/l2/empirical_dsd.py +6 -0
  101. disdrodb/l2/processing.py +6 -5
  102. disdrodb/metadata/geolocation.py +0 -2
  103. disdrodb/metadata/search.py +3 -4
  104. disdrodb/psd/fitting.py +16 -13
  105. disdrodb/routines/l0.py +2 -2
  106. disdrodb/routines/l1.py +173 -60
  107. disdrodb/routines/l2.py +148 -284
  108. disdrodb/routines/options.py +345 -0
  109. disdrodb/routines/wrappers.py +14 -1
  110. disdrodb/scattering/axis_ratio.py +90 -84
  111. disdrodb/scattering/permittivity.py +6 -0
  112. disdrodb/summary/routines.py +735 -670
  113. disdrodb/utils/archiving.py +51 -44
  114. disdrodb/utils/attrs.py +3 -1
  115. disdrodb/utils/dask.py +4 -4
  116. disdrodb/utils/dict.py +33 -0
  117. disdrodb/utils/encoding.py +6 -1
  118. disdrodb/utils/routines.py +9 -8
  119. disdrodb/utils/time.py +11 -3
  120. disdrodb/viz/__init__.py +0 -13
  121. disdrodb/viz/plots.py +231 -1
  122. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/METADATA +2 -1
  123. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/RECORD +135 -103
  124. /disdrodb/etc/products/L2M/{NGAMMA_GS_LOG_ND_MAE.yaml → MODELS/NGAMMA_GS_LOG_ND_MAE.yaml} +0 -0
  125. /disdrodb/etc/products/L2M/{NGAMMA_GS_ND_MAE.yaml → MODELS/NGAMMA_GS_ND_MAE.yaml} +0 -0
  126. /disdrodb/etc/products/L2M/{NGAMMA_GS_Z_MAE.yaml → MODELS/NGAMMA_GS_Z_MAE.yaml} +0 -0
  127. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/IFLOODS.py +0 -0
  128. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/MC3E.py +0 -0
  129. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/PIERS.py +0 -0
  130. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/GCPEX.py +0 -0
  131. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/NSSTC.py +0 -0
  132. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/WHEEL +0 -0
  133. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/entry_points.txt +0 -0
  134. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/licenses/LICENSE +0 -0
  135. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/top_level.txt +0 -0
@@ -21,6 +21,7 @@ import os
21
21
  import shutil
22
22
  import subprocess
23
23
  import tempfile
24
+ import warnings
24
25
 
25
26
  import matplotlib.lines as mlines
26
27
  import matplotlib.pyplot as plt
@@ -43,9 +44,16 @@ from disdrodb.utils.manipulations import (
43
44
  resample_drop_number_concentration,
44
45
  unstack_radar_variables,
45
46
  )
47
+ from disdrodb.utils.time import get_sampling_information
46
48
  from disdrodb.utils.warnings import suppress_warnings
47
49
  from disdrodb.utils.yaml import write_yaml
48
- from disdrodb.viz import compute_dense_lines, max_blend_images, to_rgba
50
+ from disdrodb.viz.plots import (
51
+ compute_dense_lines,
52
+ max_blend_images,
53
+ plot_raw_and_filtered_spectra,
54
+ plot_spectrum,
55
+ to_rgba,
56
+ )
49
57
 
50
58
  ####-----------------------------------------------------------------
51
59
  #### PDF Latex Utilities
@@ -151,11 +159,15 @@ def save_table_to_pdf(
151
159
  #### Tables summaries
152
160
 
153
161
 
154
- def create_table_rain_summary(df):
162
+ def create_table_rain_summary(df, temporal_resolution):
155
163
  """Create rainy table summary."""
156
164
  # Initialize dictionary
157
165
  table = {}
158
166
 
167
+ # Retrieve accumulation interval
168
+ accumulation_interval, _ = get_sampling_information(temporal_resolution)
169
+ accumulation_interval_minutes = accumulation_interval / 60
170
+
159
171
  # Keep rows with R > 0
160
172
  df = df[df["R"] > 0]
161
173
 
@@ -183,20 +195,34 @@ def create_table_rain_summary(df):
183
195
  table["years_month_coverage"] = years_month_coverage
184
196
 
185
197
  # Rainy minutes statistics
186
- table["n_rainy_minutes"] = len(df["R"])
187
- table["n_rainy_minutes_<0.1"] = df["R"].between(0, 0.1, inclusive="right").sum().item()
188
- table["n_rainy_minutes_0.1_1"] = df["R"].between(0.1, 1, inclusive="right").sum().item()
189
- table["n_rainy_minutes_1_10"] = df["R"].between(1, 10, inclusive="right").sum().item()
190
- table["n_rainy_minutes_10_25"] = df["R"].between(10, 25, inclusive="right").sum().item()
191
- table["n_rainy_minutes_25_50"] = df["R"].between(25, 50, inclusive="right").sum().item()
192
- table["n_rainy_minutes_50_100"] = df["R"].between(50, 100, inclusive="right").sum().item()
193
- table["n_rainy_minutes_100_200"] = df["R"].between(100, 200, inclusive="right").sum().item()
194
- table["n_rainy_minutes_>200"] = np.sum(df["R"] > 200).item()
198
+ table["n_rainy_minutes"] = len(df["R"]) * accumulation_interval_minutes
199
+ table["n_rainy_minutes_<0.1"] = (
200
+ df["R"].between(0, 0.1, inclusive="right").sum().item() * accumulation_interval_minutes
201
+ )
202
+ table["n_rainy_minutes_0.1_1"] = (
203
+ df["R"].between(0.1, 1, inclusive="right").sum().item() * accumulation_interval_minutes
204
+ )
205
+ table["n_rainy_minutes_1_10"] = (
206
+ df["R"].between(1, 10, inclusive="right").sum().item() * accumulation_interval_minutes
207
+ )
208
+ table["n_rainy_minutes_10_25"] = (
209
+ df["R"].between(10, 25, inclusive="right").sum().item() * accumulation_interval_minutes
210
+ )
211
+ table["n_rainy_minutes_25_50"] = (
212
+ df["R"].between(25, 50, inclusive="right").sum().item() * accumulation_interval_minutes
213
+ )
214
+ table["n_rainy_minutes_50_100"] = (
215
+ df["R"].between(50, 100, inclusive="right").sum().item() * accumulation_interval_minutes
216
+ )
217
+ table["n_rainy_minutes_100_200"] = (
218
+ df["R"].between(100, 200, inclusive="right").sum().item() * accumulation_interval_minutes
219
+ )
220
+ table["n_rainy_minutes_>200"] = np.sum(df["R"] > 200).item() * accumulation_interval_minutes
195
221
 
196
222
  # Minutes with larger Dmax
197
- table["n_minutes_Dmax_>7"] = np.sum(df["Dmax"] > 7).item()
198
- table["n_minutes_Dmax_>8"] = np.sum(df["Dmax"] > 8).item()
199
- table["n_minutes_Dmax_>9"] = np.sum(df["Dmax"] > 9).item()
223
+ table["n_minutes_Dmax_>7"] = np.sum(df["Dmax"] > 7).item() * accumulation_interval_minutes
224
+ table["n_minutes_Dmax_>8"] = np.sum(df["Dmax"] > 8).item() * accumulation_interval_minutes
225
+ table["n_minutes_Dmax_>9"] = np.sum(df["Dmax"] > 9).item() * accumulation_interval_minutes
200
226
  return table
201
227
 
202
228
 
@@ -247,22 +273,36 @@ def create_table_dsd_summary(df):
247
273
  df_stats["SKEWNESS"] = df_subset.skew()
248
274
  df_stats["KURTOSIS"] = df_subset.kurt()
249
275
 
250
- # Round statistics
251
- df_stats = df_stats.astype(float).round(2)
276
+ # Round float columns to nearest integer, leave ints unchanged
277
+ float_cols = df_stats.select_dtypes(include=["float"]).columns
278
+ df_stats[float_cols] = df_stats[float_cols].astype(float).round(decimals=2)
252
279
  return df_stats
253
280
 
254
281
 
255
- def create_table_events_summary(df):
256
- """Creata table with events statistics."""
257
- # Event file
282
+ def create_table_events_summary(df, temporal_resolution):
283
+ """Create table with events statistics."""
284
+ # Retrieve accumulation interval
285
+ accumulation_interval, _ = get_sampling_information(temporal_resolution)
286
+ accumulation_interval_minutes = accumulation_interval / 60
287
+
288
+ # Define event settings
258
289
  # - Events are separated by 1 hour or more rain-free periods in rain rate time series.
259
290
  # - The events that are less than 'min_duration' minutes or the rain total is less than 0.1 mm
260
291
  # are not reported.
292
+ if accumulation_interval_minutes >= 5 * 60:
293
+ neighbor_time_interval = temporal_resolution
294
+ event_min_duration = temporal_resolution
295
+ neighbor_min_size = 1
296
+ else:
297
+ neighbor_time_interval = "5MIN"
298
+ event_min_duration = "5MIN"
299
+ neighbor_min_size = 2
300
+
261
301
  event_settings = {
262
- "neighbor_min_size": 2,
263
- "neighbor_time_interval": "5MIN",
302
+ "neighbor_min_size": neighbor_min_size,
303
+ "neighbor_time_interval": neighbor_time_interval,
264
304
  "event_max_time_gap": "1H",
265
- "event_min_duration": "5MIN",
305
+ "event_min_duration": event_min_duration,
266
306
  "event_min_size": 3,
267
307
  }
268
308
  # Keep rows with R > 0
@@ -296,9 +336,12 @@ def create_table_events_summary(df):
296
336
  # Event time info
297
337
  "start_time": start,
298
338
  "end_time": end,
299
- "duration": int((end - start) / np.timedelta64(1, "m")),
339
+ "duration": int((end - start) / np.timedelta64(1, "m")) + accumulation_interval_minutes,
300
340
  # Rainy minutes above thresholds
301
- **{f"rainy_minutes_>{thr}": int((df_event["R"] > thr).sum()) for thr in rain_thresholds},
341
+ **{
342
+ f"rainy_minutes_>{thr}": int((df_event["R"] > thr).sum()) * accumulation_interval_minutes
343
+ for thr in rain_thresholds
344
+ },
302
345
  # Total precipitation (mm)
303
346
  "P_total": df_event["P"].sum(),
304
347
  # R statistics
@@ -327,15 +370,19 @@ def create_table_events_summary(df):
327
370
  events_stats.append(event_stats)
328
371
 
329
372
  df_events = pd.DataFrame.from_records(events_stats)
373
+
374
+ # Round float columns to nearest integer, leave ints unchanged
375
+ float_cols = df_events.select_dtypes(include=["float"]).columns
376
+ df_events[float_cols] = df_events[float_cols].astype(float).round(decimals=2)
330
377
  return df_events
331
378
 
332
379
 
333
380
  def prepare_latex_table_dsd_summary(df):
334
381
  """Prepare a DataFrame with DSD statistics for LaTeX table output."""
335
382
  df = df.copy()
336
- # Round float columns to nearest integer, leave ints unchanged
337
- float_cols = df.select_dtypes(include=["float"]).columns
338
- df[float_cols] = df[float_cols].astype(float).round(decimals=2).astype(str)
383
+ # Cast numeric columns to string
384
+ numeric_cols = df.select_dtypes(include=["float", "int"]).columns
385
+ df[numeric_cols] = df[numeric_cols].astype(str)
339
386
  # Rename
340
387
  rename_dict = {
341
388
  "W": r"$W\,[\mathrm{g}\,\mathrm{m}^{-3}]$", # [g/m3]
@@ -360,9 +407,9 @@ def prepare_latex_table_events_summary(df):
360
407
  # Round datetime to minutes
361
408
  df["start_time"] = df["start_time"].dt.strftime("%Y-%m-%d %H:%M")
362
409
  df["end_time"] = df["end_time"].dt.strftime("%Y-%m-%d %H:%M")
363
- # Round float columns to nearest integer, leave ints unchanged
364
- float_cols = df.select_dtypes(include=["float"]).columns
365
- df[float_cols] = df[float_cols].astype(float).round(decimals=2).astype(str)
410
+ # Cast numeric columns to string
411
+ numeric_cols = df.select_dtypes(include=["float", "int"]).columns
412
+ df[numeric_cols] = df[numeric_cols].astype(str)
366
413
  # Rename
367
414
  rename_dict = {
368
415
  "start_time": r"Start",
@@ -646,92 +693,6 @@ def predict_from_inverse_powerlaw(x, a, b):
646
693
  return (x ** (1 / b)) / (a ** (1 / b))
647
694
 
648
695
 
649
- ####-------------------------------------------------------------------
650
- #### Drop spectrum plots
651
-
652
-
653
- def plot_drop_spectrum(drop_number, norm=None, add_colorbar=True, title="Drop Spectrum"):
654
- """Plot the drop spectrum."""
655
- cmap = plt.get_cmap("Spectral_r").copy()
656
- cmap.set_under("none")
657
- if "time" in drop_number.dims:
658
- drop_number = drop_number.sum(dim="time")
659
- if norm is None:
660
- norm = LogNorm(vmin=1, vmax=None) if drop_number.sum() > 0 else None
661
-
662
- p = drop_number.plot.pcolormesh(
663
- x=DIAMETER_DIMENSION,
664
- y=VELOCITY_DIMENSION,
665
- cmap=cmap,
666
- extend="max",
667
- norm=norm,
668
- add_colorbar=add_colorbar,
669
- cbar_kwargs={"label": "Number of particles"},
670
- )
671
- p.axes.set_xlabel("Diamenter [mm]")
672
- p.axes.set_ylabel("Fall velocity [m/s]")
673
- p.axes.set_title(title)
674
- return p
675
-
676
-
677
- def plot_raw_and_filtered_spectrums(
678
- raw_drop_number,
679
- drop_number,
680
- theoretical_average_velocity,
681
- measured_average_velocity=None,
682
- norm=None,
683
- figsize=(8, 4),
684
- dpi=300,
685
- ):
686
- """Plot raw and filtered drop spectrum."""
687
- # Drop number matrix
688
- cmap = plt.get_cmap("Spectral_r").copy()
689
- cmap.set_under("none")
690
-
691
- if norm is None:
692
- norm = LogNorm(1, None)
693
-
694
- fig = plt.figure(figsize=figsize, dpi=dpi)
695
- gs = GridSpec(1, 2, width_ratios=[1, 1.15], wspace=0.05) # More space for ax2
696
- ax1 = fig.add_subplot(gs[0])
697
- ax2 = fig.add_subplot(gs[1])
698
-
699
- raw_drop_number.plot.pcolormesh(
700
- x=DIAMETER_DIMENSION,
701
- y=VELOCITY_DIMENSION,
702
- ax=ax1,
703
- cmap=cmap,
704
- norm=norm,
705
- extend="max",
706
- add_colorbar=False,
707
- )
708
- theoretical_average_velocity.plot(ax=ax1, c="k", linestyle="dashed")
709
- if measured_average_velocity is not None:
710
- measured_average_velocity.plot(ax=ax1, c="k", linestyle="dotted")
711
- ax1.set_xlabel("Diamenter [mm]")
712
- ax1.set_ylabel("Fall velocity [m/s]")
713
- ax1.set_title("Raw Spectrum")
714
- drop_number.plot.pcolormesh(
715
- x=DIAMETER_DIMENSION,
716
- y=VELOCITY_DIMENSION,
717
- cmap=cmap,
718
- extend="max",
719
- ax=ax2,
720
- norm=norm,
721
- cbar_kwargs={"label": "Number of particles"},
722
- )
723
- theoretical_average_velocity.plot(ax=ax2, c="k", linestyle="dashed", label="Theoretical velocity")
724
- if measured_average_velocity is not None:
725
- measured_average_velocity.plot(ax=ax2, c="k", linestyle="dotted", label="Measured average velocity")
726
- ax2.set_yticks([])
727
- ax2.set_yticklabels([])
728
- ax2.set_xlabel("Diamenter [mm]")
729
- ax2.set_ylabel("")
730
- ax2.set_title("Filtered Spectrum")
731
- ax2.legend(loc="lower right", frameon=False)
732
- return fig
733
-
734
-
735
696
  ####-------------------------------------------------------------------
736
697
  #### N(D) Climatological plots
737
698
 
@@ -2071,31 +2032,34 @@ def plot_A_R(
2071
2032
  ax.set_yticklabels([str(v) for v in a_ticks])
2072
2033
  ax.set_title(title)
2073
2034
  if add_fit:
2074
- # Fit powerlaw k = a * R ** b
2075
- (a_c, b), _ = fit_powerlaw(x=df[r], y=df[a], xbins=r_bins, x_in_db=False)
2076
- # Invert for R = A * k ** B
2077
- A_c, B = inverse_powerlaw_parameters(a_c, b)
2078
- # Define legend title
2079
- a_str = _define_coeff_string(a_c)
2080
- A_str = _define_coeff_string(A_c)
2081
- legend_str = rf"${a_symbol} = {a_str} \, R^{{{b:.2f}}}$" "\n" rf"$R = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2082
- # Get power law predictions
2083
- x_pred = np.arange(*rlims)
2084
- r_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2085
- # Add fitted power law
2086
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2087
- # Add legend
2088
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2089
- ax.text(
2090
- 0.05,
2091
- 0.95,
2092
- legend_str,
2093
- transform=ax.transAxes,
2094
- ha="left",
2095
- va="top",
2096
- fontsize=legend_fontsize,
2097
- bbox=legend_bbox_dict,
2098
- )
2035
+ try:
2036
+ # Fit powerlaw k = a * R ** b
2037
+ (a_c, b), _ = fit_powerlaw(x=df[r], y=df[a], xbins=r_bins, x_in_db=False)
2038
+ # Invert for R = A * k ** B
2039
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
2040
+ # Define legend title
2041
+ a_str = _define_coeff_string(a_c)
2042
+ A_str = _define_coeff_string(A_c)
2043
+ legend_str = rf"${a_symbol} = {a_str} \, R^{{{b:.2f}}}$" "\n" rf"$R = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2044
+ # Get power law predictions
2045
+ x_pred = np.arange(*rlims)
2046
+ r_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2047
+ # Add fitted power law
2048
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2049
+ # Add legend
2050
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2051
+ ax.text(
2052
+ 0.05,
2053
+ 0.95,
2054
+ legend_str,
2055
+ transform=ax.transAxes,
2056
+ ha="left",
2057
+ va="top",
2058
+ fontsize=legend_fontsize,
2059
+ bbox=legend_bbox_dict,
2060
+ )
2061
+ except Exception as e:
2062
+ warnings.warn(f"Could not fit power law in plot_A_R: {e!s}", UserWarning, stacklevel=2)
2099
2063
  return p
2100
2064
 
2101
2065
 
@@ -2176,40 +2140,43 @@ def plot_A_Z(
2176
2140
 
2177
2141
  # Fit and plot the power law
2178
2142
  if add_fit:
2179
- # Fit powerlaw k = a * Z ** b (Z in dBZ -> x_in_db=True)
2180
- (a_c, b), _ = fit_powerlaw(
2181
- x=df[z],
2182
- y=df[a],
2183
- xbins=z_bins,
2184
- x_in_db=True,
2185
- )
2186
- # Invert for Z = A * k ** B
2187
- A_c, B = inverse_powerlaw_parameters(a_c, b)
2188
- # Legend text
2189
- a_str = _define_coeff_string(a_c)
2190
- A_str = _define_coeff_string(A_c)
2191
- legend_str = (
2192
- rf"${a_symbol} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2193
- "\n"
2194
- rf"${z_lower_symbol} = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2195
- )
2196
- # Predictions
2197
- x_pred = np.arange(*z_lim)
2198
- x_pred_linear = disdrodb.idecibel(x_pred) # convert to linear for prediction
2199
- y_pred = predict_from_powerlaw(x_pred_linear, a=a_c, b=b)
2200
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2201
- # Add legend
2202
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2203
- ax.text(
2204
- 0.05,
2205
- 0.95,
2206
- legend_str,
2207
- transform=ax.transAxes,
2208
- ha="left",
2209
- va="top",
2210
- fontsize=legend_fontsize,
2211
- bbox=legend_bbox_dict,
2212
- )
2143
+ try:
2144
+ # Fit powerlaw k = a * Z ** b (Z in dBZ -> x_in_db=True)
2145
+ (a_c, b), _ = fit_powerlaw(
2146
+ x=df[z],
2147
+ y=df[a],
2148
+ xbins=z_bins,
2149
+ x_in_db=True,
2150
+ )
2151
+ # Invert for Z = A * k ** B
2152
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
2153
+ # Legend text
2154
+ a_str = _define_coeff_string(a_c)
2155
+ A_str = _define_coeff_string(A_c)
2156
+ legend_str = (
2157
+ rf"${a_symbol} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2158
+ "\n"
2159
+ rf"${z_lower_symbol} = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2160
+ )
2161
+ # Predictions
2162
+ x_pred = np.arange(*z_lim)
2163
+ x_pred_linear = disdrodb.idecibel(x_pred) # convert to linear for prediction
2164
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a_c, b=b)
2165
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2166
+ # Add legend
2167
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2168
+ ax.text(
2169
+ 0.05,
2170
+ 0.95,
2171
+ legend_str,
2172
+ transform=ax.transAxes,
2173
+ ha="left",
2174
+ va="top",
2175
+ fontsize=legend_fontsize,
2176
+ bbox=legend_bbox_dict,
2177
+ )
2178
+ except Exception as e:
2179
+ warnings.warn(f"Could not fit power law in plot_A_Z: {e!s}", UserWarning, stacklevel=2)
2213
2180
  return p
2214
2181
 
2215
2182
 
@@ -2309,43 +2276,46 @@ def plot_A_KDP(
2309
2276
 
2310
2277
  # Fit and overlay power law: k = a * KDP^b
2311
2278
  if add_fit:
2312
- (a_c, b), _ = fit_powerlaw(
2313
- x=df[kdp],
2314
- y=df[a],
2315
- xbins=kdp_bins,
2316
- x_in_db=False,
2317
- )
2318
- # Invert: KDP = A * k^B
2319
- A_c, B = inverse_powerlaw_parameters(a_c, b)
2320
-
2321
- a_str = _define_coeff_string(a_c)
2322
- A_str = _define_coeff_string(A_c)
2323
- legend_str = (
2324
- rf"${a_symbol} = {a_str}\,K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2325
- "\n"
2326
- rf"$K_{{\mathrm{{DP}}}} = {A_str}\,{a_symbol}^{{{B:.2f}}}$"
2327
- )
2328
-
2329
- # Predictions along KDP axis
2330
- if log_kdp:
2331
- x_pred = np.logspace(np.log10(kdp_lim[0]), np.log10(kdp_lim[1]), 400)
2332
- else:
2333
- x_pred = np.arange(kdp_lim[0], kdp_lim[1], 0.05)
2334
- y_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2279
+ try:
2280
+ (a_c, b), _ = fit_powerlaw(
2281
+ x=df[kdp],
2282
+ y=df[a],
2283
+ xbins=kdp_bins,
2284
+ x_in_db=False,
2285
+ )
2286
+ # Invert: KDP = A * k^B
2287
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
2288
+
2289
+ a_str = _define_coeff_string(a_c)
2290
+ A_str = _define_coeff_string(A_c)
2291
+ legend_str = (
2292
+ rf"${a_symbol} = {a_str}\,K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2293
+ "\n"
2294
+ rf"$K_{{\mathrm{{DP}}}} = {A_str}\,{a_symbol}^{{{B:.2f}}}$"
2295
+ )
2335
2296
 
2336
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2337
- # Add legend
2338
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2339
- ax.text(
2340
- 0.05,
2341
- 0.95,
2342
- legend_str,
2343
- transform=ax.transAxes,
2344
- ha="left",
2345
- va="top",
2346
- fontsize=legend_fontsize,
2347
- bbox=legend_bbox_dict,
2348
- )
2297
+ # Predictions along KDP axis
2298
+ if log_kdp:
2299
+ x_pred = np.logspace(np.log10(kdp_lim[0]), np.log10(kdp_lim[1]), 400)
2300
+ else:
2301
+ x_pred = np.arange(kdp_lim[0], kdp_lim[1], 0.05)
2302
+ y_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2303
+
2304
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2305
+ # Add legend
2306
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2307
+ ax.text(
2308
+ 0.05,
2309
+ 0.95,
2310
+ legend_str,
2311
+ transform=ax.transAxes,
2312
+ ha="left",
2313
+ va="top",
2314
+ fontsize=legend_fontsize,
2315
+ bbox=legend_bbox_dict,
2316
+ )
2317
+ except Exception as e:
2318
+ warnings.warn(f"Could not fit power law in plot_A_KDP: {e!s}", UserWarning, stacklevel=2)
2349
2319
 
2350
2320
  return p
2351
2321
 
@@ -2422,34 +2392,37 @@ def plot_R_Z(
2422
2392
 
2423
2393
  # Fit and plot the powerlaw
2424
2394
  if add_fit:
2425
- # Fit powerlaw R = a * z ** b
2426
- (a, b), _ = fit_powerlaw(x=df[z], y=df[r], xbins=np.arange(10, 50, 1), x_in_db=True)
2427
- # Invert for z = A * R ** B
2428
- A, B = inverse_powerlaw_parameters(a, b)
2429
- # Define legend title
2430
- a_str = _define_coeff_string(a)
2431
- A_str = _define_coeff_string(A)
2432
- legend_str = (
2433
- rf"$R = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$" "\n" rf"${z_lower_symbol} = {A_str} \, R^{{{B:.2f}}}$"
2434
- )
2435
- # Get power law predictions
2436
- x_pred = np.arange(*z_lims)
2437
- x_pred_linear = disdrodb.idecibel(x_pred)
2438
- r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2439
- # Add fitted powerlaw
2440
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2441
- # Add legend
2442
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2443
- ax.text(
2444
- 0.05,
2445
- 0.95,
2446
- legend_str,
2447
- transform=ax.transAxes,
2448
- ha="left",
2449
- va="top",
2450
- fontsize=legend_fontsize,
2451
- bbox=legend_bbox_dict,
2452
- )
2395
+ try:
2396
+ # Fit powerlaw R = a * z ** b
2397
+ (a, b), _ = fit_powerlaw(x=df[z], y=df[r], xbins=np.arange(10, 50, 1), x_in_db=True)
2398
+ # Invert for z = A * R ** B
2399
+ A, B = inverse_powerlaw_parameters(a, b)
2400
+ # Define legend title
2401
+ a_str = _define_coeff_string(a)
2402
+ A_str = _define_coeff_string(A)
2403
+ legend_str = (
2404
+ rf"$R = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$" "\n" rf"${z_lower_symbol} = {A_str} \, R^{{{B:.2f}}}$"
2405
+ )
2406
+ # Get power law predictions
2407
+ x_pred = np.arange(*z_lims)
2408
+ x_pred_linear = disdrodb.idecibel(x_pred)
2409
+ r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2410
+ # Add fitted powerlaw
2411
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2412
+ # Add legend
2413
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2414
+ ax.text(
2415
+ 0.05,
2416
+ 0.95,
2417
+ legend_str,
2418
+ transform=ax.transAxes,
2419
+ ha="left",
2420
+ va="top",
2421
+ fontsize=legend_fontsize,
2422
+ bbox=legend_bbox_dict,
2423
+ )
2424
+ except Exception as e:
2425
+ warnings.warn(f"Could not fit power law in plot_R_Z: {e!s}", UserWarning, stacklevel=2)
2453
2426
  return p
2454
2427
 
2455
2428
 
@@ -2532,35 +2505,38 @@ def plot_R_KDP(
2532
2505
 
2533
2506
  # Fit and plot the power law
2534
2507
  if add_fit:
2535
- # Fit powerlaw R = a * KDP ** b
2536
- (a, b), _ = fit_powerlaw(x=df[kdp], y=df[r], xbins=xbins, x_in_db=False)
2537
- # Invert for KDP = A * R ** B
2538
- A, B = inverse_powerlaw_parameters(a, b)
2539
- # Define legend title
2540
- a_str = _define_coeff_string(a)
2541
- A_str = _define_coeff_string(A)
2542
- legend_str = (
2543
- rf"$R = {a_str} \, K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2544
- "\n"
2545
- rf"$K_{{\mathrm{{DP}}}} = {A_str} \, R^{{{B:.2f}}}$"
2546
- )
2547
- # Get power law predictions
2548
- x_pred = np.arange(*kdp_lim)
2549
- r_pred = predict_from_powerlaw(x_pred, a=a, b=b)
2550
- # Add fitted line
2551
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2552
- # Add legend
2553
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2554
- ax.text(
2555
- 0.05,
2556
- 0.95,
2557
- legend_str,
2558
- transform=ax.transAxes,
2559
- ha="left",
2560
- va="top",
2561
- fontsize=legend_fontsize,
2562
- bbox=legend_bbox_dict,
2563
- )
2508
+ try:
2509
+ # Fit powerlaw R = a * KDP ** b
2510
+ (a, b), _ = fit_powerlaw(x=df[kdp], y=df[r], xbins=xbins, x_in_db=False)
2511
+ # Invert for KDP = A * R ** B
2512
+ A, B = inverse_powerlaw_parameters(a, b)
2513
+ # Define legend title
2514
+ a_str = _define_coeff_string(a)
2515
+ A_str = _define_coeff_string(A)
2516
+ legend_str = (
2517
+ rf"$R = {a_str} \, K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2518
+ "\n"
2519
+ rf"$K_{{\mathrm{{DP}}}} = {A_str} \, R^{{{B:.2f}}}$"
2520
+ )
2521
+ # Get power law predictions
2522
+ x_pred = np.arange(*kdp_lim)
2523
+ r_pred = predict_from_powerlaw(x_pred, a=a, b=b)
2524
+ # Add fitted line
2525
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2526
+ # Add legend
2527
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2528
+ ax.text(
2529
+ 0.05,
2530
+ 0.95,
2531
+ legend_str,
2532
+ transform=ax.transAxes,
2533
+ ha="left",
2534
+ va="top",
2535
+ fontsize=legend_fontsize,
2536
+ bbox=legend_bbox_dict,
2537
+ )
2538
+ except Exception as e:
2539
+ warnings.warn(f"Could not fit power law in plot_R_KDP: {e!s}", UserWarning, stacklevel=2)
2564
2540
  return p
2565
2541
 
2566
2542
 
@@ -2628,41 +2604,44 @@ def plot_ZDR_Z(
2628
2604
 
2629
2605
  # Fit and plot the power law
2630
2606
  if add_fit:
2631
- # Fit powerlaw ZDR = a * Z ** b
2632
- (a, b), _ = fit_powerlaw(
2633
- x=df[z],
2634
- y=df[zdr],
2635
- xbins=np.arange(5, 40, 1),
2636
- x_in_db=True,
2637
- )
2638
- # Invert for Z = A * ZDR ** B
2639
- A, B = inverse_powerlaw_parameters(a, b)
2640
- # Define legend title
2641
- a_str = _define_coeff_string(a)
2642
- A_str = _define_coeff_string(A)
2643
- legend_str = (
2644
- rf"$Z_{{\mathrm{{DR}}}} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2645
- "\n"
2646
- rf"${z_lower_symbol} = {A_str} \, Z_{{\mathrm{{DR}}}}^{{{B:.2f}}}$"
2647
- )
2648
- # Get power law predictions
2649
- x_pred = np.arange(0, 70)
2650
- x_pred_linear = disdrodb.idecibel(x_pred)
2651
- r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2652
- # Add fitted line
2653
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2654
- # Add legend
2655
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2656
- ax.text(
2657
- 0.05,
2658
- 0.95,
2659
- legend_str,
2660
- transform=ax.transAxes,
2661
- ha="left",
2662
- va="top",
2663
- fontsize=legend_fontsize,
2664
- bbox=legend_bbox_dict,
2665
- )
2607
+ try:
2608
+ # Fit powerlaw ZDR = a * Z ** b
2609
+ (a, b), _ = fit_powerlaw(
2610
+ x=df[z],
2611
+ y=df[zdr],
2612
+ xbins=np.arange(5, 40, 1),
2613
+ x_in_db=True,
2614
+ )
2615
+ # Invert for Z = A * ZDR ** B
2616
+ A, B = inverse_powerlaw_parameters(a, b)
2617
+ # Define legend title
2618
+ a_str = _define_coeff_string(a)
2619
+ A_str = _define_coeff_string(A)
2620
+ legend_str = (
2621
+ rf"$Z_{{\mathrm{{DR}}}} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2622
+ "\n"
2623
+ rf"${z_lower_symbol} = {A_str} \, Z_{{\mathrm{{DR}}}}^{{{B:.2f}}}$"
2624
+ )
2625
+ # Get power law predictions
2626
+ x_pred = np.arange(0, 70)
2627
+ x_pred_linear = disdrodb.idecibel(x_pred)
2628
+ r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2629
+ # Add fitted line
2630
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2631
+ # Add legend
2632
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2633
+ ax.text(
2634
+ 0.05,
2635
+ 0.95,
2636
+ legend_str,
2637
+ transform=ax.transAxes,
2638
+ ha="left",
2639
+ va="top",
2640
+ fontsize=legend_fontsize,
2641
+ bbox=legend_bbox_dict,
2642
+ )
2643
+ except Exception as e:
2644
+ warnings.warn(f"Could not fit power law in plot_ZDR_Z: {e!s}", UserWarning, stacklevel=2)
2666
2645
  return p
2667
2646
 
2668
2647
 
@@ -2749,43 +2728,46 @@ def plot_KDP_Z(
2749
2728
 
2750
2729
  # Fit and overlay power law
2751
2730
  if add_fit:
2752
- # Fit: KDP = a * Z^b (Z in dBZ → x_in_db=True)
2753
- (a, b), _ = fit_powerlaw(
2754
- x=df[z],
2755
- y=df[kdp],
2756
- xbins=np.arange(15, 50),
2757
- x_in_db=True,
2758
- )
2759
- # Invert: Z = A * KDP^B
2760
- A, B = inverse_powerlaw_parameters(a, b)
2761
-
2762
- # Define legend title
2763
- a_str = _define_coeff_string(a)
2764
- A_str = _define_coeff_string(A)
2765
- legend_str = (
2766
- rf"$K_{{\mathrm{{DP}}}} = {a_str}\,{z_lower_symbol}^{{{b:.2f}}}$"
2767
- "\n"
2768
- rf"${z_lower_symbol} = {A_str}\,K_{{\mathrm{{DP}}}}^{{{B:.2f}}}$"
2769
- )
2731
+ try:
2732
+ # Fit: KDP = a * Z^b (Z in dBZ → x_in_db=True)
2733
+ (a, b), _ = fit_powerlaw(
2734
+ x=df[z],
2735
+ y=df[kdp],
2736
+ xbins=np.arange(15, 50),
2737
+ x_in_db=True,
2738
+ )
2739
+ # Invert: Z = A * KDP^B
2740
+ A, B = inverse_powerlaw_parameters(a, b)
2741
+
2742
+ # Define legend title
2743
+ a_str = _define_coeff_string(a)
2744
+ A_str = _define_coeff_string(A)
2745
+ legend_str = (
2746
+ rf"$K_{{\mathrm{{DP}}}} = {a_str}\,{z_lower_symbol}^{{{b:.2f}}}$"
2747
+ "\n"
2748
+ rf"${z_lower_symbol} = {A_str}\,K_{{\mathrm{{DP}}}}^{{{B:.2f}}}$"
2749
+ )
2770
2750
 
2771
- # Get power law predictions
2772
- x_pred = np.arange(*z_lim)
2773
- x_pred_linear = disdrodb.idecibel(x_pred)
2774
- y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2775
- # Add fitted power law
2776
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2777
- # Add legend
2778
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2779
- ax.text(
2780
- 0.05,
2781
- 0.95,
2782
- legend_str,
2783
- transform=ax.transAxes,
2784
- ha="left",
2785
- va="top",
2786
- fontsize=legend_fontsize,
2787
- bbox=legend_bbox_dict,
2788
- )
2751
+ # Get power law predictions
2752
+ x_pred = np.arange(*z_lim)
2753
+ x_pred_linear = disdrodb.idecibel(x_pred)
2754
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2755
+ # Add fitted power law
2756
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2757
+ # Add legend
2758
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2759
+ ax.text(
2760
+ 0.05,
2761
+ 0.95,
2762
+ legend_str,
2763
+ transform=ax.transAxes,
2764
+ ha="left",
2765
+ va="top",
2766
+ fontsize=legend_fontsize,
2767
+ bbox=legend_bbox_dict,
2768
+ )
2769
+ except Exception as e:
2770
+ warnings.warn(f"Could not fit power law in plot_KDP_Z: {e!s}", UserWarning, stacklevel=2)
2789
2771
 
2790
2772
  return p
2791
2773
 
@@ -3103,36 +3085,41 @@ def plot_KED_R(
3103
3085
  ax.set_title("KED vs R")
3104
3086
  # Fit and plot a powerlaw
3105
3087
  if add_fit:
3106
- # Fit a power law KED = a * R**b
3107
- (a, b), _ = fit_powerlaw(
3108
- x=df["R"],
3109
- y=df["KED"],
3110
- xbins=r_bins,
3111
- x_in_db=False,
3112
- )
3113
- # Invert for R = A * KED**B
3114
- A, B = inverse_powerlaw_parameters(a, b)
3115
- # Define legend string
3116
- a_str = _define_coeff_string(a)
3117
- A_str = _define_coeff_string(A)
3118
- legend_str = rf"$\mathrm{{KED}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KED}}^{{{B:.2f}}}$"
3119
- # Get power law predictions
3120
- x_pred = np.arange(r_lims[0], r_lims[1])
3121
- y_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3122
- # Add fitted powerlaw
3123
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3124
- # Add legend
3125
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3126
- ax.text(
3127
- 0.05,
3128
- 0.95,
3129
- legend_str,
3130
- transform=ax.transAxes,
3131
- ha="left",
3132
- va="top",
3133
- fontsize=legend_fontsize,
3134
- bbox=legend_bbox_dict,
3135
- )
3088
+ try:
3089
+ # Fit a power law KED = a * R**b
3090
+ (a, b), _ = fit_powerlaw(
3091
+ x=df["R"],
3092
+ y=df["KED"],
3093
+ xbins=r_bins,
3094
+ x_in_db=False,
3095
+ )
3096
+ # Invert for R = A * KED**B
3097
+ A, B = inverse_powerlaw_parameters(a, b)
3098
+ # Define legend string
3099
+ a_str = _define_coeff_string(a)
3100
+ A_str = _define_coeff_string(A)
3101
+ legend_str = (
3102
+ rf"$\mathrm{{KED}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KED}}^{{{B:.2f}}}$"
3103
+ )
3104
+ # Get power law predictions
3105
+ x_pred = np.arange(r_lims[0], r_lims[1])
3106
+ y_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3107
+ # Add fitted powerlaw
3108
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3109
+ # Add legend
3110
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3111
+ ax.text(
3112
+ 0.05,
3113
+ 0.95,
3114
+ legend_str,
3115
+ transform=ax.transAxes,
3116
+ ha="left",
3117
+ va="top",
3118
+ fontsize=legend_fontsize,
3119
+ bbox=legend_bbox_dict,
3120
+ )
3121
+ except Exception as e:
3122
+ warnings.warn(f"Could not fit power law in plot_KED_R: {e!s}", UserWarning, stacklevel=2)
3136
3123
 
3137
3124
  return p
3138
3125
 
@@ -3224,36 +3211,41 @@ def plot_KEF_R(
3224
3211
  # Fit and plot the power law
3225
3212
  # - Alternative fit model: a + I *(1 - b*exp(c*I)) (a is upper limit)
3226
3213
  if add_fit:
3227
- # Fit power law KEF = a * R ** b
3228
- (a, b), _ = fit_powerlaw(
3229
- x=df["R"],
3230
- y=df["KEF"],
3231
- xbins=r_bins,
3232
- x_in_db=False,
3233
- )
3234
- # Invert parameters for R = A * KEF ** B
3235
- A, B = inverse_powerlaw_parameters(a, b)
3236
- # Define legend string
3237
- a_str = _define_coeff_string(a)
3238
- A_str = _define_coeff_string(A)
3239
- legend_str = rf"$\mathrm{{KEF}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KEF}}^{{{B:.2f}}}$"
3240
- # Get power law predictions
3241
- x_pred = np.arange(*r_lims)
3242
- kef_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3243
- # Add fitted powerlaw
3244
- ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3245
- # Add legend
3246
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3247
- ax.text(
3248
- 0.05,
3249
- 0.95,
3250
- legend_str,
3251
- transform=ax.transAxes,
3252
- ha="left",
3253
- va="top",
3254
- fontsize=legend_fontsize,
3255
- bbox=legend_bbox_dict,
3256
- )
3214
+ try:
3215
+ # Fit power law KEF = a * R ** b
3216
+ (a, b), _ = fit_powerlaw(
3217
+ x=df["R"],
3218
+ y=df["KEF"],
3219
+ xbins=r_bins,
3220
+ x_in_db=False,
3221
+ )
3222
+ # Invert parameters for R = A * KEF ** B
3223
+ A, B = inverse_powerlaw_parameters(a, b)
3224
+ # Define legend string
3225
+ a_str = _define_coeff_string(a)
3226
+ A_str = _define_coeff_string(A)
3227
+ legend_str = (
3228
+ rf"$\mathrm{{KEF}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KEF}}^{{{B:.2f}}}$"
3229
+ )
3230
+ # Get power law predictions
3231
+ x_pred = np.arange(*r_lims)
3232
+ kef_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3233
+ # Add fitted powerlaw
3234
+ ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3235
+ # Add legend
3236
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3237
+ ax.text(
3238
+ 0.05,
3239
+ 0.95,
3240
+ legend_str,
3241
+ transform=ax.transAxes,
3242
+ ha="left",
3243
+ va="top",
3244
+ fontsize=legend_fontsize,
3245
+ bbox=legend_bbox_dict,
3246
+ )
3247
+ except Exception as e:
3248
+ warnings.warn(f"Could not fit power law in plot_KEF_R: {e!s}", UserWarning, stacklevel=2)
3257
3249
  return p
3258
3250
 
3259
3251
 
@@ -3338,41 +3330,44 @@ def plot_KEF_Z(
3338
3330
 
3339
3331
  # Fit and plot the powerlaw
3340
3332
  if add_fit:
3341
- # Fit power law KEF = a * Z ** b
3342
- (a, b), _ = fit_powerlaw(
3343
- x=df[z],
3344
- y=df["KEF"],
3345
- xbins=z_bins,
3346
- x_in_db=True,
3347
- )
3348
- # Invert parameters for Z = A * KEF ** B
3349
- A, B = inverse_powerlaw_parameters(a, b)
3350
- # Define legend string
3351
- a_str = _define_coeff_string(a)
3352
- A_str = _define_coeff_string(A)
3353
- legend_str = (
3354
- rf"$\mathrm{{KEF}} = {a_str}\;{z_lower_symbol}^{{{b:.2f}}}$"
3355
- "\n"
3356
- rf"${z_lower_symbol} = {A_str}\;\mathrm{{KEF}}^{{{B:.2f}}}$"
3357
- )
3358
- # Get power law predictions
3359
- x_pred = np.arange(*z_lims)
3360
- x_pred_linear = disdrodb.idecibel(x_pred)
3361
- kef_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3362
- # Add fitted powerlaw
3363
- ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3364
- # Add legend
3365
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3366
- ax.text(
3367
- 0.05,
3368
- 0.95,
3369
- legend_str,
3370
- transform=ax.transAxes,
3371
- ha="left",
3372
- va="top",
3373
- fontsize=legend_fontsize,
3374
- bbox=legend_bbox_dict,
3375
- )
3333
+ try:
3334
+ # Fit power law KEF = a * Z ** b
3335
+ (a, b), _ = fit_powerlaw(
3336
+ x=df[z],
3337
+ y=df["KEF"],
3338
+ xbins=z_bins,
3339
+ x_in_db=True,
3340
+ )
3341
+ # Invert parameters for Z = A * KEF ** B
3342
+ A, B = inverse_powerlaw_parameters(a, b)
3343
+ # Define legend string
3344
+ a_str = _define_coeff_string(a)
3345
+ A_str = _define_coeff_string(A)
3346
+ legend_str = (
3347
+ rf"$\mathrm{{KEF}} = {a_str}\;{z_lower_symbol}^{{{b:.2f}}}$"
3348
+ "\n"
3349
+ rf"${z_lower_symbol} = {A_str}\;\mathrm{{KEF}}^{{{B:.2f}}}$"
3350
+ )
3351
+ # Get power law predictions
3352
+ x_pred = np.arange(*z_lims)
3353
+ x_pred_linear = disdrodb.idecibel(x_pred)
3354
+ kef_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3355
+ # Add fitted powerlaw
3356
+ ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3357
+ # Add legend
3358
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3359
+ ax.text(
3360
+ 0.05,
3361
+ 0.95,
3362
+ legend_str,
3363
+ transform=ax.transAxes,
3364
+ ha="left",
3365
+ va="top",
3366
+ fontsize=legend_fontsize,
3367
+ bbox=legend_bbox_dict,
3368
+ )
3369
+ except Exception as e:
3370
+ warnings.warn(f"Could not fit power law in plot_KEF_Z: {e!s}", UserWarning, stacklevel=2)
3376
3371
 
3377
3372
  return p
3378
3373
 
@@ -3451,37 +3446,42 @@ def plot_TKE_Z(
3451
3446
 
3452
3447
  # Fit and plot the powerlaw
3453
3448
  if add_fit:
3454
- # Fit power law TKE = a * Z ** b
3455
- (a, b), _ = fit_powerlaw(
3456
- x=df[z],
3457
- y=df["TKE"],
3458
- xbins=z_bins,
3459
- x_in_db=True,
3460
- )
3461
- # Invert parameters for Z = A * KEF ** B
3462
- A, B = inverse_powerlaw_parameters(a, b)
3463
- # Define legend string
3464
- a_str = _define_coeff_string(a)
3465
- A_str = _define_coeff_string(A)
3466
- legend_str = rf"$\mathrm{{TKE}} = {a_str}\;z^{{{b:.2f}}}$" "\n" rf"$z = {A_str}\;\mathrm{{TKE}}^{{{B:.2f}}}$"
3467
- # Get power law predictions
3468
- x_pred = np.arange(*z_lims)
3469
- x_pred_linear = disdrodb.idecibel(x_pred)
3470
- y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3471
- # Add fitted powerlaw
3472
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3473
- # Add legend
3474
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3475
- ax.text(
3476
- 0.05,
3477
- 0.95,
3478
- legend_str,
3479
- transform=ax.transAxes,
3480
- ha="left",
3481
- va="top",
3482
- fontsize=legend_fontsize,
3483
- bbox=legend_bbox_dict,
3484
- )
3449
+ try:
3450
+ # Fit power law TKE = a * Z ** b
3451
+ (a, b), _ = fit_powerlaw(
3452
+ x=df[z],
3453
+ y=df["TKE"],
3454
+ xbins=z_bins,
3455
+ x_in_db=True,
3456
+ )
3457
+ # Invert parameters for Z = A * KEF ** B
3458
+ A, B = inverse_powerlaw_parameters(a, b)
3459
+ # Define legend string
3460
+ a_str = _define_coeff_string(a)
3461
+ A_str = _define_coeff_string(A)
3462
+ legend_str = (
3463
+ rf"$\mathrm{{TKE}} = {a_str}\;z^{{{b:.2f}}}$" "\n" rf"$z = {A_str}\;\mathrm{{TKE}}^{{{B:.2f}}}$"
3464
+ )
3465
+ # Get power law predictions
3466
+ x_pred = np.arange(*z_lims)
3467
+ x_pred_linear = disdrodb.idecibel(x_pred)
3468
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3469
+ # Add fitted powerlaw
3470
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3471
+ # Add legend
3472
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3473
+ ax.text(
3474
+ 0.05,
3475
+ 0.95,
3476
+ legend_str,
3477
+ transform=ax.transAxes,
3478
+ ha="left",
3479
+ va="top",
3480
+ fontsize=legend_fontsize,
3481
+ bbox=legend_bbox_dict,
3482
+ )
3483
+ except Exception as e:
3484
+ warnings.warn(f"Could not fit power law in plot_TKE_Z: {e!s}", UserWarning, stacklevel=2)
3485
3485
 
3486
3486
  return p
3487
3487
 
@@ -3716,21 +3716,24 @@ def plot_kinetic_energy_relationships(df):
3716
3716
  #### Summary routine
3717
3717
 
3718
3718
 
3719
- def define_filename(prefix, extension, data_source, campaign_name, station_name):
3719
+ def define_filename(prefix, extension, data_source, campaign_name, station_name, temporal_resolution):
3720
3720
  """Define filename for summary files."""
3721
3721
  if extension in ["png", "jpeg"]:
3722
- filename = f"Figure.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
3723
- if extension in ["csv", "parquet", "pdf", "yaml", "yml"]:
3724
- filename = f"Table.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
3722
+ filename = f"Figure.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
3723
+ if extension in ["csv", "pdf", "yaml", "yml"]:
3724
+ filename = f"Table.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
3725
3725
  if extension in ["nc"]:
3726
- filename = f"Dataset.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
3726
+ filename = f"Dataset.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
3727
+ if extension in ["parquet"]:
3728
+ filename = f"Dataframe.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
3727
3729
  return filename
3728
3730
 
3729
3731
 
3730
3732
  def create_l2_dataframe(ds):
3731
3733
  """Create pandas Dataframe for L2 analysis."""
3734
+ dims_to_drop = set(ds.dims).intersection({DIAMETER_DIMENSION, VELOCITY_DIMENSION})
3732
3735
  # - Drop array variables and convert to pandas
3733
- df = ds.drop_dims([DIAMETER_DIMENSION, VELOCITY_DIMENSION]).to_pandas()
3736
+ df = ds.drop_dims(dims_to_drop).to_pandas()
3734
3737
  # - Drop coordinates
3735
3738
  coords_to_drop = ["velocity_method", "sample_interval", *RADAR_OPTIONS]
3736
3739
  df = df.drop(columns=coords_to_drop, errors="ignore")
@@ -3759,12 +3762,16 @@ def prepare_summary_dataset(ds, velocity_method="fall_velocity", source="drop_nu
3759
3762
 
3760
3763
  # Select only timesteps with R > 0
3761
3764
  # - We save R with 2 decimals accuracy ... so 0.01 is the smallest value
3762
- rainy_timesteps = np.logical_and(ds["Rm"].compute() >= 0.01, ds["R"].compute() >= 0.01)
3765
+ if "Rm" in ds: # in L2E
3766
+ rainy_timesteps = np.logical_and(ds["Rm"].compute() >= 0.01, ds["R"].compute() >= 0.01)
3767
+ else: # L2M without Rm
3768
+ rainy_timesteps = ds["R"].compute() >= 0.01
3769
+
3763
3770
  ds = ds.isel(time=rainy_timesteps)
3764
3771
  return ds
3765
3772
 
3766
3773
 
3767
- def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, station_name):
3774
+ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, station_name, temporal_resolution):
3768
3775
  """Generate station summary using L2E dataset."""
3769
3776
  # Create summary directory if does not exist
3770
3777
  os.makedirs(summary_dir_path, exist_ok=True)
@@ -3776,10 +3783,13 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3776
3783
  # Ensure all data are in memory
3777
3784
  ds = ds.compute()
3778
3785
 
3786
+ # Keep only timesteps with at least 3 Nbins to remove noise
3787
+ valid_idx = np.where(ds["Nbins"] >= 3)[0]
3788
+ ds = ds.isel(time=valid_idx)
3789
+
3779
3790
  ####---------------------------------------------------------------------.
3780
3791
  #### Create drop spectrum figures and statistics
3781
3792
  # Compute sum of raw and filtered spectrum over time
3782
-
3783
3793
  raw_drop_number = ds["raw_drop_number"].sum(dim="time")
3784
3794
  drop_number = ds["drop_number"].sum(dim="time")
3785
3795
 
@@ -3799,6 +3809,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3799
3809
  data_source=data_source,
3800
3810
  campaign_name=campaign_name,
3801
3811
  station_name=station_name,
3812
+ temporal_resolution=temporal_resolution,
3802
3813
  )
3803
3814
  ds_stats.to_netcdf(os.path.join(summary_dir_path, filename))
3804
3815
 
@@ -3810,8 +3821,9 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3810
3821
  data_source=data_source,
3811
3822
  campaign_name=campaign_name,
3812
3823
  station_name=station_name,
3824
+ temporal_resolution=temporal_resolution,
3813
3825
  )
3814
- p = plot_drop_spectrum(raw_drop_number, title="Raw Drop Spectrum")
3826
+ p = plot_spectrum(raw_drop_number, title="Raw Drop Spectrum")
3815
3827
  p.figure.savefig(os.path.join(summary_dir_path, filename))
3816
3828
  plt.close()
3817
3829
 
@@ -3822,8 +3834,9 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3822
3834
  data_source=data_source,
3823
3835
  campaign_name=campaign_name,
3824
3836
  station_name=station_name,
3837
+ temporal_resolution=temporal_resolution,
3825
3838
  )
3826
- p = plot_drop_spectrum(drop_number, title="Filtered Drop Spectrum")
3839
+ p = plot_spectrum(drop_number, title="Filtered Drop Spectrum")
3827
3840
  p.figure.savefig(os.path.join(summary_dir_path, filename))
3828
3841
  plt.close()
3829
3842
 
@@ -3834,66 +3847,93 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3834
3847
  data_source=data_source,
3835
3848
  campaign_name=campaign_name,
3836
3849
  station_name=station_name,
3850
+ temporal_resolution=temporal_resolution,
3837
3851
  )
3838
3852
 
3839
- fig = plot_raw_and_filtered_spectrums(
3840
- raw_drop_number=raw_drop_number,
3841
- drop_number=drop_number,
3842
- theoretical_average_velocity=theoretical_average_velocity,
3843
- measured_average_velocity=measured_average_velocity,
3844
- )
3853
+ fig = plot_raw_and_filtered_spectra(ds)
3845
3854
  fig.savefig(os.path.join(summary_dir_path, filename))
3846
3855
  plt.close()
3847
3856
 
3848
3857
  ####---------------------------------------------------------------------.
3849
- #### Create L2E 1MIN dataframe
3858
+ #### Create L2E dataframe
3850
3859
  df = create_l2_dataframe(ds)
3851
3860
 
3852
3861
  # Define diameter bin edges
3853
3862
  diameter_bin_edges = get_diameter_bin_edges(ds)
3854
3863
 
3855
3864
  # ---------------------------------------------------------------------.
3856
- #### Save L2E 1MIN Parquet
3857
- l2e_parquet_filename = f"L2E.1MIN.PARQUET.{data_source}.{campaign_name}.{station_name}.parquet"
3865
+ #### Save L2E Parquet
3866
+ l2e_parquet_filename = define_filename(
3867
+ prefix="L2E",
3868
+ extension="parquet",
3869
+ data_source=data_source,
3870
+ campaign_name=campaign_name,
3871
+ station_name=station_name,
3872
+ temporal_resolution=temporal_resolution,
3873
+ )
3858
3874
  l2e_parquet_filepath = os.path.join(summary_dir_path, l2e_parquet_filename)
3859
3875
  df.to_parquet(l2e_parquet_filepath, engine="pyarrow", compression="snappy")
3860
3876
 
3861
3877
  #### ---------------------------------------------------------------------.
3862
3878
  #### Create table with rain summary
3863
- table_rain_summary = create_table_rain_summary(df)
3864
- table_rain_summary_filename = f"Station_Summary.{data_source}.{campaign_name}.{station_name}.yaml"
3865
- table_rain_summary_filepath = os.path.join(summary_dir_path, table_rain_summary_filename)
3866
- write_yaml(table_rain_summary, filepath=table_rain_summary_filepath)
3879
+ if not temporal_resolution.startswith("ROLL"):
3880
+ table_rain_summary = create_table_rain_summary(df, temporal_resolution=temporal_resolution)
3881
+ table_rain_summary_filename = define_filename(
3882
+ prefix="Station_Summary",
3883
+ extension="yaml",
3884
+ data_source=data_source,
3885
+ campaign_name=campaign_name,
3886
+ station_name=station_name,
3887
+ temporal_resolution=temporal_resolution,
3888
+ )
3889
+ table_rain_summary_filepath = os.path.join(summary_dir_path, table_rain_summary_filename)
3890
+ write_yaml(table_rain_summary, filepath=table_rain_summary_filepath)
3867
3891
 
3868
3892
  # ---------------------------------------------------------------------.
3869
- #### Creata table with events summary
3870
- table_events_summary = create_table_events_summary(df)
3871
- # - Save table as csv
3872
- table_events_summary_csv_filename = f"Events_Summary.{data_source}.{campaign_name}.{station_name}.csv"
3873
- table_events_summary_csv_filepath = os.path.join(summary_dir_path, table_events_summary_csv_filename)
3874
- table_events_summary.to_csv(table_events_summary_csv_filepath)
3875
- # - Save table as pdf
3876
- if is_latex_engine_available():
3877
- table_events_summary_pdf_filename = f"Events_Summary.{data_source}.{campaign_name}.{station_name}.pdf"
3878
- table_events_summary_pdf_filepath = os.path.join(summary_dir_path, table_events_summary_pdf_filename)
3879
- save_table_to_pdf(
3880
- df=prepare_latex_table_events_summary(table_events_summary),
3881
- filepath=table_events_summary_pdf_filepath,
3882
- index=True,
3883
- caption="Events Summary",
3884
- orientation="landscape",
3885
- )
3893
+ #### Create table with events summary
3894
+ if not temporal_resolution.startswith("ROLL"):
3895
+ table_events_summary = create_table_events_summary(df, temporal_resolution=temporal_resolution)
3896
+ if len(table_events_summary) > 0:
3897
+ # - Save table as csv
3898
+ table_events_summary_csv_filename = define_filename(
3899
+ prefix="Events_Summary",
3900
+ extension="csv",
3901
+ data_source=data_source,
3902
+ campaign_name=campaign_name,
3903
+ station_name=station_name,
3904
+ temporal_resolution=temporal_resolution,
3905
+ )
3906
+ table_events_summary_csv_filepath = os.path.join(summary_dir_path, table_events_summary_csv_filename)
3907
+ table_events_summary.to_csv(table_events_summary_csv_filepath)
3908
+ # - Save table as pdf
3909
+ if is_latex_engine_available():
3910
+ table_events_summary_pdf_filename = table_events_summary_csv_filename.replace(".csv", ".pdf")
3911
+ table_events_summary_pdf_filepath = os.path.join(summary_dir_path, table_events_summary_pdf_filename)
3912
+ save_table_to_pdf(
3913
+ df=prepare_latex_table_events_summary(table_events_summary),
3914
+ filepath=table_events_summary_pdf_filepath,
3915
+ index=True,
3916
+ caption="Events Summary",
3917
+ orientation="landscape",
3918
+ )
3886
3919
 
3887
3920
  # ---------------------------------------------------------------------.
3888
3921
  #### Create table with integral DSD parameters statistics
3889
3922
  table_dsd_summary = create_table_dsd_summary(df)
3890
3923
  # - Save table as csv
3891
- table_dsd_summary_csv_filename = f"DSD_Summary.{data_source}.{campaign_name}.{station_name}.csv"
3924
+ table_dsd_summary_csv_filename = define_filename(
3925
+ prefix="DSD_Summary",
3926
+ extension="csv",
3927
+ data_source=data_source,
3928
+ campaign_name=campaign_name,
3929
+ station_name=station_name,
3930
+ temporal_resolution=temporal_resolution,
3931
+ )
3892
3932
  table_dsd_summary_csv_filepath = os.path.join(summary_dir_path, table_dsd_summary_csv_filename)
3893
3933
  table_dsd_summary.to_csv(table_dsd_summary_csv_filepath)
3894
3934
  # - Save table as pdf
3895
3935
  if is_latex_engine_available():
3896
- table_dsd_summary_pdf_filename = f"DSD_Summary.{data_source}.{campaign_name}.{station_name}.pdf"
3936
+ table_dsd_summary_pdf_filename = table_dsd_summary_csv_filename.replace(".csv", ".pdf")
3897
3937
  table_dsd_summary_pdf_filepath = os.path.join(summary_dir_path, table_dsd_summary_pdf_filename)
3898
3938
  save_table_to_pdf(
3899
3939
  df=prepare_latex_table_dsd_summary(table_dsd_summary),
@@ -3905,186 +3945,202 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3905
3945
 
3906
3946
  #### ---------------------------------------------------------------------.
3907
3947
  #### Create L2E RADAR Summary Plots
3908
- # Summary plots at X, C, S bands
3909
- if "DBZH_X" in df:
3948
+ if len(df) > 1000:
3949
+ # Summary plots at X, C, S bands
3950
+ if "DBZH_X" in df:
3951
+ filename = define_filename(
3952
+ prefix="Radar_Band_X",
3953
+ extension="png",
3954
+ data_source=data_source,
3955
+ campaign_name=campaign_name,
3956
+ station_name=station_name,
3957
+ temporal_resolution=temporal_resolution,
3958
+ )
3959
+ fig = plot_radar_relationships(df, band="X")
3960
+ fig.savefig(os.path.join(summary_dir_path, filename))
3961
+ if "DBZH_C" in df:
3962
+ filename = define_filename(
3963
+ prefix="Radar_Band_C",
3964
+ extension="png",
3965
+ data_source=data_source,
3966
+ campaign_name=campaign_name,
3967
+ station_name=station_name,
3968
+ temporal_resolution=temporal_resolution,
3969
+ )
3970
+ fig = plot_radar_relationships(df, band="C")
3971
+ fig.savefig(os.path.join(summary_dir_path, filename))
3972
+ if "DBZH_S" in df:
3973
+ filename = define_filename(
3974
+ prefix="Radar_Band_S",
3975
+ extension="png",
3976
+ data_source=data_source,
3977
+ campaign_name=campaign_name,
3978
+ station_name=station_name,
3979
+ temporal_resolution=temporal_resolution,
3980
+ )
3981
+ fig = plot_radar_relationships(df, band="S")
3982
+ fig.savefig(os.path.join(summary_dir_path, filename))
3983
+
3984
+ # ---------------------------------------------------------------------.
3985
+ #### Create L2E Z-R figure
3910
3986
  filename = define_filename(
3911
- prefix="Radar_Band_X",
3987
+ prefix="Z-R",
3912
3988
  extension="png",
3913
3989
  data_source=data_source,
3914
3990
  campaign_name=campaign_name,
3915
3991
  station_name=station_name,
3992
+ temporal_resolution=temporal_resolution,
3916
3993
  )
3917
- fig = plot_radar_relationships(df, band="X")
3918
- fig.savefig(os.path.join(summary_dir_path, filename))
3919
- if "DBZH_C" in df:
3994
+
3995
+ p = plot_R_Z(df, z="Z", r="R", title=r"$Z$ vs $R$")
3996
+ p.figure.savefig(os.path.join(summary_dir_path, filename))
3997
+ plt.close()
3998
+
3999
+ #### ---------------------------------------------------------------------.
4000
+ #### Create L2E Kinetic Energy Summary Plots
3920
4001
  filename = define_filename(
3921
- prefix="Radar_Band_C",
4002
+ prefix="KineticEnergy",
3922
4003
  extension="png",
3923
4004
  data_source=data_source,
3924
4005
  campaign_name=campaign_name,
3925
4006
  station_name=station_name,
4007
+ temporal_resolution=temporal_resolution,
3926
4008
  )
3927
- fig = plot_radar_relationships(df, band="C")
4009
+ fig = plot_kinetic_energy_relationships(df)
3928
4010
  fig.savefig(os.path.join(summary_dir_path, filename))
3929
- if "DBZH_S" in df:
4011
+
4012
+ #### ---------------------------------------------------------------------.
4013
+ #### Create L2E DSD Parameters summary plots
4014
+ #### - Create DSD parameters density figures with LWC
3930
4015
  filename = define_filename(
3931
- prefix="Radar_Band_S",
4016
+ prefix="DSD_Params_Density_with_LWC_LinearDm_MaxNormalized",
3932
4017
  extension="png",
3933
4018
  data_source=data_source,
3934
4019
  campaign_name=campaign_name,
3935
4020
  station_name=station_name,
4021
+ temporal_resolution=temporal_resolution,
3936
4022
  )
3937
- fig = plot_radar_relationships(df, band="S")
4023
+ fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=False)
3938
4024
  fig.savefig(os.path.join(summary_dir_path, filename))
4025
+ plt.close()
3939
4026
 
3940
- # ---------------------------------------------------------------------.
3941
- #### - Create Z-R figure
3942
- filename = define_filename(
3943
- prefix="Z-R",
3944
- extension="png",
3945
- data_source=data_source,
3946
- campaign_name=campaign_name,
3947
- station_name=station_name,
3948
- )
3949
-
3950
- p = plot_R_Z(df, z="Z", r="R", title=r"$Z$ vs $R$")
3951
- p.figure.savefig(os.path.join(summary_dir_path, filename))
3952
- plt.close()
3953
-
3954
- #### ---------------------------------------------------------------------.
3955
- #### Create L2E Kinetic Energy Summary Plots
3956
- filename = define_filename(
3957
- prefix="KineticEnergy",
3958
- extension="png",
3959
- data_source=data_source,
3960
- campaign_name=campaign_name,
3961
- station_name=station_name,
3962
- )
3963
- fig = plot_kinetic_energy_relationships(df)
3964
- fig.savefig(os.path.join(summary_dir_path, filename))
3965
-
3966
- #### ---------------------------------------------------------------------.
3967
- #### Create L2E DSD Parameters summary plots
3968
- #### - Create DSD parameters density figures with LWC
3969
- filename = define_filename(
3970
- prefix="DSD_Params_Density_with_LWC_LinearDm_MaxNormalized",
3971
- extension="png",
3972
- data_source=data_source,
3973
- campaign_name=campaign_name,
3974
- station_name=station_name,
3975
- )
3976
- fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=False)
3977
- fig.savefig(os.path.join(summary_dir_path, filename))
3978
- plt.close()
3979
-
3980
- filename = define_filename(
3981
- prefix="DSD_Params_Density_with_LWC_LogDm_MaxNormalized",
3982
- extension="png",
3983
- data_source=data_source,
3984
- campaign_name=campaign_name,
3985
- station_name=station_name,
3986
- )
3987
- fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=False)
3988
- fig.savefig(os.path.join(summary_dir_path, filename))
3989
- plt.close()
4027
+ filename = define_filename(
4028
+ prefix="DSD_Params_Density_with_LWC_LogDm_MaxNormalized",
4029
+ extension="png",
4030
+ data_source=data_source,
4031
+ campaign_name=campaign_name,
4032
+ station_name=station_name,
4033
+ temporal_resolution=temporal_resolution,
4034
+ )
4035
+ fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=False)
4036
+ fig.savefig(os.path.join(summary_dir_path, filename))
4037
+ plt.close()
3990
4038
 
3991
- filename = define_filename(
3992
- prefix="DSD_Params_Density_with_LWC_LinearDm_LogNormalized",
3993
- extension="png",
3994
- data_source=data_source,
3995
- campaign_name=campaign_name,
3996
- station_name=station_name,
3997
- )
3998
- fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=True)
3999
- fig.savefig(os.path.join(summary_dir_path, filename))
4000
- plt.close()
4039
+ filename = define_filename(
4040
+ prefix="DSD_Params_Density_with_LWC_LinearDm_LogNormalized",
4041
+ extension="png",
4042
+ data_source=data_source,
4043
+ campaign_name=campaign_name,
4044
+ station_name=station_name,
4045
+ temporal_resolution=temporal_resolution,
4046
+ )
4047
+ fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=True)
4048
+ fig.savefig(os.path.join(summary_dir_path, filename))
4049
+ plt.close()
4001
4050
 
4002
- filename = define_filename(
4003
- prefix="DSD_Params_Density_with_LWC_LogDm_LogNormalized",
4004
- extension="png",
4005
- data_source=data_source,
4006
- campaign_name=campaign_name,
4007
- station_name=station_name,
4008
- )
4009
- fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=True)
4010
- fig.savefig(os.path.join(summary_dir_path, filename))
4011
- plt.close()
4051
+ filename = define_filename(
4052
+ prefix="DSD_Params_Density_with_LWC_LogDm_LogNormalized",
4053
+ extension="png",
4054
+ data_source=data_source,
4055
+ campaign_name=campaign_name,
4056
+ station_name=station_name,
4057
+ temporal_resolution=temporal_resolution,
4058
+ )
4059
+ fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=True)
4060
+ fig.savefig(os.path.join(summary_dir_path, filename))
4061
+ plt.close()
4012
4062
 
4013
- ###------------------------------------------------------------------------.
4014
- #### - Create DSD parameters density figures with R
4015
- filename = define_filename(
4016
- prefix="DSD_Params_Density_with_R_LinearDm_MaxNormalized",
4017
- extension="png",
4018
- data_source=data_source,
4019
- campaign_name=campaign_name,
4020
- station_name=station_name,
4021
- )
4022
- fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=False)
4023
- fig.savefig(os.path.join(summary_dir_path, filename))
4024
- plt.close()
4063
+ ###------------------------------------------------------------------------.
4064
+ #### - Create DSD parameters density figures with R
4065
+ filename = define_filename(
4066
+ prefix="DSD_Params_Density_with_R_LinearDm_MaxNormalized",
4067
+ extension="png",
4068
+ data_source=data_source,
4069
+ campaign_name=campaign_name,
4070
+ station_name=station_name,
4071
+ temporal_resolution=temporal_resolution,
4072
+ )
4073
+ fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=False)
4074
+ fig.savefig(os.path.join(summary_dir_path, filename))
4075
+ plt.close()
4025
4076
 
4026
- filename = define_filename(
4027
- prefix="DSD_Params_Density_with_R_LogDm_MaxNormalized",
4028
- extension="png",
4029
- data_source=data_source,
4030
- campaign_name=campaign_name,
4031
- station_name=station_name,
4032
- )
4033
- fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=False)
4034
- fig.savefig(os.path.join(summary_dir_path, filename))
4035
- plt.close()
4077
+ filename = define_filename(
4078
+ prefix="DSD_Params_Density_with_R_LogDm_MaxNormalized",
4079
+ extension="png",
4080
+ data_source=data_source,
4081
+ campaign_name=campaign_name,
4082
+ station_name=station_name,
4083
+ temporal_resolution=temporal_resolution,
4084
+ )
4085
+ fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=False)
4086
+ fig.savefig(os.path.join(summary_dir_path, filename))
4087
+ plt.close()
4036
4088
 
4037
- filename = define_filename(
4038
- prefix="DSD_Params_Density_with_R_LinearDm_LogNormalized",
4039
- extension="png",
4040
- data_source=data_source,
4041
- campaign_name=campaign_name,
4042
- station_name=station_name,
4043
- )
4044
- fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=True)
4045
- fig.savefig(os.path.join(summary_dir_path, filename))
4046
- plt.close()
4089
+ filename = define_filename(
4090
+ prefix="DSD_Params_Density_with_R_LinearDm_LogNormalized",
4091
+ extension="png",
4092
+ data_source=data_source,
4093
+ campaign_name=campaign_name,
4094
+ station_name=station_name,
4095
+ temporal_resolution=temporal_resolution,
4096
+ )
4097
+ fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=True)
4098
+ fig.savefig(os.path.join(summary_dir_path, filename))
4099
+ plt.close()
4047
4100
 
4048
- filename = define_filename(
4049
- prefix="DSD_Params_Density_with_R_LogDm_LogNormalized",
4050
- extension="png",
4051
- data_source=data_source,
4052
- campaign_name=campaign_name,
4053
- station_name=station_name,
4054
- )
4055
- fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=True)
4056
- fig.savefig(os.path.join(summary_dir_path, filename))
4057
- plt.close()
4101
+ filename = define_filename(
4102
+ prefix="DSD_Params_Density_with_R_LogDm_LogNormalized",
4103
+ extension="png",
4104
+ data_source=data_source,
4105
+ campaign_name=campaign_name,
4106
+ station_name=station_name,
4107
+ temporal_resolution=temporal_resolution,
4108
+ )
4109
+ fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=True)
4110
+ fig.savefig(os.path.join(summary_dir_path, filename))
4111
+ plt.close()
4058
4112
 
4059
- ###------------------------------------------------------------------------.
4060
- #### - Create DSD parameters relationship figures
4061
- filename = define_filename(
4062
- prefix="DSD_Params_Relations",
4063
- extension="png",
4064
- data_source=data_source,
4065
- campaign_name=campaign_name,
4066
- station_name=station_name,
4067
- )
4068
- fig = plot_dsd_params_relationships(df, add_nt=True)
4069
- fig.savefig(os.path.join(summary_dir_path, filename))
4070
- plt.close()
4113
+ ###------------------------------------------------------------------------.
4114
+ #### - Create DSD parameters relationship figures
4115
+ filename = define_filename(
4116
+ prefix="DSD_Params_Relations",
4117
+ extension="png",
4118
+ data_source=data_source,
4119
+ campaign_name=campaign_name,
4120
+ station_name=station_name,
4121
+ temporal_resolution=temporal_resolution,
4122
+ )
4123
+ fig = plot_dsd_params_relationships(df, add_nt=True)
4124
+ fig.savefig(os.path.join(summary_dir_path, filename))
4125
+ plt.close()
4071
4126
 
4072
- ###------------------------------------------------------------------------.
4073
- #### - Create Dmax relationship figures
4074
- filename = define_filename(
4075
- prefix="DSD_Dmax_Relations",
4076
- extension="png",
4077
- data_source=data_source,
4078
- campaign_name=campaign_name,
4079
- station_name=station_name,
4080
- )
4081
- fig = plot_dmax_relationships(df, diameter_bin_edges=diameter_bin_edges, dmax="Dmax", diameter_max=10)
4082
- fig.savefig(os.path.join(summary_dir_path, filename))
4083
- plt.close()
4127
+ ###------------------------------------------------------------------------.
4128
+ #### - Create Dmax relationship figures
4129
+ filename = define_filename(
4130
+ prefix="DSD_Dmax_Relations",
4131
+ extension="png",
4132
+ data_source=data_source,
4133
+ campaign_name=campaign_name,
4134
+ station_name=station_name,
4135
+ temporal_resolution=temporal_resolution,
4136
+ )
4137
+ fig = plot_dmax_relationships(df, diameter_bin_edges=diameter_bin_edges, dmax="Dmax", diameter_max=10)
4138
+ fig.savefig(os.path.join(summary_dir_path, filename))
4139
+ plt.close()
4084
4140
 
4085
- #### ---------------------------------------------------------------------.
4086
- #### Create L2E QC summary plots
4087
- # TODO:
4141
+ #### ---------------------------------------------------------------------.
4142
+ #### Create L2E QC summary plots
4143
+ # TODO:
4088
4144
 
4089
4145
  ####------------------------------------------------------------------------.
4090
4146
  #### Free space - Remove df from memory
@@ -4102,6 +4158,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
4102
4158
  data_source=data_source,
4103
4159
  campaign_name=campaign_name,
4104
4160
  station_name=station_name,
4161
+ temporal_resolution=temporal_resolution,
4105
4162
  )
4106
4163
  p = plot_dsd_density(df_nd, diameter_bin_edges=diameter_bin_edges)
4107
4164
  p.figure.savefig(os.path.join(summary_dir_path, filename))
@@ -4114,6 +4171,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
4114
4171
  data_source=data_source,
4115
4172
  campaign_name=campaign_name,
4116
4173
  station_name=station_name,
4174
+ temporal_resolution=temporal_resolution,
4117
4175
  )
4118
4176
  p = plot_normalized_dsd_density(df_nd)
4119
4177
  p.figure.savefig(os.path.join(summary_dir_path, filename))
@@ -4137,6 +4195,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
4137
4195
  data_source=data_source,
4138
4196
  campaign_name=campaign_name,
4139
4197
  station_name=station_name,
4198
+ temporal_resolution=temporal_resolution,
4140
4199
  )
4141
4200
  p = plot_dsd_with_dense_lines(drop_number_concentration=drop_number_concentration, r=r)
4142
4201
  p.figure.savefig(os.path.join(summary_dir_path, filename))
@@ -4153,6 +4212,7 @@ def create_station_summary(
4153
4212
  station_name,
4154
4213
  parallel=False,
4155
4214
  data_archive_dir=None,
4215
+ temporal_resolution="1MIN",
4156
4216
  ):
4157
4217
  """Create summary figures and tables for a DISDRODB station."""
4158
4218
  # Print processing info
@@ -4169,18 +4229,22 @@ def create_station_summary(
4169
4229
  )
4170
4230
  os.makedirs(summary_dir_path, exist_ok=True)
4171
4231
 
4172
- # Load L2E 1MIN dataset
4173
- ds = disdrodb.open_dataset(
4174
- data_archive_dir=data_archive_dir,
4175
- data_source=data_source,
4176
- campaign_name=campaign_name,
4177
- station_name=station_name,
4178
- product="L2E",
4179
- product_kwargs={"rolling": False, "sample_interval": 60},
4180
- parallel=parallel,
4181
- chunks=-1,
4182
- compute=True,
4183
- )
4232
+ # Load L2E dataset
4233
+ try:
4234
+ ds = disdrodb.open_dataset(
4235
+ data_archive_dir=data_archive_dir,
4236
+ data_source=data_source,
4237
+ campaign_name=campaign_name,
4238
+ station_name=station_name,
4239
+ product="L2E",
4240
+ temporal_resolution=temporal_resolution,
4241
+ parallel=parallel,
4242
+ chunks=-1,
4243
+ compute=True,
4244
+ )
4245
+ except Exception as e:
4246
+ print("Impossible to create the station summary." + str(e))
4247
+ return
4184
4248
 
4185
4249
  # Generate station summary figures and table
4186
4250
  generate_station_summary(
@@ -4189,6 +4253,7 @@ def create_station_summary(
4189
4253
  data_source=data_source,
4190
4254
  campaign_name=campaign_name,
4191
4255
  station_name=station_name,
4256
+ temporal_resolution=temporal_resolution,
4192
4257
  )
4193
4258
 
4194
4259
  print(f"Creation of station summary for {data_source} {campaign_name} {station_name} has terminated.")