disdrodb 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. disdrodb/__init__.py +1 -5
  2. disdrodb/_version.py +2 -2
  3. disdrodb/accessor/methods.py +22 -4
  4. disdrodb/api/checks.py +10 -0
  5. disdrodb/api/io.py +20 -18
  6. disdrodb/api/path.py +42 -77
  7. disdrodb/api/search.py +89 -23
  8. disdrodb/cli/disdrodb_create_summary.py +1 -1
  9. disdrodb/cli/disdrodb_run_l0.py +1 -1
  10. disdrodb/cli/disdrodb_run_l0a.py +1 -1
  11. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  12. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  13. disdrodb/cli/disdrodb_run_l1.py +1 -1
  14. disdrodb/cli/disdrodb_run_l2e.py +1 -1
  15. disdrodb/cli/disdrodb_run_l2m.py +1 -1
  16. disdrodb/configs.py +30 -83
  17. disdrodb/constants.py +4 -3
  18. disdrodb/data_transfer/download_data.py +4 -2
  19. disdrodb/docs.py +2 -2
  20. disdrodb/etc/products/L1/1MIN.yaml +13 -0
  21. disdrodb/etc/products/L1/LPM/1MIN.yaml +13 -0
  22. disdrodb/etc/products/L1/LPM_V0/1MIN.yaml +13 -0
  23. disdrodb/etc/products/L1/PARSIVEL/1MIN.yaml +13 -0
  24. disdrodb/etc/products/L1/PARSIVEL2/1MIN.yaml +13 -0
  25. disdrodb/etc/products/L1/PWS100/1MIN.yaml +13 -0
  26. disdrodb/etc/products/L1/RD80/1MIN.yaml +13 -0
  27. disdrodb/etc/products/L1/SWS250/1MIN.yaml +13 -0
  28. disdrodb/etc/products/L1/global.yaml +6 -0
  29. disdrodb/etc/products/L2E/10MIN.yaml +1 -12
  30. disdrodb/etc/products/L2E/global.yaml +1 -1
  31. disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_R_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/global.yaml +1 -1
  33. disdrodb/issue/checks.py +2 -2
  34. disdrodb/l0/check_configs.py +1 -1
  35. disdrodb/l0/configs/LPM/l0a_encodings.yml +0 -1
  36. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +0 -4
  37. disdrodb/l0/configs/LPM/l0b_encodings.yml +9 -9
  38. disdrodb/l0/configs/LPM/raw_data_format.yml +11 -11
  39. disdrodb/l0/configs/LPM_V0/bins_diameter.yml +103 -0
  40. disdrodb/l0/configs/LPM_V0/bins_velocity.yml +103 -0
  41. disdrodb/l0/configs/LPM_V0/l0a_encodings.yml +45 -0
  42. disdrodb/l0/configs/LPM_V0/l0b_cf_attrs.yml +180 -0
  43. disdrodb/l0/configs/LPM_V0/l0b_encodings.yml +410 -0
  44. disdrodb/l0/configs/LPM_V0/raw_data_format.yml +474 -0
  45. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +1 -1
  46. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +8 -8
  47. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +9 -9
  48. disdrodb/l0/l0_reader.py +2 -2
  49. disdrodb/l0/l0a_processing.py +6 -2
  50. disdrodb/l0/l0b_processing.py +26 -19
  51. disdrodb/l0/l0c_processing.py +17 -3
  52. disdrodb/l0/manuals/LPM_V0.pdf +0 -0
  53. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +15 -7
  54. disdrodb/l0/readers/LPM/ITALY/GID_LPM_PI.py +279 -0
  55. disdrodb/l0/readers/LPM/ITALY/GID_LPM_T.py +276 -0
  56. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
  57. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_RWANDA_LPM_NC.py +103 -0
  58. disdrodb/l0/readers/LPM/NORWAY/HAUKELISETER_LPM.py +216 -0
  59. disdrodb/l0/readers/LPM/NORWAY/NMBU_LPM.py +208 -0
  60. disdrodb/l0/readers/LPM/UK/WITHWORTH_LPM.py +219 -0
  61. disdrodb/l0/readers/LPM/USA/CHARLESTON.py +229 -0
  62. disdrodb/l0/readers/{LPM → LPM_V0}/BELGIUM/ULIEGE.py +33 -49
  63. disdrodb/l0/readers/LPM_V0/ITALY/GID_LPM_V0.py +240 -0
  64. disdrodb/l0/readers/PARSIVEL/BASQUECOUNTRY/EUSKALMET_OTT.py +227 -0
  65. disdrodb/l0/readers/{PARSIVEL2 → PARSIVEL}/NASA/LPVEX.py +16 -28
  66. disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/MC3E.py +1 -1
  67. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +3 -3
  68. disdrodb/l0/readers/PARSIVEL2/BASQUECOUNTRY/EUSKALMET_OTT2.py +232 -0
  69. disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +1 -1
  70. disdrodb/l0/readers/PARSIVEL2/JAPAN/PRECIP.py +155 -0
  71. disdrodb/l0/readers/PARSIVEL2/MPI/BCO_PARSIVEL2.py +14 -7
  72. disdrodb/l0/readers/PARSIVEL2/MPI/BOWTIE.py +8 -3
  73. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +28 -5
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +1 -1
  75. disdrodb/l0/readers/PARSIVEL2/{GPM/GCPEX.py → NORWAY/UIB.py} +54 -29
  76. disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/{PANGASA.py → PAGASA.py} +6 -3
  77. disdrodb/l0/readers/PARSIVEL2/SPAIN/GRANADA.py +1 -1
  78. disdrodb/l0/readers/PARSIVEL2/SWEDEN/SMHI.py +189 -0
  79. disdrodb/l0/readers/{PARSIVEL/GPM/PIERS.py → PARSIVEL2/USA/CSU.py} +62 -29
  80. disdrodb/l0/readers/PARSIVEL2/USA/{C3WE.py → CW3E.py} +51 -24
  81. disdrodb/l0/readers/{PARSIVEL/GPM/IFLOODS.py → RD80/BRAZIL/ATTO_RD80.py} +50 -34
  82. disdrodb/l0/readers/{SW250 → SWS250}/BELGIUM/KMI.py +1 -1
  83. disdrodb/l1/beard_model.py +45 -1
  84. disdrodb/l1/fall_velocity.py +1 -6
  85. disdrodb/l1/filters.py +2 -0
  86. disdrodb/l1/processing.py +6 -5
  87. disdrodb/l1/resampling.py +101 -38
  88. disdrodb/l2/empirical_dsd.py +12 -8
  89. disdrodb/l2/processing.py +4 -3
  90. disdrodb/metadata/search.py +3 -4
  91. disdrodb/routines/l0.py +4 -4
  92. disdrodb/routines/l1.py +173 -60
  93. disdrodb/routines/l2.py +121 -269
  94. disdrodb/routines/options.py +347 -0
  95. disdrodb/routines/wrappers.py +9 -1
  96. disdrodb/scattering/axis_ratio.py +3 -0
  97. disdrodb/scattering/routines.py +1 -1
  98. disdrodb/summary/routines.py +765 -724
  99. disdrodb/utils/archiving.py +51 -44
  100. disdrodb/utils/attrs.py +1 -1
  101. disdrodb/utils/compression.py +4 -2
  102. disdrodb/utils/dask.py +35 -15
  103. disdrodb/utils/dict.py +33 -0
  104. disdrodb/utils/encoding.py +1 -1
  105. disdrodb/utils/manipulations.py +7 -1
  106. disdrodb/utils/routines.py +9 -8
  107. disdrodb/utils/time.py +9 -1
  108. disdrodb/viz/__init__.py +0 -13
  109. disdrodb/viz/plots.py +209 -0
  110. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/METADATA +1 -1
  111. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/RECORD +124 -95
  112. disdrodb/l0/readers/PARSIVEL/GPM/LPVEX.py +0 -85
  113. /disdrodb/etc/products/L2M/{GAMMA_GS_ND_MAE.yaml → MODELS/GAMMA_GS_ND_MAE.yaml} +0 -0
  114. /disdrodb/etc/products/L2M/{GAMMA_ML.yaml → MODELS/GAMMA_ML.yaml} +0 -0
  115. /disdrodb/etc/products/L2M/{LOGNORMAL_GS_LOG_ND_MAE.yaml → MODELS/LOGNORMAL_GS_LOG_ND_MAE.yaml} +0 -0
  116. /disdrodb/etc/products/L2M/{LOGNORMAL_GS_ND_MAE.yaml → MODELS/LOGNORMAL_GS_ND_MAE.yaml} +0 -0
  117. /disdrodb/etc/products/L2M/{LOGNORMAL_ML.yaml → MODELS/LOGNORMAL_ML.yaml} +0 -0
  118. /disdrodb/etc/products/L2M/{NGAMMA_GS_LOG_ND_MAE.yaml → MODELS/NGAMMA_GS_LOG_ND_MAE.yaml} +0 -0
  119. /disdrodb/etc/products/L2M/{NGAMMA_GS_ND_MAE.yaml → MODELS/NGAMMA_GS_ND_MAE.yaml} +0 -0
  120. /disdrodb/etc/products/L2M/{NGAMMA_GS_Z_MAE.yaml → MODELS/NGAMMA_GS_Z_MAE.yaml} +0 -0
  121. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/NSSTC.py +0 -0
  122. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/WHEEL +0 -0
  123. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/entry_points.txt +0 -0
  124. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/licenses/LICENSE +0 -0
  125. {disdrodb-0.1.5.dist-info → disdrodb-0.2.1.dist-info}/top_level.txt +0 -0
@@ -21,6 +21,7 @@ import os
  import shutil
  import subprocess
  import tempfile
+ import warnings

  import matplotlib.lines as mlines
  import matplotlib.pyplot as plt
@@ -46,7 +47,13 @@ from disdrodb.utils.manipulations import (
  from disdrodb.utils.time import get_sampling_information
  from disdrodb.utils.warnings import suppress_warnings
  from disdrodb.utils.yaml import write_yaml
- from disdrodb.viz import compute_dense_lines, max_blend_images, to_rgba
+ from disdrodb.viz.plots import (
+ compute_dense_lines,
+ max_blend_images,
+ plot_raw_and_filtered_spectra,
+ plot_spectrum,
+ to_rgba,
+ )

  ####-----------------------------------------------------------------
  #### PDF Latex Utilities
@@ -152,11 +159,15 @@ def save_table_to_pdf(
  #### Tables summaries


- def create_table_rain_summary(df):
+ def create_table_rain_summary(df, temporal_resolution):
  """Create rainy table summary."""
  # Initialize dictionary
  table = {}

+ # Retrieve accumulation interval
+ accumulation_interval, _ = get_sampling_information(temporal_resolution)
+ accumulation_interval_minutes = accumulation_interval / 60
+
  # Keep rows with R > 0
  df = df[df["R"] > 0]

@@ -184,20 +195,34 @@ def create_table_rain_summary(df):
  table["years_month_coverage"] = years_month_coverage

  # Rainy minutes statistics
- table["n_rainy_minutes"] = len(df["R"])
- table["n_rainy_minutes_<0.1"] = df["R"].between(0, 0.1, inclusive="right").sum().item()
- table["n_rainy_minutes_0.1_1"] = df["R"].between(0.1, 1, inclusive="right").sum().item()
- table["n_rainy_minutes_1_10"] = df["R"].between(1, 10, inclusive="right").sum().item()
- table["n_rainy_minutes_10_25"] = df["R"].between(10, 25, inclusive="right").sum().item()
- table["n_rainy_minutes_25_50"] = df["R"].between(25, 50, inclusive="right").sum().item()
- table["n_rainy_minutes_50_100"] = df["R"].between(50, 100, inclusive="right").sum().item()
- table["n_rainy_minutes_100_200"] = df["R"].between(100, 200, inclusive="right").sum().item()
- table["n_rainy_minutes_>200"] = np.sum(df["R"] > 200).item()
+ table["n_rainy_minutes"] = len(df["R"]) * accumulation_interval_minutes
+ table["n_rainy_minutes_<0.1"] = (
+ df["R"].between(0, 0.1, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_0.1_1"] = (
+ df["R"].between(0.1, 1, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_1_10"] = (
+ df["R"].between(1, 10, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_10_25"] = (
+ df["R"].between(10, 25, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_25_50"] = (
+ df["R"].between(25, 50, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_50_100"] = (
+ df["R"].between(50, 100, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_100_200"] = (
+ df["R"].between(100, 200, inclusive="right").sum().item() * accumulation_interval_minutes
+ )
+ table["n_rainy_minutes_>200"] = np.sum(df["R"] > 200).item() * accumulation_interval_minutes

  # Minutes with larger Dmax
- table["n_minutes_Dmax_>7"] = np.sum(df["Dmax"] > 7).item()
- table["n_minutes_Dmax_>8"] = np.sum(df["Dmax"] > 8).item()
- table["n_minutes_Dmax_>9"] = np.sum(df["Dmax"] > 9).item()
+ table["n_minutes_Dmax_>7"] = np.sum(df["Dmax"] > 7).item() * accumulation_interval_minutes
+ table["n_minutes_Dmax_>8"] = np.sum(df["Dmax"] > 8).item() * accumulation_interval_minutes
+ table["n_minutes_Dmax_>9"] = np.sum(df["Dmax"] > 9).item() * accumulation_interval_minutes
  return table

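The hunk above turns per-class sample counts into minutes by multiplying with the product's accumulation interval. A minimal standalone sketch of that conversion (the function and variable names here are illustrative, not the package API):

```python
import pandas as pd

def rainy_minutes_by_class(rain_rate: pd.Series, accumulation_interval_s: float) -> dict:
    """Count rainy samples per rain-rate class and convert counts to minutes."""
    minutes_per_sample = accumulation_interval_s / 60
    rain_rate = rain_rate[rain_rate > 0]
    edges = [0, 0.1, 1, 10, 25, 50, 100, 200]
    table = {"n_rainy_minutes": len(rain_rate) * minutes_per_sample}
    for lo, hi in zip(edges[:-1], edges[1:]):
        n = rain_rate.between(lo, hi, inclusive="right").sum()
        table[f"n_rainy_minutes_{lo}_{hi}"] = n * minutes_per_sample
    table["n_rainy_minutes_>200"] = (rain_rate > 200).sum() * minutes_per_sample
    return table

# At a 10MIN resolution (600 s), 6 rainy samples amount to 60 rainy minutes.
rainy_minutes_by_class(pd.Series([0.5, 2.0, 2.0, 12.0, 30.0, 0.05]), accumulation_interval_s=600)
```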
@@ -254,17 +279,30 @@ def create_table_dsd_summary(df):
  return df_stats


- def create_table_events_summary(df):
- """Creata table with events statistics."""
- # Event file
+ def create_table_events_summary(df, temporal_resolution):
+ """Create table with events statistics."""
+ # Retrieve accumulation interval
+ accumulation_interval, _ = get_sampling_information(temporal_resolution)
+ accumulation_interval_minutes = accumulation_interval / 60
+
+ # Define event settings
  # - Events are separated by 1 hour or more rain-free periods in rain rate time series.
  # - The events that are less than 'min_duration' minutes or the rain total is less than 0.1 mm
  # are not reported.
+ if accumulation_interval_minutes >= 5 * 60:
+ neighbor_time_interval = temporal_resolution
+ event_min_duration = temporal_resolution
+ neighbor_min_size = 1
+ else:
+ neighbor_time_interval = "5MIN"
+ event_min_duration = "5MIN"
+ neighbor_min_size = 2
+
  event_settings = {
- "neighbor_min_size": 2,
- "neighbor_time_interval": "5MIN",
+ "neighbor_min_size": neighbor_min_size,
+ "neighbor_time_interval": neighbor_time_interval,
  "event_max_time_gap": "1H",
- "event_min_duration": "5MIN",
+ "event_min_duration": event_min_duration,
  "event_min_size": 3,
  }
  # Keep rows with R > 0
@@ -298,9 +336,12 @@ def create_table_events_summary(df):
  # Event time info
  "start_time": start,
  "end_time": end,
- "duration": int((end - start) / np.timedelta64(1, "m")),
+ "duration": int((end - start) / np.timedelta64(1, "m")) + accumulation_interval_minutes,
  # Rainy minutes above thresholds
- **{f"rainy_minutes_>{thr}": int((df_event["R"] > thr).sum()) for thr in rain_thresholds},
+ **{
+ f"rainy_minutes_>{thr}": int((df_event["R"] > thr).sum()) * accumulation_interval_minutes
+ for thr in rain_thresholds
+ },
  # Total precipitation (mm)
  "P_total": df_event["P"].sum(),
  # R statistics
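The duration term above gains one accumulation interval, presumably because timestamps mark interval starts: an event whose first and last samples fall at 10:00 and 10:05 at a 5-minute resolution covers 10 minutes of data, not 5. A tiny worked check (values are illustrative):

```python
import numpy as np

start = np.datetime64("2024-06-01T10:00")
end = np.datetime64("2024-06-01T10:05")
accumulation_interval_minutes = 5
# end - start alone would report 5 minutes; adding the interval closes the last sample.
duration = int((end - start) / np.timedelta64(1, "m")) + accumulation_interval_minutes
assert duration == 10
```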
@@ -652,99 +693,6 @@ def predict_from_inverse_powerlaw(x, a, b):
  return (x ** (1 / b)) / (a ** (1 / b))


- ####-------------------------------------------------------------------
- #### Drop spectrum plots
-
-
- def plot_drop_spectrum(drop_number, norm=None, add_colorbar=True, title="Drop Spectrum"):
- """Plot the drop spectrum."""
- cmap = plt.get_cmap("Spectral_r").copy()
- cmap.set_under("none")
- if "time" in drop_number.dims:
- drop_number = drop_number.sum(dim="time")
- if norm is None:
- norm = LogNorm(vmin=1, vmax=None) if drop_number.sum() > 0 else None
-
- p = drop_number.plot.pcolormesh(
- x=DIAMETER_DIMENSION,
- y=VELOCITY_DIMENSION,
- cmap=cmap,
- extend="max",
- norm=norm,
- add_colorbar=add_colorbar,
- cbar_kwargs={"label": "Number of particles"},
- )
- p.axes.set_xlabel("Diamenter [mm]")
- p.axes.set_ylabel("Fall velocity [m/s]")
- p.axes.set_title(title)
- return p
-
-
- def plot_raw_and_filtered_spectrums(
- raw_drop_number,
- drop_number,
- theoretical_average_velocity,
- measured_average_velocity=None,
- norm=None,
- figsize=(8, 4),
- dpi=300,
- ):
- """Plot raw and filtered drop spectrum."""
- # Drop number matrix
- cmap = plt.get_cmap("Spectral_r").copy()
- cmap.set_under("none")
-
- if "time" in drop_number.dims:
- drop_number = drop_number.sum(dim="time")
- if "time" in raw_drop_number.dims:
- raw_drop_number = raw_drop_number.sum(dim="time")
- if "time" in theoretical_average_velocity.dims:
- theoretical_average_velocity = theoretical_average_velocity.mean(dim="time")
-
- if norm is None:
- norm = LogNorm(1, None)
-
- fig = plt.figure(figsize=figsize, dpi=dpi)
- gs = GridSpec(1, 2, width_ratios=[1, 1.15], wspace=0.05) # More space for ax2
- ax1 = fig.add_subplot(gs[0])
- ax2 = fig.add_subplot(gs[1])
-
- raw_drop_number.plot.pcolormesh(
- x=DIAMETER_DIMENSION,
- y=VELOCITY_DIMENSION,
- ax=ax1,
- cmap=cmap,
- norm=norm,
- extend="max",
- add_colorbar=False,
- )
- theoretical_average_velocity.plot(ax=ax1, c="k", linestyle="dashed")
- if measured_average_velocity is not None:
- measured_average_velocity.plot(ax=ax1, c="k", linestyle="dotted")
- ax1.set_xlabel("Diamenter [mm]")
- ax1.set_ylabel("Fall velocity [m/s]")
- ax1.set_title("Raw Spectrum")
- drop_number.plot.pcolormesh(
- x=DIAMETER_DIMENSION,
- y=VELOCITY_DIMENSION,
- cmap=cmap,
- extend="max",
- ax=ax2,
- norm=norm,
- cbar_kwargs={"label": "Number of particles"},
- )
- theoretical_average_velocity.plot(ax=ax2, c="k", linestyle="dashed", label="Theoretical velocity")
- if measured_average_velocity is not None:
- measured_average_velocity.plot(ax=ax2, c="k", linestyle="dotted", label="Measured average velocity")
- ax2.set_yticks([])
- ax2.set_yticklabels([])
- ax2.set_xlabel("Diamenter [mm]")
- ax2.set_ylabel("")
- ax2.set_title("Filtered Spectrum")
- ax2.legend(loc="lower right", frameon=False)
- return fig
-
-
  ####-------------------------------------------------------------------
  #### N(D) Climatological plots

@@ -768,7 +716,7 @@ def create_nd_dataframe(ds, variables=None):
  "sample_interval",
  *RADAR_OPTIONS,
  ]
- df_nd = ds_stack.to_dataframe().drop(columns=coords_to_drop, errors="ignore")
+ df_nd = ds_stack.to_dask_dataframe().drop(columns=coords_to_drop, errors="ignore").compute()
  df_nd["D"] = df_nd["diameter_bin_center"]
  df_nd["N(D)"] = df_nd["drop_number_concentration"]
  df_nd = df_nd[df_nd["R"] != 0]
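Routing the conversion through dask (above) avoids materializing the full pandas frame before the unneeded coordinate columns are dropped. A minimal sketch of the same call chain on a toy dataset (requires dask; the variable and coordinate names are chosen for illustration):

```python
import numpy as np
import xarray as xr

ds = xr.Dataset(
    {"drop_number_concentration": (("time", "diameter_bin_center"), np.random.rand(4, 3))},
    coords={
        "time": np.arange(4),
        "diameter_bin_center": [0.3, 0.5, 0.7],
        "sample_interval": ("time", np.full(4, 60)),
    },
)
# Drop helper coordinates lazily, then materialize the (smaller) pandas DataFrame.
df_nd = ds.to_dask_dataframe().drop(columns=["sample_interval"], errors="ignore").compute()
```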
@@ -2084,31 +2032,34 @@ def plot_A_R(
  ax.set_yticklabels([str(v) for v in a_ticks])
  ax.set_title(title)
  if add_fit:
- # Fit powerlaw k = a * R ** b
- (a_c, b), _ = fit_powerlaw(x=df[r], y=df[a], xbins=r_bins, x_in_db=False)
- # Invert for R = A * k ** B
- A_c, B = inverse_powerlaw_parameters(a_c, b)
- # Define legend title
- a_str = _define_coeff_string(a_c)
- A_str = _define_coeff_string(A_c)
- legend_str = rf"${a_symbol} = {a_str} \, R^{{{b:.2f}}}$" "\n" rf"$R = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
- # Get power law predictions
- x_pred = np.arange(*rlims)
- r_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
- # Add fitted power law
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
- # Add legend
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
- ax.text(
- 0.05,
- 0.95,
- legend_str,
- transform=ax.transAxes,
- ha="left",
- va="top",
- fontsize=legend_fontsize,
- bbox=legend_bbox_dict,
- )
+ try:
+ # Fit powerlaw k = a * R ** b
+ (a_c, b), _ = fit_powerlaw(x=df[r], y=df[a], xbins=r_bins, x_in_db=False)
+ # Invert for R = A * k ** B
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
+ # Define legend title
+ a_str = _define_coeff_string(a_c)
+ A_str = _define_coeff_string(A_c)
+ legend_str = rf"${a_symbol} = {a_str} \, R^{{{b:.2f}}}$" "\n" rf"$R = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
+ # Get power law predictions
+ x_pred = np.arange(*rlims)
+ r_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
+ # Add fitted power law
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
+ # Add legend
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
+ ax.text(
+ 0.05,
+ 0.95,
+ legend_str,
+ transform=ax.transAxes,
+ ha="left",
+ va="top",
+ fontsize=legend_fontsize,
+ bbox=legend_bbox_dict,
+ )
+ except Exception as e:
+ warnings.warn(f"Could not fit power law in plot_A_R: {e!s}", UserWarning, stacklevel=2)
  return p

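The same try/except guard is applied to the other fitting blocks below (plot_A_Z through plot_TKE_Z): a failed power-law fit now downgrades to a UserWarning instead of aborting the whole summary. As an aside, a standalone sketch of the pattern, with a hypothetical fit_func standing in for the package's fitting routine:

```python
import warnings

def overlay_powerlaw_fit(ax, x, y, fit_func, name="plot"):
    """Try to fit and draw a power law; warn instead of raising on failure."""
    try:
        a, b = fit_func(x, y)  # may raise on too few points or non-convergence
        ax.plot(x, a * x**b, linestyle="dashed", color="black")
    except Exception as e:
        warnings.warn(f"Could not fit power law in {name}: {e!s}", UserWarning, stacklevel=2)
    return ax
```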
@@ -2189,40 +2140,43 @@ def plot_A_Z(
2189
2140
 
2190
2141
  # Fit and plot the power law
2191
2142
  if add_fit:
2192
- # Fit powerlaw k = a * Z ** b (Z in dBZ -> x_in_db=True)
2193
- (a_c, b), _ = fit_powerlaw(
2194
- x=df[z],
2195
- y=df[a],
2196
- xbins=z_bins,
2197
- x_in_db=True,
2198
- )
2199
- # Invert for Z = A * k ** B
2200
- A_c, B = inverse_powerlaw_parameters(a_c, b)
2201
- # Legend text
2202
- a_str = _define_coeff_string(a_c)
2203
- A_str = _define_coeff_string(A_c)
2204
- legend_str = (
2205
- rf"${a_symbol} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2206
- "\n"
2207
- rf"${z_lower_symbol} = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2208
- )
2209
- # Predictions
2210
- x_pred = np.arange(*z_lim)
2211
- x_pred_linear = disdrodb.idecibel(x_pred) # convert to linear for prediction
2212
- y_pred = predict_from_powerlaw(x_pred_linear, a=a_c, b=b)
2213
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2214
- # Add legend
2215
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2216
- ax.text(
2217
- 0.05,
2218
- 0.95,
2219
- legend_str,
2220
- transform=ax.transAxes,
2221
- ha="left",
2222
- va="top",
2223
- fontsize=legend_fontsize,
2224
- bbox=legend_bbox_dict,
2225
- )
2143
+ try:
2144
+ # Fit powerlaw k = a * Z ** b (Z in dBZ -> x_in_db=True)
2145
+ (a_c, b), _ = fit_powerlaw(
2146
+ x=df[z],
2147
+ y=df[a],
2148
+ xbins=z_bins,
2149
+ x_in_db=True,
2150
+ )
2151
+ # Invert for Z = A * k ** B
2152
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
2153
+ # Legend text
2154
+ a_str = _define_coeff_string(a_c)
2155
+ A_str = _define_coeff_string(A_c)
2156
+ legend_str = (
2157
+ rf"${a_symbol} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2158
+ "\n"
2159
+ rf"${z_lower_symbol} = {A_str} \, {a_symbol}^{{{B:.2f}}}$"
2160
+ )
2161
+ # Predictions
2162
+ x_pred = np.arange(*z_lim)
2163
+ x_pred_linear = disdrodb.idecibel(x_pred) # convert to linear for prediction
2164
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a_c, b=b)
2165
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2166
+ # Add legend
2167
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2168
+ ax.text(
2169
+ 0.05,
2170
+ 0.95,
2171
+ legend_str,
2172
+ transform=ax.transAxes,
2173
+ ha="left",
2174
+ va="top",
2175
+ fontsize=legend_fontsize,
2176
+ bbox=legend_bbox_dict,
2177
+ )
2178
+ except Exception as e:
2179
+ warnings.warn(f"Could not fit power law in plot_A_Z: {e!s}", UserWarning, stacklevel=2)
2226
2180
  return p
2227
2181
 
2228
2182
 
@@ -2322,43 +2276,46 @@ def plot_A_KDP(
2322
2276
 
2323
2277
  # Fit and overlay power law: k = a * KDP^b
2324
2278
  if add_fit:
2325
- (a_c, b), _ = fit_powerlaw(
2326
- x=df[kdp],
2327
- y=df[a],
2328
- xbins=kdp_bins,
2329
- x_in_db=False,
2330
- )
2331
- # Invert: KDP = A * k^B
2332
- A_c, B = inverse_powerlaw_parameters(a_c, b)
2333
-
2334
- a_str = _define_coeff_string(a_c)
2335
- A_str = _define_coeff_string(A_c)
2336
- legend_str = (
2337
- rf"${a_symbol} = {a_str}\,K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2338
- "\n"
2339
- rf"$K_{{\mathrm{{DP}}}} = {A_str}\,{a_symbol}^{{{B:.2f}}}$"
2340
- )
2341
-
2342
- # Predictions along KDP axis
2343
- if log_kdp:
2344
- x_pred = np.logspace(np.log10(kdp_lim[0]), np.log10(kdp_lim[1]), 400)
2345
- else:
2346
- x_pred = np.arange(kdp_lim[0], kdp_lim[1], 0.05)
2347
- y_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2279
+ try:
2280
+ (a_c, b), _ = fit_powerlaw(
2281
+ x=df[kdp],
2282
+ y=df[a],
2283
+ xbins=kdp_bins,
2284
+ x_in_db=False,
2285
+ )
2286
+ # Invert: KDP = A * k^B
2287
+ A_c, B = inverse_powerlaw_parameters(a_c, b)
2288
+
2289
+ a_str = _define_coeff_string(a_c)
2290
+ A_str = _define_coeff_string(A_c)
2291
+ legend_str = (
2292
+ rf"${a_symbol} = {a_str}\,K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2293
+ "\n"
2294
+ rf"$K_{{\mathrm{{DP}}}} = {A_str}\,{a_symbol}^{{{B:.2f}}}$"
2295
+ )
2348
2296
 
2349
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2350
- # Add legend
2351
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2352
- ax.text(
2353
- 0.05,
2354
- 0.95,
2355
- legend_str,
2356
- transform=ax.transAxes,
2357
- ha="left",
2358
- va="top",
2359
- fontsize=legend_fontsize,
2360
- bbox=legend_bbox_dict,
2361
- )
2297
+ # Predictions along KDP axis
2298
+ if log_kdp:
2299
+ x_pred = np.logspace(np.log10(kdp_lim[0]), np.log10(kdp_lim[1]), 400)
2300
+ else:
2301
+ x_pred = np.arange(kdp_lim[0], kdp_lim[1], 0.05)
2302
+ y_pred = predict_from_powerlaw(x_pred, a=a_c, b=b)
2303
+
2304
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2305
+ # Add legend
2306
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2307
+ ax.text(
2308
+ 0.05,
2309
+ 0.95,
2310
+ legend_str,
2311
+ transform=ax.transAxes,
2312
+ ha="left",
2313
+ va="top",
2314
+ fontsize=legend_fontsize,
2315
+ bbox=legend_bbox_dict,
2316
+ )
2317
+ except Exception as e:
2318
+ warnings.warn(f"Could not fit power law in plot_A_KDP: {e!s}", UserWarning, stacklevel=2)
2362
2319
 
2363
2320
  return p
2364
2321
 
@@ -2435,34 +2392,37 @@ def plot_R_Z(
2435
2392
 
2436
2393
  # Fit and plot the powerlaw
2437
2394
  if add_fit:
2438
- # Fit powerlaw R = a * z ** b
2439
- (a, b), _ = fit_powerlaw(x=df[z], y=df[r], xbins=np.arange(10, 50, 1), x_in_db=True)
2440
- # Invert for z = A * R ** B
2441
- A, B = inverse_powerlaw_parameters(a, b)
2442
- # Define legend title
2443
- a_str = _define_coeff_string(a)
2444
- A_str = _define_coeff_string(A)
2445
- legend_str = (
2446
- rf"$R = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$" "\n" rf"${z_lower_symbol} = {A_str} \, R^{{{B:.2f}}}$"
2447
- )
2448
- # Get power law predictions
2449
- x_pred = np.arange(*z_lims)
2450
- x_pred_linear = disdrodb.idecibel(x_pred)
2451
- r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2452
- # Add fitted powerlaw
2453
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2454
- # Add legend
2455
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2456
- ax.text(
2457
- 0.05,
2458
- 0.95,
2459
- legend_str,
2460
- transform=ax.transAxes,
2461
- ha="left",
2462
- va="top",
2463
- fontsize=legend_fontsize,
2464
- bbox=legend_bbox_dict,
2465
- )
2395
+ try:
2396
+ # Fit powerlaw R = a * z ** b
2397
+ (a, b), _ = fit_powerlaw(x=df[z], y=df[r], xbins=np.arange(10, 50, 1), x_in_db=True)
2398
+ # Invert for z = A * R ** B
2399
+ A, B = inverse_powerlaw_parameters(a, b)
2400
+ # Define legend title
2401
+ a_str = _define_coeff_string(a)
2402
+ A_str = _define_coeff_string(A)
2403
+ legend_str = (
2404
+ rf"$R = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$" "\n" rf"${z_lower_symbol} = {A_str} \, R^{{{B:.2f}}}$"
2405
+ )
2406
+ # Get power law predictions
2407
+ x_pred = np.arange(*z_lims)
2408
+ x_pred_linear = disdrodb.idecibel(x_pred)
2409
+ r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2410
+ # Add fitted powerlaw
2411
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2412
+ # Add legend
2413
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2414
+ ax.text(
2415
+ 0.05,
2416
+ 0.95,
2417
+ legend_str,
2418
+ transform=ax.transAxes,
2419
+ ha="left",
2420
+ va="top",
2421
+ fontsize=legend_fontsize,
2422
+ bbox=legend_bbox_dict,
2423
+ )
2424
+ except Exception as e:
2425
+ warnings.warn(f"Could not fit power law in plot_R_Z: {e!s}", UserWarning, stacklevel=2)
2466
2426
  return p
2467
2427
 
2468
2428
 
@@ -2545,35 +2505,38 @@ def plot_R_KDP(
2545
2505
 
2546
2506
  # Fit and plot the power law
2547
2507
  if add_fit:
2548
- # Fit powerlaw R = a * KDP ** b
2549
- (a, b), _ = fit_powerlaw(x=df[kdp], y=df[r], xbins=xbins, x_in_db=False)
2550
- # Invert for KDP = A * R ** B
2551
- A, B = inverse_powerlaw_parameters(a, b)
2552
- # Define legend title
2553
- a_str = _define_coeff_string(a)
2554
- A_str = _define_coeff_string(A)
2555
- legend_str = (
2556
- rf"$R = {a_str} \, K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2557
- "\n"
2558
- rf"$K_{{\mathrm{{DP}}}} = {A_str} \, R^{{{B:.2f}}}$"
2559
- )
2560
- # Get power law predictions
2561
- x_pred = np.arange(*kdp_lim)
2562
- r_pred = predict_from_powerlaw(x_pred, a=a, b=b)
2563
- # Add fitted line
2564
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2565
- # Add legend
2566
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2567
- ax.text(
2568
- 0.05,
2569
- 0.95,
2570
- legend_str,
2571
- transform=ax.transAxes,
2572
- ha="left",
2573
- va="top",
2574
- fontsize=legend_fontsize,
2575
- bbox=legend_bbox_dict,
2576
- )
2508
+ try:
2509
+ # Fit powerlaw R = a * KDP ** b
2510
+ (a, b), _ = fit_powerlaw(x=df[kdp], y=df[r], xbins=xbins, x_in_db=False)
2511
+ # Invert for KDP = A * R ** B
2512
+ A, B = inverse_powerlaw_parameters(a, b)
2513
+ # Define legend title
2514
+ a_str = _define_coeff_string(a)
2515
+ A_str = _define_coeff_string(A)
2516
+ legend_str = (
2517
+ rf"$R = {a_str} \, K_{{\mathrm{{DP}}}}^{{{b:.2f}}}$"
2518
+ "\n"
2519
+ rf"$K_{{\mathrm{{DP}}}} = {A_str} \, R^{{{B:.2f}}}$"
2520
+ )
2521
+ # Get power law predictions
2522
+ x_pred = np.arange(*kdp_lim)
2523
+ r_pred = predict_from_powerlaw(x_pred, a=a, b=b)
2524
+ # Add fitted line
2525
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2526
+ # Add legend
2527
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2528
+ ax.text(
2529
+ 0.05,
2530
+ 0.95,
2531
+ legend_str,
2532
+ transform=ax.transAxes,
2533
+ ha="left",
2534
+ va="top",
2535
+ fontsize=legend_fontsize,
2536
+ bbox=legend_bbox_dict,
2537
+ )
2538
+ except Exception as e:
2539
+ warnings.warn(f"Could not fit power law in plot_R_KDP: {e!s}", UserWarning, stacklevel=2)
2577
2540
  return p
2578
2541
 
2579
2542
 
@@ -2641,41 +2604,44 @@ def plot_ZDR_Z(
2641
2604
 
2642
2605
  # Fit and plot the power law
2643
2606
  if add_fit:
2644
- # Fit powerlaw ZDR = a * Z ** b
2645
- (a, b), _ = fit_powerlaw(
2646
- x=df[z],
2647
- y=df[zdr],
2648
- xbins=np.arange(5, 40, 1),
2649
- x_in_db=True,
2650
- )
2651
- # Invert for Z = A * ZDR ** B
2652
- A, B = inverse_powerlaw_parameters(a, b)
2653
- # Define legend title
2654
- a_str = _define_coeff_string(a)
2655
- A_str = _define_coeff_string(A)
2656
- legend_str = (
2657
- rf"$Z_{{\mathrm{{DR}}}} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2658
- "\n"
2659
- rf"${z_lower_symbol} = {A_str} \, Z_{{\mathrm{{DR}}}}^{{{B:.2f}}}$"
2660
- )
2661
- # Get power law predictions
2662
- x_pred = np.arange(0, 70)
2663
- x_pred_linear = disdrodb.idecibel(x_pred)
2664
- r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2665
- # Add fitted line
2666
- ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2667
- # Add legend
2668
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2669
- ax.text(
2670
- 0.05,
2671
- 0.95,
2672
- legend_str,
2673
- transform=ax.transAxes,
2674
- ha="left",
2675
- va="top",
2676
- fontsize=legend_fontsize,
2677
- bbox=legend_bbox_dict,
2678
- )
2607
+ try:
2608
+ # Fit powerlaw ZDR = a * Z ** b
2609
+ (a, b), _ = fit_powerlaw(
2610
+ x=df[z],
2611
+ y=df[zdr],
2612
+ xbins=np.arange(5, 40, 1),
2613
+ x_in_db=True,
2614
+ )
2615
+ # Invert for Z = A * ZDR ** B
2616
+ A, B = inverse_powerlaw_parameters(a, b)
2617
+ # Define legend title
2618
+ a_str = _define_coeff_string(a)
2619
+ A_str = _define_coeff_string(A)
2620
+ legend_str = (
2621
+ rf"$Z_{{\mathrm{{DR}}}} = {a_str} \, {z_lower_symbol}^{{{b:.2f}}}$"
2622
+ "\n"
2623
+ rf"${z_lower_symbol} = {A_str} \, Z_{{\mathrm{{DR}}}}^{{{B:.2f}}}$"
2624
+ )
2625
+ # Get power law predictions
2626
+ x_pred = np.arange(0, 70)
2627
+ x_pred_linear = disdrodb.idecibel(x_pred)
2628
+ r_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2629
+ # Add fitted line
2630
+ ax.plot(x_pred, r_pred, linestyle="dashed", color="black")
2631
+ # Add legend
2632
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2633
+ ax.text(
2634
+ 0.05,
2635
+ 0.95,
2636
+ legend_str,
2637
+ transform=ax.transAxes,
2638
+ ha="left",
2639
+ va="top",
2640
+ fontsize=legend_fontsize,
2641
+ bbox=legend_bbox_dict,
2642
+ )
2643
+ except Exception as e:
2644
+ warnings.warn(f"Could not fit power law in plot_ZDR_Z: {e!s}", UserWarning, stacklevel=2)
2679
2645
  return p
2680
2646
 
2681
2647
 
@@ -2762,43 +2728,46 @@ def plot_KDP_Z(
2762
2728
 
2763
2729
  # Fit and overlay power law
2764
2730
  if add_fit:
2765
- # Fit: KDP = a * Z^b (Z in dBZ → x_in_db=True)
2766
- (a, b), _ = fit_powerlaw(
2767
- x=df[z],
2768
- y=df[kdp],
2769
- xbins=np.arange(15, 50),
2770
- x_in_db=True,
2771
- )
2772
- # Invert: Z = A * KDP^B
2773
- A, B = inverse_powerlaw_parameters(a, b)
2774
-
2775
- # Define legend title
2776
- a_str = _define_coeff_string(a)
2777
- A_str = _define_coeff_string(A)
2778
- legend_str = (
2779
- rf"$K_{{\mathrm{{DP}}}} = {a_str}\,{z_lower_symbol}^{{{b:.2f}}}$"
2780
- "\n"
2781
- rf"${z_lower_symbol} = {A_str}\,K_{{\mathrm{{DP}}}}^{{{B:.2f}}}$"
2782
- )
2731
+ try:
2732
+ # Fit: KDP = a * Z^b (Z in dBZ → x_in_db=True)
2733
+ (a, b), _ = fit_powerlaw(
2734
+ x=df[z],
2735
+ y=df[kdp],
2736
+ xbins=np.arange(15, 50),
2737
+ x_in_db=True,
2738
+ )
2739
+ # Invert: Z = A * KDP^B
2740
+ A, B = inverse_powerlaw_parameters(a, b)
2741
+
2742
+ # Define legend title
2743
+ a_str = _define_coeff_string(a)
2744
+ A_str = _define_coeff_string(A)
2745
+ legend_str = (
2746
+ rf"$K_{{\mathrm{{DP}}}} = {a_str}\,{z_lower_symbol}^{{{b:.2f}}}$"
2747
+ "\n"
2748
+ rf"${z_lower_symbol} = {A_str}\,K_{{\mathrm{{DP}}}}^{{{B:.2f}}}$"
2749
+ )
2783
2750
 
2784
- # Get power law predictions
2785
- x_pred = np.arange(*z_lim)
2786
- x_pred_linear = disdrodb.idecibel(x_pred)
2787
- y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2788
- # Add fitted power law
2789
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2790
- # Add legend
2791
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2792
- ax.text(
2793
- 0.05,
2794
- 0.95,
2795
- legend_str,
2796
- transform=ax.transAxes,
2797
- ha="left",
2798
- va="top",
2799
- fontsize=legend_fontsize,
2800
- bbox=legend_bbox_dict,
2801
- )
2751
+ # Get power law predictions
2752
+ x_pred = np.arange(*z_lim)
2753
+ x_pred_linear = disdrodb.idecibel(x_pred)
2754
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
2755
+ # Add fitted power law
2756
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
2757
+ # Add legend
2758
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
2759
+ ax.text(
2760
+ 0.05,
2761
+ 0.95,
2762
+ legend_str,
2763
+ transform=ax.transAxes,
2764
+ ha="left",
2765
+ va="top",
2766
+ fontsize=legend_fontsize,
2767
+ bbox=legend_bbox_dict,
2768
+ )
2769
+ except Exception as e:
2770
+ warnings.warn(f"Could not fit power law in plot_KDP_Z: {e!s}", UserWarning, stacklevel=2)
2802
2771
 
2803
2772
  return p
2804
2773
 
@@ -3116,36 +3085,41 @@ def plot_KED_R(
3116
3085
  ax.set_title("KED vs R")
3117
3086
  # Fit and plot a powerlaw
3118
3087
  if add_fit:
3119
- # Fit a power law KED = a * R**b
3120
- (a, b), _ = fit_powerlaw(
3121
- x=df["R"],
3122
- y=df["KED"],
3123
- xbins=r_bins,
3124
- x_in_db=False,
3125
- )
3126
- # Invert for R = A * KED**B
3127
- A, B = inverse_powerlaw_parameters(a, b)
3128
- # Define legend string
3129
- a_str = _define_coeff_string(a)
3130
- A_str = _define_coeff_string(A)
3131
- legend_str = rf"$\mathrm{{KED}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KED}}^{{{B:.2f}}}$"
3132
- # Get power law predictions
3133
- x_pred = np.arange(r_lims[0], r_lims[1])
3134
- y_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3135
- # Add fitted powerlaw
3136
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3137
- # Add legend
3138
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3139
- ax.text(
3140
- 0.05,
3141
- 0.95,
3142
- legend_str,
3143
- transform=ax.transAxes,
3144
- ha="left",
3145
- va="top",
3146
- fontsize=legend_fontsize,
3147
- bbox=legend_bbox_dict,
3148
- )
3088
+ try:
3089
+ # Fit a power law KED = a * R**b
3090
+ (a, b), _ = fit_powerlaw(
3091
+ x=df["R"],
3092
+ y=df["KED"],
3093
+ xbins=r_bins,
3094
+ x_in_db=False,
3095
+ )
3096
+ # Invert for R = A * KED**B
3097
+ A, B = inverse_powerlaw_parameters(a, b)
3098
+ # Define legend string
3099
+ a_str = _define_coeff_string(a)
3100
+ A_str = _define_coeff_string(A)
3101
+ legend_str = (
3102
+ rf"$\mathrm{{KED}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KED}}^{{{B:.2f}}}$"
3103
+ )
3104
+ # Get power law predictions
3105
+ x_pred = np.arange(r_lims[0], r_lims[1])
3106
+ y_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3107
+ # Add fitted powerlaw
3108
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3109
+ # Add legend
3110
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3111
+ ax.text(
3112
+ 0.05,
3113
+ 0.95,
3114
+ legend_str,
3115
+ transform=ax.transAxes,
3116
+ ha="left",
3117
+ va="top",
3118
+ fontsize=legend_fontsize,
3119
+ bbox=legend_bbox_dict,
3120
+ )
3121
+ except Exception as e:
3122
+ warnings.warn(f"Could not fit power law in plot_KED_R: {e!s}", UserWarning, stacklevel=2)
3149
3123
 
3150
3124
  return p
3151
3125
 
@@ -3237,36 +3211,41 @@ def plot_KEF_R(
3237
3211
  # Fit and plot the power law
3238
3212
  # - Alternative fit model: a + I *(1 - b*exp(c*I)) (a is upper limit)
3239
3213
  if add_fit:
3240
- # Fit power law KEF = a * R ** b
3241
- (a, b), _ = fit_powerlaw(
3242
- x=df["R"],
3243
- y=df["KEF"],
3244
- xbins=r_bins,
3245
- x_in_db=False,
3246
- )
3247
- # Invert parameters for R = A * KEF ** B
3248
- A, B = inverse_powerlaw_parameters(a, b)
3249
- # Define legend string
3250
- a_str = _define_coeff_string(a)
3251
- A_str = _define_coeff_string(A)
3252
- legend_str = rf"$\mathrm{{KEF}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KEF}}^{{{B:.2f}}}$"
3253
- # Get power law predictions
3254
- x_pred = np.arange(*r_lims)
3255
- kef_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3256
- # Add fitted powerlaw
3257
- ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3258
- # Add legend
3259
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3260
- ax.text(
3261
- 0.05,
3262
- 0.95,
3263
- legend_str,
3264
- transform=ax.transAxes,
3265
- ha="left",
3266
- va="top",
3267
- fontsize=legend_fontsize,
3268
- bbox=legend_bbox_dict,
3269
- )
3214
+ try:
3215
+ # Fit power law KEF = a * R ** b
3216
+ (a, b), _ = fit_powerlaw(
3217
+ x=df["R"],
3218
+ y=df["KEF"],
3219
+ xbins=r_bins,
3220
+ x_in_db=False,
3221
+ )
3222
+ # Invert parameters for R = A * KEF ** B
3223
+ A, B = inverse_powerlaw_parameters(a, b)
3224
+ # Define legend string
3225
+ a_str = _define_coeff_string(a)
3226
+ A_str = _define_coeff_string(A)
3227
+ legend_str = (
3228
+ rf"$\mathrm{{KEF}} = {a_str}\,R^{{{b:.2f}}}$" "\n" rf"$R = {A_str}\,\mathrm{{KEF}}^{{{B:.2f}}}$"
3229
+ )
3230
+ # Get power law predictions
3231
+ x_pred = np.arange(*r_lims)
3232
+ kef_pred = predict_from_powerlaw(x_pred, a=a, b=b)
3233
+ # Add fitted powerlaw
3234
+ ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3235
+ # Add legend
3236
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3237
+ ax.text(
3238
+ 0.05,
3239
+ 0.95,
3240
+ legend_str,
3241
+ transform=ax.transAxes,
3242
+ ha="left",
3243
+ va="top",
3244
+ fontsize=legend_fontsize,
3245
+ bbox=legend_bbox_dict,
3246
+ )
3247
+ except Exception as e:
3248
+ warnings.warn(f"Could not fit power law in plot_KEF_R: {e!s}", UserWarning, stacklevel=2)
3270
3249
  return p
3271
3250
 
3272
3251
 
@@ -3351,41 +3330,44 @@ def plot_KEF_Z(
3351
3330
 
3352
3331
  # Fit and plot the powerlaw
3353
3332
  if add_fit:
3354
- # Fit power law KEF = a * Z ** b
3355
- (a, b), _ = fit_powerlaw(
3356
- x=df[z],
3357
- y=df["KEF"],
3358
- xbins=z_bins,
3359
- x_in_db=True,
3360
- )
3361
- # Invert parameters for Z = A * KEF ** B
3362
- A, B = inverse_powerlaw_parameters(a, b)
3363
- # Define legend string
3364
- a_str = _define_coeff_string(a)
3365
- A_str = _define_coeff_string(A)
3366
- legend_str = (
3367
- rf"$\mathrm{{KEF}} = {a_str}\;{z_lower_symbol}^{{{b:.2f}}}$"
3368
- "\n"
3369
- rf"${z_lower_symbol} = {A_str}\;\mathrm{{KEF}}^{{{B:.2f}}}$"
3370
- )
3371
- # Get power law predictions
3372
- x_pred = np.arange(*z_lims)
3373
- x_pred_linear = disdrodb.idecibel(x_pred)
3374
- kef_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3375
- # Add fitted powerlaw
3376
- ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3377
- # Add legend
3378
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3379
- ax.text(
3380
- 0.05,
3381
- 0.95,
3382
- legend_str,
3383
- transform=ax.transAxes,
3384
- ha="left",
3385
- va="top",
3386
- fontsize=legend_fontsize,
3387
- bbox=legend_bbox_dict,
3388
- )
3333
+ try:
3334
+ # Fit power law KEF = a * Z ** b
3335
+ (a, b), _ = fit_powerlaw(
3336
+ x=df[z],
3337
+ y=df["KEF"],
3338
+ xbins=z_bins,
3339
+ x_in_db=True,
3340
+ )
3341
+ # Invert parameters for Z = A * KEF ** B
3342
+ A, B = inverse_powerlaw_parameters(a, b)
3343
+ # Define legend string
3344
+ a_str = _define_coeff_string(a)
3345
+ A_str = _define_coeff_string(A)
3346
+ legend_str = (
3347
+ rf"$\mathrm{{KEF}} = {a_str}\;{z_lower_symbol}^{{{b:.2f}}}$"
3348
+ "\n"
3349
+ rf"${z_lower_symbol} = {A_str}\;\mathrm{{KEF}}^{{{B:.2f}}}$"
3350
+ )
3351
+ # Get power law predictions
3352
+ x_pred = np.arange(*z_lims)
3353
+ x_pred_linear = disdrodb.idecibel(x_pred)
3354
+ kef_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3355
+ # Add fitted powerlaw
3356
+ ax.plot(x_pred, kef_pred, linestyle="dashed", color="black")
3357
+ # Add legend
3358
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3359
+ ax.text(
3360
+ 0.05,
3361
+ 0.95,
3362
+ legend_str,
3363
+ transform=ax.transAxes,
3364
+ ha="left",
3365
+ va="top",
3366
+ fontsize=legend_fontsize,
3367
+ bbox=legend_bbox_dict,
3368
+ )
3369
+ except Exception as e:
3370
+ warnings.warn(f"Could not fit power law in plot_KEF_Z: {e!s}", UserWarning, stacklevel=2)
3389
3371
 
3390
3372
  return p
3391
3373
 
@@ -3464,37 +3446,42 @@ def plot_TKE_Z(
3464
3446
 
3465
3447
  # Fit and plot the powerlaw
3466
3448
  if add_fit:
3467
- # Fit power law TKE = a * Z ** b
3468
- (a, b), _ = fit_powerlaw(
3469
- x=df[z],
3470
- y=df["TKE"],
3471
- xbins=z_bins,
3472
- x_in_db=True,
3473
- )
3474
- # Invert parameters for Z = A * KEF ** B
3475
- A, B = inverse_powerlaw_parameters(a, b)
3476
- # Define legend string
3477
- a_str = _define_coeff_string(a)
3478
- A_str = _define_coeff_string(A)
3479
- legend_str = rf"$\mathrm{{TKE}} = {a_str}\;z^{{{b:.2f}}}$" "\n" rf"$z = {A_str}\;\mathrm{{TKE}}^{{{B:.2f}}}$"
3480
- # Get power law predictions
3481
- x_pred = np.arange(*z_lims)
3482
- x_pred_linear = disdrodb.idecibel(x_pred)
3483
- y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3484
- # Add fitted powerlaw
3485
- ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3486
- # Add legend
3487
- legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3488
- ax.text(
3489
- 0.05,
3490
- 0.95,
3491
- legend_str,
3492
- transform=ax.transAxes,
3493
- ha="left",
3494
- va="top",
3495
- fontsize=legend_fontsize,
3496
- bbox=legend_bbox_dict,
3497
- )
3449
+ try:
3450
+ # Fit power law TKE = a * Z ** b
3451
+ (a, b), _ = fit_powerlaw(
3452
+ x=df[z],
3453
+ y=df["TKE"],
3454
+ xbins=z_bins,
3455
+ x_in_db=True,
3456
+ )
3457
+ # Invert parameters for Z = A * KEF ** B
3458
+ A, B = inverse_powerlaw_parameters(a, b)
3459
+ # Define legend string
3460
+ a_str = _define_coeff_string(a)
3461
+ A_str = _define_coeff_string(A)
3462
+ legend_str = (
3463
+ rf"$\mathrm{{TKE}} = {a_str}\;z^{{{b:.2f}}}$" "\n" rf"$z = {A_str}\;\mathrm{{TKE}}^{{{B:.2f}}}$"
3464
+ )
3465
+ # Get power law predictions
3466
+ x_pred = np.arange(*z_lims)
3467
+ x_pred_linear = disdrodb.idecibel(x_pred)
3468
+ y_pred = predict_from_powerlaw(x_pred_linear, a=a, b=b)
3469
+ # Add fitted powerlaw
3470
+ ax.plot(x_pred, y_pred, linestyle="dashed", color="black")
3471
+ # Add legend
3472
+ legend_bbox_dict = {"facecolor": "white", "edgecolor": "black", "alpha": 0.7}
3473
+ ax.text(
3474
+ 0.05,
3475
+ 0.95,
3476
+ legend_str,
3477
+ transform=ax.transAxes,
3478
+ ha="left",
3479
+ va="top",
3480
+ fontsize=legend_fontsize,
3481
+ bbox=legend_bbox_dict,
3482
+ )
3483
+ except Exception as e:
3484
+ warnings.warn(f"Could not fit power law in plot_TKE_Z: {e!s}", UserWarning, stacklevel=2)
3498
3485
 
3499
3486
  return p
3500
3487
 
@@ -3729,14 +3716,16 @@ def plot_kinetic_energy_relationships(df):
  #### Summary routine


- def define_filename(prefix, extension, data_source, campaign_name, station_name):
+ def define_filename(prefix, extension, data_source, campaign_name, station_name, temporal_resolution):
  """Define filename for summary files."""
  if extension in ["png", "jpeg"]:
- filename = f"Figure.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
- if extension in ["csv", "parquet", "pdf", "yaml", "yml"]:
- filename = f"Table.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
+ filename = f"Figure.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
+ if extension in ["csv", "pdf", "yaml", "yml"]:
+ filename = f"Table.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
  if extension in ["nc"]:
- filename = f"Dataset.{prefix}.{data_source}.{campaign_name}.{station_name}.{extension}"
+ filename = f"Dataset.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
+ if extension in ["parquet"]:
+ filename = f"Dataframe.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
  return filename

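A usage sketch of the updated helper with made-up metadata (assuming define_filename is in scope): the temporal resolution is now part of every summary filename, and Parquet output moves from the Table prefix to a Dataframe prefix.

```python
filename = define_filename(
    prefix="L2E",
    extension="parquet",
    data_source="DATA_SOURCE",
    campaign_name="CAMPAIGN",
    station_name="STATION",
    temporal_resolution="1MIN",
)
# -> "Dataframe.L2E.DATA_SOURCE.CAMPAIGN.STATION.1MIN.parquet"
```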
@@ -3782,7 +3771,7 @@ def prepare_summary_dataset(ds, velocity_method="fall_velocity", source="drop_nu
  return ds


- def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, station_name):
+ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, station_name, temporal_resolution):
  """Generate station summary using L2E dataset."""
  # Create summary directory if does not exist
  os.makedirs(summary_dir_path, exist_ok=True)
@@ -3800,121 +3789,153 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
3800
3789
 
3801
3790
  ####---------------------------------------------------------------------.
3802
3791
  #### Create drop spectrum figures and statistics
3803
- # Compute sum of raw and filtered spectrum over time
3804
- raw_drop_number = ds["raw_drop_number"].sum(dim="time")
3805
- drop_number = ds["drop_number"].sum(dim="time")
3806
-
3807
- # Define theoretical and measured average velocity
3808
- theoretical_average_velocity = ds["fall_velocity"].mean(dim="time")
3809
- measured_average_velocity = get_drop_average_velocity(drop_number)
3810
-
3811
- # Save raw and filtered spectrum over time & theoretical and measured average fall velocity
3812
- ds_stats = xr.Dataset()
3813
- ds_stats["raw_drop_number"] = raw_drop_number
3814
- ds_stats["drop_number"] = raw_drop_number
3815
- ds_stats["theoretical_average_velocity"] = theoretical_average_velocity
3816
- ds_stats["measured_average_velocity"] = measured_average_velocity
3817
- filename = define_filename(
3818
- prefix="SpectrumStats",
3819
- extension="nc",
3820
- data_source=data_source,
3821
- campaign_name=campaign_name,
3822
- station_name=station_name,
3823
- )
3824
- ds_stats.to_netcdf(os.path.join(summary_dir_path, filename))
3792
+ if VELOCITY_DIMENSION in ds.dims:
3793
+ # Compute sum of raw and filtered spectrum over time
3794
+ raw_drop_number = ds["raw_drop_number"].sum(dim="time")
3795
+ drop_number = ds["drop_number"].sum(dim="time")
3796
+
3797
+ # Define theoretical and measured average velocity
3798
+ theoretical_average_velocity = ds["fall_velocity"].mean(dim="time")
3799
+ measured_average_velocity = get_drop_average_velocity(drop_number)
3800
+
3801
+ # Save raw and filtered spectrum over time & theoretical and measured average fall velocity
3802
+ ds_stats = xr.Dataset()
3803
+ ds_stats["raw_drop_number"] = raw_drop_number
3804
+ ds_stats["drop_number"] = raw_drop_number
3805
+ ds_stats["theoretical_average_velocity"] = theoretical_average_velocity
3806
+ if measured_average_velocity is not None:
3807
+ ds_stats["measured_average_velocity"] = measured_average_velocity
3808
+ filename = define_filename(
3809
+ prefix="SpectrumStats",
3810
+ extension="nc",
3811
+ data_source=data_source,
3812
+ campaign_name=campaign_name,
3813
+ station_name=station_name,
3814
+ temporal_resolution=temporal_resolution,
3815
+ )
3816
+ ds_stats.to_netcdf(os.path.join(summary_dir_path, filename))
3825
3817
 
3826
- # Create figures with raw and filtered spectrum
3827
- # - Raw
3828
- filename = define_filename(
3829
- prefix="SpectrumRaw",
3830
- extension="png",
3831
- data_source=data_source,
3832
- campaign_name=campaign_name,
3833
- station_name=station_name,
3834
- )
3835
- p = plot_drop_spectrum(raw_drop_number, title="Raw Drop Spectrum")
3836
- p.figure.savefig(os.path.join(summary_dir_path, filename))
3837
- plt.close()
3818
+ # Create figures with raw and filtered spectrum
3819
+ # - Raw
3820
+ filename = define_filename(
3821
+ prefix="SpectrumRaw",
3822
+ extension="png",
3823
+ data_source=data_source,
3824
+ campaign_name=campaign_name,
3825
+ station_name=station_name,
3826
+ temporal_resolution=temporal_resolution,
3827
+ )
3828
+ p = plot_spectrum(raw_drop_number, title="Raw Drop Spectrum")
3829
+ p.figure.savefig(os.path.join(summary_dir_path, filename))
3830
+ plt.close()
3838
3831
 
3839
- # - Filtered
3840
- filename = define_filename(
3841
- prefix="SpectrumFiltered",
3842
- extension="png",
3843
- data_source=data_source,
3844
- campaign_name=campaign_name,
3845
- station_name=station_name,
3846
- )
3847
- p = plot_drop_spectrum(drop_number, title="Filtered Drop Spectrum")
3848
- p.figure.savefig(os.path.join(summary_dir_path, filename))
3849
- plt.close()
3832
+ # - Filtered
3833
+ filename = define_filename(
3834
+ prefix="SpectrumFiltered",
3835
+ extension="png",
3836
+ data_source=data_source,
3837
+ campaign_name=campaign_name,
3838
+ station_name=station_name,
3839
+ temporal_resolution=temporal_resolution,
3840
+ )
3841
+ p = plot_spectrum(drop_number, title="Filtered Drop Spectrum")
3842
+ p.figure.savefig(os.path.join(summary_dir_path, filename))
3843
+ plt.close()
3850
3844
 
3851
- # Create figure comparing raw and filtered spectrum
3852
- filename = define_filename(
3853
- prefix="SpectrumSummary",
3854
- extension="png",
3855
- data_source=data_source,
3856
- campaign_name=campaign_name,
3857
- station_name=station_name,
3858
- )
3845
+ # Create figure comparing raw and filtered spectrum
3846
+ filename = define_filename(
3847
+ prefix="SpectrumSummary",
3848
+ extension="png",
3849
+ data_source=data_source,
3850
+ campaign_name=campaign_name,
3851
+ station_name=station_name,
3852
+ temporal_resolution=temporal_resolution,
3853
+ )
3859
3854
 
3860
- fig = plot_raw_and_filtered_spectrums(
3861
- raw_drop_number=raw_drop_number,
3862
- drop_number=drop_number,
3863
- theoretical_average_velocity=theoretical_average_velocity,
3864
- measured_average_velocity=measured_average_velocity,
3865
- )
3866
- fig.savefig(os.path.join(summary_dir_path, filename))
3867
- plt.close()
3855
+ fig = plot_raw_and_filtered_spectra(ds)
3856
+ fig.savefig(os.path.join(summary_dir_path, filename))
3857
+ plt.close()
3868
3858
 
3869
3859
  ####---------------------------------------------------------------------.
3870
- #### Create L2E 1MIN dataframe
3860
+ #### Create L2E dataframe
3871
3861
  df = create_l2_dataframe(ds)
3872
3862
 
3873
3863
  # Define diameter bin edges
3874
3864
  diameter_bin_edges = get_diameter_bin_edges(ds)
3875
3865
 
3876
3866
  # ---------------------------------------------------------------------.
3877
- #### Save L2E 1MIN Parquet
3878
- l2e_parquet_filename = f"L2E.1MIN.PARQUET.{data_source}.{campaign_name}.{station_name}.parquet"
3867
+ #### Save L2E Parquet
3868
+ l2e_parquet_filename = define_filename(
3869
+ prefix="L2E",
3870
+ extension="parquet",
3871
+ data_source=data_source,
3872
+ campaign_name=campaign_name,
3873
+ station_name=station_name,
3874
+ temporal_resolution=temporal_resolution,
3875
+ )
3879
3876
  l2e_parquet_filepath = os.path.join(summary_dir_path, l2e_parquet_filename)
3880
3877
  df.to_parquet(l2e_parquet_filepath, engine="pyarrow", compression="snappy")
3881
3878
 
3882
3879
  #### ---------------------------------------------------------------------.
3883
3880
  #### Create table with rain summary
3884
- table_rain_summary = create_table_rain_summary(df)
3885
- table_rain_summary_filename = f"Station_Summary.{data_source}.{campaign_name}.{station_name}.yaml"
3886
- table_rain_summary_filepath = os.path.join(summary_dir_path, table_rain_summary_filename)
3887
- write_yaml(table_rain_summary, filepath=table_rain_summary_filepath)
3881
+ if not temporal_resolution.startswith("ROLL"):
3882
+ table_rain_summary = create_table_rain_summary(df, temporal_resolution=temporal_resolution)
3883
+ table_rain_summary_filename = define_filename(
3884
+ prefix="Station_Summary",
3885
+ extension="yaml",
3886
+ data_source=data_source,
3887
+        campaign_name=campaign_name,
+        station_name=station_name,
+        temporal_resolution=temporal_resolution,
+    )
+    table_rain_summary_filepath = os.path.join(summary_dir_path, table_rain_summary_filename)
+    write_yaml(table_rain_summary, filepath=table_rain_summary_filepath)
 
     # ---------------------------------------------------------------------.
-    #### Creata table with events summary
-    table_events_summary = create_table_events_summary(df)
-    # - Save table as csv
-    table_events_summary_csv_filename = f"Events_Summary.{data_source}.{campaign_name}.{station_name}.csv"
-    table_events_summary_csv_filepath = os.path.join(summary_dir_path, table_events_summary_csv_filename)
-    table_events_summary.to_csv(table_events_summary_csv_filepath)
-    # - Save table as pdf
-    if is_latex_engine_available():
-        table_events_summary_pdf_filename = f"Events_Summary.{data_source}.{campaign_name}.{station_name}.pdf"
-        table_events_summary_pdf_filepath = os.path.join(summary_dir_path, table_events_summary_pdf_filename)
-        save_table_to_pdf(
-            df=prepare_latex_table_events_summary(table_events_summary),
-            filepath=table_events_summary_pdf_filepath,
-            index=True,
-            caption="Events Summary",
-            orientation="landscape",
-        )
+    #### Create table with events summary
+    if not temporal_resolution.startswith("ROLL"):
+        table_events_summary = create_table_events_summary(df, temporal_resolution=temporal_resolution)
+        if len(table_events_summary) > 0:
+            # - Save table as csv
+            table_events_summary_csv_filename = define_filename(
+                prefix="Events_Summary",
+                extension="csv",
+                data_source=data_source,
+                campaign_name=campaign_name,
+                station_name=station_name,
+                temporal_resolution=temporal_resolution,
+            )
+            table_events_summary_csv_filepath = os.path.join(summary_dir_path, table_events_summary_csv_filename)
+            table_events_summary.to_csv(table_events_summary_csv_filepath)
+            # - Save table as pdf
+            if is_latex_engine_available():
+                table_events_summary_pdf_filename = table_events_summary_csv_filename.replace(".csv", ".pdf")
+                table_events_summary_pdf_filepath = os.path.join(summary_dir_path, table_events_summary_pdf_filename)
+                save_table_to_pdf(
+                    df=prepare_latex_table_events_summary(table_events_summary),
+                    filepath=table_events_summary_pdf_filepath,
+                    index=True,
+                    caption="Events Summary",
+                    orientation="landscape",
+                )
 
     # ---------------------------------------------------------------------.
     #### Create table with integral DSD parameters statistics
     table_dsd_summary = create_table_dsd_summary(df)
     # - Save table as csv
-    table_dsd_summary_csv_filename = f"DSD_Summary.{data_source}.{campaign_name}.{station_name}.csv"
+    table_dsd_summary_csv_filename = define_filename(
+        prefix="DSD_Summary",
+        extension="csv",
+        data_source=data_source,
+        campaign_name=campaign_name,
+        station_name=station_name,
+        temporal_resolution=temporal_resolution,
+    )
     table_dsd_summary_csv_filepath = os.path.join(summary_dir_path, table_dsd_summary_csv_filename)
     table_dsd_summary.to_csv(table_dsd_summary_csv_filepath)
     # - Save table as pdf
     if is_latex_engine_available():
-        table_dsd_summary_pdf_filename = f"DSD_Summary.{data_source}.{campaign_name}.{station_name}.pdf"
+        table_dsd_summary_pdf_filename = table_dsd_summary_csv_filename.replace(".csv", ".pdf")
         table_dsd_summary_pdf_filepath = os.path.join(summary_dir_path, table_dsd_summary_pdf_filename)
         save_table_to_pdf(
             df=prepare_latex_table_dsd_summary(table_dsd_summary),
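The table hunks above swap the hand-assembled f-string filenames for the shared `define_filename` helper and derive the PDF name from the CSV name, so both artifacts stay in sync and the temporal resolution becomes part of every summary filename. Purely as an illustration of the naming pattern (the real helper ships with disdrodb; the field order, the separator, and the station identifiers below are assumptions):

```python
# Hypothetical stand-in for disdrodb's define_filename, used only to illustrate
# the dot-separated naming convention suggested by the hunks above.
def define_filename(prefix, extension, data_source, campaign_name, station_name, temporal_resolution):
    return f"{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"


csv_filename = define_filename(
    prefix="Events_Summary",
    extension="csv",
    data_source="GPM",          # placeholder identifiers
    campaign_name="IFLOODS",
    station_name="APU01",
    temporal_resolution="10MIN",
)
pdf_filename = csv_filename.replace(".csv", ".pdf")  # the PDF reuses the CSV stem, as in the hunk above
print(csv_filename)  # Events_Summary.GPM.IFLOODS.APU01.10MIN.csv
print(pdf_filename)  # Events_Summary.GPM.IFLOODS.APU01.10MIN.pdf
```

Note also that the new code builds the events table only for non-rolling resolutions (the `temporal_resolution.startswith("ROLL")` guard) and skips writing it when the table is empty.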
@@ -3926,186 +3947,202 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
 
     #### ---------------------------------------------------------------------.
     #### Create L2E RADAR Summary Plots
-    # Summary plots at X, C, S bands
-    if "DBZH_X" in df:
+    if len(df) > 1000:
+        # Summary plots at X, C, S bands
+        if "DBZH_X" in df:
+            filename = define_filename(
+                prefix="Radar_Band_X",
+                extension="png",
+                data_source=data_source,
+                campaign_name=campaign_name,
+                station_name=station_name,
+                temporal_resolution=temporal_resolution,
+            )
+            fig = plot_radar_relationships(df, band="X")
+            fig.savefig(os.path.join(summary_dir_path, filename))
+        if "DBZH_C" in df:
+            filename = define_filename(
+                prefix="Radar_Band_C",
+                extension="png",
+                data_source=data_source,
+                campaign_name=campaign_name,
+                station_name=station_name,
+                temporal_resolution=temporal_resolution,
+            )
+            fig = plot_radar_relationships(df, band="C")
+            fig.savefig(os.path.join(summary_dir_path, filename))
+        if "DBZH_S" in df:
+            filename = define_filename(
+                prefix="Radar_Band_S",
+                extension="png",
+                data_source=data_source,
+                campaign_name=campaign_name,
+                station_name=station_name,
+                temporal_resolution=temporal_resolution,
+            )
+            fig = plot_radar_relationships(df, band="S")
+            fig.savefig(os.path.join(summary_dir_path, filename))
+
+        # ---------------------------------------------------------------------.
+        #### Create L2E Z-R figure
         filename = define_filename(
-            prefix="Radar_Band_X",
+            prefix="Z-R",
             extension="png",
             data_source=data_source,
             campaign_name=campaign_name,
             station_name=station_name,
+            temporal_resolution=temporal_resolution,
         )
-        fig = plot_radar_relationships(df, band="X")
-        fig.savefig(os.path.join(summary_dir_path, filename))
-    if "DBZH_C" in df:
+
+        p = plot_R_Z(df, z="Z", r="R", title=r"$Z$ vs $R$")
+        p.figure.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
+
+        #### ---------------------------------------------------------------------.
+        #### Create L2E Kinetic Energy Summary Plots
         filename = define_filename(
-            prefix="Radar_Band_C",
+            prefix="KineticEnergy",
             extension="png",
             data_source=data_source,
             campaign_name=campaign_name,
             station_name=station_name,
+            temporal_resolution=temporal_resolution,
         )
-        fig = plot_radar_relationships(df, band="C")
+        fig = plot_kinetic_energy_relationships(df)
         fig.savefig(os.path.join(summary_dir_path, filename))
-    if "DBZH_S" in df:
+
+        #### ---------------------------------------------------------------------.
+        #### Create L2E DSD Parameters summary plots
+        #### - Create DSD parameters density figures with LWC
         filename = define_filename(
-            prefix="Radar_Band_S",
+            prefix="DSD_Params_Density_with_LWC_LinearDm_MaxNormalized",
             extension="png",
             data_source=data_source,
             campaign_name=campaign_name,
             station_name=station_name,
+            temporal_resolution=temporal_resolution,
         )
-        fig = plot_radar_relationships(df, band="S")
+        fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=False)
         fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    # ---------------------------------------------------------------------.
-    #### - Create Z-R figure
-    filename = define_filename(
-        prefix="Z-R",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-
-    p = plot_R_Z(df, z="Z", r="R", title=r"$Z$ vs $R$")
-    p.figure.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
-
-    #### ---------------------------------------------------------------------.
-    #### Create L2E Kinetic Energy Summary Plots
-    filename = define_filename(
-        prefix="KineticEnergy",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_kinetic_energy_relationships(df)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-
-    #### ---------------------------------------------------------------------.
-    #### Create L2E DSD Parameters summary plots
-    #### - Create DSD parameters density figures with LWC
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_LWC_LinearDm_MaxNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=False)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
-
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_LWC_LogDm_MaxNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=False)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_LWC_LogDm_MaxNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=False)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_LWC_LinearDm_LogNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=True)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_LWC_LinearDm_LogNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=False, lwc=True, log_normalize=True)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_LWC_LogDm_LogNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=True)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_LWC_LogDm_LogNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=True, lwc=True, log_normalize=True)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    ###------------------------------------------------------------------------.
-    #### - Create DSD parameters density figures with R
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_R_LinearDm_MaxNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=False)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        ###------------------------------------------------------------------------.
+        #### - Create DSD parameters density figures with R
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_R_LinearDm_MaxNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=False)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_R_LogDm_MaxNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=False)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_R_LogDm_MaxNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=False)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_R_LinearDm_LogNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=True)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_R_LinearDm_LogNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=False, lwc=False, log_normalize=True)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    filename = define_filename(
-        prefix="DSD_Params_Density_with_R_LogDm_LogNormalized",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=True)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        filename = define_filename(
+            prefix="DSD_Params_Density_with_R_LogDm_LogNormalized",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_density(df, log_dm=True, lwc=False, log_normalize=True)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    ###------------------------------------------------------------------------.
-    #### - Create DSD parameters relationship figures
-    filename = define_filename(
-        prefix="DSD_Params_Relations",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dsd_params_relationships(df, add_nt=True)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        ###------------------------------------------------------------------------.
+        #### - Create DSD parameters relationship figures
+        filename = define_filename(
+            prefix="DSD_Params_Relations",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dsd_params_relationships(df, add_nt=True)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    ###------------------------------------------------------------------------.
-    #### - Create Dmax relationship figures
-    filename = define_filename(
-        prefix="DSD_Dmax_Relations",
-        extension="png",
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    fig = plot_dmax_relationships(df, diameter_bin_edges=diameter_bin_edges, dmax="Dmax", diameter_max=10)
-    fig.savefig(os.path.join(summary_dir_path, filename))
-    plt.close()
+        ###------------------------------------------------------------------------.
+        #### - Create Dmax relationship figures
+        filename = define_filename(
+            prefix="DSD_Dmax_Relations",
+            extension="png",
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
+        fig = plot_dmax_relationships(df, diameter_bin_edges=diameter_bin_edges, dmax="Dmax", diameter_max=10)
+        fig.savefig(os.path.join(summary_dir_path, filename))
+        plt.close()
 
-    #### ---------------------------------------------------------------------.
-    #### Create L2E QC summary plots
-    # TODO:
+        #### ---------------------------------------------------------------------.
+        #### Create L2E QC summary plots
+        # TODO:
 
     ####------------------------------------------------------------------------.
     #### Free space - Remove df from memory
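Each figure in this hunk follows the same four steps (compose a filename, draw, save, close), now nested under the new `if len(df) > 1000:` guard so plots are only produced when enough samples are available, and every filename now carries the temporal resolution. As a sketch of how the eight DSD-parameter density panels could be expressed in a single loop (an editor's illustration, not the code shipped in the package; `make_filename` and `plot_func` stand in for the `define_filename` and `plot_dsd_params_density` calls shown above):

```python
import itertools
import os

import matplotlib.pyplot as plt


def save_dsd_density_figures(df, summary_dir_path, make_filename, plot_func):
    """Loop over the eight DSD-parameter density variants produced in the hunk above."""
    for lwc, log_dm, log_normalize in itertools.product([True, False], repeat=3):
        # Rebuild the prefix used above, e.g. DSD_Params_Density_with_LWC_LinearDm_MaxNormalized
        prefix = "DSD_Params_Density_with_{}_{}Dm_{}Normalized".format(
            "LWC" if lwc else "R",
            "Log" if log_dm else "Linear",
            "Log" if log_normalize else "Max",
        )
        filename = make_filename(prefix=prefix, extension="png")
        fig = plot_func(df, log_dm=log_dm, lwc=lwc, log_normalize=log_normalize)
        fig.savefig(os.path.join(summary_dir_path, filename))
        plt.close(fig)
```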
@@ -4123,6 +4160,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        temporal_resolution=temporal_resolution,
     )
     p = plot_dsd_density(df_nd, diameter_bin_edges=diameter_bin_edges)
     p.figure.savefig(os.path.join(summary_dir_path, filename))
@@ -4135,6 +4173,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        temporal_resolution=temporal_resolution,
     )
     p = plot_normalized_dsd_density(df_nd)
     p.figure.savefig(os.path.join(summary_dir_path, filename))
@@ -4158,6 +4197,7 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        temporal_resolution=temporal_resolution,
     )
     p = plot_dsd_with_dense_lines(drop_number_concentration=drop_number_concentration, r=r)
     p.figure.savefig(os.path.join(summary_dir_path, filename))
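The three one-line hunks above make the same change for the remaining DSD figures: each filename gains the `temporal_resolution` field, so summaries generated at different resolutions for the same station no longer overwrite one another. For example (the prefix and station identifiers below are placeholders, following the naming sketch earlier in this section):

```python
# Hypothetical filenames for one station at three temporal resolutions; without
# the resolution field, all three runs would write to the same file.
for temporal_resolution in ("1MIN", "10MIN", "ROLL10MIN"):
    print(f"DSD_Density.GPM.IFLOODS.APU01.{temporal_resolution}.png")
```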
@@ -4191,22 +4231,22 @@ def create_station_summary(
     )
     os.makedirs(summary_dir_path, exist_ok=True)
 
-    # Define product_kwargs
-    sample_interval, rolling = get_sampling_information(temporal_resolution)
-    product_kwargs = {"rolling": rolling, "sample_interval": sample_interval}
-
-    # Load L2E 1MIN dataset
-    ds = disdrodb.open_dataset(
-        data_archive_dir=data_archive_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-        product="L2E",
-        product_kwargs=product_kwargs,
-        parallel=parallel,
-        chunks=-1,
-        compute=True,
-    )
+    # Load L2E dataset
+    try:
+        ds = disdrodb.open_dataset(
+            data_archive_dir=data_archive_dir,
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            product="L2E",
+            temporal_resolution=temporal_resolution,
+            parallel=parallel,
+            chunks=-1,
+            compute=True,
+        )
+    except Exception as e:
+        print("Impossible to create the station summary." + str(e))
+        return
 
     # Generate station summary figures and table
     generate_station_summary(
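In the hunk above, `create_station_summary` no longer translates the temporal resolution into a `product_kwargs` dict via `get_sampling_information`; it passes `temporal_resolution` straight to `disdrodb.open_dataset` and returns early if the L2E product cannot be opened (for example when no files exist yet for that resolution). A minimal usage sketch based on that call, with placeholder station identifiers and assuming the archive directory is taken from the disdrodb configuration when `data_archive_dir` is omitted:

```python
import disdrodb

try:
    ds = disdrodb.open_dataset(
        data_source="GPM",            # placeholder station identifiers
        campaign_name="IFLOODS",
        station_name="APU01",
        product="L2E",
        temporal_resolution="10MIN",  # replaces the old product_kwargs={"rolling": ..., "sample_interval": ...}
        parallel=False,
        chunks=-1,
        compute=True,
    )
except Exception as e:
    print(f"Impossible to create the station summary: {e}")
else:
    print(ds)
```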
@@ -4215,6 +4255,7 @@ def create_station_summary(
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        temporal_resolution=temporal_resolution,
     )
 
     print(f"Creation of station summary for {data_source} {campaign_name} {station_name} has terminated.")