disdrodb 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (129)
  1. disdrodb/__init__.py +64 -34
  2. disdrodb/_config.py +5 -4
  3. disdrodb/_version.py +16 -3
  4. disdrodb/accessor/__init__.py +20 -0
  5. disdrodb/accessor/methods.py +125 -0
  6. disdrodb/api/checks.py +139 -9
  7. disdrodb/api/configs.py +4 -2
  8. disdrodb/api/info.py +10 -10
  9. disdrodb/api/io.py +237 -18
  10. disdrodb/api/path.py +81 -75
  11. disdrodb/api/search.py +6 -6
  12. disdrodb/cli/disdrodb_create_summary_station.py +91 -0
  13. disdrodb/cli/disdrodb_run_l0.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0_station.py +1 -1
  15. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  16. disdrodb/cli/disdrodb_run_l0b_station.py +1 -1
  17. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  18. disdrodb/cli/disdrodb_run_l0c_station.py +1 -1
  19. disdrodb/cli/disdrodb_run_l2e_station.py +1 -1
  20. disdrodb/configs.py +149 -4
  21. disdrodb/constants.py +61 -0
  22. disdrodb/data_transfer/download_data.py +145 -14
  23. disdrodb/etc/configs/attributes.yaml +339 -0
  24. disdrodb/etc/configs/encodings.yaml +473 -0
  25. disdrodb/etc/products/L1/global.yaml +13 -0
  26. disdrodb/etc/products/L2E/10MIN.yaml +12 -0
  27. disdrodb/etc/products/L2E/1MIN.yaml +1 -0
  28. disdrodb/etc/products/L2E/global.yaml +22 -0
  29. disdrodb/etc/products/L2M/10MIN.yaml +12 -0
  30. disdrodb/etc/products/L2M/GAMMA_ML.yaml +8 -0
  31. disdrodb/etc/products/L2M/NGAMMA_GS_LOG_ND_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/NGAMMA_GS_ND_MAE.yaml +6 -0
  33. disdrodb/etc/products/L2M/NGAMMA_GS_Z_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/global.yaml +26 -0
  35. disdrodb/l0/__init__.py +13 -0
  36. disdrodb/l0/configs/LPM/bins_diameter.yml +3 -3
  37. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +4 -4
  38. disdrodb/l0/configs/PARSIVEL/l0b_cf_attrs.yml +1 -1
  39. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +3 -3
  40. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +1 -1
  41. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +4 -0
  42. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +20 -4
  43. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +44 -3
  44. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +41 -1
  45. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +4 -4
  46. disdrodb/l0/configs/PWS100/raw_data_format.yml +1 -1
  47. disdrodb/l0/l0a_processing.py +30 -30
  48. disdrodb/l0/l0b_nc_processing.py +108 -2
  49. disdrodb/l0/l0b_processing.py +4 -4
  50. disdrodb/l0/l0c_processing.py +5 -13
  51. disdrodb/l0/manuals/SWS250.pdf +0 -0
  52. disdrodb/l0/manuals/VPF730.pdf +0 -0
  53. disdrodb/l0/manuals/VPF750.pdf +0 -0
  54. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_LPM_NC.py +66 -0
  55. disdrodb/l0/readers/LPM/SLOVENIA/{CRNI_VRH.py → UL.py} +3 -0
  56. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +195 -0
  57. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +105 -0
  58. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +128 -0
  59. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +1 -1
  60. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +1 -1
  61. disdrodb/l0/readers/PARSIVEL2/BELGIUM/ILVO.py +168 -0
  62. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +165 -0
  63. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +69 -0
  64. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +255 -134
  65. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +525 -0
  66. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +1 -1
  67. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +9 -7
  68. disdrodb/l0/readers/{PARSIVEL → PARSIVEL2}/KIT/BURKINA_FASO.py +1 -1
  69. disdrodb/l0/readers/PARSIVEL2/KIT/TEAMX.py +123 -0
  70. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +120 -0
  71. disdrodb/l0/readers/PARSIVEL2/{NETHERLANDS/DELFT.py → NCAR/FARM_PARSIVEL2.py} +43 -70
  72. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +1 -1
  73. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +126 -0
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_PIPS.py +165 -0
  75. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +1 -1
  76. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +29 -12
  77. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +69 -0
  78. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +144 -0
  79. disdrodb/l0/readers/PARSIVEL2/SPAIN/CR1000DL.py +201 -0
  80. disdrodb/l0/readers/PARSIVEL2/SPAIN/LIAISE.py +137 -0
  81. disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +146 -0
  82. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +105 -99
  83. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py +151 -0
  84. disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +31 -14
  85. disdrodb/l0/routines.py +105 -14
  86. disdrodb/l1/__init__.py +5 -0
  87. disdrodb/l1/filters.py +34 -20
  88. disdrodb/l1/processing.py +45 -44
  89. disdrodb/l1/resampling.py +77 -66
  90. disdrodb/l1/routines.py +35 -42
  91. disdrodb/l1_env/routines.py +18 -3
  92. disdrodb/l2/__init__.py +7 -0
  93. disdrodb/l2/empirical_dsd.py +58 -10
  94. disdrodb/l2/event.py +27 -120
  95. disdrodb/l2/processing.py +267 -116
  96. disdrodb/l2/routines.py +618 -254
  97. disdrodb/metadata/standards.py +3 -1
  98. disdrodb/psd/fitting.py +463 -144
  99. disdrodb/psd/models.py +8 -5
  100. disdrodb/routines.py +3 -3
  101. disdrodb/scattering/__init__.py +16 -4
  102. disdrodb/scattering/axis_ratio.py +56 -36
  103. disdrodb/scattering/permittivity.py +486 -0
  104. disdrodb/scattering/routines.py +701 -159
  105. disdrodb/summary/__init__.py +17 -0
  106. disdrodb/summary/routines.py +4120 -0
  107. disdrodb/utils/attrs.py +68 -125
  108. disdrodb/utils/compression.py +30 -1
  109. disdrodb/utils/dask.py +59 -8
  110. disdrodb/utils/dataframe.py +63 -9
  111. disdrodb/utils/directories.py +49 -17
  112. disdrodb/utils/encoding.py +33 -19
  113. disdrodb/utils/logger.py +13 -6
  114. disdrodb/utils/manipulations.py +71 -0
  115. disdrodb/utils/subsetting.py +214 -0
  116. disdrodb/utils/time.py +165 -19
  117. disdrodb/utils/writer.py +20 -7
  118. disdrodb/utils/xarray.py +85 -4
  119. disdrodb/viz/__init__.py +13 -0
  120. disdrodb/viz/plots.py +327 -0
  121. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/METADATA +3 -2
  122. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/RECORD +127 -87
  123. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/entry_points.txt +1 -0
  124. disdrodb/l1/encoding_attrs.py +0 -635
  125. disdrodb/l2/processing_options.py +0 -213
  126. /disdrodb/l0/readers/PARSIVEL/SLOVENIA/{UL_FGG.py → UL.py} +0 -0
  127. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/WHEEL +0 -0
  128. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/licenses/LICENSE +0 -0
  129. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/top_level.txt +0 -0
disdrodb/viz/plots.py CHANGED
@@ -15,3 +15,330 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  # -----------------------------------------------------------------------------.
  """DISDRODB Plotting Tools."""
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import xarray as xr
+ from matplotlib.colors import LogNorm, Normalize
+
+
+ def plot_nd(ds, var="drop_number_concentration", cmap=None, norm=None):
+     """Plot drop number concentration N(D) timeseries."""
+     # Check inputs
+     if var not in ds:
+         raise ValueError(f"{var} is not an xarray Dataset variable!")
+     # Check only time and diameter dimensions are specified
+     # TODO: DIAMETER_DIMENSION, "time"
+
+     # Select N(D)
+     ds_var = ds[[var]].compute()
+
+     # Regularize input
+     ds_var = ds_var.disdrodb.regularize()
+
+     # Set 0 values to np.nan
+     ds_var = ds_var.where(ds_var[var] > 0)
+
+     # Define cmap and norm
+     if cmap is None:
+         cmap = plt.get_cmap("Spectral_r").copy()
+
+     vmin = ds_var[var].min().item()
+     norm = LogNorm(vmin, None) if norm is None else norm
+
+     # Plot N(D)
+     p = ds_var[var].plot.pcolormesh(x="time", norm=norm, cmap=cmap)
+     p.axes.set_title("Drop number concentration (N(D))")
+     p.axes.set_ylabel("Drop diameter (mm)")
+     return p
+
+
+ def normalize_array(arr, method="max"):
+     """Normalize a NumPy array according to the chosen method.
+
+     Parameters
+     ----------
+     arr : np.ndarray
+         Input array.
+     method : str
+         Normalization method. Options:
+         - 'max'   : Divide by the maximum value.
+         - 'minmax': Scale to [0, 1] range.
+         - 'zscore': Standardize to mean 0, std 1.
+         - 'log'   : Apply log10 transform (shifted if min <= 0).
+         - 'none'  : No normalization (return original array).
+
+     Returns
+     -------
+     np.ndarray
+         Normalized array.
+     """
+     arr = np.asarray(arr, dtype=float)
+
+     if method == "max":
+         max_val = np.nanmax(arr)
+         return arr / max_val if max_val != 0 else arr
+
+     if method == "minmax":
+         min_val = np.nanmin(arr)
+         max_val = np.nanmax(arr)
+         return (arr - min_val) / (max_val - min_val) if max_val != min_val else np.zeros_like(arr)
+
+     if method == "zscore":
+         mean_val = np.nanmean(arr)
+         std_val = np.nanstd(arr)
+         return (arr - mean_val) / std_val if std_val != 0 else np.zeros_like(arr)
+
+     if method == "log":
+         min_val = np.nanmin(arr)
+         shifted = arr - min_val + 1e-12  # Shift to avoid log(0) or log of negative
+         return np.log10(shifted)
+
+     if method == "none":
+         return arr
+
+     raise ValueError(f"Unknown normalization method: {method}")
+
+
+ def _np_to_rgba_alpha(arr, cmap="viridis", cmap_norm=None, scaling="linear"):
+     """Convert a numpy array to an RGBA array with alpha based on array value.
+
+     Parameters
+     ----------
+     arr : numpy.ndarray
+         Array of counts or frequencies.
+     cmap : str or Colormap, optional
+         Matplotlib colormap to use for RGB channels.
+     cmap_norm : matplotlib.colors.Normalize, optional
+         Norm used to scale the data before assigning cmap colors.
+         The default is Normalize(vmin, vmax).
+     scaling : str, optional
+         Scaling type for alpha mapping:
+         - "linear"   : min-max normalization
+         - "log"      : logarithmic normalization (positive values only)
+         - "sqrt"     : square-root (power-law with exponent=0.5)
+         - "exp"      : exponential scaling
+         - "quantile" : percentile-based scaling
+         - "none"     : full opacity (alpha=1)
+
+     Returns
+     -------
+     rgba : 3D numpy array (ny, nx, 4)
+         RGBA array.
+     """
+     # Ensure numpy array
+     arr = np.asarray(arr, dtype=float)
+     # Define mask of NaN pixels
+     mask_na = np.isnan(arr)
+     # Retrieve array shape
+     ny, nx = arr.shape
+
+     # Define colormap norm
+     if cmap_norm is None:
+         cmap_norm = Normalize(vmin=np.nanmin(arr), vmax=np.nanmax(arr))
+
+     # Define alpha
+     if scaling == "linear":
+         norm = Normalize(vmin=np.nanmin(arr), vmax=np.nanmax(arr))
+         alpha = norm(arr)
+     elif scaling == "log":
+         vals = np.where(arr > 0, arr, np.nan)  # mask non-positive
+         norm = LogNorm(vmin=np.nanmin(vals), vmax=np.nanmax(vals))
+         alpha = norm(arr)
+         alpha = np.nan_to_num(alpha, nan=0.0)
+     elif scaling == "sqrt":
+         alpha = np.sqrt(np.clip(arr, 0, None) / np.nanmax(arr))
+     elif scaling == "exp":
+         normed = np.clip(arr / np.nanmax(arr), 0, 1)
+         alpha = np.expm1(normed) / np.expm1(1)
+     elif scaling == "quantile":
+         flat = arr.ravel()
+         ranks = np.argsort(np.argsort(flat))  # rankdata without scipy
+         alpha = ranks / (len(flat) - 1)
+         alpha = alpha.reshape(arr.shape)
+     elif scaling == "none":
+         alpha = np.ones_like(arr, dtype=float)
+     else:
+         raise ValueError(f"Unknown scaling type: {scaling}")
+
+     # Map values to colors
+     cmap = plt.get_cmap(cmap).copy()
+     rgba = cmap(cmap_norm(arr))
+
+     # Set alpha channel
+     alpha[mask_na] = 0  # where input was NaN
+     rgba[..., -1] = np.clip(alpha, 0, 1)
+     return rgba
+
+
+ def to_rgba(obj, cmap="viridis", norm=None, scaling="none"):
+     """Map an xarray DataArray (or numpy array) to RGBA with optional alpha-scaling."""
+     input_is_xarray = False
+     if isinstance(obj, xr.DataArray):
+         # Define template for RGBA DataArray
+         da_rgba = obj.copy()
+         da_rgba = da_rgba.expand_dims({"rgba": 4}).transpose(..., "rgba")
+         input_is_xarray = True
+
+         # Extract numpy array
+         obj = obj.to_numpy()
+
+     # Apply transparency
+     arr = _np_to_rgba_alpha(obj, cmap=cmap, cmap_norm=norm, scaling=scaling)
+
+     # Return xarray.DataArray
+     if input_is_xarray:
+         da_rgba.data = arr
+         return da_rgba
+     # Or numpy array otherwise
+     return arr
+
+
+ def max_blend_images(ds_rgb, dim):
+     """Max-blend an RGBA DataArray across a samples dimension."""
+     # Ensure the dimension to blend is in first position
+     ds_rgb = ds_rgb.transpose(dim, ...)
+     # Extract numpy array
+     stack = ds_rgb.data
+     # Extract alpha array
+     alphas = stack[..., 3]  # (N, H, W)
+     # Select the winning RGBA per pixel
+     idx = np.argmax(alphas, axis=0)  # (H, W), index of image with max alpha
+     idx4 = np.repeat(idx[np.newaxis, ..., np.newaxis], 4, axis=-1)  # (1, H, W, 4)
+     out = np.take_along_axis(stack, idx4, axis=0)[0]  # (H, W, 4)
+     # Create output RGBA array
+     da = ds_rgb.isel({dim: 0}).copy()
+     da.data = out
+     return da
+
+
+ def compute_dense_lines(
+     da: xr.DataArray,
+     coord: str,
+     x_bins: list,
+     y_bins: list,
+     normalization="max",
+ ):
+     """
+     Compute a 2D density-of-lines histogram from an xarray.DataArray.
+
+     Parameters
+     ----------
+     da : xarray.DataArray
+         Input data array. One of its dimensions (named by ``coord``) is taken
+         as the horizontal coordinate. All other dimensions are collapsed into
+         “series,” so that each combination of the remaining dimension values
+         produces one 1D line along ``coord``.
+     coord : str
+         The name of the coordinate/dimension of the DataArray to bin over.
+         ``da.coords[coord]`` must be a 1D numeric array (monotonic is recommended).
+     x_bins : array_like of shape (nx+1,)
+         Bin edges to bin the coordinate/dimension.
+         Must be monotonically increasing.
+         The number of x-bins will be ``nx = len(x_bins) - 1``.
+     y_bins : array_like of shape (ny+1,)
+         Bin edges for the DataArray values.
+         Must be monotonically increasing.
+         The number of y-bins will be ``ny = len(y_bins) - 1``.
+     normalization : str, optional
+         If 'none', returns the raw histogram.
+         By default, the function normalizes the histogram by its global maximum ('max').
+         Log-normalization ('log') is also available.
+
+     Returns
+     -------
+     xr.DataArray
+         2D histogram of shape ``(ny, nx)``. Dimensions are ``(da.name, coord)``, where:
+
+         - ``coord``: the bin-center coordinate of ``x_bins`` (length ``nx``)
+         - ``da.name``: the bin-center coordinate of ``y_bins`` (length ``ny``)
+
+         Each element ``out.values[y_i, x_j]`` is the count (or normalized count) of how
+         many “series-values” from ``da`` fell into the rectangular bin
+         ``x_bins[j] ≤ x_value < x_bins[j+1]`` and
+         ``y_bins[i] ≤ data_value < y_bins[i+1]``.
+
+     References
+     ----------
+     Moritz, D., Fisher, D. (2018).
+     Visualizing a Million Time Series with the Density Line Chart.
+     https://doi.org/10.48550/arXiv.1808.06019
+     """
+     # Check DataArray name
+     if da.name is None or da.name == "":
+         raise ValueError("The DataArray must have a name.")
+
+     # Validate x_bins and y_bins
+     x_bins = np.asarray(x_bins)
+     y_bins = np.asarray(y_bins)
+     if x_bins.ndim != 1 or x_bins.size < 2:
+         raise ValueError("`x_bins` must be a 1D array with at least two edges.")
+     if y_bins.ndim != 1 or y_bins.size < 2:
+         raise ValueError("`y_bins` must be a 1D array with at least two edges.")
+     if not np.all(np.diff(x_bins) > 0):
+         raise ValueError("`x_bins` must be strictly increasing.")
+     if not np.all(np.diff(y_bins) > 0):
+         raise ValueError("`y_bins` must be strictly increasing.")
+
+     # Verify that `coord` exists as either a dimension or a coordinate
+     if coord not in (list(da.coords) + list(da.dims)):
+         raise ValueError(f"'{coord}' is not a dimension or coordinate of the DataArray.")
+     if coord not in da.dims:
+         if da[coord].ndim != 1:
+             raise ValueError(f"Coordinate '{coord}' must be 1D. Instead has dimensions {da[coord].dims}")
+         x_dim = da[coord].dims[0]
+     else:
+         x_dim = coord
+
+     # Define the x bin centers
+     x_values = (x_bins[0:-1] + x_bins[1:]) / 2
+
+     # Extract the array (samples, x)
+     other_dims = [d for d in da.dims if d != x_dim]
+     if len(other_dims) == 1:
+         arr = da.transpose(*other_dims, x_dim).to_numpy()
+     else:
+         arr = da.stack({"sample": other_dims}).transpose("sample", x_dim).to_numpy()
+
+     # Define the y bin centers
+     y_center = (y_bins[0:-1] + y_bins[1:]) / 2
+
+     # Prepare the 2D count grid of shape (ny, nx)
+     # - ny corresponds to the values of the timeseries at the nx points
+     nx = len(x_bins) - 1
+     ny = len(y_bins) - 1
+     nsamples = arr.shape[0]
+     grid = np.zeros((ny, nx), dtype=float)
+
+     # For each (series, x-index), find which y-bin it falls into:
+     # - np.searchsorted(y_bins, value) gives the insertion index in y_bins;
+     #   --> subtracting 1 yields the bin index.
+     # If a value is below y_bins[0], searchsorted returns 0, so idx = -1
+     indices = np.searchsorted(y_bins, arr) - 1  # (samples, nx)
+
+     # Assign 1 where a line passes through a bin
+     valid = (indices >= 0) & (indices < ny)
+     s_idx, x_idx = np.nonzero(valid)
+     y_idx = indices[valid]
+     grid_3d = np.zeros((nsamples, ny, nx), dtype=int)
+     grid_3d[s_idx, y_idx, x_idx] = 1
+
+     # Normalize by columns
+     col_sums = grid_3d.sum(axis=1, keepdims=True)
+     col_sums[col_sums == 0] = 1  # Avoid division by zero
+     grid_3d = grid_3d / col_sums
+
+     # Aggregate over samples
+     grid = grid_3d.sum(axis=0)
+
+     # Normalize grid
+     grid = normalize_array(grid, method=normalization)
+
+     # Create DataArray
+     name = da.name
+     out = xr.DataArray(grid, dims=[name, coord], coords={coord: (coord, x_values), name: (name, y_center)})
+
+     # Mask values which are 0 with NaN
+     out = out.where(out > 0)
+
+     # Return 2D histogram
+     return out
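Taken together, `compute_dense_lines`, `to_rgba`, and `max_blend_images` implement the density-line-chart ("DenseLines") workflow referenced in the docstring: bin many timeseries onto a value-vs-coordinate grid, map the counts to RGBA with density-driven transparency, and optionally max-blend several RGBA layers. The sketch below is one plausible way to chain the first two helpers, assuming they are importable from `disdrodb.viz.plots` as defined above; the synthetic `rain_rate` DataArray and the bin choices are illustrative assumptions, not part of this diff.

```python
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr

# Assumption: the helpers are importable from the module shown in the diff above
from disdrodb.viz.plots import compute_dense_lines, to_rgba

# Synthetic stand-in for disdrometer data: 200 noisy timeseries of 288 steps
rng = np.random.default_rng(0)
time = np.arange(288)
values = np.abs(np.sin(time / 40)[None, :] + 0.3 * rng.standard_normal((200, time.size)))
da = xr.DataArray(values, dims=("sample", "time"), coords={"time": time}, name="rain_rate")

# One x bin per time step (the counting step maps column j of the data to x bin j),
# plus 100 value bins spanning the data range
x_bins = np.arange(time.size + 1) - 0.5
y_bins = np.linspace(0, values.max(), 101)

# 2D density-of-lines histogram, normalized by its global maximum
hist = compute_dense_lines(da, coord="time", x_bins=x_bins, y_bins=y_bins, normalization="max")

# Map the histogram to RGBA; sqrt alpha-scaling fades out low-density bins
rgba = to_rgba(hist, cmap="viridis", scaling="sqrt")

# Render the RGBA image with time on the x-axis and the variable value on the y-axis
fig, ax = plt.subplots()
ax.imshow(
    rgba.to_numpy(),
    origin="lower",
    aspect="auto",
    extent=[x_bins[0], x_bins[-1], y_bins[0], y_bins[-1]],
)
ax.set_xlabel("time")
ax.set_ylabel(da.name)
plt.show()
```

`max_blend_images` could then overlay several such RGBA layers (for example, one per station or variable) by keeping, for each pixel, the layer with the highest alpha.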
{disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: disdrodb
- Version: 0.1.1
+ Version: 0.1.3
  Summary: disdrodb provides tools to download, standardize, share and analyze global disdrometer data.
  Author: Gionata Ghiggi
  Project-URL: homepage, https://github.com/ltelab/disdrodb
@@ -33,6 +33,7 @@ Requires-Dist: numpy
  Requires-Dist: scipy
  Requires-Dist: dask[distributed]
  Requires-Dist: xarray
+ Requires-Dist: matplotlib
  Provides-Extra: dev
  Requires-Dist: jupyter; extra == "dev"
  Requires-Dist: pre-commit; extra == "dev"
@@ -190,7 +191,7 @@ disdrodb_run_l2e
  disdrodb_run_l2m
  ```

- ### 💫 Analyze AnalysisReady Products
+ ### 💫 Analyze Analysis-Ready Products

  The software’s `open_dataset` function **lazily** opens all station files of a given product: