disdrodb 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- disdrodb/__init__.py +4 -0
- disdrodb/_version.py +2 -2
- disdrodb/accessor/methods.py +14 -0
- disdrodb/api/checks.py +8 -7
- disdrodb/api/io.py +81 -29
- disdrodb/api/path.py +17 -14
- disdrodb/api/search.py +15 -18
- disdrodb/cli/disdrodb_open_products_options.py +38 -0
- disdrodb/cli/disdrodb_run.py +2 -2
- disdrodb/cli/disdrodb_run_station.py +4 -4
- disdrodb/configs.py +1 -1
- disdrodb/data_transfer/download_data.py +70 -1
- disdrodb/etc/configs/attributes.yaml +62 -8
- disdrodb/etc/configs/encodings.yaml +28 -0
- disdrodb/etc/products/L2M/MODELS/GAMMA_GS_ND_SSE.yaml +8 -0
- disdrodb/etc/products/L2M/MODELS/GAMMA_ML.yaml +1 -1
- disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_LOG_ND_SSE.yaml +8 -0
- disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_ND_SSE.yaml +8 -0
- disdrodb/etc/products/L2M/MODELS/LOGNORMAL_ML.yaml +1 -1
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_LOG_ND_SSE.yaml +8 -0
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_ND_SSE.yaml +8 -0
- disdrodb/etc/products/L2M/global.yaml +4 -4
- disdrodb/fall_velocity/graupel.py +8 -8
- disdrodb/fall_velocity/hail.py +2 -2
- disdrodb/fall_velocity/rain.py +33 -5
- disdrodb/issue/checks.py +1 -1
- disdrodb/l0/l0_reader.py +1 -1
- disdrodb/l0/l0a_processing.py +2 -2
- disdrodb/l0/l0b_nc_processing.py +5 -5
- disdrodb/l0/l0b_processing.py +20 -24
- disdrodb/l0/l0c_processing.py +18 -13
- disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +4 -0
- disdrodb/l0/readers/PARSIVEL2/VIETNAM/IGE_PARSIVEL2.py +239 -0
- disdrodb/l0/template_tools.py +13 -13
- disdrodb/l1/classification.py +10 -6
- disdrodb/l2/empirical_dsd.py +25 -15
- disdrodb/l2/processing.py +32 -14
- disdrodb/metadata/download.py +1 -1
- disdrodb/metadata/geolocation.py +4 -4
- disdrodb/metadata/reader.py +3 -3
- disdrodb/metadata/search.py +10 -8
- disdrodb/psd/__init__.py +4 -0
- disdrodb/psd/fitting.py +2660 -592
- disdrodb/psd/gof_metrics.py +389 -0
- disdrodb/psd/grid_search.py +1066 -0
- disdrodb/psd/models.py +1281 -145
- disdrodb/routines/l2.py +6 -6
- disdrodb/routines/options_validation.py +8 -8
- disdrodb/scattering/axis_ratio.py +70 -2
- disdrodb/scattering/permittivity.py +13 -10
- disdrodb/scattering/routines.py +10 -10
- disdrodb/summary/routines.py +23 -20
- disdrodb/utils/archiving.py +29 -22
- disdrodb/utils/attrs.py +6 -4
- disdrodb/utils/dataframe.py +4 -4
- disdrodb/utils/encoding.py +3 -1
- disdrodb/utils/event.py +9 -9
- disdrodb/utils/logger.py +4 -7
- disdrodb/utils/manipulations.py +2 -2
- disdrodb/utils/subsetting.py +1 -1
- disdrodb/utils/time.py +8 -7
- disdrodb/viz/plots.py +25 -17
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/METADATA +44 -33
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/RECORD +68 -66
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/entry_points.txt +1 -0
- disdrodb/etc/products/L2M/MODELS/GAMMA_GS_ND_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_LOG_ND_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_ND_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_LOG_ND_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_ND_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_R_MAE.yaml +0 -6
- disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_Z_MAE.yaml +0 -6
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/WHEEL +0 -0
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/licenses/LICENSE +0 -0
- {disdrodb-0.5.0.dist-info → disdrodb-0.5.1.dist-info}/top_level.txt +0 -0
disdrodb/routines/l2.py
CHANGED

@@ -514,12 +514,12 @@ def _generate_l2m(
     radar_options = product_options.get("radar_options")

     # Define variables to load
-
-    if "init_method" in
-
+    optimization_settings = l2m_options["optimization_settings"]
+    if "init_method" in optimization_settings and optimization_settings["init_method"] is None:
+        optimization_settings["init_method"] = "None"

-    if
-    init_method =
+    if optimization_settings.get("init_method", "None") != "None":
+        init_method = optimization_settings["init_method"]
         moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
     else:
         moments = ["M1"]

@@ -761,7 +761,7 @@ def run_l2m_station(
     # -----------------------------------------------------------------.
     # Loop over distributions to fit
     # model_name = "GAMMA_ML"
-    # model_name = "
+    # model_name = "LOGNORMAL_GS_ND_SSE"
     # model_options = l2m_options["models"][model_name]
     # Retrieve list of models to fit
     models = global_product_options.pop("models")
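Note: the `_generate_l2m` change above normalizes a null `init_method` to the string "None" and derives which moment variables to load from it. Below is a minimal, standalone sketch of that logic; the example settings dict is hypothetical.

    # Standalone sketch of the init_method handling introduced in _generate_l2m.
    optimization_settings = {"init_method": "M234"}  # hypothetical settings

    # A null init_method is normalized to the string "None"
    if "init_method" in optimization_settings and optimization_settings["init_method"] is None:
        optimization_settings["init_method"] = "None"

    if optimization_settings.get("init_method", "None") != "None":
        init_method = optimization_settings["init_method"]
        # "M234" -> moments M2, M3 and M4, plus M1 which is always loaded
        moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
    else:
        moments = ["M1"]

    print(moments)  # ['M2', 'M3', 'M4', 'M1']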
disdrodb/routines/options_validation.py
CHANGED

@@ -8,7 +8,7 @@ from disdrodb.api.checks import check_folder_partitioning, check_temporal_resolu
 from disdrodb.api.configs import available_sensor_names
 from disdrodb.configs import get_products_configs_dir
 from disdrodb.fall_velocity.rain import check_rain_fall_velocity_model
-from disdrodb.psd.fitting import PSD_MODELS, check_optimization,
+from disdrodb.psd.fitting import PSD_MODELS, check_optimization, check_optimization_settings
 from disdrodb.routines.options import get_l2m_model_settings_files, get_model_options, get_product_options
 from disdrodb.scattering.axis_ratio import check_axis_ratio_model
 from disdrodb.scattering.permittivity import check_permittivity_model

@@ -464,7 +464,7 @@ class L2MModelConfig(CustomBaseModel):

     psd_model: str = Field(..., description="PSD model name")
     optimization: str = Field(..., description="Optimization method")
-
+    optimization_settings: dict[str, Any] = Field(..., description="Optimization-specific parameters")

     @field_validator("psd_model")
     @classmethod

@@ -482,11 +482,11 @@ class L2MModelConfig(CustomBaseModel):
         return check_optimization(optimization)

     @model_validator(mode="after")
-    def
-    """Validate that
+    def validate_optimization_settings(self):
+        """Validate that optimization_settings matches the optimization method."""
         # Use the existing validation function
-
-
+        check_optimization_settings(
+            optimization_settings=self.optimization_settings,
             optimization=self.optimization,
             psd_model=self.psd_model,
         )

@@ -504,7 +504,7 @@ def validate_l2m_model_configs(products_configs_dir: str):

     Raises
     ------
-    ValidationError
+    pydantic.ValidationError
         If any L2M model configuration is invalid.
     """
     # Get all L2M model configuration files

@@ -622,7 +622,7 @@ def validate_all_product_yaml_files(products_configs_dir):

     Raises
     ------
-    ValidationError
+    pydantic.ValidationError
         If any YAML file validation fails with detailed information.
     """
     # Define product validators mapping
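The new `optimization_settings` field is checked with a pydantic `model_validator(mode="after")`. The sketch below only illustrates that pattern: the `check_optimization_settings` stand-in and the example values are hypothetical placeholders for the real disdrodb helpers.

    from typing import Any
    from pydantic import BaseModel, Field, model_validator

    # Hypothetical stand-in for disdrodb.psd.fitting.check_optimization_settings,
    # used here only so the sketch runs on its own.
    def check_optimization_settings(optimization_settings, optimization, psd_model):
        if not isinstance(optimization_settings, dict):
            raise ValueError("optimization_settings must be a dictionary.")

    class L2MModelConfigSketch(BaseModel):
        psd_model: str = Field(..., description="PSD model name")
        optimization: str = Field(..., description="Optimization method")
        optimization_settings: dict[str, Any] = Field(..., description="Optimization-specific parameters")

        @model_validator(mode="after")
        def validate_optimization_settings(self):
            """Validate that optimization_settings matches the optimization method."""
            check_optimization_settings(
                optimization_settings=self.optimization_settings,
                optimization=self.optimization,
                psd_model=self.psd_model,
            )
            return self

    # Example instantiation with hypothetical values
    cfg = L2MModelConfigSketch(psd_model="GAMMA", optimization="GS", optimization_settings={})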
disdrodb/scattering/axis_ratio.py
CHANGED

@@ -276,6 +276,72 @@ def get_axis_ratio_thurai_2007(diameter):
     return axis_ratio


+def get_axis_ratio_chang_2009(diameter):
+    """
+    Compute the axis ratio of raindrops using the Chang et al. (2009) model.
+
+    Parameters
+    ----------
+    diameter : array-like
+        Diameter of the raindrops in millimeters.
+
+    Returns
+    -------
+    axis_ratio : array-like
+        Calculated axis ratios corresponding to the input diameters.
+
+    References
+    ----------
+    Chang, W. Y., T. C. C. Wang, and P. L. Lin, 2009.
+    Characteristics of the Raindrop Size Distribution and Drop Shape Relation
+    in Typhoon Systems in the Western Pacific from the 2D Video Disdrometer and NCU C-Band Polarimetric Radar.
+    Journal of Atmospheric and Oceanic Technology, 26, 1973-1993.
+    https://doi.org/10.1175/2009JTECHA1236.1, 2009.
+    """
+    axis_ratio = (
+        0.98287 + 4.2514e-2 * diameter - 3.3439e-2 * diameter**2 + 4.3402e-3 * diameter**3 - 1.9223e-4 * diameter**4
+    )
+    return axis_ratio
+
+
+# def get_axis_ratio_wen_2018(diameter):
+#     """
+#     Compute the axis ratio of raindrops using the Wen et al. (2018) relationship.
+#
+#     Parameters
+#     ----------
+#     diameter : array-like
+#         Diameter of the raindrops in millimeters.
+#
+#     Returns
+#     -------
+#     axis_ratio : array-like
+#         Calculated axis ratios corresponding to the input diameters.
+#
+#     References
+#     ----------
+#     Wen, L., Zhao, K., Chen, G., Wang, M., Zhou, B., Huang, H., Hu, D., Lee, W., and Hu, H., 2018.
+#     Drop Size Distribution Characteristics of Seven Typhoons in China.
+#     Journal of Geophysical Research: Atmospheres, 123, 6529-6548.
+#     https://doi.org/10.1029/2017JD027950
+#
+#     Notes
+#     ------
+#     The polynomial formula presented in Wen et al., 2018
+#     does not correspond to the illustration in figure 9 of the manuscript.
+#     The resulting axis-ratio parameterization appears to be incorrect and
+#     therefore is not made available in disdrodb.
+#     """
+#     axis_ratio = (
+#         0.9946
+#         + 2.745e-2 * diameter
+#         - 1.868e-2 * diameter**2
+#         + 1.159e-3 * diameter**3
+#         - 2.143e-4 * diameter**4
+#     )
+#     return axis_ratio
+
+
 AXIS_RATIO_MODELS = {
     "Thurai2005": get_axis_ratio_thurai_2005,
     "Thurai2007": get_axis_ratio_thurai_2007,

@@ -284,6 +350,8 @@ AXIS_RATIO_MODELS = {
     "Pruppacher1970": get_axis_ratio_pruppacher_1970,
     "Beard1987": get_axis_ratio_beard_1987,
     "Andsager1999": get_axis_ratio_andsager_1999,
+    "Chang2009": get_axis_ratio_chang_2009,
+    # "Wen2018": get_axis_ratio_wen_2018,
 }


@@ -308,7 +376,7 @@ def get_axis_ratio_model(model):
     model : str
         The model to use for calculating the axis ratio. Available models are:
         'Thurai2005', 'Thurai2007', 'Parsivel', 'Brandes2002',
-        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
+        'Pruppacher1970', 'Beard1987', 'Andsager1999', 'Chang2009'.

     Returns
     -------

@@ -340,7 +408,7 @@ def get_axis_ratio(diameter, model):
     model : str
         The axis ratio model to use for calculating the axis ratio. Available models are:
         'Thurai2005', 'Thurai2007', 'Parsivel', 'Brandes2002',
-        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
+        'Pruppacher1970', 'Beard1987', 'Andsager1999', 'Chang2009'.

     Returns
     -------
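The newly added Chang et al. (2009) axis-ratio polynomial can be evaluated directly; the example below reproduces the coefficients from the diff above on a few drop diameters (in disdrodb it is exposed through AXIS_RATIO_MODELS["Chang2009"]).

    import numpy as np

    # Chang et al. (2009) drop axis-ratio polynomial, coefficients as in the diff above.
    def get_axis_ratio_chang_2009(diameter):
        return (
            0.98287
            + 4.2514e-2 * diameter
            - 3.3439e-2 * diameter**2
            + 4.3402e-3 * diameter**3
            - 1.9223e-4 * diameter**4
        )

    diameters = np.array([0.5, 1.0, 2.0, 4.0, 6.0])  # mm
    print(get_axis_ratio_chang_2009(diameters))  # axis ratio decreases as larger drops flatten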
disdrodb/scattering/permittivity.py
CHANGED

@@ -19,6 +19,8 @@
 import numpy as np
 import xarray as xr

+from disdrodb.utils.warnings import suppress_warnings
+
 # Definitions
 # - Complex_refractive_index: m
 # - Complex dielectric constant = complex relative permittivity: eps

@@ -203,7 +205,7 @@ def get_rain_refractive_index_liebe1991_single(temperature, frequency):

     Notes
     -----
-
+    The code of this function has been derived from RainSense code of Thomas van Leth available at
     https://github.com/temperatureCvanLeth/RainSense/blob/master/rainsense/scattering.py#L149

     References

@@ -256,9 +258,9 @@ def get_rain_refractive_index_liebe1991(temperature, frequency):

     Notes
     -----
-
+    The code of this function has been derived from pyradsim code of Daniel Wolfensberger available at
     https://github.com/wolfidan/pyradsim/blob/master/pyradsim/permittivity_models.py#L37
-
+    The Liebe et al. (1991) replaces the work of Ray et al. (1972).

     References
     ----------

@@ -327,8 +329,8 @@ def get_rain_refractive_index_ellison2007(temperature, frequency):

     Notes
     -----
-
-
+    The model is designed to operate only up to 1000 GHz and temperature ranging from 0 degC to 100 degC.
+    The code of this function has been derived from Davide Ori raincoat code available at
     https://github.com/OPTIMICe-team/raincoat/blob/master/raincoat/scatTable/water.py#L160

     References

@@ -380,7 +382,8 @@ def get_rain_refractive_index_ellison2007(temperature, frequency):
     eps = eps_real + 1j * eps_imag

     # Compute the refractive index
-
+    with suppress_warnings():
+        m = np.sqrt(eps)  # on windows sqrt of eps=np.complex128(nan+nanj) raise warning
     return m


@@ -409,7 +412,7 @@ def get_rain_refractive_index_turner2016(frequency, temperature):

     Notes
     -----
-
+    The code of this function has been checked against Joseph Hardin pyDSD and Davide Ori raincoat codes
     available at:
     https://github.com/josephhardinee/PyDSD/blob/main/pydsd/utility/dielectric.py#L36
     https://github.com/OPTIMICe-team/raincoat/blob/master/raincoat/scatTable/water.py#L54

@@ -478,7 +481,7 @@ def get_rain_refractive_index_turner2016(frequency, temperature):

 ####----------------------------------------------------------------------------------------
 def get_rayleigh_dielectric_factor(m):
-    """Compute the Rayleigh dielectric factor
+    r"""Compute the Rayleigh dielectric factor :math:`|K|^2` from the complex refractive index.

     The magnitude squared of the complex dielectric constant factor for liquid water,
     relative to the surrounding medium (typically air).

@@ -493,9 +496,9 @@ def get_rayleigh_dielectric_factor(m):
     Returns
     -------
     float
-        Dielectric factor
+        Dielectric factor :math:`|K|^2` used in Rayleigh scattering.
         Often also called the radar dieletric factor.
-        In pytmatrix, correspond to the Kw_sqr argument of the Scatterer object.
+        In pytmatrix, correspond to the ``Kw_sqr`` argument of the Scatterer object.
     """
     eps = m**2
     K_complex = (eps - 1.0) / (eps + 2.0)
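The complex square root is now wrapped in `suppress_warnings` because, per the code comment above, `np.sqrt` of a NaN permittivity can emit a RuntimeWarning on some platforms. Below is a minimal stand-in sketch; the actual `disdrodb.utils.warnings.suppress_warnings` implementation may differ.

    import warnings
    from contextlib import contextmanager

    import numpy as np

    # Hypothetical stand-in for disdrodb.utils.warnings.suppress_warnings
    # (assumed to be a simple catch-and-ignore context manager).
    @contextmanager
    def suppress_warnings():
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            yield

    eps = np.complex128(complex(np.nan, np.nan))  # permittivity with missing data
    with suppress_warnings():
        m = np.sqrt(eps)  # may emit "invalid value encountered in sqrt" otherwise
    print(m)  # (nan+nanj)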
disdrodb/scattering/routines.py
CHANGED

@@ -197,7 +197,7 @@ def initialize_scatterer(
         Radar elevation angle in degrees.
         Specify 90 degrees for vertically pointing radars.
         The default is 0 degrees.
-    scattering_table_dir : str or Path, optional
+    scattering_table_dir : str or pathlib.Path, optional
         Directory path where T-Matrix scattering tables are stored. If None, the default
         location will be used.
     verbose: bool

@@ -205,7 +205,7 @@ def initialize_scatterer(

     Returns
     -------
-
+    pytmatrix.Scatterer
         A scatterer object with the PSD integrator configured and scattering
         table loaded or generated.
     """

@@ -303,7 +303,7 @@ def calculate_scatterer(

     Returns
     -------
-
+    pytmatrix.Scatterer
         A scatterer object with the PSD integrator configured and scattering
         table loaded or generated.
     """

@@ -369,7 +369,7 @@ def load_scatterer(
         Radar elevation angle in degrees.
         Specify 90 degrees for vertically pointing radars.
         The default is 0 degrees.
-    scattering_table_dir : str or Path, optional
+    scattering_table_dir : str or pathlib.Path, optional
         Directory path where T-Matrix scattering tables are stored. If None, the default
         location will be used.
     verbose: bool

@@ -377,7 +377,7 @@ def load_scatterer(

     Returns
     -------
-
+    pytmatrix.Scatterer
         A scatterer object with the PSD integrator configured and scattering
         table loaded or generated.
     """

@@ -549,7 +549,7 @@ def _estimate_empirical_radar_parameters(
     scatterer,
 ):
     # Assign PSD model to the scatterer object
-    scatterer.psd = BinnedPSD(bin_edges, drop_number_concentration)
+    scatterer.psd = BinnedPSD(bin_edges, np.asarray(drop_number_concentration))

     # Get radar variables
     return _try_compute_radar_variables(scatterer)

@@ -562,7 +562,7 @@ def _estimate_model_radar_parameters(
     scatterer,
 ):
     # Assign PSD model to the scatterer object
-    parameters = dict(zip(psd_parameters_names, parameters, strict=True))
+    parameters = dict(zip(psd_parameters_names, np.asarray(parameters), strict=True))
     scatterer.psd = create_psd(psd_model, parameters)

     # Get radar variables

@@ -919,12 +919,12 @@ def get_radar_parameters(
     ----------
     ds : xarray.Dataset
         Dataset containing the drop number concentration variable.
-    frequency : str, float, or list of str
+    frequency : str, float, or list of str or float, optional
         Frequencies in GHz for which to compute the radar parameters.
         Alternatively, also strings can be used to specify common radar frequencies.
         If ``None``, the common radar frequencies will be used.
         See ``disdrodb.scattering.available_radar_bands()``.
-    num_points: int or
+    num_points: int or list of int, optional
         Number of bins into which discretize the PSD.
     diameter_max : float or list of float, optional
         Maximum diameter. The default value is 8 mm.

@@ -933,7 +933,7 @@ def get_radar_parameters(
     axis_ratio_model : str or list of str, optional
         Models to compute the axis ratio. The default model is ``Thurai2007``.
         See available models with ``disdrodb.scattering.available_axis_ratio_models()``.
-    permittivity_model : str
+    permittivity_model : str or list of str, optional
         Permittivity model to use to compute the refractive index and the
         rayleigh_dielectric_factor. The default is ``Turner2016``.
         See available models with ``disdrodb.scattering.available_permittivity_models()``.
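The `np.asarray(...)` calls added above presumably ensure that plain NumPy arrays, rather than xarray wrappers, reach the pytmatrix PSD objects. A small illustration of the conversion (the exact objects passed inside disdrodb may differ):

    import numpy as np
    import xarray as xr

    # Values pulled from an xarray.Dataset arrive as DataArray objects;
    # np.asarray returns the underlying NumPy array.
    drop_number_concentration = xr.DataArray(np.array([120.0, 85.0, 40.0, 10.0]))
    values = np.asarray(drop_number_concentration)
    print(type(values), values)  # <class 'numpy.ndarray'> [120.  85.  40.  10.]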
disdrodb/summary/routines.py
CHANGED

@@ -84,7 +84,7 @@ def save_table_to_pdf(

     Parameters
     ----------
-    df :
+    df : pandas.DataFrame
         The data to render.
     filepath : str
         File path where write the final PDF (e.g. '<...>/table.pdf').

@@ -93,8 +93,9 @@ def save_table_to_pdf(
     fontsize : str, optional
         LaTeX font-size command to wrap the table (e.g. '\\small').
         The default is '\\tiny'.
-    orientation :
-        Page orientation.
+    orientation : str
+        Page orientation. Allowed values are 'portrait' and 'landscape'.
+        If 'landscape', the table will be laid out horizontally.
         The default is 'landscape'.
     """
     # Export table to LaTeX

@@ -500,11 +501,11 @@ def fit_powerlaw(x, y, xbins, quantile=0.5, min_counts=10, x_in_db=False, use_ra

     Parameters
     ----------
-    x :
+    x : array-like
         Independent variable values. Must be positive and finite after filtering.
-    y :
+    y : array-like
         Dependent variable values. Must be positive and finite after filtering.
-    xbins :
+    xbins : array-like
         Bin edges for grouping ``x`` values (passed to ``pandas.cut``).
     quantile : float, optional
         Quantile of ``y`` to compute in each bin (between 0 and 1).

@@ -642,7 +643,7 @@ def predict_from_powerlaw(x, a, b):

     Parameters
     ----------
-    x :
+    x : array-like
         Independent variable values.
     a : float
         Power-law coefficient.

@@ -651,7 +652,7 @@ def predict_from_powerlaw(x, a, b):

     Returns
     -------
-    y : ndarray
+    y : numpy.ndarray
         Predicted dependent variable values.

     Notes

@@ -697,7 +698,7 @@ def predict_from_inverse_powerlaw(x, a, b):

     Parameters
     ----------
-    x :
+    x : array-like
         Values of ``x`` (independent variable in the original power law).
     a : float
         Power-law coefficient of the inverse power-law model.

@@ -706,7 +707,7 @@ def predict_from_inverse_powerlaw(x, a, b):

     Returns
     -------
-    y : ndarray
+    y : numpy.ndarray
         Predicted dependent variable values.
     """
     return (x ** (1 / b)) / (a ** (1 / b))

@@ -1726,14 +1727,14 @@ def plot_dmax_relationships(df, diameter_bin_edges, dmax="Dmax", diameter_max=10

     Parameters
     ----------
-    df : DataFrame
+    df : pandas.DataFrame
         Input dataframe containing the precipitation data
-    dmax : str,
-        Column name for maximum diameter
-    vmax : float,
-        Maximum value for Dmax axis limits
-    dpi : int,
-        Resolution for the figure
+    dmax : str, optional
+        Column name for maximum diameter. Default is 'Dmax'.
+    vmax : float, optional
+        Maximum value for Dmax axis limits. Default is 10 mm.
+    dpi : int, optional
+        Resolution for the figure. The default is 300.
     """
     # Compute 2D histograms
     # - Dmax-R

@@ -3739,12 +3740,14 @@ def define_filename(prefix, extension, data_source, campaign_name, station_name,
     """Define filename for summary files."""
     if extension in ["png", "jpeg"]:
         filename = f"Figure.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
-
+    elif extension in ["csv", "pdf", "yaml", "yml"]:
         filename = f"Table.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
-
+    elif extension in ["nc"]:
         filename = f"Dataset.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
-
+    elif extension in ["parquet"]:
         filename = f"Dataframe.{prefix}.{data_source}.{campaign_name}.{station_name}.{temporal_resolution}.{extension}"
+    else:
+        raise NotImplementedError(f"Standardized filename not implemented for extension {extension}.")
     return filename


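The `define_filename` change above makes the extension branches explicit and adds a fallback error. The sketch below reduces it to the extension-to-prefix dispatch; the full function also interpolates the data source, campaign, station and temporal resolution into the filename.

    # Reduced sketch of the extension dispatch shown in the diff above.
    def filename_kind(extension):
        if extension in ["png", "jpeg"]:
            return "Figure"
        if extension in ["csv", "pdf", "yaml", "yml"]:
            return "Table"
        if extension in ["nc"]:
            return "Dataset"
        if extension in ["parquet"]:
            return "Dataframe"
        raise NotImplementedError(f"Standardized filename not implemented for extension {extension}.")

    print(filename_kind("parquet"))  # Dataframe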
disdrodb/utils/archiving.py
CHANGED

@@ -81,7 +81,7 @@ def generate_time_blocks(
     # Mapping from our custom freq to pandas frequency codes
     freq_map = {
         "hour": "h",
-        "day": "
+        "day": "D",
         "month": "M",
         "quarter": "Q",
         "year": "Y",

@@ -203,12 +203,13 @@ def identify_time_partitions(start_times, end_times, freq: str) -> list[dict]:

     Parameters
     ----------
-    start_times : numpy.ndarray
-        Array of inclusive start times for each file.
-    end_times : numpy.ndarray
-        Array of inclusive end times for each file.
-    freq :
+    start_times : numpy.ndarray
+        Array of inclusive start times in datetime64[s] format for each file.
+    end_times : numpy.ndarray
+        Array of inclusive end times in datetime64[s] format for each file.
+    freq : str
         Frequency determining the granularity of candidate blocks.
+        Allowed values are {'none', 'hour', 'day', 'month', 'quarter', 'season', 'year'}.
         See `generate_time_blocks` for more details.

     Returns

@@ -252,7 +253,9 @@ def define_temporal_partitions(filepaths, strategy, parallel, strategy_options):
         List of files paths to be processed

     strategy : str
-
+        Partitioning strategy to apply.
+
+        Supported values are:

         - ``'time_block'`` defines fixed time intervals (e.g. monthly) covering input files.
         - ``'event'`` detect clusters of precipitation ("events").

@@ -267,42 +270,46 @@ def define_temporal_partitions(filepaths, strategy, parallel, strategy_options):

         - ``freq``: Time unit for blocks. One of {'year', 'season', 'month', 'day'}.

-        See identify_time_partitions for more information.
+        See the ``identify_time_partitions`` function for more information.

         If ``strategy == 'event'``, supported options are:
+
         - ``variable`` : str
-
+            Name of the variable to use to apply the event detection.
         - ``detection_threshold`` : int
-
+            Minimum number of drops to consider a timestep.
         - ``neighbor_min_size`` : int
-
+            Minimum cluster size for merging neighboring events.
         - ``neighbor_time_interval`` : str
-
+            Time window (e.g. "5MIN") to merge adjacent clusters.
         - ``event_max_time_gap`` : str
-
+            Maximum allowed gap (e.g. "6H") within a single event.
         - ``event_min_duration`` : str
-
+            Minimum total duration (e.g. "5MIN") of an event.
         - ``event_min_size`` : int
-
+            Minimum number of records in an event.

-        See identify_events for more information.
+        See the ``identify_events`` function for more information.

     Returns
     -------
     list
         A list of dictionaries, each containing:

-        - ``start_time
+        - ``start_time``: numpy.datetime64[s]
            Inclusive start of an event or time block.
-        - ``end_time
+        - ``end_time``: numpy.datetime64[s]
            Inclusive end of an event or time block.

     Notes
     -----
-
-
-
-
+    The ``'event'`` strategy requires loading data into memory to identify clusters.
+
+    The ``'time_block'`` strategy can operate on metadata alone, without full data loading.
+
+    The ``'event'`` strategy implicitly performs data selection on which files to process !
+
+    The ``'time_block'`` strategy does not performs data selection on which files to process !
     """
     if strategy not in ["time_block", "event"]:
         raise ValueError(f"Unknown strategy: {strategy!r}. Must be 'time_block' or 'event'.")
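The `freq_map` fix matters because pandas uses "D" as its calendar-day frequency code. A short illustration of building daily time blocks with `pandas.period_range` (the exact block construction inside `generate_time_blocks` may differ):

    import pandas as pd

    # Custom freq names mapped to pandas frequency codes, as in the diff above.
    freq_map = {"hour": "h", "day": "D", "month": "M", "quarter": "Q", "year": "Y"}

    # Daily blocks covering a short interval; each block keeps its inclusive start/end.
    periods = pd.period_range("2024-01-30", "2024-02-02", freq=freq_map["day"])
    for p in periods:
        print(p.start_time, "->", p.end_time)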
disdrodb/utils/attrs.py
CHANGED

@@ -20,6 +20,7 @@ import os

 from disdrodb.api.checks import get_current_utc_time
 from disdrodb.constants import ARCHIVE_VERSION, CONVENTIONS, COORDINATES, SOFTWARE_VERSION
+from disdrodb.utils.warnings import suppress_warnings
 from disdrodb.utils.yaml import read_yaml

 ####---------------------------------------------------------------------.

@@ -68,14 +69,14 @@ def update_disdrodb_attrs(ds, product: str):

     Parameters
     ----------
-    ds : xarray
+    ds : xarray.Dataset
         Dataset
     product: str
         DISDRODB product.

     Returns
     -------
-    xarray
+    xarray.Dataset
         Dataset.
     """
     attrs = ds.attrs.copy()

@@ -96,7 +97,8 @@ def update_disdrodb_attrs(ds, product: str):
     if "time" in ds.dims:
         encoding = ds["time"].encoding
         ds["time"] = ds["time"].dt.floor("s")  # ensure no sub-second values
-
+        with suppress_warnings():
+            ds["time"] = ds["time"].astype("datetime64[s]")
         ds["time"].encoding = encoding  # otherwise time encoding get lost !

     attrs["time_coverage_start"] = str(ds["time"].data[0])

@@ -136,7 +138,7 @@ def set_disdrodb_attrs(ds, product: str):

     Returns
     -------
-    xarray
+    xarray.Dataset
         Dataset.
     """
     # Add dataset conventions
disdrodb/utils/dataframe.py
CHANGED

@@ -39,7 +39,7 @@ def log_arange(start, stop, log_step=0.1, base=10):

     Returns
     -------
-
+    numpy.ndarray
         Array of values spaced in log scale.
     """
     if start <= 0 or stop <= 0:

@@ -69,7 +69,7 @@ def log_linspace(start, stop, n_bins, base=10):

     Returns
     -------
-
+    numpy.ndarray
         Array of values spaced evenly in log space.
     """
     if start <= 0 or stop <= 0:

@@ -146,7 +146,7 @@ def compute_1d_histogram(df, column, variables=None, bins=10, labels=None, prefi
     full_index = pd.Index(intervals, name=f"{column}_binned")

     # Define grouping object
-    df_grouped = df.groupby([f"{column}_binned"], observed=
+    df_grouped = df.groupby([f"{column}_binned"], observed=True)

     # Compute statistics for specified variables
     variables_stats = []

@@ -311,7 +311,7 @@ def compute_2d_histogram(
     full_index = pd.MultiIndex.from_product([x_intervals, y_intervals], names=[f"{x}_binned", f"{y}_binned"])

     # Define grouping object
-    df_grouped = df.groupby([f"{x}_binned", f"{y}_binned"], observed=
+    df_grouped = df.groupby([f"{x}_binned", f"{y}_binned"], observed=True)

     # Compute statistics for specified variables
     variables_stats = []
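Passing `observed=True` to the groupby calls above keeps only the categorical bins produced by `pandas.cut` that actually contain data, which also matches the default pandas is moving to. A small illustration:

    import pandas as pd

    df = pd.DataFrame({"D": [0.4, 0.6, 2.5], "R": [1.0, 2.0, 10.0]})
    df["D_binned"] = pd.cut(df["D"], bins=[0, 1, 2, 3, 4])

    # With observed=True, empty bins such as (1, 2] and (3, 4] are dropped.
    df_grouped = df.groupby(["D_binned"], observed=True)
    print(df_grouped["R"].mean())  # rows only for (0, 1] and (2, 3]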
disdrodb/utils/encoding.py
CHANGED

@@ -21,6 +21,7 @@ import os
 import numpy as np
 import xarray as xr

+from disdrodb.utils.warnings import suppress_warnings
 from disdrodb.utils.yaml import read_yaml

 EPOCH = "seconds since 1970-01-01 00:00:00"

@@ -67,7 +68,8 @@ def set_encodings(ds: xr.Dataset, encodings_dict: dict) -> xr.Dataset:
     # Set time encoding
     if "time" in ds:
         ds["time"] = ds["time"].dt.floor("s")  # ensure no sub-second values
-
+        with suppress_warnings():
+            ds["time"] = ds["time"].astype("datetime64[s]")
         ds["time"].encoding.update(get_time_encoding())

     # Set the variable encodings
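Both attrs.py and encoding.py now floor timestamps to the second and cast the time coordinate to datetime64[s] inside `suppress_warnings`, since the cast can emit a precision warning on some setups. A minimal illustration of the time handling (warning suppression omitted; exact dtype behaviour depends on the xarray/pandas versions installed):

    import pandas as pd
    import xarray as xr

    ds = xr.Dataset(coords={"time": pd.to_datetime(["2024-01-01 00:00:00.250",
                                                    "2024-01-01 00:00:30.750"])})
    ds["time"] = ds["time"].dt.floor("s")            # ensure no sub-second values
    ds["time"] = ds["time"].astype("datetime64[s]")  # store second-precision times
    print(ds["time"].values)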