disdrodb 0.1.4__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- disdrodb/_version.py +2 -2
- disdrodb/api/create_directories.py +0 -2
- disdrodb/cli/disdrodb_create_summary.py +10 -0
- disdrodb/cli/disdrodb_create_summary_station.py +10 -0
- disdrodb/constants.py +1 -1
- disdrodb/etc/products/L1/global.yaml +1 -1
- disdrodb/etc/products/L2E/5MIN.yaml +1 -0
- disdrodb/etc/products/L2E/global.yaml +1 -1
- disdrodb/etc/products/L2M/GAMMA_GS_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/GAMMA_ML.yaml +1 -1
- disdrodb/etc/products/L2M/LOGNORMAL_GS_LOG_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/LOGNORMAL_GS_ND_MAE.yaml +6 -0
- disdrodb/etc/products/L2M/LOGNORMAL_ML.yaml +8 -0
- disdrodb/etc/products/L2M/global.yaml +11 -3
- disdrodb/l0/check_configs.py +49 -16
- disdrodb/l0/configs/LPM/l0a_encodings.yml +2 -2
- disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +2 -2
- disdrodb/l0/configs/LPM/l0b_encodings.yml +2 -2
- disdrodb/l0/configs/LPM/raw_data_format.yml +2 -2
- disdrodb/l0/configs/PWS100/l0b_encodings.yml +1 -0
- disdrodb/l0/configs/SWS250/bins_diameter.yml +108 -0
- disdrodb/l0/configs/SWS250/bins_velocity.yml +83 -0
- disdrodb/l0/configs/SWS250/l0a_encodings.yml +18 -0
- disdrodb/l0/configs/SWS250/l0b_cf_attrs.yml +72 -0
- disdrodb/l0/configs/SWS250/l0b_encodings.yml +155 -0
- disdrodb/l0/configs/SWS250/raw_data_format.yml +148 -0
- disdrodb/l0/l0b_processing.py +70 -15
- disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +1 -1
- disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +2 -2
- disdrodb/l0/readers/LPM/BELGIUM/ULIEGE.py +256 -0
- disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +2 -2
- disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +2 -2
- disdrodb/l0/readers/LPM/GERMANY/DWD.py +491 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +2 -2
- disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
- disdrodb/l0/readers/LPM/KIT/CHWALA.py +2 -2
- disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +107 -12
- disdrodb/l0/readers/LPM/SLOVENIA/UL.py +3 -3
- disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +2 -2
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +5 -14
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +5 -14
- disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL.py +117 -8
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +10 -14
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +10 -14
- disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +8 -14
- disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +382 -0
- disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +4 -0
- disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +1 -1
- disdrodb/l0/readers/PARSIVEL2/GREECE/NOA.py +127 -0
- disdrodb/l0/readers/PARSIVEL2/ITALY/HYDROX.py +239 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +5 -11
- disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +4 -17
- disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +5 -14
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +10 -13
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +10 -13
- disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/PANGASA.py +232 -0
- disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +6 -18
- disdrodb/l0/readers/PARSIVEL2/SPAIN/GRANADA.py +120 -0
- disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +7 -25
- disdrodb/l0/readers/PWS100/AUSTRIA/HOAL.py +321 -0
- disdrodb/l0/readers/SW250/BELGIUM/KMI.py +239 -0
- disdrodb/l1/beard_model.py +31 -129
- disdrodb/l1/fall_velocity.py +136 -83
- disdrodb/l1/filters.py +25 -28
- disdrodb/l1/processing.py +11 -13
- disdrodb/l1_env/routines.py +46 -17
- disdrodb/l2/empirical_dsd.py +6 -0
- disdrodb/l2/processing.py +2 -2
- disdrodb/metadata/geolocation.py +0 -2
- disdrodb/psd/fitting.py +16 -13
- disdrodb/routines/l2.py +35 -23
- disdrodb/routines/wrappers.py +5 -0
- disdrodb/scattering/axis_ratio.py +90 -84
- disdrodb/scattering/permittivity.py +6 -0
- disdrodb/summary/routines.py +38 -12
- disdrodb/utils/attrs.py +2 -0
- disdrodb/utils/encoding.py +5 -0
- disdrodb/utils/time.py +2 -2
- disdrodb/viz/plots.py +24 -1
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/METADATA +2 -1
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/RECORD +85 -65
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/WHEEL +0 -0
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/entry_points.txt +0 -0
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/top_level.txt +0 -0
disdrodb/psd/fitting.py
CHANGED
@@ -23,7 +23,7 @@ from scipy.optimize import minimize
 from scipy.special import gamma, gammaln  # Regularized lower incomplete gamma function

 from disdrodb.constants import DIAMETER_DIMENSION
-from disdrodb.l1.fall_velocity import
+from disdrodb.l1.fall_velocity import get_raindrop_fall_velocity_from_ds
 from disdrodb.l2.empirical_dsd import (
     get_median_volume_drop_diameter,
     get_moment,
@@ -607,7 +607,7 @@ def estimate_gamma_parameters(

     """
     # Define initial guess for parameters
-    a = mu + 1  # (mu = a-1, a = mu+1)
+    a = mu + 1  # (mu = a-1, a = mu+1) (a > 0 --> mu=-1)
     scale = 1 / Lambda
     initial_params = [a, scale]

@@ -1208,13 +1208,13 @@ def apply_gamma_gs(
 ):
     """Estimate GammaPSD model parameters using Grid Search."""
     # Define parameters bounds
-    mu_bounds = (
-    lambda_bounds = (0
+    mu_bounds = (-1, 40)
+    lambda_bounds = (0, 60)

     # Define initial set of parameters
-    mu_step = 0.
+    mu_step = 0.25
     lambda_step = 0.5
-    mu_values = np.arange(0
+    mu_values = np.arange(0, 40, step=mu_step)
     lambda_values = np.arange(0, 60, step=lambda_step)

     # First round of GS
@@ -1304,15 +1304,17 @@ def apply_lognormal_gs(
     """Estimate LognormalPSD model parameters using Grid Search."""
     # Define parameters bounds
     sigma_bounds = (0, np.inf)  # > 0
-    scale_bounds = (0
+    scale_bounds = (0, np.inf)  # > 0
     # mu_bounds = (- np.inf, np.inf) # mu = np.log(scale)

     # Define initial set of parameters
+    # --> Typically sigma between 0 and 3
+    # --> Typically mu between -2 and 2
     scale_step = 0.2
     sigma_step = 0.2
-    scale_values = np.arange(
-    mu_values = np.log(scale_values)
-    sigma_values = np.arange(0,
+    scale_values = np.arange(scale_step, 20, step=scale_step)
+    mu_values = np.log(scale_values)
+    sigma_values = np.arange(0, 3, step=sigma_step)

     # First round of GS
     Nt, mu, sigma = _apply_lognormal_gs(
@@ -1333,7 +1335,8 @@ def apply_lognormal_gs(
     # Second round of GS
     sigma_values = define_param_range(sigma, sigma_step, bounds=sigma_bounds)
     scale_values = define_param_range(np.exp(mu), scale_step, bounds=scale_bounds)
-
+    with suppress_warnings():
+        mu_values = np.log(scale_values)
     Nt, mu, sigma = _apply_lognormal_gs(
         mu_values=mu_values,
         sigma_values=sigma_values,
@@ -1365,7 +1368,7 @@ def apply_normalized_gamma_gs(
 ):
     """Estimate NormalizedGammaPSD model parameters using Grid Search."""
     # Define set of mu values
-    mu_arr = np.arange(
+    mu_arr = np.arange(-4, 30, step=0.01)

     # Perform grid search
     with suppress_warnings():
@@ -2353,7 +2356,7 @@ def get_gs_parameters(ds, psd_model, target="ND", transformation="log", error_or

     # Check fall velocity is available if target R
     if "fall_velocity" not in ds:
-        ds["fall_velocity"] =
+        ds["fall_velocity"] = get_raindrop_fall_velocity_from_ds(ds)

     # Retrieve estimation function
     func = OPTIMIZATION_ROUTINES_DICT["GS"][psd_model]
disdrodb/routines/l2.py
CHANGED
@@ -72,7 +72,7 @@ from disdrodb.utils.routines import (
 )
 from disdrodb.utils.time import (
     ensure_sample_interval_in_seconds,
-
+    get_sampling_information,
 )
 from disdrodb.utils.writer import write_product

@@ -124,7 +124,7 @@ class ProcessingOptions:
     product_options = dict_product_options[temporal_resolution].copy()

     # Retrieve accumulation_interval and rolling option
-    accumulation_interval, rolling =
+    accumulation_interval, rolling = get_sampling_information(temporal_resolution)

     # Extract processing options
     archive_options = product_options.pop("archive_options")
@@ -492,7 +492,7 @@ def run_l2e_station(
     product_options = l2e_processing_options.get_product_options(temporal_resolution)

     # Retrieve accumulation_interval and rolling option
-    accumulation_interval, rolling =
+    accumulation_interval, rolling = get_sampling_information(temporal_resolution)

     # Precompute required scattering tables
     if product_options["radar_enabled"]:
@@ -655,7 +655,10 @@ def _generate_l2m(

     # Define variables to load
     optimization_kwargs = l2m_options["optimization_kwargs"]
-    if "init_method" in optimization_kwargs:
+    if "init_method" in optimization_kwargs and optimization_kwargs["init_method"] is None:
+        optimization_kwargs["init_method"] = "None"
+
+    if optimization_kwargs.get("init_method", "None") != "None":
         init_method = optimization_kwargs["init_method"]
         moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
     else:
@@ -839,7 +842,7 @@ def run_l2m_station(
    for temporal_resolution in temporal_resolutions:

        # Retrieve accumulation_interval and rolling option
-       accumulation_interval, rolling =
+       accumulation_interval, rolling = get_sampling_information(temporal_resolution)

        # ------------------------------------------------------------------.
        # Avoid generation of rolling products for source sample interval !
@@ -898,6 +901,7 @@ def run_l2m_station(
        # -----------------------------------------------------------------.
        # Loop over distributions to fit
        # model_name = "GAMMA_ML"
+       # model_name = "LOGNORMAL_GS_ND_MAE"
        # model_options = l2m_options["models"][model_name]
        # Retrieve list of models to fit
        models = global_product_options.pop("models")
@@ -924,23 +928,31 @@ def run_l2m_station(

        # -------------------------------------------------------------.
        # Create product directory
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+       try:
+           data_dir = create_product_directory(
+               # DISDRODB root directories
+               data_archive_dir=data_archive_dir,
+               metadata_archive_dir=metadata_archive_dir,
+               # Station arguments
+               data_source=data_source,
+               campaign_name=campaign_name,
+               station_name=station_name,
+               # Processing options
+               product=product,
+               force=force,
+               # Option for L2E
+               sample_interval=accumulation_interval,
+               rolling=rolling,
+               # Option for L2M
+               model_name=model_name,
+           )
+       except Exception:
+           msg = (
+               f"Production of L2M_{model_name} for sample interval {accumulation_interval} s has been "
+               + "skipped because the product already exists and force=False."
+           )
+           log_info(logger=logger, msg=msg, verbose=verbose)
+           continue

        # Define logs directory
        logs_dir = create_logs_directory(
@@ -1005,7 +1017,7 @@ def run_l2m_station(
            data_archive_dir=data_archive_dir,
            # Product options
            model_name=model_name,
-           sample_interval=
+           sample_interval=accumulation_interval,
            rolling=rolling,
            # Logs list
            list_logs=list_logs,
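The _generate_l2m hunk above normalizes a Python None init_method (e.g. a YAML null) to the sentinel string "None" before deciding whether moment variables must be loaded. A minimal sketch of that normalization and of the moment-name expansion shown in the diff; the example value "M246" and the empty-list fallback are illustrative assumptions, not documented package options:

# Illustrative sketch of the init_method handling added in _generate_l2m
optimization_kwargs = {"init_method": None}  # e.g. parsed from a YAML "null"

# Normalize a Python None to the sentinel string "None"
if "init_method" in optimization_kwargs and optimization_kwargs["init_method"] is None:
    optimization_kwargs["init_method"] = "None"

if optimization_kwargs.get("init_method", "None") != "None":
    init_method = optimization_kwargs["init_method"]
    # e.g. "M246" -> ["M2", "M4", "M6", "M1"] (example value is an assumption)
    moments = [f"M{order}" for order in init_method.replace("M", "")] + ["M1"]
else:
    moments = []  # hypothetical fallback; the actual else-branch is not shown in the diff

print(moments)  # [] for the None case above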
disdrodb/routines/wrappers.py
CHANGED
@@ -643,6 +643,7 @@ def create_summary_station(
     campaign_name,
     station_name,
     parallel=False,
+    temporal_resolution="1MIN",
     data_archive_dir=None,
 ):
     """Create summary figures and tables for a DISDRODB station."""
@@ -658,6 +659,8 @@ def create_summary_station(
             str(data_archive_dir),
             "--parallel",
             str(parallel),
+            "--temporal_resolution",
+            str(temporal_resolution),
         ],
     )
     # Execute command
@@ -1440,6 +1443,7 @@ def create_summary(
     campaign_names=None,
     station_names=None,
     parallel=False,
+    temporal_resolution="1MIN",
     data_archive_dir=None,
     metadata_archive_dir=None,
 ):
@@ -1493,6 +1497,7 @@ def create_summary(
             station_name=station_name,
             # Processing option
             parallel=parallel,
+            temporal_resolution=temporal_resolution,
         )
     print("Creation of station summaries has terminated.")

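Both wrapper functions now forward a temporal_resolution option (default "1MIN") to the station summary command, matching the new --temporal_resolution flag of the disdrodb_create_summary CLI entry points listed above. A hedged usage sketch; the data_source keyword and the station identifiers are assumptions, since the full signature is not shown in the hunk:

from disdrodb.routines.wrappers import create_summary_station

# Request a station summary at 5-minute resolution instead of the 1MIN default.
# The station identifiers below are placeholders.
create_summary_station(
    data_source="MY_DATA_SOURCE",    # assumption: parameter name inferred, not shown in the hunk
    campaign_name="MY_CAMPAIGN",
    station_name="MY_STATION",
    parallel=False,
    temporal_resolution="5MIN",
)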
disdrodb/scattering/axis_ratio.py
CHANGED
@@ -20,90 +20,6 @@ import numpy as np
 import xarray as xr


-def available_axis_ratio_models():
-    """Return a list of the available drop axis ratio models."""
-    return list(AXIS_RATIO_MODELS)
-
-
-def get_axis_ratio_model(model):
-    """Return the specified drop axis ratio model.
-
-    Parameters
-    ----------
-    model : str
-        The model to use for calculating the axis ratio. Available models are:
-        'Thurai2005', 'Thurai2007', 'Battaglia2010', 'Brandes2002',
-        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
-
-    Returns
-    -------
-    callable
-        A function which compute the vertical-to-horizontal axis ratio given a
-        particle diameter in mm.
-
-    Notes
-    -----
-    This function serves as a wrapper to various axis ratio models for raindrops.
-    It returns the appropriate model based on the `model` parameter.
-
-    Please note that the axis ratio function to be provided to pyTmatrix expects to
-    return a horizontal-to-vertical axis ratio !
-
-    """
-    model = check_axis_ratio_model(model)
-    return AXIS_RATIO_MODELS[model]
-
-
-def check_axis_ratio_model(model):
-    """Check validity of the specified drop axis ratio model."""
-    available_models = available_axis_ratio_models()
-    if model not in available_models:
-        raise ValueError(f"{model} is an invalid axis-ratio model. Valid models: {available_models}.")
-    return model
-
-
-def get_axis_ratio(diameter, model):
-    """
-    Compute the axis ratio of raindrops using the specified model.
-
-    Parameters
-    ----------
-    diameter : array-like
-        Raindrops diameter in mm.
-    model : str
-        The axis ratio model to use for calculating the axis ratio. Available models are:
-        'Thurai2005', 'Thurai2007', 'Battaglia2010', 'Brandes2002',
-        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
-
-    Returns
-    -------
-    axis_ratio : array-like
-        The vertical-to-horizontal drop axis ratio corresponding to the input diameters.
-        Values of 1 indicate spherical particles, while values <1 indicate oblate particles.
-        Values >1 means prolate particles.
-
-    Notes
-    -----
-    This function serves as a wrapper to various axis ratio models for raindrops.
-    It selects and applies the appropriate model based on the `model` parameter.
-
-    Examples
-    --------
-    >>> diameter = np.array([0.5, 1.0, 2.0, 3.0])
-    >>> axis_ratio = get_axis_ratio(diameter, model="Brandes2002")
-
-    """
-    # Retrieve axis ratio function
-    axis_ratio_func = get_axis_ratio_model(model)
-
-    # Retrieve axis ratio
-    axis_ratio = axis_ratio_func(diameter)
-
-    # Clip values between 0 and 1
-    axis_ratio = np.clip(axis_ratio, 0, 1)
-    return axis_ratio
-
-
 def get_axis_ratio_andsager_1999(diameter):
     """
     Compute the axis ratio of raindrops using the Andsager et al. (1999) model.
@@ -366,3 +282,93 @@ AXIS_RATIO_MODELS = {
     "Beard1987": get_axis_ratio_beard_1987,
     "Andsager1999": get_axis_ratio_andsager_1999,
 }
+
+
+def available_axis_ratio_models():
+    """Return a list of the available drop axis ratio models."""
+    return list(AXIS_RATIO_MODELS)
+
+
+def check_axis_ratio_model(model):
+    """Check validity of the specified drop axis ratio model."""
+    available_models = available_axis_ratio_models()
+    if model not in available_models:
+        raise ValueError(f"{model} is an invalid axis-ratio model. Valid models: {available_models}.")
+    return model
+
+
+def get_axis_ratio_model(model):
+    """Return the specified drop axis ratio model.
+
+    Parameters
+    ----------
+    model : str
+        The model to use for calculating the axis ratio. Available models are:
+        'Thurai2005', 'Thurai2007', 'Battaglia2010', 'Brandes2002',
+        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
+
+    Returns
+    -------
+    callable
+        A function which compute the vertical-to-horizontal axis ratio given a
+        particle diameter in mm.
+
+    Notes
+    -----
+    This function serves as a wrapper to various axis ratio models for raindrops.
+    It returns the appropriate model based on the `model` parameter.
+
+    Please note that the axis ratio function to be provided to pyTmatrix expects to
+    return a horizontal-to-vertical axis ratio !
+
+    """
+    model = check_axis_ratio_model(model)
+    return AXIS_RATIO_MODELS[model]
+
+
+def get_axis_ratio(diameter, model):
+    """
+    Compute the axis ratio of raindrops using the specified model.
+
+    Parameters
+    ----------
+    diameter : array-like
+        Raindrops diameter in mm.
+    model : str
+        The axis ratio model to use for calculating the axis ratio. Available models are:
+        'Thurai2005', 'Thurai2007', 'Battaglia2010', 'Brandes2002',
+        'Pruppacher1970', 'Beard1987', 'Andsager1999'.
+
+    Returns
+    -------
+    axis_ratio : array-like
+        The vertical-to-horizontal drop axis ratio corresponding to the input diameters.
+        Values of 1 indicate spherical particles, while values <1 indicate oblate particles.
+        Values >1 means prolate particles.
+
+    Notes
+    -----
+    This function serves as a wrapper to various axis ratio models for raindrops.
+    It selects and applies the appropriate model based on the `model` parameter.
+
+    Examples
+    --------
+    >>> diameter = np.array([0.5, 1.0, 2.0, 3.0])
+    >>> axis_ratio = get_axis_ratio(diameter, model="Brandes2002")
+
+    """
+    # Retrieve axis ratio function
+    axis_ratio_func = get_axis_ratio_model(model)
+
+    # Retrieve axis ratio
+    axis_ratio = axis_ratio_func(diameter)
+
+    # Clip values between 0 and 1
+    axis_ratio = np.clip(axis_ratio, 0, 1)
+
+    # Add attributes
+    if isinstance(axis_ratio, xr.DataArray):
+        axis_ratio.name = "axis_ratio"
+        axis_ratio.attrs["units"] = ""
+        axis_ratio.attrs["model"] = model
+    return axis_ratio
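The relocated get_axis_ratio now also attaches name, units and model attributes when it receives an xarray.DataArray. A short usage sketch based on the docstring example in the hunk above; the diameter values and the dimension name are illustrative:

import numpy as np
import xarray as xr

from disdrodb.scattering.axis_ratio import get_axis_ratio

# NumPy input: plain array of vertical-to-horizontal axis ratios, clipped to [0, 1]
diameter = np.array([0.5, 1.0, 2.0, 3.0])  # drop diameters in mm
axis_ratio = get_axis_ratio(diameter, model="Brandes2002")

# xarray input: the result now carries name, units and model attributes
diameter_da = xr.DataArray(diameter, dims="diameter_bin_center")
axis_ratio_da = get_axis_ratio(diameter_da, model="Brandes2002")
print(axis_ratio_da.name, axis_ratio_da.attrs)  # "axis_ratio", {"units": "", "model": "Brandes2002"}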
disdrodb/scattering/permittivity.py
CHANGED
@@ -147,6 +147,12 @@ def get_refractive_index(temperature, frequency, permittivity_model):

     # Retrieve refractive_index
     refractive_index = func(temperature=temperature, frequency=frequency)
+
+    # Add attributes
+    if isinstance(refractive_index, xr.DataArray):
+        refractive_index.name = "refractive_index"
+        refractive_index.attrs["units"] = ""
+        refractive_index.attrs["model"] = permittivity_model
     return refractive_index


disdrodb/summary/routines.py
CHANGED
@@ -43,6 +43,7 @@ from disdrodb.utils.manipulations import (
     resample_drop_number_concentration,
     unstack_radar_variables,
 )
+from disdrodb.utils.time import get_sampling_information
 from disdrodb.utils.warnings import suppress_warnings
 from disdrodb.utils.yaml import write_yaml
 from disdrodb.viz import compute_dense_lines, max_blend_images, to_rgba
@@ -247,8 +248,9 @@ def create_table_dsd_summary(df):
     df_stats["SKEWNESS"] = df_subset.skew()
     df_stats["KURTOSIS"] = df_subset.kurt()

-    # Round
-
+    # Round float columns to nearest integer, leave ints unchanged
+    float_cols = df_stats.select_dtypes(include=["float"]).columns
+    df_stats[float_cols] = df_stats[float_cols].astype(float).round(decimals=2)
     return df_stats


@@ -327,15 +329,19 @@ def create_table_events_summary(df):
         events_stats.append(event_stats)

     df_events = pd.DataFrame.from_records(events_stats)
+
+    # Round float columns to nearest integer, leave ints unchanged
+    float_cols = df_events.select_dtypes(include=["float"]).columns
+    df_events[float_cols] = df_events[float_cols].astype(float).round(decimals=2)
     return df_events


 def prepare_latex_table_dsd_summary(df):
     """Prepare a DataFrame with DSD statistics for LaTeX table output."""
     df = df.copy()
-    #
-
-    df[
+    # Cast numeric columns to string
+    numeric_cols = df.select_dtypes(include=["float", "int"]).columns
+    df[numeric_cols] = df[numeric_cols].astype(str)
     # Rename
     rename_dict = {
         "W": r"$W\,[\mathrm{g}\,\mathrm{m}^{-3}]$",  # [g/m3]
@@ -360,9 +366,9 @@ def prepare_latex_table_events_summary(df):
     # Round datetime to minutes
     df["start_time"] = df["start_time"].dt.strftime("%Y-%m-%d %H:%M")
     df["end_time"] = df["end_time"].dt.strftime("%Y-%m-%d %H:%M")
-    #
-
-    df[
+    # Cast numeric columns to string
+    numeric_cols = df.select_dtypes(include=["float", "int"]).columns
+    df[numeric_cols] = df[numeric_cols].astype(str)
     # Rename
     rename_dict = {
         "start_time": r"Start",
@@ -688,6 +694,13 @@ def plot_raw_and_filtered_spectrums(
     cmap = plt.get_cmap("Spectral_r").copy()
     cmap.set_under("none")

+    if "time" in drop_number.dims:
+        drop_number = drop_number.sum(dim="time")
+    if "time" in raw_drop_number.dims:
+        raw_drop_number = raw_drop_number.sum(dim="time")
+    if "time" in theoretical_average_velocity.dims:
+        theoretical_average_velocity = theoretical_average_velocity.mean(dim="time")
+
     if norm is None:
         norm = LogNorm(1, None)

@@ -3729,8 +3742,9 @@ def define_filename(prefix, extension, data_source, campaign_name, station_name)

 def create_l2_dataframe(ds):
     """Create pandas Dataframe for L2 analysis."""
+    dims_to_drop = set(ds.dims).intersection({DIAMETER_DIMENSION, VELOCITY_DIMENSION})
     # - Drop array variables and convert to pandas
-    df = ds.drop_dims(
+    df = ds.drop_dims(dims_to_drop).to_pandas()
     # - Drop coordinates
     coords_to_drop = ["velocity_method", "sample_interval", *RADAR_OPTIONS]
     df = df.drop(columns=coords_to_drop, errors="ignore")
@@ -3759,7 +3773,11 @@ def prepare_summary_dataset(ds, velocity_method="fall_velocity", source="drop_nu

     # Select only timesteps with R > 0
     # - We save R with 2 decimals accuracy ... so 0.01 is the smallest value
-
+    if "Rm" in ds:  # in L2E
+        rainy_timesteps = np.logical_and(ds["Rm"].compute() >= 0.01, ds["R"].compute() >= 0.01)
+    else:  # L2M without Rm
+        rainy_timesteps = ds["R"].compute() >= 0.01
+
     ds = ds.isel(time=rainy_timesteps)
     return ds

@@ -3776,10 +3794,13 @@ def generate_station_summary(ds, summary_dir_path, data_source, campaign_name, s
     # Ensure all data are in memory
     ds = ds.compute()

+    # Keep only timesteps with at least 3 Nbins to remove noise
+    valid_idx = np.where(ds["Nbins"] >= 3)[0]
+    ds = ds.isel(time=valid_idx)
+
     ####---------------------------------------------------------------------.
     #### Create drop spectrum figures and statistics
     # Compute sum of raw and filtered spectrum over time
-
     raw_drop_number = ds["raw_drop_number"].sum(dim="time")
     drop_number = ds["drop_number"].sum(dim="time")

@@ -4153,6 +4174,7 @@ def create_station_summary(
     station_name,
     parallel=False,
     data_archive_dir=None,
+    temporal_resolution="1MIN",
 ):
     """Create summary figures and tables for a DISDRODB station."""
     # Print processing info
@@ -4169,6 +4191,10 @@ def create_station_summary(
     )
     os.makedirs(summary_dir_path, exist_ok=True)

+    # Define product_kwargs
+    sample_interval, rolling = get_sampling_information(temporal_resolution)
+    product_kwargs = {"rolling": rolling, "sample_interval": sample_interval}
+
     # Load L2E 1MIN dataset
     ds = disdrodb.open_dataset(
         data_archive_dir=data_archive_dir,
@@ -4176,7 +4202,7 @@ def create_station_summary(
         campaign_name=campaign_name,
         station_name=station_name,
         product="L2E",
-        product_kwargs=
+        product_kwargs=product_kwargs,
         parallel=parallel,
         chunks=-1,
         compute=True,
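The two table-builder hunks above apply the same pattern: select the float columns with select_dtypes, round them to two decimals, and later cast all numeric columns to strings for the LaTeX tables. A standalone pandas sketch of that pattern on a toy DataFrame; the column names are illustrative, not the package's:

import pandas as pd

# Toy statistics table; the column names are illustrative
df_stats = pd.DataFrame({"MEAN": [1.23456, 7.891011], "N_EVENTS": [10, 42]})

# Round float columns to 2 decimals, leave integer columns untouched
float_cols = df_stats.select_dtypes(include=["float"]).columns
df_stats[float_cols] = df_stats[float_cols].astype(float).round(decimals=2)

# For the LaTeX tables, numeric columns are then cast to strings
numeric_cols = df_stats.select_dtypes(include=["float", "int"]).columns
df_stats[numeric_cols] = df_stats[numeric_cols].astype(str)
print(df_stats.dtypes)  # all columns are now object (string) dtype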
disdrodb/utils/attrs.py
CHANGED
@@ -95,6 +95,8 @@ def update_disdrodb_attrs(ds, product: str):
     # ----------------------------------------------
     # Add time_coverage_start and time_coverage_end
     if "time" in ds.dims:
+        ds["time"] = ds["time"].dt.floor("s")  # ensure no sub-second values
+        ds["time"] = ds["time"].astype("datetime64[s]")
         attrs["time_coverage_start"] = str(ds["time"].data[0])
         attrs["time_coverage_end"] = str(ds["time"].data[-1])

disdrodb/utils/encoding.py
CHANGED
@@ -19,6 +19,7 @@
 """DISDRODB netCDF4 encoding utilities."""
 import os

+import numpy as np
 import xarray as xr

 from disdrodb.utils.yaml import read_yaml
@@ -66,6 +67,8 @@ def set_encodings(ds: xr.Dataset, encodings_dict: dict) -> xr.Dataset:

     # Set time encoding
     if "time" in ds:
+        ds["time"] = ds["time"].dt.floor("s")  # ensure no sub-second values
+        ds["time"] = ds["time"].astype("datetime64[s]")
         ds["time"].encoding.update(get_time_encoding())

     # Set the variable encodings
@@ -140,6 +143,8 @@ def get_time_encoding() -> dict:
         Time encoding.
     """
     encoding = {}
+    encoding["dtype"] = "int64"  # if float trailing sub-seconds values
+    encoding["fillvalue"] = np.iinfo(np.int64).max
     encoding["units"] = EPOCH
     encoding["calendar"] = "proleptic_gregorian"
     return encoding
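Both attrs.py and encoding.py now floor timestamps to whole seconds and encode time as int64, avoiding sub-second float round-off in the netCDF output. A minimal standalone sketch of the same flooring and encoding steps on a toy dataset; the epoch string is illustrative, since the package takes it from its EPOCH constant:

import numpy as np
import pandas as pd
import xarray as xr

# Toy dataset with sub-second timestamps
times = pd.to_datetime(["2024-01-01 00:00:00.400", "2024-01-01 00:01:00.900"])
ds = xr.Dataset({"R": ("time", [0.5, 1.2])}, coords={"time": times})

# Floor to whole seconds and store time as datetime64[s]
ds["time"] = ds["time"].dt.floor("s")
ds["time"] = ds["time"].astype("datetime64[s]")

# Integer time encoding mirroring get_time_encoding (epoch string is illustrative)
ds["time"].encoding.update(
    {
        "dtype": "int64",
        "fillvalue": np.iinfo(np.int64).max,
        "units": "seconds since 1970-01-01 00:00:00",
        "calendar": "proleptic_gregorian",
    }
)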
disdrodb/utils/time.py
CHANGED
@@ -62,7 +62,7 @@ def seconds_to_temporal_resolution(seconds):
     return temporal_resolution


-def
+def get_sampling_information(temporal_resolution):
     """
     Extract resampling information from the temporal_resolution string.

@@ -127,7 +127,7 @@ def temporal_resolution_to_seconds(temporal_resolution):
     seconds
         Duration in seconds.
     """
-    seconds, _ =
+    seconds, _ = get_sampling_information(temporal_resolution)
     return seconds


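Several modules in this release switch to the get_sampling_information helper, which extracts the accumulation interval in seconds together with a rolling flag from the temporal_resolution string. A hedged usage sketch based only on the call sites visible in this diff; the exact return values and string grammar are assumptions:

from disdrodb.utils.time import get_sampling_information

# Call pattern used in routines/l2.py and summary/routines.py
accumulation_interval, rolling = get_sampling_information("5MIN")
# accumulation_interval: accumulation interval in seconds (assumed 300 here)
# rolling: whether the product uses a rolling window (assumed False for a plain "5MIN")

# temporal_resolution_to_seconds() is now a thin wrapper that discards the rolling flag
seconds, _ = get_sampling_information("1MIN")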
disdrodb/viz/plots.py
CHANGED
@@ -22,13 +22,36 @@ import xarray as xr
 from matplotlib.colors import LogNorm, Normalize


+def _single_plot_nd_distribution(drop_number_concentration, diameter, diameter_bin_width):
+    fig, ax = plt.subplots(1, 1)
+    ax.bar(
+        diameter,
+        drop_number_concentration,
+        width=diameter_bin_width,
+        edgecolor="darkgray",
+        color="lightgray",
+        label="Data",
+    )
+    ax.set_title("Drop number concentration (N(D))")
+    ax.set_xlabel("Drop diameter (mm)")
+    ax.set_ylabel("N(D) [m-3 mm-1]")
+    return ax
+
+
 def plot_nd(ds, var="drop_number_concentration", cmap=None, norm=None):
     """Plot drop number concentration N(D) timeseries."""
     # Check inputs
     if var not in ds:
         raise ValueError(f"{var} is not a xarray Dataset variable!")
+
     # Check only time and diameter dimensions are specified
-
+    if "time" not in ds.dims:
+        ax = _single_plot_nd_distribution(
+            drop_number_concentration=ds[var],
+            diameter=ds["diameter_bin_center"],
+            diameter_bin_width=ds["diameter_bin_width"],
+        )
+        return ax

     # Select N(D)
     ds_var = ds[[var]].compute()
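plot_nd can now also render a single, time-free N(D) spectrum through the new _single_plot_nd_distribution helper. A hedged usage sketch; the bin values below are made up and real datasets carry many more diameter bins:

import numpy as np
import xarray as xr

from disdrodb.viz.plots import plot_nd

# Minimal time-free dataset with the variables plot_nd expects (values are made up)
diameter_bin_center = np.array([0.3, 0.5, 0.7, 0.9])   # mm
diameter_bin_width = np.full(4, 0.2)                    # mm
nd = np.array([1200.0, 800.0, 300.0, 50.0])             # m-3 mm-1
ds = xr.Dataset(
    {"drop_number_concentration": ("diameter_bin_center", nd)},
    coords={
        "diameter_bin_center": diameter_bin_center,
        "diameter_bin_width": ("diameter_bin_center", diameter_bin_width),
    },
)

# No "time" dimension -> plot_nd falls back to the single-spectrum bar plot
ax = plot_nd(ds)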
{disdrodb-0.1.4.dist-info → disdrodb-0.1.5.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: disdrodb
-Version: 0.1.4
+Version: 0.1.5
 Summary: disdrodb provides tools to download, standardize, share and analyze global disdrometer data.
 Author: Gionata Ghiggi
 Project-URL: homepage, https://github.com/ltelab/disdrodb
@@ -33,6 +33,7 @@ Requires-Dist: numpy
 Requires-Dist: scipy
 Requires-Dist: dask[distributed]
 Requires-Dist: xarray
+Requires-Dist: bottleneck
 Requires-Dist: matplotlib
 Provides-Extra: dev
 Requires-Dist: jupyter; extra == "dev"