disdrodb 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. disdrodb/__init__.py +4 -0
  2. disdrodb/_version.py +2 -2
  3. disdrodb/api/checks.py +70 -47
  4. disdrodb/api/configs.py +0 -2
  5. disdrodb/api/create_directories.py +0 -2
  6. disdrodb/api/info.py +3 -3
  7. disdrodb/api/io.py +48 -8
  8. disdrodb/api/path.py +116 -133
  9. disdrodb/api/search.py +12 -3
  10. disdrodb/cli/disdrodb_create_summary.py +113 -0
  11. disdrodb/cli/disdrodb_create_summary_station.py +11 -1
  12. disdrodb/cli/disdrodb_run_l0a_station.py +1 -1
  13. disdrodb/cli/disdrodb_run_l0b_station.py +2 -2
  14. disdrodb/cli/disdrodb_run_l0c_station.py +2 -2
  15. disdrodb/cli/disdrodb_run_l1_station.py +2 -2
  16. disdrodb/cli/disdrodb_run_l2e_station.py +2 -2
  17. disdrodb/cli/disdrodb_run_l2m_station.py +2 -2
  18. disdrodb/constants.py +1 -1
  19. disdrodb/data_transfer/download_data.py +123 -7
  20. disdrodb/etc/products/L1/global.yaml +1 -1
  21. disdrodb/etc/products/L2E/5MIN.yaml +1 -0
  22. disdrodb/etc/products/L2E/global.yaml +1 -1
  23. disdrodb/etc/products/L2M/GAMMA_GS_ND_MAE.yaml +6 -0
  24. disdrodb/etc/products/L2M/GAMMA_ML.yaml +1 -1
  25. disdrodb/etc/products/L2M/LOGNORMAL_GS_LOG_ND_MAE.yaml +6 -0
  26. disdrodb/etc/products/L2M/LOGNORMAL_GS_ND_MAE.yaml +6 -0
  27. disdrodb/etc/products/L2M/LOGNORMAL_ML.yaml +8 -0
  28. disdrodb/etc/products/L2M/global.yaml +11 -3
  29. disdrodb/issue/writer.py +2 -0
  30. disdrodb/l0/check_configs.py +49 -16
  31. disdrodb/l0/configs/LPM/l0a_encodings.yml +2 -2
  32. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +2 -2
  33. disdrodb/l0/configs/LPM/l0b_encodings.yml +2 -2
  34. disdrodb/l0/configs/LPM/raw_data_format.yml +2 -2
  35. disdrodb/l0/configs/PWS100/l0b_encodings.yml +1 -0
  36. disdrodb/l0/configs/SWS250/bins_diameter.yml +108 -0
  37. disdrodb/l0/configs/SWS250/bins_velocity.yml +83 -0
  38. disdrodb/l0/configs/SWS250/l0a_encodings.yml +18 -0
  39. disdrodb/l0/configs/SWS250/l0b_cf_attrs.yml +72 -0
  40. disdrodb/l0/configs/SWS250/l0b_encodings.yml +155 -0
  41. disdrodb/l0/configs/SWS250/raw_data_format.yml +148 -0
  42. disdrodb/l0/l0a_processing.py +10 -5
  43. disdrodb/l0/l0b_nc_processing.py +10 -6
  44. disdrodb/l0/l0b_processing.py +92 -72
  45. disdrodb/l0/l0c_processing.py +369 -251
  46. disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +8 -1
  47. disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +2 -2
  48. disdrodb/l0/readers/LPM/BELGIUM/ULIEGE.py +256 -0
  49. disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +2 -2
  50. disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +2 -2
  51. disdrodb/l0/readers/LPM/GERMANY/DWD.py +491 -0
  52. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +2 -2
  53. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
  54. disdrodb/l0/readers/LPM/KIT/CHWALA.py +2 -2
  55. disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +107 -12
  56. disdrodb/l0/readers/LPM/SLOVENIA/UL.py +3 -3
  57. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +2 -2
  58. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +5 -14
  59. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +5 -14
  60. disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL.py +117 -8
  61. disdrodb/l0/readers/PARSIVEL2/ARM/ARM_PARSIVEL2.py +4 -0
  62. disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +10 -14
  63. disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +10 -14
  64. disdrodb/l0/readers/PARSIVEL2/CANADA/UQAM_NC.py +69 -0
  65. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +8 -14
  66. disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +382 -0
  67. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +4 -0
  68. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +1 -1
  69. disdrodb/l0/readers/PARSIVEL2/GREECE/NOA.py +127 -0
  70. disdrodb/l0/readers/PARSIVEL2/ITALY/HYDROX.py +239 -0
  71. disdrodb/l0/readers/PARSIVEL2/MPI/BCO_PARSIVEL2.py +136 -0
  72. disdrodb/l0/readers/PARSIVEL2/MPI/BOWTIE.py +220 -0
  73. disdrodb/l0/readers/PARSIVEL2/NASA/LPVEX.py +109 -0
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +5 -11
  75. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +4 -17
  76. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +5 -14
  77. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +10 -13
  78. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +10 -13
  79. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +3 -0
  80. disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/PANGASA.py +232 -0
  81. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +6 -18
  82. disdrodb/l0/readers/PARSIVEL2/SPAIN/GRANADA.py +120 -0
  83. disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +7 -25
  84. disdrodb/l0/readers/PWS100/AUSTRIA/HOAL.py +321 -0
  85. disdrodb/l0/readers/SW250/BELGIUM/KMI.py +239 -0
  86. disdrodb/l1/beard_model.py +31 -129
  87. disdrodb/l1/fall_velocity.py +156 -57
  88. disdrodb/l1/filters.py +25 -28
  89. disdrodb/l1/processing.py +12 -14
  90. disdrodb/l1_env/routines.py +46 -17
  91. disdrodb/l2/empirical_dsd.py +6 -0
  92. disdrodb/l2/processing.py +3 -3
  93. disdrodb/metadata/checks.py +132 -125
  94. disdrodb/metadata/geolocation.py +0 -2
  95. disdrodb/psd/fitting.py +180 -210
  96. disdrodb/psd/models.py +1 -1
  97. disdrodb/routines/__init__.py +54 -0
  98. disdrodb/{l0/routines.py → routines/l0.py} +288 -418
  99. disdrodb/{l1/routines.py → routines/l1.py} +60 -92
  100. disdrodb/{l2/routines.py → routines/l2.py} +284 -485
  101. disdrodb/{routines.py → routines/wrappers.py} +100 -7
  102. disdrodb/scattering/axis_ratio.py +95 -85
  103. disdrodb/scattering/permittivity.py +24 -0
  104. disdrodb/scattering/routines.py +56 -36
  105. disdrodb/summary/routines.py +147 -45
  106. disdrodb/utils/archiving.py +434 -0
  107. disdrodb/utils/attrs.py +2 -0
  108. disdrodb/utils/cli.py +5 -5
  109. disdrodb/utils/dask.py +62 -1
  110. disdrodb/utils/decorators.py +31 -0
  111. disdrodb/utils/encoding.py +10 -1
  112. disdrodb/{l2 → utils}/event.py +1 -66
  113. disdrodb/utils/logger.py +1 -1
  114. disdrodb/utils/manipulations.py +22 -12
  115. disdrodb/utils/routines.py +166 -0
  116. disdrodb/utils/time.py +5 -293
  117. disdrodb/utils/xarray.py +3 -0
  118. disdrodb/viz/plots.py +109 -15
  119. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/METADATA +3 -2
  120. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/RECORD +124 -96
  121. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/entry_points.txt +1 -0
  122. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/WHEEL +0 -0
  123. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/licenses/LICENSE +0 -0
  124. {disdrodb-0.1.3.dist-info → disdrodb-0.1.5.dist-info}/top_level.txt +0 -0
disdrodb/l1/processing.py CHANGED
@@ -19,8 +19,8 @@
19
19
  import xarray as xr
20
20
 
21
21
  from disdrodb.constants import DIAMETER_DIMENSION, VELOCITY_DIMENSION
22
- from disdrodb.l1.fall_velocity import get_raindrop_fall_velocity
23
- from disdrodb.l1.filters import define_spectrum_mask, filter_diameter_bins, filter_velocity_bins
22
+ from disdrodb.l1.fall_velocity import get_raindrop_fall_velocity_from_ds
23
+ from disdrodb.l1.filters import define_raindrop_spectrum_mask, filter_diameter_bins, filter_velocity_bins
24
24
  from disdrodb.l1.resampling import add_sample_interval
25
25
  from disdrodb.l1_env.routines import load_env_dataset
26
26
  from disdrodb.l2.empirical_dsd import ( # TODO: maybe move out of L2
@@ -34,7 +34,7 @@ from disdrodb.utils.writer import finalize_product
34
34
  def generate_l1(
35
35
  ds,
36
36
  # Fall velocity option
37
- fall_velocity_method="Beard1976",
37
+ fall_velocity_model="Beard1976",
38
38
  # Diameter-Velocity Filtering Options
39
39
  minimum_diameter=0,
40
40
  maximum_diameter=10,
@@ -54,7 +54,7 @@ def generate_l1(
54
54
  ----------
55
55
  ds : xarray.Dataset
56
56
  DISDRODB L0C dataset.
57
- fall_velocity_method : str, optional
57
+ fall_velocity_model : str, optional
58
58
  Method to compute fall velocity.
59
59
  The default method is ``"Beard1976"``.
60
60
  minimum_diameter : float, optional
@@ -83,7 +83,7 @@ def generate_l1(
83
83
  Returns
84
84
  -------
85
85
  xarray.Dataset
86
- DISRODB L1 dataset.
86
+ DISDRODB L1 dataset.
87
87
  """
88
88
  # Retrieve source attributes
89
89
  attrs = ds.attrs.copy()
@@ -106,7 +106,9 @@ def generate_l1(
106
106
 
107
107
  # ---------------------------------------------------------------------------
108
108
  # Retrieve ENV dataset or take defaults
109
- # --> Used only for Beard fall velocity currently !
109
+ # - Used only for Beard fall velocity currently !
110
+ # - It checks and includes default geolocation if missing
111
+ # - For mobile disdrometer, infill missing geolocation with backward and forward filling
110
112
  ds_env = load_env_dataset(ds)
111
113
 
112
114
  # ---------------------------------------------------------------------------
@@ -120,7 +122,7 @@ def generate_l1(
120
122
  ds_l1 = add_sample_interval(ds_l1, sample_interval=sample_interval)
121
123
 
122
124
  # Add L0C coordinates that might got lost
123
- if "time_qc" in ds_l1:
125
+ if "time_qc" in ds:
124
126
  ds_l1 = ds_l1.assign_coords({"time_qc": ds["time_qc"]})
125
127
 
126
128
  # -------------------------------------------------------------------------------------------
@@ -128,7 +130,7 @@ def generate_l1(
128
130
  if sensor_name in ["PARSIVEL", "PARSIVEL2"]:
129
131
  # - Remove first two bins because never reports data !
130
132
  # - If not removed, can alter e.g. L2M model fitting
131
- ds_l1 = filter_diameter_bins(ds=ds_l1, minimum_diameter=0.312) # it includes the 0.2495-0.3745 bin
133
+ ds_l1 = filter_diameter_bins(ds=ds_l1, minimum_diameter=0.2495) # it includes the 0.2495-0.3745 bin
132
134
 
133
135
  # - Filter diameter bins
134
136
  ds_l1 = filter_diameter_bins(ds=ds_l1, minimum_diameter=minimum_diameter, maximum_diameter=maximum_diameter)
@@ -138,16 +140,12 @@ def generate_l1(
138
140
 
139
141
  # -------------------------------------------------------------------------------------------
140
142
  # Compute fall velocity
141
- ds_l1["fall_velocity"] = get_raindrop_fall_velocity(
142
- diameter=ds_l1["diameter_bin_center"],
143
- method=fall_velocity_method,
144
- ds_env=ds_env, # mm
145
- )
143
+ ds_l1["fall_velocity"] = get_raindrop_fall_velocity_from_ds(ds=ds_l1, ds_env=ds_env, model=fall_velocity_model)
146
144
 
147
145
  # -------------------------------------------------------------------------------------------
148
146
  # Define filtering mask according to fall velocity
149
147
  if has_velocity_dimension:
150
- mask = define_spectrum_mask(
148
+ mask = define_raindrop_spectrum_mask(
151
149
  drop_number=ds_l1["raw_drop_number"],
152
150
  fall_velocity=ds_l1["fall_velocity"],
153
151
  above_velocity_fraction=above_velocity_fraction,
@@ -15,39 +15,68 @@
15
15
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
16
  # -----------------------------------------------------------------------------.
17
17
  """Core functions for DISDRODB ENV production."""
18
+ import numpy as np
18
19
  import xarray as xr
19
20
 
20
21
  from disdrodb.constants import GEOLOCATION_COORDS
22
+ from disdrodb.l0.l0b_processing import ensure_valid_geolocation
23
+ from disdrodb.utils.logger import log_warning
24
+
25
+ DEFAULT_GEOLOCATION = {
26
+ "latitude": 46.159346,
27
+ "longitude": 8.774586,
28
+ "altitude": 0,
29
+ }
21
30
 
22
31
 
23
32
  def get_default_environment_dataset():
24
33
  """Define defaults values for the ENV dataset."""
25
34
  ds_env = xr.Dataset()
26
- ds_env["sea_level_air_pressure"] = 101_325
27
- ds_env["gas_constant_dry_air"] = 287.04
28
- ds_env["lapse_rate"] = 0.0065
29
- ds_env["relative_humidity"] = 0.95 # Value between 0 and 1 !
30
- ds_env["temperature"] = 20 + 273.15
35
+ ds_env["sea_level_air_pressure"] = 101_325 # Pa
36
+ ds_env["gas_constant_dry_air"] = 287.04 # J kg⁻¹ K⁻¹
37
+ ds_env["lapse_rate"] = 0.0065 # K m⁻¹
38
+ ds_env["relative_humidity"] = 0.95 # 0-1 !
39
+ ds_env["temperature"] = 20 + 273.15 # K
40
+ ds_env["water_density"] = 1000 # kg m⁻³ (T == 10 --> 999.7, T == 20 --> 998.2)
41
+ # get_water_density(temperature=temperature, air_pressure=air_pressure
31
42
  return ds_env
32
43
 
33
44
 
34
- def _assign_geolocation(ds_src, dst_dst):
45
+ def _assign_geolocation(ds_src, dst_dst, logger=None):
46
+ dict_coords = {}
47
+ for coord in GEOLOCATION_COORDS:
48
+ if coord in ds_src:
49
+ # Check geolocation validity
50
+ ds_src = ensure_valid_geolocation(ds_src, coord=coord, errors="coerce")
51
+ # Assign valid geolocation (or default one if invalid)
52
+ if "time" not in ds_src[coord].dims:
53
+ dict_coords[coord] = ds_src[coord] if not np.isnan(ds_src[coord]) else DEFAULT_GEOLOCATION[coord]
54
+ else: # If coordinates varies over time, infill NaN over time with forward and backward filling
55
+ dict_coords[coord] = ds_src[coord].ffill(dim="time").bfill(dim="time")
56
+ else:
57
+ dict_coords[coord] = DEFAULT_GEOLOCATION[coord]
58
+ log_warning(
59
+ logger=logger,
60
+ msg=f"{coord} not available. Setting {coord}={DEFAULT_GEOLOCATION[coord]}",
61
+ verbose=False,
62
+ )
35
63
 
36
- dict_coords = {coord: ds_src[coord] for coord in GEOLOCATION_COORDS if coord in ds_src}
64
+ # Assign geolocation
37
65
  dst_dst = dst_dst.assign_coords(dict_coords)
38
66
  return dst_dst
39
67
 
40
68
 
41
- def load_env_dataset(ds):
69
+ def load_env_dataset(ds=None, logger=None):
42
70
  """Load the ENV dataset."""
43
- # TODO: Retrieve relative_humidity and temperature from L1-ENV
71
+ # TODO: Retrieve relative_humidity, lapse_rate and temperature from DISDRODB-ENV product
72
+
73
+ # Load default environment dataset
44
74
  ds_env = get_default_environment_dataset()
45
- # Compute water density
46
- # get_water_density(
47
- # temperature=temperature,
48
- # air_pressure=air_pressure,
49
- # )
50
- # --> (T == 10 --> 999.7, T == 20 --> 998.2
51
- ds_env["water_density"] = 1000 # kg / m3 # TODO as function of ENV (temperature, ...) ?
52
- ds_env = _assign_geolocation(ds_src=ds, dst_dst=ds_env)
75
+
76
+ # Assign geolocation if input dataset provided
77
+ if ds is not None:
78
+ ds_env = _assign_geolocation(ds_src=ds, dst_dst=ds_env, logger=logger)
79
+ # Otherwise add default geolocation
80
+ else:
81
+ ds_env = ds_env.assign_coords(DEFAULT_GEOLOCATION)
53
82
  return ds_env
@@ -236,6 +236,12 @@ def get_effective_sampling_area(sensor_name, diameter):
236
236
  if sensor_name == "RD80":
237
237
  sampling_area = 0.005 # m2
238
238
  return sampling_area
239
+ if sensor_name == "SWS250": # TODO: L * (B - diameter / 2) ?
240
+ # Table 29 of the manual that the sample volume is 400cm3, path length?
241
+ # Distance between the end of the hood heaters is 291 mm.
242
+ # Adding a factor of 1.5 for better representation of the Tx-Rx distance: L= 436 mm.
243
+ sampling_area = 0.0091 # m2
244
+ return sampling_area
239
245
  raise NotImplementedError(f"Effective sampling area for {sensor_name} must yet to be specified in the software.")
240
246
 
241
247
 
disdrodb/l2/processing.py CHANGED
@@ -219,7 +219,7 @@ def generate_l2e(
219
219
  Returns
220
220
  -------
221
221
  xarray.Dataset
222
- DISRODB L2E dataset.
222
+ DISDRODB L2E dataset.
223
223
  """
224
224
  # Check and prepapre input dataset
225
225
  ds = check_l2e_input_dataset(ds)
@@ -441,7 +441,7 @@ def generate_l2m(
441
441
  diameter_spacing=0.05,
442
442
  # Processing options
443
443
  ds_env=None,
444
- fall_velocity_method="Beard1976",
444
+ fall_velocity_model="Beard1976",
445
445
  # Filtering options
446
446
  minimum_ndrops=1,
447
447
  minimum_nbins=3,
@@ -548,7 +548,7 @@ def generate_l2m(
548
548
  drop_number_concentration = psd(diameter)
549
549
 
550
550
  # Retrieve fall velocity for each new diameter bin
551
- velocity = get_raindrop_fall_velocity(diameter=diameter, method=fall_velocity_method, ds_env=ds_env) # mm
551
+ velocity = get_raindrop_fall_velocity(diameter=diameter, model=fall_velocity_model, ds_env=ds_env) # mm
552
552
 
553
553
  # Compute integral parameters
554
554
  ds_params = compute_integral_parameters(
@@ -137,8 +137,6 @@ def _check_metadata_measurement_interval(metadata):
137
137
  """Check metadata ``measurement_interval``."""
138
138
  from disdrodb.api.checks import check_measurement_intervals
139
139
 
140
- if "measurement_interval" not in metadata:
141
- raise ValueError("The metadata file does not contain the 'measurement_interval' key.")
142
140
  measurement_intervals = metadata["measurement_interval"]
143
141
  _ = check_measurement_intervals(measurement_intervals)
144
142
 
@@ -150,28 +148,8 @@ def _check_metadata_sensor_name(metadata):
150
148
  check_sensor_name(sensor_name)
151
149
 
152
150
 
153
- def check_station_metadata(data_source, campaign_name, station_name, metadata_archive_dir=None):
154
- """Check DISDRODB metadata compliance."""
155
- from disdrodb.l0.l0_reader import check_metadata_reader
156
-
157
- metadata = read_station_metadata(
158
- data_source=data_source,
159
- campaign_name=campaign_name,
160
- station_name=station_name,
161
- metadata_archive_dir=metadata_archive_dir,
162
- )
163
- _check_metadata_keys(metadata)
164
- _check_metadata_values(metadata)
165
- _check_metadata_campaign_name(metadata, expected_name=campaign_name)
166
- _check_metadata_data_source(metadata, expected_name=data_source)
167
- _check_metadata_station_name(metadata, expected_name=station_name)
168
- _check_metadata_sensor_name(metadata)
169
- _check_metadata_measurement_interval(metadata)
170
- check_metadata_reader(metadata)
171
-
172
-
173
151
  #### --------------------------------------------------------------------------.
174
- #### Metadata Archive Missing Information
152
+ #### Geolocation Checks
175
153
 
176
154
 
177
155
  def _check_lonlat_type(longitude, latitude):
@@ -185,16 +163,18 @@ def _check_lonlat_type(longitude, latitude):
185
163
  raise ValueError("Unspecified longitude and latitude coordinates.")
186
164
 
187
165
 
188
- def _check_lonlat_validity(longitude, latitude):
166
+ def _check_lonlat_validity(longitude, latitude, raise_error_if_unknown=True):
189
167
  if longitude == -9999 or latitude == -9999:
190
- raise ValueError("Missing lat lon coordinates (-9999).")
168
+ if raise_error_if_unknown:
169
+ raise ValueError("Missing lat lon coordinates (-9999).")
170
+ return
191
171
  if longitude > 180 or longitude < -180:
192
172
  raise ValueError("Invalid longitude (outside [-180, 180])")
193
173
  if latitude > 90 or latitude < -90:
194
174
  raise ValueError("Invalid latitude (outside [-90, 90])")
195
175
 
196
176
 
197
- def check_station_metadata_geolocation(metadata) -> None:
177
+ def check_station_metadata_geolocation(metadata, raise_error_if_unknown=True) -> None:
198
178
  """Identify metadata with missing or wrong geolocation."""
199
179
  # Get longitude, latitude and platform type
200
180
  longitude = metadata.get("longitude")
@@ -209,54 +189,11 @@ def check_station_metadata_geolocation(metadata) -> None:
209
189
  raise ValueError("For mobile platform_type, specify latitude and longitude -9999")
210
190
  # - If fixed platform
211
191
  else:
212
- _check_lonlat_validity(longitude=longitude, latitude=latitude)
213
-
214
-
215
- def identify_missing_metadata_coords(metadata_filepaths: str) -> None:
216
- """Identify missing coordinates.
217
-
218
- Parameters
219
- ----------
220
- metadata_filepaths : str
221
- Input YAML file path.
222
-
223
- Raises
224
- ------
225
- TypeError
226
- Error if ``latitude`` or ``longitude`` coordinates are not present or are wrongly formatted.
227
-
228
- """
229
- for filepath in metadata_filepaths:
230
- metadata = read_yaml(filepath)
231
- check_station_metadata_geolocation(metadata)
232
-
233
-
234
- def identify_empty_metadata_keys(metadata_filepaths: list, keys: Union[str, list]) -> None:
235
- """Identify empty metadata keys.
236
-
237
- Parameters
238
- ----------
239
- metadata_filepaths : str
240
- Input YAML file path.
241
- keys : Union[str,list]
242
- Attributes to verify the presence.
243
- """
244
- if isinstance(keys, str):
245
- keys = [keys]
246
-
247
- for filepath in metadata_filepaths:
248
- for key in keys:
249
- metadata = read_yaml(filepath)
250
- if len(str(metadata.get(key, ""))) == 0: # ensure is string to avoid error
251
- print(f"Empty {key} at: ", filepath)
252
-
253
-
254
- #### --------------------------------------------------------------------------.
255
- #### Check Metadata Archive
192
+ _check_lonlat_validity(longitude=longitude, latitude=latitude, raise_error_if_unknown=raise_error_if_unknown)
256
193
 
257
194
 
258
- def check_metadata_archive_keys(metadata_archive_dir: Optional[str] = None) -> bool:
259
- """Check that all metadata files have valid keys.
195
+ def check_metadata_archive_geolocation(metadata_archive_dir: Optional[str] = None, raise_error_if_unknown=True):
196
+ """Check the metadata files have missing or wrong geolocation..
260
197
 
261
198
  Parameters
262
199
  ----------
@@ -293,17 +230,39 @@ def check_metadata_archive_keys(metadata_archive_dir: Optional[str] = None) -> b
293
230
  station_name=station_name,
294
231
  )
295
232
  try:
296
- _check_metadata_keys(metadata)
233
+ check_station_metadata_geolocation(metadata, raise_error_if_unknown=raise_error_if_unknown)
297
234
  except Exception as e:
298
- print(f"Error for {data_source} {campaign_name} {station_name}.")
299
- print(f"The error is: {e}.")
300
235
  is_valid = False
301
-
236
+ print(f"Missing information for {data_source} {campaign_name} {station_name}.")
237
+ print(f"The error is: {e}.")
302
238
  return is_valid
303
239
 
304
240
 
305
- def check_metadata_archive_campaign_name(metadata_archive_dir: Optional[str] = None) -> bool:
306
- """Check metadata ``campaign_name``.
241
+ ####------------------------------------------------------------------------------------------------------------.
242
+ #### Metadata Station and Archive Checking Routines
243
+ def check_station_metadata(data_source, campaign_name, station_name, metadata_archive_dir=None):
244
+ """Check DISDRODB metadata compliance."""
245
+ from disdrodb.l0.l0_reader import check_metadata_reader
246
+
247
+ metadata = read_station_metadata(
248
+ data_source=data_source,
249
+ campaign_name=campaign_name,
250
+ station_name=station_name,
251
+ metadata_archive_dir=metadata_archive_dir,
252
+ )
253
+ _check_metadata_keys(metadata)
254
+ _check_metadata_values(metadata)
255
+ _check_metadata_campaign_name(metadata, expected_name=campaign_name)
256
+ _check_metadata_data_source(metadata, expected_name=data_source)
257
+ _check_metadata_station_name(metadata, expected_name=station_name)
258
+ _check_metadata_sensor_name(metadata)
259
+ _check_metadata_measurement_interval(metadata)
260
+ check_station_metadata_geolocation(metadata, raise_error_if_unknown=False)
261
+ check_metadata_reader(metadata)
262
+
263
+
264
+ def check_metadata_archive(metadata_archive_dir: Optional[str] = None, raise_error=False):
265
+ """Check the archive metadata compliance.
307
266
 
308
267
  Parameters
309
268
  ----------
@@ -312,6 +271,9 @@ def check_metadata_archive_campaign_name(metadata_archive_dir: Optional[str] = N
312
271
  The directory path must end with ``<...>/DISDRODB``.
313
272
  If ``None``, it uses the ``metadata_archive_dir`` path specified
314
273
  in the DISDRODB active configuration.
274
+ raise_error: bool (optional)
275
+ Whether to raise an error and interrupt the archive check if a
276
+ metadata is not compliant. The default value is ``False``.
315
277
 
316
278
  Returns
317
279
  -------
@@ -332,24 +294,74 @@ def check_metadata_archive_campaign_name(metadata_archive_dir: Optional[str] = N
332
294
  data_source = infer_data_source_from_path(filepath)
333
295
  campaign_name = infer_campaign_name_from_path(filepath)
334
296
  station_name = os.path.basename(filepath).replace(".yml", "")
335
-
336
- metadata = read_station_metadata(
337
- metadata_archive_dir=metadata_archive_dir,
338
- data_source=data_source,
339
- campaign_name=campaign_name,
340
- station_name=station_name,
341
- )
297
+ # Check compliance
342
298
  try:
343
- _check_metadata_campaign_name(metadata, expected_name=campaign_name)
299
+ check_station_metadata(
300
+ metadata_archive_dir=metadata_archive_dir,
301
+ data_source=data_source,
302
+ campaign_name=campaign_name,
303
+ station_name=station_name,
304
+ )
344
305
  except Exception as e:
345
306
  is_valid = False
346
- print(f"Error for {data_source} {campaign_name} {station_name}.")
347
- print(f"The error is: {e}.")
307
+ msg = f"Error for {data_source} {campaign_name} {station_name}."
308
+ msg = msg + f"The error is: {e}."
309
+ if raise_error:
310
+ raise ValueError(msg)
311
+ print(msg)
312
+
348
313
  return is_valid
349
314
 
350
315
 
351
- def check_metadata_archive_data_source(metadata_archive_dir: Optional[str] = None) -> bool:
352
- """Check metadata ``data_source``.
316
+ ####-----------------------------------------------------------------------------------------------.
317
+ #### Utilities
318
+
319
+
320
+ def identify_missing_metadata_coords(metadata_filepaths: str) -> None:
321
+ """Identify missing coordinates.
322
+
323
+ Parameters
324
+ ----------
325
+ metadata_filepaths : str
326
+ Input YAML file path.
327
+
328
+ Raises
329
+ ------
330
+ TypeError
331
+ Error if ``latitude`` or ``longitude`` coordinates are not present or are wrongly formatted.
332
+
333
+ """
334
+ for filepath in metadata_filepaths:
335
+ metadata = read_yaml(filepath)
336
+ check_station_metadata_geolocation(metadata)
337
+
338
+
339
+ def identify_empty_metadata_keys(metadata_filepaths: list, keys: Union[str, list]) -> None:
340
+ """Identify empty metadata keys.
341
+
342
+ Parameters
343
+ ----------
344
+ metadata_filepaths : str
345
+ Input YAML file path.
346
+ keys : Union[str,list]
347
+ Attributes to verify the presence.
348
+ """
349
+ if isinstance(keys, str):
350
+ keys = [keys]
351
+
352
+ for filepath in metadata_filepaths:
353
+ for key in keys:
354
+ metadata = read_yaml(filepath)
355
+ if len(str(metadata.get(key, ""))) == 0: # ensure is string to avoid error
356
+ print(f"Empty {key} at: ", filepath)
357
+
358
+
359
+ #### --------------------------------------------------------------------------.
360
+ #### Metadata Archive Utilities
361
+
362
+
363
+ def check_metadata_archive_keys(metadata_archive_dir: Optional[str] = None) -> bool:
364
+ """Check that all metadata files have valid keys.
353
365
 
354
366
  Parameters
355
367
  ----------
@@ -386,16 +398,17 @@ def check_metadata_archive_data_source(metadata_archive_dir: Optional[str] = Non
386
398
  station_name=station_name,
387
399
  )
388
400
  try:
389
- _check_metadata_data_source(metadata, expected_name=data_source)
401
+ _check_metadata_keys(metadata)
390
402
  except Exception as e:
391
- is_valid = False
392
403
  print(f"Error for {data_source} {campaign_name} {station_name}.")
393
404
  print(f"The error is: {e}.")
405
+ is_valid = False
406
+
394
407
  return is_valid
395
408
 
396
409
 
397
- def check_metadata_archive_sensor_name(metadata_archive_dir: Optional[str] = None) -> bool:
398
- """Check metadata ``sensor_name``.
410
+ def check_metadata_archive_campaign_name(metadata_archive_dir: Optional[str] = None) -> bool:
411
+ """Check metadata ``campaign_name``.
399
412
 
400
413
  Parameters
401
414
  ----------
@@ -432,7 +445,7 @@ def check_metadata_archive_sensor_name(metadata_archive_dir: Optional[str] = Non
432
445
  station_name=station_name,
433
446
  )
434
447
  try:
435
- _check_metadata_sensor_name(metadata)
448
+ _check_metadata_campaign_name(metadata, expected_name=campaign_name)
436
449
  except Exception as e:
437
450
  is_valid = False
438
451
  print(f"Error for {data_source} {campaign_name} {station_name}.")
@@ -440,8 +453,8 @@ def check_metadata_archive_sensor_name(metadata_archive_dir: Optional[str] = Non
440
453
  return is_valid
441
454
 
442
455
 
443
- def check_metadata_archive_station_name(metadata_archive_dir: Optional[str] = None) -> bool:
444
- """Check metadata ``station_name``.
456
+ def check_metadata_archive_data_source(metadata_archive_dir: Optional[str] = None) -> bool:
457
+ """Check metadata ``data_source``.
445
458
 
446
459
  Parameters
447
460
  ----------
@@ -478,7 +491,7 @@ def check_metadata_archive_station_name(metadata_archive_dir: Optional[str] = No
478
491
  station_name=station_name,
479
492
  )
480
493
  try:
481
- _check_metadata_station_name(metadata, expected_name=station_name)
494
+ _check_metadata_data_source(metadata, expected_name=data_source)
482
495
  except Exception as e:
483
496
  is_valid = False
484
497
  print(f"Error for {data_source} {campaign_name} {station_name}.")
@@ -486,8 +499,8 @@ def check_metadata_archive_station_name(metadata_archive_dir: Optional[str] = No
486
499
  return is_valid
487
500
 
488
501
 
489
- def check_metadata_archive_reader(metadata_archive_dir: Optional[str] = None) -> bool:
490
- """Check if the ``reader`` key is available and there is the associated reader.
502
+ def check_metadata_archive_sensor_name(metadata_archive_dir: Optional[str] = None) -> bool:
503
+ """Check metadata ``sensor_name``.
491
504
 
492
505
  Parameters
493
506
  ----------
@@ -502,8 +515,6 @@ def check_metadata_archive_reader(metadata_archive_dir: Optional[str] = None) ->
502
515
  bool
503
516
  If the check succeeds, the result is ``True``, otherwise ``False``.
504
517
  """
505
- from disdrodb.l0.l0_reader import check_metadata_reader
506
-
507
518
  is_valid = True
508
519
  metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
509
520
  list_metadata_paths = get_list_metadata(
@@ -526,7 +537,7 @@ def check_metadata_archive_reader(metadata_archive_dir: Optional[str] = None) ->
526
537
  station_name=station_name,
527
538
  )
528
539
  try:
529
- check_metadata_reader(metadata)
540
+ _check_metadata_sensor_name(metadata)
530
541
  except Exception as e:
531
542
  is_valid = False
532
543
  print(f"Error for {data_source} {campaign_name} {station_name}.")
@@ -534,8 +545,8 @@ def check_metadata_archive_reader(metadata_archive_dir: Optional[str] = None) ->
534
545
  return is_valid
535
546
 
536
547
 
537
- def check_metadata_archive(metadata_archive_dir: Optional[str] = None, raise_error=False):
538
- """Check the archive metadata compliance.
548
+ def check_metadata_archive_station_name(metadata_archive_dir: Optional[str] = None) -> bool:
549
+ """Check metadata ``station_name``.
539
550
 
540
551
  Parameters
541
552
  ----------
@@ -544,9 +555,6 @@ def check_metadata_archive(metadata_archive_dir: Optional[str] = None, raise_err
544
555
  The directory path must end with ``<...>/DISDRODB``.
545
556
  If ``None``, it uses the ``metadata_archive_dir`` path specified
546
557
  in the DISDRODB active configuration.
547
- raise_error: bool (optional)
548
- Whether to raise an error and interrupt the archive check if a
549
- metadata is not compliant. The default value is ``False``.
550
558
 
551
559
  Returns
552
560
  -------
@@ -567,27 +575,24 @@ def check_metadata_archive(metadata_archive_dir: Optional[str] = None, raise_err
567
575
  data_source = infer_data_source_from_path(filepath)
568
576
  campaign_name = infer_campaign_name_from_path(filepath)
569
577
  station_name = os.path.basename(filepath).replace(".yml", "")
570
- # Check compliance
578
+
579
+ metadata = read_station_metadata(
580
+ metadata_archive_dir=metadata_archive_dir,
581
+ data_source=data_source,
582
+ campaign_name=campaign_name,
583
+ station_name=station_name,
584
+ )
571
585
  try:
572
- check_station_metadata(
573
- metadata_archive_dir=metadata_archive_dir,
574
- data_source=data_source,
575
- campaign_name=campaign_name,
576
- station_name=station_name,
577
- )
586
+ _check_metadata_station_name(metadata, expected_name=station_name)
578
587
  except Exception as e:
579
588
  is_valid = False
580
- msg = f"Error for {data_source} {campaign_name} {station_name}."
581
- msg = msg + f"The error is: {e}."
582
- if raise_error:
583
- raise ValueError(msg)
584
- print(msg)
585
-
589
+ print(f"Error for {data_source} {campaign_name} {station_name}.")
590
+ print(f"The error is: {e}.")
586
591
  return is_valid
587
592
 
588
593
 
589
- def check_metadata_archive_geolocation(metadata_archive_dir: Optional[str] = None):
590
- """Check the metadata files have missing or wrong geolocation..
594
+ def check_metadata_archive_reader(metadata_archive_dir: Optional[str] = None) -> bool:
595
+ """Check if the ``reader`` key is available and there is the associated reader.
591
596
 
592
597
  Parameters
593
598
  ----------
@@ -602,6 +607,8 @@ def check_metadata_archive_geolocation(metadata_archive_dir: Optional[str] = Non
602
607
  bool
603
608
  If the check succeeds, the result is ``True``, otherwise ``False``.
604
609
  """
610
+ from disdrodb.l0.l0_reader import check_metadata_reader
611
+
605
612
  is_valid = True
606
613
  metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
607
614
  list_metadata_paths = get_list_metadata(
@@ -624,9 +631,9 @@ def check_metadata_archive_geolocation(metadata_archive_dir: Optional[str] = Non
624
631
  station_name=station_name,
625
632
  )
626
633
  try:
627
- check_station_metadata_geolocation(metadata)
634
+ check_metadata_reader(metadata)
628
635
  except Exception as e:
629
636
  is_valid = False
630
- print(f"Missing information for {data_source} {campaign_name} {station_name}.")
637
+ print(f"Error for {data_source} {campaign_name} {station_name}.")
631
638
  print(f"The error is: {e}.")
632
639
  return is_valid
@@ -60,8 +60,6 @@ def infer_altitude(latitude, longitude, dem="aster30m"):
60
60
  ----------
61
61
  https://www.opentopodata.org/api/
62
62
  """
63
- import requests
64
-
65
63
  url = f"https://api.opentopodata.org/v1/{dem}?locations={latitude},{longitude}"
66
64
  r = requests.get(url)
67
65