disdrodb 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. disdrodb/_version.py +2 -2
  2. disdrodb/data_transfer/download_data.py +145 -14
  3. disdrodb/l0/configs/LPM/bins_diameter.yml +3 -3
  4. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +4 -0
  5. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +16 -0
  6. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +41 -0
  7. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +40 -0
  8. disdrodb/l0/manuals/SWS250.pdf +0 -0
  9. disdrodb/l0/manuals/VPF730.pdf +0 -0
  10. disdrodb/l0/manuals/VPF750.pdf +0 -0
  11. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +107 -0
  12. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +125 -0
  13. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +138 -0
  14. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +9 -0
  15. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +67 -0
  16. disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +31 -14
  17. disdrodb/l1/encoding_attrs.py +9 -2
  18. disdrodb/l1/routines.py +8 -7
  19. disdrodb/utils/dataframe.py +2 -2
  20. disdrodb/utils/directories.py +14 -2
  21. disdrodb/utils/xarray.py +83 -0
  22. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/METADATA +1 -1
  23. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/RECORD +28 -21
  24. /disdrodb/l0/readers/{PARSIVEL → PARSIVEL2}/KIT/BURKINA_FASO.py +0 -0
  25. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/WHEEL +0 -0
  26. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/entry_points.txt +0 -0
  27. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/licenses/LICENSE +0 -0
  28. {disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/top_level.txt +0 -0
disdrodb/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
17
17
  __version_tuple__: VERSION_TUPLE
18
18
  version_tuple: VERSION_TUPLE
19
19
 
20
- __version__ = version = '0.1.1'
21
- __version_tuple__ = version_tuple = (0, 1, 1)
20
+ __version__ = version = '0.1.2'
21
+ __version_tuple__ = version_tuple = (0, 1, 2)
disdrodb/data_transfer/download_data.py CHANGED
@@ -21,6 +21,8 @@
21
21
  import logging
22
22
  import os
23
23
  import shutil
24
+ import subprocess
25
+ import urllib.parse
24
26
  from typing import Optional, Union
25
27
 
26
28
  import click
@@ -213,7 +215,7 @@ def download_station(
213
215
  check_exists=True,
214
216
  )
215
217
  # Download data
216
- _download_station_data(metadata_filepath, data_archive_dir=data_archive_dir, force=force)
218
+ download_station_data(metadata_filepath, data_archive_dir=data_archive_dir, force=force)
217
219
 
218
220
 
219
221
  def _is_valid_disdrodb_data_url(disdrodb_data_url):
@@ -228,13 +230,25 @@ def _extract_station_files(zip_filepath, station_dir):
228
230
  os.remove(zip_filepath)
229
231
 
230
232
 
231
- def _download_station_data(metadata_filepath: str, data_archive_dir: str, force: bool = False) -> None:
233
+ def check_consistent_station_name(metadata_filepath, station_name):
234
+ """Check consistent station_name between YAML file name and metadata key."""
235
+ # Check consistent station name
236
+ expected_station_name = os.path.basename(metadata_filepath).replace(".yml", "")
237
+ if station_name and str(station_name) != str(expected_station_name):
238
+ raise ValueError(f"Inconsistent station_name values in the {metadata_filepath} file. Download aborted.")
239
+ return station_name
240
+
241
+
242
+ def download_station_data(metadata_filepath: str, data_archive_dir: str, force: bool = False) -> None:
232
243
  """Download and unzip the station data .
233
244
 
234
245
  Parameters
235
246
  ----------
236
247
  metadata_filepath : str
237
248
  Metadata file path.
249
+ data_archive_dir : str, optional
250
+ DISDRODB Data Archive directory. Format: ``<...>/DISDRODB``.
251
+ If ``None`` (the default), the disdrodb config variable ``data_archive_dir`` is used.
238
252
  force : bool, optional
239
253
  If ``True``, delete existing files and redownload them. The default value is ``False``.
240
254
 
@@ -247,7 +261,7 @@ def _download_station_data(metadata_filepath: str, data_archive_dir: str, force:
247
261
  campaign_name = metadata_dict["campaign_name"]
248
262
  station_name = metadata_dict["station_name"]
249
263
  station_name = check_consistent_station_name(metadata_filepath, station_name)
250
- # Define the destination local filepath path
264
+ # Define the path to the station RAW data directory
251
265
  station_dir = define_station_dir(
252
266
  data_archive_dir=data_archive_dir,
253
267
  data_source=data_source,
@@ -259,19 +273,136 @@ def _download_station_data(metadata_filepath: str, data_archive_dir: str, force:
259
273
  disdrodb_data_url = metadata_dict.get("disdrodb_data_url", None)
260
274
  if not _is_valid_disdrodb_data_url(disdrodb_data_url):
261
275
  raise ValueError(f"Invalid disdrodb_data_url '{disdrodb_data_url}' for station {station_name}")
262
- # Download file
263
- zip_filepath = _download_file_from_url(disdrodb_data_url, dst_dir=station_dir, force=force)
264
- # Extract the stations files from the downloaded station.zip file
265
- _extract_station_files(zip_filepath, station_dir=station_dir)
266
276
 
277
+ # Download files
278
+ # - Option 1: Zip file from Zenodo containing all station raw data
279
+ if disdrodb_data_url.startswith("https://zenodo.org/"):
280
+ download_zenodo_zip_file(url=disdrodb_data_url, dst_dir=station_dir, force=force)
281
+ # - Option 2: Recursive download from a web server via HTTP or HTTPS.
282
+ elif disdrodb_data_url.startswith("http"):
283
+ download_web_server_data(url=disdrodb_data_url, dst_dir=station_dir, force=force, verbose=True)
284
+ else:
285
+ raise NotImplementedError(f"Open a GitHub Issue to enable the download of data from {disdrodb_data_url}.")
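# Illustrative (hypothetical) metadata values routed by the dispatch above, for orientation only:
#   disdrodb_data_url: "https://zenodo.org/records/<id>/files/<station>.zip"  --> Zenodo zip download
#   disdrodb_data_url: "https://some.webserver.org/data/STATION_X/"           --> recursive wget download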
267
286
 
268
- def check_consistent_station_name(metadata_filepath, station_name):
269
- """Check consistent station_name between YAML file name and metadata key."""
270
- # Check consistent station name
271
- expected_station_name = os.path.basename(metadata_filepath).replace(".yml", "")
272
- if station_name and str(station_name) != str(expected_station_name):
273
- raise ValueError(f"Inconsistent station_name values in the {metadata_filepath} file. Download aborted.")
274
- return station_name
287
+
288
+ ####-----------------------------------------------------------------------------------------.
289
+ #### Download from Web Server via HTTP or HTTPS
290
+
291
+
292
+ def download_web_server_data(url: str, dst_dir: str, force=True, verbose=True) -> None:
293
+ """Download data from a web server via HTTP or HTTPS.
294
+
295
+ Use the system's wget command to recursively download all files and subdirectories
296
+ under the given HTTPS “directory” URL. Works on both Windows and Linux, provided
297
+ that wget is installed and on the PATH.
298
+
299
+ 1. Ensure wget is available.
300
+ 2. Normalize URL to end with '/'.
301
+ 3. Compute cut-dirs so that the remote directory's files land directly in `dst_dir`.
302
+ 4. Build and run the wget command.
303
+
304
+ Example:
305
+ download_web_server_data("https://ruisdael.citg.tudelft.nl/parsivel/PAR001_Cabauw/2021/202101/", dst_dir="202101")
306
+ # → Creates a local folder "202101/" with all files and subfolders.
307
+ """
308
+ # 1. Ensure wget exists
309
+ ensure_wget_available()
310
+
311
+ # 2. Normalize URL
312
+ url = ensure_trailing_slash(url)
313
+
314
+ # 3. Compute cut-dirs so that remote files land directly in dst_dir
315
+ cut_dirs = compute_cut_dirs(url)
316
+
317
+ # 4. Create destination directory if needed
318
+ os.makedirs(dst_dir, exist_ok=True)
319
+
320
+ # 5. Build wget command
321
+ cmd = build_webserver_wget_command(url, cut_dirs=cut_dirs, dst_dir=dst_dir, force=force, verbose=verbose)
322
+
323
+ # 6. Run wget command
324
+ try:
325
+ subprocess.run(cmd, check=True)
326
+ except subprocess.CalledProcessError as e:
327
+ raise subprocess.CalledProcessError(
328
+ returncode=e.returncode,
329
+ cmd=e.cmd,
330
+ output=e.output,
331
+ stderr=e.stderr,
332
+ )
333
+
334
+
335
+ def ensure_wget_available() -> None:
336
+ """Raise FileNotFoundError if 'wget' is not on the system PATH."""
337
+ if shutil.which("wget") is None:
338
+ raise FileNotFoundError("The WGET software was not found. Please install WGET or add it to PATH.")
339
+
340
+
341
+ def ensure_trailing_slash(url: str) -> str:
342
+ """Return `url` guaranteed to end with a slash."""
343
+ return url if url.endswith("/") else url.rstrip("/") + "/"
344
+
345
+
346
+ def compute_cut_dirs(url: str) -> int:
347
+ """Compute the wget cut_dirs value to download directly in `dst_dir`.
348
+
349
+ Given a URL ending with '/', compute the total number of path segments.
350
+ By returning len(segments), we strip away all of them—so that files
351
+ within that final directory land directly in `dst_dir` without creating
352
+ an extra subfolder.
353
+ """
354
+ parsed = urllib.parse.urlparse(url)
355
+ path = parsed.path.strip("/") # remove leading/trailing '/'
356
+ segments = path.split("/") if path else []
357
+ return len(segments)
358
+
359
+
360
+ def build_webserver_wget_command(url: str, cut_dirs: int, dst_dir: str, force: bool, verbose: bool) -> list[str]:
361
+ """Construct the wget command list for subprocess.run.
362
+
363
+ Notes
364
+ -----
365
+ The following wget arguments are used
366
+ - -q : quiet mode (no detailed progress)
367
+ - -r : recursive
368
+ - -np : no parent
369
+ - -nH : no host directories
370
+ - --timestamping: download missing files or when remote version is newer
371
+ - --cut-dirs : strip all remote path segments so files land directly in dst_dir
372
+ - -P dst_dir : download into `dst_dir`
373
+ - url
374
+ """
375
+ cmd = ["wget"]
376
+ if not verbose:
377
+ cmd.append("-q")
378
+ cmd += [
379
+ "-r",
380
+ "-np",
381
+ "-nH",
382
+ f"--cut-dirs={cut_dirs}",
383
+ ]
384
+ if force:
385
+ cmd.append("--timestamping") # -N
386
+
387
+ # Define source and destination directory
388
+ cmd += [
389
+ "-P",
390
+ dst_dir,
391
+ url,
392
+ ]
393
+ return cmd
394
+
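For orientation, a minimal sketch of what these helpers assemble (the URL is taken from the docstring example above; the destination directory is illustrative):

url = "https://ruisdael.citg.tudelft.nl/parsivel/PAR001_Cabauw/2021/202101/"
cut_dirs = compute_cut_dirs(url)  # 4 path segments: parsivel, PAR001_Cabauw, 2021, 202101
cmd = build_webserver_wget_command(url, cut_dirs=cut_dirs, dst_dir="202101", force=False, verbose=True)
# cmd holds "wget", "-r", "-np", "-nH", "--cut-dirs=4", "-P", "202101" and the URL;
# "-q" and "--timestamping" are added or omitted depending on the verbose/force settings.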
395
+
396
+ ####--------------------------------------------------------------------.
397
+ #### Download from Zenodo
398
+
399
+
400
+ def download_zenodo_zip_file(url, dst_dir, force):
401
+ """Download zip file from zenodo and extract station raw data."""
402
+ # Download zip file
403
+ zip_filepath = _download_file_from_url(url, dst_dir=dst_dir, force=force)
404
+ # Extract the stations files from the downloaded station.zip file
405
+ _extract_station_files(zip_filepath, station_dir=dst_dir)
275
406
 
276
407
 
277
408
  def _download_file_from_url(url: str, dst_dir: str, force: bool = False) -> str:
disdrodb/l0/configs/LPM/bins_diameter.yml CHANGED
@@ -20,7 +20,7 @@ center:
20
20
  18: 6.75
21
21
  19: 7.25
22
22
  20: 7.75
23
- 21: 54
23
+ 21: 9
24
24
  bounds:
25
25
  0:
26
26
  - 0.125
@@ -87,7 +87,7 @@ bounds:
87
87
  - 8.0
88
88
  21:
89
89
  - 8.0
90
- - 100
90
+ - 10.0
91
91
  width:
92
92
  0: 0.125
93
93
  1: 0.125
@@ -110,4 +110,4 @@ width:
110
110
  18: 0.5
111
111
  19: 0.5
112
112
  20: 0.5
113
- 21: 92
113
+ 21: 2
disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml CHANGED
@@ -37,3 +37,7 @@ list_particles: "str"
37
37
  raw_drop_concentration: "str"
38
38
  raw_drop_average_velocity: "str"
39
39
  raw_drop_number: "str"
40
+ air_temperature: "float32"
41
+ relative_humidity: "float32"
42
+ wind_speed: "float32"
43
+ wind_direction: "float32"
disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml CHANGED
@@ -158,3 +158,19 @@ raw_drop_number:
158
158
  description: Drop counts per diameter and velocity class
159
159
  long_name: Raw drop number
160
160
  units: ""
161
+ air_temperature:
162
+ description: "Air temperature in degrees Celsius (C)"
163
+ long_name: Air temperature
164
+ units: "C"
165
+ relative_humidity:
166
+ description: "Relative humidity in percent (%)"
167
+ long_name: Relative humidity
168
+ units: "%"
169
+ wind_speed:
170
+ description: "Wind speed in m/s"
171
+ long_name: Wind speed
172
+ units: "m/s"
173
+ wind_direction:
174
+ description: "Wind direction in degrees (0-360)"
175
+ long_name: Wind direction
176
+ units: "degrees"
disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml CHANGED
@@ -331,3 +331,44 @@ raw_drop_number:
331
331
  - 5000
332
332
  - 32
333
333
  - 32
334
+ air_temperature:
335
+ dtype: uint16
336
+ scale_factor: 0.1
337
+ add_offset: -99.9
338
+ zlib: true
339
+ complevel: 3
340
+ shuffle: true
341
+ fletcher32: false
342
+ contiguous: false
343
+ _FillValue: 65535
344
+ chunksizes: 5000
345
+ relative_humidity:
346
+ dtype: uint16
347
+ scale_factor: 0.01
348
+ zlib: true
349
+ complevel: 3
350
+ shuffle: true
351
+ fletcher32: false
352
+ contiguous: false
353
+ _FillValue: 65535
354
+ chunksizes: 5000
355
+ wind_speed:
356
+ dtype: uint16
357
+ scale_factor: 0.1
358
+ add_offset: -99.9
359
+ zlib: true
360
+ complevel: 3
361
+ shuffle: true
362
+ fletcher32: false
363
+ contiguous: false
364
+ _FillValue: 65535
365
+ chunksizes: 5000
366
+ wind_direction:
367
+ dtype: uint16
368
+ zlib: true
369
+ complevel: 3
370
+ shuffle: true
371
+ fletcher32: false
372
+ contiguous: false
373
+ _FillValue: 65535
374
+ chunksizes: 5000
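For context, these entries follow the standard netCDF/CF scale-and-offset packing: the file stores round((value - add_offset) / scale_factor) as uint16, with 65535 reserved as the fill value, and decoding recovers value as packed * scale_factor + add_offset. A minimal round-trip sketch for the air_temperature settings above (the temperature value is illustrative):

scale_factor, add_offset = 0.1, -99.9
t_celsius = 21.3
packed = round((t_celsius - add_offset) / scale_factor)  # 1212, stored as uint16
unpacked = packed * scale_factor + add_offset            # ~21.3 degC recovered on decoding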
disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml CHANGED
@@ -379,3 +379,43 @@ raw_drop_number:
379
379
  - diameter_bin_center
380
380
  n_values: 1024
381
381
  field_number: "93"
382
+ air_temperature:
383
+ n_digits: 4
384
+ n_characters: 5
385
+ n_decimals: 1
386
+ n_naturals: 2
387
+ data_range:
388
+ - -40
389
+ - 70
390
+ nan_flags: 99999
391
+ field_number: "521"
392
+ relative_humidity:
393
+ n_digits: 5
394
+ n_characters: 5
395
+ n_decimals: 0
396
+ n_naturals: 5
397
+ data_range:
398
+ - 0
399
+ - 99999
400
+ nan_flags: 99999
401
+ field_number: "522"
402
+ wind_speed:
403
+ n_digits: 3
404
+ n_characters: 4
405
+ n_decimals: 1
406
+ n_naturals: 2
407
+ data_range:
408
+ - 0
409
+ - 60
410
+ nan_flags: null
411
+ field_number: "523"
412
+ wind_direction:
413
+ n_digits: 3
414
+ n_characters: 3
415
+ n_decimals: 0
416
+ n_naturals: 3
417
+ data_range:
418
+ - 0
419
+ - 360
420
+ nan_flags: 999
421
+ field_number: "524"
disdrodb/l0/manuals/SWS250.pdf ADDED (binary file)
disdrodb/l0/manuals/VPF730.pdf ADDED (binary file)
disdrodb/l0/manuals/VPF750.pdf ADDED (binary file)
disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py ADDED
@@ -0,0 +1,107 @@
1
+ #!/usr/bin/env python3
2
+ # -----------------------------------------------------------------------------.
3
+ # Copyright (c) 2021-2023 DISDRODB developers
4
+ #
5
+ # This program is free software: you can redistribute it and/or modify
6
+ # it under the terms of the GNU General Public License as published by
7
+ # the Free Software Foundation, either version 3 of the License, or
8
+ # (at your option) any later version.
9
+ #
10
+ # This program is distributed in the hope that it will be useful,
11
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13
+ # GNU General Public License for more details.
14
+ #
15
+ # You should have received a copy of the GNU General Public License
16
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
17
+ # -----------------------------------------------------------------------------.
18
+ import pandas as pd
19
+
20
+ from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
21
+ from disdrodb.l0.l0a_processing import read_raw_text_file
22
+
23
+
24
+ @is_documented_by(reader_generic_docstring)
25
+ def reader(
26
+ filepath,
27
+ logger=None,
28
+ ):
29
+ """Reader."""
30
+ ##------------------------------------------------------------------------.
31
+ #### Define column names
32
+ column_names = ["time", "TO_BE_SPLITTED"]
33
+
34
+ ##------------------------------------------------------------------------.
35
+ #### Define reader options
36
+ reader_kwargs = {}
37
+ # - Define delimiter
38
+ reader_kwargs["delimiter"] = ";"
39
+ # - Skip first row as columns names
40
+ reader_kwargs["header"] = None
41
+ # Skip first row as columns names
42
+ reader_kwargs["header"] = None
43
+ reader_kwargs["skiprows"] = 0
44
+ # - Skip file with encoding errors
45
+ reader_kwargs["encoding_errors"] = "ignore"
46
+ # - Avoid first column to become df index !!!
47
+ reader_kwargs["index_col"] = False
48
+ # - Define behaviour when encountering bad lines
49
+ reader_kwargs["on_bad_lines"] = "skip"
50
+ # - Define reader engine
51
+ # - C engine is faster
52
+ # - Python engine is more feature-complete
53
+ reader_kwargs["engine"] = "python"
54
+ # - Define on-the-fly decompression of on-disk data
55
+ # - Available: gzip, bz2, zip
56
+ reader_kwargs["compression"] = "infer"
57
+ # - Strings to recognize as NA/NaN and replace with standard NA flags
58
+ # - Already included: '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN',
59
+ # '-NaN', '-nan', '1.#IND', '1.#QNAN', '<NA>', 'N/A',
60
+ # 'NA', 'NULL', 'NaN', 'n/a', 'nan', 'null'
61
+ reader_kwargs["na_values"] = ["na", "", "error", "NA", "-.-"]
62
+
63
+ ##------------------------------------------------------------------------.
64
+ #### Read the data
65
+ df = read_raw_text_file(
66
+ filepath=filepath,
67
+ column_names=column_names,
68
+ reader_kwargs=reader_kwargs,
69
+ logger=logger,
70
+ )
71
+
72
+ ##------------------------------------------------------------------------.
73
+ #### Adapt the dataframe to adhere to DISDRODB L0 standards
74
+ # Convert time column to datetime
75
+ df_time = pd.to_datetime(df["time"], format="%Y%m%d%H%M%S", errors="coerce")
76
+
77
+ # Split the 'TO_BE_SPLITTED' column
78
+ df = df["TO_BE_SPLITTED"].str.split(",", expand=True, n=9)
79
+
80
+ # Assign column names
81
+ columns_names = [
82
+ "station_name",
83
+ "sensor_status",
84
+ "sensor_temperature",
85
+ "number_particles",
86
+ "rainfall_rate_32bit",
87
+ "reflectivity_16bit",
88
+ "mor_visibility",
89
+ "weather_code_synop_4680",
90
+ "weather_code_synop_4677",
91
+ "raw_drop_number",
92
+ ]
93
+ df.columns = columns_names
94
+
95
+ # Add the time column
96
+ df["time"] = df_time
97
+
98
+ # Drop columns not agreeing with DISDRODB L0 standards
99
+ df = df.drop(columns=["station_name"])
100
+
101
+ # Drop rows with invalid values
102
+ # --> Ensure that weather_code_synop_4677 has length 2
103
+ # --> If a previous column is missing it will have 000
104
+ df = df[df["weather_code_synop_4677"].str.len() == 2]
105
+
106
+ # Return the dataframe adhering to DISDRODB L0 standards
107
+ return df
disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py ADDED
@@ -0,0 +1,125 @@
1
+ #!/usr/bin/env python3
2
+ # -----------------------------------------------------------------------------.
3
+ # Copyright (c) 2021-2023 DISDRODB developers
4
+ #
5
+ # This program is free software: you can redistribute it and/or modify
6
+ # it under the terms of the GNU General Public License as published by
7
+ # the Free Software Foundation, either version 3 of the License, or
8
+ # (at your option) any later version.
9
+ #
10
+ # This program is distributed in the hope that it will be useful,
11
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13
+ # GNU General Public License for more details.
14
+ #
15
+ # You should have received a copy of the GNU General Public License
16
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
17
+ # -----------------------------------------------------------------------------.
18
+ import pandas as pd
19
+
20
+ from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
21
+ from disdrodb.l0.l0a_processing import read_raw_text_file
22
+
23
+
24
+ @is_documented_by(reader_generic_docstring)
25
+ def reader(
26
+ filepath,
27
+ logger=None,
28
+ ):
29
+ """Reader."""
30
+ ##------------------------------------------------------------------------.
31
+ #### Define column names
32
+ column_names = ["TO_SPLIT"]
33
+
34
+ ##------------------------------------------------------------------------.
35
+ #### Define reader options
36
+ reader_kwargs = {}
37
+
38
+ # - Define delimiter
39
+ reader_kwargs["delimiter"] = "\\n"
40
+
41
+ # - Skip first row as columns names
42
+ reader_kwargs["header"] = None
43
+
44
+ # - Skip header
45
+ reader_kwargs["skiprows"] = 0
46
+
47
+ # - Define encoding
48
+ reader_kwargs["encoding"] = "ISO-8859-1"
49
+
50
+ # - Avoid first column to become df index !!!
51
+ reader_kwargs["index_col"] = False
52
+
53
+ # - Define behaviour when encountering bad lines
54
+ reader_kwargs["on_bad_lines"] = "skip"
55
+
56
+ # - Define reader engine
57
+ # - C engine is faster
58
+ # - Python engine is more feature-complete
59
+ reader_kwargs["engine"] = "python"
60
+
61
+ # - Define on-the-fly decompression of on-disk data
62
+ # - Available: gzip, bz2, zip
63
+ # reader_kwargs['compression'] = 'xz'
64
+
65
+ # - Strings to recognize as NA/NaN and replace with standard NA flags
66
+ # - Already included: '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN',
67
+ # '-NaN', '-nan', '1.#IND', '1.#QNAN', '<NA>', 'N/A',
68
+ # 'NA', 'NULL', 'NaN', 'n/a', 'nan', 'null'
69
+ reader_kwargs["na_values"] = ["na", "error", "-.-", " NA"]
70
+
71
+ ##------------------------------------------------------------------------.
72
+ #### Read the data
73
+ df = read_raw_text_file(
74
+ filepath=filepath,
75
+ column_names=column_names,
76
+ reader_kwargs=reader_kwargs,
77
+ logger=logger,
78
+ )
79
+
80
+ ##------------------------------------------------------------------------.
81
+ #### Adapt the dataframe to adhere to DISDRODB L0 standards
82
+ # Remove rows with less than 97 characters (empty spectrum --> 97 characters)
83
+ df = df[df["TO_SPLIT"].str.len() >= 97]
84
+
85
+ # Split into columns and assign name
86
+ df = df["TO_SPLIT"].str.split(";", expand=True, n=14)
87
+ columns = [
88
+ "date",
89
+ "time",
90
+ "rainfall_rate_32bit",
91
+ "rainfall_accumulated_32bit",
92
+ "weather_code_synop_4680",
93
+ "weather_code_metar_4678",
94
+ "weather_code_nws",
95
+ "reflectivity_32bit",
96
+ "mor_visibility",
97
+ "laser_amplitude",
98
+ "number_particles",
99
+ "sensor_temperature",
100
+ "sensor_heating_current",
101
+ "sensor_battery_voltage",
102
+ "raw_drop_number",
103
+ ]
104
+ df.columns = columns
105
+
106
+ # Add datetime time column
107
+ df["time"] = df["date"] + "-" + df["time"]
108
+ df["time"] = pd.to_datetime(df["time"], format="%Y/%m/%d-%H:%M:%S", errors="coerce")
109
+ df = df.drop(columns=["date"])
110
+
111
+ # Preprocess the raw spectrum
112
+ # - The '<SPECTRUM>ZERO</SPECTRUM>' indicates no drops detected
113
+ # --> "" generates an array of zeros in L0B processing
114
+ df["raw_drop_number"] = df["raw_drop_number"].str.replace("<SPECTRUM>ZERO</SPECTRUM>", "")
115
+
116
+ # Remove the <SPECTRUM> and </SPECTRUM> tags from the raw_drop_number field
117
+ df["raw_drop_number"] = df["raw_drop_number"].str.replace("<SPECTRUM>", "")
118
+ df["raw_drop_number"] = df["raw_drop_number"].str.replace("</SPECTRUM>", "")
119
+
120
+ # Add a 0 before every ';' that is not preceded by a digit
121
+ # Example: ';;1;;' --> '0;0;1;0;'
122
+ df["raw_drop_number"] = df["raw_drop_number"].str.replace(r"(?<!\d);", "0;", regex=True)
123
+
124
+ # Return the dataframe adhering to DISDRODB L0 standards
125
+ return df
disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py ADDED
@@ -0,0 +1,138 @@
1
+ #!/usr/bin/env python3
2
+ # -----------------------------------------------------------------------------.
3
+ # Copyright (c) 2021-2023 DISDRODB developers
4
+ #
5
+ # This program is free software: you can redistribute it and/or modify
6
+ # it under the terms of the GNU General Public License as published by
7
+ # the Free Software Foundation, either version 3 of the License, or
8
+ # (at your option) any later version.
9
+ #
10
+ # This program is distributed in the hope that it will be useful,
11
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13
+ # GNU General Public License for more details.
14
+ #
15
+ # You should have received a copy of the GNU General Public License
16
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
17
+ # -----------------------------------------------------------------------------.
18
+ """Reader for CSWR FARM disdrometer data (used in PERILS and RELAMPAGO campaign)."""
19
+ import pandas as pd
20
+
21
+ from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
22
+ from disdrodb.l0.l0a_processing import read_raw_text_file
23
+
24
+
25
+ @is_documented_by(reader_generic_docstring)
26
+ def reader(
27
+ filepath,
28
+ logger=None,
29
+ ):
30
+ """Reader."""
31
+ ##------------------------------------------------------------------------.
32
+ #### Define column names
33
+ column_names = ["TO_PARSE"]
34
+
35
+ ##------------------------------------------------------------------------.
36
+ #### Define reader options
37
+ reader_kwargs = {}
38
+
39
+ # - Define delimiter
40
+ reader_kwargs["delimiter"] = "\\n"
41
+
42
+ # - Define encoding
43
+ reader_kwargs["encoding"] = "ISO-8859-1"
44
+
45
+ # Skip first row as columns names
46
+ reader_kwargs["header"] = None
47
+ reader_kwargs["skiprows"] = 2
48
+
49
+ # - Avoid first column to become df index !!!
50
+ reader_kwargs["index_col"] = False
51
+
52
+ # - Define behaviour when encountering bad lines
53
+ reader_kwargs["on_bad_lines"] = "skip"
54
+
55
+ # - Define reader engine
56
+ # - C engine is faster
57
+ # - Python engine is more feature-complete
58
+ reader_kwargs["engine"] = "python"
59
+
60
+ # - Define on-the-fly decompression of on-disk data
61
+ # - Available: gzip, bz2, zip
62
+ reader_kwargs["compression"] = "infer"
63
+
64
+ # - Strings to recognize as NA/NaN and replace with standard NA flags
65
+ # - Already included: '#N/A', '#N/A N/A', '#NA', '-1.#IND', '-1.#QNAN',
66
+ # '-NaN', '-nan', '1.#IND', '1.#QNAN', '<NA>', 'N/A',
67
+ # 'NA', 'NULL', 'NaN', 'n/a', 'nan', 'null'
68
+ reader_kwargs["na_values"] = ["na", "", "error"]
69
+
70
+ ##------------------------------------------------------------------------.
71
+ #### Read the data
72
+ df = read_raw_text_file(
73
+ filepath=filepath,
74
+ column_names=column_names,
75
+ reader_kwargs=reader_kwargs,
76
+ logger=logger,
77
+ )
78
+
79
+ ##------------------------------------------------------------------------.
80
+ #### Adapt the dataframe to adhere to DISDRODB L0 standards
81
+ # Split and assign integrated variables names
82
+ df = df["TO_PARSE"].str.split(",", expand=True, n=22)
83
+
84
+ names = [
85
+ "time",
86
+ "station_name",
87
+ "station_number",
88
+ "rainfall_rate_32bit",
89
+ "rainfall_accumulated_32bit",
90
+ "weather_code_synop_4680",
91
+ "weather_code_synop_4677",
92
+ "weather_code_metar_4678",
93
+ "weather_code_nws",
94
+ "reflectivity_32bit",
95
+ "mor_visibility",
96
+ "sample_interval",
97
+ "laser_amplitude",
98
+ "number_particles",
99
+ "sensor_temperature",
100
+ "sensor_serial_number",
101
+ "firmware_iop",
102
+ "firmware_dsp",
103
+ "sensor_heating_current",
104
+ "sensor_battery_voltage",
105
+ "sensor_status",
106
+ "rain_kinetic_energy",
107
+ "TO_SPLIT",
108
+ ]
109
+ df.columns = names
110
+
111
+ # Derive raw drop arrays
112
+ def split_string(s):
113
+ vals = [v.strip() for v in s.split(",")]
114
+ c1 = ", ".join(vals[:32])
115
+ c2 = ", ".join(vals[32:64])
116
+ c3 = ", ".join(vals[64:])
117
+ return pd.Series({"raw_drop_concentration": c1, "raw_drop_average_velocity": c2, "raw_drop_number": c3})
118
+
119
+ splitted_string = df["TO_SPLIT"].apply(split_string)
120
+ df["raw_drop_concentration"] = splitted_string["raw_drop_concentration"]
121
+ df["raw_drop_average_velocity"] = splitted_string["raw_drop_average_velocity"]
122
+ df["raw_drop_number"] = splitted_string["raw_drop_number"]
123
+
124
+ # Define datetime "time" column
125
+ df["time"] = pd.to_datetime(df["time"], format="%Y-%m-%d %H:%M:%S", errors="coerce")
126
+
127
+ # Drop columns not agreeing with DISDRODB L0 standards
128
+ columns_to_drop = [
129
+ "station_name",
130
+ "station_number",
131
+ "firmware_iop",
132
+ "firmware_dsp",
133
+ "TO_SPLIT",
134
+ ]
135
+ df = df.drop(columns=columns_to_drop)
136
+
137
+ # Return the dataframe adhering to DISDRODB L0 standards
138
+ return df
disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py CHANGED
@@ -126,6 +126,15 @@ def reader(
126
126
  ]
127
127
  df_data.columns = column_names
128
128
 
129
+ # Add weather information
130
+ df_data["air_temperature"] = df["fast_temperature"]
131
+ df_data["relative_humidity"] = df["relative_humidity"]
132
+ df_data["wind_direction"] = df["wind_direction"]
133
+ df_data["wind_speed"] = df["wind_speed"]
134
+
135
+ # df_data["dew_point"] = df["dew_point"]
136
+ # df_data["air_pressure"] = df["pressure"]
137
+
129
138
  # Retrieve time and coordinates information
130
139
  # --> Latitude in degrees_north
131
140
  # --> Longitude in degrees_east
disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py ADDED
@@ -0,0 +1,67 @@
1
+ #!/usr/bin/env python3
2
+ # -----------------------------------------------------------------------------.
3
+ # Copyright (c) 2021-2023 DISDRODB developers
4
+ #
5
+ # This program is free software: you can redistribute it and/or modify
6
+ # it under the terms of the GNU General Public License as published by
7
+ # the Free Software Foundation, either version 3 of the License, or
8
+ # (at your option) any later version.
9
+ #
10
+ # This program is distributed in the hope that it will be useful,
11
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13
+ # GNU General Public License for more details.
14
+ #
15
+ # You should have received a copy of the GNU General Public License
16
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
17
+ # -----------------------------------------------------------------------------.
18
+ """Reader for DELFT OTT PARSIVEL2 sensor in netCDF format."""
19
+
20
+ from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
21
+ from disdrodb.l0.l0b_nc_processing import open_raw_netcdf_file, standardize_raw_dataset
22
+
23
+
24
+ @is_documented_by(reader_generic_docstring)
25
+ def reader(
26
+ filepath,
27
+ logger=None,
28
+ ):
29
+ """Reader."""
30
+ ##------------------------------------------------------------------------.
31
+ #### Open the netCDF
32
+ ds = open_raw_netcdf_file(filepath=filepath, logger=logger)
33
+
34
+ ##------------------------------------------------------------------------.
35
+ #### Adapt the dataframe to adhere to DISDRODB L0 standards
36
+ # Add time coordinate
37
+ ds["time"] = ds["time_as_string"].astype("M8[s]")
38
+ ds = ds.set_coords("time")
39
+
40
+ # Define dictionary mapping dataset variables to select and rename
41
+ dict_names = {
42
+ ### Dimensions
43
+ "diameter_classes": "diameter_bin_center",
44
+ "velocity_classes": "velocity_bin_center",
45
+ ### Variables
46
+ "rainfall_rate_32bit": "rainfall_rate_32bit",
47
+ "weather_code_synop_4680": "weather_code_synop_4680",
48
+ "weather_code_synop_4677": "weather_code_synop_4677",
49
+ "weather_code_metar_4678": "weather_code_metar_4678",
50
+ "weather_code_nws": "weather_code_nws",
51
+ "reflectivity_32bit": "reflectivity_32bit",
52
+ "mor_visibility": "mor_visibility",
53
+ "laser_amplitude": "laser_amplitude",
54
+ "number_particles_validated": "number_particles",
55
+ "sensor_temperature": "sensor_temperature",
56
+ "error_code": "error_code",
57
+ "kinetic_energy": "rain_kinetic_energy",
58
+ "fieldV": "raw_drop_average_velocity",
59
+ "fieldN": "raw_drop_concentration",
60
+ "raw_data": "raw_drop_number",
61
+ }
62
+
63
+ # Rename dataset variables and columns and infill missing variables
64
+ ds = standardize_raw_dataset(ds=ds, dict_names=dict_names, sensor_name="PARSIVEL2")
65
+
66
+ # Return the dataset adhering to DISDRODB L0B standards
67
+ return ds
disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py CHANGED
@@ -18,12 +18,19 @@
18
18
  # -----------------------------------------------------------------------------.
19
19
  """DISDRODB Reader for NOAA PSL RD80 stations."""
20
20
  import os
21
+ import re
21
22
 
23
+ # Convert ParserWarning into an error
24
+ import warnings
25
+
26
+ import numpy as np
22
27
  import pandas as pd
23
28
 
24
29
  from disdrodb.l0.l0_reader import is_documented_by, reader_generic_docstring
25
30
  from disdrodb.l0.l0a_processing import read_raw_text_file
26
31
 
32
+ warnings.simplefilter("error", pd.errors.ParserWarning)
33
+
27
34
 
28
35
  def read_new_format(filepath, logger):
29
36
  """Read new format."""
@@ -101,6 +108,14 @@ def read_new_format(filepath, logger):
101
108
 
102
109
  ##------------------------------------------------------------------------.
103
110
  #### Adapt the dataframe to adhere to DISDRODB L0 standards
111
+ # Retrieve date and hour information
112
+ with open(filepath) as f:
113
+ date_header = f.readline().strip()
114
+ match = re.search(r":\s*(\d+)\s*UTC", date_header)
115
+ if match:
116
+ date_hour_str = match.group(1)
117
+ else:
118
+ raise ValueError("Date information not found.")
104
119
 
105
120
  # Replace -99.9900 values with NaN
106
121
  columns_to_replace = ["Dmax", "RI", "RA", "Wg", "Z", "EF", "N0", "slope"]
@@ -116,28 +131,30 @@ def read_new_format(filepath, logger):
116
131
 
117
132
  # - Convert start/end MM:SS:SSS to timedelta
118
133
  def parse_time(t):
119
- minutes, seconds, milliseconds = map(int, t.split(":"))
120
- return pd.Timedelta(minutes=minutes, seconds=seconds, milliseconds=milliseconds)
134
+ try:
135
+ minutes, seconds, milliseconds = map(int, t.split(":"))
136
+ timedelta = pd.Timedelta(minutes=minutes, seconds=seconds, milliseconds=milliseconds)
137
+ except Exception:
138
+ timedelta = pd.Timedelta("NaT")
139
+ return timedelta
121
140
 
122
141
  df_time["start"] = df_time["start"].apply(parse_time)
123
142
  df_time["end"] = df_time["end"].apply(parse_time)
124
- # - Wrap end time if it's less than start time (i.e., crosses 60:00 boundary)
125
- # --> 00:00 --> 60:00
126
- df_time.loc[df_time["end"] < df_time["start"], "end"] += pd.Timedelta(minutes=60)
127
-
128
- # Compute sample_interval in seconds as integer
129
- df["sample_interval"] = (df_time["end"] - df_time["start"]).dt.total_seconds().astype(int)
130
143
 
131
144
  # Define time
132
- # - Extract date-hour
133
- filename = os.path.basename(filepath)
134
- if filename.startswith("lab") or filename.startswith("bao0") or filename.startswith("mdt0"):
135
- date_hour_str = filename[4:11]
136
- else:
137
- date_hour_str = filename[3:10]
138
145
  date_hour = pd.to_datetime(date_hour_str, format="%y%j%H")
139
146
  df["time"] = date_hour + df_time["start"]
140
147
 
148
+ # Drop invalid timesteps
149
+ df_time = df_time[~np.isnan(df["time"])]
150
+ df = df[~np.isnan(df["time"])]
151
+
152
+ # Compute sample_interval in seconds as integer
153
+ # - Wrap end time if it's less than start time (i.e., crosses 60:00 boundary)
154
+ # --> 00:00 --> 60:00
155
+ df_time.loc[df_time["end"] < df_time["start"], "end"] += pd.Timedelta(minutes=60)
156
+ df["sample_interval"] = (df_time["end"] - df_time["start"]).dt.total_seconds().astype(int)
157
+
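# Illustrative example of the wrap above (values not from the source data):
# start = 58:30:000 and end = 01:15:000 cross the 60-minute boundary, so end is
# shifted to 61:15:000 and sample_interval = (61*60 + 15) - (58*60 + 30) = 165 seconds.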
141
158
  # Create raw_drop_number column
142
159
  bin_columns = ["n" + str(i) for i in range(1, 21)]
143
160
  df_arr = df[bin_columns]
disdrodb/l1/encoding_attrs.py CHANGED
@@ -112,13 +112,20 @@ def get_attrs_dict():
112
112
  "units": "J",
113
113
  "long_name": "Maximum Drop Kinetic Energy",
114
114
  },
115
- "E": {
115
+ "TKE": {
116
+ "description": "Total Kinetic Energy",
117
+ "standard_name": "",
118
+ "units": "J m-2",
119
+ "long_name": "Total Kinetic Energy",
120
+ },
121
+ "KED": {
116
122
  "description": "Kinetic energy per unit rainfall depth",
117
123
  "standard_name": "",
118
124
  "units": "J m-2 mm-1",
119
125
  "long_name": "Rainfall Kinetic Energy",
120
126
  },
121
- "KE": {
127
+ "KEF": {
128
+ "description": "Kinetic energy per unit time",
122
129
  "standard_name": "",
123
130
  "units": "J m-2 h-1",
124
131
  "long_name": "Kinetic Energy Density Flux",
disdrodb/l1/routines.py CHANGED
@@ -61,15 +61,16 @@ def get_l1_options():
61
61
  # - TODO: as function of sensor name
62
62
 
63
63
  # minimum_diameter
64
- # --> PWS100: 0.05
65
- # --> PARSIVEL: 0.2495
66
- # --> RD80: 0.313
67
- # --> LPM: 0.125 (we currently discard first bin with this setting)
64
+ # --> PWS100: 0 (0.05)
65
+ # --> PARSIVEL: 0.2495 (0.312)
66
+ # --> RD80: 0.313 (0.359)
67
+ # --> LPM: 0.125 (0.1875) (we currently discard first bin with default settings !)
68
68
 
69
69
  # maximum_diameter
70
- # LPM: 8 mm
71
- # RD80: 5.6 mm
72
- # OTT: 26 mm
70
+ # LPM: 9 (10) mm
71
+ # RD80: 5.373 (5.6) mm
72
+ # OTT: 24.5 (26) mm
73
+ # PWS100: 27.2 (28.8) mm
73
74
 
74
75
  l1_options = {
75
76
  # Fall velocity option
disdrodb/utils/dataframe.py CHANGED
@@ -122,7 +122,7 @@ def compute_1d_histogram(df, column, variables=None, bins=10, labels=None, prefi
122
122
  if variables_specified:
123
123
  # Compute quantiles
124
124
  quantiles = [0.01, 0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95, 0.99]
125
- df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1)
125
+ df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1) # noqa: PD010
126
126
  df_stats_quantiles.columns = [f"{prefix}Q{int(q*100)}" for q in df_stats_quantiles.columns]
127
127
  df_stats_quantiles = df_stats_quantiles.rename(
128
128
  columns={
@@ -276,7 +276,7 @@ def compute_2d_histogram(
276
276
  if variables_specified:
277
277
  # Compute quantiles
278
278
  quantiles = [0.01, 0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95, 0.99]
279
- df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1)
279
+ df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1) # noqa: PD010
280
280
  df_stats_quantiles.columns = [f"{prefix}Q{int(q*100)}" for q in df_stats_quantiles.columns]
281
281
  df_stats_quantiles = df_stats_quantiles.rename(
282
282
  columns={
disdrodb/utils/directories.py CHANGED
@@ -17,12 +17,12 @@
17
17
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
18
18
  # -----------------------------------------------------------------------------.
19
19
  """Define utilities for Directory/File Checks/Creation/Deletion."""
20
-
21
20
  import glob
22
21
  import logging
23
22
  import os
24
23
  import pathlib
25
24
  import shutil
25
+ import subprocess
26
26
  from typing import Union
27
27
 
28
28
  from disdrodb.utils.list import flatten_list
@@ -207,10 +207,22 @@ def _remove_file_or_directories(path, logger=None):
207
207
  log_info(logger, msg=f"Deleted the empty directory {path}")
208
208
  # If not empty directory
209
209
  else:
210
- shutil.rmtree(path)
210
+ # If not Windows, use shutil.rmtree
211
+ if os.name != "nt": # Check if not Windows
212
+ shutil.rmtree(path)
213
+ else:
214
+ rmtree_windows(path)
211
215
  log_info(logger, msg=f"Deleted directories within {path}")
212
216
 
213
217
 
218
+ def rmtree_windows(path):
219
+ """Remove a directory tree on Windows."""
220
+ if not os.path.isdir(path):
221
+ raise FileNotFoundError(f"{path!r} is not a valid directory")
222
+ # Use rd (alias rmdir) with /S (remove all subdirectories/files) and /Q (quiet)
223
+ subprocess.check_call(["cmd", "/c", "rd", "/S", "/Q", path])
224
+
225
+
214
226
  def remove_if_exists(path: str, force: bool = False, logger=None) -> None:
215
227
  """Remove file or directory if exists and ``force=True``.
216
228
 
disdrodb/utils/xarray.py CHANGED
@@ -97,6 +97,89 @@ def xr_get_last_valid_idx(da_condition, dim, fill_value=None):
97
97
  return last_idx
98
98
 
99
99
 
100
+ ####-------------------------------------------------------------------
101
+ #### Unstacking dimension
102
+
103
+
104
+ def _check_coord_handling(coord_handling):
105
+ if coord_handling not in {"keep", "drop", "unstack"}:
106
+ raise ValueError("coord_handling must be one of 'keep', 'drop', or 'unstack'.")
107
+
108
+
109
+ def _unstack_coordinates(xr_obj, dim, prefix, suffix):
110
+ # Identify coordinates that share the target dimension
111
+ coords_with_dim = _get_non_dimensional_coordinates(xr_obj, dim=dim)
112
+ ds = xr.Dataset()
113
+ for coord_name in coords_with_dim:
114
+ coord_da = xr_obj[coord_name]
115
+ # Split the coordinate DataArray along the target dimension, drop coordinate and merge
116
+ split_ds = unstack_datarray_dimension(coord_da, coord_handling="drop", dim=dim, prefix=prefix, suffix=suffix)
117
+ ds.update(split_ds)
118
+ return ds
119
+
120
+
121
+ def _handle_unstack_non_dim_coords(ds, source_xr_obj, coord_handling, dim, prefix, suffix):
122
+ # Deal with coordinates sharing the target dimension
123
+ if coord_handling == "keep":
124
+ return ds
125
+ if coord_handling == "unstack":
126
+ ds_coords = _unstack_coordinates(source_xr_obj, dim=dim, prefix=prefix, suffix=suffix)
127
+ ds.update(ds_coords)
128
+ # Remove non dimensional coordinates (unstack and drop coord_handling)
129
+ ds = ds.drop_vars(_get_non_dimensional_coordinates(ds, dim=dim))
130
+ return ds
131
+
132
+
133
+ def _get_non_dimensional_coordinates(xr_obj, dim):
134
+ return [coord_name for coord_name, coord_da in xr_obj.coords.items() if dim in coord_da.dims and coord_name != dim]
135
+
136
+
137
+ def unstack_datarray_dimension(da, dim, coord_handling="keep", prefix="", suffix=""):
138
+ """
139
+ Split a DataArray along a specified dimension into a Dataset with separate prefixed and suffixed variables.
140
+
141
+ Parameters
142
+ ----------
143
+ da : xarray.DataArray
144
+ The DataArray to split.
145
+ dim : str
146
+ The dimension along which to split the DataArray.
147
+ coord_handling : str, optional
148
+ Option to handle coordinates sharing the target dimension.
149
+ Choices are 'keep', 'drop', or 'unstack'. Defaults to 'keep'.
150
+ prefix : str, optional
151
+ String to prepend to each new variable name.
152
+ suffix : str, optional
153
+ String to append to each new variable name.
154
+
155
+ Returns
156
+ -------
157
+ xarray.Dataset
158
+ A Dataset with each variable split along the specified dimension.
159
+ The Dataset variables are named "{prefix}{name}{suffix}{dim_value}".
160
+ Coordinates sharing the target dimension are handled based on `coord_handling`.
161
+ """
162
+ # Retrieve DataArray name
163
+ name = da.name
164
+ # Unstack variables
165
+ ds = da.to_dataset(dim=dim)
166
+ rename_dict = {dim_value: f"{prefix}{name}{suffix}{dim_value}" for dim_value in list(ds.data_vars)}
167
+ ds = ds.rename_vars(rename_dict)
168
+ # Deal with coordinates sharing the target dimension
169
+ return _handle_unstack_non_dim_coords(
170
+ ds=ds,
171
+ source_xr_obj=da,
172
+ coord_handling=coord_handling,
173
+ dim=dim,
174
+ prefix=prefix,
175
+ suffix=suffix,
176
+ )
177
+
178
+
179
+ ####--------------------------------------------------------------------------
180
+ #### Fill Values Utilities
181
+
182
+
100
183
  def define_dataarray_fill_value(da):
101
184
  """Define the fill value for a numerical xarray.DataArray."""
102
185
  if np.issubdtype(da.dtype, np.floating):
{disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: disdrodb
3
- Version: 0.1.1
3
+ Version: 0.1.2
4
4
  Summary: disdrodb provides tools to download, standardize, share and analyze global disdrometer data.
5
5
  Author: Gionata Ghiggi
6
6
  Project-URL: homepage, https://github.com/ltelab/disdrodb
{disdrodb-0.1.1.dist-info → disdrodb-0.1.2.dist-info}/RECORD CHANGED
@@ -1,6 +1,6 @@
1
1
  disdrodb/__init__.py,sha256=e2ICMwXhEcT0zUMq4RUL3V5b7W18pXHLuJipcp1_sjU,4672
2
2
  disdrodb/_config.py,sha256=K0hhWvJGUo_3UrROVznlGPiWTbjx-ntUm-GeDRP-xBQ,1750
3
- disdrodb/_version.py,sha256=Mmxse1R0ki5tjz9qzU8AQyqUsLt8nTyCAbYQp8R87PU,511
3
+ disdrodb/_version.py,sha256=bSmADqydH8nBu-J4lG8UVuR7hnU_zcwhnSav2oQ0W0A,511
4
4
  disdrodb/configs.py,sha256=XHjG1CfgGDgMk01-nyyPGPUbvuq57ct1skUR2yNyQEg,7870
5
5
  disdrodb/docs.py,sha256=EZQE-LHXeBLMP2Xk6g2jZtvBwzA6IOq2RaEF6OJ1Co4,1500
6
6
  disdrodb/routines.py,sha256=TMLysQV0HT9C_GuFMd6gv2kO2Abz6C1fIoH92APZxQI,53777
@@ -42,7 +42,7 @@ disdrodb/cli/disdrodb_run_l2m_station.py,sha256=qM2t3ehaApZ3QKHVlRokcvMEVRDNHXH4
42
42
  disdrodb/cli/disdrodb_upload_archive.py,sha256=BN3nzBeDboliFNpNQ3SRDXV_EWv8_mcoJzkUHAaO-NI,3977
43
43
  disdrodb/cli/disdrodb_upload_station.py,sha256=18JyaEsXLGUI1scT85qIjOQNeixL2bVGPXH-rj5MIRI,3556
44
44
  disdrodb/data_transfer/__init__.py,sha256=JMz0m-lAzNvckxONeB75B0q-jsS0HPmHfQa6vbCG39w,1134
45
- disdrodb/data_transfer/download_data.py,sha256=l1_Fjb8dLaBAZa_2CFo1zFnv7L-bQUYoZJ2800-JH-s,12726
45
+ disdrodb/data_transfer/download_data.py,sha256=p6oPxs1HvzuM3EZRkWPcXcXMMNKB9-HHF78PDGfoJcw,17489
46
46
  disdrodb/data_transfer/upload_data.py,sha256=imLdxor0e5t-Ha3HnujIrq-7xfPR65mD7QvLsRMmv8M,10916
47
47
  disdrodb/data_transfer/zenodo.py,sha256=yX7GGocOYi39yO92vzxLxrou-HcDxeMw3BYpSexf80U,9672
48
48
  disdrodb/issue/__init__.py,sha256=avYn-r8QItYawWh-iHflsgQDZSILO8b_84RNlgajGbU,991
@@ -60,7 +60,7 @@ disdrodb/l0/l0c_processing.py,sha256=yCfTTRUqlIDaNWTqE1BedF8oe77xla8v-Ptownr5dhA
60
60
  disdrodb/l0/routines.py,sha256=3u8bK_S2cl0rt6yFZyADNDsILZb1ZPoGLUIovLVpx54,36007
61
61
  disdrodb/l0/standards.py,sha256=SJpKdAuUVUvci07Q3jpU9IetiGg4XxciHS5f5ACq2-I,22954
62
62
  disdrodb/l0/template_tools.py,sha256=W-ixB_HTlNA468wawtezSQHk797-XM4cSmGpIS_Cx3s,17097
63
- disdrodb/l0/configs/LPM/bins_diameter.yml,sha256=c6TYpoZlcsOdWeGEtNbbtdmX6y2eWJaCr6A3WurfS6k,1062
63
+ disdrodb/l0/configs/LPM/bins_diameter.yml,sha256=627jsVTNhbM7zpDa2MAwXFpc57hYglQQkZQor6lmW9M,1061
64
64
  disdrodb/l0/configs/LPM/bins_velocity.yml,sha256=fpWt7-viGHKkpN_Mi3nwFxEpnXctBlPzhutQZNYQvO8,918
65
65
  disdrodb/l0/configs/LPM/l0a_encodings.yml,sha256=5Wy0HR3T1RkpFC77x2l_AmrtPqgrV6Fzy4CZ7A-1Udo,3475
66
66
  disdrodb/l0/configs/LPM/l0b_cf_attrs.yml,sha256=CxrgDPV3EvUnQo_0xjFemPXNpoBGZ6GSL6pDXk6z-Tw,11232
@@ -74,10 +74,10 @@ disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml,sha256=l8u_Juoa7NEeKBW8BzxDgBqx77
74
74
  disdrodb/l0/configs/PARSIVEL/raw_data_format.yml,sha256=fhaWZ-t4IlpIUp3ZHTkQ30vgQCh-xeLoSssNCO-8rWM,5225
75
75
  disdrodb/l0/configs/PARSIVEL2/bins_diameter.yml,sha256=hdSuUJ7MUUui0WMP_aljGh_PxpeqV8O2F24WfSn9pGc,1607
76
76
  disdrodb/l0/configs/PARSIVEL2/bins_velocity.yml,sha256=00xL4MH4KIQiOpswss9HwnM5FT2_utv9S_mUnMOJCiE,1492
77
- disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml,sha256=hIMjBNi3Nl3TxMRuvtGm00wq0KPpJRipAuk2xNW4ZA8,1277
78
- disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml,sha256=c6bRhhNOWd-okN8RfxBk_o7T25AM_70KwT2BJrmK_GI,4746
79
- disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml,sha256=NU1wtDVW4q5zAesij2wzyQMJbyxndOA0xYUwxVZQ_mk,5790
80
- disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml,sha256=MCUOycBNHYmwUMFXdHEWW1jzZ631QkhrHrD2GceWNpc,6361
77
+ disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml,sha256=V5_exJ3_1KrhbxK8TvOMGZHMtzxGlIh4Tm9NiQpg39M,1381
78
+ disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml,sha256=bF8LIxWA_m2E_NzVT-7uvzeP_GDaNKYiYwvQibaGZeM,5174
79
+ disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml,sha256=E9Qj5oOo1x_QpXdUJtITefFMzuU-mYUurry7Od4GBfk,6511
80
+ disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml,sha256=xjovAoRz9WpbqWv_RZAjz8S0z8lpgrmgFHdVMCZagyE,6972
81
81
  disdrodb/l0/configs/PWS100/bins_diameter.yml,sha256=Y7t93TVIqp--tSSWX4D-oUgN-QfvlKVvln_BEKdBFv0,1590
82
82
  disdrodb/l0/configs/PWS100/bins_velocity.yml,sha256=Y7t93TVIqp--tSSWX4D-oUgN-QfvlKVvln_BEKdBFv0,1590
83
83
  disdrodb/l0/configs/PWS100/l0a_encodings.yml,sha256=IJ5-LRMMRUogyvJc1foSZxgXTdCqlhKxd1h73mIKJaU,636
@@ -96,6 +96,9 @@ disdrodb/l0/manuals/PARSIVEL.pdf,sha256=9R-AC9Yw6M2kpi18M_B6wJNpkpqsNqSQa1CiYg_8
96
96
  disdrodb/l0/manuals/PARSIVEL2.pdf,sha256=pp9pyE57esOrw5vkd59suflutkNdD-pHJ-Egv8cE7zE,932634
97
97
  disdrodb/l0/manuals/PWS100.pdf,sha256=RoBcAhOgokO-5Wacj-FjIOWg8MZZ7hi4I9rI92_pj9k,2747204
98
98
  disdrodb/l0/manuals/RD80.pdf,sha256=0BbIY2gEch4ouAdfzaXnWMJQO2Z7fyd8RYi7euy-sFg,1087911
99
+ disdrodb/l0/manuals/SWS250.pdf,sha256=7RoIG8TP__ggJtqA1T6QqW9kWpe9NKH32mtfFKj0MG4,2285637
100
+ disdrodb/l0/manuals/VPF730.pdf,sha256=fRFKiqcBbJ43rQrFH1I1qaVMoHR3yhbw37g0JeHlzNM,2719195
101
+ disdrodb/l0/manuals/VPF750.pdf,sha256=fRFKiqcBbJ43rQrFH1I1qaVMoHR3yhbw37g0JeHlzNM,2719195
99
102
  disdrodb/l0/readers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
100
103
  disdrodb/l0/readers/template_reader_raw_netcdf_data.py,sha256=u1mBbK2wgsLf2y7WganSsvgEtyZ2TMUrDrZ_BTxqQl0,2950
101
104
  disdrodb/l0/readers/template_reader_raw_text_data.py,sha256=CqpoTP9HJ3TRw31rJoKLh4XzOuaxp4oU9s3taXG8W_g,3206
@@ -140,7 +143,8 @@ disdrodb/l0/readers/PARSIVEL/EPFL/UNIL_2022.py,sha256=QIZeq7bL7lyQqzzzeg0-dgf8Ft
140
143
  disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py,sha256=W0L_IogcMj6y_fVbS1LksK2KGSBL0MYpX_EM_f6cmUo,3931
141
144
  disdrodb/l0/readers/PARSIVEL/GPM/LPVEX.py,sha256=EFdv89o01qs-SwGUrCs8UHQxPv6G9g9YX0cj5lV5Xjw,3500
142
145
  disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py,sha256=XW7jvKObH5lKNDvAYPO6rrqKl_73YwO2wmZGt7Pm9xo,7209
143
- disdrodb/l0/readers/PARSIVEL/KIT/BURKINA_FASO.py,sha256=RGeyVOLC5V47OAcsA6F3pe_HOJj3MP0t7oQwE9-2wi0,4782
146
+ disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py,sha256=-iegziC4abXL14iNxVvJK4D617o-ICJx0eG2mtjtrgo,4038
147
+ disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py,sha256=_4TEeGkgL4y_Fn9azn0oFuOOYkyrKQdFt6AKCWsdZqE,4591
144
148
  disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py,sha256=Rx3qA7APFEnYxLDgLLfl_7kr0s4jMpGXh41wtIzhpNE,4083
145
149
  disdrodb/l0/readers/PARSIVEL/NCAR/OWLES_MIPS.py,sha256=sGQ1hAZguzIgeBBciJgLU-eYbVopVHEehNrpGJZja0c,3889
146
150
  disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py,sha256=qgIYGQE8PnvkqdPfOvZrFqmbMHVRS-CFWFUhe644-og,4558
@@ -158,7 +162,9 @@ disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py,sha256=aJDxLTcvfsNs1hls3
158
162
  disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py,sha256=Yl0Q4ZtDB6AtTncQsyzytKarVzx7YHl-8PtuSBOZCRQ,3916
159
163
  disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py,sha256=QSRSBTflLrd1j5GIb1tfsz_vc7zvai3FhsI7WWPQ28s,6758
160
164
  disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py,sha256=wlKhAZHc2C8cMl31bqrpRvPyZn_BXxROdhg-yQ9Vqgo,1260
165
+ disdrodb/l0/readers/PARSIVEL2/KIT/BURKINA_FASO.py,sha256=RGeyVOLC5V47OAcsA6F3pe_HOJj3MP0t7oQwE9-2wi0,4782
161
166
  disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py,sha256=YT0SbU0PGddRO1oPxD_yKfpG0Exa26Zexb8gqxIiSCA,2412
167
+ disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py,sha256=w5Z9ZE9TrzNuzFCZNDQQQe0S9nMk4auzvmHgoi5uX-U,4835
162
168
  disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py,sha256=UDv5XE9RAuL_jclbuvu2mH3cpMvob82KoJ5wrUJ8EMc,4396
163
169
  disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_MIPS.py,sha256=emMqH0ulkEZ9A75PuAFrAE5uvxptrDMYcyGnTPQ6wPo,3889
164
170
  disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py,sha256=Ic95NNoVBL1eFSkZVrOU2za8nE2bZSoTMgVVyH3QrMM,6397
@@ -166,22 +172,23 @@ disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py,sha256=41Yml05w7VBN3nmqDl1moe9wc
166
172
  disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py,sha256=41Yml05w7VBN3nmqDl1moe9wc2vPqSi5uLJ2eT1JrwA,6019
167
173
  disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py,sha256=_empgxKM6WIT1_JnMHqydhdxNYbTNOfJeRbe7-9YGsA,3932
168
174
  disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py,sha256=fhA8i4T0_dooyPnC2n6CoQ9wv79iivdFxdc5Zd04tLE,4503
169
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py,sha256=l1isftTEcsWdSsD8xSCAzwIt18HpfPANfMqmfSwqz9I,5246
175
+ disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py,sha256=v6NpCOTh13_R6-ZEz0NHqwbhAUWYrViylB_xP-RC9Y0,5583
170
176
  disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py,sha256=zfwwJQ1ZF7_0Nih6JvuOihlegnQFdclsKwuRP8CqwmY,6081
177
+ disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py,sha256=v85p9fPE19gS4-BCLeWP-EN-KPRF4UAj3GuAVxUSqb4,2865
171
178
  disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py,sha256=YUpuwhiS78WnhXwVDYzHR4YEZiuKXjrdbMHKt3gMqRA,6796
172
179
  disdrodb/l0/readers/RD80/BRAZIL/CHUVA_RD80.py,sha256=yUFwg8hhcm3qQl7cZGL-EibXHqwsL0GItdc5b5ZUSEE,4025
173
180
  disdrodb/l0/readers/RD80/BRAZIL/GOAMAZON_RD80.py,sha256=yUFwg8hhcm3qQl7cZGL-EibXHqwsL0GItdc5b5ZUSEE,4025
174
181
  disdrodb/l0/readers/RD80/NCAR/CINDY_2011_RD80.py,sha256=Vg447gM0LWcxw0_z-C8KhmNIwtf3t3XLcvTuAJTGDe0,4005
175
182
  disdrodb/l0/readers/RD80/NCAR/RELAMPAGO_RD80.py,sha256=n5Jbt8F4S6yQfnlerQSrAvv6R-BlQ-pmbo0kVHW_A3k,4242
176
- disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py,sha256=XGcCEwiu0jw5DYC2izlVFUKfmTs_6vtMCE-AySERgfY,8961
183
+ disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py,sha256=UiIADOFxQDWSDCzXcJEHpHLtKvH12uQ54fEr3jCyheA,9368
177
184
  disdrodb/l1/__init__.py,sha256=F5-IJIQwzjSiw2JBFUyupIm_RaAqJdn9ZFowoDnF0lw,870
178
185
  disdrodb/l1/beard_model.py,sha256=3uNUd6gSSV3vUHqScG9l-NvEje6oI7sDwA8vdbUFMOY,22705
179
- disdrodb/l1/encoding_attrs.py,sha256=xxyxSHXXVusJPcel8DjxOAncwlmJfqA7Woy5py0PaCg,19548
186
+ disdrodb/l1/encoding_attrs.py,sha256=G0cdOiiZWDqN4eK7an8kEFv5b1WnDz9yBlVy3pPJrrU,19801
180
187
  disdrodb/l1/fall_velocity.py,sha256=yE9dryElGYaHC62eWD0Ameqg-BIc7bcYnukgZ7QEGnM,9488
181
188
  disdrodb/l1/filters.py,sha256=dZgu6L8MaYwfYybtxeOXE2-c7W2Zy112dNAv0xy4VZ0,7969
182
189
  disdrodb/l1/processing.py,sha256=rbPjISEabBFTN7apTcQMcf_XACU4WUob0KC7x_kFUb8,8426
183
190
  disdrodb/l1/resampling.py,sha256=gn1FGKcLevxsqZb0fNmw8zHby-uXoaSHIAAyVF6CiSs,8908
184
- disdrodb/l1/routines.py,sha256=u5sq8BGiGpkZGg9GmkolkqQkVLHACHJJ9rkb0HOPu4w,12289
191
+ disdrodb/l1/routines.py,sha256=OKhmWfMOOWg3wG1O3oYNwmIMPta5Q3GHO-IB7RCTPhs,12373
185
192
  disdrodb/l1_env/__init__.py,sha256=a0KBE3Ugqz7FBZ2HM_Ey8YxBlrSNjoXIluLtgh10EGc,897
186
193
  disdrodb/l1_env/routines.py,sha256=Fv9Bp15gbQ_gvz26euMy-dlGt7KVugNGmEzbVHFjw_g,1570
187
194
  disdrodb/l2/__init__.py,sha256=WrQ1sWGc-BVoSlIJzKCtHjMEdp4qB-hcZ1dk5TAhJro,885
@@ -211,22 +218,22 @@ disdrodb/utils/attrs.py,sha256=AZOrEYRaTIafwTAFMOADiK1nz38epD3A6yVREKzma0A,6577
211
218
  disdrodb/utils/cli.py,sha256=C5l61lXKyQbI7WhRMex56j9h8BU7aF2MRirEHYNchpU,7738
212
219
  disdrodb/utils/compression.py,sha256=80vN987bTOWX-dpVGIAgT0iWo4dKpDXf57mMy1n6ZEE,9506
213
220
  disdrodb/utils/dask.py,sha256=S3lP4FlEQa9tHOz60zTeudfkGYt3JZw7xTb0_y3aCs4,2335
214
- disdrodb/utils/dataframe.py,sha256=Nrc9gDWyHWYJspS9ua_HvlCBtjxaxi5eyHELJPO4V30,12699
221
+ disdrodb/utils/dataframe.py,sha256=lengASuequ0Gy80USc6XNJS44CBVO6sfB0UFjJwp_e4,12729
215
222
  disdrodb/utils/decorators.py,sha256=ZudfEHs9t6KRxQaG2m0rPCFn08hNiYu175CLJtg1pew,3822
216
- disdrodb/utils/directories.py,sha256=2A0lYvb18K2ZMAfq5ww1TvI-Q7tfEZA21XAtlS4gGBY,9732
223
+ disdrodb/utils/directories.py,sha256=xalKuxE0dXeC08cMC1Ym74RsVaprRg-C3s0vygSe8yM,10219
217
224
  disdrodb/utils/encoding.py,sha256=CaF7GrJ5OptdDLS7Q4fEMkNa4rvCuhfIcF-VrQhH9h8,3991
218
225
  disdrodb/utils/list.py,sha256=1HEIN4RcGjiCDDQOZ7Kbz4Eqg2AhWuttzV_M0tdOysg,1382
219
226
  disdrodb/utils/logger.py,sha256=cnjZYUBaXRrsA6lg1-FB4qhg9BNCUJO7N_9X78dkF88,11210
220
227
  disdrodb/utils/time.py,sha256=aUdHVmaWYGGEYwnmL925llVXEZfniqMGfGgKVDdBXFs,27414
221
228
  disdrodb/utils/warnings.py,sha256=NQeDC0vJhARVGysE1205OVhcSQ_bg-CV0O_LLdgQyoo,1221
222
229
  disdrodb/utils/writer.py,sha256=LDI8msK5yS52c2oDGO6H-JIGwp7Z5RyIVWZfMoNgCGQ,2047
223
- disdrodb/utils/xarray.py,sha256=U9eRAohWpShEWZBFHDxZT3HdyPRIi76ciP7XchdXNGc,6594
230
+ disdrodb/utils/xarray.py,sha256=kUq7gUv9LAucedRZiLjabh20lYNJrcI3_IRHA2FbHfA,9715
224
231
  disdrodb/utils/yaml.py,sha256=oeh8BaL-8DbqEuj6SjY5snQcsCDH4icIu0bDix6QZIA,1558
225
232
  disdrodb/viz/__init__.py,sha256=jjJc8HDfNm3C4NwNzaJAlLZyVa1AMp_-xp9jpjdD2Ak,881
226
233
  disdrodb/viz/plots.py,sha256=4sYJVI3nb2XBFOqykyj3k5x1Nj4hrBGeGGwbhYxG-Fo,875
227
- disdrodb-0.1.1.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
228
- disdrodb-0.1.1.dist-info/METADATA,sha256=xYx_zzfU4ecI0giu4c-W_glCOmocN1GYvOLQ7NN4kSw,19181
229
- disdrodb-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
230
- disdrodb-0.1.1.dist-info/entry_points.txt,sha256=k_Q9WafFQkKcOn-Gz2Q6V4yHad7leXirMM4gJF-Mjx8,2664
231
- disdrodb-0.1.1.dist-info/top_level.txt,sha256=2LJa4FBAK_iNmy-vH2H7lVNrMrpSAs71py-ktzOMC1k,9
232
- disdrodb-0.1.1.dist-info/RECORD,,
234
+ disdrodb-0.1.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
235
+ disdrodb-0.1.2.dist-info/METADATA,sha256=XJIBZT_gzmD-L4R8taFtphyjZmXt5Bz3ZFmvyls-Kt8,19181
236
+ disdrodb-0.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
237
+ disdrodb-0.1.2.dist-info/entry_points.txt,sha256=k_Q9WafFQkKcOn-Gz2Q6V4yHad7leXirMM4gJF-Mjx8,2664
238
+ disdrodb-0.1.2.dist-info/top_level.txt,sha256=2LJa4FBAK_iNmy-vH2H7lVNrMrpSAs71py-ktzOMC1k,9
239
+ disdrodb-0.1.2.dist-info/RECORD,,