disdrodb 0.0.20__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- disdrodb/__init__.py +132 -15
- disdrodb/_config.py +4 -2
- disdrodb/_version.py +9 -4
- disdrodb/api/checks.py +264 -237
- disdrodb/api/configs.py +4 -8
- disdrodb/api/create_directories.py +235 -290
- disdrodb/api/info.py +217 -26
- disdrodb/api/io.py +295 -269
- disdrodb/api/path.py +597 -173
- disdrodb/api/search.py +486 -0
- disdrodb/{metadata/scripts → cli}/disdrodb_check_metadata_archive.py +12 -7
- disdrodb/{utils/pandas.py → cli/disdrodb_data_archive_directory.py} +9 -18
- disdrodb/cli/disdrodb_download_archive.py +86 -0
- disdrodb/cli/disdrodb_download_metadata_archive.py +53 -0
- disdrodb/cli/disdrodb_download_station.py +84 -0
- disdrodb/{api/scripts → cli}/disdrodb_initialize_station.py +22 -10
- disdrodb/cli/disdrodb_metadata_archive_directory.py +32 -0
- disdrodb/{data_transfer/scripts/disdrodb_download_station.py → cli/disdrodb_open_data_archive.py} +22 -22
- disdrodb/cli/disdrodb_open_logs_directory.py +69 -0
- disdrodb/{data_transfer/scripts/disdrodb_upload_station.py → cli/disdrodb_open_metadata_archive.py} +22 -24
- disdrodb/cli/disdrodb_open_metadata_directory.py +71 -0
- disdrodb/cli/disdrodb_open_product_directory.py +74 -0
- disdrodb/cli/disdrodb_open_readers_directory.py +32 -0
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0.py +38 -31
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0_station.py +32 -30
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a_station.py +24 -33
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b_station.py +25 -34
- disdrodb/cli/disdrodb_run_l0c.py +130 -0
- disdrodb/cli/disdrodb_run_l0c_station.py +129 -0
- disdrodb/cli/disdrodb_run_l1.py +122 -0
- disdrodb/cli/disdrodb_run_l1_station.py +121 -0
- disdrodb/cli/disdrodb_run_l2e.py +122 -0
- disdrodb/cli/disdrodb_run_l2e_station.py +122 -0
- disdrodb/cli/disdrodb_run_l2m.py +122 -0
- disdrodb/cli/disdrodb_run_l2m_station.py +122 -0
- disdrodb/cli/disdrodb_upload_archive.py +105 -0
- disdrodb/cli/disdrodb_upload_station.py +98 -0
- disdrodb/configs.py +90 -25
- disdrodb/data_transfer/__init__.py +22 -0
- disdrodb/data_transfer/download_data.py +87 -90
- disdrodb/data_transfer/upload_data.py +64 -37
- disdrodb/data_transfer/zenodo.py +15 -18
- disdrodb/docs.py +1 -1
- disdrodb/issue/__init__.py +17 -4
- disdrodb/issue/checks.py +10 -23
- disdrodb/issue/reader.py +9 -12
- disdrodb/issue/writer.py +14 -17
- disdrodb/l0/__init__.py +17 -26
- disdrodb/l0/check_configs.py +35 -23
- disdrodb/l0/check_standards.py +32 -42
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_diameter.yml +44 -44
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_velocity.yml +40 -40
- disdrodb/l0/configs/LPM/l0a_encodings.yml +80 -0
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_cf_attrs.yml +62 -59
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_encodings.yml +9 -9
- disdrodb/l0/configs/{Thies_LPM → LPM}/raw_data_format.yml +245 -245
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_diameter.yml +66 -66
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL/l0a_encodings.yml +32 -0
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_cf_attrs.yml +22 -20
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_encodings.yml +17 -17
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/raw_data_format.yml +77 -77
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_diameter.yml +64 -64
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +39 -0
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_cf_attrs.yml +24 -22
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_encodings.yml +20 -20
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/raw_data_format.yml +98 -98
- disdrodb/l0/configs/{RD_80 → RD80}/bins_diameter.yml +40 -40
- disdrodb/l0/configs/RD80/l0a_encodings.yml +16 -0
- disdrodb/l0/configs/{RD_80 → RD80}/l0b_cf_attrs.yml +3 -3
- disdrodb/l0/configs/RD80/l0b_encodings.yml +135 -0
- disdrodb/l0/configs/{RD_80 → RD80}/raw_data_format.yml +48 -48
- disdrodb/l0/l0_reader.py +216 -340
- disdrodb/l0/l0a_processing.py +237 -208
- disdrodb/l0/l0b_nc_processing.py +227 -80
- disdrodb/l0/l0b_processing.py +93 -173
- disdrodb/l0/l0c_processing.py +627 -0
- disdrodb/l0/readers/{ARM → LPM/ARM}/ARM_LPM.py +36 -58
- disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +226 -0
- disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +185 -0
- disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +183 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +179 -0
- disdrodb/l0/readers/{UK → LPM/UK}/DIVEN.py +14 -35
- disdrodb/l0/readers/PARSIVEL/AUSTRALIA/MELBOURNE_2007_PARSIVEL.py +157 -0
- disdrodb/l0/readers/PARSIVEL/CHINA/CHONGQING.py +113 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/ARCTIC_2021.py +40 -57
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/COMMON_2011.py +37 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/DAVOS_2009_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_2009.py +34 -51
- disdrodb/l0/readers/{EPFL/PARADISO_2014.py → PARSIVEL/EPFL/EPFL_ROOF_2008.py} +38 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/EPFL_ROOF_2010.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2012.py +33 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GENEPI_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007_2.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HPICONET_2010.py +34 -51
- disdrodb/l0/readers/{EPFL/EPFL_ROOF_2010.py → PARSIVEL/EPFL/HYMEX_LTE_SOP2.py} +37 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/HYMEX_LTE_SOP3.py +111 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HYMEX_LTE_SOP4.py +36 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2018.py +34 -52
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2019.py +38 -56
- disdrodb/l0/readers/PARSIVEL/EPFL/PARADISO_2014.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PARSIVEL_2007.py +27 -45
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PLATO_2019.py +24 -44
- disdrodb/l0/readers/PARSIVEL/EPFL/RACLETS_2019.py +140 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RACLETS_2019_WJF.py +41 -59
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RIETHOLZBACH_2011.py +34 -51
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2017.py +117 -0
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2019.py +137 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/UNIL_2022.py +42 -55
- disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py +104 -0
- disdrodb/l0/readers/{GPM → PARSIVEL/GPM}/LPVEX.py +29 -48
- disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py +184 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py +113 -0
- disdrodb/l0/readers/{NCAR/VORTEX_SE_2016_P1.py → PARSIVEL/NCAR/OWLES_MIPS.py} +46 -72
- disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +125 -0
- disdrodb/l0/readers/{NCAR/OWLES_MIPS.py → PARSIVEL/NCAR/PLOWS_MIPS.py} +45 -64
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +114 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +176 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +183 -0
- disdrodb/l0/readers/{ARM/ARM_LD.py → PARSIVEL2/ARM/ARM_PARSIVEL2.py} +27 -50
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/{DENMARK → PARSIVEL2/DENMARK}/EROSION_nc.py +14 -35
- disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +119 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +104 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py +176 -0
- disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py +32 -0
- disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py +56 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +120 -0
- disdrodb/l0/readers/{NCAR → PARSIVEL2/NCAR}/PECAN_MIPS.py +45 -64
- disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +181 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +160 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +160 -0
- disdrodb/l0/readers/{NCAR/PLOWS_MIPS.py → PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py} +49 -66
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +118 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +152 -0
- disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py +166 -0
- disdrodb/l0/readers/{NCAR/RELAMPAGO_RD80.py → RD80/BRAZIL/CHUVA_RD80.py} +36 -60
- disdrodb/l0/readers/{BRAZIL → RD80/BRAZIL}/GOAMAZON_RD80.py +36 -55
- disdrodb/l0/readers/{NCAR → RD80/NCAR}/CINDY_2011_RD80.py +35 -54
- disdrodb/l0/readers/{BRAZIL/CHUVA_RD80.py → RD80/NCAR/RELAMPAGO_RD80.py} +40 -54
- disdrodb/l0/readers/template_reader_raw_netcdf_data.py +62 -0
- disdrodb/l0/readers/{reader_template.py → template_reader_raw_text_data.py} +20 -44
- disdrodb/l0/routines.py +885 -581
- disdrodb/l0/standards.py +72 -236
- disdrodb/l0/template_tools.py +104 -109
- disdrodb/l1/__init__.py +17 -0
- disdrodb/l1/beard_model.py +716 -0
- disdrodb/l1/encoding_attrs.py +620 -0
- disdrodb/l1/fall_velocity.py +260 -0
- disdrodb/l1/filters.py +192 -0
- disdrodb/l1/processing.py +200 -0
- disdrodb/l1/resampling.py +236 -0
- disdrodb/l1/routines.py +357 -0
- disdrodb/l1_env/__init__.py +17 -0
- disdrodb/l1_env/routines.py +38 -0
- disdrodb/l2/__init__.py +17 -0
- disdrodb/l2/empirical_dsd.py +1735 -0
- disdrodb/l2/event.py +388 -0
- disdrodb/l2/processing.py +519 -0
- disdrodb/l2/processing_options.py +213 -0
- disdrodb/l2/routines.py +868 -0
- disdrodb/metadata/__init__.py +9 -2
- disdrodb/metadata/checks.py +165 -118
- disdrodb/metadata/download.py +81 -0
- disdrodb/metadata/geolocation.py +146 -0
- disdrodb/metadata/info.py +20 -13
- disdrodb/metadata/manipulation.py +1 -1
- disdrodb/metadata/reader.py +59 -8
- disdrodb/metadata/search.py +77 -144
- disdrodb/metadata/standards.py +7 -8
- disdrodb/metadata/writer.py +8 -14
- disdrodb/psd/__init__.py +38 -0
- disdrodb/psd/fitting.py +2146 -0
- disdrodb/psd/models.py +774 -0
- disdrodb/routines.py +1176 -0
- disdrodb/scattering/__init__.py +28 -0
- disdrodb/scattering/axis_ratio.py +344 -0
- disdrodb/scattering/routines.py +456 -0
- disdrodb/utils/__init__.py +17 -0
- disdrodb/utils/attrs.py +208 -0
- disdrodb/utils/cli.py +269 -0
- disdrodb/utils/compression.py +60 -42
- disdrodb/utils/dask.py +62 -0
- disdrodb/utils/decorators.py +110 -0
- disdrodb/utils/directories.py +107 -46
- disdrodb/utils/encoding.py +127 -0
- disdrodb/utils/list.py +29 -0
- disdrodb/utils/logger.py +168 -46
- disdrodb/utils/time.py +657 -0
- disdrodb/utils/warnings.py +30 -0
- disdrodb/utils/writer.py +57 -0
- disdrodb/utils/xarray.py +138 -47
- disdrodb/utils/yaml.py +0 -1
- disdrodb/viz/__init__.py +17 -0
- disdrodb/viz/plots.py +17 -0
- disdrodb-0.1.0.dist-info/METADATA +321 -0
- disdrodb-0.1.0.dist-info/RECORD +216 -0
- {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info}/WHEEL +1 -1
- disdrodb-0.1.0.dist-info/entry_points.txt +30 -0
- disdrodb/data_transfer/scripts/disdrodb_download_archive.py +0 -53
- disdrodb/data_transfer/scripts/disdrodb_upload_archive.py +0 -57
- disdrodb/l0/configs/OTT_Parsivel/l0a_encodings.yml +0 -32
- disdrodb/l0/configs/OTT_Parsivel2/l0a_encodings.yml +0 -39
- disdrodb/l0/configs/RD_80/l0a_encodings.yml +0 -16
- disdrodb/l0/configs/RD_80/l0b_encodings.yml +0 -135
- disdrodb/l0/configs/Thies_LPM/l0a_encodings.yml +0 -80
- disdrodb/l0/io.py +0 -257
- disdrodb/l0/l0_processing.py +0 -1091
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_OTT.py +0 -178
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_THIES.py +0 -247
- disdrodb/l0/readers/BRAZIL/CHUVA_LPM.py +0 -204
- disdrodb/l0/readers/BRAZIL/CHUVA_OTT.py +0 -183
- disdrodb/l0/readers/BRAZIL/GOAMAZON_LPM.py +0 -204
- disdrodb/l0/readers/BRAZIL/GOAMAZON_OTT.py +0 -183
- disdrodb/l0/readers/CHINA/CHONGQING.py +0 -131
- disdrodb/l0/readers/EPFL/EPFL_ROOF_2008.py +0 -128
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP2.py +0 -127
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP3.py +0 -129
- disdrodb/l0/readers/EPFL/RACLETS_2019.py +0 -158
- disdrodb/l0/readers/EPFL/SAMOYLOV_2017.py +0 -136
- disdrodb/l0/readers/EPFL/SAMOYLOV_2019.py +0 -158
- disdrodb/l0/readers/FRANCE/SIRTA_OTT2.py +0 -138
- disdrodb/l0/readers/GPM/GCPEX.py +0 -123
- disdrodb/l0/readers/GPM/IFLOODS.py +0 -123
- disdrodb/l0/readers/GPM/MC3E.py +0 -123
- disdrodb/l0/readers/GPM/NSSTC.py +0 -164
- disdrodb/l0/readers/ITALY/GID.py +0 -199
- disdrodb/l0/readers/MEXICO/OH_IIUNAM_nc.py +0 -92
- disdrodb/l0/readers/NCAR/CCOPE_2015.py +0 -133
- disdrodb/l0/readers/NCAR/PECAN_FP3.py +0 -137
- disdrodb/l0/readers/NCAR/PECAN_MOBILE.py +0 -144
- disdrodb/l0/readers/NCAR/RELAMPAGO_OTT.py +0 -195
- disdrodb/l0/readers/NCAR/SNOWIE_PJ.py +0 -172
- disdrodb/l0/readers/NCAR/SNOWIE_SB.py +0 -179
- disdrodb/l0/readers/NCAR/VORTEX2_2009.py +0 -133
- disdrodb/l0/readers/NCAR/VORTEX2_2010.py +0 -188
- disdrodb/l0/readers/NCAR/VORTEX2_2010_UF.py +0 -191
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_P2.py +0 -135
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_PIPS.py +0 -170
- disdrodb/l0/readers/NETHERLANDS/DELFT.py +0 -187
- disdrodb/l0/readers/SPAIN/SBEGUERIA.py +0 -179
- disdrodb/l0/scripts/disdrodb_run_l0b_concat.py +0 -93
- disdrodb/l0/scripts/disdrodb_run_l0b_concat_station.py +0 -85
- disdrodb/utils/netcdf.py +0 -452
- disdrodb/utils/scripts.py +0 -102
- disdrodb-0.0.20.dist-info/AUTHORS.md +0 -18
- disdrodb-0.0.20.dist-info/METADATA +0 -186
- disdrodb-0.0.20.dist-info/RECORD +0 -168
- disdrodb-0.0.20.dist-info/entry_points.txt +0 -15
- /disdrodb/l0/configs/{RD_80 → RD80}/bins_velocity.yml +0 -0
- /disdrodb/l0/manuals/{Thies_LPM.pdf → LPM.pdf} +0 -0
- /disdrodb/l0/manuals/{ODM_470.pdf → ODM470.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel.pdf → PARSIVEL.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel2.pdf → PARSIVEL2.pdf} +0 -0
- /disdrodb/l0/manuals/{PWS_100.pdf → PWS100.pdf} +0 -0
- /disdrodb/l0/manuals/{RD_80.pdf → RD80.pdf} +0 -0
- {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info/licenses}/LICENSE +0 -0
- {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info}/top_level.txt +0 -0
disdrodb/data_transfer/upload_data.py
CHANGED

@@ -18,17 +18,20 @@
 # -----------------------------------------------------------------------------.
 """Routines to upload data to the DISDRODB Decentralized Data Archive."""

-from typing import
+from typing import Optional

 import click

 from disdrodb.api.path import define_metadata_filepath
+from disdrodb.configs import get_data_archive_dir, get_metadata_archive_dir
 from disdrodb.data_transfer.zenodo import upload_station_to_zenodo
-from disdrodb.metadata import get_list_metadata
+from disdrodb.metadata.search import get_list_metadata
+from disdrodb.utils.compression import archive_station_data
 from disdrodb.utils.yaml import read_yaml


 def click_upload_options(function: object):
+    """Click command arguments for DISDRODB data upload."""
     function = click.option(
         "--platform",
         type=click.Choice(["zenodo", "sandbox.zenodo"], case_sensitive=False),
@@ -95,7 +98,7 @@ def _check_if_upload(metadata_filepath: str, force: bool):
         raise ValueError(f"'force' is False and {metadata_filepath} has already a 'disdrodb_data_url' specified.")


-def _filter_already_uploaded(metadata_filepaths:
+def _filter_already_uploaded(metadata_filepaths: list[str], force: bool) -> list[str]:
     """Filter metadata files that already have a remote url specified."""
     filtered = []
     for metadata_filepath in metadata_filepaths:
@@ -124,7 +127,8 @@ def upload_station(
     station_name: str,
     platform: Optional[str] = "sandbox.zenodo",
     force: bool = False,
-
+    data_archive_dir: Optional[str] = None,
+    metadata_archive_dir: Optional[str] = None,
 ) -> None:
     """
     Upload data from a single DISDRODB station on a remote repository.
@@ -141,45 +145,57 @@ def upload_station(
         The name of the campaign. Must be provided in UPPER CASE.
     station_name : str
         The name of the station.
-
-        The
-
+    data_archive_dir : str (optional)
+        The directory path where the DISDRODB Data Archive is located.
+        The directory path must end with ``<...>/DISDRODB``.
+        If ``None``, it uses the ``data_archive_dir`` path specified
+        in the DISDRODB active configuration.
     platform: str, optional
-        Name of the remote platform.
-        The default platform is "sandbox.zenodo"
+        Name of the remote data storage platform.
+        The default platform is ``"sandbox.zenodo"`` (for testing purposes).
+        Switch to ``"zenodo"`` for final data dissemination.
     force: bool, optional
-        If True
-        The default is force=False
+        If ``True``, upload the data and overwrite the ``disdrodb_data_url``.
+        The default value is ``force=False``.

     """
+    # Retrieve the DISDRODB Metadata and Data Archive Directories
+    data_archive_dir = get_data_archive_dir(data_archive_dir)
+    metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
+
+    # Check valid platform
     _check_valid_platform(platform)

     # Define metadata_filepath
     metadata_filepath = define_metadata_filepath(
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
-        base_dir=base_dir,
-        product="RAW",
         check_exists=True,
     )
     # Check if data must be uploaded
     _check_if_upload(metadata_filepath, force=force)

-
+    # Zip station data
+    print(f" - Zipping station data of {data_source} {campaign_name} {station_name}")
+    station_zip_filepath = archive_station_data(metadata_filepath, data_archive_dir=data_archive_dir)
+
+    print(f" - Start uploading of {data_source} {campaign_name} {station_name}")
     # Upload the data
     if platform == "zenodo":
-        upload_station_to_zenodo(metadata_filepath, sandbox=False)
+        upload_station_to_zenodo(metadata_filepath, station_zip_filepath=station_zip_filepath, sandbox=False)

     else:  # platform == "sandbox.zenodo": # Only for testing purposes, not available through CLI
-        upload_station_to_zenodo(metadata_filepath, sandbox=True)
+        upload_station_to_zenodo(metadata_filepath, station_zip_filepath=station_zip_filepath, sandbox=True)


 def upload_archive(
     platform: Optional[str] = None,
     force: bool = False,
-
-
+    data_archive_dir: Optional[str] = None,
+    metadata_archive_dir: Optional[str] = None,
+    **fields_kwargs,
 ) -> None:
     """Find all stations containing local data and upload them to a remote repository.

@@ -187,39 +203,49 @@ def upload_archive(
     ----------
     platform: str, optional
         Name of the remote platform.
-
-
+        The default platform is ``"sandbox.zenodo"`` (for testing purposes).
+        Switch to ``"zenodo"`` for final data dissemination.
     force: bool, optional
-        If True
-        The default is force=False
-
-
-
+        If ``True``, upload even if data already exists on another remote location.
+        The default value is ``force=False``.
+    data_archive_dir : str (optional)
+        The directory path where the DISDRODB Data Archive is located.
+        The directory path must end with ``<...>/DISDRODB``.
+        If ``None``, it uses the ``data_archive_dir`` path specified
+        in the DISDRODB active configuration.

     Other Parameters
     ----------------
-
     data_sources: str or list of str, optional
         Data source name (eg: EPFL).
-        If not provided (None), all data sources will be uploaded.
-        The default is data_source=None
+        If not provided (``None``), all data sources will be uploaded.
+        The default value is ``data_source=None``.
     campaign_names: str or list of str, optional
         Campaign name (eg: EPFL_ROOF_2012).
-        If not provided (None), all campaigns will be uploaded.
-        The default is campaign_name=None
+        If not provided (``None``), all campaigns will be uploaded.
+        The default value is ``campaign_name=None``.
     station_names: str or list of str, optional
         Station name.
-        If not provided (None), all stations will be uploaded.
-        The default is station_name=None
+        If not provided (``None``), all stations will be uploaded.
+        The default value is ``station_name=None``.
     """
     _check_valid_platform(platform)

-    #
+    # Retrieve the DISDRODB Metadata and Data Archive Directories
+    data_archive_dir = get_data_archive_dir(data_archive_dir)
+    metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)
+
+    # Retrieve only metadata_filepaths of stations with RAW data in the local DISDRODB Data Archive
     metadata_filepaths = get_list_metadata(
-
-
-
+        metadata_archive_dir=metadata_archive_dir,
+        data_archive_dir=data_archive_dir,
+        product="RAW",  # --> Search in local DISDRODB Data Archive
+        available_data=True,  # --> Select only stations with raw data
+        raise_error_if_empty=False,  # Do not raise error if no matching metadata file found
+        invalid_fields_policy="raise",  # Raise error if invalid filtering criteria are specified
+        **fields_kwargs,  # data_sources, campaign_names, station_names
     )
+
     # If force=False, keep only metadata without disdrodb_data_url
     if not force:
         metadata_filepaths = _filter_already_uploaded(metadata_filepaths, force=force)
@@ -237,7 +263,8 @@ def upload_archive(
         station_name = metadata["station_name"]
         try:
             upload_station(
-
+                data_archive_dir=data_archive_dir,
+                metadata_archive_dir=metadata_archive_dir,
                 data_source=data_source,
                 campaign_name=campaign_name,
                 station_name=station_name,
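A minimal usage sketch of the reworked upload entry points, based only on the keyword arguments visible in the diff above; the archive paths and the station name are placeholders, not values shipped with the release.

    # Hypothetical calls, assuming the signatures shown in the diff above.
    from disdrodb.data_transfer.upload_data import upload_archive, upload_station

    # Upload one station, overriding the archive directories of the active configuration.
    upload_station(
        data_source="EPFL",                    # example data source from the docstring above
        campaign_name="EPFL_ROOF_2012",        # example campaign from the docstring above
        station_name="STATION_NAME",           # placeholder
        platform="sandbox.zenodo",             # switch to "zenodo" for final dissemination
        force=False,
        data_archive_dir="/path/to/DISDRODB",  # placeholder local data archive
        metadata_archive_dir="/path/to/DISDRODB-METADATA/DISDRODB",  # placeholder metadata archive
    )

    # Upload every EPFL station with local RAW data that still lacks a disdrodb_data_url;
    # data_sources is forwarded to get_list_metadata through **fields_kwargs.
    upload_archive(platform="sandbox.zenodo", force=False, data_sources="EPFL")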
disdrodb/data_transfer/zenodo.py
CHANGED

@@ -20,12 +20,10 @@

 import json
 import os
-from typing import Tuple

 import requests

 from disdrodb.configs import get_zenodo_token
-from disdrodb.utils.compression import archive_station_data
 from disdrodb.utils.yaml import read_yaml, write_yaml


@@ -35,7 +33,6 @@ def _check_http_response(
     task_description: str,
 ) -> None:
     """Check the Zenodo HTTP request response status code and raise an error if not the expected one."""
-
     if response.status_code == expected_status_code:
         return

@@ -52,7 +49,7 @@ def _check_http_response(
     raise ValueError(error_message)


-def _create_zenodo_deposition(sandbox) ->
+def _create_zenodo_deposition(sandbox) -> tuple[int, str]:
     """Create a new Zenodo deposition and get the deposit information.

     At every function call, the deposit_id and bucket url will change !
@@ -60,8 +57,8 @@ def _create_zenodo_deposition(sandbox) -> Tuple[int, str]:
     Parameters
     ----------
     sandbox : bool
-        If True
-        If False
+        If ``True``, create the deposit on Zenodo Sandbox for testing purposes.
+        If ``False``, create the deposit on Zenodo.

     Returns
     -------
@@ -96,7 +93,6 @@ def _define_disdrodb_data_url(zenodo_host, deposit_id, filename):

 def _upload_file_to_zenodo(filepath: str, metadata_filepath: str, sandbox: bool) -> None:
     """Upload a file to a Zenodo bucket."""
-
     # Read metadata
     metadata = read_yaml(metadata_filepath)
     data_source = metadata["data_source"]
@@ -186,7 +182,7 @@ def _define_zenodo_metadata(metadata):
     description = f"Disdrometer measurements of the {name} station. "
     description += "This dataset is part of the DISDRODB project. "
     description += "Station metadata are available at "
-    description += f"https://github.com/ltelab/
+    description += f"https://github.com/ltelab/DISDRODB-METADATA/blob/main/DISDRODB/METADATA/{data_source}/{campaign_name}/metadata/{station_name}.yml . "  # noqa: E501
     description += "The software to easily process and standardize the raw data into netCDF files is available at "
     description += "https://github.com/ltelab/disdrodb ."

@@ -196,7 +192,7 @@ def _define_zenodo_metadata(metadata):
             "upload_type": "dataset",
             "description": description,
             "creators": _define_creators_list(metadata),
-        }
+        },
     }
     return zenodo_metadata

@@ -207,7 +203,7 @@ def _update_metadata_with_zenodo_url(metadata_filepath: str, disdrodb_data_url:
     Parameters
     ----------
     metadata_filepath: str
-
+        Path to the station metadata file.
     disdrodb_data_url: str
         Remote URL where the station data are stored.
     """
@@ -216,26 +212,27 @@ def _update_metadata_with_zenodo_url(metadata_filepath: str, disdrodb_data_url:
     write_yaml(metadata_dict, metadata_filepath)


-def upload_station_to_zenodo(metadata_filepath: str, sandbox: bool = True) -> str:
+def upload_station_to_zenodo(metadata_filepath: str, station_zip_filepath: str, sandbox: bool = True) -> str:
     """Zip station data, upload data to Zenodo and update the metadata disdrodb_data_url.

     Parameters
     ----------
     metadata_filepath: str
-
+        Path to the station metadata file.
+    station_zip_filepath: str
+        Path to the zip file containing the station data.
     sandbox: bool
-        If True
+        If ``True``, upload to Zenodo Sandbox (for testing purposes).
+        If ``False``, upload to Zenodo.
     """
-    # Zip station data
-    print(" - Zipping station data")
-    station_zip_filepath = archive_station_data(metadata_filepath)
-
     # Upload the station data zip file on Zenodo
     # - After upload, it removes the zip file !
     print(" - Uploading station data")
     try:
         disdrodb_data_url = _upload_file_to_zenodo(
-            filepath=station_zip_filepath,
+            filepath=station_zip_filepath,
+            metadata_filepath=metadata_filepath,
+            sandbox=sandbox,
         )
         os.remove(station_zip_filepath)
     except Exception as e:
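The zip step now happens in the caller (see upload_data.py above) and the resulting zip path is passed in explicitly. A sketch of the new calling convention, with placeholder paths:

    from disdrodb.data_transfer.zenodo import upload_station_to_zenodo
    from disdrodb.utils.compression import archive_station_data

    metadata_filepath = "/path/to/metadata/STATION_NAME.yml"  # placeholder
    station_zip_filepath = archive_station_data(              # zip the station raw data first
        metadata_filepath,
        data_archive_dir="/path/to/DISDRODB",                 # placeholder
    )
    upload_station_to_zenodo(
        metadata_filepath,
        station_zip_filepath=station_zip_filepath,
        sandbox=True,  # Zenodo Sandbox while testing; False for the real Zenodo
    )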
disdrodb/docs.py
CHANGED

@@ -25,7 +25,7 @@ from disdrodb.api.checks import check_sensor_name


 def open_sensor_documentation(sensor_name):
-    """Open the sensor documentation
+    """Open the sensor documentation PDF in the browser."""
     from disdrodb import __root_path__

     check_sensor_name(sensor_name)
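A one-line sketch of the touched helper, assuming the sensor keys now follow the renamed configuration directories listed above (e.g. "PARSIVEL2" rather than "OTT_Parsivel2"):

    from disdrodb.docs import open_sensor_documentation

    open_sensor_documentation("PARSIVEL2")  # opens the bundled PDF manual; the sensor key is an assumption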
disdrodb/issue/__init__.py
CHANGED

@@ -1,9 +1,22 @@
 #!/usr/bin/env python3
-"""
-Created on Sat Nov 11 01:12:40 2023

-
-
+# -----------------------------------------------------------------------------.
+# Copyright (c) 2021-2023 DISDRODB developers
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+# -----------------------------------------------------------------------------.
+"""DISDRODB issue files module."""

 from disdrodb.issue.reader import read_station_issue

disdrodb/issue/checks.py
CHANGED

@@ -22,26 +22,22 @@ import logging
 import numpy as np
 import pandas as pd

-from disdrodb.utils.logger import log_error
-
 logger = logging.getLogger(__name__)


 def _is_numpy_array_string(arr):
-    """Check if the numpy array contains strings
+    """Check if the numpy array contains strings.

     Parameters
     ----------
     arr : numpy array
         Numpy array to check.
     """
-
-    dtype = arr.dtype.type
-    return dtype == np.str_ or dtype == np.unicode_
+    return np.issubdtype(arr.dtype, np.str_)


 def _is_numpy_array_datetime(arr):
-    """Check if the numpy array contains datetime64
+    """Check if the numpy array contains datetime64.

     Parameters
     ----------
@@ -53,7 +49,7 @@ def _is_numpy_array_datetime(arr):
     numpy array
         Numpy array checked.
     """
-    return arr.dtype
+    return np.issubdtype(arr.dtype, np.datetime64)


 def _check_timestep_datetime_accuracy(timesteps, unit="s"):
@@ -64,7 +60,7 @@ def _check_timestep_datetime_accuracy(timesteps, unit="s"):
     timesteps : numpy array
         Numpy array to check.
     unit : str, optional
-
+        Time unit. The default value is "s".

     Returns
     -------
@@ -75,14 +71,13 @@ def _check_timestep_datetime_accuracy(timesteps, unit="s"):
     ------
     ValueError
     """
-    if
+    if timesteps.dtype != f"<M8[{unit}]":
         msg = f"The timesteps does not have datetime64 {unit} accuracy."
-        log_error(logger, msg=msg, verbose=False)
         raise ValueError(msg)
     return timesteps


-def _check_timestep_string_second_accuracy(timesteps
+def _check_timestep_string_second_accuracy(timesteps):
     """Check the timesteps string are provided with second accuracy.

     Note: it assumes the YYYY-mm-dd HH:MM:SS format
@@ -94,7 +89,6 @@ def _check_timestep_string_second_accuracy(timesteps, n=19):
             f"The following timesteps are mispecified: {mispecified_timesteps}. Expecting the YYYY-mm-dd HH:MM:SS"
             " format."
         )
-        log_error(logger, msg=msg, verbose=False)
         raise ValueError(msg)
     return timesteps

@@ -116,7 +110,6 @@ def _check_timesteps_string(timesteps):
             f"The following timesteps are mispecified: {mispecified_timesteps}. Expecting the YYYY-mm-dd HH:MM:SS"
             " format."
         )
-        log_error(logger, msg=msg, verbose=False)
         raise ValueError(msg)
     # Convert to numpy
     new_timesteps = new_timesteps.to_numpy()
@@ -148,18 +141,14 @@ def check_timesteps(timesteps):

 def _check_time_period_nested_list_format(time_periods):
     """Check that the time_periods is a list of list of length 2."""
-
     if not isinstance(time_periods, list):
         msg = "'time_periods' must be a list'"
-        log_error(logger, msg=msg, verbose=False)
         raise TypeError(msg)

     for time_period in time_periods:
         if not isinstance(time_period, (list, np.ndarray)) or len(time_period) != 2:
             msg = "Every time period of time_periods must be a list of length 2."
-            log_error(logger, msg=msg, verbose=False)
             raise ValueError(msg)
-    return None


 def check_time_periods(time_periods):
@@ -178,7 +167,6 @@ def check_time_periods(time_periods):
     for time_period in new_time_periods:
         if time_period[0] > time_period[1]:
             msg = f"The {time_period} time_period is invalid. Start time occurs after end time."
-            log_error(logger, msg=msg, verbose=False)
             raise ValueError(msg)
     return new_time_periods

@@ -201,7 +189,7 @@ def _get_issue_time_periods(issue_dict):


 def check_issue_dict(issue_dict):
-    """Check validity of the issue dictionary"""
+    """Check validity of the issue dictionary."""
     # Check is empty
     if len(issue_dict) == 0:
         return issue_dict
@@ -211,7 +199,6 @@ def check_issue_dict(issue_dict):
     invalid_keys = [k for k in keys if k not in valid_keys]
     if len(invalid_keys) > 0:
         msg = f"Invalid {invalid_keys} keys. The issue YAML file accept only {valid_keys}"
-        log_error(logger, msg=msg, verbose=False)
         raise ValueError(msg)

     # Check timesteps
@@ -225,12 +212,12 @@ def check_issue_dict(issue_dict):
     return issue_dict


-def check_issue_compliance(data_source, campaign_name, station_name,
+def check_issue_compliance(data_source, campaign_name, station_name, metadata_archive_dir=None):
     """Check DISDRODB issue compliance."""
     from disdrodb.issue.reader import read_station_issue

     issue_dict = read_station_issue(
-
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
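The dtype checks above replace the removed np.unicode_ comparison with np.issubdtype and drop the log_error calls. A small, self-contained NumPy illustration of the new checks (no DISDRODB imports required):

    import numpy as np

    strings = np.array(["2018-08-01 12:00:00"])
    times = np.array(["2018-08-01 12:00:00"], dtype="datetime64[s]")

    print(np.issubdtype(strings.dtype, np.str_))      # True  -> _is_numpy_array_string
    print(np.issubdtype(times.dtype, np.datetime64))  # True  -> _is_numpy_array_datetime
    print(times.dtype == "<M8[s]")                    # True  -> second-accuracy check passes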
disdrodb/issue/reader.py
CHANGED

@@ -28,16 +28,16 @@ logger = logging.getLogger(__name__)


 class NoDatesSafeLoader(yaml.SafeLoader):
+    """A YAML loader that does not parse dates."""
+
     @classmethod
     def remove_implicit_resolver(cls, tag_to_remove):
         """
-        Remove implicit resolvers for a particular tag
+        Remove implicit resolvers for a particular tag.

         Takes care not to modify resolvers in super classes.

-        We want to load datetimes as strings, not dates
-        go on to serialise as json which doesn't have the advanced types
-        of yaml, and leads to incompatibilities down the track.
+        We want to load datetimes as strings, not dates.
         """
         if "yaml_implicit_resolvers" not in cls.__dict__:
             cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
@@ -49,7 +49,7 @@ class NoDatesSafeLoader(yaml.SafeLoader):


 def _load_yaml_without_date_parsing(filepath):
-    "Read a YAML file without converting automatically date string to datetime."
+    """Read a YAML file without converting automatically date string to datetime."""
     NoDatesSafeLoader.remove_implicit_resolver("tag:yaml.org,2002:timestamp")
     with open(filepath) as f:
         dictionary = yaml.load(f, Loader=NoDatesSafeLoader)
@@ -77,7 +77,7 @@ def read_issue(filepath: str) -> dict:
     return issue_dict


-def read_station_issue(data_source, campaign_name, station_name,
+def read_station_issue(data_source, campaign_name, station_name, metadata_archive_dir=None):
     """Open the station issue YAML file into a dictionary.

     Parameters
@@ -90,12 +90,9 @@ def read_station_issue(data_source, campaign_name, station_name, base_dir=None):
         The name of the campaign. Must be provided in UPPER CASE.
     station_name : str
         The name of the station.
-
+    data_archive_dir : str, optional
         The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
-        If not specified, the path specified in the DISDRODB active configuration will be used.
-    product : str, optional
-        The DISDRODB product in which to search for the metadata file.
-        The default is "RAW".
+        If not specified, the ``data_archive_dir`` path specified in the DISDRODB active configuration will be used.

     Returns
     -------
@@ -105,7 +102,7 @@ def read_station_issue(data_source, campaign_name, station_name, base_dir=None):
     """
     # Retrieve metadata filepath
     issue_filepath = define_issue_filepath(
-
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
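A sketch of the updated reader call, using the new metadata_archive_dir keyword from the signature above; identifiers are placeholders.

    from disdrodb.issue.reader import read_station_issue

    issue_dict = read_station_issue(
        data_source="EPFL",              # example data source from the docstrings above
        campaign_name="EPFL_ROOF_2012",  # example campaign
        station_name="STATION_NAME",     # placeholder
        metadata_archive_dir=None,       # fall back to the active DISDRODB configuration
    )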
disdrodb/issue/writer.py
CHANGED

@@ -20,6 +20,7 @@

 import logging
 import os
+from typing import Optional

 import yaml

@@ -29,8 +30,9 @@ logger = logging.getLogger(__name__)


 def _write_issue_docs(f):
-    """Provide template for issue.yml"""
-    f.write(
+    """Provide template for issue.yml."""
+    f.write(
+        """# This file is used to store timesteps/time periods with wrong/corrupted observation.
 # The specified timesteps are dropped during the L0 processing.
 # The time format used is the isoformat : YYYY-mm-dd HH:MM:SS.
 # The 'timesteps' key enable to specify the list of timesteps to be discarded.
@@ -46,11 +48,11 @@ def _write_issue_docs(f):
 # - ['2018-08-01 12:00:00', '2018-08-01 14:00:00']
 # - ['2018-08-01 15:44:30', '2018-08-01 15:59:31']
 # - ['2018-08-02 12:44:30', '2018-08-02 12:59:31'] \n
-"""
-
+""",
+    )


-def write_issue(filepath: str, timesteps: list = None, time_periods: list = None) -> None:
+def write_issue(filepath: str, timesteps: Optional[list] = None, time_periods: Optional[list] = None) -> None:
     """Write the issue YAML file.

     Parameters
@@ -59,20 +61,17 @@ def write_issue(filepath: str, timesteps: list = None, time_periods: list = None
         Filepath of the issue YAML to write.
     timesteps : list, optional
         List of timesteps (to be dropped in L0 processing).
-        The default is None
+        The default value is ``None``.
     time_periods : list, optional
         A list of time periods (to be dropped in L0 processing).
-        The default is None
+        The default value is ``None``.
     """
     # Preprocess timesteps and time_periods (to plain list of strings)
     if timesteps is not None:
         timesteps = timesteps.astype(str).tolist()

     if time_periods is not None:
-
-        for time_period in time_periods:
-            new_periods.append(time_period.astype(str).tolist())
-        time_periods = new_periods
+        time_periods = [time_period.astype(str).tolist() for time_period in time_periods]

     # Write the issue YAML file
     logger.info(f"Creating issue YAML file at {filepath}")
@@ -89,10 +88,9 @@ def write_issue(filepath: str, timesteps: list = None, time_periods: list = None
     if time_periods is not None:
         time_periods_dict = {"time_periods": time_periods}
         yaml.dump(time_periods_dict, f, default_flow_style=None)
-    return None


-def create_station_issue(data_source, campaign_name, station_name,
+def create_station_issue(data_source, campaign_name, station_name, metadata_archive_dir=None):
     """Write an empty YAML issue YAML file for a DISDRODB station.

     An error is raised if the file already exists !
@@ -107,9 +105,9 @@ def create_station_issue(data_source, campaign_name, station_name, base_dir=None
         The name of the campaign. Must be provided in UPPER CASE.
     station_name : str
         The name of the station.
-
+    data_archive_dir : str, optional
         The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
-        If not specified, the path specified in the DISDRODB active configuration will be used.
+        If not specified, the ``data_archive_dir`` path specified in the DISDRODB active configuration will be used.

     """
     # Define issue filepath
@@ -117,7 +115,7 @@ def create_station_issue(data_source, campaign_name, station_name, base_dir=None
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
-
+        metadata_archive_dir=metadata_archive_dir,
         check_exists=False,
     )
     if os.path.exists(issue_filepath):
@@ -128,4 +126,3 @@ def create_station_issue(data_source, campaign_name, station_name, base_dir=None
     # Write issue file
     write_issue(filepath=issue_filepath)
     print(f"An empty issue YAML file for station {station_name} has been created .")
-    return None
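write_issue now stringifies its inputs with .astype(str), so a sketch assuming NumPy datetime64 arrays as input (the timestamps reuse those from the issue.yml template above), with a placeholder output path:

    import numpy as np

    from disdrodb.issue.writer import write_issue

    timesteps = np.array(["2018-08-01 15:44:30", "2018-08-01 15:59:31"], dtype="datetime64[s]")
    time_periods = [np.array(["2018-08-01 12:00:00", "2018-08-01 14:00:00"], dtype="datetime64[s]")]

    # Writes the documented template header plus the 'timesteps' and 'time_periods' keys.
    write_issue("/tmp/issue.yml", timesteps=timesteps, time_periods=time_periods)  # placeholder path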