disdrodb 0.0.21__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- disdrodb/__init__.py +132 -15
- disdrodb/_config.py +4 -2
- disdrodb/_version.py +9 -4
- disdrodb/api/checks.py +264 -237
- disdrodb/api/configs.py +4 -8
- disdrodb/api/create_directories.py +235 -290
- disdrodb/api/info.py +217 -26
- disdrodb/api/io.py +306 -270
- disdrodb/api/path.py +597 -173
- disdrodb/api/search.py +486 -0
- disdrodb/{metadata/scripts → cli}/disdrodb_check_metadata_archive.py +12 -7
- disdrodb/{utils/pandas.py → cli/disdrodb_data_archive_directory.py} +9 -18
- disdrodb/cli/disdrodb_download_archive.py +86 -0
- disdrodb/cli/disdrodb_download_metadata_archive.py +53 -0
- disdrodb/cli/disdrodb_download_station.py +84 -0
- disdrodb/{api/scripts → cli}/disdrodb_initialize_station.py +22 -10
- disdrodb/cli/disdrodb_metadata_archive_directory.py +32 -0
- disdrodb/{data_transfer/scripts/disdrodb_download_station.py → cli/disdrodb_open_data_archive.py} +22 -22
- disdrodb/cli/disdrodb_open_logs_directory.py +69 -0
- disdrodb/{data_transfer/scripts/disdrodb_upload_station.py → cli/disdrodb_open_metadata_archive.py} +22 -24
- disdrodb/cli/disdrodb_open_metadata_directory.py +71 -0
- disdrodb/cli/disdrodb_open_product_directory.py +74 -0
- disdrodb/cli/disdrodb_open_readers_directory.py +32 -0
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0.py +38 -31
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0_station.py +32 -30
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a_station.py +24 -33
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b_station.py +25 -34
- disdrodb/cli/disdrodb_run_l0c.py +130 -0
- disdrodb/cli/disdrodb_run_l0c_station.py +129 -0
- disdrodb/cli/disdrodb_run_l1.py +122 -0
- disdrodb/cli/disdrodb_run_l1_station.py +121 -0
- disdrodb/cli/disdrodb_run_l2e.py +122 -0
- disdrodb/cli/disdrodb_run_l2e_station.py +122 -0
- disdrodb/cli/disdrodb_run_l2m.py +122 -0
- disdrodb/cli/disdrodb_run_l2m_station.py +122 -0
- disdrodb/cli/disdrodb_upload_archive.py +105 -0
- disdrodb/cli/disdrodb_upload_station.py +98 -0
- disdrodb/configs.py +90 -25
- disdrodb/data_transfer/__init__.py +22 -0
- disdrodb/data_transfer/download_data.py +87 -90
- disdrodb/data_transfer/upload_data.py +64 -37
- disdrodb/data_transfer/zenodo.py +15 -18
- disdrodb/docs.py +1 -1
- disdrodb/issue/__init__.py +17 -4
- disdrodb/issue/checks.py +10 -23
- disdrodb/issue/reader.py +9 -12
- disdrodb/issue/writer.py +14 -17
- disdrodb/l0/__init__.py +17 -26
- disdrodb/l0/check_configs.py +35 -23
- disdrodb/l0/check_standards.py +46 -51
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_diameter.yml +44 -44
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_velocity.yml +40 -40
- disdrodb/l0/configs/LPM/l0a_encodings.yml +80 -0
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_cf_attrs.yml +84 -65
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_encodings.yml +50 -9
- disdrodb/l0/configs/{Thies_LPM → LPM}/raw_data_format.yml +285 -245
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_diameter.yml +66 -66
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL/l0a_encodings.yml +32 -0
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_cf_attrs.yml +23 -21
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_encodings.yml +17 -17
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/raw_data_format.yml +77 -77
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_diameter.yml +64 -64
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +39 -0
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_cf_attrs.yml +28 -26
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_encodings.yml +20 -20
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/raw_data_format.yml +107 -107
- disdrodb/l0/configs/PWS100/bins_diameter.yml +173 -0
- disdrodb/l0/configs/PWS100/bins_velocity.yml +173 -0
- disdrodb/l0/configs/PWS100/l0a_encodings.yml +19 -0
- disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +76 -0
- disdrodb/l0/configs/PWS100/l0b_encodings.yml +176 -0
- disdrodb/l0/configs/PWS100/raw_data_format.yml +182 -0
- disdrodb/l0/configs/{RD_80 → RD80}/bins_diameter.yml +40 -40
- disdrodb/l0/configs/RD80/l0a_encodings.yml +16 -0
- disdrodb/l0/configs/{RD_80 → RD80}/l0b_cf_attrs.yml +3 -3
- disdrodb/l0/configs/RD80/l0b_encodings.yml +135 -0
- disdrodb/l0/configs/{RD_80 → RD80}/raw_data_format.yml +46 -50
- disdrodb/l0/l0_reader.py +216 -340
- disdrodb/l0/l0a_processing.py +237 -208
- disdrodb/l0/l0b_nc_processing.py +227 -80
- disdrodb/l0/l0b_processing.py +96 -174
- disdrodb/l0/l0c_processing.py +627 -0
- disdrodb/l0/readers/{ARM → LPM/ARM}/ARM_LPM.py +36 -58
- disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +236 -0
- disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +185 -0
- disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +185 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +195 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +210 -0
- disdrodb/l0/readers/{BRAZIL/GOAMAZON_LPM.py → LPM/KIT/CHWALA.py} +97 -76
- disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +197 -0
- disdrodb/l0/readers/LPM/SLOVENIA/CRNI_VRH.py +197 -0
- disdrodb/l0/readers/{UK → LPM/UK}/DIVEN.py +14 -35
- disdrodb/l0/readers/PARSIVEL/AUSTRALIA/MELBOURNE_2007_PARSIVEL.py +157 -0
- disdrodb/l0/readers/PARSIVEL/CHINA/CHONGQING.py +113 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/ARCTIC_2021.py +40 -57
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/COMMON_2011.py +37 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/DAVOS_2009_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_2009.py +34 -51
- disdrodb/l0/readers/{EPFL/PARADISO_2014.py → PARSIVEL/EPFL/EPFL_ROOF_2008.py} +38 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/EPFL_ROOF_2010.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2012.py +33 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GENEPI_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007_2.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HPICONET_2010.py +34 -51
- disdrodb/l0/readers/{EPFL/EPFL_ROOF_2010.py → PARSIVEL/EPFL/HYMEX_LTE_SOP2.py} +37 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/HYMEX_LTE_SOP3.py +111 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HYMEX_LTE_SOP4.py +36 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2018.py +34 -52
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2019.py +38 -56
- disdrodb/l0/readers/PARSIVEL/EPFL/PARADISO_2014.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PARSIVEL_2007.py +27 -45
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PLATO_2019.py +24 -44
- disdrodb/l0/readers/PARSIVEL/EPFL/RACLETS_2019.py +140 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RACLETS_2019_WJF.py +41 -59
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RIETHOLZBACH_2011.py +34 -51
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2017.py +117 -0
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2019.py +137 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/UNIL_2022.py +42 -55
- disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py +104 -0
- disdrodb/l0/readers/{GPM → PARSIVEL/GPM}/LPVEX.py +29 -48
- disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py +184 -0
- disdrodb/l0/readers/PARSIVEL/KIT/BURKINA_FASO.py +133 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py +113 -0
- disdrodb/l0/readers/{NCAR/VORTEX_SE_2016_P1.py → PARSIVEL/NCAR/OWLES_MIPS.py} +46 -72
- disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +125 -0
- disdrodb/l0/readers/{NCAR/OWLES_MIPS.py → PARSIVEL/NCAR/PLOWS_MIPS.py} +45 -64
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +114 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +176 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +183 -0
- disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL_FGG.py +121 -0
- disdrodb/l0/readers/{ARM/ARM_LD.py → PARSIVEL2/ARM/ARM_PARSIVEL2.py} +27 -50
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/{DENMARK → PARSIVEL2/DENMARK}/EROSION_nc.py +14 -35
- disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +189 -0
- disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +119 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +104 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py +176 -0
- disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py +32 -0
- disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py +56 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +120 -0
- disdrodb/l0/readers/{NCAR → PARSIVEL2/NCAR}/PECAN_MIPS.py +45 -64
- disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +181 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +160 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +160 -0
- disdrodb/l0/readers/{NCAR/PLOWS_MIPS.py → PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py} +49 -66
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +118 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +152 -0
- disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py +166 -0
- disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +150 -0
- disdrodb/l0/readers/{NCAR/RELAMPAGO_RD80.py → RD80/BRAZIL/CHUVA_RD80.py} +36 -60
- disdrodb/l0/readers/{BRAZIL → RD80/BRAZIL}/GOAMAZON_RD80.py +36 -55
- disdrodb/l0/readers/{NCAR → RD80/NCAR}/CINDY_2011_RD80.py +35 -54
- disdrodb/l0/readers/{BRAZIL/CHUVA_RD80.py → RD80/NCAR/RELAMPAGO_RD80.py} +40 -54
- disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +274 -0
- disdrodb/l0/readers/template_reader_raw_netcdf_data.py +62 -0
- disdrodb/l0/readers/{reader_template.py → template_reader_raw_text_data.py} +20 -44
- disdrodb/l0/routines.py +885 -581
- disdrodb/l0/standards.py +77 -238
- disdrodb/l0/template_tools.py +105 -110
- disdrodb/l1/__init__.py +17 -0
- disdrodb/l1/beard_model.py +716 -0
- disdrodb/l1/encoding_attrs.py +635 -0
- disdrodb/l1/fall_velocity.py +260 -0
- disdrodb/l1/filters.py +192 -0
- disdrodb/l1/processing.py +202 -0
- disdrodb/l1/resampling.py +236 -0
- disdrodb/l1/routines.py +358 -0
- disdrodb/l1_env/__init__.py +17 -0
- disdrodb/l1_env/routines.py +38 -0
- disdrodb/l2/__init__.py +17 -0
- disdrodb/l2/empirical_dsd.py +1833 -0
- disdrodb/l2/event.py +388 -0
- disdrodb/l2/processing.py +528 -0
- disdrodb/l2/processing_options.py +213 -0
- disdrodb/l2/routines.py +868 -0
- disdrodb/metadata/__init__.py +9 -2
- disdrodb/metadata/checks.py +180 -124
- disdrodb/metadata/download.py +81 -0
- disdrodb/metadata/geolocation.py +146 -0
- disdrodb/metadata/info.py +20 -13
- disdrodb/metadata/manipulation.py +3 -3
- disdrodb/metadata/reader.py +59 -8
- disdrodb/metadata/search.py +77 -144
- disdrodb/metadata/standards.py +83 -80
- disdrodb/metadata/writer.py +10 -16
- disdrodb/psd/__init__.py +38 -0
- disdrodb/psd/fitting.py +2146 -0
- disdrodb/psd/models.py +774 -0
- disdrodb/routines.py +1412 -0
- disdrodb/scattering/__init__.py +28 -0
- disdrodb/scattering/axis_ratio.py +344 -0
- disdrodb/scattering/routines.py +456 -0
- disdrodb/utils/__init__.py +17 -0
- disdrodb/utils/attrs.py +208 -0
- disdrodb/utils/cli.py +269 -0
- disdrodb/utils/compression.py +60 -42
- disdrodb/utils/dask.py +62 -0
- disdrodb/utils/dataframe.py +342 -0
- disdrodb/utils/decorators.py +110 -0
- disdrodb/utils/directories.py +107 -46
- disdrodb/utils/encoding.py +127 -0
- disdrodb/utils/list.py +29 -0
- disdrodb/utils/logger.py +168 -46
- disdrodb/utils/time.py +657 -0
- disdrodb/utils/warnings.py +30 -0
- disdrodb/utils/writer.py +57 -0
- disdrodb/utils/xarray.py +138 -47
- disdrodb/utils/yaml.py +0 -1
- disdrodb/viz/__init__.py +17 -0
- disdrodb/viz/plots.py +17 -0
- disdrodb-0.1.1.dist-info/METADATA +294 -0
- disdrodb-0.1.1.dist-info/RECORD +232 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/WHEEL +1 -1
- disdrodb-0.1.1.dist-info/entry_points.txt +30 -0
- disdrodb/data_transfer/scripts/disdrodb_download_archive.py +0 -53
- disdrodb/data_transfer/scripts/disdrodb_upload_archive.py +0 -57
- disdrodb/l0/configs/OTT_Parsivel/l0a_encodings.yml +0 -32
- disdrodb/l0/configs/OTT_Parsivel2/l0a_encodings.yml +0 -39
- disdrodb/l0/configs/RD_80/l0a_encodings.yml +0 -16
- disdrodb/l0/configs/RD_80/l0b_encodings.yml +0 -135
- disdrodb/l0/configs/Thies_LPM/l0a_encodings.yml +0 -80
- disdrodb/l0/io.py +0 -257
- disdrodb/l0/l0_processing.py +0 -1091
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_OTT.py +0 -178
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_THIES.py +0 -247
- disdrodb/l0/readers/BRAZIL/CHUVA_LPM.py +0 -204
- disdrodb/l0/readers/BRAZIL/CHUVA_OTT.py +0 -183
- disdrodb/l0/readers/BRAZIL/GOAMAZON_OTT.py +0 -183
- disdrodb/l0/readers/CHINA/CHONGQING.py +0 -131
- disdrodb/l0/readers/EPFL/EPFL_ROOF_2008.py +0 -128
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP2.py +0 -127
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP3.py +0 -129
- disdrodb/l0/readers/EPFL/RACLETS_2019.py +0 -158
- disdrodb/l0/readers/EPFL/SAMOYLOV_2017.py +0 -136
- disdrodb/l0/readers/EPFL/SAMOYLOV_2019.py +0 -158
- disdrodb/l0/readers/FRANCE/SIRTA_OTT2.py +0 -138
- disdrodb/l0/readers/GPM/GCPEX.py +0 -123
- disdrodb/l0/readers/GPM/IFLOODS.py +0 -123
- disdrodb/l0/readers/GPM/MC3E.py +0 -123
- disdrodb/l0/readers/GPM/NSSTC.py +0 -164
- disdrodb/l0/readers/ITALY/GID.py +0 -199
- disdrodb/l0/readers/MEXICO/OH_IIUNAM_nc.py +0 -92
- disdrodb/l0/readers/NCAR/CCOPE_2015.py +0 -133
- disdrodb/l0/readers/NCAR/PECAN_FP3.py +0 -137
- disdrodb/l0/readers/NCAR/PECAN_MOBILE.py +0 -144
- disdrodb/l0/readers/NCAR/RELAMPAGO_OTT.py +0 -195
- disdrodb/l0/readers/NCAR/SNOWIE_PJ.py +0 -172
- disdrodb/l0/readers/NCAR/SNOWIE_SB.py +0 -179
- disdrodb/l0/readers/NCAR/VORTEX2_2009.py +0 -133
- disdrodb/l0/readers/NCAR/VORTEX2_2010.py +0 -188
- disdrodb/l0/readers/NCAR/VORTEX2_2010_UF.py +0 -191
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_P2.py +0 -135
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_PIPS.py +0 -170
- disdrodb/l0/readers/NETHERLANDS/DELFT.py +0 -187
- disdrodb/l0/readers/SPAIN/SBEGUERIA.py +0 -179
- disdrodb/l0/scripts/disdrodb_run_l0b_concat.py +0 -93
- disdrodb/l0/scripts/disdrodb_run_l0b_concat_station.py +0 -85
- disdrodb/utils/netcdf.py +0 -452
- disdrodb/utils/scripts.py +0 -102
- disdrodb-0.0.21.dist-info/AUTHORS.md +0 -18
- disdrodb-0.0.21.dist-info/METADATA +0 -186
- disdrodb-0.0.21.dist-info/RECORD +0 -168
- disdrodb-0.0.21.dist-info/entry_points.txt +0 -15
- /disdrodb/l0/configs/{RD_80 → RD80}/bins_velocity.yml +0 -0
- /disdrodb/l0/manuals/{Thies_LPM.pdf → LPM.pdf} +0 -0
- /disdrodb/l0/manuals/{ODM_470.pdf → ODM470.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel.pdf → PARSIVEL.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel2.pdf → PARSIVEL2.pdf} +0 -0
- /disdrodb/l0/manuals/{PWS_100.pdf → PWS100.pdf} +0 -0
- /disdrodb/l0/manuals/{RD_80.pdf → RD80.pdf} +0 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info/licenses}/LICENSE +0 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/top_level.txt +0 -0
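The listing above shows that the sensor configuration and manual directories are renamed from vendor-prefixed identifiers to short sensor codes (Thies_LPM → LPM, OTT_Parsivel → PARSIVEL, OTT_Parsivel2 → PARSIVEL2, RD_80 → RD80, PWS_100 → PWS100, ODM_470 → ODM470), with a new PWS100 configuration added. A minimal migration sketch follows; it is not code from this diff, and it assumes station metadata stores the sensor under a "sensor_name" key:

```python
# Hypothetical helper (not part of disdrodb): map 0.0.21 sensor identifiers
# to the 0.1.1 names inferred from the renamed config/manual directories above.
OLD_TO_NEW_SENSOR_NAME = {
    "Thies_LPM": "LPM",
    "OTT_Parsivel": "PARSIVEL",
    "OTT_Parsivel2": "PARSIVEL2",
    "RD_80": "RD80",
    "PWS_100": "PWS100",
    "ODM_470": "ODM470",
}


def migrate_sensor_name(metadata: dict) -> dict:
    """Rename the 'sensor_name' entry of a station metadata dict, if needed."""
    old_name = metadata.get("sensor_name")
    if old_name in OLD_TO_NEW_SENSOR_NAME:
        metadata["sensor_name"] = OLD_TO_NEW_SENSOR_NAME[old_name]
    return metadata
```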
disdrodb/api/checks.py
CHANGED
@@ -17,23 +17,22 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 # -----------------------------------------------------------------------------.
 """DISDRODB Checks Functions."""
-
 import logging
 import os
 import re
+import warnings
+
+import numpy as np
 
-from disdrodb.api.info import infer_disdrodb_tree_path_components
 from disdrodb.api.path import (
+    define_data_dir,
     define_issue_dir,
     define_issue_filepath,
-    define_metadata_dir,
     define_metadata_filepath,
-    define_station_dir,
 )
 from disdrodb.utils.directories import (
     ensure_string_path,
     list_files,
-    remove_path_trailing_slash,
 )
 
 logger = logging.getLogger(__name__)
@@ -67,14 +66,10 @@ def check_url(url: str) -> bool:
     Returns
     -------
     bool
-        True if url well formatted, False if not well formatted.
+        ``True`` if url well formatted, ``False`` if not well formatted.
     """
     regex = r"^(https?:\/\/)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)$"  # noqa: E501
-
-    if re.match(regex, url):
-        return True
-    else:
-        return False
+    return re.match(regex, url)
 
 
 def check_path_is_a_directory(dir_path, path_name=""):
@@ -87,21 +82,94 @@ def check_path_is_a_directory(dir_path, path_name=""):
 
 
 def check_directories_inside(dir_path):
-    """Check there are directories inside the specified dir_path
+    """Check there are directories inside the specified ``dir_path``."""
     dir_paths = os.listdir(dir_path)
     if len(dir_paths) == 0:
         raise ValueError(f"There are not directories within {dir_path}")
 
 
-def
-    """Raise an error if the path does not end with
-
-
-
-
+def check_data_archive_dir(data_archive_dir: str):
+    """Raise an error if the path does not end with ``DISDRODB``."""
+    data_archive_dir = str(data_archive_dir)  # convert Pathlib to string
+    data_archive_dir = os.path.normpath(data_archive_dir)
+    if not data_archive_dir.endswith("DISDRODB"):
+        raise ValueError(f"The path {data_archive_dir} does not end with DISDRODB. Please check the path.")
+    return data_archive_dir
+
 
+def check_metadata_archive_dir(metadata_archive_dir: str):
+    """Raise an error if the path does not end with ``DISDRODB``."""
+    metadata_archive_dir = str(metadata_archive_dir)  # convert Pathlib to string
+    metadata_archive_dir = os.path.normpath(metadata_archive_dir)
+    if not metadata_archive_dir.endswith("DISDRODB"):
+        raise ValueError(f"The path {metadata_archive_dir} does not end with DISDRODB. Please check the path.")
+    return metadata_archive_dir
 
-
+
+def check_measurement_interval(measurement_interval):
+    """Check measurement interval validity."""
+    if isinstance(measurement_interval, str) and measurement_interval == "":
+        raise ValueError("measurement_interval' must be specified as an integer value.")
+    if isinstance(measurement_interval, type(None)):
+        raise ValueError("measurement_interval' can not be None.")
+    if isinstance(measurement_interval, str) and not measurement_interval.isdigit():
+        raise ValueError("measurement_interval' is not a positive digit.")
+    return int(measurement_interval)
+
+
+def check_measurement_intervals(measurement_intervals):
+    """Check measurement interval.
+
+    Can be a list. It must be a positive natural number
+    """
+    if isinstance(measurement_intervals, (int, float, str)):
+        measurement_intervals = [measurement_intervals]
+    measurement_intervals = [check_measurement_interval(v) for v in measurement_intervals]
+    return measurement_intervals
+
+
+def check_sample_interval(sample_interval):
+    """Check sample_interval argument validity."""
+    if not isinstance(sample_interval, int):
+        raise ValueError("'sample_interval' must be an integer.")
+
+
+def check_rolling(rolling):
+    """Check rolling argument validity."""
+    if not isinstance(rolling, bool):
+        raise ValueError("'rolling' must be a boolean.")
+
+
+def check_folder_partitioning(folder_partitioning):
+    """
+    Check if the given folder partitioning scheme is valid.
+
+    Parameters
+    ----------
+    folder_partitioning : str or None
+        Defines the subdirectory structure based on the dataset's start time.
+        Allowed values are:
+        - "": No additional subdirectories, files are saved directly in data_dir.
+        - "year": Files are stored under a subdirectory for the year (<data_dir>/2025).
+        - "year/month": Files are stored under subdirectories by year and month (<data_dir>/2025/04).
+        - "year/month/day": Files are stored under subdirectories by year, month and day (<data_dir>/2025/04/01).
+        - "year/month_name": Files are stored under subdirectories by year and month name (<data_dir>/2025/April).
+        - "year/quarter": Files are stored under subdirectories by year and quarter (<data_dir>/2025/Q2).
+
+    Returns
+    -------
+    folder_partitioning
+        The verified folder partitioning scheme.
+    """
+    valid_options = ["", "year", "year/month", "year/month/day", "year/month_name", "year/quarter"]
+    if folder_partitioning not in valid_options:
+        raise ValueError(
+            f"Invalid folder_partitioning scheme '{folder_partitioning}'. Valid options are: {valid_options}.",
+        )
+    return folder_partitioning
+
+
+def check_sensor_name(sensor_name: str) -> None:
     """Check sensor name.
 
     Parameters
@@ -114,23 +182,22 @@ def check_sensor_name(sensor_name: str, product: str = "L0A") -> None:
     Raises
     ------
     TypeError
-        Error if
+        Error if ``sensor_name`` is not a string.
     ValueError
         Error if the input sensor name has not been found in the list of available sensors.
     """
     from disdrodb.api.configs import available_sensor_names
 
-    sensor_names = available_sensor_names(
+    sensor_names = available_sensor_names()
     if not isinstance(sensor_name, str):
         raise TypeError("'sensor_name' must be a string.")
     if sensor_name not in sensor_names:
-        msg = f"{sensor_name} not valid
-        logger.error(msg)
+        msg = f"'{sensor_name}' is not a valid sensor_name. Valid values are {sensor_names}."
        raise ValueError(msg)
 
 
 def check_campaign_name(campaign_name):
-    """Check the campaign name is upper case
+    """Check the campaign name is upper case !."""
     upper_campaign_name = campaign_name.upper()
     if campaign_name != upper_campaign_name:
         msg = f"The campaign directory name {campaign_name} must be defined uppercase: {upper_campaign_name}"
@@ -139,7 +206,7 @@ def check_campaign_name(campaign_name):
 
 
 def check_data_source(data_source):
-    """Check the data_source name is upper case
+    """Check the data_source name is upper case !."""
     upper_data_source = data_source.upper()
     if data_source != upper_data_source:
         msg = f"The data source directory name {data_source} must be defined uppercase: {upper_data_source}"
@@ -149,9 +216,11 @@ def check_data_source(data_source):
 
 def check_product(product):
     """Check DISDRODB product."""
+    from disdrodb import PRODUCTS
+
     if not isinstance(product, str):
         raise TypeError("`product` must be a string.")
-    valid_products =
+    valid_products = PRODUCTS
     if product.upper() not in valid_products:
         msg = f"Valid `products` are {valid_products}."
         logger.error(msg)
@@ -159,75 +228,183 @@ def check_product(product):
     return product
 
 
-def
-    """
-    station_dir = define_station_dir(
-        product=product,
-        base_dir=base_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-        check_exists=False,
-    )
-    if not os.path.exists(station_dir) and os.path.isdir(station_dir):
-        msg = f"The station {station_name} data directory does not exist at {station_dir}."
-        logger.error(msg)
-        raise ValueError(msg)
-    return station_dir
+def check_product_kwargs(product, product_kwargs):
+    """Validate that product_kwargs for a given product contains exactly the required parameters.
 
+    Parameters
+    ----------
+    product : str
+        The product name (e.g., "L2E", "L2M").
+    product_kwargs : dict
+        Keyword arguments provided for this product.
+
+    Returns
+    -------
+    dict
+        The validated product_kwargs.
+
+    Raises
+    ------
+    ValueError
+        If required arguments are missing or if there are unexpected extra arguments.
+    """
+    from disdrodb import PRODUCTS_ARGUMENTS
+
+    required = set(PRODUCTS_ARGUMENTS.get(product, []))
+    provided = set(product_kwargs.keys())
+    missing = required - provided
+    extra = provided - required
+    if missing and extra:
+        raise ValueError(
+            f"For product '{product}', missing arguments: {sorted(missing)}, " f"unexpected arguments: {sorted(extra)}",
+        )
+    if missing:
+        raise ValueError(f"For product '{product}', missing arguments: {sorted(missing)}")
+    if extra:
+        raise ValueError(f"For product '{product}', unexpected arguments: {sorted(extra)}")
+    return product_kwargs
+
+
+def select_required_product_kwargs(product, product_kwargs):
+    """Select the required product arguments."""
+    from disdrodb import PRODUCTS_ARGUMENTS
+
+    required = set(PRODUCTS_ARGUMENTS.get(product, []))
+    provided = set(product_kwargs.keys())
+    missing = required - provided
+    # If missing, raise error
+    if missing:
+        raise ValueError(f"For product '{product}', missing arguments: {sorted(missing)}")
+    # Else return just required arguments
+    # --> e.g. for L0 no product arguments
+    return {k: product_kwargs[k] for k in required}
+
+
+def _check_fields(fields):
+    if fields is None:  # isinstance(fields, type(None)):
+        return fields
+    # Ensure is a list
+    if isinstance(fields, str):
+        fields = [fields]
+    # Remove duplicates
+    fields = np.unique(np.array(fields))
+    return fields
+
+
+def check_data_sources(data_sources):
+    """Check DISDRODB data sources."""
+    return _check_fields(data_sources)
 
-
-
-
+
+def check_campaign_names(campaign_names):
+    """Check DISDRODB campaign names."""
+    return _check_fields(campaign_names)
+
+
+def check_station_names(station_names):
+    """Check DISDRODB station names."""
+    return _check_fields(station_names)
+
+
+def check_invalid_fields_policy(invalid_fields):
+    """Check invalid fields policy."""
+    if invalid_fields not in ["raise", "warn", "ignore"]:
+        raise ValueError(
+            f"Invalid value for invalid_fields: {invalid_fields}. " "Valid values are 'raise', 'warn', or 'ignore'.",
+        )
+    return invalid_fields
+
+
+def check_valid_fields(fields, available_fields, field_name, invalid_fields_policy="raise"):
+    """Check if fields are valid."""
+    if fields is None:
+        return fields
+    if isinstance(fields, str):
+        fields = [fields]
+    fields = np.unique(np.array(fields))
+    invalid_fields_policy = check_invalid_fields_policy(invalid_fields_policy)
+    # Check for invalid fields
+    fields = np.array(fields)
+    is_valid = np.isin(fields, available_fields)
+    invalid_fields_values = fields[~is_valid].tolist()
+    fields = fields[is_valid].tolist()
+    # Error handling for invalid fields were found
+    if invalid_fields_policy == "warn" and invalid_fields_values:
+        warnings.warn(f"Ignoring invalid {field_name}: {invalid_fields_values}", UserWarning, stacklevel=2)
+    elif invalid_fields_policy == "raise" and invalid_fields_values:
+        raise ValueError(f"These {field_name} does not exist: {invalid_fields_values}.")
+    else:  # "ignore" silently drop invalid entries
+        pass
+    # If no valid fields left, raise error
+    if len(fields) == 0:
+        raise ValueError(f"All specified {field_name} do not exist !.")
+    return fields
+
+
+def has_available_data(
+    data_source,
+    campaign_name,
+    station_name,
+    product,
+    data_archive_dir=None,
+    # Product Options
+    **product_kwargs,
+):
+    """Return ``True`` if data are available for the given product and station."""
+    # Define product directory
+    data_dir = define_data_dir(
         product=product,
-
+        data_archive_dir=data_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        # Directory options
+        check_exists=False,
+        # Product Options
+        **product_kwargs,
     )
-
+    # If the product directory does not exists, return False
+    if not os.path.isdir(data_dir):
+        return False
+
+    # If no files, return False
+    filepaths = list_files(data_dir, glob_pattern="*", recursive=True)
     nfiles = len(filepaths)
     return nfiles >= 1
 
 
-def
-
-
+def check_data_availability(
+    product,
+    data_source,
+    campaign_name,
+    station_name,
+    data_archive_dir=None,
+    # Product Options
+    **product_kwargs,
+):
+    """Check the station product data directory has files inside. If not, raise an error."""
+    if not has_available_data(
         product=product,
-
+        data_archive_dir=data_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
+        # Product Options
+        **product_kwargs,
     ):
         msg = f"The {product} station data directory of {data_source} {campaign_name} {station_name} is empty !"
-        logger.error(msg)
         raise ValueError(msg)
 
 
-def
-    """Check existence of the metadata directory. If does not exists, raise an error."""
-    metadata_dir = define_metadata_dir(
-        product=product, base_dir=base_dir, data_source=data_source, campaign_name=campaign_name, check_exists=False
-    )
-    if not os.path.exists(metadata_dir) and os.path.isdir(metadata_dir):
-        msg = f"The metadata directory does not exist at {metadata_dir}."
-        logger.error(msg)
-        raise ValueError(msg)
-    return metadata_dir
-
-
-def check_metadata_file(product, data_source, campaign_name, station_name, base_dir=None, check_validity=True):
+def check_metadata_file(metadata_archive_dir, data_source, campaign_name, station_name, check_validity=True):
     """Check existence of a valid metadata YAML file. If does not exists, raise an error."""
-    from disdrodb.metadata.checks import
+    from disdrodb.metadata.checks import check_station_metadata
 
-    _ = check_metadata_dir(product=product, base_dir=base_dir, data_source=data_source, campaign_name=campaign_name)
     metadata_filepath = define_metadata_filepath(
-
-        base_dir=base_dir,
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
-        check_exists=False,
     )
     # Check existence
     if not os.path.exists(metadata_filepath):
@@ -240,20 +417,22 @@ def check_metadata_file(product, data_source, campaign_name, station_name, base_
 
     # Check validity
     if check_validity:
-
-
+        check_station_metadata(
+            metadata_archive_dir=metadata_archive_dir,
            data_source=data_source,
            campaign_name=campaign_name,
            station_name=station_name,
-            product=product,
        )
     return metadata_filepath
 
 
-def check_issue_dir(data_source, campaign_name,
+def check_issue_dir(data_source, campaign_name, metadata_archive_dir=None):
     """Check existence of the issue directory. If does not exists, raise an error."""
     issue_dir = define_issue_dir(
-
+        metadata_archive_dir=metadata_archive_dir,
+        data_source=data_source,
+        campaign_name=campaign_name,
+        check_exists=False,
    )
     if not os.path.exists(issue_dir) and os.path.isdir(issue_dir):
         msg = "The issue directory does not exist at {issue_dir}."
@@ -262,17 +441,18 @@ def check_issue_dir(data_source, campaign_name, base_dir=None):
     return issue_dir
 
 
-def check_issue_file(data_source, campaign_name, station_name,
+def check_issue_file(data_source, campaign_name, station_name, metadata_archive_dir=None):
     """Check existence of a valid issue YAML file. If does not exists, raise an error."""
     from disdrodb.issue.checks import check_issue_compliance
+    from disdrodb.issue.writer import create_station_issue
 
     _ = check_issue_dir(
-
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
     )
     issue_filepath = define_issue_filepath(
-
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
@@ -280,171 +460,18 @@ def check_issue_file(data_source, campaign_name, station_name, base_dir=None):
     )
     # Check existence
     if not os.path.exists(issue_filepath):
-
-
-
+        create_station_issue(
+            metadata_archive_dir=metadata_archive_dir,
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+        )
 
     # Check validity
     check_issue_compliance(
-
+        metadata_archive_dir=metadata_archive_dir,
         data_source=data_source,
         campaign_name=campaign_name,
         station_name=station_name,
     )
     return issue_filepath
-
-
-def check_is_within_raw_directory(path):
-    """Check the path is within the DISDRODB 'Raw' directory."""
-    components = infer_disdrodb_tree_path_components(path)
-    if components[1] != "Raw":
-        msg = f"{path} is not within the 'Raw' directory."
-        logger.error(msg)
-        raise ValueError(msg)
-
-
-def check_is_within_processed_directory(path):
-    """Check the path is within the DISDRODB 'Processed' directory."""
-    components = infer_disdrodb_tree_path_components(path)
-    if components[1] != "Processed":
-        msg = f"{path} is not within the 'Processed' directory."
-        logger.error(msg)
-        raise ValueError(msg)
-
-
-def check_valid_campaign_dir(campaign_dir):
-    """Check the validity of a campaign directory path.
-
-    Used to check validity of 'raw_dir' and 'processed_dir'.
-
-    The path must represents this path */DISDRODB/<Raw or Processed>/<DATA_SOURCE>/<CAMPAIGN_NAME>
-    """
-    last_component = os.path.basename(campaign_dir)
-    tree_components = infer_disdrodb_tree_path_components(campaign_dir)
-    tree_path = "/".join(tree_components)
-    # Check that is not data_source or 'Raw'/Processed' directory
-    if len(tree_components) < 4:
-        msg = (
-            "Expecting the campaign directory path to comply with the pattern <...>/DISDRODB//<Raw or"
-            " Processed>/<DATA_SOURCE>/<CAMPAIGN_NAME>."
-        )
-        msg = msg + f"It only provides {tree_path}"
-        logger.error(msg)
-        raise ValueError(msg)
-    # Check that ends with the campaign_name
-    campaign_name = tree_components[3]
-    if last_component != campaign_name:
-        msg = (
-            "Expecting the campaign directory path to comply with the pattern <...>/DISDRODB//<Raw or"
-            " Processed>/<DATA_SOURCE>/<CAMPAIGN_NAME>."
-        )
-        msg = msg + f"The 'campaign directory path {campaign_dir} does not end with '{campaign_name}'!"
-        logger.error(msg)
-        raise ValueError(msg)
-
-
-def check_raw_dir(raw_dir: str, station_name: str) -> None:
-    """Check validity of raw_dir content.
-
-    Steps:
-    1. Check that 'raw_dir' is a valid directory path
-    2. Check that 'raw_dir' follows the expect directory structure
-    3. Check that each station_name directory contains data
-    4. Check that for each station_name the mandatory metadata.yml is specified.
-    5. Check that for each station_name the mandatory issue.yml is specified.
-
-    Parameters
-    ----------
-    raw_dir : str
-        Input raw campaign directory.
-    station_name : str
-        Station name.
-    verbose : bool, optional
-        Whether to verbose the processing.
-        The default is False.
-
-    """
-    # Ensure valid path format
-    raw_dir = remove_path_trailing_slash(raw_dir)
-
-    # Check raw_dir is an existing directory
-    check_path_is_a_directory(raw_dir, path_name="raw_dir")
-
-    # Check is a valid campaign directory path
-    check_valid_campaign_dir(raw_dir)
-
-    # Check is inside the 'Raw' directory
-    check_is_within_raw_directory(raw_dir)
-
-    # Retrieve data_source and campaign_name
-    base_dir, product_type, data_source, campaign_name = infer_disdrodb_tree_path_components(raw_dir)
-
-    # Check <DATA_SOURCE> and <CAMPAIGN_NAME> are upper case
-    check_campaign_name(campaign_name)
-    check_data_source(data_source)
-
-    # Check there are directories in raw_dir
-    check_directories_inside(raw_dir)
-
-    # Check there is data in the station directory
-    check_station_has_data(
-        product="RAW",
-        base_dir=base_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-
-    # Check there is a valid metadata YAML file
-    check_metadata_file(
-        product="RAW",
-        base_dir=base_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-
-    # Check there is valid issue YAML file
-    check_issue_file(
-        base_dir=base_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-    return raw_dir
-
-
-def check_processed_dir(processed_dir):
-    """Check input, format and validity of the 'processed_dir' directory path.
-
-    Parameters
-    ----------
-    processed_dir : str
-        Path to the campaign directory in the 'DISDRODB/Processed directory tree
-
-    Returns
-    -------
-    str
-        Path of the processed campaign directory
-    """
-    # Check path type
-    processed_dir = ensure_string_path(processed_dir, msg="Provide 'processed_dir' as a string", accepth_pathlib=True)
-
-    # Ensure valid path format
-    processed_dir = remove_path_trailing_slash(processed_dir)
-
-    # Check is a valid campaign directory path
-    # - <...>/DISDRODB/Processed/<DATA_SOURCE>/<CAMPAIGN_NAME>
-    check_valid_campaign_dir(processed_dir)
-
-    # Check is inside the 'Processed' directory
-    check_is_within_processed_directory(processed_dir)
-
-    # Retrieve data_source and campaign_name
-    base_dir, product_type, data_source, campaign_name = infer_disdrodb_tree_path_components(processed_dir)
-
-    # Check <DATA_SOURCE> and <CAMPAIGN_NAME> are upper case
-    check_campaign_name(campaign_name)
-    check_data_source(data_source)
-
-    return processed_dir
disdrodb/api/configs.py
CHANGED
@@ -48,7 +48,7 @@ def get_sensor_configs_dir(sensor_name: str, product: str) -> str:
     ValueError
         Error if the config directory does not exist.
     """
-    check_sensor_name(sensor_name
+    check_sensor_name(sensor_name)
     product = check_product(product)
     config_dir = define_config_dir(product=product)
     config_sensor_dir = os.path.join(config_dir, sensor_name)
@@ -79,7 +79,7 @@ def read_config_file(sensor_name: str, product: str, filename: str) -> dict:
     ValueError
         Error if file does not exist.
     """
-    check_sensor_name(sensor_name
+    check_sensor_name(sensor_name)
     product = check_product(product)
     config_sensor_dir = get_sensor_configs_dir(sensor_name, product=product)
     config_filepath = os.path.join(config_sensor_dir, filename)
@@ -93,17 +93,13 @@ def read_config_file(sensor_name: str, product: str, filename: str) -> dict:
     return dictionary
 
 
-def available_sensor_names(
+def available_sensor_names() -> list:
     """Get available names of sensors.
 
     Returns
     -------
     sensor_names: list
         Sorted list of the available sensors
-    product: str
-        DISDRODB product.
-        By default, it returns the sensors available for DISDRODB L0A products.
     """
-
-    config_dir = define_config_dir(product=product)
+    config_dir = define_config_dir(product="L0A")
     return sorted(os.listdir(config_dir))