disdrodb 0.0.21__py3-none-any.whl → 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- disdrodb/__init__.py +132 -15
- disdrodb/_config.py +4 -2
- disdrodb/_version.py +9 -4
- disdrodb/api/checks.py +264 -237
- disdrodb/api/configs.py +4 -8
- disdrodb/api/create_directories.py +235 -290
- disdrodb/api/info.py +217 -26
- disdrodb/api/io.py +306 -270
- disdrodb/api/path.py +597 -173
- disdrodb/api/search.py +486 -0
- disdrodb/{metadata/scripts → cli}/disdrodb_check_metadata_archive.py +12 -7
- disdrodb/{utils/pandas.py → cli/disdrodb_data_archive_directory.py} +9 -18
- disdrodb/cli/disdrodb_download_archive.py +86 -0
- disdrodb/cli/disdrodb_download_metadata_archive.py +53 -0
- disdrodb/cli/disdrodb_download_station.py +84 -0
- disdrodb/{api/scripts → cli}/disdrodb_initialize_station.py +22 -10
- disdrodb/cli/disdrodb_metadata_archive_directory.py +32 -0
- disdrodb/{data_transfer/scripts/disdrodb_download_station.py → cli/disdrodb_open_data_archive.py} +22 -22
- disdrodb/cli/disdrodb_open_logs_directory.py +69 -0
- disdrodb/{data_transfer/scripts/disdrodb_upload_station.py → cli/disdrodb_open_metadata_archive.py} +22 -24
- disdrodb/cli/disdrodb_open_metadata_directory.py +71 -0
- disdrodb/cli/disdrodb_open_product_directory.py +74 -0
- disdrodb/cli/disdrodb_open_readers_directory.py +32 -0
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0.py +38 -31
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0_station.py +32 -30
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0a_station.py +24 -33
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b.py +30 -21
- disdrodb/{l0/scripts → cli}/disdrodb_run_l0b_station.py +25 -34
- disdrodb/cli/disdrodb_run_l0c.py +130 -0
- disdrodb/cli/disdrodb_run_l0c_station.py +129 -0
- disdrodb/cli/disdrodb_run_l1.py +122 -0
- disdrodb/cli/disdrodb_run_l1_station.py +121 -0
- disdrodb/cli/disdrodb_run_l2e.py +122 -0
- disdrodb/cli/disdrodb_run_l2e_station.py +122 -0
- disdrodb/cli/disdrodb_run_l2m.py +122 -0
- disdrodb/cli/disdrodb_run_l2m_station.py +122 -0
- disdrodb/cli/disdrodb_upload_archive.py +105 -0
- disdrodb/cli/disdrodb_upload_station.py +98 -0
- disdrodb/configs.py +90 -25
- disdrodb/data_transfer/__init__.py +22 -0
- disdrodb/data_transfer/download_data.py +87 -90
- disdrodb/data_transfer/upload_data.py +64 -37
- disdrodb/data_transfer/zenodo.py +15 -18
- disdrodb/docs.py +1 -1
- disdrodb/issue/__init__.py +17 -4
- disdrodb/issue/checks.py +10 -23
- disdrodb/issue/reader.py +9 -12
- disdrodb/issue/writer.py +14 -17
- disdrodb/l0/__init__.py +17 -26
- disdrodb/l0/check_configs.py +35 -23
- disdrodb/l0/check_standards.py +46 -51
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_diameter.yml +44 -44
- disdrodb/l0/configs/{Thies_LPM → LPM}/bins_velocity.yml +40 -40
- disdrodb/l0/configs/LPM/l0a_encodings.yml +80 -0
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_cf_attrs.yml +84 -65
- disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_encodings.yml +50 -9
- disdrodb/l0/configs/{Thies_LPM → LPM}/raw_data_format.yml +285 -245
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_diameter.yml +66 -66
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL/l0a_encodings.yml +32 -0
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_cf_attrs.yml +23 -21
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_encodings.yml +17 -17
- disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/raw_data_format.yml +77 -77
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_diameter.yml +64 -64
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_velocity.yml +64 -64
- disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +39 -0
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_cf_attrs.yml +28 -26
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_encodings.yml +20 -20
- disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/raw_data_format.yml +107 -107
- disdrodb/l0/configs/PWS100/bins_diameter.yml +173 -0
- disdrodb/l0/configs/PWS100/bins_velocity.yml +173 -0
- disdrodb/l0/configs/PWS100/l0a_encodings.yml +19 -0
- disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +76 -0
- disdrodb/l0/configs/PWS100/l0b_encodings.yml +176 -0
- disdrodb/l0/configs/PWS100/raw_data_format.yml +182 -0
- disdrodb/l0/configs/{RD_80 → RD80}/bins_diameter.yml +40 -40
- disdrodb/l0/configs/RD80/l0a_encodings.yml +16 -0
- disdrodb/l0/configs/{RD_80 → RD80}/l0b_cf_attrs.yml +3 -3
- disdrodb/l0/configs/RD80/l0b_encodings.yml +135 -0
- disdrodb/l0/configs/{RD_80 → RD80}/raw_data_format.yml +46 -50
- disdrodb/l0/l0_reader.py +216 -340
- disdrodb/l0/l0a_processing.py +237 -208
- disdrodb/l0/l0b_nc_processing.py +227 -80
- disdrodb/l0/l0b_processing.py +96 -174
- disdrodb/l0/l0c_processing.py +627 -0
- disdrodb/l0/readers/{ARM → LPM/ARM}/ARM_LPM.py +36 -58
- disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +236 -0
- disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +185 -0
- disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +185 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +195 -0
- disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +210 -0
- disdrodb/l0/readers/{BRAZIL/GOAMAZON_LPM.py → LPM/KIT/CHWALA.py} +97 -76
- disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +197 -0
- disdrodb/l0/readers/LPM/SLOVENIA/CRNI_VRH.py +197 -0
- disdrodb/l0/readers/{UK → LPM/UK}/DIVEN.py +14 -35
- disdrodb/l0/readers/PARSIVEL/AUSTRALIA/MELBOURNE_2007_PARSIVEL.py +157 -0
- disdrodb/l0/readers/PARSIVEL/CHINA/CHONGQING.py +113 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/ARCTIC_2021.py +40 -57
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/COMMON_2011.py +37 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/DAVOS_2009_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_2009.py +34 -51
- disdrodb/l0/readers/{EPFL/PARADISO_2014.py → PARSIVEL/EPFL/EPFL_ROOF_2008.py} +38 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/EPFL_ROOF_2010.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2011.py +34 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2012.py +33 -51
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GENEPI_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007_2.py +25 -44
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HPICONET_2010.py +34 -51
- disdrodb/l0/readers/{EPFL/EPFL_ROOF_2010.py → PARSIVEL/EPFL/HYMEX_LTE_SOP2.py} +37 -50
- disdrodb/l0/readers/PARSIVEL/EPFL/HYMEX_LTE_SOP3.py +111 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HYMEX_LTE_SOP4.py +36 -54
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2018.py +34 -52
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2019.py +38 -56
- disdrodb/l0/readers/PARSIVEL/EPFL/PARADISO_2014.py +105 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PARSIVEL_2007.py +27 -45
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PLATO_2019.py +24 -44
- disdrodb/l0/readers/PARSIVEL/EPFL/RACLETS_2019.py +140 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RACLETS_2019_WJF.py +41 -59
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RIETHOLZBACH_2011.py +34 -51
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2017.py +117 -0
- disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2019.py +137 -0
- disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/UNIL_2022.py +42 -55
- disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py +104 -0
- disdrodb/l0/readers/{GPM → PARSIVEL/GPM}/LPVEX.py +29 -48
- disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py +184 -0
- disdrodb/l0/readers/PARSIVEL/KIT/BURKINA_FASO.py +133 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py +113 -0
- disdrodb/l0/readers/{NCAR/VORTEX_SE_2016_P1.py → PARSIVEL/NCAR/OWLES_MIPS.py} +46 -72
- disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +125 -0
- disdrodb/l0/readers/{NCAR/OWLES_MIPS.py → PARSIVEL/NCAR/PLOWS_MIPS.py} +45 -64
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +114 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +176 -0
- disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +183 -0
- disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL_FGG.py +121 -0
- disdrodb/l0/readers/{ARM/ARM_LD.py → PARSIVEL2/ARM/ARM_PARSIVEL2.py} +27 -50
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +163 -0
- disdrodb/l0/readers/{DENMARK → PARSIVEL2/DENMARK}/EROSION_nc.py +14 -35
- disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +189 -0
- disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +119 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +104 -0
- disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py +176 -0
- disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py +32 -0
- disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py +56 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +120 -0
- disdrodb/l0/readers/{NCAR → PARSIVEL2/NCAR}/PECAN_MIPS.py +45 -64
- disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +181 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +160 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +160 -0
- disdrodb/l0/readers/{NCAR/PLOWS_MIPS.py → PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py} +49 -66
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +118 -0
- disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +152 -0
- disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py +166 -0
- disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +150 -0
- disdrodb/l0/readers/{NCAR/RELAMPAGO_RD80.py → RD80/BRAZIL/CHUVA_RD80.py} +36 -60
- disdrodb/l0/readers/{BRAZIL → RD80/BRAZIL}/GOAMAZON_RD80.py +36 -55
- disdrodb/l0/readers/{NCAR → RD80/NCAR}/CINDY_2011_RD80.py +35 -54
- disdrodb/l0/readers/{BRAZIL/CHUVA_RD80.py → RD80/NCAR/RELAMPAGO_RD80.py} +40 -54
- disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +274 -0
- disdrodb/l0/readers/template_reader_raw_netcdf_data.py +62 -0
- disdrodb/l0/readers/{reader_template.py → template_reader_raw_text_data.py} +20 -44
- disdrodb/l0/routines.py +885 -581
- disdrodb/l0/standards.py +77 -238
- disdrodb/l0/template_tools.py +105 -110
- disdrodb/l1/__init__.py +17 -0
- disdrodb/l1/beard_model.py +716 -0
- disdrodb/l1/encoding_attrs.py +635 -0
- disdrodb/l1/fall_velocity.py +260 -0
- disdrodb/l1/filters.py +192 -0
- disdrodb/l1/processing.py +202 -0
- disdrodb/l1/resampling.py +236 -0
- disdrodb/l1/routines.py +358 -0
- disdrodb/l1_env/__init__.py +17 -0
- disdrodb/l1_env/routines.py +38 -0
- disdrodb/l2/__init__.py +17 -0
- disdrodb/l2/empirical_dsd.py +1833 -0
- disdrodb/l2/event.py +388 -0
- disdrodb/l2/processing.py +528 -0
- disdrodb/l2/processing_options.py +213 -0
- disdrodb/l2/routines.py +868 -0
- disdrodb/metadata/__init__.py +9 -2
- disdrodb/metadata/checks.py +180 -124
- disdrodb/metadata/download.py +81 -0
- disdrodb/metadata/geolocation.py +146 -0
- disdrodb/metadata/info.py +20 -13
- disdrodb/metadata/manipulation.py +3 -3
- disdrodb/metadata/reader.py +59 -8
- disdrodb/metadata/search.py +77 -144
- disdrodb/metadata/standards.py +83 -80
- disdrodb/metadata/writer.py +10 -16
- disdrodb/psd/__init__.py +38 -0
- disdrodb/psd/fitting.py +2146 -0
- disdrodb/psd/models.py +774 -0
- disdrodb/routines.py +1412 -0
- disdrodb/scattering/__init__.py +28 -0
- disdrodb/scattering/axis_ratio.py +344 -0
- disdrodb/scattering/routines.py +456 -0
- disdrodb/utils/__init__.py +17 -0
- disdrodb/utils/attrs.py +208 -0
- disdrodb/utils/cli.py +269 -0
- disdrodb/utils/compression.py +60 -42
- disdrodb/utils/dask.py +62 -0
- disdrodb/utils/dataframe.py +342 -0
- disdrodb/utils/decorators.py +110 -0
- disdrodb/utils/directories.py +107 -46
- disdrodb/utils/encoding.py +127 -0
- disdrodb/utils/list.py +29 -0
- disdrodb/utils/logger.py +168 -46
- disdrodb/utils/time.py +657 -0
- disdrodb/utils/warnings.py +30 -0
- disdrodb/utils/writer.py +57 -0
- disdrodb/utils/xarray.py +138 -47
- disdrodb/utils/yaml.py +0 -1
- disdrodb/viz/__init__.py +17 -0
- disdrodb/viz/plots.py +17 -0
- disdrodb-0.1.1.dist-info/METADATA +294 -0
- disdrodb-0.1.1.dist-info/RECORD +232 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/WHEEL +1 -1
- disdrodb-0.1.1.dist-info/entry_points.txt +30 -0
- disdrodb/data_transfer/scripts/disdrodb_download_archive.py +0 -53
- disdrodb/data_transfer/scripts/disdrodb_upload_archive.py +0 -57
- disdrodb/l0/configs/OTT_Parsivel/l0a_encodings.yml +0 -32
- disdrodb/l0/configs/OTT_Parsivel2/l0a_encodings.yml +0 -39
- disdrodb/l0/configs/RD_80/l0a_encodings.yml +0 -16
- disdrodb/l0/configs/RD_80/l0b_encodings.yml +0 -135
- disdrodb/l0/configs/Thies_LPM/l0a_encodings.yml +0 -80
- disdrodb/l0/io.py +0 -257
- disdrodb/l0/l0_processing.py +0 -1091
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_OTT.py +0 -178
- disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_THIES.py +0 -247
- disdrodb/l0/readers/BRAZIL/CHUVA_LPM.py +0 -204
- disdrodb/l0/readers/BRAZIL/CHUVA_OTT.py +0 -183
- disdrodb/l0/readers/BRAZIL/GOAMAZON_OTT.py +0 -183
- disdrodb/l0/readers/CHINA/CHONGQING.py +0 -131
- disdrodb/l0/readers/EPFL/EPFL_ROOF_2008.py +0 -128
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP2.py +0 -127
- disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP3.py +0 -129
- disdrodb/l0/readers/EPFL/RACLETS_2019.py +0 -158
- disdrodb/l0/readers/EPFL/SAMOYLOV_2017.py +0 -136
- disdrodb/l0/readers/EPFL/SAMOYLOV_2019.py +0 -158
- disdrodb/l0/readers/FRANCE/SIRTA_OTT2.py +0 -138
- disdrodb/l0/readers/GPM/GCPEX.py +0 -123
- disdrodb/l0/readers/GPM/IFLOODS.py +0 -123
- disdrodb/l0/readers/GPM/MC3E.py +0 -123
- disdrodb/l0/readers/GPM/NSSTC.py +0 -164
- disdrodb/l0/readers/ITALY/GID.py +0 -199
- disdrodb/l0/readers/MEXICO/OH_IIUNAM_nc.py +0 -92
- disdrodb/l0/readers/NCAR/CCOPE_2015.py +0 -133
- disdrodb/l0/readers/NCAR/PECAN_FP3.py +0 -137
- disdrodb/l0/readers/NCAR/PECAN_MOBILE.py +0 -144
- disdrodb/l0/readers/NCAR/RELAMPAGO_OTT.py +0 -195
- disdrodb/l0/readers/NCAR/SNOWIE_PJ.py +0 -172
- disdrodb/l0/readers/NCAR/SNOWIE_SB.py +0 -179
- disdrodb/l0/readers/NCAR/VORTEX2_2009.py +0 -133
- disdrodb/l0/readers/NCAR/VORTEX2_2010.py +0 -188
- disdrodb/l0/readers/NCAR/VORTEX2_2010_UF.py +0 -191
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_P2.py +0 -135
- disdrodb/l0/readers/NCAR/VORTEX_SE_2016_PIPS.py +0 -170
- disdrodb/l0/readers/NETHERLANDS/DELFT.py +0 -187
- disdrodb/l0/readers/SPAIN/SBEGUERIA.py +0 -179
- disdrodb/l0/scripts/disdrodb_run_l0b_concat.py +0 -93
- disdrodb/l0/scripts/disdrodb_run_l0b_concat_station.py +0 -85
- disdrodb/utils/netcdf.py +0 -452
- disdrodb/utils/scripts.py +0 -102
- disdrodb-0.0.21.dist-info/AUTHORS.md +0 -18
- disdrodb-0.0.21.dist-info/METADATA +0 -186
- disdrodb-0.0.21.dist-info/RECORD +0 -168
- disdrodb-0.0.21.dist-info/entry_points.txt +0 -15
- /disdrodb/l0/configs/{RD_80 → RD80}/bins_velocity.yml +0 -0
- /disdrodb/l0/manuals/{Thies_LPM.pdf → LPM.pdf} +0 -0
- /disdrodb/l0/manuals/{ODM_470.pdf → ODM470.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel.pdf → PARSIVEL.pdf} +0 -0
- /disdrodb/l0/manuals/{OTT_Parsivel2.pdf → PARSIVEL2.pdf} +0 -0
- /disdrodb/l0/manuals/{PWS_100.pdf → PWS100.pdf} +0 -0
- /disdrodb/l0/manuals/{RD_80.pdf → RD80.pdf} +0 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info/licenses}/LICENSE +0 -0
- {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
# -----------------------------------------------------------------------------.
|
|
2
|
+
# Copyright (c) 2021-2023 DISDRODB developers
|
|
3
|
+
#
|
|
4
|
+
# This program is free software: you can redistribute it and/or modify
|
|
5
|
+
# it under the terms of the GNU General Public License as published by
|
|
6
|
+
# the Free Software Foundation, either version 3 of the License, or
|
|
7
|
+
# (at your option) any later version.
|
|
8
|
+
#
|
|
9
|
+
# This program is distributed in the hope that it will be useful,
|
|
10
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12
|
+
# GNU General Public License for more details.
|
|
13
|
+
#
|
|
14
|
+
# You should have received a copy of the GNU General Public License
|
|
15
|
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
16
|
+
# -----------------------------------------------------------------------------.
|
|
17
|
+
"""Utilities for temporal resampling."""
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
import pandas as pd
|
|
21
|
+
import xarray as xr
|
|
22
|
+
|
|
23
|
+
from disdrodb.utils.time import regularize_dataset
|
|
24
|
+
|
|
25
|
+
DEFAULT_ACCUMULATIONS = ["10s", "30s", "1min", "2min", "5min", "10min", "30min", "1hour"]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def add_sample_interval(ds, sample_interval):
    """Attach a ``sample_interval`` coordinate to the dataset.

    Parameters
    ----------
    ds : xarray.Dataset
        Dataset that will receive the new coordinate.
    sample_interval : int or float
        The dataset sample interval, in seconds.

    Returns
    -------
    xarray.Dataset
        Dataset with the ``sample_interval`` coordinate set and the
        global ``measurement_interval`` attribute updated.

    Notes
    -----
    A new coordinate named ``sample_interval`` is added to the dataset and
    the ``measurement_interval`` attribute is refreshed accordingly.
    """
    # Assign the value and describe it with descriptive attributes
    ds["sample_interval"] = sample_interval
    interval_attrs = {
        "description": "Sample interval",
        "long_name": "Sample interval",
        "units": "seconds",
    }
    ds["sample_interval"].attrs.update(interval_attrs)
    ds = ds.set_coords("sample_interval")
    # Refresh the measurement_interval global attribute
    # (copy attrs first so the original dataset attrs mapping is not shared)
    ds.attrs = ds.attrs.copy()
    ds.attrs["measurement_interval"] = int(sample_interval)
    return ds
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def define_window_size(sample_interval, accumulation_interval):
    """
    Calculate the rolling window size based on sampling and accumulation intervals.

    Parameters
    ----------
    sample_interval : int
        The sample interval in seconds. Must be a positive integer.
    accumulation_interval : int
        The desired accumulation interval in seconds.

    Returns
    -------
    int
        The calculated window size as the number of sampling intervals required to cover the accumulation interval.

    Raises
    ------
    ValueError
        If the sample interval is not positive, or if the accumulation
        interval is not a multiple of the sample interval.

    Examples
    --------
    >>> define_window_size(60, 300)
    5

    >>> define_window_size(120, 600)
    5
    """
    # Guard against a zero or negative sample interval: without this check a
    # zero value would raise an uninformative ZeroDivisionError below.
    if sample_interval <= 0:
        raise ValueError("The sample interval must be a positive integer.")

    # Check compatibility: the accumulation interval must align exactly
    # with the sampling grid
    if accumulation_interval % sample_interval != 0:
        raise ValueError("The accumulation interval must be a multiple of the sample interval.")

    # Calculate the window size (number of samples per accumulation period)
    window_size = accumulation_interval // sample_interval

    return window_size
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def resample_dataset(ds, sample_interval, accumulation_interval, rolling=True):
    """
    Resample the dataset to a specified accumulation interval.

    Parameters
    ----------
    ds : xarray.Dataset
        The input dataset to be resampled.
    sample_interval : int
        The sample interval of the input dataset, in seconds.
    accumulation_interval : int
        The interval in seconds over which to accumulate the data.
        When ``rolling=True`` it must be a multiple of ``sample_interval``.
    rolling : bool, optional
        If True, apply a rolling window before resampling. Default is True.
        If True, forward rolling is performed.
        The output timesteps correspond to the starts of the periods over which
        the resampling operation has been performed !

    Returns
    -------
    xarray.Dataset
        The resampled dataset with updated attributes and a new
        ``sample_interval`` coordinate set to ``accumulation_interval``.

    Notes
    -----
    - The function regularizes the dataset (infill possible missing timesteps)
      before performing the resampling operation.
    - Variables are categorized into those to be averaged, accumulated, minimized, and maximized;
      variables outside these categories are not included in the output.
    - Custom processing for quality flags and handling of NaNs is defined.
    - The function updates the dataset attributes and the sample_interval coordinate.

    """
    # Retrieve attributes
    # - Copied up-front so they can be restored on the resampled output
    attrs = ds.attrs.copy()

    # TODO: here infill NaN with zero if necessary before regularizing !

    # Ensure regular dataset without missing timesteps
    # - Inserted timesteps are NaN and propagate through the skipna=False
    #   aggregations below, so periods with gaps yield NaN results
    ds = regularize_dataset(ds, freq=f"{sample_interval}s")

    # Initialize resample dataset
    ds_resampled = xr.Dataset()

    # Retrieve variables to average/sum
    var_to_average = ["fall_velocity"]
    var_to_cumulate = ["raw_drop_number", "drop_number", "drop_counts", "N", "Nremoved"]
    var_to_min = ["Dmin"]
    var_to_max = ["Dmax"]

    # Retrieve available variables
    # - Restrict each category to the variables actually present in ds
    var_to_average = [var for var in var_to_average if var in ds]
    var_to_cumulate = [var for var in var_to_cumulate if var in ds]
    var_to_min = [var for var in var_to_min if var in ds]
    var_to_max = [var for var in var_to_max if var in ds]

    # TODO Define custom processing
    # - quality_flag --> take worst
    # - skipna if less than fraction (to not waste lot of data when aggregating over i.e. hours)

    # Resample the dataset
    # - Rolling currently does not allow direct rolling forward.
    # - We currently use center=False which means search for data backward (right-aligned) !
    # - We then drop the first 'window_size' NaN timesteps and we shift backward the timesteps.
    # - https://github.com/pydata/xarray/issues/9773
    # - https://github.com/pydata/xarray/issues/8958
    if not rolling:
        # Resample
        # - Plain (non-overlapping) resampling onto the accumulation grid
        if len(var_to_average) > 0:
            ds_resampled.update(
                ds[var_to_average].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).mean(skipna=False),
            )
        if len(var_to_cumulate) > 0:
            ds_resampled.update(
                ds[var_to_cumulate].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).sum(skipna=False),
            )
        if len(var_to_min) > 0:
            ds_resampled.update(
                ds[var_to_min].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).min(skipna=False),
            )
        if len(var_to_max) > 0:
            ds_resampled.update(
                ds[var_to_max].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).max(skipna=False),
            )

    else:
        # Roll and Resample
        # - Overlapping (rolling) aggregation at the original sample spacing
        window_size = define_window_size(sample_interval=sample_interval, accumulation_interval=accumulation_interval)
        if len(var_to_average) > 0:
            ds_resampled.update(ds[var_to_average].rolling({"time": window_size}, center=False).mean(skipna=False))
        if len(var_to_cumulate) > 0:
            ds_resampled.update(ds[var_to_cumulate].rolling({"time": window_size}, center=False).sum(skipna=False))

        if len(var_to_min) > 0:
            ds_resampled.update(ds[var_to_min].rolling({"time": window_size}, center=False).min(skipna=False))
        if len(var_to_max) > 0:
            ds_resampled.update(ds[var_to_max].rolling({"time": window_size}, center=False).max(skipna=False))
        # Ensure time to correspond to the start time of the integration
        # - Drop the first (window_size - 1) right-aligned NaN timesteps and
        #   shift the time coordinate backward so each timestep marks the
        #   start of its accumulation period
        ds_resampled = ds_resampled.isel(time=slice(window_size - 1, None)).assign_coords(
            {"time": ds_resampled["time"].data[: -window_size + 1]},
        )

    # Add attributes
    ds_resampled.attrs = attrs
    if rolling:
        ds_resampled.attrs["rolled"] = "True"
    else:
        ds_resampled.attrs["rolled"] = "False"

    # Add accumulation_interval as new sample_interval coordinate
    ds_resampled = add_sample_interval(ds_resampled, sample_interval=accumulation_interval)
    return ds_resampled
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def get_possible_accumulations(sample_interval, accumulations=None):
    """
    Get a list of valid accumulation intervals based on the sampling time.

    Parameters
    ----------
    sample_interval : int
        The inferred sampling time in seconds.
    accumulations : list of int or str, optional
        List of desired accumulation intervals. Integers are interpreted as
        seconds; strings are parsed as pandas-compatible time offsets
        (e.g. ``"5min"``). If None, a default set of accumulations is used.

    Returns
    -------
    list of int
        Valid accumulation intervals in seconds.
    """
    # Fall back to the default candidate accumulations
    if accumulations is None:
        accumulations = DEFAULT_ACCUMULATIONS

    # Normalize every candidate to an integer number of seconds
    candidates = []
    for acc in accumulations:
        if isinstance(acc, str):
            acc = int(pd.Timedelta(acc).total_seconds())
        candidates.append(acc)

    # Keep only the candidates that align with the sampling grid
    # (i.e. exact multiples of the sampling time)
    return [acc for acc in candidates if acc % sample_interval == 0]
|
disdrodb/l1/routines.py
ADDED
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
# -----------------------------------------------------------------------------.
|
|
4
|
+
# Copyright (c) 2021-2023 DISDRODB developers
|
|
5
|
+
#
|
|
6
|
+
# This program is free software: you can redistribute it and/or modify
|
|
7
|
+
# it under the terms of the GNU General Public License as published by
|
|
8
|
+
# the Free Software Foundation, either version 3 of the License, or
|
|
9
|
+
# (at your option) any later version.
|
|
10
|
+
#
|
|
11
|
+
# This program is distributed in the hope that it will be useful,
|
|
12
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
13
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
14
|
+
# GNU General Public License for more details.
|
|
15
|
+
#
|
|
16
|
+
# You should have received a copy of the GNU General Public License
|
|
17
|
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
18
|
+
# -----------------------------------------------------------------------------.
|
|
19
|
+
"""Implement DISDRODB L1 processing."""
|
|
20
|
+
|
|
21
|
+
import datetime
|
|
22
|
+
import logging
|
|
23
|
+
import os
|
|
24
|
+
import time
|
|
25
|
+
from typing import Optional
|
|
26
|
+
|
|
27
|
+
import dask
|
|
28
|
+
import xarray as xr
|
|
29
|
+
|
|
30
|
+
# Directory
|
|
31
|
+
from disdrodb.api.create_directories import (
|
|
32
|
+
create_logs_directory,
|
|
33
|
+
create_product_directory,
|
|
34
|
+
)
|
|
35
|
+
from disdrodb.api.io import find_files
|
|
36
|
+
from disdrodb.api.path import (
|
|
37
|
+
define_file_folder_path,
|
|
38
|
+
define_l1_filename,
|
|
39
|
+
)
|
|
40
|
+
from disdrodb.api.search import get_required_product
|
|
41
|
+
from disdrodb.configs import get_data_archive_dir, get_folder_partitioning, get_metadata_archive_dir
|
|
42
|
+
from disdrodb.l1.processing import generate_l1
|
|
43
|
+
from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
|
|
44
|
+
|
|
45
|
+
# Logger
|
|
46
|
+
from disdrodb.utils.logger import (
|
|
47
|
+
close_logger,
|
|
48
|
+
create_logger_file,
|
|
49
|
+
create_product_logs,
|
|
50
|
+
log_error,
|
|
51
|
+
log_info,
|
|
52
|
+
)
|
|
53
|
+
from disdrodb.utils.writer import write_product
|
|
54
|
+
|
|
55
|
+
logger = logging.getLogger(__name__)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def get_l1_options():
    """Return the default DISDRODB L1 processing options."""
    # - TODO: from YAML
    # - TODO: as function of sensor name

    # Typical minimum_diameter per sensor:
    # --> PWS100: 0.05
    # --> PARSIVEL: 0.2495
    # --> RD80: 0.313
    # --> LPM: 0.125 (we currently discard first bin with this setting)

    # Typical maximum_diameter per sensor:
    # LPM: 8 mm
    # RD80: 5.6 mm
    # OTT: 26 mm

    # Fall velocity option
    fall_velocity_options = {
        "fall_velocity_method": "Beard1976",
    }
    # Diameter-Velocity Filtering Options
    filtering_options = {
        "minimum_diameter": 0.2495,  # OTT PARSIVEL first two bin no data !
        "maximum_diameter": 10,
        "minimum_velocity": 0,
        "maximum_velocity": 12,
        "above_velocity_fraction": 0.5,
        "above_velocity_tolerance": None,
        "below_velocity_fraction": 0.5,
        "below_velocity_tolerance": None,
        "small_diameter_threshold": 1,  # 2
        "small_velocity_threshold": 2.5,  # 3
        "maintain_smallest_drops": True,
    }
    return {**fall_velocity_options, **filtering_options}
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
@delayed_if_parallel
@single_threaded_if_parallel
def _generate_l1(
    filepath,
    data_dir,
    logs_dir,
    campaign_name,
    station_name,
    # Processing options
    force,
    verbose,
    parallel,  # this is used only to initialize the correct logger !
):
    """Generate the L1 product from the DISDRODB L0C netCDF file.

    Parameters
    ----------
    filepath : str
        Path to the L0C netCDF file.
    data_dir : str
        Directory where the L1 netCDF file will be saved.
    logs_dir : str
        Directory where the log file will be saved.
    campaign_name : str
        Name of the campaign.
    station_name : str
        Name of the station.
    force : bool
        If True, overwrite existing files.
    verbose : bool
        Whether to verbose the processing.

    Returns
    -------
    str
        Path to the log file generated during processing.

    Notes
    -----
    If an error occurs during processing, it is caught and logged,
    but no error is raised to interrupt the execution.
    """
    # -----------------------------------------------------------------.
    # Define product name
    product = "L1"

    # Define folder partitioning
    folder_partitioning = get_folder_partitioning()

    # -----------------------------------------------------------------.
    # Create file logger
    filename = os.path.basename(filepath)
    logger, logger_filepath = create_logger_file(
        logs_dir=logs_dir,
        filename=filename,
        parallel=parallel,
    )
    # Keep a reference to the input filename for logging:
    # 'filename' and 'filepath' are reassigned to the L1 output file below.
    input_filename = filename

    ##------------------------------------------------------------------------.
    # Log start processing
    # - BUGFIX: previously logged the literal placeholder "(unknown)"
    #   instead of the name of the file being processed
    msg = f"{product} processing of {input_filename} has started."
    log_info(logger=logger, msg=msg, verbose=verbose)

    ##------------------------------------------------------------------------.
    # Retrieve L1 configurations
    l1_options = get_l1_options()

    ##------------------------------------------------------------------------.
    ### Core computation
    try:
        # Open the L0C netCDF file and load only the raw drop spectrum
        with xr.open_dataset(filepath, chunks={}, decode_timedelta=False, cache=False) as ds:
            ds = ds[["raw_drop_number"]].load()

        # Produce L1 dataset
        ds = generate_l1(ds=ds, **l1_options)

        # Write L1 netCDF4 dataset
        # - Skip datasets with a single timestep
        if ds["time"].size > 1:
            # Define filepath
            filename = define_l1_filename(ds, campaign_name=campaign_name, station_name=station_name)
            folder_path = define_file_folder_path(ds, data_dir=data_dir, folder_partitioning=folder_partitioning)
            filepath = os.path.join(folder_path, filename)
            # Write to disk
            write_product(ds, product=product, filepath=filepath, force=force)

        ##--------------------------------------------------------------------.
        # Clean environment
        del ds

        # Log end processing
        msg = f"{product} processing of {input_filename} has ended."
        log_info(logger=logger, msg=msg, verbose=verbose)

    ##--------------------------------------------------------------------.
    # Otherwise log the error (no exception is propagated to the caller)
    except Exception as e:
        error_type = str(type(e).__name__)
        msg = f"{error_type}: {e}"
        log_error(logger, msg, verbose=verbose)

    # Close the file logger
    close_logger(logger)

    # Return the logger file path
    return logger_filepath
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def run_l1_station(
    # Station arguments
    data_source,
    campaign_name,
    station_name,
    # Processing options
    force: bool = False,
    verbose: bool = True,
    parallel: bool = True,
    debugging_mode: bool = False,
    # DISDRODB root directories
    data_archive_dir: Optional[str] = None,
    metadata_archive_dir: Optional[str] = None,
):
    """
    Run the L1 processing of a specific DISDRODB station when invoked from the terminal.

    The L1 routines just filter the raw drop spectrum and compute basic statistics.
    The L1 routine expects as input L0C files where each file has a unique sample interval.

    This function is intended to be called through the ``disdrodb_run_l1_station``
    command-line interface.

    Parameters
    ----------
    data_source : str
        The name of the institution (for campaigns spanning multiple countries) or
        the name of the country (for campaigns or sensor networks within a single country).
        Must be provided in UPPER CASE.
    campaign_name : str
        The name of the campaign. Must be provided in UPPER CASE.
    station_name : str
        The name of the station.
    force : bool, optional
        If ``True``, existing data in the destination directories will be overwritten.
        If ``False`` (default), an error will be raised if data already exists in the destination directories.
    verbose : bool, optional
        If ``True`` (default), detailed processing information will be printed to the terminal.
        If ``False``, less information will be displayed.
    parallel : bool, optional
        If ``True``, files will be processed in multiple processes simultaneously,
        with each process using a single thread to avoid issues with the HDF/netCDF library.
        If ``False`` (default), files will be processed sequentially in a single process,
        and multi-threading will be automatically exploited to speed up I/O tasks.
    debugging_mode : bool, optional
        If ``True``, the amount of data processed will be reduced.
        Only the first 3 files will be processed. The default value is ``False``.
    data_archive_dir : str, optional
        The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
        If not specified, the path specified in the DISDRODB active configuration will be used.
    metadata_archive_dir : str, optional
        The directory path of the DISDRODB Metadata Archive.
        If not specified, the path specified in the DISDRODB active configuration will be used.

    Returns
    -------
    None
        The function writes L1 products and log files to disk and returns ``None``.

    """
    # Define product
    product = "L1"

    # Define base directory
    data_archive_dir = get_data_archive_dir(data_archive_dir)

    # Retrieve DISDRODB Metadata Archive directory
    metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)

    # Define logs directory
    logs_dir = create_logs_directory(
        product=product,
        data_archive_dir=data_archive_dir,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
    )

    # ------------------------------------------------------------------------.
    # Start processing
    # NOTE: t_i is taken unconditionally so the final timing report does not
    # silently depend on 'verbose' having been True when processing started.
    t_i = time.time()
    if verbose:
        msg = f"{product} processing of station {station_name} has started."
        log_info(logger=logger, msg=msg, verbose=verbose)

    # ------------------------------------------------------------------------.
    # Create directory structure
    data_dir = create_product_directory(
        data_archive_dir=data_archive_dir,
        metadata_archive_dir=metadata_archive_dir,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
        product=product,
        force=force,
    )

    # -------------------------------------------------------------------------.
    # List files to process
    required_product = get_required_product(product)
    try:
        filepaths = find_files(
            data_archive_dir=data_archive_dir,
            data_source=data_source,
            campaign_name=campaign_name,
            station_name=station_name,
            product=required_product,
            # Processing options
            debugging_mode=debugging_mode,
        )
    except Exception as e:
        # Case where no file paths are available: report and return without
        # raising, so archive-wide processing can continue with other stations.
        print(str(e))
        msg = (
            f"{product} processing of {data_source} {campaign_name} {station_name} "
            f"has not been launched because of missing {required_product} data."
        )
        print(msg)
        return

    # -----------------------------------------------------------------.
    # Generate L1 files
    # - Loop over the L0C netCDF files and generate L1 files.
    # - If parallel=True, it does that in parallel using dask.delayed
    list_tasks = [
        _generate_l1(
            filepath=filepath,
            data_dir=data_dir,
            logs_dir=logs_dir,
            campaign_name=campaign_name,
            station_name=station_name,
            # Processing options
            force=force,
            verbose=verbose,
            parallel=parallel,
        )
        for filepath in filepaths
    ]
    list_logs = dask.compute(*list_tasks) if parallel else list_tasks

    # -----------------------------------------------------------------.
    # Define L1 summary logs
    create_product_logs(
        product=product,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
        data_archive_dir=data_archive_dir,
        # Logs list
        list_logs=list_logs,
    )

    # ---------------------------------------------------------------------.
    # End L1 processing
    if verbose:
        timedelta_str = str(datetime.timedelta(seconds=round(time.time() - t_i)))
        msg = f"{product} processing of station {station_name} completed in {timedelta_str}"
        log_info(logger=logger, msg=msg, verbose=verbose)
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
####-------------------------------------------------------------------------------------------------------------------.
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# -----------------------------------------------------------------------------.
|
|
2
|
+
# Copyright (c) 2021-2023 DISDRODB developers
|
|
3
|
+
#
|
|
4
|
+
# This program is free software: you can redistribute it and/or modify
|
|
5
|
+
# it under the terms of the GNU General Public License as published by
|
|
6
|
+
# the Free Software Foundation, either version 3 of the License, or
|
|
7
|
+
# (at your option) any later version.
|
|
8
|
+
#
|
|
9
|
+
# This program is distributed in the hope that it will be useful,
|
|
10
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12
|
+
# GNU General Public License for more details.
|
|
13
|
+
#
|
|
14
|
+
# You should have received a copy of the GNU General Public License
|
|
15
|
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
16
|
+
# -----------------------------------------------------------------------------.
|
|
17
|
+
"""Core functions for DISDRODB L1 ENV production."""
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# -----------------------------------------------------------------------------.
|
|
2
|
+
# Copyright (c) 2021-2023 DISDRODB developers
|
|
3
|
+
#
|
|
4
|
+
# This program is free software: you can redistribute it and/or modify
|
|
5
|
+
# it under the terms of the GNU General Public License as published by
|
|
6
|
+
# the Free Software Foundation, either version 3 of the License, or
|
|
7
|
+
# (at your option) any later version.
|
|
8
|
+
#
|
|
9
|
+
# This program is distributed in the hope that it will be useful,
|
|
10
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12
|
+
# GNU General Public License for more details.
|
|
13
|
+
#
|
|
14
|
+
# You should have received a copy of the GNU General Public License
|
|
15
|
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
16
|
+
# -----------------------------------------------------------------------------.
|
|
17
|
+
"""Core functions for DISDRODB ENV production."""
|
|
18
|
+
|
|
19
|
+
import xarray as xr
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def get_default_environment_dataset():
    """Define defaults values for the ENV dataset."""
    # Default scalar environment values used when no measured L1-ENV data
    # is available. Relative humidity is a fraction in [0, 1]; the
    # temperature default is 20 degrees Celsius expressed in Kelvin.
    defaults = {
        "sea_level_air_pressure": 101_325,
        "gas_constant_dry_air": 287.04,
        "lapse_rate": 0.0065,
        "relative_humidity": 0.95,  # fraction between 0 and 1 !
        "temperature": 20 + 273.15,
    }
    ds_env = xr.Dataset()
    for variable, value in defaults.items():
        ds_env[variable] = value
    return ds_env
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def load_env_dataset(ds):
    """Load the ENV dataset."""
    # TODO - Retrieve relative_humidity and temperature from L1-ENV
    coords = {"altitude": ds["altitude"], "latitude": ds["latitude"]}
    return get_default_environment_dataset().assign_coords(coords)
|
disdrodb/l2/__init__.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# -----------------------------------------------------------------------------.
|
|
2
|
+
# Copyright (c) 2021-2023 DISDRODB developers
|
|
3
|
+
#
|
|
4
|
+
# This program is free software: you can redistribute it and/or modify
|
|
5
|
+
# it under the terms of the GNU General Public License as published by
|
|
6
|
+
# the Free Software Foundation, either version 3 of the License, or
|
|
7
|
+
# (at your option) any later version.
|
|
8
|
+
#
|
|
9
|
+
# This program is distributed in the hope that it will be useful,
|
|
10
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12
|
+
# GNU General Public License for more details.
|
|
13
|
+
#
|
|
14
|
+
# You should have received a copy of the GNU General Public License
|
|
15
|
+
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
16
|
+
# -----------------------------------------------------------------------------.
|
|
17
|
+
"""Module for DISDRODB L2 production."""
|