disdrodb 0.0.20__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (264) hide show
  1. disdrodb/__init__.py +132 -15
  2. disdrodb/_config.py +4 -2
  3. disdrodb/_version.py +9 -4
  4. disdrodb/api/checks.py +264 -237
  5. disdrodb/api/configs.py +4 -8
  6. disdrodb/api/create_directories.py +235 -290
  7. disdrodb/api/info.py +217 -26
  8. disdrodb/api/io.py +295 -269
  9. disdrodb/api/path.py +597 -173
  10. disdrodb/api/search.py +486 -0
  11. disdrodb/{metadata/scripts → cli}/disdrodb_check_metadata_archive.py +12 -7
  12. disdrodb/{utils/pandas.py → cli/disdrodb_data_archive_directory.py} +9 -18
  13. disdrodb/cli/disdrodb_download_archive.py +86 -0
  14. disdrodb/cli/disdrodb_download_metadata_archive.py +53 -0
  15. disdrodb/cli/disdrodb_download_station.py +84 -0
  16. disdrodb/{api/scripts → cli}/disdrodb_initialize_station.py +22 -10
  17. disdrodb/cli/disdrodb_metadata_archive_directory.py +32 -0
  18. disdrodb/{data_transfer/scripts/disdrodb_download_station.py → cli/disdrodb_open_data_archive.py} +22 -22
  19. disdrodb/cli/disdrodb_open_logs_directory.py +69 -0
  20. disdrodb/{data_transfer/scripts/disdrodb_upload_station.py → cli/disdrodb_open_metadata_archive.py} +22 -24
  21. disdrodb/cli/disdrodb_open_metadata_directory.py +71 -0
  22. disdrodb/cli/disdrodb_open_product_directory.py +74 -0
  23. disdrodb/cli/disdrodb_open_readers_directory.py +32 -0
  24. disdrodb/{l0/scripts → cli}/disdrodb_run_l0.py +38 -31
  25. disdrodb/{l0/scripts → cli}/disdrodb_run_l0_station.py +32 -30
  26. disdrodb/{l0/scripts → cli}/disdrodb_run_l0a.py +30 -21
  27. disdrodb/{l0/scripts → cli}/disdrodb_run_l0a_station.py +24 -33
  28. disdrodb/{l0/scripts → cli}/disdrodb_run_l0b.py +30 -21
  29. disdrodb/{l0/scripts → cli}/disdrodb_run_l0b_station.py +25 -34
  30. disdrodb/cli/disdrodb_run_l0c.py +130 -0
  31. disdrodb/cli/disdrodb_run_l0c_station.py +129 -0
  32. disdrodb/cli/disdrodb_run_l1.py +122 -0
  33. disdrodb/cli/disdrodb_run_l1_station.py +121 -0
  34. disdrodb/cli/disdrodb_run_l2e.py +122 -0
  35. disdrodb/cli/disdrodb_run_l2e_station.py +122 -0
  36. disdrodb/cli/disdrodb_run_l2m.py +122 -0
  37. disdrodb/cli/disdrodb_run_l2m_station.py +122 -0
  38. disdrodb/cli/disdrodb_upload_archive.py +105 -0
  39. disdrodb/cli/disdrodb_upload_station.py +98 -0
  40. disdrodb/configs.py +90 -25
  41. disdrodb/data_transfer/__init__.py +22 -0
  42. disdrodb/data_transfer/download_data.py +87 -90
  43. disdrodb/data_transfer/upload_data.py +64 -37
  44. disdrodb/data_transfer/zenodo.py +15 -18
  45. disdrodb/docs.py +1 -1
  46. disdrodb/issue/__init__.py +17 -4
  47. disdrodb/issue/checks.py +10 -23
  48. disdrodb/issue/reader.py +9 -12
  49. disdrodb/issue/writer.py +14 -17
  50. disdrodb/l0/__init__.py +17 -26
  51. disdrodb/l0/check_configs.py +35 -23
  52. disdrodb/l0/check_standards.py +32 -42
  53. disdrodb/l0/configs/{Thies_LPM → LPM}/bins_diameter.yml +44 -44
  54. disdrodb/l0/configs/{Thies_LPM → LPM}/bins_velocity.yml +40 -40
  55. disdrodb/l0/configs/LPM/l0a_encodings.yml +80 -0
  56. disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_cf_attrs.yml +62 -59
  57. disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_encodings.yml +9 -9
  58. disdrodb/l0/configs/{Thies_LPM → LPM}/raw_data_format.yml +245 -245
  59. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_diameter.yml +66 -66
  60. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_velocity.yml +64 -64
  61. disdrodb/l0/configs/PARSIVEL/l0a_encodings.yml +32 -0
  62. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_cf_attrs.yml +22 -20
  63. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_encodings.yml +17 -17
  64. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/raw_data_format.yml +77 -77
  65. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_diameter.yml +64 -64
  66. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_velocity.yml +64 -64
  67. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +39 -0
  68. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_cf_attrs.yml +24 -22
  69. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_encodings.yml +20 -20
  70. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/raw_data_format.yml +98 -98
  71. disdrodb/l0/configs/{RD_80 → RD80}/bins_diameter.yml +40 -40
  72. disdrodb/l0/configs/RD80/l0a_encodings.yml +16 -0
  73. disdrodb/l0/configs/{RD_80 → RD80}/l0b_cf_attrs.yml +3 -3
  74. disdrodb/l0/configs/RD80/l0b_encodings.yml +135 -0
  75. disdrodb/l0/configs/{RD_80 → RD80}/raw_data_format.yml +48 -48
  76. disdrodb/l0/l0_reader.py +216 -340
  77. disdrodb/l0/l0a_processing.py +237 -208
  78. disdrodb/l0/l0b_nc_processing.py +227 -80
  79. disdrodb/l0/l0b_processing.py +93 -173
  80. disdrodb/l0/l0c_processing.py +627 -0
  81. disdrodb/l0/readers/{ARM → LPM/ARM}/ARM_LPM.py +36 -58
  82. disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +226 -0
  83. disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +185 -0
  84. disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +183 -0
  85. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +179 -0
  86. disdrodb/l0/readers/{UK → LPM/UK}/DIVEN.py +14 -35
  87. disdrodb/l0/readers/PARSIVEL/AUSTRALIA/MELBOURNE_2007_PARSIVEL.py +157 -0
  88. disdrodb/l0/readers/PARSIVEL/CHINA/CHONGQING.py +113 -0
  89. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/ARCTIC_2021.py +40 -57
  90. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/COMMON_2011.py +37 -54
  91. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/DAVOS_2009_2011.py +34 -51
  92. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_2009.py +34 -51
  93. disdrodb/l0/readers/{EPFL/PARADISO_2014.py → PARSIVEL/EPFL/EPFL_ROOF_2008.py} +38 -50
  94. disdrodb/l0/readers/PARSIVEL/EPFL/EPFL_ROOF_2010.py +105 -0
  95. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2011.py +34 -51
  96. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2012.py +33 -51
  97. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GENEPI_2007.py +25 -44
  98. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007.py +25 -44
  99. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007_2.py +25 -44
  100. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HPICONET_2010.py +34 -51
  101. disdrodb/l0/readers/{EPFL/EPFL_ROOF_2010.py → PARSIVEL/EPFL/HYMEX_LTE_SOP2.py} +37 -50
  102. disdrodb/l0/readers/PARSIVEL/EPFL/HYMEX_LTE_SOP3.py +111 -0
  103. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HYMEX_LTE_SOP4.py +36 -54
  104. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2018.py +34 -52
  105. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2019.py +38 -56
  106. disdrodb/l0/readers/PARSIVEL/EPFL/PARADISO_2014.py +105 -0
  107. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PARSIVEL_2007.py +27 -45
  108. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PLATO_2019.py +24 -44
  109. disdrodb/l0/readers/PARSIVEL/EPFL/RACLETS_2019.py +140 -0
  110. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RACLETS_2019_WJF.py +41 -59
  111. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RIETHOLZBACH_2011.py +34 -51
  112. disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2017.py +117 -0
  113. disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2019.py +137 -0
  114. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/UNIL_2022.py +42 -55
  115. disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py +104 -0
  116. disdrodb/l0/readers/{GPM → PARSIVEL/GPM}/LPVEX.py +29 -48
  117. disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py +184 -0
  118. disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py +113 -0
  119. disdrodb/l0/readers/{NCAR/VORTEX_SE_2016_P1.py → PARSIVEL/NCAR/OWLES_MIPS.py} +46 -72
  120. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +125 -0
  121. disdrodb/l0/readers/{NCAR/OWLES_MIPS.py → PARSIVEL/NCAR/PLOWS_MIPS.py} +45 -64
  122. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +114 -0
  123. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +176 -0
  124. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +183 -0
  125. disdrodb/l0/readers/{ARM/ARM_LD.py → PARSIVEL2/ARM/ARM_PARSIVEL2.py} +27 -50
  126. disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +163 -0
  127. disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +163 -0
  128. disdrodb/l0/readers/{DENMARK → PARSIVEL2/DENMARK}/EROSION_nc.py +14 -35
  129. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +119 -0
  130. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +104 -0
  131. disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py +176 -0
  132. disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py +32 -0
  133. disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py +56 -0
  134. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +120 -0
  135. disdrodb/l0/readers/{NCAR → PARSIVEL2/NCAR}/PECAN_MIPS.py +45 -64
  136. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +181 -0
  137. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +160 -0
  138. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +160 -0
  139. disdrodb/l0/readers/{NCAR/PLOWS_MIPS.py → PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py} +49 -66
  140. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +118 -0
  141. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +152 -0
  142. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py +166 -0
  143. disdrodb/l0/readers/{NCAR/RELAMPAGO_RD80.py → RD80/BRAZIL/CHUVA_RD80.py} +36 -60
  144. disdrodb/l0/readers/{BRAZIL → RD80/BRAZIL}/GOAMAZON_RD80.py +36 -55
  145. disdrodb/l0/readers/{NCAR → RD80/NCAR}/CINDY_2011_RD80.py +35 -54
  146. disdrodb/l0/readers/{BRAZIL/CHUVA_RD80.py → RD80/NCAR/RELAMPAGO_RD80.py} +40 -54
  147. disdrodb/l0/readers/template_reader_raw_netcdf_data.py +62 -0
  148. disdrodb/l0/readers/{reader_template.py → template_reader_raw_text_data.py} +20 -44
  149. disdrodb/l0/routines.py +885 -581
  150. disdrodb/l0/standards.py +72 -236
  151. disdrodb/l0/template_tools.py +104 -109
  152. disdrodb/l1/__init__.py +17 -0
  153. disdrodb/l1/beard_model.py +716 -0
  154. disdrodb/l1/encoding_attrs.py +620 -0
  155. disdrodb/l1/fall_velocity.py +260 -0
  156. disdrodb/l1/filters.py +192 -0
  157. disdrodb/l1/processing.py +200 -0
  158. disdrodb/l1/resampling.py +236 -0
  159. disdrodb/l1/routines.py +357 -0
  160. disdrodb/l1_env/__init__.py +17 -0
  161. disdrodb/l1_env/routines.py +38 -0
  162. disdrodb/l2/__init__.py +17 -0
  163. disdrodb/l2/empirical_dsd.py +1735 -0
  164. disdrodb/l2/event.py +388 -0
  165. disdrodb/l2/processing.py +519 -0
  166. disdrodb/l2/processing_options.py +213 -0
  167. disdrodb/l2/routines.py +868 -0
  168. disdrodb/metadata/__init__.py +9 -2
  169. disdrodb/metadata/checks.py +165 -118
  170. disdrodb/metadata/download.py +81 -0
  171. disdrodb/metadata/geolocation.py +146 -0
  172. disdrodb/metadata/info.py +20 -13
  173. disdrodb/metadata/manipulation.py +1 -1
  174. disdrodb/metadata/reader.py +59 -8
  175. disdrodb/metadata/search.py +77 -144
  176. disdrodb/metadata/standards.py +7 -8
  177. disdrodb/metadata/writer.py +8 -14
  178. disdrodb/psd/__init__.py +38 -0
  179. disdrodb/psd/fitting.py +2146 -0
  180. disdrodb/psd/models.py +774 -0
  181. disdrodb/routines.py +1176 -0
  182. disdrodb/scattering/__init__.py +28 -0
  183. disdrodb/scattering/axis_ratio.py +344 -0
  184. disdrodb/scattering/routines.py +456 -0
  185. disdrodb/utils/__init__.py +17 -0
  186. disdrodb/utils/attrs.py +208 -0
  187. disdrodb/utils/cli.py +269 -0
  188. disdrodb/utils/compression.py +60 -42
  189. disdrodb/utils/dask.py +62 -0
  190. disdrodb/utils/decorators.py +110 -0
  191. disdrodb/utils/directories.py +107 -46
  192. disdrodb/utils/encoding.py +127 -0
  193. disdrodb/utils/list.py +29 -0
  194. disdrodb/utils/logger.py +168 -46
  195. disdrodb/utils/time.py +657 -0
  196. disdrodb/utils/warnings.py +30 -0
  197. disdrodb/utils/writer.py +57 -0
  198. disdrodb/utils/xarray.py +138 -47
  199. disdrodb/utils/yaml.py +0 -1
  200. disdrodb/viz/__init__.py +17 -0
  201. disdrodb/viz/plots.py +17 -0
  202. disdrodb-0.1.0.dist-info/METADATA +321 -0
  203. disdrodb-0.1.0.dist-info/RECORD +216 -0
  204. {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info}/WHEEL +1 -1
  205. disdrodb-0.1.0.dist-info/entry_points.txt +30 -0
  206. disdrodb/data_transfer/scripts/disdrodb_download_archive.py +0 -53
  207. disdrodb/data_transfer/scripts/disdrodb_upload_archive.py +0 -57
  208. disdrodb/l0/configs/OTT_Parsivel/l0a_encodings.yml +0 -32
  209. disdrodb/l0/configs/OTT_Parsivel2/l0a_encodings.yml +0 -39
  210. disdrodb/l0/configs/RD_80/l0a_encodings.yml +0 -16
  211. disdrodb/l0/configs/RD_80/l0b_encodings.yml +0 -135
  212. disdrodb/l0/configs/Thies_LPM/l0a_encodings.yml +0 -80
  213. disdrodb/l0/io.py +0 -257
  214. disdrodb/l0/l0_processing.py +0 -1091
  215. disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_OTT.py +0 -178
  216. disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_THIES.py +0 -247
  217. disdrodb/l0/readers/BRAZIL/CHUVA_LPM.py +0 -204
  218. disdrodb/l0/readers/BRAZIL/CHUVA_OTT.py +0 -183
  219. disdrodb/l0/readers/BRAZIL/GOAMAZON_LPM.py +0 -204
  220. disdrodb/l0/readers/BRAZIL/GOAMAZON_OTT.py +0 -183
  221. disdrodb/l0/readers/CHINA/CHONGQING.py +0 -131
  222. disdrodb/l0/readers/EPFL/EPFL_ROOF_2008.py +0 -128
  223. disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP2.py +0 -127
  224. disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP3.py +0 -129
  225. disdrodb/l0/readers/EPFL/RACLETS_2019.py +0 -158
  226. disdrodb/l0/readers/EPFL/SAMOYLOV_2017.py +0 -136
  227. disdrodb/l0/readers/EPFL/SAMOYLOV_2019.py +0 -158
  228. disdrodb/l0/readers/FRANCE/SIRTA_OTT2.py +0 -138
  229. disdrodb/l0/readers/GPM/GCPEX.py +0 -123
  230. disdrodb/l0/readers/GPM/IFLOODS.py +0 -123
  231. disdrodb/l0/readers/GPM/MC3E.py +0 -123
  232. disdrodb/l0/readers/GPM/NSSTC.py +0 -164
  233. disdrodb/l0/readers/ITALY/GID.py +0 -199
  234. disdrodb/l0/readers/MEXICO/OH_IIUNAM_nc.py +0 -92
  235. disdrodb/l0/readers/NCAR/CCOPE_2015.py +0 -133
  236. disdrodb/l0/readers/NCAR/PECAN_FP3.py +0 -137
  237. disdrodb/l0/readers/NCAR/PECAN_MOBILE.py +0 -144
  238. disdrodb/l0/readers/NCAR/RELAMPAGO_OTT.py +0 -195
  239. disdrodb/l0/readers/NCAR/SNOWIE_PJ.py +0 -172
  240. disdrodb/l0/readers/NCAR/SNOWIE_SB.py +0 -179
  241. disdrodb/l0/readers/NCAR/VORTEX2_2009.py +0 -133
  242. disdrodb/l0/readers/NCAR/VORTEX2_2010.py +0 -188
  243. disdrodb/l0/readers/NCAR/VORTEX2_2010_UF.py +0 -191
  244. disdrodb/l0/readers/NCAR/VORTEX_SE_2016_P2.py +0 -135
  245. disdrodb/l0/readers/NCAR/VORTEX_SE_2016_PIPS.py +0 -170
  246. disdrodb/l0/readers/NETHERLANDS/DELFT.py +0 -187
  247. disdrodb/l0/readers/SPAIN/SBEGUERIA.py +0 -179
  248. disdrodb/l0/scripts/disdrodb_run_l0b_concat.py +0 -93
  249. disdrodb/l0/scripts/disdrodb_run_l0b_concat_station.py +0 -85
  250. disdrodb/utils/netcdf.py +0 -452
  251. disdrodb/utils/scripts.py +0 -102
  252. disdrodb-0.0.20.dist-info/AUTHORS.md +0 -18
  253. disdrodb-0.0.20.dist-info/METADATA +0 -186
  254. disdrodb-0.0.20.dist-info/RECORD +0 -168
  255. disdrodb-0.0.20.dist-info/entry_points.txt +0 -15
  256. /disdrodb/l0/configs/{RD_80 → RD80}/bins_velocity.yml +0 -0
  257. /disdrodb/l0/manuals/{Thies_LPM.pdf → LPM.pdf} +0 -0
  258. /disdrodb/l0/manuals/{ODM_470.pdf → ODM470.pdf} +0 -0
  259. /disdrodb/l0/manuals/{OTT_Parsivel.pdf → PARSIVEL.pdf} +0 -0
  260. /disdrodb/l0/manuals/{OTT_Parsivel2.pdf → PARSIVEL2.pdf} +0 -0
  261. /disdrodb/l0/manuals/{PWS_100.pdf → PWS100.pdf} +0 -0
  262. /disdrodb/l0/manuals/{RD_80.pdf → RD80.pdf} +0 -0
  263. {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info/licenses}/LICENSE +0 -0
  264. {disdrodb-0.0.20.dist-info → disdrodb-0.1.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,236 @@
1
+ # -----------------------------------------------------------------------------.
2
+ # Copyright (c) 2021-2023 DISDRODB developers
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, either version 3 of the License, or
7
+ # (at your option) any later version.
8
+ #
9
+ # This program is distributed in the hope that it will be useful,
10
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
11
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12
+ # GNU General Public License for more details.
13
+ #
14
+ # You should have received a copy of the GNU General Public License
15
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
+ # -----------------------------------------------------------------------------.
17
+ """Utilities for temporal resampling."""
18
+
19
+
20
+ import pandas as pd
21
+ import xarray as xr
22
+
23
+ from disdrodb.utils.time import regularize_dataset
24
+
25
# Default accumulation intervals offered for resampling, expressed as pandas offset aliases.
DEFAULT_ACCUMULATIONS = ["10s", "30s", "1min", "2min", "5min", "10min", "30min", "1hour"]
26
+
27
+
28
def add_sample_interval(ds, sample_interval):
    """Attach a ``sample_interval`` coordinate to the dataset.

    Parameters
    ----------
    ds : xarray.Dataset
        The input dataset to which the sample_interval coordinate will be added.
    sample_interval : int or float
        The dataset sample interval in seconds.

    Returns
    -------
    xarray.Dataset
        The dataset with the added sample interval coordinate.

    Notes
    -----
    A new coordinate named ``sample_interval`` is added to the dataset and the
    ``measurement_interval`` attribute is updated accordingly.
    """
    # Create the sample_interval variable and document it
    ds["sample_interval"] = sample_interval
    ds["sample_interval"].attrs.update(
        {
            "description": "Sample interval",
            "long_name": "Sample interval",
            "units": "seconds",
        },
    )
    # Promote the variable to a coordinate
    ds = ds.set_coords("sample_interval")
    # Copy the attributes dictionary before mutating it so shared attrs are not altered
    ds.attrs = ds.attrs.copy()
    ds.attrs["measurement_interval"] = int(sample_interval)
    return ds
58
+
59
+
60
def define_window_size(sample_interval, accumulation_interval):
    """
    Calculate the rolling window size based on sample and accumulation intervals.

    Parameters
    ----------
    sample_interval : int
        The sample interval in seconds.
    accumulation_interval : int
        The desired accumulation interval in seconds.

    Returns
    -------
    int
        The calculated window size as the number of sample intervals required to cover the accumulation interval.

    Raises
    ------
    ValueError
        If the accumulation interval is not a multiple of the sample interval.

    Examples
    --------
    >>> define_window_size(60, 300)
    5

    >>> define_window_size(120, 600)
    5
    """
    # Check compatibility between the two intervals
    # - A non-integer number of samples cannot cover the accumulation period exactly
    if accumulation_interval % sample_interval != 0:
        raise ValueError("The accumulation interval must be a multiple of the sample interval.")

    # Calculate the window size (number of samples spanning the accumulation period)
    window_size = accumulation_interval // sample_interval

    return window_size
97
+
98
+
99
def resample_dataset(ds, sample_interval, accumulation_interval, rolling=True):
    """
    Resample the dataset to a specified accumulation interval.

    Parameters
    ----------
    ds : xarray.Dataset
        The input dataset to be resampled.
    sample_interval : int
        The sample interval of the input dataset, in seconds.
    accumulation_interval : int
        The interval in seconds over which to accumulate the data.
    rolling : bool, optional
        If True, apply a rolling window before resampling. Default is True.
        If True, forward rolling is performed.
        The output timesteps correspond to the starts of the periods over which
        the resampling operation has been performed !

    Returns
    -------
    xarray.Dataset
        The resampled dataset with updated attributes.

    Notes
    -----
    - The function regularizes the dataset (infill possible missing timesteps)
      before performing the resampling operation.
    - Variables are categorized into those to be averaged, accumulated, minimized, and maximized.
    - Custom processing for quality flags and handling of NaNs is defined.
    - The function updates the dataset attributes and the sample_interval coordinate.

    """
    # Retrieve attributes (restored on the output dataset at the end)
    attrs = ds.attrs.copy()

    # TODO: here infill NaN with zero if necessary before regularizing !

    # Ensure regular dataset without missing timesteps
    # - Infilled timesteps hold NaN; since every reduction below uses skipna=False,
    #   any window touching an infilled timestep yields NaN.
    ds = regularize_dataset(ds, freq=f"{sample_interval}s")

    # Initialize resample dataset
    ds_resampled = xr.Dataset()

    # Define which reduction applies to which variable
    var_to_average = ["fall_velocity"]
    var_to_cumulate = ["raw_drop_number", "drop_number", "drop_counts", "n_drops_selected", "n_drops_discarded"]
    var_to_min = ["Dmin"]
    var_to_max = ["Dmax"]

    # Keep only the variables actually present in the dataset
    var_to_average = [var for var in var_to_average if var in ds]
    var_to_cumulate = [var for var in var_to_cumulate if var in ds]
    var_to_min = [var for var in var_to_min if var in ds]
    var_to_max = [var for var in var_to_max if var in ds]

    # TODO Define custom processing
    # - quality_flag --> take worst
    # - skipna if less than fraction (to not waste lot of data when aggregating over i.e. hours)

    # Resample the dataset
    # - Rolling currently does not allow direct rolling forward.
    # - We currently use center=False which means search for data backward (right-aligned) !
    # - We then drop the first 'window_size' NaN timesteps and we shift backward the timesteps.
    # - https://github.com/pydata/xarray/issues/9773
    # - https://github.com/pydata/xarray/issues/8958
    if not rolling:
        # Resample: block aggregation on non-overlapping windows
        if len(var_to_average) > 0:
            ds_resampled.update(
                ds[var_to_average].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).mean(skipna=False),
            )
        if len(var_to_cumulate) > 0:
            ds_resampled.update(
                ds[var_to_cumulate].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).sum(skipna=False),
            )
        if len(var_to_min) > 0:
            ds_resampled.update(
                ds[var_to_min].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).min(skipna=False),
            )
        if len(var_to_max) > 0:
            ds_resampled.update(
                ds[var_to_max].resample({"time": pd.Timedelta(seconds=accumulation_interval)}).max(skipna=False),
            )

    else:
        # Roll and Resample: overlapping windows advancing at the original sample interval
        window_size = define_window_size(sample_interval=sample_interval, accumulation_interval=accumulation_interval)
        if len(var_to_average) > 0:
            ds_resampled.update(ds[var_to_average].rolling({"time": window_size}, center=False).mean(skipna=False))
        if len(var_to_cumulate) > 0:
            ds_resampled.update(ds[var_to_cumulate].rolling({"time": window_size}, center=False).sum(skipna=False))

        if len(var_to_min) > 0:
            ds_resampled.update(ds[var_to_min].rolling({"time": window_size}, center=False).min(skipna=False))
        if len(var_to_max) > 0:
            ds_resampled.update(ds[var_to_max].rolling({"time": window_size}, center=False).max(skipna=False))
        # Ensure time to correspond to the start time of the integration
        # - Drop the first (window_size - 1) timesteps (NaN with right-aligned windows)
        #   and shift the time axis backward so each timestep marks the window start.
        # NOTE(review): with window_size == 1, `[: -window_size + 1]` is `[:0]` (empty),
        # so this reindexing would fail — confirm callers never request
        # accumulation_interval == sample_interval together with rolling=True.
        ds_resampled = ds_resampled.isel(time=slice(window_size - 1, None)).assign_coords(
            {"time": ds_resampled["time"].data[: -window_size + 1]},
        )

    # Add attributes
    # - 'rolled' records (as a string) which aggregation strategy produced the file
    ds_resampled.attrs = attrs
    if rolling:
        ds_resampled.attrs["rolled"] = "True"
    else:
        ds_resampled.attrs["rolled"] = "False"

    # Add accumulation_interval as new sample_interval coordinate
    ds_resampled = add_sample_interval(ds_resampled, sample_interval=accumulation_interval)
    return ds_resampled
210
+
211
+
212
def get_possible_accumulations(sample_interval, accumulations=None):
    """
    Get a list of valid accumulation intervals based on the sampling time.

    Parameters
    ----------
    sample_interval : int
        The inferred sampling time in seconds.
    accumulations : list of int or str, optional
        List of desired accumulation intervals.
        Integers are interpreted as seconds; strings are parsed as pandas
        offset aliases (e.g. ``"5min"``).

    Returns
    -------
    list of int
        Valid accumulation intervals in seconds.
    """
    # Fall back to the default accumulation candidates
    if accumulations is None:
        accumulations = DEFAULT_ACCUMULATIONS

    def _to_seconds(acc):
        # Strings are parsed as pandas offset aliases; integers are already seconds
        if isinstance(acc, str):
            return int(pd.Timedelta(acc).total_seconds())
        return acc

    # Keep only intervals that an integer number of samples covers exactly
    candidate_seconds = (_to_seconds(acc) for acc in accumulations)
    return [acc for acc in candidate_seconds if acc % sample_interval == 0]
@@ -0,0 +1,357 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # -----------------------------------------------------------------------------.
4
+ # Copyright (c) 2021-2023 DISDRODB developers
5
+ #
6
+ # This program is free software: you can redistribute it and/or modify
7
+ # it under the terms of the GNU General Public License as published by
8
+ # the Free Software Foundation, either version 3 of the License, or
9
+ # (at your option) any later version.
10
+ #
11
+ # This program is distributed in the hope that it will be useful,
12
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
13
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14
+ # GNU General Public License for more details.
15
+ #
16
+ # You should have received a copy of the GNU General Public License
17
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
18
+ # -----------------------------------------------------------------------------.
19
+ """Implement DISDRODB L1 processing."""
20
+
21
+ import datetime
22
+ import logging
23
+ import os
24
+ import time
25
+ from typing import Optional
26
+
27
+ import dask
28
+ import xarray as xr
29
+
30
+ # Directory
31
+ from disdrodb.api.create_directories import (
32
+ create_logs_directory,
33
+ create_product_directory,
34
+ )
35
+ from disdrodb.api.io import find_files
36
+ from disdrodb.api.path import (
37
+ define_file_folder_path,
38
+ define_l1_filename,
39
+ )
40
+ from disdrodb.api.search import get_required_product
41
+ from disdrodb.configs import get_data_archive_dir, get_folder_partitioning, get_metadata_archive_dir
42
+ from disdrodb.l1.processing import generate_l1
43
+ from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
44
+
45
+ # Logger
46
+ from disdrodb.utils.logger import (
47
+ close_logger,
48
+ create_logger_file,
49
+ create_product_logs,
50
+ log_error,
51
+ log_info,
52
+ )
53
+ from disdrodb.utils.writer import write_product
54
+
55
+ logger = logging.getLogger(__name__)
56
+
57
+
58
def get_l1_options():
    """Return the dictionary of L1 processing options."""
    # TODO: load options from YAML
    # TODO: make options a function of sensor_name
    #
    # Minimum measurable diameter per sensor:
    # --> PARSIVEL: 0.2495
    # --> RD80: 0.313
    # --> LPM: 0.125 (we currently discard first bin with this setting)
    #
    # Maximum measurable diameter per sensor:
    # LPM: 8 mm
    # RD80: 5.6 mm
    # OTT: 26 mm
    fall_velocity_options = {
        "fall_velocity_method": "Beard1976",
    }
    filtering_options = {
        "minimum_diameter": 0.2495,  # OTT PARSIVEL first two bin no data !
        "maximum_diameter": 8,
        "minimum_velocity": 0,
        "maximum_velocity": 12,
        "above_velocity_fraction": 0.5,
        "above_velocity_tolerance": None,
        "below_velocity_fraction": 0.5,
        "below_velocity_tolerance": None,
        "small_diameter_threshold": 1,  # 2
        "small_velocity_threshold": 2.5,  # 3
        "maintain_smallest_drops": True,
    }
    return {**fall_velocity_options, **filtering_options}
90
+
91
+
92
@delayed_if_parallel
@single_threaded_if_parallel
def _generate_l1(
    filepath: str,
    data_dir: str,
    logs_dir: str,
    campaign_name: str,
    station_name: str,
    # Processing options
    force: bool,
    verbose: bool,
    parallel: bool,  # this is used only to initialize the correct logger !
):
    """Generate the L1 product from the DISRODB L0C netCDF file.

    Parameters
    ----------
    filepath : str
        Path to the L0C netCDF file.
    data_dir : str
        Directory where the L1 netCDF file will be saved.
    logs_dir : str
        Directory where the log file will be saved.
    campaign_name : str
        Name of the campaign.
    station_name : str
        Name of the station.
    force : bool
        If True, overwrite existing files.
    verbose : bool
        Whether to verbose the processing.
    parallel : bool
        Whether processing runs in parallel; used only to initialize the
        correct per-file logger.

    Returns
    -------
    str
        Path to the log file generated during processing.

    Notes
    -----
    If an error occurs during processing, it is caught and logged,
    but no error is raised to interrupt the execution.
    """
    # -----------------------------------------------------------------.
    # Define product name
    product = "L1"

    # Define folder partitioning
    folder_partitioning = get_folder_partitioning()

    # -----------------------------------------------------------------.
    # Create file logger
    # - In parallel mode each file gets its own logger/log file.
    filename = os.path.basename(filepath)
    logger, logger_filepath = create_logger_file(
        logs_dir=logs_dir,
        filename=filename,
        parallel=parallel,
    )

    ##------------------------------------------------------------------------.
    # Log start processing
    # NOTE(review): the literal "(unknown)" below looks like a lost interpolation —
    # presumably the message should include {filename}; confirm against upstream.
    msg = f"{product} processing of (unknown) has started."
    log_info(logger=logger, msg=msg, verbose=verbose)

    ##------------------------------------------------------------------------.
    # Retrieve L1 configurations
    l1_options = get_l1_options()

    ##------------------------------------------------------------------------.
    ### Core computation
    try:
        # Open the raw netCDF and load only the raw drop spectrum into memory
        with xr.open_dataset(filepath, chunks={}, decode_timedelta=False, cache=False) as ds:
            ds = ds[["raw_drop_number"]].load()

        # Produce L1 dataset
        ds = generate_l1(ds=ds, **l1_options)

        # Write L1 netCDF4 dataset
        # - Datasets with a single timestep are not written to disk
        if ds["time"].size > 1:
            # Define filepath
            filename = define_l1_filename(ds, campaign_name=campaign_name, station_name=station_name)
            folder_path = define_file_folder_path(ds, data_dir=data_dir, folder_partitioning=folder_partitioning)
            filepath = os.path.join(folder_path, filename)
            # Write to disk
            write_product(ds, product=product, filepath=filepath, force=force)

        ##--------------------------------------------------------------------.
        # Clean environment
        del ds

        # Log end processing
        msg = f"{product} processing of (unknown) has ended."
        log_info(logger=logger, msg=msg, verbose=verbose)

    ##--------------------------------------------------------------------.
    # Otherwise log the error
    # - Errors are swallowed deliberately so one bad file does not stop the archive run
    except Exception as e:
        error_type = str(type(e).__name__)
        msg = f"{error_type}: {e}"
        log_error(logger, msg, verbose=verbose)

    # Close the file logger
    close_logger(logger)

    # Return the logger file path
    return logger_filepath
198
+
199
+
200
def run_l1_station(
    # Station arguments
    data_source,
    campaign_name,
    station_name,
    # Processing options
    force: bool = False,
    verbose: bool = True,
    parallel: bool = True,
    debugging_mode: bool = False,
    # DISDRODB root directories
    data_archive_dir: Optional[str] = None,
    metadata_archive_dir: Optional[str] = None,
):
    """
    Run the L1 processing of a specific DISDRODB station when invoked from the terminal.

    The L1 routines just filter the raw drop spectrum and compute basic statistics.
    The L1 routine expects as input L0C files where each file has a unique sample interval.

    This function is intended to be called through the ``disdrodb_run_l1_station``
    command-line interface.

    Parameters
    ----------
    data_source : str
        The name of the institution (for campaigns spanning multiple countries) or
        the name of the country (for campaigns or sensor networks within a single country).
        Must be provided in UPPER CASE.
    campaign_name : str
        The name of the campaign. Must be provided in UPPER CASE.
    station_name : str
        The name of the station.
    force : bool, optional
        If ``True``, existing data in the destination directories will be overwritten.
        If ``False`` (default), an error will be raised if data already exists in the destination directories.
    verbose : bool, optional
        If ``True`` (default), detailed processing information will be printed to the terminal.
        If ``False``, less information will be displayed.
    parallel : bool, optional
        If ``True``, files will be processed in multiple processes simultaneously,
        with each process using a single thread to avoid issues with the HDF/netCDF library.
        If ``False`` (default), files will be processed sequentially in a single process,
        and multi-threading will be automatically exploited to speed up I/O tasks.
    debugging_mode : bool, optional
        If ``True``, the amount of data processed will be reduced.
        Only the first 3 files will be processed. The default value is ``False``.
    data_archive_dir : str, optional
        The base directory of DISDRODB, expected in the format ``<...>/DISDRODB``.
        If not specified, the path specified in the DISDRODB active configuration will be used.
    metadata_archive_dir : str, optional
        The base directory of the DISDRODB Metadata Archive.
        If not specified, the path specified in the DISDRODB active configuration will be used.

    """
    # Define product
    product = "L1"

    # Define base directory
    data_archive_dir = get_data_archive_dir(data_archive_dir)

    # Retrieve DISDRODB Metadata Archive directory
    metadata_archive_dir = get_metadata_archive_dir(metadata_archive_dir)

    # Define logs directory
    logs_dir = create_logs_directory(
        product=product,
        data_archive_dir=data_archive_dir,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
    )

    # ------------------------------------------------------------------------.
    # Start processing
    if verbose:
        t_i = time.time()
        msg = f"{product} processing of station {station_name} has started."
        log_info(logger=logger, msg=msg, verbose=verbose)

    # ------------------------------------------------------------------------.
    # Create directory structure
    data_dir = create_product_directory(
        data_archive_dir=data_archive_dir,
        metadata_archive_dir=metadata_archive_dir,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
        product=product,
        force=force,
    )

    # -------------------------------------------------------------------------.
    # List files to process
    required_product = get_required_product(product)
    flag_not_available_data = False
    try:
        filepaths = find_files(
            data_archive_dir=data_archive_dir,
            data_source=data_source,
            campaign_name=campaign_name,
            station_name=station_name,
            product=required_product,
            # Processing options
            debugging_mode=debugging_mode,
        )
    except Exception as e:
        print(str(e))  # Case where no file paths available
        flag_not_available_data = True

    # -------------------------------------------------------------------------.
    # If no data available, print error message and return None
    if flag_not_available_data:
        # NOTE: a space separates the station name from the rest of the sentence
        # (the two f-strings are joined without any implicit separator).
        msg = (
            f"{product} processing of {data_source} {campaign_name} {station_name} "
            + f"has not been launched because of missing {required_product} data."
        )
        print(msg)
        return

    # -----------------------------------------------------------------.
    # Generate L1 files
    # - Loop over the L0 netCDF files and generate L1 files.
    # - If parallel=True, it does that in parallel using dask.delayed
    list_tasks = [
        _generate_l1(
            filepath=filepath,
            data_dir=data_dir,
            logs_dir=logs_dir,
            campaign_name=campaign_name,
            station_name=station_name,
            # Processing options
            force=force,
            verbose=verbose,
            parallel=parallel,
        )
        for filepath in filepaths
    ]
    list_logs = dask.compute(*list_tasks) if parallel else list_tasks

    # -----------------------------------------------------------------.
    # Define L1 summary logs
    create_product_logs(
        product=product,
        data_source=data_source,
        campaign_name=campaign_name,
        station_name=station_name,
        data_archive_dir=data_archive_dir,
        # Logs list
        list_logs=list_logs,
    )

    # ---------------------------------------------------------------------.
    # End L1 processing
    if verbose:
        timedelta_str = str(datetime.timedelta(seconds=round(time.time() - t_i)))
        msg = f"{product} processing of station {station_name} completed in {timedelta_str}"
        log_info(logger=logger, msg=msg, verbose=verbose)
355
+
356
+
357
+ ####-------------------------------------------------------------------------------------------------------------------.
@@ -0,0 +1,17 @@
1
+ # -----------------------------------------------------------------------------.
2
+ # Copyright (c) 2021-2023 DISDRODB developers
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, either version 3 of the License, or
7
+ # (at your option) any later version.
8
+ #
9
+ # This program is distributed in the hope that it will be useful,
10
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
11
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12
+ # GNU General Public License for more details.
13
+ #
14
+ # You should have received a copy of the GNU General Public License
15
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
+ # -----------------------------------------------------------------------------.
17
+ """Core functions for DISDRODB L1 ENV production."""
@@ -0,0 +1,38 @@
1
+ # -----------------------------------------------------------------------------.
2
+ # Copyright (c) 2021-2023 DISDRODB developers
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, either version 3 of the License, or
7
+ # (at your option) any later version.
8
+ #
9
+ # This program is distributed in the hope that it will be useful,
10
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
11
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12
+ # GNU General Public License for more details.
13
+ #
14
+ # You should have received a copy of the GNU General Public License
15
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
+ # -----------------------------------------------------------------------------.
17
+ """Core functions for DISDRODB ENV production."""
18
+
19
+ import xarray as xr
20
+
21
+
22
def get_default_environment_dataset():
    """Define defaults values for the ENV dataset."""
    # Default environmental constants used when no L1-ENV data is available.
    defaults = {
        "sea_level_air_pressure": 101_325,
        "gas_constant_dry_air": 287.04,
        "lapse_rate": 0.0065,
        "relative_humidity": 0.95,  # Value between 0 and 1 !
        "temperature": 20 + 273.15,
    }
    ds_env = xr.Dataset()
    for variable, value in defaults.items():
        ds_env[variable] = value
    return ds_env
31
+
32
+
33
def load_env_dataset(ds):
    """Load the ENV dataset."""
    # TODO - Retrieve relative_humidity and temperature from L1-ENV
    # Attach the station coordinates of the input dataset to the defaults.
    coords = {"altitude": ds["altitude"], "latitude": ds["latitude"]}
    return get_default_environment_dataset().assign_coords(coords)
@@ -0,0 +1,17 @@
1
+ # -----------------------------------------------------------------------------.
2
+ # Copyright (c) 2021-2023 DISDRODB developers
3
+ #
4
+ # This program is free software: you can redistribute it and/or modify
5
+ # it under the terms of the GNU General Public License as published by
6
+ # the Free Software Foundation, either version 3 of the License, or
7
+ # (at your option) any later version.
8
+ #
9
+ # This program is distributed in the hope that it will be useful,
10
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
11
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12
+ # GNU General Public License for more details.
13
+ #
14
+ # You should have received a copy of the GNU General Public License
15
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
+ # -----------------------------------------------------------------------------.
17
+ """Module for DISDRODB L2 production."""