disdrodb 0.0.21__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (279):
  1. disdrodb/__init__.py +132 -15
  2. disdrodb/_config.py +4 -2
  3. disdrodb/_version.py +9 -4
  4. disdrodb/api/checks.py +264 -237
  5. disdrodb/api/configs.py +4 -8
  6. disdrodb/api/create_directories.py +235 -290
  7. disdrodb/api/info.py +217 -26
  8. disdrodb/api/io.py +306 -270
  9. disdrodb/api/path.py +597 -173
  10. disdrodb/api/search.py +486 -0
  11. disdrodb/{metadata/scripts → cli}/disdrodb_check_metadata_archive.py +12 -7
  12. disdrodb/{utils/pandas.py → cli/disdrodb_data_archive_directory.py} +9 -18
  13. disdrodb/cli/disdrodb_download_archive.py +86 -0
  14. disdrodb/cli/disdrodb_download_metadata_archive.py +53 -0
  15. disdrodb/cli/disdrodb_download_station.py +84 -0
  16. disdrodb/{api/scripts → cli}/disdrodb_initialize_station.py +22 -10
  17. disdrodb/cli/disdrodb_metadata_archive_directory.py +32 -0
  18. disdrodb/{data_transfer/scripts/disdrodb_download_station.py → cli/disdrodb_open_data_archive.py} +22 -22
  19. disdrodb/cli/disdrodb_open_logs_directory.py +69 -0
  20. disdrodb/{data_transfer/scripts/disdrodb_upload_station.py → cli/disdrodb_open_metadata_archive.py} +22 -24
  21. disdrodb/cli/disdrodb_open_metadata_directory.py +71 -0
  22. disdrodb/cli/disdrodb_open_product_directory.py +74 -0
  23. disdrodb/cli/disdrodb_open_readers_directory.py +32 -0
  24. disdrodb/{l0/scripts → cli}/disdrodb_run_l0.py +38 -31
  25. disdrodb/{l0/scripts → cli}/disdrodb_run_l0_station.py +32 -30
  26. disdrodb/{l0/scripts → cli}/disdrodb_run_l0a.py +30 -21
  27. disdrodb/{l0/scripts → cli}/disdrodb_run_l0a_station.py +24 -33
  28. disdrodb/{l0/scripts → cli}/disdrodb_run_l0b.py +30 -21
  29. disdrodb/{l0/scripts → cli}/disdrodb_run_l0b_station.py +25 -34
  30. disdrodb/cli/disdrodb_run_l0c.py +130 -0
  31. disdrodb/cli/disdrodb_run_l0c_station.py +129 -0
  32. disdrodb/cli/disdrodb_run_l1.py +122 -0
  33. disdrodb/cli/disdrodb_run_l1_station.py +121 -0
  34. disdrodb/cli/disdrodb_run_l2e.py +122 -0
  35. disdrodb/cli/disdrodb_run_l2e_station.py +122 -0
  36. disdrodb/cli/disdrodb_run_l2m.py +122 -0
  37. disdrodb/cli/disdrodb_run_l2m_station.py +122 -0
  38. disdrodb/cli/disdrodb_upload_archive.py +105 -0
  39. disdrodb/cli/disdrodb_upload_station.py +98 -0
  40. disdrodb/configs.py +90 -25
  41. disdrodb/data_transfer/__init__.py +22 -0
  42. disdrodb/data_transfer/download_data.py +87 -90
  43. disdrodb/data_transfer/upload_data.py +64 -37
  44. disdrodb/data_transfer/zenodo.py +15 -18
  45. disdrodb/docs.py +1 -1
  46. disdrodb/issue/__init__.py +17 -4
  47. disdrodb/issue/checks.py +10 -23
  48. disdrodb/issue/reader.py +9 -12
  49. disdrodb/issue/writer.py +14 -17
  50. disdrodb/l0/__init__.py +17 -26
  51. disdrodb/l0/check_configs.py +35 -23
  52. disdrodb/l0/check_standards.py +46 -51
  53. disdrodb/l0/configs/{Thies_LPM → LPM}/bins_diameter.yml +44 -44
  54. disdrodb/l0/configs/{Thies_LPM → LPM}/bins_velocity.yml +40 -40
  55. disdrodb/l0/configs/LPM/l0a_encodings.yml +80 -0
  56. disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_cf_attrs.yml +84 -65
  57. disdrodb/l0/configs/{Thies_LPM → LPM}/l0b_encodings.yml +50 -9
  58. disdrodb/l0/configs/{Thies_LPM → LPM}/raw_data_format.yml +285 -245
  59. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_diameter.yml +66 -66
  60. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/bins_velocity.yml +64 -64
  61. disdrodb/l0/configs/PARSIVEL/l0a_encodings.yml +32 -0
  62. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_cf_attrs.yml +23 -21
  63. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/l0b_encodings.yml +17 -17
  64. disdrodb/l0/configs/{OTT_Parsivel → PARSIVEL}/raw_data_format.yml +77 -77
  65. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_diameter.yml +64 -64
  66. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/bins_velocity.yml +64 -64
  67. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +39 -0
  68. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_cf_attrs.yml +28 -26
  69. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/l0b_encodings.yml +20 -20
  70. disdrodb/l0/configs/{OTT_Parsivel2 → PARSIVEL2}/raw_data_format.yml +107 -107
  71. disdrodb/l0/configs/PWS100/bins_diameter.yml +173 -0
  72. disdrodb/l0/configs/PWS100/bins_velocity.yml +173 -0
  73. disdrodb/l0/configs/PWS100/l0a_encodings.yml +19 -0
  74. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +76 -0
  75. disdrodb/l0/configs/PWS100/l0b_encodings.yml +176 -0
  76. disdrodb/l0/configs/PWS100/raw_data_format.yml +182 -0
  77. disdrodb/l0/configs/{RD_80 → RD80}/bins_diameter.yml +40 -40
  78. disdrodb/l0/configs/RD80/l0a_encodings.yml +16 -0
  79. disdrodb/l0/configs/{RD_80 → RD80}/l0b_cf_attrs.yml +3 -3
  80. disdrodb/l0/configs/RD80/l0b_encodings.yml +135 -0
  81. disdrodb/l0/configs/{RD_80 → RD80}/raw_data_format.yml +46 -50
  82. disdrodb/l0/l0_reader.py +216 -340
  83. disdrodb/l0/l0a_processing.py +237 -208
  84. disdrodb/l0/l0b_nc_processing.py +227 -80
  85. disdrodb/l0/l0b_processing.py +96 -174
  86. disdrodb/l0/l0c_processing.py +627 -0
  87. disdrodb/l0/readers/{ARM → LPM/ARM}/ARM_LPM.py +36 -58
  88. disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +236 -0
  89. disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +185 -0
  90. disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +185 -0
  91. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +195 -0
  92. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +210 -0
  93. disdrodb/l0/readers/{BRAZIL/GOAMAZON_LPM.py → LPM/KIT/CHWALA.py} +97 -76
  94. disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +197 -0
  95. disdrodb/l0/readers/LPM/SLOVENIA/CRNI_VRH.py +197 -0
  96. disdrodb/l0/readers/{UK → LPM/UK}/DIVEN.py +14 -35
  97. disdrodb/l0/readers/PARSIVEL/AUSTRALIA/MELBOURNE_2007_PARSIVEL.py +157 -0
  98. disdrodb/l0/readers/PARSIVEL/CHINA/CHONGQING.py +113 -0
  99. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/ARCTIC_2021.py +40 -57
  100. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/COMMON_2011.py +37 -54
  101. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/DAVOS_2009_2011.py +34 -51
  102. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_2009.py +34 -51
  103. disdrodb/l0/readers/{EPFL/PARADISO_2014.py → PARSIVEL/EPFL/EPFL_ROOF_2008.py} +38 -50
  104. disdrodb/l0/readers/PARSIVEL/EPFL/EPFL_ROOF_2010.py +105 -0
  105. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2011.py +34 -51
  106. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/EPFL_ROOF_2012.py +33 -51
  107. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GENEPI_2007.py +25 -44
  108. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007.py +25 -44
  109. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/GRAND_ST_BERNARD_2007_2.py +25 -44
  110. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HPICONET_2010.py +34 -51
  111. disdrodb/l0/readers/{EPFL/EPFL_ROOF_2010.py → PARSIVEL/EPFL/HYMEX_LTE_SOP2.py} +37 -50
  112. disdrodb/l0/readers/PARSIVEL/EPFL/HYMEX_LTE_SOP3.py +111 -0
  113. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/HYMEX_LTE_SOP4.py +36 -54
  114. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2018.py +34 -52
  115. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/LOCARNO_2019.py +38 -56
  116. disdrodb/l0/readers/PARSIVEL/EPFL/PARADISO_2014.py +105 -0
  117. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PARSIVEL_2007.py +27 -45
  118. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/PLATO_2019.py +24 -44
  119. disdrodb/l0/readers/PARSIVEL/EPFL/RACLETS_2019.py +140 -0
  120. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RACLETS_2019_WJF.py +41 -59
  121. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/RIETHOLZBACH_2011.py +34 -51
  122. disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2017.py +117 -0
  123. disdrodb/l0/readers/PARSIVEL/EPFL/SAMOYLOV_2019.py +137 -0
  124. disdrodb/l0/readers/{EPFL → PARSIVEL/EPFL}/UNIL_2022.py +42 -55
  125. disdrodb/l0/readers/PARSIVEL/GPM/IFLOODS.py +104 -0
  126. disdrodb/l0/readers/{GPM → PARSIVEL/GPM}/LPVEX.py +29 -48
  127. disdrodb/l0/readers/PARSIVEL/GPM/MC3E.py +184 -0
  128. disdrodb/l0/readers/PARSIVEL/KIT/BURKINA_FASO.py +133 -0
  129. disdrodb/l0/readers/PARSIVEL/NCAR/CCOPE_2015.py +113 -0
  130. disdrodb/l0/readers/{NCAR/VORTEX_SE_2016_P1.py → PARSIVEL/NCAR/OWLES_MIPS.py} +46 -72
  131. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +125 -0
  132. disdrodb/l0/readers/{NCAR/OWLES_MIPS.py → PARSIVEL/NCAR/PLOWS_MIPS.py} +45 -64
  133. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +114 -0
  134. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +176 -0
  135. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +183 -0
  136. disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL_FGG.py +121 -0
  137. disdrodb/l0/readers/{ARM/ARM_LD.py → PARSIVEL2/ARM/ARM_PARSIVEL2.py} +27 -50
  138. disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +163 -0
  139. disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +163 -0
  140. disdrodb/l0/readers/{DENMARK → PARSIVEL2/DENMARK}/EROSION_nc.py +14 -35
  141. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +189 -0
  142. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +119 -0
  143. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +104 -0
  144. disdrodb/l0/readers/PARSIVEL2/GPM/NSSTC.py +176 -0
  145. disdrodb/l0/readers/PARSIVEL2/ITALY/GID_PARSIVEL2.py +32 -0
  146. disdrodb/l0/readers/PARSIVEL2/MEXICO/OH_IIUNAM_nc.py +56 -0
  147. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +120 -0
  148. disdrodb/l0/readers/{NCAR → PARSIVEL2/NCAR}/PECAN_MIPS.py +45 -64
  149. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +181 -0
  150. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +160 -0
  151. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +160 -0
  152. disdrodb/l0/readers/{NCAR/PLOWS_MIPS.py → PARSIVEL2/NCAR/VORTEX_SE_2016_P1.py} +49 -66
  153. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +118 -0
  154. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +152 -0
  155. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT.py +166 -0
  156. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +150 -0
  157. disdrodb/l0/readers/{NCAR/RELAMPAGO_RD80.py → RD80/BRAZIL/CHUVA_RD80.py} +36 -60
  158. disdrodb/l0/readers/{BRAZIL → RD80/BRAZIL}/GOAMAZON_RD80.py +36 -55
  159. disdrodb/l0/readers/{NCAR → RD80/NCAR}/CINDY_2011_RD80.py +35 -54
  160. disdrodb/l0/readers/{BRAZIL/CHUVA_RD80.py → RD80/NCAR/RELAMPAGO_RD80.py} +40 -54
  161. disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +274 -0
  162. disdrodb/l0/readers/template_reader_raw_netcdf_data.py +62 -0
  163. disdrodb/l0/readers/{reader_template.py → template_reader_raw_text_data.py} +20 -44
  164. disdrodb/l0/routines.py +885 -581
  165. disdrodb/l0/standards.py +77 -238
  166. disdrodb/l0/template_tools.py +105 -110
  167. disdrodb/l1/__init__.py +17 -0
  168. disdrodb/l1/beard_model.py +716 -0
  169. disdrodb/l1/encoding_attrs.py +635 -0
  170. disdrodb/l1/fall_velocity.py +260 -0
  171. disdrodb/l1/filters.py +192 -0
  172. disdrodb/l1/processing.py +202 -0
  173. disdrodb/l1/resampling.py +236 -0
  174. disdrodb/l1/routines.py +358 -0
  175. disdrodb/l1_env/__init__.py +17 -0
  176. disdrodb/l1_env/routines.py +38 -0
  177. disdrodb/l2/__init__.py +17 -0
  178. disdrodb/l2/empirical_dsd.py +1833 -0
  179. disdrodb/l2/event.py +388 -0
  180. disdrodb/l2/processing.py +528 -0
  181. disdrodb/l2/processing_options.py +213 -0
  182. disdrodb/l2/routines.py +868 -0
  183. disdrodb/metadata/__init__.py +9 -2
  184. disdrodb/metadata/checks.py +180 -124
  185. disdrodb/metadata/download.py +81 -0
  186. disdrodb/metadata/geolocation.py +146 -0
  187. disdrodb/metadata/info.py +20 -13
  188. disdrodb/metadata/manipulation.py +3 -3
  189. disdrodb/metadata/reader.py +59 -8
  190. disdrodb/metadata/search.py +77 -144
  191. disdrodb/metadata/standards.py +83 -80
  192. disdrodb/metadata/writer.py +10 -16
  193. disdrodb/psd/__init__.py +38 -0
  194. disdrodb/psd/fitting.py +2146 -0
  195. disdrodb/psd/models.py +774 -0
  196. disdrodb/routines.py +1412 -0
  197. disdrodb/scattering/__init__.py +28 -0
  198. disdrodb/scattering/axis_ratio.py +344 -0
  199. disdrodb/scattering/routines.py +456 -0
  200. disdrodb/utils/__init__.py +17 -0
  201. disdrodb/utils/attrs.py +208 -0
  202. disdrodb/utils/cli.py +269 -0
  203. disdrodb/utils/compression.py +60 -42
  204. disdrodb/utils/dask.py +62 -0
  205. disdrodb/utils/dataframe.py +342 -0
  206. disdrodb/utils/decorators.py +110 -0
  207. disdrodb/utils/directories.py +107 -46
  208. disdrodb/utils/encoding.py +127 -0
  209. disdrodb/utils/list.py +29 -0
  210. disdrodb/utils/logger.py +168 -46
  211. disdrodb/utils/time.py +657 -0
  212. disdrodb/utils/warnings.py +30 -0
  213. disdrodb/utils/writer.py +57 -0
  214. disdrodb/utils/xarray.py +138 -47
  215. disdrodb/utils/yaml.py +0 -1
  216. disdrodb/viz/__init__.py +17 -0
  217. disdrodb/viz/plots.py +17 -0
  218. disdrodb-0.1.1.dist-info/METADATA +294 -0
  219. disdrodb-0.1.1.dist-info/RECORD +232 -0
  220. {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/WHEEL +1 -1
  221. disdrodb-0.1.1.dist-info/entry_points.txt +30 -0
  222. disdrodb/data_transfer/scripts/disdrodb_download_archive.py +0 -53
  223. disdrodb/data_transfer/scripts/disdrodb_upload_archive.py +0 -57
  224. disdrodb/l0/configs/OTT_Parsivel/l0a_encodings.yml +0 -32
  225. disdrodb/l0/configs/OTT_Parsivel2/l0a_encodings.yml +0 -39
  226. disdrodb/l0/configs/RD_80/l0a_encodings.yml +0 -16
  227. disdrodb/l0/configs/RD_80/l0b_encodings.yml +0 -135
  228. disdrodb/l0/configs/Thies_LPM/l0a_encodings.yml +0 -80
  229. disdrodb/l0/io.py +0 -257
  230. disdrodb/l0/l0_processing.py +0 -1091
  231. disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_OTT.py +0 -178
  232. disdrodb/l0/readers/AUSTRALIA/MELBOURNE_2007_THIES.py +0 -247
  233. disdrodb/l0/readers/BRAZIL/CHUVA_LPM.py +0 -204
  234. disdrodb/l0/readers/BRAZIL/CHUVA_OTT.py +0 -183
  235. disdrodb/l0/readers/BRAZIL/GOAMAZON_OTT.py +0 -183
  236. disdrodb/l0/readers/CHINA/CHONGQING.py +0 -131
  237. disdrodb/l0/readers/EPFL/EPFL_ROOF_2008.py +0 -128
  238. disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP2.py +0 -127
  239. disdrodb/l0/readers/EPFL/HYMEX_LTE_SOP3.py +0 -129
  240. disdrodb/l0/readers/EPFL/RACLETS_2019.py +0 -158
  241. disdrodb/l0/readers/EPFL/SAMOYLOV_2017.py +0 -136
  242. disdrodb/l0/readers/EPFL/SAMOYLOV_2019.py +0 -158
  243. disdrodb/l0/readers/FRANCE/SIRTA_OTT2.py +0 -138
  244. disdrodb/l0/readers/GPM/GCPEX.py +0 -123
  245. disdrodb/l0/readers/GPM/IFLOODS.py +0 -123
  246. disdrodb/l0/readers/GPM/MC3E.py +0 -123
  247. disdrodb/l0/readers/GPM/NSSTC.py +0 -164
  248. disdrodb/l0/readers/ITALY/GID.py +0 -199
  249. disdrodb/l0/readers/MEXICO/OH_IIUNAM_nc.py +0 -92
  250. disdrodb/l0/readers/NCAR/CCOPE_2015.py +0 -133
  251. disdrodb/l0/readers/NCAR/PECAN_FP3.py +0 -137
  252. disdrodb/l0/readers/NCAR/PECAN_MOBILE.py +0 -144
  253. disdrodb/l0/readers/NCAR/RELAMPAGO_OTT.py +0 -195
  254. disdrodb/l0/readers/NCAR/SNOWIE_PJ.py +0 -172
  255. disdrodb/l0/readers/NCAR/SNOWIE_SB.py +0 -179
  256. disdrodb/l0/readers/NCAR/VORTEX2_2009.py +0 -133
  257. disdrodb/l0/readers/NCAR/VORTEX2_2010.py +0 -188
  258. disdrodb/l0/readers/NCAR/VORTEX2_2010_UF.py +0 -191
  259. disdrodb/l0/readers/NCAR/VORTEX_SE_2016_P2.py +0 -135
  260. disdrodb/l0/readers/NCAR/VORTEX_SE_2016_PIPS.py +0 -170
  261. disdrodb/l0/readers/NETHERLANDS/DELFT.py +0 -187
  262. disdrodb/l0/readers/SPAIN/SBEGUERIA.py +0 -179
  263. disdrodb/l0/scripts/disdrodb_run_l0b_concat.py +0 -93
  264. disdrodb/l0/scripts/disdrodb_run_l0b_concat_station.py +0 -85
  265. disdrodb/utils/netcdf.py +0 -452
  266. disdrodb/utils/scripts.py +0 -102
  267. disdrodb-0.0.21.dist-info/AUTHORS.md +0 -18
  268. disdrodb-0.0.21.dist-info/METADATA +0 -186
  269. disdrodb-0.0.21.dist-info/RECORD +0 -168
  270. disdrodb-0.0.21.dist-info/entry_points.txt +0 -15
  271. /disdrodb/l0/configs/{RD_80 → RD80}/bins_velocity.yml +0 -0
  272. /disdrodb/l0/manuals/{Thies_LPM.pdf → LPM.pdf} +0 -0
  273. /disdrodb/l0/manuals/{ODM_470.pdf → ODM470.pdf} +0 -0
  274. /disdrodb/l0/manuals/{OTT_Parsivel.pdf → PARSIVEL.pdf} +0 -0
  275. /disdrodb/l0/manuals/{OTT_Parsivel2.pdf → PARSIVEL2.pdf} +0 -0
  276. /disdrodb/l0/manuals/{PWS_100.pdf → PWS100.pdf} +0 -0
  277. /disdrodb/l0/manuals/{RD_80.pdf → RD80.pdf} +0 -0
  278. {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info/licenses}/LICENSE +0 -0
  279. {disdrodb-0.0.21.dist-info → disdrodb-0.1.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,342 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # -----------------------------------------------------------------------------.
4
+ # Copyright (c) 2021-2023 DISDRODB developers
5
+ #
6
+ # This program is free software: you can redistribute it and/or modify
7
+ # it under the terms of the GNU General Public License as published by
8
+ # the Free Software Foundation, either version 3 of the License, or
9
+ # (at your option) any later version.
10
+ #
11
+ # This program is distributed in the hope that it will be useful,
12
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
13
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14
+ # GNU General Public License for more details.
15
+ #
16
+ # You should have received a copy of the GNU General Public License
17
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
18
+ # -----------------------------------------------------------------------------.
19
+ """Dataframe utilities."""
20
+ import numpy as np
21
+ import pandas as pd
22
+
23
+
24
def log_arange(start, stop, log_step=0.1, base=10):
    """Return values evenly spaced in logarithmic space (log-space analogue of ``np.arange``).

    Parameters
    ----------
    start : float
        First value of the sequence (must be strictly positive).
    stop : float
        Exclusive end value of the sequence (must be strictly positive).
    log_step : float, optional
        Step between consecutive values in log-space. The default is 0.1.
    base : float, optional
        Base of the logarithm. The default is 10.

    Returns
    -------
    np.ndarray
        Array of values spaced evenly on a log scale.
    """
    if start <= 0 or stop <= 0:
        raise ValueError("Both start and stop must be > 0 for log spacing.")

    # Convert the endpoints to log-space using the change-of-base formula
    exponent_start = np.log(start) / np.log(base)
    exponent_stop = np.log(stop) / np.log(base)

    # Build a regular grid of exponents, then map it back to linear space
    exponents = np.arange(exponent_start, exponent_stop, log_step)
    return np.power(base, exponents)
52
+
53
+
54
def compute_1d_histogram(df, column, variables=None, bins=10, labels=None, prefix_name=True, include_quantiles=False):
    """Compute conditional univariate statistics of ``variables`` within bins of ``column``.

    Parameters
    ----------
    df : pandas.DataFrame
        Input dataframe.
    column : str
        Column name to be binned.
    variables : str or list, optional
        Column names for which conditional statistics will be computed.
        If None, only counts are computed.
    bins : int or array-like
        Number of bins or bin edges.
    labels : array-like, optional
        Labels for the column bins. If None, uses bin centers.
    prefix_name : bool, optional
        If True (the default) and ``variables`` were specified, statistics
        columns are prefixed with the variable name (e.g. ``"<var>_median"``).
    include_quantiles : bool, optional
        If True, also keep the individual quantile columns (Q1, Q5, ..., Q99)
        in the output. If False (the default), only the median and the
        inter-percentile ranges derived from them are kept.

    Returns
    -------
    pandas.DataFrame
        One row per bin of ``column``, with a ``count`` column, the requested
        statistics, and a ``column`` column holding the bin centers (or the
        provided ``labels``).

    Raises
    ------
    TypeError
        If ``variables`` is not a string, list of strings, or None.
    ValueError
        If no valid data points remain after removing NaN values.
    """
    # Work on a copy: binned helper columns are added to the dataframe below
    df = df.copy()

    # Ensure `variables` is a list of variables
    # - If no variable specified, create a constant dummy variable so that the
    #   groupby/agg machinery below still produces the per-bin counts
    if variables is None:
        variables = ["dummy"]
        df["dummy"] = np.ones(df[column].shape)
        variables_specified = False
    elif isinstance(variables, str):
        variables = [variables]
        variables_specified = True
    elif isinstance(variables, list):
        variables_specified = True
    else:
        raise TypeError("`variables` must be a string, list of strings, or None.")
    # Deduplicate (np.unique also sorts the variable names)
    variables = np.unique(variables)

    # Handle column binning: an integer means "this many equal-width bins"
    if isinstance(bins, int):
        bins = np.linspace(df[column].min(), df[column].max(), bins + 1)

    # Drop rows where any of the key columns have NaN
    df = df.dropna(subset=[column, *variables])

    if len(df) == 0:
        raise ValueError("No valid data points after removing NaN values")

    # Assign each row to a bin (include_lowest so the left edge is inclusive);
    # values outside the bin edges become NaN and are excluded from the groups
    df[f"{column}_binned"] = pd.cut(df[column], bins=bins, include_lowest=True)

    # All bin intervals, including those with no observations
    intervals = df[f"{column}_binned"].cat.categories

    # Index used later to reinstate empty bins in the output
    full_index = pd.Index(intervals, name=f"{column}_binned")

    # Define grouping object (observed=False keeps empty categories)
    df_grouped = df.groupby([f"{column}_binned"], observed=False)

    # Compute statistics for specified variables
    variables_stats = []
    for i, var in enumerate(variables):
        # Prepare prefix (only when real variables were specified)
        prefix = f"{var}_" if prefix_name and variables_specified else ""

        # Define statistics to compute
        if variables_specified:
            # Compute quantiles; columns are renamed from the quantile value
            # (e.g. 0.25) to "<prefix>Q25"
            quantiles = [0.01, 0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95, 0.99]
            df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1)
            df_stats_quantiles.columns = [f"{prefix}Q{int(q*100)}" for q in df_stats_quantiles.columns]
            df_stats_quantiles = df_stats_quantiles.rename(
                columns={
                    f"{prefix}Q50": f"{prefix}median",
                },
            )
            # Define other stats to compute (mad = median absolute deviation)
            list_stats = [
                (f"{prefix}std", "std"),
                (f"{prefix}min", "min"),
                (f"{prefix}max", "max"),
                (f"{prefix}mad", lambda s: np.median(np.abs(s - np.median(s)))),
            ]
            # The count is identical for every variable: compute it only once
            if i == 0:
                list_stats.append(("count", "count"))
        else:
            list_stats = [("count", "count")]

        # Compute statistics
        df_stats = df_grouped[var].agg(list_stats)

        # Derive range and inter-percentile ranges from the quantiles
        if variables_specified:
            df_stats[f"{prefix}range"] = df_stats[f"{prefix}max"] - df_stats[f"{prefix}min"]
            df_stats[f"{prefix}iqr"] = df_stats_quantiles[f"{prefix}Q75"] - df_stats_quantiles[f"{prefix}Q25"]
            df_stats[f"{prefix}ipr80"] = df_stats_quantiles[f"{prefix}Q90"] - df_stats_quantiles[f"{prefix}Q10"]
            df_stats[f"{prefix}ipr90"] = df_stats_quantiles[f"{prefix}Q95"] - df_stats_quantiles[f"{prefix}Q5"]
            df_stats[f"{prefix}ipr98"] = df_stats_quantiles[f"{prefix}Q99"] - df_stats_quantiles[f"{prefix}Q1"]
            if include_quantiles:
                df_stats = pd.concat((df_stats, df_stats_quantiles), axis=1)
            else:
                df_stats[f"{prefix}median"] = df_stats_quantiles[f"{prefix}median"]
        variables_stats.append(df_stats)

    # Combine all statistics into a single DataFrame
    df_stats = pd.concat(variables_stats, axis=1)

    # Reindex to include all interval combinations (empty bins appear as NaN)
    df_stats = df_stats.reindex(full_index)

    # Determine bin centers
    centers = intervals.mid

    # Use provided labels if available
    coords = labels if labels is not None else centers

    # Replace the interval index with a plain column of bin centers/labels
    # NOTE: zip(..., strict=False) requires Python >= 3.10
    df_stats = df_stats.reset_index()
    df_stats[f"{column}"] = pd.Categorical(df_stats[f"{column}_binned"].map(dict(zip(intervals, coords, strict=False))))
    df_stats = df_stats.drop(columns=f"{column}_binned")

    return df_stats
178
+
179
+
180
def compute_2d_histogram(
    df,
    x,
    y,
    variables=None,
    x_bins=10,
    y_bins=10,
    x_labels=None,
    y_labels=None,
    prefix_name=True,
    include_quantiles=False,
):
    """Compute conditional bivariate statistics of ``variables`` on an (x, y) bin grid.

    Parameters
    ----------
    df : pandas.DataFrame
        Input dataframe.
    x : str
        Column name for x-axis binning.
    y : str
        Column name for y-axis binning.
    variables : str or list, optional
        Column names for which statistics will be computed.
        If None, only counts are computed.
    x_bins : int or array-like
        Number of bins or bin edges for x.
    y_bins : int or array-like
        Number of bins or bin edges for y.
    x_labels : array-like, optional
        Labels for x bins. If None, uses bin centers.
    y_labels : array-like, optional
        Labels for y bins. If None, uses bin centers.
    prefix_name : bool, optional
        If True (the default) and ``variables`` were specified, statistics
        variables are prefixed with the variable name (e.g. ``"<var>_median"``).
    include_quantiles : bool, optional
        If True, also keep the individual quantile variables (Q1, Q5, ..., Q99)
        in the output. If False (the default), only the median and the
        inter-percentile ranges derived from them are kept.

    Returns
    -------
    xarray.Dataset
        Dataset with (y, x) dimensions (bin centers or provided labels as
        coordinates) and one data variable per statistic.

    Raises
    ------
    TypeError
        If ``variables`` is not a string, list of strings, or None.
    ValueError
        If no valid data points remain after removing NaN values.
    """
    # # If polars, cast to pandas
    # if isinstance(df, pl.DataFrame):
    #     df = df.to_pandas()

    # Work on a copy: binned helper columns are added to the dataframe below
    df = df.copy()

    # Ensure `variables` is a list of variables
    # - If no variable specified, create a constant dummy variable so that the
    #   groupby/agg machinery below still produces the per-cell counts
    if variables is None:
        variables = ["dummy"]
        df["dummy"] = np.ones(df[x].shape)
        variables_specified = False
    elif isinstance(variables, str):
        variables = [variables]
        variables_specified = True
    elif isinstance(variables, list):
        variables_specified = True
    else:
        raise TypeError("`variables` must be a string, list of strings, or None.")
    # Deduplicate (np.unique also sorts the variable names)
    variables = np.unique(variables)

    # Handle x-axis binning: an integer means "this many equal-width bins"
    if isinstance(x_bins, int):
        x_bins = np.linspace(df[x].min(), df[x].max(), x_bins + 1)
    # Handle y-axis binning
    if isinstance(y_bins, int):
        y_bins = np.linspace(df[y].min(), df[y].max(), y_bins + 1)

    # Drop rows where any of the key columns have NaN
    df = df.dropna(subset=[x, y, *variables])

    if len(df) == 0:
        raise ValueError("No valid data points after removing NaN values")

    # Assign each row to a bin (include_lowest so the left edge is inclusive);
    # values outside the bin edges become NaN and are excluded from the groups
    df[f"{x}_binned"] = pd.cut(df[x], bins=x_bins, include_lowest=True)
    df[f"{y}_binned"] = pd.cut(df[y], bins=y_bins, include_lowest=True)

    # All bin intervals for both dimensions, including empty ones
    x_intervals = df[f"{x}_binned"].cat.categories
    y_intervals = df[f"{y}_binned"].cat.categories

    # MultiIndex with all (x, y) cell combinations, used to reinstate empty cells
    full_index = pd.MultiIndex.from_product([x_intervals, y_intervals], names=[f"{x}_binned", f"{y}_binned"])

    # Define grouping object (observed=False keeps empty categories)
    df_grouped = df.groupby([f"{x}_binned", f"{y}_binned"], observed=False)

    # Compute statistics for specified variables
    variables_stats = []
    for i, var in enumerate(variables):
        # Prepare prefix (only when real variables were specified)
        prefix = f"{var}_" if prefix_name and variables_specified else ""

        # Define statistics to compute
        if variables_specified:
            # Compute quantiles; columns are renamed from the quantile value
            # (e.g. 0.25) to "<prefix>Q25"
            quantiles = [0.01, 0.05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95, 0.99]
            df_stats_quantiles = df_grouped[var].quantile(quantiles).unstack(level=-1)
            df_stats_quantiles.columns = [f"{prefix}Q{int(q*100)}" for q in df_stats_quantiles.columns]
            df_stats_quantiles = df_stats_quantiles.rename(
                columns={
                    f"{prefix}Q50": f"{prefix}median",
                },
            )
            # Define other stats to compute (mad = median absolute deviation)
            list_stats = [
                (f"{prefix}std", "std"),
                (f"{prefix}min", "min"),
                (f"{prefix}max", "max"),
                (f"{prefix}mad", lambda s: np.median(np.abs(s - np.median(s)))),
            ]
            # The count is identical for every variable: compute it only once
            if i == 0:
                list_stats.append(("count", "count"))
        else:
            list_stats = [("count", "count")]

        # Compute statistics
        df_stats = df_grouped[var].agg(list_stats)

        # Derive range and inter-percentile ranges from the quantiles
        if variables_specified:
            df_stats[f"{prefix}range"] = df_stats[f"{prefix}max"] - df_stats[f"{prefix}min"]
            df_stats[f"{prefix}iqr"] = df_stats_quantiles[f"{prefix}Q75"] - df_stats_quantiles[f"{prefix}Q25"]
            df_stats[f"{prefix}ipr80"] = df_stats_quantiles[f"{prefix}Q90"] - df_stats_quantiles[f"{prefix}Q10"]
            df_stats[f"{prefix}ipr90"] = df_stats_quantiles[f"{prefix}Q95"] - df_stats_quantiles[f"{prefix}Q5"]
            df_stats[f"{prefix}ipr98"] = df_stats_quantiles[f"{prefix}Q99"] - df_stats_quantiles[f"{prefix}Q1"]
            if include_quantiles:
                df_stats = pd.concat((df_stats, df_stats_quantiles), axis=1)
            else:
                df_stats[f"{prefix}median"] = df_stats_quantiles[f"{prefix}median"]
        variables_stats.append(df_stats)

    # Combine all statistics into a single DataFrame
    df_stats = pd.concat(variables_stats, axis=1)

    # Reindex to include all interval combinations (empty cells appear as NaN)
    df_stats = df_stats.reindex(full_index)

    # Determine coordinates
    x_centers = x_intervals.mid
    y_centers = y_intervals.mid

    # Use provided labels if available
    x_coords = x_labels if x_labels is not None else x_centers
    y_coords = y_labels if y_labels is not None else y_centers

    # Replace the interval columns with the bin centers/labels
    # NOTE: zip(..., strict=False) requires Python >= 3.10
    df_stats = df_stats.reset_index()
    df_stats[f"{x}"] = pd.Categorical(df_stats[f"{x}_binned"].map(dict(zip(x_intervals, x_coords, strict=False))))
    df_stats[f"{y}"] = pd.Categorical(df_stats[f"{y}_binned"].map(dict(zip(y_intervals, y_coords, strict=False))))

    # Set new MultiIndex with coordinates
    df_stats = df_stats.set_index([f"{x}", f"{y}"])
    df_stats = df_stats.drop(columns=[f"{x}_binned", f"{y}_binned"])

    # Convert to dataset (the MultiIndex levels become the x and y dimensions)
    ds = df_stats.to_xarray()

    # Transpose arrays so the dimension order is (y, x)
    ds = ds.transpose(y, x)
    return ds
@@ -0,0 +1,110 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # -----------------------------------------------------------------------------.
4
+ # Copyright (c) 2021-2023 DISDRODB developers
5
+ #
6
+ # This program is free software: you can redistribute it and/or modify
7
+ # it under the terms of the GNU General Public License as published by
8
+ # the Free Software Foundation, either version 3 of the License, or
9
+ # (at your option) any later version.
10
+ #
11
+ # This program is distributed in the hope that it will be useful,
12
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
13
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14
+ # GNU General Public License for more details.
15
+ #
16
+ # You should have received a copy of the GNU General Public License
17
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
18
+ # -----------------------------------------------------------------------------.
19
+ """DISDRODB decorators."""
20
+ import functools
21
+ import importlib
22
+
23
+ import dask
24
+
25
+
26
def delayed_if_parallel(function):
    """Decorator to make the function delayed if its ``parallel`` argument is ``True``."""

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        # Sequential execution: just call the function directly
        if not kwargs.get("parallel"):
            return function(*args, **kwargs)
        # Parallel execution: terminal logging is disabled for delayed tasks
        kwargs["verbose"] = False
        # Build (but do not run) the dask delayed task
        return dask.delayed(function)(*args, **kwargs)

    return wrapper
45
+
46
+
47
def single_threaded_if_parallel(function):
    """Decorator to run a function on the synchronous (single-threaded) dask scheduler if its ``parallel`` argument is ``True``."""

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        # No parallelism requested: execute the function as usual
        if not kwargs.get("parallel"):
            return function(*args, **kwargs)
        # Force the synchronous (single-threaded) dask scheduler for this call
        with dask.config.set(scheduler="synchronous"):
            return function(*args, **kwargs)

    return wrapper
66
+
67
+
68
def check_software_availability(software, conda_package):
    """A decorator to ensure that a software package is installed.

    Parameters
    ----------
    software : str
        The package name as recognized by Python's import system.
    conda_package : str
        The package name as recognized by conda-forge.

    Returns
    -------
    callable
        A decorator whose wrapper raises ``ImportError`` at call time if
        ``software`` is not importable.
    """
    # NOTE: importlib.util must be imported explicitly (see deps): accessing it
    # as an attribute of the bare `importlib` module is not guaranteed to work.
    import importlib.util

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # find_spec returns None when the package is not importable
            if not importlib.util.find_spec(software):
                raise ImportError(
                    f"The '{software}' package is required but not found.\n"
                    "Please install it using conda:\n"
                    f"  conda install -c conda-forge {conda_package}",
                )
            return func(*args, **kwargs)

        return wrapper

    return decorator
93
+
94
+
95
def check_pytmatrix_availability(func):
    """Decorator to ensure that the 'pytmatrix' package is installed.

    Raises
    ------
    ImportError
        If 'pytmatrix' cannot be imported when the decorated function is called.
    """
    # NOTE: importlib.util must be imported explicitly (see deps): accessing it
    # as an attribute of the bare `importlib` module is not guaranteed to work.
    import importlib.util

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not importlib.util.find_spec("pytmatrix"):
            # BUGFIX: the channel flag '-c' was missing, so the suggested
            # commands were not valid conda invocations.
            raise ImportError(
                "The 'pytmatrix' package is required but not found. \n"
                "Please install the following software: \n"
                "  conda install -c conda-forge gfortran \n"
                "  conda install -c conda-forge meson \n"
                "  pip install git+https://github.com/ltelab/pytmatrix-lte.git@main \n",
            )
        return func(*args, **kwargs)

    return wrapper