disdrodb 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. disdrodb/__init__.py +64 -34
  2. disdrodb/_config.py +5 -4
  3. disdrodb/_version.py +16 -3
  4. disdrodb/accessor/__init__.py +20 -0
  5. disdrodb/accessor/methods.py +125 -0
  6. disdrodb/api/checks.py +139 -9
  7. disdrodb/api/configs.py +4 -2
  8. disdrodb/api/info.py +10 -10
  9. disdrodb/api/io.py +237 -18
  10. disdrodb/api/path.py +81 -75
  11. disdrodb/api/search.py +6 -6
  12. disdrodb/cli/disdrodb_create_summary_station.py +91 -0
  13. disdrodb/cli/disdrodb_run_l0.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0_station.py +1 -1
  15. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  16. disdrodb/cli/disdrodb_run_l0b_station.py +1 -1
  17. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  18. disdrodb/cli/disdrodb_run_l0c_station.py +1 -1
  19. disdrodb/cli/disdrodb_run_l2e_station.py +1 -1
  20. disdrodb/configs.py +149 -4
  21. disdrodb/constants.py +61 -0
  22. disdrodb/data_transfer/download_data.py +145 -14
  23. disdrodb/etc/configs/attributes.yaml +339 -0
  24. disdrodb/etc/configs/encodings.yaml +473 -0
  25. disdrodb/etc/products/L1/global.yaml +13 -0
  26. disdrodb/etc/products/L2E/10MIN.yaml +12 -0
  27. disdrodb/etc/products/L2E/1MIN.yaml +1 -0
  28. disdrodb/etc/products/L2E/global.yaml +22 -0
  29. disdrodb/etc/products/L2M/10MIN.yaml +12 -0
  30. disdrodb/etc/products/L2M/GAMMA_ML.yaml +8 -0
  31. disdrodb/etc/products/L2M/NGAMMA_GS_LOG_ND_MAE.yaml +6 -0
  32. disdrodb/etc/products/L2M/NGAMMA_GS_ND_MAE.yaml +6 -0
  33. disdrodb/etc/products/L2M/NGAMMA_GS_Z_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/global.yaml +26 -0
  35. disdrodb/l0/__init__.py +13 -0
  36. disdrodb/l0/configs/LPM/bins_diameter.yml +3 -3
  37. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +4 -4
  38. disdrodb/l0/configs/PARSIVEL/l0b_cf_attrs.yml +1 -1
  39. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +3 -3
  40. disdrodb/l0/configs/PARSIVEL/raw_data_format.yml +1 -1
  41. disdrodb/l0/configs/PARSIVEL2/l0a_encodings.yml +4 -0
  42. disdrodb/l0/configs/PARSIVEL2/l0b_cf_attrs.yml +20 -4
  43. disdrodb/l0/configs/PARSIVEL2/l0b_encodings.yml +44 -3
  44. disdrodb/l0/configs/PARSIVEL2/raw_data_format.yml +41 -1
  45. disdrodb/l0/configs/PWS100/l0b_cf_attrs.yml +4 -4
  46. disdrodb/l0/configs/PWS100/raw_data_format.yml +1 -1
  47. disdrodb/l0/l0a_processing.py +30 -30
  48. disdrodb/l0/l0b_nc_processing.py +108 -2
  49. disdrodb/l0/l0b_processing.py +4 -4
  50. disdrodb/l0/l0c_processing.py +5 -13
  51. disdrodb/l0/manuals/SWS250.pdf +0 -0
  52. disdrodb/l0/manuals/VPF730.pdf +0 -0
  53. disdrodb/l0/manuals/VPF750.pdf +0 -0
  54. disdrodb/l0/readers/LPM/NETHERLANDS/DELFT_LPM_NC.py +66 -0
  55. disdrodb/l0/readers/LPM/SLOVENIA/{CRNI_VRH.py → UL.py} +3 -0
  56. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +195 -0
  57. disdrodb/l0/readers/PARSIVEL/GPM/PIERS.py +105 -0
  58. disdrodb/l0/readers/PARSIVEL/JAPAN/JMA.py +128 -0
  59. disdrodb/l0/readers/PARSIVEL/NCAR/PECAN_MOBILE.py +1 -1
  60. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2009.py +1 -1
  61. disdrodb/l0/readers/PARSIVEL2/BELGIUM/ILVO.py +168 -0
  62. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +165 -0
  63. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +69 -0
  64. disdrodb/l0/readers/PARSIVEL2/FRANCE/ENPC_PARSIVEL2.py +255 -134
  65. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +525 -0
  66. disdrodb/l0/readers/PARSIVEL2/FRANCE/SIRTA_PARSIVEL2.py +1 -1
  67. disdrodb/l0/readers/PARSIVEL2/GPM/GCPEX.py +9 -7
  68. disdrodb/l0/readers/{PARSIVEL → PARSIVEL2}/KIT/BURKINA_FASO.py +1 -1
  69. disdrodb/l0/readers/PARSIVEL2/KIT/TEAMX.py +123 -0
  70. disdrodb/l0/readers/PARSIVEL2/NASA/APU.py +120 -0
  71. disdrodb/l0/readers/PARSIVEL2/{NETHERLANDS/DELFT.py → NCAR/FARM_PARSIVEL2.py} +43 -70
  72. disdrodb/l0/readers/PARSIVEL2/NCAR/PECAN_FP3.py +1 -1
  73. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +126 -0
  74. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_PIPS.py +165 -0
  75. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_P2.py +1 -1
  76. disdrodb/l0/readers/PARSIVEL2/NCAR/VORTEX_SE_2016_PIPS.py +29 -12
  77. disdrodb/l0/readers/PARSIVEL2/NETHERLANDS/DELFT_NC.py +69 -0
  78. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +144 -0
  79. disdrodb/l0/readers/PARSIVEL2/SPAIN/CR1000DL.py +201 -0
  80. disdrodb/l0/readers/PARSIVEL2/SPAIN/LIAISE.py +137 -0
  81. disdrodb/l0/readers/PARSIVEL2/USA/C3WE.py +146 -0
  82. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100.py +105 -99
  83. disdrodb/l0/readers/PWS100/FRANCE/ENPC_PWS100_SIRTA.py +151 -0
  84. disdrodb/l0/readers/RD80/NOAA/PSL_RD80.py +31 -14
  85. disdrodb/l0/routines.py +105 -14
  86. disdrodb/l1/__init__.py +5 -0
  87. disdrodb/l1/filters.py +34 -20
  88. disdrodb/l1/processing.py +45 -44
  89. disdrodb/l1/resampling.py +77 -66
  90. disdrodb/l1/routines.py +35 -42
  91. disdrodb/l1_env/routines.py +18 -3
  92. disdrodb/l2/__init__.py +7 -0
  93. disdrodb/l2/empirical_dsd.py +58 -10
  94. disdrodb/l2/event.py +27 -120
  95. disdrodb/l2/processing.py +267 -116
  96. disdrodb/l2/routines.py +618 -254
  97. disdrodb/metadata/standards.py +3 -1
  98. disdrodb/psd/fitting.py +463 -144
  99. disdrodb/psd/models.py +8 -5
  100. disdrodb/routines.py +3 -3
  101. disdrodb/scattering/__init__.py +16 -4
  102. disdrodb/scattering/axis_ratio.py +56 -36
  103. disdrodb/scattering/permittivity.py +486 -0
  104. disdrodb/scattering/routines.py +701 -159
  105. disdrodb/summary/__init__.py +17 -0
  106. disdrodb/summary/routines.py +4120 -0
  107. disdrodb/utils/attrs.py +68 -125
  108. disdrodb/utils/compression.py +30 -1
  109. disdrodb/utils/dask.py +59 -8
  110. disdrodb/utils/dataframe.py +63 -9
  111. disdrodb/utils/directories.py +49 -17
  112. disdrodb/utils/encoding.py +33 -19
  113. disdrodb/utils/logger.py +13 -6
  114. disdrodb/utils/manipulations.py +71 -0
  115. disdrodb/utils/subsetting.py +214 -0
  116. disdrodb/utils/time.py +165 -19
  117. disdrodb/utils/writer.py +20 -7
  118. disdrodb/utils/xarray.py +85 -4
  119. disdrodb/viz/__init__.py +13 -0
  120. disdrodb/viz/plots.py +327 -0
  121. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/METADATA +3 -2
  122. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/RECORD +127 -87
  123. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/entry_points.txt +1 -0
  124. disdrodb/l1/encoding_attrs.py +0 -635
  125. disdrodb/l2/processing_options.py +0 -213
  126. /disdrodb/l0/readers/PARSIVEL/SLOVENIA/{UL_FGG.py → UL.py} +0 -0
  127. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/WHEEL +0 -0
  128. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/licenses/LICENSE +0 -0
  129. {disdrodb-0.1.1.dist-info → disdrodb-0.1.3.dist-info}/top_level.txt +0 -0
disdrodb/utils/time.py CHANGED
@@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
 #### Sampling Interval Acronyms


-def seconds_to_acronym(seconds):
+def seconds_to_temporal_resolution(seconds):
     """
     Convert a duration in seconds to a readable string format (e.g., "1H30", "1D2H").

@@ -57,27 +57,27 @@ def seconds_to_acronym(seconds):
         parts.append(f"{components.minutes}MIN")
     if components.seconds > 0:
         parts.append(f"{components.seconds}S")
-    acronym = "".join(parts)
-    return acronym
+    temporal_resolution = "".join(parts)
+    return temporal_resolution


-def get_resampling_information(sample_interval_acronym):
+def get_resampling_information(temporal_resolution):
     """
-    Extract resampling information from the sample interval acronym.
+    Extract resampling information from the temporal_resolution string.

     Parameters
     ----------
-    sample_interval_acronym: str
-        A string representing the sample interval: e.g., "1H30MIN", "ROLL1H30MIN".
+    temporal_resolution: str
+        A string representing the product temporal resolution: e.g., "1H30MIN", "ROLL1H30MIN".

     Returns
     -------
     sample_interval_seconds, rolling: tuple
         Sample_interval in seconds and whether rolling is enabled.
     """
-    rolling = sample_interval_acronym.startswith("ROLL")
+    rolling = temporal_resolution.startswith("ROLL")
     if rolling:
-        sample_interval_acronym = sample_interval_acronym[4:]  # Remove "ROLL"
+        temporal_resolution = temporal_resolution[4:]  # Remove "ROLL"

     # Allowed pattern: one or more occurrences of "<number><unit>"
     # where unit is exactly one of D, H, MIN, or S.
@@ -85,15 +85,15 @@ def get_resampling_information(sample_interval_acronym):
     pattern = r"^(\d+(?:D|H|MIN|S))+$"

     # Check if the entire string matches the pattern
-    if not re.match(pattern, sample_interval_acronym):
+    if not re.match(pattern, temporal_resolution):
         raise ValueError(
-            f"Invalid sample interval acronym '{sample_interval_acronym}'. "
+            f"Invalid temporal resolution '{temporal_resolution}'. "
             "Must be composed of one or more <number><unit> groups, where unit is D, H, MIN, or S.",
         )

     # Regular expression to match duration components and extract all (value, unit) pairs
     pattern = r"(\d+)(D|H|MIN|S)"
-    matches = re.findall(pattern, sample_interval_acronym)
+    matches = re.findall(pattern, temporal_resolution)

     # Conversion factors for each unit
     unit_to_seconds = {
@@ -112,21 +112,21 @@ def get_resampling_information(sample_interval_acronym):
     return sample_interval, rolling


-def acronym_to_seconds(acronym):
+def temporal_resolution_to_seconds(temporal_resolution):
     """
-    Extract the interval in seconds from the duration acronym.
+    Extract the measurement interval in seconds from the temporal resolution string.

     Parameters
     ----------
-    acronym: str
-        A string representing a duration: e.g., "1H30MIN", "ROLL1H30MIN".
+    temporal_resolution: str
+        A string representing the product measurement interval: e.g., "1H30MIN", "ROLL1H30MIN".

     Returns
     -------
     seconds
         Duration in seconds.
     """
-    seconds, _ = get_resampling_information(acronym)
+    seconds, _ = get_resampling_information(temporal_resolution)
     return seconds


@@ -262,6 +262,7 @@ def regularize_dataset(
         Regularized dataset.

     """
+    attrs = xr_obj.attrs.copy()
     xr_obj = _check_time_sorted(xr_obj, time_dim=time_dim)
     start_time, end_time = get_dataset_start_end_time(xr_obj, time_dim=time_dim)

@@ -289,11 +290,14 @@ def regularize_dataset(
         # tolerance=tolerance,  # mismatch in seconds
         fill_value=fill_value,
     )
+
+    # Ensure attributes are preserved
+    xr_obj.attrs = attrs
     return xr_obj


 ####------------------------------------------
-#### Sampling interval utilities
+#### Interval utilities


 def ensure_sample_interval_in_seconds(sample_interval):  # noqa: PLR0911
@@ -376,7 +380,7 @@ def ensure_sample_interval_in_seconds(sample_interval):  # noqa: PLR0911
            raise TypeError("Float array sample_interval must contain only whole numbers.")
        return sample_interval.astype(int)

-    # Deal with xarray.DataArrayy of floats that are all integer-valued (with optionally some NaN)
+    # Deal with xarray.DataArray of floats that are all integer-valued (with optionally some NaN)
     if isinstance(sample_interval, xr.DataArray) and np.issubdtype(sample_interval.dtype, np.floating):
         arr = sample_interval.copy()
         data = arr.data
@@ -397,6 +401,17 @@ def ensure_sample_interval_in_seconds(sample_interval):  # noqa: PLR0911
     )


+def ensure_timedelta_seconds_interval(interval):
+    """Return interval as numpy.timedelta64 in seconds."""
+    if isinstance(interval, (xr.DataArray, np.ndarray)):
+        return ensure_sample_interval_in_seconds(interval).astype("m8[s]")
+    return np.array(ensure_sample_interval_in_seconds(interval), dtype="m8[s]")
+
+
+####------------------------------------------
+#### Sample Interval Utilities
+
+
 def infer_sample_interval(ds, robust=False, verbose=False, logger=None):
     """Infer the sample interval of a dataset.

@@ -655,3 +670,134 @@ def regularize_timesteps(ds, sample_interval, robust=False, add_quality_flag=Tru
     ds = ds.isel(time=idx_valid_timesteps)
     # Return dataset
     return ds
+
+
+####---------------------------------------------------------------------------------
+#### Time blocks
+
+
+def check_freq(freq: str) -> None:
+    """Check validity of freq argument."""
+    valid_freq = ["none", "year", "season", "quarter", "month", "day", "hour"]
+    if not isinstance(freq, str):
+        raise TypeError("'freq' must be a string.")
+    if freq not in valid_freq:
+        raise ValueError(
+            f"'freq' '{freq}' is not possible. Must be one of: {valid_freq}.",
+        )
+    return freq
+
+
+def generate_time_blocks(start_time: np.datetime64, end_time: np.datetime64, freq: str) -> np.ndarray:  # noqa: PLR0911
+    """Generate time blocks between `start_time` and `end_time` for a given frequency.
+
+    Parameters
+    ----------
+    start_time : numpy.datetime64
+        Inclusive start of the overall time range.
+    end_time : numpy.datetime64
+        Inclusive end of the overall time range.
+    freq : str
+        Frequency specifier. Accepted values are:
+        - 'none' : return a single block [start_time, end_time]
+        - 'day' : split into daily blocks
+        - 'month' : split into calendar months
+        - 'quarter' : split into calendar quarters
+        - 'year' : split into calendar years
+        - 'season' : split into meteorological seasons (MAM, JJA, SON, DJF)
+
+    Returns
+    -------
+    numpy.ndarray
+        Array of shape (n, 2) with dtype datetime64[s], where each row is [block_start, block_end].
+
+    """
+    freq = check_freq(freq)
+    if freq == "none":
+        return np.array([[start_time, end_time]], dtype="datetime64[s]")
+
+    if freq == "hour":
+        periods = pd.period_range(start=start_time, end=end_time, freq="h")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+
+    if freq == "day":
+        periods = pd.period_range(start=start_time, end=end_time, freq="d")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+
+    if freq == "month":
+        periods = pd.period_range(start=start_time, end=end_time, freq="M")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+
+    if freq == "year":
+        periods = pd.period_range(start=start_time, end=end_time, freq="Y")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+
+    if freq == "quarter":
+        periods = pd.period_range(start=start_time, end=end_time, freq="Q")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.floor("s").to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+
+    if freq == "season":
+        # Fiscal quarter frequency ending in Feb → seasons DJF, MAM, JJA, SON
+        periods = pd.period_range(start=start_time, end=end_time, freq="Q-FEB")
+        blocks = np.array(
+            [
+                [
+                    period.start_time.to_datetime64().astype("datetime64[s]"),
+                    period.end_time.to_datetime64().astype("datetime64[s]"),
+                ]
+                for period in periods
+            ],
+            dtype="datetime64[s]",
+        )
+        return blocks
+    raise NotImplementedError(f"Frequency '{freq}' is not implemented.")
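
For reference, a minimal usage sketch of the renamed helpers and the new generate_time_blocks function, based only on the code shown in this diff; the example values and the expected outputs in the comments are illustrative assumptions, not taken from the package tests:

import numpy as np

from disdrodb.utils.time import (
    generate_time_blocks,
    get_resampling_information,
    seconds_to_temporal_resolution,
    temporal_resolution_to_seconds,
)

# 5400 seconds -> "1H30MIN"; the "ROLL" prefix toggles the rolling flag
print(seconds_to_temporal_resolution(5400))       # expected: "1H30MIN"
print(get_resampling_information("ROLL1H30MIN"))  # expected: (5400, True)
print(temporal_resolution_to_seconds("10MIN"))    # expected: 600

# Split a time range into calendar-month [start, end] blocks (shape (n, 2), datetime64[s])
blocks = generate_time_blocks(
    start_time=np.datetime64("2024-01-01 00:00:00"),
    end_time=np.datetime64("2024-03-15 00:00:00"),
    freq="month",
)
print(blocks.shape)  # expected: (3, 2)
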
disdrodb/utils/writer.py CHANGED
@@ -22,11 +22,29 @@ import os

 import xarray as xr

-from disdrodb.utils.attrs import set_disdrodb_attrs
+from disdrodb.utils.attrs import get_attrs_dict, set_attrs, set_disdrodb_attrs
 from disdrodb.utils.directories import create_directory, remove_if_exists
+from disdrodb.utils.encoding import get_encodings_dict, set_encodings


-def write_product(ds: xr.Dataset, filepath: str, product: str, force: bool = False) -> None:
+def finalize_product(ds, product=None) -> xr.Dataset:
+    """Finalize DISDRODB product."""
+    # Add variables attributes
+    attrs_dict = get_attrs_dict()
+    ds = set_attrs(ds, attrs_dict=attrs_dict)
+
+    # Add variables encoding
+    encodings_dict = get_encodings_dict()
+    ds = set_encodings(ds, encodings_dict=encodings_dict)
+
+    # Add DISDRODB global attributes
+    # - e.g. in generate_l2_radar it inherit from input dataset !
+    if product is not None:
+        ds = set_disdrodb_attrs(ds, product=product)
+    return ds
+
+
+def write_product(ds: xr.Dataset, filepath: str, force: bool = False) -> None:
     """Save the xarray dataset into a NetCDF file.

     Parameters
@@ -35,8 +53,6 @@ def write_product(ds: xr.Dataset, filepath: str, product: str, force: bool = Fal
         Input xarray dataset.
     filepath : str
         Output file path.
-    product: str
-        DISDRODB product name.
     force : bool, optional
         Whether to overwrite existing data.
         If ``True``, overwrite existing data into destination directories.
@@ -50,8 +66,5 @@ def write_product(ds: xr.Dataset, filepath: str, product: str, force: bool = Fal
     # - If force=False --> Raise error
     remove_if_exists(filepath, force=force)

-    # Update attributes
-    ds = set_disdrodb_attrs(ds, product=product)
-

     # Write netcdf
     ds.to_netcdf(filepath, engine="netcdf4")
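
A hedged sketch of how the new finalize_product / write_product split might be used downstream. The toy dataset, variable name, and output path are placeholders, and it is an assumption here that the attribute and encoding tables simply skip variables they do not know:

import numpy as np
import xarray as xr

from disdrodb.utils.writer import finalize_product, write_product

# Placeholder dataset; real DISDRODB products carry the standard variables and coordinates
ds = xr.Dataset(
    {"fall_velocity": ("time", np.array([3.2, 4.1, 5.0]))},
    coords={"time": np.array(["2024-01-01T00:00", "2024-01-01T00:01", "2024-01-01T00:02"], dtype="datetime64[ns]")},
)

ds = finalize_product(ds, product="L1")  # attach variable attributes, encodings, global attributes
write_product(ds, filepath="/tmp/example_L1.nc", force=True)  # write NetCDF, overwriting if present
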
disdrodb/utils/xarray.py CHANGED
@@ -21,6 +21,8 @@ import numpy as np
 import xarray as xr
 from xarray.core import dtypes

+from disdrodb.constants import DIAMETER_COORDS, VELOCITY_COORDS
+

 def xr_get_last_valid_idx(da_condition, dim, fill_value=None):
     """
@@ -97,6 +99,89 @@ def xr_get_last_valid_idx(da_condition, dim, fill_value=None):
     return last_idx


+####-------------------------------------------------------------------
+#### Unstacking dimension
+
+
+def _check_coord_handling(coord_handling):
+    if coord_handling not in {"keep", "drop", "unstack"}:
+        raise ValueError("coord_handling must be one of 'keep', 'drop', or 'unstack'.")
+
+
+def _unstack_coordinates(xr_obj, dim, prefix, suffix):
+    # Identify coordinates that share the target dimension
+    coords_with_dim = _get_non_dimensional_coordinates(xr_obj, dim=dim)
+    ds = xr.Dataset()
+    for coord_name in coords_with_dim:
+        coord_da = xr_obj[coord_name]
+        # Split the coordinate DataArray along the target dimension, drop coordinate and merge
+        split_ds = unstack_datarray_dimension(coord_da, coord_handling="drop", dim=dim, prefix=prefix, suffix=suffix)
+        ds.update(split_ds)
+    return ds
+
+
+def _handle_unstack_non_dim_coords(ds, source_xr_obj, coord_handling, dim, prefix, suffix):
+    # Deal with coordinates sharing the target dimension
+    if coord_handling == "keep":
+        return ds
+    if coord_handling == "unstack":
+        ds_coords = _unstack_coordinates(source_xr_obj, dim=dim, prefix=prefix, suffix=suffix)
+        ds.update(ds_coords)
+    # Remove non dimensional coordinates (unstack and drop coord_handling)
+    ds = ds.drop_vars(_get_non_dimensional_coordinates(ds, dim=dim))
+    return ds
+
+
+def _get_non_dimensional_coordinates(xr_obj, dim):
+    return [coord_name for coord_name, coord_da in xr_obj.coords.items() if dim in coord_da.dims and coord_name != dim]
+
+
+def unstack_datarray_dimension(da, dim, coord_handling="keep", prefix="", suffix=""):
+    """
+    Split a DataArray along a specified dimension into a Dataset with separate prefixed and suffixed variables.
+
+    Parameters
+    ----------
+    da : xarray.DataArray
+        The DataArray to split.
+    dim : str
+        The dimension along which to split the DataArray.
+    coord_handling : str, optional
+        Option to handle coordinates sharing the target dimension.
+        Choices are 'keep', 'drop', or 'unstack'. Defaults to 'keep'.
+    prefix : str, optional
+        String to prepend to each new variable name.
+    suffix : str, optional
+        String to append to each new variable name.
+
+    Returns
+    -------
+    xarray.Dataset
+        A Dataset with each variable split along the specified dimension.
+        The Dataset variables are named "{prefix}{name}{suffix}{dim_value}".
+        Coordinates sharing the target dimension are handled based on `coord_handling`.
+    """
+    # Retrieve DataArray name
+    name = da.name
+    # Unstack variables
+    ds = da.to_dataset(dim=dim)
+    rename_dict = {dim_value: f"{prefix}{name}{suffix}{dim_value}" for dim_value in list(ds.data_vars)}
+    ds = ds.rename_vars(rename_dict)
+    # Deal with coordinates sharing the target dimension
+    return _handle_unstack_non_dim_coords(
+        ds=ds,
+        source_xr_obj=da,
+        coord_handling=coord_handling,
+        dim=dim,
+        prefix=prefix,
+        suffix=suffix,
+    )
+
+
+####--------------------------------------------------------------------------
+#### Fill Values Utilities
+
 def define_dataarray_fill_value(da):
     """Define the fill value for a numerical xarray.DataArray."""
     if np.issubdtype(da.dtype, np.floating):
@@ -163,13 +248,9 @@ def define_fill_value_dictionary(xr_obj):

 def remove_diameter_coordinates(xr_obj):
     """Drop diameter coordinates from xarray object."""
-    from disdrodb import DIAMETER_COORDS
-
     return xr_obj.drop_vars(DIAMETER_COORDS, errors="ignore")


 def remove_velocity_coordinates(xr_obj):
     """Drop velocity coordinates from xarray object."""
-    from disdrodb import VELOCITY_COORDS
-
     return xr_obj.drop_vars(VELOCITY_COORDS, errors="ignore")
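
As an illustration of the new unstack_datarray_dimension helper: the DataArray, dimension, and variable names below are invented for the example, and the expected output follows from the logic shown above rather than from package documentation:

import numpy as np
import xarray as xr

from disdrodb.utils.xarray import unstack_datarray_dimension

da = xr.DataArray(
    np.arange(6).reshape(2, 3),
    dims=("time", "source"),
    coords={"time": [0, 1], "source": ["radar", "gauge", "model"]},
    name="rain_rate",
)

# One variable per value along 'source', named "{prefix}{name}{suffix}{dim_value}"
ds = unstack_datarray_dimension(da, dim="source", suffix="_")
print(list(ds.data_vars))  # expected: ['rain_rate_radar', 'rain_rate_gauge', 'rain_rate_model']
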
disdrodb/viz/__init__.py CHANGED
@@ -15,3 +15,16 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 # -----------------------------------------------------------------------------.
 """DISDRODB Visualization Module."""
+from disdrodb.viz.plots import (
+    compute_dense_lines,
+    max_blend_images,
+    plot_nd,
+    to_rgba,
+)
+
+__all__ = [
+    "compute_dense_lines",
+    "max_blend_images",
+    "plot_nd",
+    "to_rgba",
+]
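
With these re-exports, the plotting helpers can be imported directly from the subpackage; their call signatures are not part of this diff:

from disdrodb.viz import compute_dense_lines, max_blend_images, plot_nd, to_rgba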