cloudnetpy 1.65.7__py3-none-any.whl → 1.66.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (42)
  1. cloudnetpy/categorize/__init__.py +0 -1
  2. cloudnetpy/categorize/atmos_utils.py +278 -59
  3. cloudnetpy/categorize/attenuation.py +31 -0
  4. cloudnetpy/categorize/attenuations/__init__.py +37 -0
  5. cloudnetpy/categorize/attenuations/gas_attenuation.py +30 -0
  6. cloudnetpy/categorize/attenuations/liquid_attenuation.py +80 -0
  7. cloudnetpy/categorize/attenuations/melting_attenuation.py +75 -0
  8. cloudnetpy/categorize/attenuations/rain_attenuation.py +84 -0
  9. cloudnetpy/categorize/categorize.py +140 -81
  10. cloudnetpy/categorize/classify.py +92 -128
  11. cloudnetpy/categorize/containers.py +45 -31
  12. cloudnetpy/categorize/droplet.py +2 -2
  13. cloudnetpy/categorize/falling.py +3 -3
  14. cloudnetpy/categorize/freezing.py +2 -2
  15. cloudnetpy/categorize/itu.py +243 -0
  16. cloudnetpy/categorize/melting.py +0 -3
  17. cloudnetpy/categorize/model.py +31 -14
  18. cloudnetpy/categorize/radar.py +28 -12
  19. cloudnetpy/constants.py +3 -6
  20. cloudnetpy/model_evaluation/file_handler.py +2 -2
  21. cloudnetpy/model_evaluation/products/observation_products.py +8 -8
  22. cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py +5 -2
  23. cloudnetpy/model_evaluation/tests/unit/test_observation_products.py +11 -11
  24. cloudnetpy/output.py +46 -26
  25. cloudnetpy/plotting/plot_meta.py +8 -2
  26. cloudnetpy/plotting/plotting.py +31 -8
  27. cloudnetpy/products/classification.py +39 -34
  28. cloudnetpy/products/der.py +15 -13
  29. cloudnetpy/products/drizzle_tools.py +22 -21
  30. cloudnetpy/products/ier.py +8 -45
  31. cloudnetpy/products/iwc.py +7 -22
  32. cloudnetpy/products/lwc.py +14 -15
  33. cloudnetpy/products/mwr_tools.py +15 -2
  34. cloudnetpy/products/product_tools.py +121 -119
  35. cloudnetpy/utils.py +4 -0
  36. cloudnetpy/version.py +2 -2
  37. {cloudnetpy-1.65.7.dist-info → cloudnetpy-1.66.0.dist-info}/METADATA +1 -1
  38. {cloudnetpy-1.65.7.dist-info → cloudnetpy-1.66.0.dist-info}/RECORD +41 -35
  39. {cloudnetpy-1.65.7.dist-info → cloudnetpy-1.66.0.dist-info}/WHEEL +1 -1
  40. cloudnetpy/categorize/atmos.py +0 -376
  41. {cloudnetpy-1.65.7.dist-info → cloudnetpy-1.66.0.dist-info}/LICENSE +0 -0
  42. {cloudnetpy-1.65.7.dist-info → cloudnetpy-1.66.0.dist-info}/top_level.txt +0 -0
cloudnetpy/output.py CHANGED
@@ -2,6 +2,7 @@
 
  import datetime
  import logging
+ from dataclasses import fields
  from os import PathLike
  from uuid import UUID
 
@@ -10,6 +11,9 @@ import numpy as np
  from numpy import ma
 
  from cloudnetpy import utils, version
+ from cloudnetpy.categorize.containers import Observations
+ from cloudnetpy.categorize.model import Model
+ from cloudnetpy.datasource import DataSource
  from cloudnetpy.instruments.instruments import Instrument
  from cloudnetpy.metadata import COMMON_ATTRIBUTES, MetaData
 
@@ -69,7 +73,7 @@ def _get_netcdf_dimensions(obj) -> dict:
 
  def save_product_file(
  short_id: str,
- obj,
+ obj: DataSource,
  file_name: str,
  uuid: str | None = None,
  copy_from_cat: tuple = (),
@@ -105,13 +109,13 @@ def save_product_file(
  f"{human_readable_file_type.capitalize()} products from"
  f" {obj.dataset.location}"
  )
- nc.source_file_uuids = get_source_uuids(nc, obj)
+ nc.source_file_uuids = get_source_uuids([nc, obj])
  copy_global(
  obj.dataset,
  nc,
  ("location", "day", "month", "year", "source", "voodoonet_version"),
  )
- merge_history(nc, human_readable_file_type, {"categorize": obj})
+ merge_history(nc, human_readable_file_type, obj)
  nc.references = get_references(short_id)
  return file_uuid
 
@@ -172,26 +176,32 @@ def get_references(identifier: str | None = None, extra: list | None = None) ->
  return references
 
 
- def get_source_uuids(*sources) -> str:
+ def get_source_uuids(data: Observations | list[netCDF4.Dataset | DataSource]) -> str:
  """Returns file_uuid attributes of objects.
 
  Args:
- *sources: Objects whose file_uuid attributes are read (if exist).
+ data: Observations instance.
 
  Returns:
  str: UUIDs separated by comma.
 
  """
+ if isinstance(data, Observations):
+ obs = [getattr(data, field.name) for field in fields(data)]
+ elif isinstance(data, list):
+ obs = data
  uuids = [
- source.dataset.file_uuid
- for source in sources
- if hasattr(source, "dataset") and hasattr(source.dataset, "file_uuid")
+ obj.dataset.file_uuid
+ for obj in obs
+ if hasattr(obj, "dataset") and hasattr(obj.dataset, "file_uuid")
  ]
- unique_uuids = list(set(uuids))
+ unique_uuids = sorted(set(uuids))
  return ", ".join(unique_uuids)
 
 
- def merge_history(nc: netCDF4.Dataset, file_type: str, data: dict) -> None:
+ def merge_history(
+ nc: netCDF4.Dataset, file_type: str, data: Observations | DataSource
+ ) -> None:
  """Merges history fields from one or several files and creates a new record.
 
  Args:
@@ -202,26 +212,36 @@ def merge_history(nc: netCDF4.Dataset, file_type: str, data: dict) -> None:
  """
  new_record = f"{utils.get_time()} - {file_type} file created"
  histories = []
- for key, obj in data.items():
- if (
- not isinstance(obj, str | list)
- and obj is not None
- and hasattr(obj.dataset, "history")
- ):
- history = obj.dataset.history
- history = history.split("\n")[-1] if key == "model" else history
- histories.append(history)
+ if (
+ isinstance(data, DataSource)
+ and hasattr(data, "dataset")
+ and hasattr(data.dataset, "history")
+ ):
+ history = data.dataset.history
+ histories.append(history)
+ if isinstance(data, Observations):
+ for field in fields(data):
+ obj = getattr(data, field.name)
+ if hasattr(obj, "dataset") and hasattr(obj.dataset, "history"):
+ history = obj.dataset.history
+ history = history.split("\n")[-1] if isinstance(obj, Model) else history
+ histories.append(history)
  histories.sort(reverse=True)
  old_history = [f"\n{history}" for history in histories]
  old_history_str = "".join(old_history)
  nc.history = f"{new_record}{old_history_str}"
 
 
- def add_source_instruments(nc: netCDF4.Dataset, data: dict) -> None:
+ def add_source_instruments(nc: netCDF4.Dataset, data: Observations) -> None:
  """Adds source attribute to categorize file."""
- sources = [obj.source for obj in data.values() if hasattr(obj, "source")]
- sources = [sources[0]] + [f"\n{source}" for source in sources[1:]]
- nc.source = "".join(sources)
+ sources = []
+ for field in fields(data):
+ obj = getattr(data, field.name)
+ if hasattr(obj, "source"):
+ sources.append(obj.source)
+ if sources:
+ formatted_sources = [sources[0]] + [f"\n{source}" for source in sources[1:]]
+ nc.source = "".join(formatted_sources)
 
 
  def init_file(
@@ -319,7 +339,7 @@ def add_time_attribute(
  return attributes
 
 
- def add_source_attribute(attributes: dict, data: dict) -> dict:
+ def add_source_attribute(attributes: dict, data: Observations) -> dict:
  """Adds source attribute to variables."""
  variables = {
  "radar": (
@@ -340,9 +360,9 @@ def add_source_attribute(attributes: dict, data: dict) -> dict:
  "disdrometer": ("rainfall_rate",),
  }
  for instrument, keys in variables.items():
- if data[instrument] is None:
+ if getattr(data, instrument) is None:
  continue
- source = data[instrument].dataset.source
+ source = getattr(data, instrument).dataset.source
  for key in keys:
  if key in attributes:
  attributes[key] = attributes[key]._replace(source=source)
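
Note: the output.py changes above replace the old dict-based bookkeeping with the Observations dataclass, walked via dataclasses.fields. A minimal, self-contained sketch of that pattern using hypothetical stand-in classes (not the real cloudnetpy ones, and with the nested `.dataset` level flattened away):

    from dataclasses import dataclass, fields


    @dataclass
    class FakeDataset:
        file_uuid: str
        history: str


    @dataclass
    class FakeObservations:  # stand-in for cloudnetpy.categorize.containers.Observations
        radar: FakeDataset | None
        lidar: FakeDataset | None
        mwr: FakeDataset | None = None


    obs = FakeObservations(
        radar=FakeDataset("uuid-radar", "2024-01-01 radar processed"),
        lidar=FakeDataset("uuid-lidar", "2024-01-01 lidar processed"),
    )

    # Same idea as the new get_source_uuids(): iterate the dataclass fields,
    # skip missing instruments, and return unique UUIDs in a deterministic order.
    uuids = sorted(
        {
            getattr(obs, f.name).file_uuid
            for f in fields(obs)
            if getattr(obs, f.name) is not None
        }
    )
    print(", ".join(uuids))  # "uuid-lidar, uuid-radar"

The switch from list(set(...)) to sorted(set(...)) in the diff likewise makes the source_file_uuids attribute reproducible across runs.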
cloudnetpy/plotting/plot_meta.py CHANGED
@@ -96,7 +96,7 @@ _CLABEL = {
  ("Uncorrected", _COLORS["orange"]),
  ("Corrected", _COLORS["lightgreen"]),
  ("Ice from lidar", _COLORS["yellow"]),
- ("Ice above rain", _COLORS["darksky"]),
+ ("_Ice above rain", _COLORS["darksky"]),
  ("Clear above rain", _COLORS["skyblue"]),
  ("Positive temp.", _COLORS["seaweed"]),
  ),
@@ -497,7 +497,13 @@ ATTRIBUTES = {
  plot_range=(0, 5),
  ),
  "radar_gas_atten": PlotMeta(
- plot_range=(0, 1),
+ plot_range=(0, 5),
+ ),
+ "radar_rain_atten": PlotMeta(
+ plot_range=(0, 5),
+ ),
+ "radar_melting_atten": PlotMeta(
+ plot_range=(0, 5),
  ),
  "iwv": PlotMeta(
  cmap="Blues",
cloudnetpy/plotting/plotting.py CHANGED
@@ -21,6 +21,7 @@ from mpl_toolkits.axes_grid1 import make_axes_locatable
  from numpy import ma, ndarray
  from scipy.ndimage import uniform_filter
 
+ from cloudnetpy.categorize.freezing import find_t0_alt
  from cloudnetpy.exceptions import PlottingError
  from cloudnetpy.instruments.ceilometer import calc_sigma_units
  from cloudnetpy.plotting.plot_meta import ATTRIBUTES, PlotMeta
@@ -399,8 +400,9 @@ class Plot:
  flag_names = [
  f"{self.sub_plot.variable.name}_quality_flag",
  "temperature_quality_flag",
- "quality_flag",
  ]
+ if self.sub_plot.variable.name != "irt":
+ flag_names.append("quality_flag")
  for flag_name in flag_names:
  if flag_name in figure_data.file.variables:
  return figure_data.file.variables[flag_name][:] > 0
@@ -426,6 +428,22 @@ class Plot2D(Plot):
  if figure_data.is_mwrpy_product():
  self._fill_flagged_data(figure_data)
 
+ if self.sub_plot.variable.name == "Tw":
+ tw = figure_data.file["Tw"][:]
+ height = figure_data.height
+ if height is None:
+ msg = "No height information in the file."
+ raise ValueError(msg)
+ t0_alt = find_t0_alt(tw, height)
+ t0_alt = ma.masked_where(t0_alt <= height[0], t0_alt)
+ self._ax.plot(
+ figure_data.time,
+ t0_alt,
+ color="gray",
+ linestyle="dashed",
+ zorder=_get_zorder("t0"),
+ )
+
  def _fill_flagged_data(self, figure_data: FigureData) -> None:
  flags = self._read_flagged_data(figure_data)
  batches = find_batches_of_ones(flags)
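
Note: the new Plot2D branch above overlays the 0 °C wet-bulb isotherm on Tw plots using find_t0_alt. A rough, hypothetical sketch of the same idea; the t0_altitude helper below is a simple interpolation stand-in, not cloudnetpy's find_t0_alt:

    import matplotlib.pyplot as plt
    import numpy as np
    from numpy import ma

    T0 = 273.15  # K


    def t0_altitude(temperature: np.ndarray, height: np.ndarray) -> np.ndarray:
        """Interpolate the 0 degC crossing altitude for each profile (assumes T decreases with height)."""
        alt = np.empty(temperature.shape[0])
        for i, profile in enumerate(temperature):
            alt[i] = np.interp(T0, profile[::-1], height[::-1])
        return alt


    time = np.linspace(0, 24, 50)        # hours
    height = np.linspace(0.1, 10, 80)    # km
    temperature = 283 - 6.5 * height[np.newaxis, :] + 2 * np.sin(time)[:, np.newaxis]

    t0_alt = t0_altitude(temperature, height)
    t0_alt = ma.masked_where(t0_alt <= height[0], t0_alt)  # hide crossings below the lowest gate

    fig, ax = plt.subplots()
    ax.pcolormesh(time, height, temperature.T, shading="auto")
    ax.plot(time, t0_alt, color="gray", linestyle="dashed")
    plt.show()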
@@ -490,6 +508,10 @@ class Plot2D(Plot):
  smoothed_data = uniform_filter(self._data[valid_time_ind, :], sigma_units)
  self._data[valid_time_ind, :] = smoothed_data
 
+ if self._data.mask.all():
+ msg = "All data is masked"
+ raise PlottingError(msg)
+
  pcolor_kwargs = {
  "cmap": plt.get_cmap(str(self._plot_meta.cmap)),
  "vmin": vmin,
@@ -584,10 +606,10 @@ class Plot1D(Plot):
  msg = "All data is masked"
  raise PlottingError(msg)
  self._data_orig = self._data_orig[:, freq_ind]
- is_bad_zenith = self._get_bad_zenith_profiles(figure_data)
- self._data[is_bad_zenith] = ma.masked
- self._data_orig[is_bad_zenith] = ma.masked
  if self.sub_plot.variable.name == "tb":
+ is_bad_zenith = self._get_bad_zenith_profiles(figure_data)
+ self._data[is_bad_zenith] = ma.masked
+ self._data_orig[is_bad_zenith] = ma.masked
  flags = self._read_flagged_data(figure_data)[:, freq_ind]
  flags[is_bad_zenith] = False
  if np.any(flags):
@@ -602,7 +624,7 @@ class Plot1D(Plot):
  value = figure_data.file.variables["frequency"][freq_ind]
  unit = "GHz"
  else:
- label = "Wavel"
+ label = "WL"
  variable = figure_data.file.variables["ir_wavelength"]
  # `ir_wavelength` is scalar in old files
  value = variable[:] if len(variable.shape) == 0 else variable[freq_ind]
@@ -681,7 +703,7 @@ class Plot1D(Plot):
  time = figure_data.time.copy()
  data = self._data_orig.copy()
  flags = self._read_flagged_data(figure_data)
- if hacky_freq_ind is not None:
+ if hacky_freq_ind is not None and np.any(flags):
  flags = flags[:, hacky_freq_ind]
  is_invalid = ma.getmaskarray(data)
  if np.any(flags):
@@ -689,7 +711,8 @@ class Plot1D(Plot):
 
  is_wind_direction = self.sub_plot.variable.name == "wind_direction"
  if is_wind_direction:
- data = np.stack([figure_data.file["wind_speed"], data])
+ wind_speed = figure_data.file["wind_speed"]
+ data = np.stack([wind_speed, data], axis=1)
 
  block_ind = np.where(np.diff(is_invalid))[0] + 1
  valid_time_blocks = np.split(time, block_ind)[is_invalid[0] :: 2]
@@ -698,7 +721,7 @@ class Plot1D(Plot):
  for time1, data1 in zip(valid_time_blocks, valid_data_blocks, strict=False):
  if is_wind_direction:
  sma = self._calculate_average_wind_direction(
- data1[0], data1[1], time1, window=15
+ data1[:, 0], data1[:, 1], time1, window=15
  )
  else:
  sma = self._calculate_moving_average(data1, time1, window=5)
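
Note: stacking wind speed and direction along axis=1 (shape (n_time, 2)) keeps each speed/direction pair on one row, so the time-wise block splitting and the data1[:, 0], data1[:, 1] indexing above stay aligned. A small illustration, followed by a speed-weighted vector mean of direction (only an illustration of the general technique, not cloudnetpy's _calculate_average_wind_direction):

    import numpy as np

    wind_speed = np.array([2.0, 3.0, 5.0, 4.0])     # m/s
    wind_dir = np.array([350.0, 10.0, 20.0, 30.0])  # degrees

    data = np.stack([wind_speed, wind_dir], axis=1)  # shape (4, 2): one (speed, dir) pair per time step
    blocks = np.split(data, [2])                     # splitting along time keeps the pairs together
    print([b.shape for b in blocks])                 # [(2, 2), (2, 2)]

    # Speed-weighted vector averaging of wind direction:
    u = -data[:, 0] * np.sin(np.radians(data[:, 1]))
    v = -data[:, 0] * np.cos(np.radians(data[:, 1]))
    mean_dir = np.degrees(np.arctan2(-u.mean(), -v.mean())) % 360
    print(round(float(mean_dir), 1))                 # ~16.5 for these inputs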
cloudnetpy/products/classification.py CHANGED
@@ -4,7 +4,7 @@ import numpy as np
  from numpy import ma
 
  from cloudnetpy import output, utils
- from cloudnetpy.categorize import atmos
+ from cloudnetpy.categorize import atmos_utils
  from cloudnetpy.datasource import DataSource
  from cloudnetpy.metadata import MetaData
  from cloudnetpy.products.product_tools import CategorizeBits
@@ -67,40 +67,45 @@ def _get_target_classification(
  categorize_bits: CategorizeBits,
  ) -> ma.MaskedArray:
  bits = categorize_bits.category_bits
- clutter = categorize_bits.quality_bits["clutter"]
- classification = ma.zeros(bits["cold"].shape, dtype=int)
- classification[bits["droplet"] & ~bits["falling"]] = 1 # Cloud droplets
- classification[~bits["droplet"] & bits["falling"]] = 2 # Drizzle or rain
- classification[bits["droplet"] & bits["falling"]] = (
- 3 # Drizzle or rain and droplets
- )
- classification[~bits["droplet"] & bits["falling"] & bits["cold"]] = 4 # ice
- classification[bits["droplet"] & bits["falling"] & bits["cold"]] = (
- 5 # ice + supercooled
- )
- classification[bits["melting"]] = 6 # melting layer
- classification[bits["melting"] & bits["droplet"]] = 7 # melting + droplets
- classification[bits["aerosol"]] = 8 # aerosols
- classification[bits["insect"] & ~clutter] = 9 # insects
- classification[bits["aerosol"] & bits["insect"] & ~clutter] = (
- 10 # insects + aerosols
- )
- classification[clutter & ~bits["aerosol"]] = 0
+ clutter = categorize_bits.quality_bits.clutter
+ classification = ma.zeros(bits.freezing.shape, dtype=int)
+ classification[bits.droplet & ~bits.falling] = 1 # Cloud droplets
+ classification[~bits.droplet & bits.falling] = 2 # Drizzle or rain
+ classification[bits.droplet & bits.falling] = 3 # Drizzle or rain and droplets
+ classification[~bits.droplet & bits.falling & bits.freezing] = 4 # ice
+ classification[bits.droplet & bits.falling & bits.freezing] = 5 # ice + supercooled
+ classification[bits.melting] = 6 # melting layer
+ classification[bits.melting & bits.droplet] = 7 # melting + droplets
+ classification[bits.aerosol] = 8 # aerosols
+ classification[bits.insect & ~clutter] = 9 # insects
+ classification[bits.aerosol & bits.insect & ~clutter] = 10 # insects + aerosols
+ classification[clutter & ~bits.aerosol] = 0
  return classification
 
 
  def _get_detection_status(categorize_bits: CategorizeBits) -> np.ndarray:
  bits = categorize_bits.quality_bits
- status = np.zeros(bits["radar"].shape, dtype=int)
- status[bits["lidar"] & ~bits["radar"]] = 1
- status[bits["radar"] & bits["lidar"]] = 3
- status[~bits["radar"] & bits["attenuated"] & ~bits["corrected"]] = 4
- status[bits["radar"] & ~bits["lidar"] & ~bits["attenuated"]] = 5
- status[~bits["radar"] & bits["attenuated"] & bits["corrected"]] = 6
- status[bits["radar"] & bits["corrected"]] = 7
- status[bits["radar"] & bits["attenuated"] & ~bits["corrected"]] = 2
- status[bits["clutter"]] = 8
- status[bits["molecular"] & ~bits["radar"]] = 9
+
+ is_attenuated = (
+ bits.attenuated_liquid | bits.attenuated_rain | bits.attenuated_melting
+ )
+ is_corrected = (
+ is_attenuated
+ & (~bits.attenuated_liquid | bits.corrected_liquid)
+ & (~bits.attenuated_rain | bits.corrected_rain)
+ & (~bits.attenuated_melting | bits.corrected_melting)
+ )
+
+ status = np.zeros(bits.radar.shape, dtype=int)
+ status[bits.lidar & ~bits.radar] = 1
+ status[bits.radar & bits.lidar] = 3
+ status[~bits.radar & is_attenuated & ~is_corrected] = 4
+ status[bits.radar & ~bits.lidar & ~is_attenuated] = 5
+ status[~bits.radar & is_attenuated & is_corrected] = 6
+ status[bits.radar & is_corrected] = 7
+ status[bits.radar & is_attenuated & ~is_corrected] = 2
+ status[bits.clutter] = 8
+ status[bits.molecular & ~bits.radar] = 9
  return status
 
 
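
Note: the rewritten _get_detection_status above folds the new per-source attenuation bits into two masks: a pixel is "attenuated" if any of the three attenuations affects it, and "corrected" only if every attenuation that affects it has been corrected. A standalone check of that boolean logic with toy arrays (the arrays are made up; only the expressions mirror the diff):

    import numpy as np

    # Three gates: (1) liquid attenuation, corrected; (2) rain attenuation, uncorrected;
    # (3) liquid corrected but rain uncorrected.
    attenuated_liquid = np.array([True, False, True])
    corrected_liquid = np.array([True, False, True])
    attenuated_rain = np.array([False, True, True])
    corrected_rain = np.array([False, False, False])
    attenuated_melting = np.array([False, False, False])
    corrected_melting = np.array([False, False, False])

    is_attenuated = attenuated_liquid | attenuated_rain | attenuated_melting
    is_corrected = (
        is_attenuated
        & (~attenuated_liquid | corrected_liquid)
        & (~attenuated_rain | corrected_rain)
        & (~attenuated_melting | corrected_melting)
    )
    print(is_attenuated)  # [ True  True  True]
    print(is_corrected)   # [ True False False]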
@@ -112,8 +117,8 @@ def _get_cloud_base_and_top_heights(
  cloud_mask = _find_cloud_mask(classification)
  if not cloud_mask.any():
  return ma.masked_all(cloud_mask.shape[0]), ma.masked_all(cloud_mask.shape[0])
- lowest_bases = atmos.find_lowest_cloud_bases(cloud_mask, height)
- highest_tops = atmos.find_highest_cloud_tops(cloud_mask, height)
+ lowest_bases = atmos_utils.find_lowest_cloud_bases(cloud_mask, height)
+ highest_tops = atmos_utils.find_highest_cloud_tops(cloud_mask, height)
  if not (highest_tops - lowest_bases >= 0).all():
  msg = "Cloud base higher than cloud top!"
  raise ValueError(msg)
@@ -168,8 +173,8 @@ DEFINITIONS = {
  attenuation that would be experienced is unknown.""",
  5: """Good radar echo only.""",
  6: """No radar echo but known attenuation.""",
- 7: """Radar echo corrected for liquid attenuation using microwave
- radiometer data.""",
+ 7: """Radar echo corrected for liquid, rain or melting
+ attenuation.""",
  8: """Radar ground clutter.""",
  9: """Lidar clear-air molecular scattering.""",
  }
cloudnetpy/products/der.py CHANGED
@@ -8,7 +8,7 @@ import numpy as np
  from numpy import ma
 
  from cloudnetpy import output, utils
- from cloudnetpy.categorize import atmos
+ from cloudnetpy.categorize import atmos_utils
  from cloudnetpy.datasource import DataSource
  from cloudnetpy.exceptions import InvalidSourceFileError
  from cloudnetpy.metadata import MetaData
@@ -94,18 +94,18 @@ class DropletClassification(ProductClassification):
  self.is_ice = self._find_ice()
 
  def _find_droplet(self) -> np.ndarray:
- return self.category_bits["droplet"]
+ return self.category_bits.droplet
 
  def _find_mixed(self) -> np.ndarray:
- return self.category_bits["falling"] & self.category_bits["droplet"]
+ return self.category_bits.falling & self.category_bits.droplet
 
  def _find_ice(self) -> np.ndarray:
  return (
- self.category_bits["falling"]
- & self.category_bits["cold"]
- & ~self.category_bits["melting"]
- & ~self.category_bits["droplet"]
- & ~self.category_bits["insect"]
+ self.category_bits.falling
+ & self.category_bits.freezing
+ & ~self.category_bits.melting
+ & ~self.category_bits.droplet
+ & ~self.category_bits.insect
  )
 
 
@@ -131,7 +131,6 @@ class DerSource(DataSource):
  rho_l = 1000 # density of liquid water(kg m-3)
 
  var_x = params.sigma_x * params.sigma_x
- dheight = utils.mdiff(self.getvar("height"))
 
  Z = self.getvar("Z")
  Z = utils.db2lin(Z)
@@ -146,9 +145,12 @@ class DerSource(DataSource):
  der_scaled_error = np.zeros(Z.shape)
  N_scaled = np.zeros(Z.shape)
 
- is_droplet = self.categorize_bits.category_bits["droplet"]
- liquid_bases = atmos.find_cloud_bases(is_droplet)
- liquid_tops = atmos.find_cloud_tops(is_droplet)
+ is_droplet = self.categorize_bits.category_bits.droplet
+ liquid_bases = atmos_utils.find_cloud_bases(is_droplet)
+ liquid_tops = atmos_utils.find_cloud_tops(is_droplet)
+
+ height = self.getvar("height")
+ path_lengths = utils.path_lengths_from_ground(height)
 
  for base, top in zip(
  zip(*np.where(liquid_bases), strict=True),
@@ -162,7 +164,7 @@ class DerSource(DataSource):
  if Z[ind_t, idx_layer].mask.all():
  continue
 
- integral = ma.sum(ma.sqrt(Z[ind_t, idx_layer])) * dheight
+ integral = ma.sum(ma.sqrt(Z[ind_t, idx_layer])) * path_lengths[idx_layer]
 
  # der formula (5)
  A = (Z[ind_t, idx_layer] / params.N) ** (1 / 6)
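
Note: der.py now weights the √Z integral over a liquid layer with per-gate path lengths instead of a single median gate spacing (the removed dheight), which matters on non-uniform range grids. A hedged stand-in for the idea, not a line-for-line copy of the diff; the real utils.path_lengths_from_ground may treat the lowest gate differently:

    import numpy as np
    from numpy import ma


    def path_lengths_from_ground(height: np.ndarray) -> np.ndarray:
        """Assumed behaviour: thickness of each gate's layer, the lowest one counted from the ground."""
        return np.diff(height, prepend=0.0)


    height = np.array([100.0, 200.0, 320.0, 470.0])  # non-uniform grid (m)
    Z = ma.masked_invalid([1.0, 4.0, 9.0, np.nan])   # linear reflectivity, toy values

    path_lengths = path_lengths_from_ground(height)  # [100. 100. 120. 150.]
    idx_layer = slice(0, 3)                          # gates inside one liquid layer

    # Old: ma.sum(ma.sqrt(Z[idx_layer])) * median_spacing
    # New idea: weight each gate by its own path length
    integral = ma.sum(ma.sqrt(Z[idx_layer]) * path_lengths[idx_layer])
    print(integral)  # 1*100 + 2*100 + 3*120 = 660.0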
cloudnetpy/products/drizzle_tools.py CHANGED
@@ -21,7 +21,6 @@ class DrizzleSource(DataSource):
 
  Attributes:
  mie (dict): Mie look-up table data.
- dheight (float): Median difference of height array.
  z (ndarray): 2D radar echo (linear units).
  beta (ndarray): 2D lidar backscatter.
  v (ndarray): 2D doppler velocity.
@@ -31,7 +30,7 @@ class DrizzleSource(DataSource):
  def __init__(self, categorize_file: str):
  super().__init__(categorize_file)
  self.mie = self._read_mie_lut()
- self.dheight = utils.mdiff(self.getvar("height"))
+ self.height_vector = self.getvar("height")
  self.z = self._convert_z_units()
  self.beta = self.getvar("beta")
  self.v = self.getvar("v")
@@ -106,21 +105,22 @@ class DrizzleClassification(ProductClassification):
  return np.isfinite(v_sigma)
 
  def _find_warm_liquid(self) -> np.ndarray:
- return self.category_bits["droplet"] & ~self.category_bits["cold"]
+ return self.category_bits.droplet & ~self.category_bits.freezing
 
  def _find_drizzle(self) -> np.ndarray:
  return (
  ~utils.transpose(self.is_rain)
- & self.category_bits["falling"]
- & ~self.category_bits["droplet"]
- & ~self.category_bits["cold"]
- & ~self.category_bits["melting"]
- & ~self.category_bits["insect"]
- & self.quality_bits["radar"]
- & self.quality_bits["lidar"]
- & ~self.quality_bits["clutter"]
- & ~self.quality_bits["molecular"]
- & ~self.quality_bits["attenuated"]
+ & self.category_bits.falling
+ & ~self.category_bits.droplet
+ & ~self.category_bits.freezing
+ & ~self.category_bits.melting
+ & ~self.category_bits.insect
+ & self.quality_bits.radar
+ & self.quality_bits.lidar
+ & ~self.quality_bits.clutter
+ & ~self.quality_bits.molecular
+ & ~self.quality_bits.attenuated_liquid
+ & ~self.quality_bits.attenuated_rain
  & self.is_v_sigma
  )
 
@@ -128,16 +128,16 @@ class DrizzleClassification(ProductClassification):
  return (
  ~utils.transpose(self.is_rain)
  & self.warm_liquid
- & self.category_bits["falling"]
- & ~self.category_bits["melting"]
- & ~self.category_bits["insect"]
- & self.quality_bits["radar"]
- & ~self.quality_bits["clutter"]
- & ~self.quality_bits["molecular"]
+ & self.category_bits.falling
+ & ~self.category_bits.melting
+ & ~self.category_bits.insect
+ & self.quality_bits.radar
+ & ~self.quality_bits.clutter
+ & ~self.quality_bits.molecular
  )
 
  def _find_cold_rain(self) -> np.ndarray:
- return np.any(self.category_bits["melting"], axis=1)
+ return np.any(self.category_bits.melting, axis=1)
 
 
  class SpectralWidth:
@@ -250,6 +250,7 @@ class DrizzleSolver:
  dia_init[drizzle_ind] = self._calc_dia(self._beta_z_ratio[drizzle_ind], k=18.8)
  n_widths, n_dia = self._width_lut.shape[0], len(self._data.mie["Do"])
  max_ite = 10
+ path_lengths = utils.path_lengths_from_ground(self._data.height_vector)
  for ind in zip(*drizzle_ind, strict=True):
  for _ in range(max_ite):
  lut_ind = self._find_lut_indices(ind, dia_init, n_dia, n_widths)
@@ -264,7 +265,7 @@ class DrizzleSolver:
  break
  self._dia_init[ind] = dia
  beta_factor = np.exp(
- 2 * self.params["S"][ind] * self._data.beta[ind] * self._data.dheight,
+ 2 * self.params["S"][ind] * self._data.beta[ind] * path_lengths[ind[-1]]
  )
  self.params["beta_corr"][ind[0], (ind[-1] + 1) :] *= beta_factor
 
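
Note: DrizzleSolver applies the same per-gate path length to the two-way lidar attenuation correction: each solved gate scales all beta_corr values above it by exp(2·S·β·Δz). A toy illustration of that cumulative correction (made-up numbers, not the LUT-based retrieval itself):

    import numpy as np

    S = 25.0                                      # lidar ratio (sr), made-up value
    beta = np.array([2e-6, 3e-6, 1e-6])           # attenuated backscatter (sr-1 m-1)
    path_lengths = np.array([30.0, 30.0, 45.0])   # per-gate path length (m)

    beta_corr = beta.copy()
    for i in range(len(beta)):
        factor = np.exp(2 * S * beta[i] * path_lengths[i])
        beta_corr[i + 1:] *= factor               # correct every gate above the current one
    print(beta_corr / beta)                       # cumulative two-way correction factors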
cloudnetpy/products/ier.py CHANGED
@@ -5,6 +5,7 @@ from numpy import ma
 
  from cloudnetpy import constants, output, utils
  from cloudnetpy.metadata import MetaData
+ from cloudnetpy.products.iwc import DEFINITIONS as IWC_DEFINITION
  from cloudnetpy.products.product_tools import IceClassification, IceSource
 
 
@@ -57,11 +58,10 @@ def generate_ier(
  product = "ier"
  with IerSource(categorize_file, product) as ier_source:
  ice_classification = IceClassification(categorize_file)
- ier_source.append_main_variable_including_rain(ice_classification)
- ier_source.append_main_variable(ice_classification)
+ ier_source.append_icy_data(ice_classification)
  ier_source.convert_units()
  ier_source.append_status(ice_classification)
- ier_source.append_ier_error(ice_classification)
+ ier_source.append_ier_error()
  date = ier_source.get_date()
  attributes = output.add_time_attribute(IER_ATTRIBUTES, date)
  attributes = _add_ier_comment(attributes, ier_source)
@@ -74,12 +74,10 @@ class IerSource(IceSource):
 
  def convert_units(self) -> None:
  """Convert um to m."""
- for prod in ("ier", "ier_inc_rain"):
- self.data[prod].data[:] /= 1e6
+ self.data["ier"].data[:] /= 1e6
 
- def append_ier_error(self, ice_classification: IceClassification) -> None:
- error = ma.copy(self.data[f"{self.product}_inc_rain"][:])
- error[ice_classification.ice_above_rain] = ma.masked
+ def append_ier_error(self) -> None:
+ error = ma.copy(self.data[f"{self.product}"][:])
  error = error * np.sqrt(0.4**2 + 0.4**2)
  self.append_data(error, f"{self.product}_error")
 
@@ -105,9 +103,7 @@ def _add_ier_comment(attributes: dict, ier: IerSource) -> dict:
  "data has diagnosed that the radar echo is due to ice, but note\n"
  "that in some cases supercooled drizzle will erroneously be identified\n"
  "as ice. Missing data indicates either that ice cloud was present but it was\n"
- "only detected by the lidar so its ice water content could not be estimated,\n"
- "or than there was rain below the ice associated with uncertain attenuation\n"
- "of the reflectivities in the ice.\n",
+ "only detected by the lidar so its ice water content could not be estimated."
  )
  return attributes
 
@@ -122,37 +118,9 @@ COMMENTS = {
  "This variable describes whether a retrieval was performed\n"
  "for each pixel, and its associated quality."
  ),
- "ier_inc_rain": (
- "This variable is the same as ier but it also contains ier values\n"
- "above rain. The ier values above rain have been severely affected\n"
- "by attenuation and should be used when the effect of attenuation\n"
- "is being studied."
- ),
  }
 
- DEFINITIONS = {
- "ier_retrieval_status": utils.status_field_definition(
- {
- 0: """No ice present.""",
- 1: """Reliable retrieval.""",
- 2: """Unreliable retrieval due to uncorrected attenuation from
- liquid water below the ice (no liquid water path measurement
- available).""",
- 3: """Retrieval performed but radar corrected for liquid attenuation
- using radiometer liquid water path which is not always
- accurate.""",
- 4: """Ice detected only by the lidar.""",
- 5: """Ice detected by radar but rain below so no retrieval performed
- due to very uncertain attenuation.""",
- 6: """Clear sky above rain wet-bulb temperature less than 0degC: if
- rain attenuation were strong then ice could be present but
- undetected.""",
- 7: """Drizzle or rain that would have been classified as ice if the
- wet-bulb temperature were less than 0degC: may be ice if
- temperature is in error.""",
- }
- ),
- }
+ DEFINITIONS = {"ier_retrieval_status": IWC_DEFINITION["iwc_retrieval_status"]}
 
  IER_ATTRIBUTES = {
  "ier": MetaData(
@@ -160,11 +128,6 @@ IER_ATTRIBUTES = {
  units="m",
  ancillary_variables="ier_error",
  ),
- "ier_inc_rain": MetaData(
- long_name="Ice effective radius including rain",
- units="m",
- comment=COMMENTS["ier_inc_rain"],
- ),
  "ier_error": MetaData(
  long_name="Random error in ice effective radius",
  units="m",