cloudnetpy 1.49.9__py3-none-any.whl → 1.87.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only.
Files changed (116)
  1. cloudnetpy/categorize/__init__.py +1 -2
  2. cloudnetpy/categorize/atmos_utils.py +297 -67
  3. cloudnetpy/categorize/attenuation.py +31 -0
  4. cloudnetpy/categorize/attenuations/__init__.py +37 -0
  5. cloudnetpy/categorize/attenuations/gas_attenuation.py +30 -0
  6. cloudnetpy/categorize/attenuations/liquid_attenuation.py +84 -0
  7. cloudnetpy/categorize/attenuations/melting_attenuation.py +78 -0
  8. cloudnetpy/categorize/attenuations/rain_attenuation.py +84 -0
  9. cloudnetpy/categorize/categorize.py +332 -156
  10. cloudnetpy/categorize/classify.py +127 -125
  11. cloudnetpy/categorize/containers.py +107 -76
  12. cloudnetpy/categorize/disdrometer.py +40 -0
  13. cloudnetpy/categorize/droplet.py +23 -21
  14. cloudnetpy/categorize/falling.py +53 -24
  15. cloudnetpy/categorize/freezing.py +25 -12
  16. cloudnetpy/categorize/insects.py +35 -23
  17. cloudnetpy/categorize/itu.py +243 -0
  18. cloudnetpy/categorize/lidar.py +36 -41
  19. cloudnetpy/categorize/melting.py +34 -26
  20. cloudnetpy/categorize/model.py +84 -37
  21. cloudnetpy/categorize/mwr.py +18 -14
  22. cloudnetpy/categorize/radar.py +215 -102
  23. cloudnetpy/cli.py +578 -0
  24. cloudnetpy/cloudnetarray.py +43 -89
  25. cloudnetpy/concat_lib.py +218 -78
  26. cloudnetpy/constants.py +28 -10
  27. cloudnetpy/datasource.py +61 -86
  28. cloudnetpy/exceptions.py +49 -20
  29. cloudnetpy/instruments/__init__.py +5 -0
  30. cloudnetpy/instruments/basta.py +29 -12
  31. cloudnetpy/instruments/bowtie.py +135 -0
  32. cloudnetpy/instruments/ceilo.py +138 -115
  33. cloudnetpy/instruments/ceilometer.py +164 -80
  34. cloudnetpy/instruments/cl61d.py +21 -5
  35. cloudnetpy/instruments/cloudnet_instrument.py +74 -36
  36. cloudnetpy/instruments/copernicus.py +108 -30
  37. cloudnetpy/instruments/da10.py +54 -0
  38. cloudnetpy/instruments/disdrometer/common.py +126 -223
  39. cloudnetpy/instruments/disdrometer/parsivel.py +453 -94
  40. cloudnetpy/instruments/disdrometer/thies.py +254 -87
  41. cloudnetpy/instruments/fd12p.py +201 -0
  42. cloudnetpy/instruments/galileo.py +65 -23
  43. cloudnetpy/instruments/hatpro.py +123 -49
  44. cloudnetpy/instruments/instruments.py +113 -1
  45. cloudnetpy/instruments/lufft.py +39 -17
  46. cloudnetpy/instruments/mira.py +268 -61
  47. cloudnetpy/instruments/mrr.py +187 -0
  48. cloudnetpy/instruments/nc_lidar.py +19 -8
  49. cloudnetpy/instruments/nc_radar.py +109 -55
  50. cloudnetpy/instruments/pollyxt.py +135 -51
  51. cloudnetpy/instruments/radiometrics.py +313 -59
  52. cloudnetpy/instruments/rain_e_h3.py +171 -0
  53. cloudnetpy/instruments/rpg.py +321 -189
  54. cloudnetpy/instruments/rpg_reader.py +74 -40
  55. cloudnetpy/instruments/toa5.py +49 -0
  56. cloudnetpy/instruments/vaisala.py +95 -343
  57. cloudnetpy/instruments/weather_station.py +774 -105
  58. cloudnetpy/metadata.py +90 -19
  59. cloudnetpy/model_evaluation/file_handler.py +55 -52
  60. cloudnetpy/model_evaluation/metadata.py +46 -20
  61. cloudnetpy/model_evaluation/model_metadata.py +1 -1
  62. cloudnetpy/model_evaluation/plotting/plot_tools.py +32 -37
  63. cloudnetpy/model_evaluation/plotting/plotting.py +327 -117
  64. cloudnetpy/model_evaluation/products/advance_methods.py +92 -83
  65. cloudnetpy/model_evaluation/products/grid_methods.py +88 -63
  66. cloudnetpy/model_evaluation/products/model_products.py +43 -35
  67. cloudnetpy/model_evaluation/products/observation_products.py +41 -35
  68. cloudnetpy/model_evaluation/products/product_resampling.py +17 -7
  69. cloudnetpy/model_evaluation/products/tools.py +29 -20
  70. cloudnetpy/model_evaluation/statistics/statistical_methods.py +30 -20
  71. cloudnetpy/model_evaluation/tests/e2e/conftest.py +3 -3
  72. cloudnetpy/model_evaluation/tests/e2e/process_cf/main.py +9 -5
  73. cloudnetpy/model_evaluation/tests/e2e/process_cf/tests.py +15 -14
  74. cloudnetpy/model_evaluation/tests/e2e/process_iwc/main.py +9 -5
  75. cloudnetpy/model_evaluation/tests/e2e/process_iwc/tests.py +15 -14
  76. cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py +9 -5
  77. cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py +15 -14
  78. cloudnetpy/model_evaluation/tests/unit/conftest.py +42 -41
  79. cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py +41 -48
  80. cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py +216 -194
  81. cloudnetpy/model_evaluation/tests/unit/test_model_products.py +23 -21
  82. cloudnetpy/model_evaluation/tests/unit/test_observation_products.py +37 -38
  83. cloudnetpy/model_evaluation/tests/unit/test_plot_tools.py +43 -40
  84. cloudnetpy/model_evaluation/tests/unit/test_plotting.py +30 -36
  85. cloudnetpy/model_evaluation/tests/unit/test_statistical_methods.py +68 -31
  86. cloudnetpy/model_evaluation/tests/unit/test_tools.py +33 -26
  87. cloudnetpy/model_evaluation/utils.py +2 -1
  88. cloudnetpy/output.py +170 -111
  89. cloudnetpy/plotting/__init__.py +2 -1
  90. cloudnetpy/plotting/plot_meta.py +562 -822
  91. cloudnetpy/plotting/plotting.py +1142 -704
  92. cloudnetpy/products/__init__.py +1 -0
  93. cloudnetpy/products/classification.py +370 -88
  94. cloudnetpy/products/der.py +85 -55
  95. cloudnetpy/products/drizzle.py +77 -34
  96. cloudnetpy/products/drizzle_error.py +15 -11
  97. cloudnetpy/products/drizzle_tools.py +79 -59
  98. cloudnetpy/products/epsilon.py +211 -0
  99. cloudnetpy/products/ier.py +27 -50
  100. cloudnetpy/products/iwc.py +55 -48
  101. cloudnetpy/products/lwc.py +96 -70
  102. cloudnetpy/products/mwr_tools.py +186 -0
  103. cloudnetpy/products/product_tools.py +170 -128
  104. cloudnetpy/utils.py +455 -240
  105. cloudnetpy/version.py +2 -2
  106. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/METADATA +44 -40
  107. cloudnetpy-1.87.3.dist-info/RECORD +127 -0
  108. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/WHEEL +1 -1
  109. cloudnetpy-1.87.3.dist-info/entry_points.txt +2 -0
  110. docs/source/conf.py +2 -2
  111. cloudnetpy/categorize/atmos.py +0 -361
  112. cloudnetpy/products/mwr_multi.py +0 -68
  113. cloudnetpy/products/mwr_single.py +0 -75
  114. cloudnetpy-1.49.9.dist-info/RECORD +0 -112
  115. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info/licenses}/LICENSE +0 -0
  116. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/top_level.txt +0 -0
cloudnetpy/output.py CHANGED
@@ -1,49 +1,57 @@
-""" Functions for file writing."""
+"""Functions for file writing."""
+
 import datetime
 import logging
+from dataclasses import fields
+from os import PathLike
+from uuid import UUID
 
 import netCDF4
-import numpy as np
 from numpy import ma
 
 from cloudnetpy import utils, version
+from cloudnetpy.categorize.containers import Observations
+from cloudnetpy.categorize.model import Model
+from cloudnetpy.datasource import DataSource
 from cloudnetpy.instruments.instruments import Instrument
-from cloudnetpy.metadata import COMMON_ATTRIBUTES, MetaData
+from cloudnetpy.metadata import COMMON_ATTRIBUTES
 
 
-def save_level1b(obj, output_file: str, uuid: str | None = None) -> str:
+def save_level1b(
+    obj,  # noqa: ANN001
+    output_file: PathLike | str,
+    uuid: UUID,
+) -> None:
     """Saves Cloudnet Level 1b file."""
     dimensions = _get_netcdf_dimensions(obj)
     with init_file(output_file, dimensions, obj.data, uuid) as nc:
-        file_uuid = nc.file_uuid
         fix_attribute_name(nc)
         location = obj.site_meta["name"]
         nc.cloudnet_file_type = obj.instrument.domain
         nc.title = get_l1b_title(obj.instrument, location)
-        if isinstance(obj.date, list):
-            nc.year, nc.month, nc.day = obj.date
-        elif isinstance(obj.date, datetime.date):
-            nc.year = str(obj.date.year)
-            nc.month = str(obj.date.month).zfill(2)
-            nc.day = str(obj.date.day).zfill(2)
-        else:
-            raise TypeError
+        nc.year = str(obj.date.year)
+        nc.month = str(obj.date.month).zfill(2)
+        nc.day = str(obj.date.day).zfill(2)
         nc.location = location
         nc.history = get_l1b_history(obj.instrument)
         nc.source = get_l1b_source(obj.instrument)
         if hasattr(obj, "serial_number") and obj.serial_number is not None:
             nc.serial_number = obj.serial_number
+        if hasattr(obj, "software"):
+            for software, version in obj.software.items():
+                nc.setncattr(f"{software}_version", version)
         nc.references = get_references()
-        return file_uuid
 
 
-def _get_netcdf_dimensions(obj) -> dict:
+def _get_netcdf_dimensions(obj) -> dict:  # noqa: ANN001
     dimensions = {
         key: len(obj.data[key][:]) for key in ("time", "range") if key in obj.data
     }
     # RPG cloud radar
     if "chirp_start_indices" in obj.data:
-        dimensions["chirp_sequence"] = len(obj.data["chirp_start_indices"][:])
+        ind = obj.data["chirp_start_indices"][:]
+        dimensions["chirp_sequence"] = ind.shape[1] if ind.ndim > 1 else len(ind)
+
     # disdrometer
     if hasattr(obj, "n_diameter") and hasattr(obj, "n_velocity"):
         dimensions["diameter"] = obj.n_diameter
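Note the breaking change above: save_level1b() now requires a caller-supplied uuid.UUID and returns None, where it previously generated a UUID itself and returned it as a string. A minimal sketch of how calling code might adapt (the write_l1b wrapper is illustrative, not part of the package; obj stands for an already-processed instrument object):

    import uuid
    from os import PathLike

    from cloudnetpy import output

    def write_l1b(obj, filename: PathLike | str) -> uuid.UUID:
        """Keep the old 'returns the UUID' behaviour on top of the new API."""
        file_uuid = uuid.uuid4()  # the caller now generates the UUID
        output.save_level1b(obj, filename, file_uuid)  # returns None in 1.87.x
        return file_uuid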
@@ -62,11 +70,11 @@ def _get_netcdf_dimensions(obj) -> dict:
 
 def save_product_file(
     short_id: str,
-    obj,
-    file_name: str,
-    uuid: str | None = None,
+    obj: DataSource,
+    file_name: str | PathLike,
+    uuid: UUID,
     copy_from_cat: tuple = (),
-) -> str:
+) -> None:
     """Saves a standard Cloudnet product file.
 
     Args:
@@ -83,7 +91,6 @@ def save_product_file(
         "height": len(obj.dataset.variables["height"]),
     }
     with init_file(file_name, dimensions, obj.data, uuid) as nc:
-        file_uuid = nc.file_uuid
         nc.cloudnet_file_type = short_id
         vars_from_source = (
             "altitude",
@@ -91,24 +98,37 @@ def save_product_file(
             "longitude",
             "time",
             "height",
-        ) + copy_from_cat
+            *copy_from_cat,
+        )
         copy_variables(obj.dataset, nc, vars_from_source)
         nc.title = (
             f"{human_readable_file_type.capitalize()} products from"
             f" {obj.dataset.location}"
         )
-        nc.source_file_uuids = get_source_uuids(nc, obj)
-        copy_global(obj.dataset, nc, ("location", "day", "month", "year", "source"))
-        merge_history(nc, human_readable_file_type, {"categorize": obj})
+        nc.source_file_uuids = get_source_uuids([nc, obj])
+        copy_global(
+            obj.dataset,
+            nc,
+            (
+                "location",
+                "day",
+                "month",
+                "year",
+                "source",
+                "source_instrument_pids",
+                "voodoonet_version",
+            ),
+        )
+        merge_history(nc, human_readable_file_type, obj)
         nc.references = get_references(short_id)
-        return file_uuid
 
 
 def get_l1b_source(instrument: Instrument) -> str:
     """Returns level 1b file source."""
-    return " ".join(
+    parts = [
         item for item in [instrument.manufacturer, instrument.model] if item is not None
-    )
+    ]
+    return " ".join(parts) if parts else instrument.category.capitalize()
 
 
 def get_l1b_history(instrument: Instrument) -> str:
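save_product_file() follows the same pattern: the UUID becomes a required argument rather than a return value, extra categorize variables are spliced into the copied tuple with *copy_from_cat, and two more global attributes (source_instrument_pids, voodoonet_version) are carried over. A hedged usage sketch, assuming obj is a prepared DataSource and using hypothetical file and variable names:

    import uuid

    from cloudnetpy import output

    product_uuid = uuid.uuid4()
    output.save_product_file(
        "iwc",  # must be one of the ids accepted by _get_identifier()
        obj,
        "20240101_site_iwc.nc",
        product_uuid,
        copy_from_cat=("rain_detected",),  # hypothetical extra categorize variable
    )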
@@ -128,10 +148,11 @@ def get_l1b_title(instrument: Instrument, location: str) -> str:
 
 
 def get_references(identifier: str | None = None, extra: list | None = None) -> str:
-    """ "Returns references.
+    """Returns references.
 
     Args:
         identifier: Cloudnet file type, e.g., 'iwc'.
+        extra: List of additional references to include
 
     """
     references = "https://doi.org/10.21105/joss.02123"
@@ -158,63 +179,93 @@ def get_references(identifier: str | None = None, extra: list | None = None) ->
     return references
 
 
-def get_source_uuids(*sources) -> str:
+def get_source_uuids(data: Observations | list[netCDF4.Dataset | DataSource]) -> str:
     """Returns file_uuid attributes of objects.
 
     Args:
-        *sources: Objects whose file_uuid attributes are read (if exist).
+        data: Observations instance.
 
     Returns:
         str: UUIDs separated by comma.
 
     """
+    if isinstance(data, Observations):
+        obs = [getattr(data, field.name) for field in fields(data)]
+    elif isinstance(data, list):
+        obs = data
     uuids = [
-        source.dataset.file_uuid
-        for source in sources
-        if hasattr(source, "dataset") and hasattr(source.dataset, "file_uuid")
+        obj.dataset.file_uuid
+        for obj in obs
+        if hasattr(obj, "dataset") and hasattr(obj.dataset, "file_uuid")
     ]
-    unique_uuids = list(set(uuids))
+    unique_uuids = sorted(set(uuids))
     return ", ".join(unique_uuids)
 
 
-def merge_history(nc: netCDF4.Dataset, file_type: str, data: dict) -> None:
-    """Merges history fields from one or several files and creates a new record.
+def merge_history(
+    nc: netCDF4.Dataset, file_type: str, data: Observations | DataSource
+) -> None:
+    """Merges history fields from one or several files and creates a new record."""
 
-    Args:
-        nc: The netCDF Dataset instance.
-        file_type: Long description of the file.
-        data: Dictionary of objects with history attribute.
+    def extract_history(obj: DataSource | Observations) -> list[str]:
+        if hasattr(obj, "dataset") and hasattr(obj.dataset, "history"):
+            history = obj.dataset.history
+            if isinstance(obj, Model):
+                return [history.split("\n")[-1]]
+            return history.split("\n")
+        return []
+
+    histories: list[str] = []
+    if isinstance(data, DataSource):
+        histories.extend(extract_history(data))
+    elif isinstance(data, Observations):
+        for field in fields(data):
+            histories.extend(extract_history(getattr(data, field.name)))
+
+    # Remove duplicates
+    histories = list(dict.fromkeys(histories))
+
+    def parse_time(line: str) -> datetime.datetime:
+        try:
+            return datetime.datetime.strptime(
+                line.split(" - ")[0].strip(), "%Y-%m-%d %H:%M:%S %z"
+            )
+        except ValueError:
+            return datetime.datetime.min.replace(
+                tzinfo=datetime.timezone.utc
+            )  # malformed lines to bottom
 
-    """
+    histories.sort(key=parse_time, reverse=True)
     new_record = f"{utils.get_time()} - {file_type} file created"
-    histories = []
-    for key, obj in data.items():
-        if (
-            not isinstance(obj, (str, list))
-            and obj is not None
-            and hasattr(obj.dataset, "history")
-        ):
-            history = obj.dataset.history
-            history = history.split("\n")[-1] if key == "model" else history
-            histories.append(history)
-    histories.sort(reverse=True)
-    old_history = [f"\n{history}" for history in histories]
-    old_history_str = "".join(old_history)
-    nc.history = f"{new_record}{old_history_str}"
+    nc.history = new_record + "".join(f"\n{h}" for h in histories)
 
 
-def add_source_instruments(nc: netCDF4.Dataset, data: dict) -> None:
+def add_source_instruments(nc: netCDF4.Dataset, data: Observations) -> None:
     """Adds source attribute to categorize file."""
-    sources = [obj.source for obj in data.values() if hasattr(obj, "source")]
-    sources = [sources[0]] + [f"\n{source}" for source in sources[1:]]
-    nc.source = "".join(sources)
+    sources = {
+        src
+        for field in fields(data)
+        for obj in [getattr(data, field.name)]
+        if hasattr(obj, "source")
+        for src in obj.source.split("\n")
+    }
+    if sources:
+        nc.source = "\n".join(sorted(sources))
+    source_pids = {
+        obj.instrument_pid
+        for field in fields(data)
+        for obj in [getattr(data, field.name)]
+        if getattr(obj, "instrument_pid", "")
+    }
+    if source_pids:
+        nc.source_instrument_pids = "\n".join(sorted(source_pids))
 
 
 def init_file(
-    file_name: str,
+    file_name: PathLike | str,
     dimensions: dict,
     cloudnet_arrays: dict,
-    uuid: str | None = None,
+    uuid: UUID,
 ) -> netCDF4.Dataset:
     """Initializes a Cloudnet file for writing.
 
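merge_history() above no longer sorts old history records lexically: it splits each record on " - ", parses the leading timestamp, and orders the records newest first, pushing unparseable lines to the bottom. A self-contained check of that parsing step (the sample line only mimics the "%Y-%m-%d %H:%M:%S %z" format the code expects):

    import datetime

    line = "2024-01-02 03:04:05 +00:00 - ceilometer file created"
    stamp = datetime.datetime.strptime(
        line.split(" - ")[0].strip(), "%Y-%m-%d %H:%M:%S %z"
    )
    print(stamp.isoformat())  # 2024-01-02T03:04:05+00:00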
@@ -234,7 +285,9 @@ def init_file(
 
 
 def copy_variables(
-    source: netCDF4.Dataset, target: netCDF4.Dataset, keys: tuple
+    source: netCDF4.Dataset,
+    target: netCDF4.Dataset,
+    keys: tuple,
 ) -> None:
     """Copies variables (and their attributes) from one file to another.
 
@@ -252,6 +305,7 @@
                 key,
                 variable.datatype,
                 variable.dimensions,
+                zlib=True,
                 fill_value=fill_value,
             )
             var_out.setncatts(
@@ -259,13 +313,15 @@
                     k: variable.getncattr(k)
                     for k in variable.ncattrs()
                     if k != "_FillValue"
-                }
+                },
             )
             var_out[:] = variable[:]
 
 
 def copy_global(
-    source: netCDF4.Dataset, target: netCDF4.Dataset, attributes: tuple
+    source: netCDF4.Dataset,
+    target: netCDF4.Dataset,
+    attributes: tuple,
 ) -> None:
     """Copies global attributes from one file to another.
 
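copy_variables() now passes zlib=True to createVariable(), so variables copied from a source file are written compressed. The underlying netCDF4 pattern, sketched with hypothetical file and variable names:

    import netCDF4

    with netCDF4.Dataset("source.nc") as src, netCDF4.Dataset("out.nc", "w") as dst:
        for name, dim in src.dimensions.items():
            dst.createDimension(name, len(dim))
        var = src.variables["height"]  # assumed to exist in source.nc
        out = dst.createVariable("height", var.datatype, var.dimensions, zlib=True)
        out.setncatts({k: var.getncattr(k) for k in var.ncattrs() if k != "_FillValue"})
        out[:] = var[:]  # data is deflated on write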
@@ -282,25 +338,21 @@
 
 
 def add_time_attribute(
-    attributes: dict, date: list[str] | datetime.date, key: str = "time"
+    attributes: dict,
+    date: datetime.date,
+    key: str = "time",
 ) -> dict:
     """Adds time attribute with correct units."""
-    if isinstance(date, list):
-        date_str = "-".join(date)
-    elif isinstance(date, datetime.date):
-        date_str = date.isoformat()
-    else:
-        raise TypeError
+    date_str = date.isoformat()
     units = f"hours since {date_str} 00:00:00 +00:00"
     if key not in attributes:
-        attributes[key] = MetaData(units=units)
-    else:
-        attributes[key] = attributes[key]._replace(units=units)
+        attributes[key] = COMMON_ATTRIBUTES[key]
+    attributes[key] = attributes[key]._replace(units=units)
     return attributes
 
 
-def add_source_attribute(attributes: dict, data: dict):
-    """Adds source attribute."""
+def add_source_attribute(attributes: dict, data: Observations) -> dict:
+    """Adds source attribute to variables."""
     variables = {
         "radar": (
             "v",
@@ -317,14 +369,19 @@ def add_source_attribute(attributes: dict, data: dict):
         "lidar": ("beta", "lidar_wavelength"),
         "mwr": ("lwp",),
         "model": ("uwind", "vwind", "Tw", "q", "pressure", "temperature"),
+        "disdrometer": ("rainfall_rate",),
     }
     for instrument, keys in variables.items():
-        source = data[instrument].dataset.source
+        if getattr(data, instrument) is None:
+            continue
+        source = getattr(data, instrument).dataset.source
+        source_pid = getattr(getattr(data, instrument).dataset, "instrument_pid", None)
         for key in keys:
-            if key in attributes:
-                attributes[key] = attributes[key]._replace(source=source)
-            else:
-                attributes[key] = MetaData(source=source)
+            if key not in attributes:
+                attributes[key] = COMMON_ATTRIBUTES[key]
+            attributes[key] = attributes[key]._replace(
+                source=source, source_instrument_pid=source_pid
+            )
     return attributes
 
 
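add_time_attribute() above now accepts only datetime.date (lists of date strings are no longer supported) and seeds a missing key from COMMON_ATTRIBUTES before overriding its units. Expected behaviour, assuming COMMON_ATTRIBUTES defines a "time" entry as in cloudnetpy.metadata:

    import datetime

    from cloudnetpy.output import add_time_attribute

    attributes = add_time_attribute({}, datetime.date(2024, 1, 1))
    print(attributes["time"].units)  # hours since 2024-01-01 00:00:00 +00:00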
@@ -353,34 +410,36 @@ def _write_vars2nc(nc: netCDF4.Dataset, cloudnet_variables: dict) -> None:
             fill_value = netCDF4.default_fillvals[obj.data_type]
         else:
             fill_value = False
-
-        size = obj.dimensions or _get_dimensions(nc, obj.data)
-
+        size = obj.dimensions if obj.dimensions is not None else ()
         nc_variable = nc.createVariable(
-            obj.name, obj.data_type, size, zlib=True, fill_value=fill_value
+            obj.name,
+            obj.data_type,
+            size,
+            zlib=True,
+            fill_value=fill_value,
         )
-        nc_variable[:] = obj.data
+        try:
+            nc_variable[:] = obj.data
+        except IndexError as err:
+            msg = f"Unable to write variable {obj.name} to file: {err}"
+            raise IndexError(msg) from err
         for attr in obj.fetch_attributes():
             setattr(nc_variable, attr, getattr(obj, attr))
 
 
-def _get_dimensions(nc: netCDF4.Dataset, data: np.ndarray) -> tuple:
-    """Finds correct dimensions for a variable."""
-    if utils.isscalar(data):
-        return ()
-    variable_size: list = []
-    file_dims = nc.dimensions
-    array_dims = data.shape
-    for length in array_dims:
-        dim = [key for key in file_dims.keys() if file_dims[key].size == length][0]
-        variable_size = variable_size + [dim]
-    return tuple(variable_size)
-
-
 def _get_identifier(short_id: str) -> str:
-    valid_ids = ("lwc", "iwc", "drizzle", "classification", "der", "ier")
+    valid_ids = (
+        "lwc",
+        "iwc",
+        "drizzle",
+        "classification",
+        "der",
+        "ier",
+        "classification-voodoo",
+    )
     if short_id not in valid_ids:
-        raise ValueError("Invalid product id.")
+        msg = f"Invalid file identifier: {short_id}"
+        raise ValueError(msg)
     if short_id == "iwc":
         return "ice water content"
     if short_id == "lwc":
@@ -392,12 +451,10 @@ def _get_identifier(short_id: str) -> str:
     return short_id
 
 
-def add_standard_global_attributes(
-    nc: netCDF4.Dataset, uuid: str | None = None
-) -> None:
+def add_standard_global_attributes(nc: netCDF4.Dataset, uuid: UUID) -> None:
     nc.Conventions = "CF-1.8"
     nc.cloudnetpy_version = version.__version__
-    nc.file_uuid = uuid or utils.get_uuid()
+    nc.file_uuid = str(uuid)
 
 
 def fix_attribute_name(nc: netCDF4.Dataset) -> None:
@@ -417,14 +474,16 @@ def fix_time_attributes(nc: netCDF4.Dataset) -> None:
     nc.variables["time"].standard_name = "time"
     nc.variables["time"].long_name = "Time UTC"
     nc.variables["time"].calendar = "standard"
-    nc.variables["time"].units = (
-        f"hours since " f"{nc.year}-{nc.month}-{nc.day} " f"00:00:00 +00:00"
-    )
+    nc.variables[
+        "time"
+    ].units = f"hours since {nc.year}-{nc.month}-{nc.day} 00:00:00 +00:00"
 
 
 def replace_attribute_with_standard_value(
-    nc: netCDF4.Dataset, variables: tuple, attributes: tuple
-):
+    nc: netCDF4.Dataset,
+    variables: tuple,
+    attributes: tuple,
+) -> None:
     for key in variables:
         if key in COMMON_ATTRIBUTES and key in nc.variables:
             for attr in attributes:
cloudnetpy/plotting/__init__.py CHANGED
@@ -1 +1,2 @@
-from .plotting import compare_files, generate_figure, plot_2d
+from .plot_meta import PlotMeta
+from .plotting import Dimensions, PlotParameters, generate_figure, plot_2d
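Downstream code importing compare_files from cloudnetpy.plotting will break; the package now exposes PlotMeta, Dimensions, and PlotParameters instead. A hedged sketch of the new entry point (the exact PlotParameters fields and generate_figure keywords should be verified against this release; the file name is hypothetical):

    from cloudnetpy.plotting import PlotParameters, generate_figure

    options = PlotParameters()  # dataclass of figure options, defaults here
    generate_figure(
        "20240101_site_classification.nc",
        ["target_classification"],
        output_filename="classification.png",
        options=options,
    )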