cloudnetpy 1.82.0__py3-none-any.whl → 1.82.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -79,12 +79,14 @@ def ws2nc(
79
79
  ws.add_date()
80
80
  ws.add_site_geolocation()
81
81
  ws.add_data()
82
+ ws.remove_duplicate_timestamps()
82
83
  ws.convert_temperature_and_humidity()
83
84
  ws.convert_pressure()
84
85
  ws.convert_rainfall_rate()
85
86
  ws.convert_rainfall_amount()
86
87
  ws.normalize_cumulative_amount("rainfall_amount")
87
88
  ws.calculate_rainfall_amount()
89
+ ws.wrap_wind_direction()
88
90
  attributes = output.add_time_attribute({}, ws.date)
89
91
  output.update_attributes(ws.data, attributes)
90
92
  output.save_level1b(ws, output_file, uuid)
@@ -148,6 +150,16 @@ class WS(CSVFile):
148
150
  def convert_rainfall_amount(self) -> None:
149
151
  pass
150
152
 
153
def wrap_wind_direction(self) -> None:
    """Fold wind-direction values slightly outside [0, 360) back into range.

    Values within ``margin`` degrees below 0 or at/above 360 are shifted by
    one full turn in place; everything else is left untouched.
    """
    if "wind_direction" not in self.data:
        return
    margin = 2
    direction = self.data["wind_direction"].data
    # The two masks are disjoint (one needs v < 0, the other v >= 360),
    # so they can be computed up front before either mutation.
    just_below_zero = (direction > -margin) & (direction < 0)
    just_above_full = (direction >= 360) & (direction < 360 + margin)
    direction[just_below_zero] += 360
    direction[just_above_full] -= 360
151
163
 
152
164
  class PalaiseauWS(WS):
153
165
  def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
cloudnetpy/output.py CHANGED
@@ -197,46 +197,52 @@ def get_source_uuids(data: Observations | list[netCDF4.Dataset | DataSource]) ->
197
197
def merge_history(
    nc: netCDF4.Dataset, file_type: str, data: Observations | DataSource
) -> None:
    """Merges history fields from one or several files and creates a new record.

    Args:
        nc: The netCDF Dataset instance receiving the merged ``history``.
        file_type: Long description of the file, used in the new record line.
        data: Single data source, or collection of observations, whose
            ``dataset.history`` attributes are merged.
    """

    def extract_history(obj: DataSource | Observations) -> list[str]:
        # Collect history lines from the object's netCDF dataset, if present.
        if hasattr(obj, "dataset") and hasattr(obj.dataset, "history"):
            history = obj.dataset.history
            if isinstance(obj, Model):
                # Model files carry long histories; keep only the last line.
                return [history.split("\n")[-1]]
            return history.split("\n")
        return []

    histories: list[str] = []
    if isinstance(data, DataSource):
        histories.extend(extract_history(data))
    elif isinstance(data, Observations):
        for field in fields(data):
            histories.extend(extract_history(getattr(data, field.name)))

    # Remove duplicates while preserving first-seen order.
    histories = list(dict.fromkeys(histories))

    def parse_time(line: str) -> datetime.datetime:
        # History lines are expected to start with
        # "YYYY-mm-dd HH:MM:SS ±zzzz - ...".
        try:
            return datetime.datetime.strptime(
                line.split(" - ")[0].strip(), "%Y-%m-%d %H:%M:%S %z"
            )
        except ValueError:
            # Malformed lines get the oldest possible timestamp so the
            # reverse sort pushes them to the bottom.
            return datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)

    # Newest history lines first, followed by the freshly created record.
    histories.sort(key=parse_time, reverse=True)
    new_record = f"{utils.get_time()} - {file_type} file created"
    nc.history = new_record + "".join(f"\n{h}" for h in histories)
228
233
 
229
234
 
230
235
def add_source_instruments(nc: netCDF4.Dataset, data: Observations) -> None:
    """Adds source attribute to categorize file."""
    # Gather the individual source lines of every observation that has a
    # `source` attribute; a set drops duplicates across instruments.
    sources: set[str] = set()
    for field in fields(data):
        obj = getattr(data, field.name)
        if hasattr(obj, "source"):
            sources.update(obj.source.split("\n"))
    if sources:
        nc.source = "\n".join(sorted(sources))
240
246
 
241
247
 
242
248
  def init_file(
cloudnetpy/version.py CHANGED
@@ -1,4 +1,4 @@
1
1
# Semantic version components of the cloudnetpy package.
MAJOR = 1
MINOR = 82
PATCH = 1
# Canonical dotted version string, e.g. "1.82.1".
__version__ = f"{MAJOR}.{MINOR}.{PATCH}"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cloudnetpy
3
- Version: 1.82.0
3
+ Version: 1.82.1
4
4
  Summary: Python package for Cloudnet processing
5
5
  Author: Simo Tukiainen
6
6
  License: MIT License
@@ -6,10 +6,10 @@ cloudnetpy/constants.py,sha256=YnoSzZm35NDooJfhlulSJBc7g0eSchT3yGytRaTaJEI,845
6
6
  cloudnetpy/datasource.py,sha256=HzvqTTHLCH9GniUsV_IWwyrvvONnFJh0tmBM61hsqxM,6364
7
7
  cloudnetpy/exceptions.py,sha256=ZB3aUwjVRznR0CcZ5sZHrB0yz13URDf52Ksv7G7C7EA,1817
8
8
  cloudnetpy/metadata.py,sha256=CFpXmdEkVPzvLPv2xHIR-aMMQ-TR26KfESYw-98j7sk,7213
9
- cloudnetpy/output.py,sha256=bUp13wv5TVtfZ-wBPU_n2qvWZa-PviozrVUhJnonbYE,14830
9
+ cloudnetpy/output.py,sha256=0bybnILsgKHWIuw2GYkqTz2iMCJDZLUN25IQ9o_v3Cg,14968
10
10
  cloudnetpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  cloudnetpy/utils.py,sha256=7PHfJo9iLMdePwEApLfYH4XiVC9DhlFQMdQTxesZylA,31797
12
- cloudnetpy/version.py,sha256=go0Zq4vIkrnCH45JYO1gxm5LNSQAgp0clDMvI3GQL7g,72
12
+ cloudnetpy/version.py,sha256=EMMHBz37udPqFCzwa7lw3YJBjdK_4TrgZP1Ji90Au4k,72
13
13
  cloudnetpy/categorize/__init__.py,sha256=gtvzWr0IDRn2oA6yHBvinEhTGTuub-JkrOv93lBsgrE,61
14
14
  cloudnetpy/categorize/atmos_utils.py,sha256=uWc9TABVYPI0sn4H5Az9Jf6NVRaWyEKIi17f0pAJQxE,10679
15
15
  cloudnetpy/categorize/attenuation.py,sha256=Y_-fzmQTltWTqIZTulJhovC7a6ifpMcaAazDJcnMIOc,990
@@ -56,7 +56,7 @@ cloudnetpy/instruments/rpg.py,sha256=R1rUdeSADvB1IMkGOF1S0rUEJDGEI_19SPrmErZpn5M
56
56
  cloudnetpy/instruments/rpg_reader.py,sha256=VbF5MN94Bmxo6DTDoUUdRd7s-S1YmvFmum4ztc7KN2g,11539
57
57
  cloudnetpy/instruments/toa5.py,sha256=CfmmBMv5iMGaWHIGBK01Rw24cuXC1R1RMNTXkmsm340,1760
58
58
  cloudnetpy/instruments/vaisala.py,sha256=tu7aljkMKep0uCWz-Sd-GuBXF_Yy421a4nHy0ffpMoc,4725
59
- cloudnetpy/instruments/weather_station.py,sha256=dRI5iN6I6lZ3zNeX4hgJly2qaieDQKCRs50bOkcGL5k,27119
59
+ cloudnetpy/instruments/weather_station.py,sha256=FuaGILEkd4MxXMpLrNGXNUjuuTkMIBf-J7y9oepIsdM,27586
60
60
  cloudnetpy/instruments/disdrometer/__init__.py,sha256=lyjwttWvFvuwYxEkusoAvgRcbBmglmOp5HJOpXUqLWo,93
61
61
  cloudnetpy/instruments/disdrometer/common.py,sha256=WCPRCfAlElUzZpllOSjjWrLG2jgkiRIy0rWz_omFoJQ,10815
62
62
  cloudnetpy/instruments/disdrometer/parsivel.py,sha256=1HIA52f1nGOvSd4SSTr2y3-JT3eKZWwdbMnIMRVvQ_U,25811
@@ -117,10 +117,10 @@ cloudnetpy/products/lwc.py,sha256=xsNiiG6dGKIkWaFk0xWTabc1bZ4ULf6SqcqHs7itAUk,19
117
117
  cloudnetpy/products/mie_lu_tables.nc,sha256=It4fYpqJXlqOgL8jeZ-PxGzP08PMrELIDVe55y9ob58,16637951
118
118
  cloudnetpy/products/mwr_tools.py,sha256=MMWnp68U7bv157-CPB2VeTQvaR6zl7sexbBT_kJ_pn8,6734
119
119
  cloudnetpy/products/product_tools.py,sha256=eyqIw_0KhlpmmYQE69RpGdRIAOW7JVPlEgkTBp2kdps,11302
120
- cloudnetpy-1.82.0.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
120
+ cloudnetpy-1.82.1.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
121
121
  docs/source/conf.py,sha256=IKiFWw6xhUd8NrCg0q7l596Ck1d61XWeVjIFHVSG9Og,1490
122
- cloudnetpy-1.82.0.dist-info/METADATA,sha256=psXUo1YNPF33S6X7i1WnEGuaMY5V1XZlYJTguJl9-0I,5836
123
- cloudnetpy-1.82.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
124
- cloudnetpy-1.82.0.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
125
- cloudnetpy-1.82.0.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
126
- cloudnetpy-1.82.0.dist-info/RECORD,,
122
+ cloudnetpy-1.82.1.dist-info/METADATA,sha256=6att3AnwvsPph-Pr9pPa8h3dP-Pu1lfIzXd_qB2RUjw,5836
123
+ cloudnetpy-1.82.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
124
+ cloudnetpy-1.82.1.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
125
+ cloudnetpy-1.82.1.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
126
+ cloudnetpy-1.82.1.dist-info/RECORD,,