tfv_get_tools-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. tfv_get_tools/__init__.py +4 -0
  2. tfv_get_tools/_standard_attrs.py +107 -0
  3. tfv_get_tools/atmos.py +167 -0
  4. tfv_get_tools/cli/_cli_base.py +173 -0
  5. tfv_get_tools/cli/atmos_cli.py +192 -0
  6. tfv_get_tools/cli/ocean_cli.py +204 -0
  7. tfv_get_tools/cli/tide_cli.py +118 -0
  8. tfv_get_tools/cli/wave_cli.py +183 -0
  9. tfv_get_tools/fvc/__init__.py +3 -0
  10. tfv_get_tools/fvc/_atmos.py +230 -0
  11. tfv_get_tools/fvc/_fvc.py +218 -0
  12. tfv_get_tools/fvc/_ocean.py +171 -0
  13. tfv_get_tools/fvc/_tide.py +195 -0
  14. tfv_get_tools/ocean.py +170 -0
  15. tfv_get_tools/providers/__init__.py +0 -0
  16. tfv_get_tools/providers/_custom_conversions.py +34 -0
  17. tfv_get_tools/providers/_downloader.py +566 -0
  18. tfv_get_tools/providers/_merger.py +520 -0
  19. tfv_get_tools/providers/_utilities.py +255 -0
  20. tfv_get_tools/providers/atmos/barra2.py +209 -0
  21. tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
  22. tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
  23. tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
  24. tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
  25. tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
  26. tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
  27. tfv_get_tools/providers/atmos/cfsr.py +207 -0
  28. tfv_get_tools/providers/atmos/era5.py +20 -0
  29. tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
  30. tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
  31. tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
  32. tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
  33. tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
  34. tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
  35. tfv_get_tools/providers/ocean/hycom.py +611 -0
  36. tfv_get_tools/providers/wave/cawcr.py +166 -0
  37. tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
  38. tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
  39. tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
  40. tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
  41. tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
  42. tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
  43. tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
  44. tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
  45. tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
  46. tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
  47. tfv_get_tools/providers/wave/era5.py +232 -0
  48. tfv_get_tools/providers/wave/era5_gcp.py +169 -0
  49. tfv_get_tools/tide/__init__.py +2 -0
  50. tfv_get_tools/tide/_nodestring.py +214 -0
  51. tfv_get_tools/tide/_tidal_base.py +568 -0
  52. tfv_get_tools/utilities/_tfv_bc.py +78 -0
  53. tfv_get_tools/utilities/horizontal_padding.py +89 -0
  54. tfv_get_tools/utilities/land_masking.py +93 -0
  55. tfv_get_tools/utilities/parsers.py +44 -0
  56. tfv_get_tools/utilities/warnings.py +38 -0
  57. tfv_get_tools/wave.py +179 -0
  58. tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
  59. tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
  60. tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
  61. tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
  62. tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
tfv_get_tools/providers/_merger.py
@@ -0,0 +1,520 @@
+"""Base merger class, inherited by all source specific mergers."""
+
+import re
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import List, Optional, Tuple, Union
+
+import numpy as np
+import pandas as pd
+import xarray as xr
+from dask.diagnostics.progress import ProgressBar
+from pandas.tseries.offsets import MonthBegin, MonthEnd
+from pyproj import CRS, Transformer
+
+from tfv_get_tools._standard_attrs import STDVARS
+from tfv_get_tools.fvc import write_atmos_fvc, write_ocean_fvc
+from tfv_get_tools.providers._custom_conversions import *
+from tfv_get_tools.providers._utilities import _get_config, wrap_longitude
+from tfv_get_tools.utilities.horizontal_padding import horizontal_pad
+from tfv_get_tools.utilities.land_masking import mask_land_data
+from tfv_get_tools.utilities.parsers import _parse_date, _parse_path
+
+
+def check_path(path: Path) -> Path:
+    """Helper function to check if a path exists and create it if it doesn't."""
+    path = Path(path)
+    path.mkdir(parents=True, exist_ok=True)
+    return path
+
+
+class BaseMerger(ABC):
+    """Base class for merger operations."""
+
+    def __init__(
+        self,
+        in_path: Path = Path("./raw"),
+        out_path: Path = Path("."),
+        fname: Optional[str] = None,
+        time_start: Optional[str] = None,
+        time_end: Optional[str] = None,
+        write_fvc: bool = True,
+        reproject: Optional[int] = None,
+        local_tz: Optional[Tuple[float, str]] = None,
+        pad_dry: bool = False,
+        wrapto360: bool = False,
+        source: str = "HYCOM",
+        mode: str = "OCEAN",
+        model: str = "default",
+        execute: bool = True,
+        write: bool = True,
+        verbose: bool = True,
+    ):
+        """
+        Initialize the BaseMerger with parameters for merging and processing data files.
+
+        Args:
+            in_path: Directory of the raw data files
+            out_path: Output directory for the merged netcdf and (opt) the fvc
+            fname: Merged netcdf filename
+            time_start: Start time limit of the merged dataset (format: "YYYY-mm-dd HH:MM")
+            time_end: End time limit of the merged dataset (format: "YYYY-mm-dd HH:MM")
+            write_fvc: Whether to write an accompanying .fvc file
+            reproject: EPSG code for optional reprojection
+            local_tz: Local timezone info as (Offset[float], Label[str])
+            pad_dry: Whether to pad horizontally
+            wrapto360: Whether to wrap longitudes to 360 degrees
+            source: Source name {HYCOM, COPERNICUS}
+            mode: Mode name
+            model: Model name
+            execute: Execute processing
+            write: Write the dataset. If False, access via `.ds`
+            verbose: Enable verbose output
+        """
+        # Parse and validate inputs
+        self.in_path = _parse_path(in_path)
+        self.out_path = _parse_path(out_path)
+
+        # Time indexing
+        self.ts = _parse_date(time_start) if time_start is not None else None
+        self.te = _parse_date(time_end) if time_end is not None else None
+        self.time_slice = slice(self.ts, self.te)
+
+        # Validate and set attributes
+        self.fname = self._validate_fname(fname)
+        self.write_fvc = write_fvc
+        self.reproject = self._validate_reproject(reproject)
+        self.local_tz = self._validate_local_tz(local_tz)
+        self.pad_dry = pad_dry
+        self.wrapto360 = wrapto360
+        self.verbose = verbose
+
+        # Source/mode/model attributes
+        self.mode = mode
+        self.source = source
+        self.model = model if model else "default"
+        self.write = write
+
+        # Initialize source-specific settings
+        self._init_specific()
+
+        if self.verbose:
+            self._print_config()
+
+        if execute:
+            self.process()
+
+    def _print_config(self) -> None:
+        """Print configuration information."""
+        print("Running TUFLOW FV Boundary Condition Merger")
+        print(f"Source: {self.source}")
+        print(f"Mode: {self.mode}")
+        if self.model != "default":
+            print(f"Model: {self.model}")
+        print("\nOpening raw files and preparing to interpolate all to a common grid")
+        print("This step can take a while, please wait")
+
+    @staticmethod
+    def _validate_fname(fname: Optional[str]) -> Optional[str]:
+        """Validate filename."""
+        if fname and not fname.endswith(".nc"):
+            raise ValueError("Filename must end with '.nc'")
+        return fname
+
+    @staticmethod
+    def _validate_reproject(reproject: Optional[int]) -> Optional[int]:
+        """Validate EPSG code for reprojection."""
+        if reproject and not (1000 <= reproject <= 32767):
+            raise ValueError("Invalid EPSG code for reprojection")
+        return reproject
+
+    @staticmethod
+    def _validate_local_tz(
+        local_tz: Optional[Tuple[float, str]],
+    ) -> Optional[Tuple[float, str]]:
+        """Validate local timezone information."""
+        if local_tz:
+            if not isinstance(local_tz, tuple) or len(local_tz) != 2:
+                raise ValueError("local_tz must be a tuple of (float, str)")
+            if not isinstance(local_tz[0], (int, float)) or not isinstance(
+                local_tz[1], str
+            ):
+                raise ValueError("local_tz must be in the format (float, str)")
+        return local_tz
+
+    def _load_config(self) -> None:
+        """Load configuration for the specific source/mode/model combination."""
+        cfg, _ = _get_config(self.mode, self.source, self.model)
+        self.cfg = cfg
+
+    @abstractmethod
+    def _init_specific(self) -> None:
+        """Initialize source-specific settings. Must be implemented by subclasses."""
+        pass
+
+    def _open_subset_netcdf(
+        self, file: Path, time: Union[str, Tuple[str, ...]] = "time", **kwargs
+    ) -> Optional[xr.Dataset]:
+        """
+        Open a subset netcdf file and validate for merging.
+
+        Args:
+            file: Path to the netcdf file
+            time: Time coordinate name(s)
+            **kwargs: Additional arguments for xr.open_dataset
+
+        Returns:
+            Dataset or None if file cannot be opened
+        """
+        chunks = kwargs.pop("chunks", dict(time=24))
+
+        try:
+            ds = xr.open_dataset(file, chunks=chunks, **kwargs)
+
+            # Handle multiple possible time coordinate names
+            if isinstance(time, tuple):
+                time_var = None
+                for t in time:
+                    if t in ds:
+                        time_var = t
+                        break
+                time = time_var
+
+            if time and pd.api.types.is_datetime64_any_dtype(ds[time]):
+                return ds
+            else:
+                if self.verbose:
+                    print(f"Skipping file {file.name} - time error")
+                return None
+
+        except Exception as e:
+            if self.verbose:
+                print(f"Skipping file {file.name}: {str(e)}")
+            return None
+
+    def _filter_files_by_time(self, file_list: List[Path]) -> List[Path]:
+        """
+        Filter files based on time constraints.
+
+        Args:
+            file_list: List of file paths
+
+        Returns:
+            Filtered list of file paths
+        """
+        if self.ts is None and self.te is None:
+            return file_list
+
+        download_interval = self.cfg.get("_DOWNLOAD_INTERVAL", "monthly")
+
+        if download_interval == "monthly":
+            start_time_strings = [x.stem.split("_")[-2] for x in file_list]
+            end_time_strings = [x.stem.split("_")[-1] for x in file_list]
+            start_times = pd.DatetimeIndex(
+                [pd.Timestamp(x, unit="h") for x in start_time_strings]
+            )
+            end_times = pd.DatetimeIndex(
+                [pd.Timestamp(x, unit="h") for x in end_time_strings]
+            )
+
+        elif download_interval == "daily":
+            start_time_strings = [x.stem.split("_")[-1] for x in file_list]
+
+            # Handle legacy HYCOM files
+            if start_time_strings and start_time_strings[0] == "0000":
+                if self.verbose:
+                    print("WARNING: Detected legacy HYCOM files with time suffixes")
+                start_time_strings = [
+                    x.name.split(".")[0].split("_")[-2] for x in file_list
+                ]
+
+            start_times = pd.DatetimeIndex(
+                [pd.Timestamp(x, unit="h") for x in start_time_strings]
+            )
+            end_times = start_times + pd.Timedelta("23.9h")
+        else:
+            raise ValueError(f"Unknown download interval: {download_interval}")
+
+        time_start = self.ts if self.ts is not None else start_times.min()
+        time_end = self.te if self.te is not None else end_times.max()
+
+        time_start = pd.Timestamp(time_start)
+        time_end = pd.Timestamp(time_end)
+
+        if download_interval == "monthly":
+            time_start = time_start - MonthBegin(1)
+            time_end = time_end + MonthEnd(1)
+
+        mask = (start_times >= time_start) & (end_times <= time_end)
+        return [file_list[i] for i in mask.nonzero()[0]]
+
+    def get_file_list(self) -> List[Path]:
+        """
+        Get the list of files to be merged.
+
+        Returns:
+            List of file paths to be merged
+        """
+        # Construct search pattern
+        query_parts = [self.source, self.mode]
+        if self.model != "default":
+            query_parts.append(self.model)
+        query = "_".join(query_parts).upper()
+
+        file_list = list(self.in_path.glob(f"*{query}*.nc"))
+
+        if not file_list:
+            raise FileNotFoundError(
+                f"No files found matching pattern '*{query}*.nc' in {self.in_path}"
+            )
+
+        return self._filter_files_by_time(file_list)
+
+    @abstractmethod
+    def merge_files(self, file_list: List[Path]) -> Tuple[xr.Dataset, List[Path]]:
+        """
+        Merge the given files into a single xarray Dataset.
+
+        Args:
+            file_list: List of file paths to merge
+
+        Returns:
+            Tuple of (merged dataset, list of skipped files)
+        """
+        pass
+
+    def _apply_preprocessing(self, ds: xr.Dataset) -> xr.Dataset:
+        """
+        Apply preprocessing steps defined in configuration.
+
+        Args:
+            ds: Input dataset
+
+        Returns:
+            Dataset with preprocessing applied
+        """
+        for vals in self.cfg.values():
+            if "pre_process" in vals:
+                var = vals["source_var"]
+                pattern = r"\b([a-zA-Z_]\w*)\b"
+                components = re.findall(pattern, vals["pre_process"])
+                fn_call = eval(components[0])
+                in_var_strs = components[1:]
+
+                in_vars = [ds[v] for v in in_var_strs if v in ds.data_vars.keys()]
+                if len(in_vars) == len(in_var_strs):
+                    ds[var] = xr.apply_ufunc(fn_call, *in_vars, dask="allowed")
+
+        return ds
+
+    def standardise_dataset(self, ds: xr.Dataset) -> xr.Dataset:
+        """
+        Standardise dataset by renaming variables and applying attributes.
+
+        Args:
+            ds: Merged dataset with source-specific variable names
+
+        Returns:
+            Dataset with standardised variable names and attributes
+        """
+        # Apply preprocessing
+        ds = self._apply_preprocessing(ds)
+
+        # Rename variables
+        name_remap = {
+            v["source_var"]: k
+            for k, v in self.cfg.items()
+            if not k.startswith("_") and v["source_var"] in ds.data_vars
+        }
+        ds = ds.rename(name_remap)
+
+        # Apply standard attributes
+        for dv in ds.data_vars:
+            if dv in STDVARS:
+                ds[dv].attrs = STDVARS[dv]
+
+        # Add coordinate attributes
+        crs = CRS.from_epsg(4326)
+        if "longitude" in ds.coords:
+            ds["longitude"].attrs = dict(crs.cs_to_cf()[1], epsg=4326, name=crs.name)
+        if "latitude" in ds.coords:
+            ds["latitude"].attrs = dict(crs.cs_to_cf()[0], epsg=4326, name=crs.name)
+
+        # Add coordinates field to data variables
+        for dv in ds.data_vars:
+            if {"longitude", "latitude"}.issubset(set(ds[dv].dims)):
+                ds[dv].encoding.pop("coordinates", None)
+
+                if "depth" in ds[dv].dims:
+                    ds[dv].attrs["coordinates"] = "time depth latitude longitude"
+                else:
+                    ds[dv].attrs["coordinates"] = "time latitude longitude"
+
+        # Add in a UTC label on the time array.
+        ds["time"].attrs["tz"] = "UTC"
+
+        return ds
+
+    def wrap_longitudes(self, dataset: xr.Dataset, wrapto360: bool) -> xr.Dataset:
+        """Wrap longitudes around 360 or 180 degrees."""
+        return wrap_longitude(dataset, wrapto360=wrapto360)
+
+    def pad_dataset(self, ds: xr.Dataset) -> xr.Dataset:
+        """Pad the dataset to fill NaNs in horizontal space."""
+        mode_lower = self.mode.lower()
+
+        if mode_lower == "ocean":
+            return horizontal_pad(ds)
+        elif mode_lower == "atmos":
+            return mask_land_data(ds, self.source.lower())
+        elif mode_lower == "wave":
+            if self.verbose:
+                print(
+                    "Land masking is not valid for wave data. Ignoring and carrying on"
+                )
+            return ds
+        else:
+            raise ValueError(f"Unknown mode for padding: {self.mode}")
+
+    @staticmethod
+    def reproject_dataset(ds: xr.Dataset, target_crs: int) -> Tuple[xr.Dataset, str]:
+        """Reproject the dataset to a specified CRS."""
+        crs = CRS.from_epsg(target_crs)
+        transformer = Transformer.from_crs("epsg:4326", crs, always_xy=True)
+
+        xv, yv = np.meshgrid(ds.longitude, ds.latitude)
+        xp, yp = transformer.transform(xv, yv)
+        ds = ds.assign_coords(
+            dict(x=(("latitude", "longitude"), xp), y=(("latitude", "longitude"), yp))
+        )
+        ds["x"].attrs = dict(crs.cs_to_cf()[0], epsg=target_crs, name=crs.name)
+        ds["y"].attrs = dict(crs.cs_to_cf()[1], epsg=target_crs, name=crs.name)
+
+        return ds, f"EPSG{target_crs}"
+
+    @staticmethod
+    def add_local_timezone(ds: xr.Dataset, offset: float, label: str) -> xr.Dataset:
+        """Add local timezone information to the dataset."""
+        ds["time"].attrs = {"tz": "UTC"}
+
+        ds = ds.assign_coords(
+            dict(local_time=ds["time"] + pd.Timedelta(offset, unit="h"))
+        )
+        ds["local_time"].attrs = {"tz": label}
+
+        # Handle high-resolution water level time if present
+        if "wl_time" in ds.dims:
+            ds = ds.assign_coords(
+                dict(wl_local_time=ds["wl_time"] + pd.Timedelta(offset, unit="h"))
+            )
+            ds["wl_local_time"].attrs = {"tz": label}
+
+        return ds
+
+    def _generate_filename(self, ds: xr.Dataset) -> str:
+        """Generate output filename based on dataset and settings."""
+        if self.fname is not None:
+            return self.fname
+
+        # Extract time range
+        time_values = pd.to_datetime(ds["time"][[0, -1]].values)
+        ts = time_values[0].strftime("%Y%m%d")
+        te = time_values[1].strftime("%Y%m%d")
+
+        # Base filename
+        name_parts = [self.source, self.mode]
+        if self.model != "default":
+            name_parts.append(self.model)
+
+        base_name = "_".join(name_parts).upper()
+        fname = f"{base_name}_{ts}_{te}.nc"
+
+        # Add suffixes for processing options
+        if self.local_tz is not None:
+            tz_label = self.local_tz[1].replace(".", "p")
+            fname = fname.replace(".", f"_{tz_label}.")
+
+        if self.reproject is not None:
+            fname = fname.replace(".", f"_EPSG{self.reproject}.")
+
+        if self.pad_dry:
+            fname = fname.replace(".", "_padded.")
+
+        return fname
+
+    def write_dataset(self, ds: xr.Dataset, output_path: str) -> None:
+        """Write the dataset to a file."""
+        time_vars = [coord for coord in ds.coords if "time" in coord]
+
+        encoding = {
+            var: {"units": "hours since 1990-01-01 00:00:00", "dtype": np.float64}
+            for var in time_vars
+        }
+
+        write_task = ds.to_netcdf(output_path, compute=False, encoding=encoding)
+
+        with ProgressBar():
+            write_task.compute()
+
+    def process(self) -> None:
+        """Run through all steps to merge the dataset."""
+        file_list = self.get_file_list()
+
+        if not file_list:
+            raise ValueError("No files found to merge")
+
+        ds, skipped_list = self.merge_files(file_list)
+        ds = self.standardise_dataset(ds)
+        ds = self.wrap_longitudes(ds, self.wrapto360)
+
+        if self.pad_dry:
+            if self.verbose:
+                print("...padding dataset")
+            ds = self.pad_dataset(ds)
+
+        crslbl = None
+        if self.reproject is not None:
+            if self.verbose:
+                print(f"...reprojecting dataset to EPSG {self.reproject}")
+            ds, crslbl = self.reproject_dataset(ds, self.reproject)
+
+        if self.local_tz is not None:
+            dt, lbl = self.local_tz
+            if self.verbose:
+                print(f"...adding local timezone {lbl} with offset of {dt} hours")
+            ds = self.add_local_timezone(ds, dt, lbl)
+
+        fname = self._generate_filename(ds)
+
+        if self.verbose:
+            print(f"Writing dataset: {fname}")
+
+        if self.write:
+            output_path = self.out_path / fname
+            self.write_dataset(ds, output_path.as_posix())
+
+            # Optionally write the fvc - only worth doing if we're also writing the netcdf
+            if self.write_fvc:
+                fvc_fname = fname.replace(".nc", ".fvc")
+                fvc_args = {
+                    "nc_path": output_path.as_posix(),
+                    "output_path": self.out_path,
+                    "filename": fvc_fname,
+                    "source": self.source,
+                    "model": self.model,
+                    "info_url": self.cfg.get("_INFO_URL", None),
+                }
+                if self.mode == "ATMOS":
+                    write_atmos_fvc(ds, **fvc_args)
+                elif self.mode == "OCEAN":
+                    write_ocean_fvc(ds, **fvc_args)
+        else:
+            self.ds = ds
+
+        if self.verbose:
+            print("Merging finished")
+            if skipped_list:
+                print(
+                    "The following raw files were skipped (file open / corruption failure)"
+                )
+                print(" \n".join([f.name for f in skipped_list]))
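The hunk above defines only the abstract base: each provider supplies its own `_init_specific` and `merge_files` in the modules listed further up (e.g. tfv_get_tools/providers/ocean/hycom.py). As a rough sketch of how the pieces fit together - the class name, concatenation strategy and argument values below are invented for illustration and are not taken from the package - a concrete merger and a typical call might look like:

```python
# Illustrative only: a minimal BaseMerger subclass; the real provider mergers differ.
from pathlib import Path
from typing import List, Tuple

import xarray as xr

from tfv_get_tools.providers._merger import BaseMerger


class ExampleOceanMerger(BaseMerger):
    """Hypothetical merger that simply concatenates raw files along time."""

    def _init_specific(self) -> None:
        # Load the source/mode/model config into self.cfg
        self._load_config()

    def merge_files(self, file_list: List[Path]) -> Tuple[xr.Dataset, List[Path]]:
        datasets, skipped = [], []
        for file in sorted(file_list):
            ds = self._open_subset_netcdf(file)  # returns None for unreadable files
            if ds is None:
                skipped.append(file)
            else:
                datasets.append(ds)
        merged = xr.concat(datasets, dim="time").sortby("time")
        return merged.sel(time=self.time_slice), skipped


# Mirrors the __init__ arguments documented above; write=False keeps the result
# in memory (merger.ds) instead of writing the netcdf and .fvc.
merger = ExampleOceanMerger(
    in_path=Path("./raw"),
    out_path=Path("."),
    time_start="2020-01-01 00:00",
    time_end="2020-02-01 00:00",
    source="HYCOM",
    mode="OCEAN",
    local_tz=(10.0, "AEST"),
    write=False,
)
merged_ds = merger.ds
```

With the defaults (`execute=True`, `write=True`), instantiation runs `process()` end to end: filter files by time, merge, standardise, optionally pad/reproject/add a local timezone, then write the merged netcdf and its accompanying .fvc.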
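One mechanism worth spelling out is `_apply_preprocessing`: it tokenises a config entry's `pre_process` string with the regex `\b([a-zA-Z_]\w*)\b`, resolves the first identifier to a callable via `eval` (so it must be in scope, e.g. star-imported from `_custom_conversions`), and treats the remaining identifiers as input variable names. A hypothetical entry (function and variable names invented for illustration) is applied roughly like this:

```python
# Hypothetical names; the real conversions live in providers/_custom_conversions.py
# and the real pre_process strings in the provider YAML configs.
import xarray as xr


def kelvin_to_celsius(t):
    return t - 273.15


cfg_entry = {"source_var": "air_temp", "pre_process": "kelvin_to_celsius(air_temp)"}

ds = xr.Dataset({"air_temp": ("time", [300.0, 301.5])})

# _apply_preprocessing splits the pre_process string into
# ["kelvin_to_celsius", "air_temp"], evals the first token to get the callable,
# and applies it to the named variables via apply_ufunc:
ds[cfg_entry["source_var"]] = xr.apply_ufunc(
    kelvin_to_celsius, ds["air_temp"], dask="allowed"
)
```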