tfv-get-tools 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. tfv_get_tools/__init__.py +4 -0
  2. tfv_get_tools/_standard_attrs.py +107 -0
  3. tfv_get_tools/atmos.py +167 -0
  4. tfv_get_tools/cli/_cli_base.py +173 -0
  5. tfv_get_tools/cli/atmos_cli.py +192 -0
  6. tfv_get_tools/cli/ocean_cli.py +204 -0
  7. tfv_get_tools/cli/tide_cli.py +118 -0
  8. tfv_get_tools/cli/wave_cli.py +183 -0
  9. tfv_get_tools/fvc/__init__.py +3 -0
  10. tfv_get_tools/fvc/_atmos.py +230 -0
  11. tfv_get_tools/fvc/_fvc.py +218 -0
  12. tfv_get_tools/fvc/_ocean.py +171 -0
  13. tfv_get_tools/fvc/_tide.py +195 -0
  14. tfv_get_tools/ocean.py +170 -0
  15. tfv_get_tools/providers/__init__.py +0 -0
  16. tfv_get_tools/providers/_custom_conversions.py +34 -0
  17. tfv_get_tools/providers/_downloader.py +566 -0
  18. tfv_get_tools/providers/_merger.py +520 -0
  19. tfv_get_tools/providers/_utilities.py +255 -0
  20. tfv_get_tools/providers/atmos/barra2.py +209 -0
  21. tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
  22. tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
  23. tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
  24. tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
  25. tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
  26. tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
  27. tfv_get_tools/providers/atmos/cfsr.py +207 -0
  28. tfv_get_tools/providers/atmos/era5.py +20 -0
  29. tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
  30. tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
  31. tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
  32. tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
  33. tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
  34. tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
  35. tfv_get_tools/providers/ocean/hycom.py +611 -0
  36. tfv_get_tools/providers/wave/cawcr.py +166 -0
  37. tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
  38. tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
  39. tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
  40. tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
  41. tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
  42. tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
  43. tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
  44. tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
  45. tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
  46. tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
  47. tfv_get_tools/providers/wave/era5.py +232 -0
  48. tfv_get_tools/providers/wave/era5_gcp.py +169 -0
  49. tfv_get_tools/tide/__init__.py +2 -0
  50. tfv_get_tools/tide/_nodestring.py +214 -0
  51. tfv_get_tools/tide/_tidal_base.py +568 -0
  52. tfv_get_tools/utilities/_tfv_bc.py +78 -0
  53. tfv_get_tools/utilities/horizontal_padding.py +89 -0
  54. tfv_get_tools/utilities/land_masking.py +93 -0
  55. tfv_get_tools/utilities/parsers.py +44 -0
  56. tfv_get_tools/utilities/warnings.py +38 -0
  57. tfv_get_tools/wave.py +179 -0
  58. tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
  59. tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
  60. tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
  61. tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
  62. tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,230 @@
1
+ from pathlib import Path
2
+ from typing import Union, List, Optional, Dict
3
+
4
+ import xarray as xr
5
+
6
+ from tfv_get_tools.fvc._fvc import FVCWriter
7
+
8
+
9
class AtmosFVCWriter(FVCWriter):
    """Writer for ATMOS (atmospheric forcing) FVC include files."""

    # Default mappings of dataset variables to TUFLOW-FV variables.
    # bc_scale / bc_offset convert the NetCDF units to TUFLOW-FV units.
    DEFAULT_VAR_MAPPINGS = {
        "t2m": {
            "tfv_var": "AIR_TEMP_GRID",
            "bc_scale": 1.0,
            "bc_offset": -273.15,  # K to C
        },
        "relhum": {"tfv_var": "REL_HUM_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
        "u10": {
            "tfv_var": "W10_GRID",  # Special case - paired with v10
            "bc_scale": 1.0,
            "bc_offset": 0.0,
        },
        "v10": {
            "tfv_var": "W10_GRID",  # Special case - paired with u10
            "bc_scale": 1.0,
            "bc_offset": 0.0,
        },
        "mslp": {
            "tfv_var": "MSLP_GRID",
            "bc_scale": 0.01,  # presumably Pa to hPa — confirm against source data units
            "bc_offset": 0.0,
        },
        "prate": {"tfv_var": "PRECIP_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
        "dlwrf": {"tfv_var": "LW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
        "dswrf": {"tfv_var": "SW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
    }

    def __init__(self, var_mappings: Optional[Dict] = None, source=None, model=None, info_url=None):
        """Initialize atmospheric FVC writer.

        Args:
            var_mappings: Optional dictionary overriding default variable mappings
            source: Optional source string for the FVC header
            model: Optional model string for the FVC header
            info_url: Optional URL for source information printed in the header
        """
        super().__init__(requires_coordinates=True, source=source, model=model, info_url=info_url)
        self.var_mappings = var_mappings or self.DEFAULT_VAR_MAPPINGS
        self.available_vars = set()  # populated by detect_variables()

    def detect_variables(self, ds: xr.Dataset) -> None:
        """Detect supported variables present in the dataset.

        Args:
            ds: Input xarray Dataset

        Raises:
            ValueError: If only one of the u10/v10 wind components is present.
        """
        self.available_vars = set(ds.data_vars) & set(self.var_mappings.keys())

        # Verify wind components come in pairs.
        # BUGFIX: this check previously tested "uwnd10m"/"vwnd10m", names that
        # never appear in DEFAULT_VAR_MAPPINGS ("u10"/"v10" do), so an
        # unpaired wind component was silently accepted.
        if ("u10" in self.available_vars) != ("v10" in self.available_vars):
            raise ValueError(
                "Both u10 and v10 must be present for wind configuration"
            )

    def write_grid_definition(self, lines: List[str], nc_path: str) -> None:
        """Write grid definition block.

        Args:
            lines: List to append FVC lines to
            nc_path: Path to the NetCDF file referenced by the FVC
        """
        xvar, yvar = self.coordinate_vars
        lines.extend(
            [
                f"Grid Definition File == {nc_path}",
                f"  Grid Definition Variables == {xvar}, {yvar}",
                "  Grid Definition Label == atmos",
                "End Grid",
                "",
            ]
        )

    def write_boundary_conditions(self, lines: List[str], nc_path: str) -> None:
        """Write one BC block per detected variable.

        The two wind components share a single W10_GRID block (written once,
        using the scale/offset of whichever component sorts first).

        Args:
            lines: List to append FVC lines to
            nc_path: Path to the NetCDF file referenced by the FVC
        """
        wind_written = False

        for var_name in sorted(self.available_vars):
            config = self.var_mappings[var_name]
            tfv_var = config["tfv_var"]
            bc_scale = config["bc_scale"]
            bc_offset = config["bc_offset"]

            # Handle wind components specially: one paired block for u10/v10
            if tfv_var == "W10_GRID":
                if wind_written:
                    continue
                wind_written = True
                lines.extend(
                    [
                        f"BC == {tfv_var}, atmos, {nc_path}",
                        f"  BC Header == {self.time_var}, u10, v10",
                    ]
                )
                if bc_scale != 1.0:
                    lines.append(f"  BC Scale == {bc_scale}, {bc_scale}")
                if bc_offset != 0.0:
                    lines.append(f"  BC Offset == {bc_offset}, {bc_offset}")
            else:
                lines.extend(
                    [
                        f"BC == {tfv_var}, atmos, {nc_path}",
                        f"  BC Header == {self.time_var}, {var_name}",
                    ]
                )
                if bc_scale != 1.0:
                    lines.append(f"  BC Scale == {bc_scale}")
                if bc_offset != 0.0:
                    lines.append(f"  BC Offset == {bc_offset}")

            # Common BC settings (appended to every BC block)
            lines.extend(
                [
                    "  BC Update dt == 3600.",
                    "  BC Time Units == hours",
                    "  BC Reference Time == 01/01/1990 00:00",
                    "  BC Default == NaN",
                    "End BC",
                    "",
                ]
            )

    def generate(
        self,
        ds: xr.Dataset,
        nc_path: str = "atmos_forcing.nc",
    ) -> List[str]:
        """Generate FVC configuration content for atmospheric forcing.

        Args:
            ds: Input xarray Dataset containing atmospheric variables
            nc_path: Path or filename for the NetCDF file (referenced in FVC content)

        Returns:
            List[str]: FVC configuration content as a list of strings

        Raises:
            ValueError: If no supported atmospheric variables are found.
        """
        # Process dataset to detect coordinates, time settings, and variables
        self.process_dataset(ds)
        self.detect_variables(ds)

        if not self.available_vars:
            raise ValueError("No supported atmospheric variables found in dataset")

        lines = []

        # Write header
        self.write_header(lines, "Atmospheric Dataset", ds)

        # Write grid definition
        self.write_grid_definition(lines, nc_path)

        # Write boundary conditions
        self.write_boundary_conditions(lines, nc_path)

        return lines

    def write_file(
        self,
        lines: List[str],
        output_path: Union[str, Path],
        filename: Optional[str] = None,
    ) -> None:
        """Write FVC content to a file.

        Args:
            lines: FVC configuration content
            output_path: Directory to write the file to (created if missing)
            filename: Optional filename (if None, uses 'atmos_forcing.fvc')
        """
        output_path = Path(output_path)
        filename = filename or "atmos_forcing.fvc"
        if not filename.endswith(".fvc"):
            filename = filename.replace(".nc", ".fvc")
            # Robustness fix: guarantee the .fvc extension even for names
            # that had neither a .nc nor a .fvc suffix.
            if not filename.endswith(".fvc"):
                filename += ".fvc"

        # Consistency fix: create the output directory like
        # OceanFVCWriter.write_file does, instead of failing when absent.
        output_path.mkdir(parents=True, exist_ok=True)
        with open(output_path / filename, "w") as f:
            for line in lines:
                f.write(line + "\n")
175
+
176
+
177
def write_atmos_fvc(
    ds: xr.Dataset,
    nc_path: str = "atmos_forcing.nc",
    output_path: Optional[Union[str, Path]] = None,
    filename: Optional[str] = None,
    var_mappings: Optional[Dict] = None,
    source: Optional[str] = None,
    model: Optional[str] = None,
    info_url: Optional[str] = None,
) -> List[str]:
    """Generate (and optionally write) TUFLOW-FV atmospheric forcing configuration.

    Convenience wrapper around :class:`AtmosFVCWriter`: builds the FVC
    content for *ds* and, when ``output_path`` is given, also writes it to
    disk.

    Args:
        ds: Input xarray Dataset containing atmospheric variables
        nc_path: Path or filename for the NetCDF file (referenced in FVC content)
        output_path: Optional path to write FVC file (if None, no file is written)
        filename: Optional filename for FVC file (if None, derives from nc_path)
        var_mappings: Optional dictionary to override default variable mappings
        source: Optional source string for the FVC header
        model: Optional model string for the FVC header
        info_url: Optional URL for source information to be printed in FVC

    Returns:
        List[str]: FVC configuration content as a list of strings

    Examples:
        # Just generate FVC content with auto-detected variables
        >>> lines = write_atmos_fvc(dataset, nc_path="forcing.nc")

        # Generate and write to file
        >>> lines = write_atmos_fvc(
        ...     dataset,
        ...     nc_path="forcing.nc",
        ...     output_path="path/to/output"
        ... )

        # Generate with custom variable mappings
        >>> custom_mappings = {
        ...     "temperature": {"tfv_var": "T2_GRID", "bc_scale": 1.0, "bc_offset": -273.15}
        ... }
        >>> lines = write_atmos_fvc(
        ...     dataset,
        ...     nc_path="forcing.nc",
        ...     var_mappings=custom_mappings
        ... )
    """
    writer = AtmosFVCWriter(
        var_mappings=var_mappings,
        source=source,
        model=model,
        info_url=info_url,
    )
    fvc_lines = writer.generate(ds, nc_path)

    # Only touch the filesystem when a destination was supplied.
    if output_path is not None:
        writer.write_file(fvc_lines, output_path, filename)

    return fvc_lines
@@ -0,0 +1,218 @@
1
+ from datetime import datetime
2
+ from typing import Callable, List, Optional, Tuple, Union
3
+
4
+ import numpy as np
5
+ import pandas as pd
6
+ import xarray as xr
7
+
8
+
9
class FVCWriter:
    """Base class for writing TUFLOW-FV control (FVC) files.

    Handles coordinate/time detection and the standard file header;
    subclasses add the forcing-specific grid and BC blocks.
    """

    def __init__(
        self, requires_coordinates: bool = True, source=None, model=None, info_url=None
    ):
        """Initialize FVC writer.

        Args:
            requires_coordinates: Whether this FVC type requires coordinate information
            source: Optional source string for the FVC header
            model: Optional model string for the FVC header
            info_url: Optional URL for source information printed in the header
        """
        self.requires_coordinates = requires_coordinates
        self.coordinate_vars: Optional[Tuple[str, str]] = ("longitude", "latitude")
        self.time_var: str = "time"
        self.tzlbl: Optional[str] = None  # timezone label from local_time attrs
        self.epsg: Optional[str] = None
        self.crs_name: Optional[str] = None

        # For header metadata
        self.source = source
        self.model = model
        self.info_url = info_url

    def detect_coordinates(self, ds: "xr.Dataset") -> Optional[Tuple[str, str]]:
        """Detect coordinate variables from the dataset.

        Projected coordinates ("x", "y") take precedence over geographic
        ("longitude", "latitude"). Any "epsg"/"name" attributes found on the
        x-coordinate are recorded for use in the header.

        Args:
            ds: Input xarray Dataset

        Returns:
            Optional[Tuple[str, str]]: x and y coordinate variable names, or
            None when this writer does not require coordinates.

        Raises:
            ValueError: If coordinates are required but none were found.
        """
        if not self.requires_coordinates:
            return None

        # NOTE: dict.get() on .attrs cannot raise KeyError/AttributeError,
        # so the previous try/except wrappers were removed.
        if "x" in ds and "y" in ds:
            self.epsg = ds["x"].attrs.get("epsg")
            self.crs_name = ds["x"].attrs.get("name")
            return "x", "y"
        if "longitude" in ds and "latitude" in ds:
            self.epsg = ds["longitude"].attrs.get("epsg")
            self.crs_name = ds["longitude"].attrs.get("name")
            return "longitude", "latitude"

        # requires_coordinates is guaranteed True here (early return above),
        # so the previous unreachable `return None` fallthrough was removed.
        raise ValueError(
            "Dataset requires coordinate variables (x,y or longitude,latitude) but none were found"
        )

    def detect_time_settings(self, ds: "xr.Dataset") -> str:
        """Detect the time variable to use, preferring "local_time".

        Records the "tz" attribute of "local_time" (when present) on
        ``self.tzlbl``.

        Args:
            ds: Input xarray Dataset

        Returns:
            str: Name of time variable to use ("local_time" or "time")
        """
        if "local_time" in ds:
            self.tzlbl = ds["local_time"].attrs.get("tz")
            return "local_time"
        return "time"

    def process_dataset(self, ds: "xr.Dataset") -> None:
        """Detect and cache coordinate system and time settings for *ds*.

        Args:
            ds: Input xarray Dataset
        """
        self.coordinate_vars = self.detect_coordinates(ds)
        self.time_var = self.detect_time_settings(ds)

    def get_coordinate_info(self, ds: "xr.Dataset") -> Optional[dict]:
        """Extract coordinate variable names and limits from the dataset.

        Args:
            ds: Input xarray Dataset

        Returns:
            Optional[dict]: Keys "xvar", "yvar", "xlims", "ylims", or None
            when no coordinate variables have been detected.
        """
        if not self.coordinate_vars:
            return None

        xvar, yvar = self.coordinate_vars
        return {
            "xvar": xvar,
            "yvar": yvar,
            "xlims": format_limits(ds[xvar].values),
            "ylims": format_limits(ds[yvar].values),
        }

    def write_header(self, f, title: str, ds: "xr.Dataset") -> None:
        """Write the standard FVC file header.

        Args:
            f: File handle or list to write to. (The previous annotation used
               the builtin ``any``, which is not a type; annotation removed.)
            title: Title string for the header
            ds: xarray Dataset for time/coordinate info
        """
        # Process dataset if not already done
        self.process_dataset(ds)

        lines = []
        lines.append(f"! TUFLOW FV FVC File for {title}")
        lines.append("! Written by TUFLOW FV `tfv-get-tools`")
        lines.append("")
        # Disclaimer, split over several lines to keep the FVC readable.
        lines.append("! This control file has been prepared using the TUFLOW FV Get Tools (tfv-get-tools),")
        lines.append("! a free set of Python tools designed to assist with the download and formatting of")
        lines.append("! boundary condition data from global model sources such as ERA5 and CFSR for use in TUFLOW FV.")
        lines.append("! These external model datasets are subject to change over time and are provided 'as is'.")
        lines.append("! Users are responsible for reviewing and, where possible, verifying these inputs against")
        lines.append("! observational data before use in any modelling application.")
        lines.append("")

        # Standard source information
        lines.append(f"! Source: {self.source if self.source else 'Unknown'}")
        # Flattened the previous nested ifs: skip the sentinel "default" model
        if self.model and self.model != "default":
            lines.append(f"! Model: {self.model}")
        if self.info_url:
            lines.append(f"! Info: {self.info_url}")
        lines.append("")

        # Add timezone information if using local time
        time_var = "local_time" if "local_time" in ds else "time"
        if time_var == "local_time":
            # Robustness fix: .get() instead of ['tz'] so a missing "tz"
            # attribute no longer raises KeyError (detect_time_settings
            # already tolerates its absence).
            lines.append(f"! NetCDF time datum: {ds[time_var].attrs.get('tz', 'unknown')}")
        else:
            lines.append("! NetCDF time datum: UTC")

        # Add time information
        lines.append(f"! NetCDF start time: {ds_time_to_str(ds, 0)}")
        lines.append(f"! NetCDF end time: {ds_time_to_str(ds, -1)}")
        lines.append("")

        # Add coordinate system information if available
        if self.requires_coordinates and (self.epsg or self.crs_name):
            if self.epsg:
                lines.append(f"! Coordinate system EPSG: {self.epsg}")
            if self.crs_name:
                lines.append(f"! Coordinate system name: {self.crs_name}")

        # Add coordinate limits if required and available
        if self.requires_coordinates:
            coords = self.get_coordinate_info(ds)
            if coords:
                lines.append(f"! NetCDF x-limits: {coords['xlims']}")
                lines.append(f"! NetCDF y-limits: {coords['ylims']}")

        lines.append("")

        if isinstance(f, list):
            f.extend(lines)
        else:
            for line in lines:
                f.write(line + "\n")
180
+
181
+
182
def format_limits(values: np.ndarray, funcs: Tuple[Callable, ...] = (np.min, np.max)) -> str:
    """Format array limits into a comma-separated string.

    Args:
        values: Array of coordinate values.
        funcs: Reduction functions applied to ``values``; each result is
            rendered with four decimal places. Defaults to (min, max).
            A tuple default replaces the previous mutable-list default
            (shared-mutable-default pitfall); lists still work.

    Returns:
        str: e.g. "1.0000, 2.5000".
    """
    return ", ".join(f"{fn(values):0.4f}" for fn in funcs)
185
+
186
+
187
def format_timestamp(
    timestamp: Union[str, datetime, np.datetime64, pd.Timestamp],
    fmt: str = "%Y-%m-%d %H:%M",
) -> str:
    """Render a timestamp of any common type as a formatted string.

    Args:
        timestamp: Input timestamp as string, datetime, numpy.datetime64, or pandas.Timestamp
        fmt: Output format string (default: "%Y-%m-%d %H:%M")

    Returns:
        str: Formatted timestamp string

    Examples:
        >>> format_timestamp("2024-01-01")
        '2024-01-01 00:00'
        >>> format_timestamp(np.datetime64("2024-01-01"))
        '2024-01-01 00:00'
        >>> format_timestamp(pd.Timestamp("2024-01-01"))
        '2024-01-01 00:00'
    """
    # Normalise the supported input types through pandas before formatting.
    needs_conversion = isinstance(timestamp, (str, np.datetime64, datetime))
    ts = pd.Timestamp(timestamp) if needs_conversion else timestamp
    return ts.strftime(fmt)
213
+
214
+
215
def ds_time_to_str(ds: xr.Dataset, i: int, fmt: str = "%Y-%m-%d %H:%M") -> str:
    """Format the i-th time value of *ds* as a string, preferring 'local_time'."""
    key = "local_time" if "local_time" in ds else "time"
    return format_timestamp(ds[key].values[i], fmt)
@@ -0,0 +1,171 @@
1
+ from typing import Union, List, Optional
2
+ from pathlib import Path
3
+
4
+ import xarray as xr
5
+
6
+ from tfv_get_tools.fvc._fvc import FVCWriter
7
+
8
+
9
class OceanFVCWriter(FVCWriter):
    """Writer for OGCM (ocean forcing) FVC include files."""

    # Order of variables in the OBC_GRID BC Header line.
    DEFAULT_VAR_ORDER = ["surf_el", "water_u", "water_v", "salinity", "water_temp"]

    def __init__(self, var_order: Optional[List[str]] = None, source=None, model=None, info_url=None):
        """Initialize ocean FVC writer.

        Args:
            var_order: Optional list specifying order of variables in the boundary condition
            source: Optional source string for the FVC header
            model: Optional model string for the FVC header
            info_url: Optional URL for source information printed in the header
        """
        super().__init__(requires_coordinates=True, source=source, model=model, info_url=info_url)
        self.var_order = var_order or self.DEFAULT_VAR_ORDER

    def write_grid_definition(self, lines: List[str], nc_path: str):
        """Write grid definition block.

        Args:
            lines: List to append lines to
            nc_path: Path to the NetCDF file
        """
        xvar, yvar = self.coordinate_vars
        lines.extend(
            [
                f"Grid Definition File == {nc_path}",
                f"  Grid Definition Variables == {xvar}, {yvar}, depth",
                "  Grid Definition Label == ocean",
                "  Boundary Gridmap == 1",
                "End Grid",
                "",
            ]
        )

    def write_boundary_conditions(self, lines: List[str], nc_path: str):
        """Write boundary conditions block.

        Args:
            lines: List to append lines to
            nc_path: Path to the NetCDF file
        """
        vlist = ",".join(self.var_order)
        # BUGFIX: the BC Offset line previously hardcoded five values; a
        # custom var_order of a different length produced a mismatched FVC.
        # Generate one offset per variable instead (identical output for the
        # default order). The leading -0.0 marks the first variable
        # (surf_el by default) for user review — presumably a datum shift;
        # confirm against the model setup.
        offsets = ", ".join(["-0.0"] + ["0.0"] * (len(self.var_order) - 1))
        lines.extend(
            [
                f"BC == OBC_GRID, ocean, {nc_path}",
                "  BC Nodestrings == # ! Please supply open boundary ns list",
                "  Sub-type == 6",
                f"  BC Header == {self.time_var},{vlist}",
                "  BC Update dt == 900.",
                "  BC Time Units == hours",
                "  BC Reference Time == 01/01/1990 00:00",
                f"  BC Offset == {offsets} ! Check Offset -0.0",
                "  BC Default == NaN",
                "  Vertical Coordinate Type == depth",
                "End BC",
                "",
            ]
        )

    def generate(
        self,
        ds: xr.Dataset,
        nc_path: str = "ocean_forcing.nc",
    ) -> List[str]:
        """Generate FVC configuration content for ocean forcing.

        Args:
            ds: Input xarray Dataset containing ocean variables
            nc_path: Path or filename for the NetCDF file (referenced in FVC content)

        Returns:
            List[str]: FVC configuration content as a list of strings
        """
        # Process dataset to detect coordinates and time settings
        self.process_dataset(ds)

        lines = []

        # Write header
        self.write_header(lines, "Ocean Dataset", ds)

        # Write grid definition
        self.write_grid_definition(lines, nc_path)

        # Write boundary conditions
        self.write_boundary_conditions(lines, nc_path)

        return lines

    def write_file(
        self,
        lines: List[str],
        output_path: Union[str, Path],
        filename: Optional[str] = None,
    ) -> None:
        """Write FVC content to a file.

        Args:
            lines: FVC configuration content
            output_path: Directory to write the file to (created if missing)
            filename: Optional filename (if None, will use 'ocean_forcing.fvc')
        """
        output_path = Path(output_path)
        filename = filename or "ocean_forcing.fvc"
        if not filename.endswith(".fvc"):
            filename = filename.replace(".nc", ".fvc")
            # Robustness fix: guarantee the .fvc extension even for names
            # that had neither a .nc nor a .fvc suffix.
            if not filename.endswith(".fvc"):
                filename += ".fvc"

        output_path.mkdir(parents=True, exist_ok=True)
        with open(output_path / filename, "w") as f:
            for line in lines:
                f.write(line + "\n")
119
+
120
+
121
def write_ocean_fvc(
    ds: xr.Dataset,
    nc_path: str = "ocean_forcing.nc",
    output_path: Optional[Union[str, Path]] = None,
    filename: Optional[str] = None,
    var_order: Optional[List[str]] = None,
    source: Optional[str] = None,
    model: Optional[str] = None,
    info_url: Optional[str] = None,
) -> List[str]:
    """Generate (and optionally write) TUFLOW-FV ocean forcing configuration.

    Convenience wrapper around :class:`OceanFVCWriter`: builds the FVC
    content for *ds* and, when ``output_path`` is given, also writes it to
    disk.

    Args:
        ds: Input xarray Dataset containing ocean variables
        nc_path: Path or filename for the NetCDF file (referenced in FVC content)
        output_path: Optional path to write FVC file (if None, no file is written)
        filename: Optional filename for FVC file (if None, derives from nc_path)
        var_order: Optional list specifying order of variables
        source: Optional source string for the FVC header
        model: Optional model string for the FVC header
        info_url: Optional URL for source information to be printed in FVC

    Returns:
        List[str]: FVC configuration content as a list of strings

    Examples:
        # Just generate FVC content
        >>> lines = write_ocean_fvc(dataset, nc_path="forcing.nc")

        # Generate and write to file
        >>> lines = write_ocean_fvc(
        ...     dataset,
        ...     nc_path="forcing.nc",
        ...     output_path="path/to/output"
        ... )

        # Generate with custom variable order
        >>> lines = write_ocean_fvc(
        ...     dataset,
        ...     nc_path="forcing.nc",
        ...     var_order=["water_temp", "salinity", "water_u", "water_v", "surf_el"]
        ... )
    """
    writer = OceanFVCWriter(
        var_order=var_order,
        source=source,
        model=model,
        info_url=info_url,
    )
    fvc_lines = writer.generate(ds, nc_path)

    # Only touch the filesystem when a destination was supplied.
    if output_path is not None:
        writer.write_file(fvc_lines, output_path, filename)

    return fvc_lines