tfv_get_tools-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tfv_get_tools/__init__.py +4 -0
- tfv_get_tools/_standard_attrs.py +107 -0
- tfv_get_tools/atmos.py +167 -0
- tfv_get_tools/cli/_cli_base.py +173 -0
- tfv_get_tools/cli/atmos_cli.py +192 -0
- tfv_get_tools/cli/ocean_cli.py +204 -0
- tfv_get_tools/cli/tide_cli.py +118 -0
- tfv_get_tools/cli/wave_cli.py +183 -0
- tfv_get_tools/fvc/__init__.py +3 -0
- tfv_get_tools/fvc/_atmos.py +230 -0
- tfv_get_tools/fvc/_fvc.py +218 -0
- tfv_get_tools/fvc/_ocean.py +171 -0
- tfv_get_tools/fvc/_tide.py +195 -0
- tfv_get_tools/ocean.py +170 -0
- tfv_get_tools/providers/__init__.py +0 -0
- tfv_get_tools/providers/_custom_conversions.py +34 -0
- tfv_get_tools/providers/_downloader.py +566 -0
- tfv_get_tools/providers/_merger.py +520 -0
- tfv_get_tools/providers/_utilities.py +255 -0
- tfv_get_tools/providers/atmos/barra2.py +209 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
- tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
- tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
- tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
- tfv_get_tools/providers/atmos/cfsr.py +207 -0
- tfv_get_tools/providers/atmos/era5.py +20 -0
- tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
- tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
- tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
- tfv_get_tools/providers/ocean/hycom.py +611 -0
- tfv_get_tools/providers/wave/cawcr.py +166 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
- tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
- tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
- tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
- tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
- tfv_get_tools/providers/wave/era5.py +232 -0
- tfv_get_tools/providers/wave/era5_gcp.py +169 -0
- tfv_get_tools/tide/__init__.py +2 -0
- tfv_get_tools/tide/_nodestring.py +214 -0
- tfv_get_tools/tide/_tidal_base.py +568 -0
- tfv_get_tools/utilities/_tfv_bc.py +78 -0
- tfv_get_tools/utilities/horizontal_padding.py +89 -0
- tfv_get_tools/utilities/land_masking.py +93 -0
- tfv_get_tools/utilities/parsers.py +44 -0
- tfv_get_tools/utilities/warnings.py +38 -0
- tfv_get_tools/wave.py +179 -0
- tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
- tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
- tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
- tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
- tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,568 @@ tfv_get_tools/tide/_tidal_base.py
"""
GetTide. Only supports FES2014 and FES2022.
"""

from datetime import datetime, timedelta
from pathlib import Path
import pickle
import os
from typing import Union, Tuple, Optional

import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
from tqdm.auto import tqdm
from shapely import LineString
from pyproj import Geod, CRS

# Import the tidal prediction package, pyTMD
import pyTMD
from pyTMD.io.FES import extract_constants as extract_FES_constants
from pyTMD.predict import time_series as predict_tidal_ts
from tfv_get_tools.utilities.parsers import _parse_date, _parse_path
from tfv_get_tools.utilities.warnings import deprecated
from tfv_get_tools.tide._nodestring import (
    load_nodestring_shapefile,
    process_nodestring_gdf,
)
from tfv_get_tools.fvc import write_tide_fvc

crs = CRS.from_epsg(4326)


class TidalExtractor:
    """Wrapper for pyTMD operations to enable testing."""

    def extract_fes_constants(self, coords, files, source, interpolate_method):
        """Extract FES constants - wrapped for testing."""
        return extract_FES_constants(
            coords[:, 0],
            coords[:, 1],
            files,
            TYPE="z",
            VERSION=source,
            METHOD=interpolate_method,
            GZIP=False,
            SCALE=1.0 / 100.0,  # FES amplitudes are stored in cm; scale to metres
        )

    def predict_tidal_timeseries(self, tvec, hc, cons):
        """Predict a tidal timeseries - wrapped for testing."""
        return predict_tidal_ts(tvec, hc, cons, corrections="FES")


# Default extractor instance
_default_extractor = TidalExtractor()

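# A minimal sketch (illustrative only, not part of the package file): because
# `extractor` is an injectable dependency, tests can swap in a duck-typed stub
# that returns canned values instead of reading the FES netCDF database. The
# class name and array shapes below are made up.
class _StubExtractor:
    """Hypothetical test stand-in for TidalExtractor."""

    def extract_fes_constants(self, coords, files, source, interpolate_method):
        # Fake amplitudes (m) and phases (deg) for 34 FES constituents
        amp = np.ma.ones((coords.shape[0], 34))
        ph = np.ma.zeros((coords.shape[0], 34))
        return amp, ph

    def predict_tidal_timeseries(self, tvec, hc, cons):
        # Flat waterlevel, one value per timestep
        return np.zeros(tvec.shape[0])
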
def _detect_tide_model_source(model_dir: Path):
    """Detect tidal model source based on model_dir."""
    original_model_dir = model_dir
    name = model_dir.name
    if name == "ocean_tide":
        model_dir = (model_dir / "..").resolve()
        name = model_dir.name

    if "fes2014" in name.lower():
        source = "FES2014"  # Return uppercase to match VALID_SOURCES
    elif "fes2022" in name.lower():
        source = "FES2022"  # Return uppercase to match VALID_SOURCES
    else:
        source = None

    # Return the resolved model_dir only if we actually resolved it
    if original_model_dir.name == "ocean_tide":
        return source, model_dir
    else:
        return source, original_model_dir

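# A quick sketch of the detection behaviour (hypothetical paths): a download
# laid out as /data/fes2022b/ocean_tide detects its source from the parent
# folder name and resolves one level up.
source, resolved = _detect_tide_model_source(Path("/data/fes2022b/ocean_tide"))
print(source, resolved)  # FES2022 /data/fes2022b
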
def _get_model_dir(source="FES2014", model_dir: Union[str, Path] = None) -> Path:
    """Get and validate the model directory."""
    VALID_SOURCES = {"FES2014", "FES2022"}

    if source not in VALID_SOURCES:
        raise ValueError(
            f"Requested source {source} not supported. "
            f"Valid sources: {VALID_SOURCES}"
        )

    if model_dir is None:
        env = f"{source}_DIR"
        if env not in os.environ:
            raise ValueError(
                f"The {env} root directory needs to be supplied, either as the "
                f"`model_dir` argument or as the environment variable '{env}'"
            )
        model_dir = os.environ[env]

    model_dir = Path(model_dir)

    if not model_dir.exists():
        raise FileNotFoundError(
            f"{source} model directory ({model_dir.as_posix()}) does not exist"
        )

    return model_dir

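# Sketch of the two ways to point at a model (the path is hypothetical): pass
# `model_dir` directly, or export the per-source environment variable.
os.environ["FES2014_DIR"] = "/data/fes2014"   # e.g. in a shell profile or CI config
model_dir = _get_model_dir(source="FES2014")  # falls back to $FES2014_DIR
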
def _get_chainage_array(array: np.ndarray):
    """Calculate chainage from coordinates."""
    geod = Geod(ellps="WGS84")
    numCoords = array.shape[0] - 1
    geo = LineString(array)

    # Total geodesic length of the line
    stf = 0
    for i in range(0, numCoords):
        point1 = geo.coords[i]
        point2 = geo.coords[i + 1]
        _, _, dist = geod.inv(point1[0], point1[1], point2[0], point2[1])
        stf += dist

    # Distribute the total length evenly between vertices; this assumes the
    # vertices are (approximately) equally spaced along the line, as when
    # nodestrings are resampled at a fixed spacing.
    nx = len(geo.xy[0])
    incr = stf / (nx - 1)
    chainage = [incr * x for x in range(1, nx)]
    chainage.insert(0, 0)

    return chainage, nx

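# Worked example (illustrative only): three vertices spaced 1 degree apart
# along the equator are each ~111.32 km apart on the WGS84 ellipsoid, so the
# returned chainage is evenly incremented over the total geodesic length.
pts = np.array([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0]])
chainage, nx = _get_chainage_array(pts)
print(nx, chainage)  # 3 [0, ~111319.5, ~222639.0]
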
def _check_coords(coords):
    """Validate coordinate format."""
    if not isinstance(coords, np.ndarray):
        coords = np.asarray(coords)

    if len(coords.shape) == 1:
        coords = coords.reshape([1, 2])

    if coords.shape[1] != 2:
        raise ValueError(
            "Coordinates should be Nx2 format or tuple/list for single location (x, y)"
        )

    return coords


def _normalise_coordinates(coordinates: Union[tuple, np.ndarray, dict]) -> dict:
    """Convert coordinates to consistent dict format."""
    if isinstance(coordinates, tuple):
        if len(coordinates) != 2:
            raise ValueError("Tuple coordinates must be (lon, lat)")
        return {1: np.asarray(coordinates)[None, :]}

    elif isinstance(coordinates, np.ndarray):
        if coordinates.shape[1] != 2:
            raise ValueError("Array coordinates must be Nx2")
        return {1: coordinates}

    elif isinstance(coordinates, dict):
        if len(coordinates) == 0:
            raise ValueError("No coordinates provided")
        return coordinates

    else:
        raise ValueError("Unsupported coordinate format")

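# The three accepted input forms, all normalised to {boundary_id: Nx2 array}
# (coordinates are illustrative):
single = _normalise_coordinates((153.0, -27.0))            # one (lon, lat) point
line = _normalise_coordinates(np.array([[153.0, -27.0],
                                        [153.1, -27.1]]))  # one polyline
multi = _normalise_coordinates({1: np.array([[153.0, -27.0]]),
                                2: np.array([[152.0, -25.0]])})  # per nodestring
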
def get_constituents(
    coordinates: Union[tuple, np.ndarray, dict],
    model_dir: Union[str, Path] = None,
    interpolate_method="spline",
    save_cons: Union[str, Path] = None,
    source="FES2014",
    extractor=None,
):
    """Get tidal constituents."""
    if extractor is None:
        extractor = _default_extractor

    coordinates = _normalise_coordinates(coordinates)

    cons_dict = dict()
    for bnd_id, coords in coordinates.items():
        coords = _check_coords(coords)

        if coords.shape[0] > 1:
            chainage, nx = _get_chainage_array(coords)
        else:
            chainage = 0
            nx = 1

        model_dir = _get_model_dir(source, model_dir)
        srcdir = model_dir / ".."

        files = [x for x in model_dir.rglob("*.nc")]
        file_cons = [f.stem for f in files]
        files = [files[i] for i in np.argsort(file_cons)]

        model = pyTMD.io.model(srcdir).elevation(source)
        cons = model.constituents

        if source in ("FES2014", "FES2022"):
            if len(files) != 34:
                raise ValueError(f"Cannot find 34 .nc files for {source}")

        print("... extracting constituents from database")

        amp, ph = extractor.extract_fes_constants(
            coords, files, source, interpolate_method
        )

        cons_dict[bnd_id] = dict(
            cons=(amp, ph, cons), geo=(coords, chainage, nx), source=source
        )

    if save_cons:
        with open(save_cons, "wb") as f:
            pickle.dump(cons_dict, f)

    return cons_dict

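# Usage sketch (paths and filenames are hypothetical): harmonic constituents
# for a single open-boundary point, cached to a pickle so later runs can skip
# the FES database lookup.
cons = get_constituents(
    (153.43, -27.33),               # (lon, lat)
    model_dir="/data/fes2014",      # or set $FES2014_DIR and omit this
    source="FES2014",
    save_cons="brisbane_cons.pkl",  # optional: reuse via `constituents=` later
)
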
def predict_waterlevel_timeseries(
    time_start: Union[str, pd.Timestamp, datetime],
    time_end: Union[str, pd.Timestamp, datetime],
    freq: Union[str, pd.Timedelta, timedelta] = "15min",
    coords: Union[tuple, np.ndarray, dict] = None,
    source: str = "FES2014",
    model_dir: Union[str, Path] = None,
    interpolate_method: str = "spline",
    constituents: Union[dict, str, Path] = None,
    extractor=None,
):
    """Extract tidal waterlevels for coordinates."""
    if extractor is None:
        extractor = _default_extractor

    timevec = pd.date_range(
        start=pd.Timestamp(time_start),
        end=pd.Timestamp(time_end),
        freq=freq,
    )

    # pyTMD "tide time": days since 1992-01-01 (convert nanoseconds to days)
    ref_date = pd.Timestamp(1992, 1, 1)
    tvec = (timevec - ref_date).to_numpy().astype(float) / (10**9 * 60 * 60 * 24)

    # Load constituents if needed
    if constituents is None:
        if coords is None:
            raise ValueError("Either coords or constituents must be provided")
        constituents = get_constituents(
            coords,
            model_dir,
            source=source,
            interpolate_method=interpolate_method,
            extractor=extractor,
        )
    else:
        if not isinstance(constituents, dict):
            constituents = Path(constituents)
            if not constituents.exists():
                raise FileNotFoundError(f"Constituents file not found: {constituents}")

            with open(constituents, "rb") as f:
                constituents = pickle.load(f)

    dsset = {}
    for label, dat in constituents.items():
        (amp, ph, cons) = dat["cons"]
        source = dat["source"]
        nx = amp.shape[0]

        # Calculate complex phase in radians
        cph = -1j * ph * np.pi / 180.0
        hc = amp * np.exp(cph)

        print("...Expanding timeseries")
        ha = np.ma.zeros((tvec.shape[0], nx))
        for j in tqdm(range(nx)):
            ha[:, j] = extractor.predict_tidal_timeseries(
                tvec, hc[j].reshape(1, len(cons)), cons
            )

        # Replace invalid (masked) points with the nearest valid point
        if hasattr(ha, "mask") and ha.mask.any():
            mask = ha.mask[0, :]
            real = np.where(~mask)[0]
            for idx, cond in enumerate(mask.tolist()):
                if cond:
                    nearest_idx = real[np.argmin((real - idx) ** 2)]
                    ha[:, idx] = ha[:, nearest_idx]

        # Create dataset
        if isinstance(dat["geo"][1], (int, float)):
            chain = np.array([dat["geo"][1]])
            squeeze = True
        else:
            chain = dat["geo"][1]
            squeeze = False

        ds = xr.Dataset(
            coords=dict(time=timevec, chainage=chain),
            data_vars=dict(
                wl=(("time", "chainage"), ha),
                longitude=(("chainage",), dat["geo"][0][:, 0]),
                latitude=(("chainage",), dat["geo"][0][:, 1]),
            ),
        )
        ds.attrs["name"] = label
        ds.attrs["source"] = source
        ds["chainage"].attrs = {"long_name": "projected chainage", "units": "m"}
        ds["wl"].attrs = {"long_name": "tidal waterlevel", "units": "m"}
        ds["longitude"].attrs = crs.cs_to_cf()[1]
        ds["latitude"].attrs = crs.cs_to_cf()[0]

        if squeeze:
            ds = ds.isel(chainage=0)

        dsset[label] = ds

    if len(dsset) == 1:
        return list(dsset.values())[0]
    else:
        return dsset

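# Usage sketch: expand the cached constituents from the sketch above into a
# waterlevel timeseries; an xr.Dataset is returned directly when there is a
# single boundary.
wl = predict_waterlevel_timeseries(
    "2023-01-01",
    "2023-02-01",
    freq="15min",
    constituents="brisbane_cons.pkl",  # pickle written by get_constituents
)
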
def ExtractTide(
    time_start: Union[str, pd.Timestamp, datetime],
    time_end: Union[str, pd.Timestamp, datetime],
    fname: Union[str, Path] = None,
    model_dir: Union[str, Path] = None,
    shapefile: Union[str, Path] = None,
    out_path: Path = Path("."),
    process_ids: Union[tuple, list] = None,
    freq: Union[str, pd.Timedelta, timedelta] = "15min",
    spacing: int = 2500,
    attrs=dict(),
    interpolate_method="spline",
    source=None,
    local_tz: Tuple[float, str] = None,
    constituents: Union[str, Path, dict] = None,
    write_netcdf: bool = True,
    write_fvc: bool = True,
    fvc_path: Path = None,
    nc_path_str: str = None,
    extractor=None,
):
    """Full workflow for tidal waterlevel extraction."""
    if extractor is None:
        extractor = _default_extractor

    out_path = _parse_path(out_path)
    time_start = _parse_date(time_start)
    time_end = _parse_date(time_end)

    # Extract constituents if not provided
    if constituents is None:
        if source is None:
            if model_dir is None:
                raise ValueError("Either source or model_dir must be provided")
            source, model_dir = _detect_tide_model_source(Path(model_dir))
            if source is None:
                raise ValueError(
                    "Could not detect tidal model source from model directory"
                )

        if shapefile is None:
            raise ValueError("Shapefile required when constituents not provided")

        shapefile = Path(shapefile)
        if not shapefile.exists():
            raise FileNotFoundError(f"Shapefile not found: {shapefile}")

        # Resolve the model directory (argument or environment variable) so it
        # can be reported below
        model_dir = _get_model_dir(source, model_dir)

        gdf = load_nodestring_shapefile(shapefile, process_ids=process_ids)
        ns_dat = process_nodestring_gdf(gdf, spacing=spacing)

        print("Running GetTide")
        print("--------------------")
        print("Confirming Request:")
        print(f"...Time Start: {time_start.strftime('%Y-%m-%d %H:%M')}")
        print(f"...Time End: {time_end.strftime('%Y-%m-%d %H:%M')}")
        print(f"...Model Dir: {model_dir.absolute().as_posix()}")
        print(f"...Tidal Data Source: {source}")
        print(f"...Nodestring Name: {shapefile.name}")
        print(f"...Nodestring Folder: {shapefile.parent.absolute().as_posix()}")
        print(f"...Nodestring IDs to Process: {list(ns_dat.keys())}")
        print(f"...Nodestring CRS (EPSG): {gdf.crs.to_epsg()}")
        print(f"...Nodestring Spacing: {spacing:0.1f}m")

        constituents = get_constituents(
            ns_dat,
            model_dir,
            source=source,
            interpolate_method=interpolate_method,
            extractor=extractor,
        )
    else:
        if isinstance(constituents, (str, Path)):
            with open(constituents, "rb") as f:
                constituents = pickle.load(f)
            print("...Using pre-extracted constituents file")
        # Get source from constituents
        k = list(constituents.keys())[0]
        source = constituents[k]["source"]

    if local_tz is None:
        print("...Timezone: UTC (GMT+0.0)")
    else:
        print("...Timezone: UTC (GMT+0.0) AND")
        print(f"...Local Timezone: {local_tz[0]:0.1f}, {local_tz[1]}")

    ns_wlev = predict_waterlevel_timeseries(
        time_start, time_end, freq=freq, constituents=constituents, extractor=extractor
    )

    # Convert to dict if single dataset
    if isinstance(ns_wlev, xr.Dataset):
        ns_wlev = {ns_wlev.attrs["name"]: ns_wlev}

    # Set filename
    if fname is None:
        tsstr = time_start.strftime("%Y%m%d")
        testr = time_end.strftime("%Y%m%d")

        fname = f"{source.upper()}_TIDE_{tsstr}_{testr}.nc"

    if local_tz is not None:
        tzlbl = local_tz[1]
        # Insert the timezone label before the .nc extension
        fname = fname.replace(".nc", f"_{tzlbl}.nc")

    outname = out_path / fname

    # Write netcdf
    print(f"Writing dataset: {fname}")
    nc = _netcdf_writer(
        constituents,
        ns_wlev,
        outname,
        time_start,
        time_end,
        freq,
        source,
        write_netcdf=write_netcdf,
        local_tz=local_tz,
        attrs=attrs,
    )

    # TODO: This is hardcoded, but we only have FES support so it's ok for now.
    # We must change this if we add other tidal models.
    info_url = "https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes.html"

    # Write FVC control file
    if write_fvc:
        nc_path_str = outname.as_posix() if not nc_path_str else nc_path_str
        fvc_fname = fname.replace(".nc", ".fvc")
        fvc_path = out_path if not fvc_path else fvc_path

        write_tide_fvc(
            nc,
            nc_path=nc_path_str,
            output_path=fvc_path,
            filename=fvc_fname,
            source=source,
            info_url=info_url,
        )

    return nc

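# Usage sketch (all inputs hypothetical): the full workflow from a nodestring
# shapefile to a TUFLOW-FV-ready netCDF plus a .fvc boundary-condition snippet.
nc = ExtractTide(
    "2023-01-01",
    "2023-02-01",
    model_dir="/data/fes2022",           # or source="FES2022" with $FES2022_DIR
    shapefile="model/gis/open_bnd.shp",  # nodestring polyline(s)
    out_path=Path("bc_dbase/tide"),
    spacing=2500,                        # resample nodestrings to 2.5 km
    local_tz=(10.0, "AEST"),             # optional local-time variable
)
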
def _netcdf_writer(
    constituents: dict,
    ns_wlev: dict,
    outname: Path,
    time_start: pd.Timestamp,
    time_end: pd.Timestamp,
    freq: str,
    source: str,
    local_tz=None,
    attrs=dict(),
    write_netcdf=True,
):
    """Write the netcdf file."""
    encoding = dict()

    timevec = pd.date_range(
        start=pd.Timestamp(time_start),
        end=pd.Timestamp(time_end),
        freq=freq,
    )

    ns = list(ns_wlev.keys())

    nc = xr.Dataset(
        coords=dict(time=timevec), attrs=attrs.copy()  # Copy to avoid mutable default
    )
    nc.attrs["source"] = source
    nc["time"].attrs["tz"] = "UTC"
    encoding["time"] = dict(dtype=np.float64, units="days since 1990-01-01 00:00:00")

    # Add local time if requested
    if local_tz is not None:
        tz_offset = local_tz[0]
        tz_name = local_tz[1]
        if tz_name is None:
            tz_name = f"GMT{tz_offset:+}"

        nc["local_time"] = (("time",), timevec + pd.Timedelta(tz_offset, unit="h"))
        nc["local_time"].attrs = dict(tz=tz_name)
        encoding["local_time"] = dict(
            dtype=np.float64, units="days since 1990-01-01 00:00:00"
        )

    for bnd_id in ns_wlev.keys():
        coords, chainage, nx = constituents[bnd_id]["geo"]
        wl_data = ns_wlev[bnd_id]["wl"]

        dimstr = f"ns{bnd_id}_chain"
        chnstr = f"ns{bnd_id}_chainage"
        varstr = f"ns{bnd_id}_wl"

        # Ensure chainage is always an array, even for single points
        chainage_array = np.asarray(chainage).astype(np.float32)
        if chainage_array.ndim == 0:  # scalar case
            chainage_array = np.array([chainage_array])

        nc[chnstr] = ((dimstr,), chainage_array)
        nc[chnstr].attrs["units"] = "m"
        nc[chnstr].attrs["long_name"] = f"nodestring {bnd_id} chainage"

        # Handle water level data dimensions properly
        if wl_data.ndim == 1:  # Single point case (squeezed)
            # Reshape to (time, 1) for single chain point
            wl_values = wl_data.values.reshape(-1, 1).astype(np.float32)
        else:  # Multiple points case
            wl_values = wl_data.values.astype(np.float32)

        nc[varstr] = (("time", dimstr), wl_values)
        nc[varstr].attrs["units"] = "m"
        nc[varstr].attrs["long_name"] = f"nodestring {bnd_id} waterlevel"

        nc[f"ns{bnd_id}_longitude"] = ((dimstr,), coords[:, 0], crs.cs_to_cf()[1])
        nc[f"ns{bnd_id}_latitude"] = ((dimstr,), coords[:, 1], crs.cs_to_cf()[0])
        nc[f"ns{bnd_id}_longitude"].attrs["description"] = "X extraction coordinate"
        nc[f"ns{bnd_id}_latitude"].attrs["description"] = "Y extraction coordinate"

        encoding[chnstr] = dict(zlib=True, complevel=1, dtype=np.float32)
        encoding[varstr] = dict(zlib=True, complevel=1, dtype=np.float32)

    if write_netcdf:
        nc.to_netcdf(outname, encoding=encoding)

    return nc

# Keep the deprecated function for backwards compatibility
@deprecated(ExtractTide)
def gen_tfv_tide_netcdf(*args, **kwargs):
    """Deprecated - use ExtractTide instead."""
    # Convert old parameter names to new ones
    if "outname" in kwargs:
        # Split the old combined path into the new fname/out_path pair
        outname = Path(kwargs.pop("outname"))
        kwargs["fname"] = outname.name
        kwargs["out_path"] = outname.parent
    if "version" in kwargs:
        kwargs["source"] = kwargs.pop("version")
    if "cons_file" in kwargs:
        kwargs["constituents"] = kwargs.pop("cons_file")

    return ExtractTide(*args, **kwargs)

@@ -0,0 +1,78 @@ tfv_get_tools/utilities/_tfv_bc.py
import xarray as xr
import pandas as pd
from pathlib import Path
import numpy as np


def write_tuflowfv_fvc(
    ds: xr.Dataset,
    fname: str,
    out_path: Path,
    reproject=False,
    local_time=False,
    var_order=["surf_el", "water_u", "water_v", "salinity", "water_temp"],
):
    """Write a TUFLOW-FV .fvc file to accompany the merged dataset.

    Args:
        ds (xr.Dataset): merged ocean dataset to supply headers
        fname (str): the filename of the .nc merged dataset
        out_path (Path): output directory
        reproject (bool): whether the dataset has been reprojected to x/y
        local_time (bool): whether a local_time variable has been added
        var_order (list): boundary-condition variable names, in header order
    """

    # Check to see if ds has been reprojected. Use x/y if so.
    if reproject:
        xvar = "x"
        yvar = "y"
    else:
        xvar = "longitude"
        yvar = "latitude"

    # Check to see if local_time has been added - use local_time if so.
    if local_time:
        time = "local_time"
    else:
        time = "time"

    xlims = ", ".join([f"{fn(ds[xvar].values):0.4f}" for fn in [np.min, np.max]])
    ylims = ", ".join([f"{fn(ds[yvar].values):0.4f}" for fn in [np.min, np.max]])

    vlist = ",".join(var_order)

    nc_path = (out_path / fname).as_posix()
    fname_fvc = fname.replace(".nc", ".fvc")
    with open(out_path / fname_fvc, "w") as f:
        f.write("! TUFLOW-FV FVC File for Ocean Dataset\n")
        f.write("! Written by `get_ocean`\n")
        f.write("\n")
        f.write(f"! Netcdf start time: {ds_time_to_str(ds, 0)}\n")
        f.write(f"! Netcdf end time: {ds_time_to_str(ds, -1)}\n")
        f.write(f"! Netcdf x-limits: {xlims}\n")
        f.write(f"! Netcdf y-limits: {ylims}\n")
        f.write("\n")

        # Grid definition block
        f.write(f"grid definition file == {nc_path}\n")
        f.write(f" grid definition variables == {xvar}, {yvar}, depth\n")
        f.write(" grid definition label == ocean\n")
        f.write(" boundary gridmap == 1\n")
        f.write("end grid\n")
        f.write("\n")

        f.write(f"bc == OBC_GRID, ocean, {nc_path}\n")
        f.write(" bc nodestrings == # ! Please supply open boundary ns list\n")
        f.write(" sub-type == 6\n")
        f.write(f" bc header == {time},{vlist}\n")
        f.write(" bc update dt == 900.\n")
        f.write(" bc time units == hours\n")
        f.write(" bc reference time == 01/01/1990 00:00\n")
        f.write(" bc offset == -0.5, 0.0, 0.0, 0.0, 0.0 ! Replace -0.5 with relevant surf-el offset\n")
        f.write(" bc default == NaN\n")
        f.write(" vertical coordinate type == depth\n")
        f.write("end bc\n")
        f.write("\n")


def ds_time_to_str(ds: xr.Dataset, i: int, fmt="%Y-%m-%d %H:%M") -> str:
    """Format the i-th timestamp of `ds` for the .fvc comments."""
    return pd.Timestamp(ds.time[i].values).strftime(fmt)
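# Usage sketch (the file names are hypothetical): write the .fvc stub alongside
# a previously merged ocean netCDF.
ds = xr.open_dataset("bc_dbase/ocean/HYCOM_OCEAN_20230101_20230201.nc")
write_tuflowfv_fvc(ds, "HYCOM_OCEAN_20230101_20230201.nc", Path("bc_dbase/ocean"))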