tfv-get-tools 0.2.4__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -61,12 +61,12 @@ STDVARS = {
  "water_temp": {
  "long_name": "sea_water_temperature",
  "standard_name": "sea_water_temperature",
- "units": "K"
+ "units": "degC"
  },
  "salinity": {
  "long_name": "sea_water_salinity",
  "standard_name": "sea_water_practical_salinity",
- "units": "1e-3"
+ "units": "psu"
  },
  # Wave parameters
  "hs": {
@@ -17,7 +17,7 @@ class CLIBase(ABC):
  self.parser = argparse.ArgumentParser(
  prog=prog_name,
  description=description,
- epilog="See '<command> --help' to read about a specific sub-command.",
+ epilog="See '<command> --help' to read about a specific sub-command",
  )
  self.subparsers = self.parser.add_subparsers(
  dest="command", help="Sub-commands"
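For readers unfamiliar with the pattern, `CLIBase` is the standard argparse sub-command skeleton: one top-level parser, one sub-parser per command, and `set_defaults(func=...)` for dispatch. A minimal self-contained sketch of that shape (the handler is illustrative, not the package's actual class):

```python
import argparse

parser = argparse.ArgumentParser(
    prog="GetOcean",
    epilog="See '<command> --help' to read about a specific sub-command",
)
subparsers = parser.add_subparsers(dest="command", help="Sub-commands")

# One sub-parser per command; each binds its own handler via set_defaults.
download = subparsers.add_parser("A", help="Download raw data")
download.set_defaults(func=lambda args: print("downloading..."))

args = parser.parse_args(["A"])
args.func(args)  # dispatch to the selected sub-command handler
```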
@@ -53,7 +53,15 @@ class CLIBase(ABC):
  default=".",
  type=self.dir_path,
  help="Output directory, needs to exist first",
+ )
+ parser.add_argument(
+ "-pf",
+ "--prefix",
+ type=str,
+ default=None,
+ help="Filename prefix"
  )
+
  self.add_source_arguments(parser)
  parser.set_defaults(func=self.run_download)
 
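The new `-pf/--prefix` option is threaded through to the downloaders as `prefix=args.prefix` (see the GetAtmos and GetOcean hunks further below). A hypothetical sketch of how such a prefix is typically folded into output filenames; `build_filename` is illustrative, not the package's actual helper:

```python
from datetime import date
from typing import Optional

def build_filename(source: str, day: date, prefix: Optional[str] = None) -> str:
    # Hypothetical helper: prepend the optional user prefix to a standard stem.
    stem = f"{source}_{day:%Y%m%d}.nc"
    return f"{prefix}_{stem}" if prefix else stem

print(build_filename("HYCOM", date(2011, 1, 1)))                      # HYCOM_20110101.nc
print(build_filename("HYCOM", date(2011, 1, 1), prefix="studysite"))  # studysite_HYCOM_20110101.nc
```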
@@ -42,8 +42,8 @@ def print_atmos_info():
 
  3. BoM's "BARRA2"
  - "R2" - An 11-km grid reanalysis covering Australia
- - "C2" - An 4-km grid reanalysis covering Australia, with only wind and pressure fields downloaded.
- - "RE2" - (Testing Only) An experimental ensembles 22-km grid covering Australia.
+ - "C2" - A 4-km grid reanalysis covering Australia, with only wind and pressure fields downloaded
+ - "RE2" - (Testing Only) An experimental ensemble 22-km grid covering Australia
 
  Example Usage:
  ---------
@@ -114,25 +114,19 @@ class GetAtmos(CLIBase):
  """
  super().add_source_arguments(parser)
 
- parser.add_argument(
- '--info',
- action="store_true",
- help="Display the full program help"
- )
-
  parser.add_argument(
  "-s",
  "--source",
  type=str,
  default="ERA5",
- help='Atmos data source. Default = "ERA5". Optionally others see wiki.',
+ help='Atmos data source. Default = "ERA5". Other options: "CFSR", "BARRA2"',
  )
  parser.add_argument(
  "-m",
  "--model",
  type=str,
  default="default",
- help='Model from source. Default is "default". Optionally others see wiki.',
+ help='Model from source. Default is "default". Refer to GetAtmos --info',
  )
  parser.add_argument(
  "--test",
@@ -155,8 +149,10 @@ class GetAtmos(CLIBase):
  xlims,
  ylims,
  source=args.source,
+ model=args.model,
  out_path=args.path,
  TEST_MODE=args.test,
+ prefix=args.prefix,
  )
 
  def run_merge(self, args):
@@ -189,4 +185,8 @@ class GetAtmos(CLIBase):
  local_tz=tz,
  wrapto360=args.wrapto360,
  pad_dry=args.pad_dry,
- )
+ )
+
+ # For debugging
+ if __name__ == "__main__":
+ entry()
@@ -38,7 +38,8 @@ def print_ocean_info():
  1. (Default) "HYCOM" - Global Ocean Forecasting System
  - "default" - Global ocean analysis and forecast system
 
- 2. Other sources available - see documentation
+ 2. "Copernicus" - Copernicus Marine Service
+ - "default" - Global ocean analysis and forecast system
 
  Example Usage:
  ---------
@@ -89,7 +90,7 @@ class GetOcean(CLIBase):
  "--timestep",
  default=24,
  type=int,
- help="Download timestep interval in hours, only relevant for HYCOM. Must be a multiple of 3 (highest resolution available). Default is 24 (daily). All sources other than HYCOM are downloaded in the best available time resolution.",
+ help="Download timestep interval in hours, only relevant for HYCOM. Must be a multiple of 3 (highest resolution available). Default is 24 (daily). All sources other than HYCOM are downloaded in the best available time resolution",
  )
 
  dparser.add_argument(
@@ -98,7 +99,7 @@ class GetOcean(CLIBase):
  nargs=2,
  type=float,
  default=None,
- help='Minimum and maximum depth "zmin zmax". Defaults to the maximum for source.',
+ help='Minimum and maximum depth "zmin zmax". Defaults to the maximum for source',
  )
 
  self.add_info_parser()
@@ -127,25 +128,19 @@ class GetOcean(CLIBase):
  """
  super().add_source_arguments(parser)
 
- parser.add_argument(
- '--info',
- action="store_true",
- help="Display the full program help"
- )
-
  parser.add_argument(
  "-s",
  "--source",
  type=str,
  default="HYCOM",
- help='Ocean data source. Default = "HYCOM". Optionally others see wiki.',
+ help='Ocean data source. Default = "HYCOM". Other option: "Copernicus"',
  )
  parser.add_argument(
  "-m",
  "--model",
  type=str,
  default="default",
- help='Model from source. Default is "default". Optionally others see wiki.',
+ help='Model from source. Default is "default". Refer to GetOcean --info',
  )
 
  def run_download(self, args):
@@ -169,6 +164,7 @@ class GetOcean(CLIBase):
  time_interval=args.timestep,
  source=args.source,
  out_path=args.path,
+ prefix=args.prefix,
  )
 
  def run_merge(self, args):
@@ -70,15 +70,17 @@ def entry():
  )
 
  parser.add_argument(
+ "-s",
  "--source",
  type=str,
  default=None,
- help="Tidal model source name. This will attempt to be automatically detected from model_dir, but it can be overriden if there are issues."
+ help="Tidal model source name. This will attempt to be automatically detected from model_dir, but it can be overridden if there are issues"
  )
  parser.add_argument(
  "-fvc",
  "--write_fvc",
  action="store_true",
+ default=True,  # note: combined with store_true this is always True; it cannot be unset from the CLI
  help="Write a TUFLOW FV '.fvc' file to accompany extracted tide dataset",
  )
 
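One thing worth flagging in the `--write_fvc` change: `action="store_true"` together with `default=True` leaves no way to switch the flag off from the command line. If an overridable default is the intent, `argparse.BooleanOptionalAction` (Python 3.9+) is the usual pattern; a minimal sketch:

```python
import argparse

parser = argparse.ArgumentParser()
# Generates both --write_fvc and --no-write_fvc, so the True default
# can actually be turned off from the CLI.
parser.add_argument("--write_fvc", action=argparse.BooleanOptionalAction, default=True)

print(parser.parse_args([]).write_fvc)                  # True
print(parser.parse_args(["--no-write_fvc"]).write_fvc)  # False
```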
@@ -104,9 +106,9 @@ def entry():
 
 
  ExtractTide(
+ args.time_start,
+ args.time_end,
  args.out,
- args.tstart,
- args.tend,
  model_dir,
  shapefile=args.nodestring,
  freq=args.freq,
@@ -116,3 +118,6 @@ def entry():
  write_fvc=args.write_fvc,
  )
 
+ # For debugging
+ if __name__ == "__main__":
+ entry()
@@ -35,6 +35,9 @@ class AtmosFVCWriter(FVCWriter):
  "prate": {"tfv_var": "PRECIP_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
  "dlwrf": {"tfv_var": "LW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
  "dswrf": {"tfv_var": "SW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
+ "avg_tprate": {"tfv_var": "PRECIP_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
+ "avg_sdlwrf": {"tfv_var": "LW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
+ "avg_sdswrf": {"tfv_var": "SW_RAD_GRID", "bc_scale": 1.0, "bc_offset": 0.0},
  }
 
  def __init__(self, var_mappings: Optional[Dict] = None, source=None, model=None, info_url=None):
@@ -106,10 +109,20 @@ class AtmosFVCWriter(FVCWriter):
  f" BC Header == {self.time_var}, {var_name}",
  ]
  )
- if bc_scale != 1.0:
- lines.append(f" BC Scale == {bc_scale}")
- if bc_offset != 0.0:
- lines.append(f" BC Offset == {bc_offset}")
+ if var_name in ("prate", "avg_tprate"):
+ bc_scale = 86.4
+ lines.append(f" BC Scale == {bc_scale} ! Conversion: [kg m-2 s-1] to [m day-1]")
+ elif var_name == "mslp":
+ lines.append(f" BC Scale == {bc_scale} ! Conversion: pascal [Pa] to hectopascal [hPa]")
+ if bc_offset != 0.0:
+ lines.append(f" BC Offset == {bc_offset}")
+ elif var_name == "t2m":
+ lines.append(f" BC Offset == {bc_offset} ! Conversion: kelvin [K] to Celsius [C]")
+ else:
+ if bc_scale != 1.0:
+ lines.append(f" BC Scale == {bc_scale}")
+ if bc_offset != 0.0:
+ lines.append(f" BC Offset == {bc_offset}")
 
  # Common BC settings
  lines.extend(
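The hard-coded 86.4 follows from unit arithmetic: 1 kg m-2 of water is a 1 mm layer (taking a density of 1000 kg m-3), so 1 kg m-2 s-1 equals 86400 mm day-1, i.e. 86.4 m day-1. A quick check:

```python
SECONDS_PER_DAY = 86400
MM_PER_KG_M2 = 1.0   # 1 kg of water spread over 1 m^2 is a 1 mm layer
M_PER_MM = 1e-3

scale = SECONDS_PER_DAY * MM_PER_KG_M2 * M_PER_MM  # [kg m-2 s-1] -> [m day-1]
print(scale)  # 86.4
```

The mslp and t2m branches presumably rely on `bc_scale` and `bc_offset` values from the variable mapping (0.01 for Pa to hPa, -273.15 for K to degC); the comments added in the f-strings document those conversions in the written `.fvc` file.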
@@ -184,7 +184,7 @@ class MergeBARRA2(BaseMerger):
  # Merge the common start_dates first, then concatenate by time afterwards
  dssetm = []
  for v in dsset.values():
- dssetm.append(xr.merge(v))
+ dssetm.append(xr.merge(v,compat="override"))
 
  ds = xr.concat(
  dssetm,
@@ -184,7 +184,7 @@ class MergeCFSRAtmos(BaseMerger):
  kwargs=dict(fill_value='extrapolate'))
  for ds in date_group
  ]
- merged_by_date.append(xr.merge(interpolated, compat='override'))
+ merged_by_date.append(xr.merge(interpolated, compat='override',join='outer'))
 
  # Concatenate along time dimension
  merged = xr.concat(merged_by_date, dim="time", combine_attrs="override",
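Both merger changes relax xarray's merge checks: `compat="override"` takes conflicting variables from the first object instead of raising, and `join='outer'` keeps the union of coordinates (the default for `xr.merge`, made explicit here). A small self-contained illustration:

```python
import xarray as xr

a = xr.Dataset({"u": ("x", [1.0, 2.0]), "mask": ("x", [0, 1])}, coords={"x": [0, 1]})
b = xr.Dataset({"v": ("x", [3.0, 4.0]), "mask": ("x", [1, 1])}, coords={"x": [0, 1]})

# The default compat ("no_conflicts") raises MergeError because "mask" disagrees;
# compat="override" keeps the first dataset's "mask" without comparing values.
merged = xr.merge([a, b], compat="override", join="outer")
print(merged["mask"].values)  # [0 1]
```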
@@ -23,6 +23,11 @@ class DownloadHycom(BaseDownloader):
  self.source = "HYCOM"
  self.mode = "OCEAN"
  self._load_config()
+
+ # --- ESPC-D-V02 handling (post-2024-08-10) ---
+ self.espc_v02_cutoff = pd.Timestamp("2024-08-10 00:00:00")
+ self._time_interval_user = self.time_interval # preserve user-configured interval
+ self._printed_time_override_msg = False
 
  # Dictionary to cache database coordinate information
  self.db_cache = {}
@@ -170,6 +175,19 @@ class DownloadHycom(BaseDownloader):
  def download(self):
  """HYCOM-specific download loop - yields tasks for new base class"""
  for date in self.times:
+
+ # Enforce 'best' time interval for ESPC-D-V02 era (post 2024-08-10)
+ if pd.Timestamp(date) >= self.espc_v02_cutoff:
+ if not self._printed_time_override_msg:
+ print("... HYCOM ESPC-D-V02 detected (post-2024-08-10).")
+ print(f"... Overriding user-defined time_interval ('{self._time_interval_user}') "
+ f"with 'best' to preserve native sub-daily resolution.")
+ self._printed_time_override_msg = True
+
+ self.time_interval = "best"
+ else:
+ self.time_interval = self._time_interval_user
+
  # Get database mapping for this date
  db_info = self._get_database(date)
  if db_info is None:
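The override boils down to a date comparison against the ESPC-D-V02 cutoff; a condensed sketch of the same logic (the function name is illustrative):

```python
import pandas as pd

ESPC_V02_CUTOFF = pd.Timestamp("2024-08-10 00:00:00")

def effective_interval(date, user_interval):
    # Post-cutoff HYCOM (ESPC-D-V02) is served sub-daily, so the native
    # "best" resolution is forced regardless of the configured interval.
    return "best" if pd.Timestamp(date) >= ESPC_V02_CUTOFF else user_interval

print(effective_interval("2024-09-01", 24))  # 'best'
print(effective_interval("2023-01-01", 24))  # 24
```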
@@ -574,8 +592,13 @@ class MergeHYCOM(BaseMerger):
 
  print("Concatenating and interpolating xarray dataset")
  if has_post_cutoff_data and apply_tidal_filtering:
- print('... Dataset contains sub-daily data post-2024-08-10 (HYCOM ESPC-D-V02), applying tidal filtering using a simple 25h rolling mean.')
- print('... Warning: Your dataset should be padded at least 1 full day either side before using in TUFLOW FV.')
+ print("... Dataset contains sub-daily HYCOM data post-2024-08-10 (ESPC-D-V02).")
+ print("... Applying a centred ~25-hour low-pass filter to remove tidal variability.")
+ print("... Warning: Filtered values within ~12 hours of the dataset start/end are not reliable")
+ print("... without sufficient temporal padding.")
+ print("... Ensure at least ±1 day of data is included beyond the model forcing period.")
+ print("... For example, if forcing is required from 01/01/2025 to 01/02/2025,")
+ print("... download data from 31/12/2024 to 02/02/2025.")
 
  # Merge variables for each start date group
  merged_by_date = []
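A centred 25-hour rolling mean on hourly data is a common low-pass choice because the window spans both the semidiurnal and diurnal tidal bands. A minimal sketch of the filter, and of why the edge padding matters (synthetic data, not the package's exact implementation):

```python
import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range("2025-01-01", periods=96, freq="h")
# Synthetic M2-like signal (12.42 h period) sampled hourly.
tide = xr.DataArray(np.sin(2 * np.pi * np.arange(96) / 12.42),
                    dims="time", coords={"time": time})

# Centred 25-point (~25 h) window; incomplete windows yield NaN.
filtered = tide.rolling(time=25, center=True).mean()

# The first and last ~12 hours are NaN, hence the advice above to download
# at least one extra day either side of the forcing period.
print(int(filtered.isnull().sum()))  # 24
```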
@@ -224,7 +224,7 @@ class MergeERA5Wave(BaseMerger):
 
  # Concatenate and clean up
  merged = xr.concat(datasets, dim="time", combine_attrs="override",
- data_vars="minimal", coords="minimal", compat="override")
+ data_vars="minimal", coords="minimal", compat="override", join='outer')
 
  # Remove duplicates and sort
  merged = merged.sortby("time")
@@ -1,12 +1,26 @@
  """
- GetTide. Only supports FES2014 and FES2022.
+ GetTide. Supports FES2014, FES2014_extrapolated, FES2022 and FES2022_extrapolated.
+
+ Model-key to subdirectory mapping (explicit):
+ - FES2014 -> fes2014/ocean_tide
+ - FES2014_extrapolated -> fes2014/ocean_tide_extrapolated
+ - FES2022 -> fes2022b/ocean_tide_20241025
+ - FES2022_extrapolated -> fes2022b/ocean_tide_extrapolated
+
+ Notes:
+ - pyTMD "model keys" may include extrapolated variants depending on pyTMD version.
+ If an extrapolated key is not present in pyTMD's database, this module falls back
+ to the non-extrapolated key for reading the constituent list, while still reading
+ the extrapolated NetCDF files from disk.
+ - pyTMD FES extractor VERSION must be FES2014 or FES2022. For extrapolated model keys,
+ VERSION is derived by stripping the "_extrapolated" suffix.
  """
 
  from datetime import datetime, timedelta
  from pathlib import Path
  import pickle
  import os
- from typing import Union, Tuple, Optional
+ from typing import Union, Tuple
 
  import numpy as np
  import pandas as pd
@@ -30,18 +44,128 @@ from tfv_get_tools.fvc import write_tide_fvc
 
  crs = CRS.from_epsg(4326)
 
+ # Model keys supported by this module (pyTMD database keys)
+ VALID_MODEL_KEYS = {"FES2014", "FES2014_extrapolated", "FES2022", "FES2022_extrapolated"}
+ # FES "versions" supported by pyTMD FES extractor
+ VALID_FES_VERSIONS = {"FES2014", "FES2022"}
+
+
+ def _fes_version_from_model_key(model_key: str) -> str:
+ """Map pyTMD model key -> FES extractor VERSION."""
+ if model_key not in VALID_MODEL_KEYS:
+ raise ValueError(
+ f"Requested source {model_key} not supported. "
+ f"Valid sources: {VALID_MODEL_KEYS}"
+ )
+ if model_key.endswith("_extrapolated"):
+ return model_key.replace("_extrapolated", "")
+ return model_key
+
+
+ def _pytmd_root_from_any_model_path(path: Path) -> Path:
+ """
+ Resolve the pyTMD root directory expected by pyTMD.io.model(...).
+
+ pyTMD database paths for FES are typically relative to a directory that contains
+ 'fes2014/' or 'fes2022b/' etc. Therefore, we want the parent directory ABOVE the
+ fes folder.
+ """
+ path = Path(path).resolve()
+
+ # If user passed an ocean_tide-style folder, promote to fes directory first
+ if path.name in (
+ "ocean_tide",
+ "ocean_tide_extrapolated",
+ "ocean_tide_20241025",
+ "load_tide",
+ "load_tide_extrapolated",
+ ):
+ fes_dir = path.parent
+ else:
+ fes_dir = path
+
+ # If we are sitting in a known fes directory, root is its parent
+ if fes_dir.name.lower() in ("fes2014", "fes2022b", "fes2022"):
+ return fes_dir.parent
+
+ # Otherwise: scan upward for a folder that looks like the fes directory
+ for parent in [fes_dir] + list(fes_dir.parents):
+ if parent.name.lower() in ("fes2014", "fes2022b", "fes2022"):
+ return parent.parent
+
+ # Fallback: one level up (best effort)
+ return fes_dir.parent
+
+
+ def _select_constituent_dir(model_dir: Path, model_key: str) -> Path:
+ """
+ Choose the directory that contains the 34 elevation constituent NetCDF files,
+ using an explicit model_key -> subdirectory mapping:
+
+ - FES2014 -> ocean_tide
+ - FES2014_extrapolated -> ocean_tide_extrapolated
+ - FES2022 -> ocean_tide_20241025
+ - FES2022_extrapolated -> ocean_tide_extrapolated
+
+ The caller may supply:
+ - the constituent folder directly (must match the mapping)
+ - the FES folder (fes2014 / fes2022b), in which case the mapped subfolder is used
+ - a higher-level root containing the FES folder(s), which will be located
+ """
+ model_dir = Path(model_dir).resolve()
+
+ subdir_map = {
+ "FES2014": "ocean_tide",
+ "FES2014_extrapolated": "ocean_tide_extrapolated",
+ "FES2022": "ocean_tide_20241025",
+ "FES2022_extrapolated": "ocean_tide_extrapolated",
+ }
+
+ wanted = subdir_map[model_key].lower()
+ here = model_dir.name.lower()
+
+ # If user passed the constituent directory directly, accept only if it matches
+ if here == wanted:
+ return model_dir
+
+ # If user passed the FES directory, append wanted subfolder
+ if here in ("fes2014", "fes2022b", "fes2022"):
+ candidate = (model_dir / subdir_map[model_key]).resolve()
+ if candidate.exists():
+ return candidate
+ raise FileNotFoundError(
+ f"Expected subfolder '{subdir_map[model_key]}' was not found under {model_dir.as_posix()}"
+ )
+
+ # Otherwise treat as a higher-level root and locate the correct FES folder
+ fes_folder = "fes2022b" if model_key.startswith("FES2022") else "fes2014"
+ fes_candidates = list(model_dir.rglob(fes_folder))
+ if not fes_candidates:
+ raise FileNotFoundError(
+ f"Could not locate '{fes_folder}' under {model_dir.as_posix()}"
+ )
+
+ fes_dir = fes_candidates[0].resolve()
+ candidate = (fes_dir / subdir_map[model_key]).resolve()
+ if candidate.exists():
+ return candidate
+
+ raise FileNotFoundError(
+ f"Expected subfolder '{subdir_map[model_key]}' was not found under {fes_dir.as_posix()}"
+ )
+
 
  class TidalExtractor:
  """Wrapper for PyTMD operations to enable testing."""
 
- def extract_fes_constants(self, coords, files, source, interpolate_method):
+ def extract_fes_constants(self, coords, files, fes_version, interpolate_method):
  """Extract FES constants - wrapped for testing."""
  return extract_FES_constants(
  coords[:, 0],
  coords[:, 1],
  files,
  TYPE="z",
- VERSION=source,
+ VERSION=fes_version,
  METHOD=interpolate_method,
  GZIP=False,
  SCALE=1.0 / 100.0,
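Taken together, the three new helpers turn whatever directory the user supplies into the two paths the extractor needs. A hypothetical walk-through, assuming the helpers above are in scope and an illustrative `/data/tides` layout:

```python
from pathlib import Path

root = Path("/data/tides")  # assumed to contain fes2022b/ocean_tide_extrapolated/

cons_dir = _select_constituent_dir(root, "FES2022_extrapolated")
# -> /data/tides/fes2022b/ocean_tide_extrapolated

srcdir = _pytmd_root_from_any_model_path(cons_dir)
# -> /data/tides, the root that pyTMD.io.model() resolves database paths against

version = _fes_version_from_model_key("FES2022_extrapolated")
# -> "FES2022", the VERSION accepted by the FES extractor
```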
@@ -57,39 +181,39 @@ _default_extractor = TidalExtractor()
 
 
  def _detect_tide_model_source(model_dir: Path):
- """Detect tidal model source based on model_dir."""
- original_model_dir = model_dir
- name = model_dir.name
- if name == "ocean_tide":
- model_dir = (model_dir / "..").resolve()
- name = model_dir.name
-
- if "fes2014" in name.lower():
- source = "FES2014" # Return uppercase to match VALID_SOURCES
- elif "fes2022" in name.lower():
- source = "FES2022" # Return uppercase to match VALID_SOURCES
- else:
- source = None
+ """Detect tidal model key based on model_dir."""
+ original_model_dir = Path(model_dir)
+ model_dir = original_model_dir.resolve()
 
- # Return the resolved model_dir only if we actually resolved it
- if original_model_dir.name == "ocean_tide":
- return source, model_dir
- else:
- return source, original_model_dir
+ search_names = {
+ model_dir.name.lower(),
+ model_dir.parent.name.lower(),
+ str(model_dir).lower(),
+ }
 
+ if any("fes2014" in s for s in search_names):
+ if any("extrap" in s for s in search_names):
+ return "FES2014_extrapolated", original_model_dir
+ return "FES2014", original_model_dir
 
- def _get_model_dir(source="FES2014", model_dir: Union[str, Path] = None) -> Path:
- """Get and validate model directory."""
- VALID_SOURCES = {"FES2014", "FES2022"}
+ if any("fes2022" in s for s in search_names):
+ if any("extrap" in s for s in search_names):
+ return "FES2022_extrapolated", original_model_dir
+ return "FES2022", original_model_dir
+
+ return None, original_model_dir
 
- if source not in VALID_SOURCES:
+
+ def _get_model_dir(fes_version="FES2014", model_dir: Union[str, Path] = None) -> Path:
+ """Get and validate model directory."""
+ if fes_version not in VALID_FES_VERSIONS:
  raise ValueError(
- f"Requested source {source} not supported. "
- f"Valid sources: {VALID_SOURCES}"
+ f"Requested FES version {fes_version} not supported. "
+ f"Valid versions: {VALID_FES_VERSIONS}"
  )
 
  if model_dir is None:
- env = f"{source}_DIR"
+ env = f"{fes_version}_DIR"
  if env not in os.environ:
  raise ValueError(
  f"The {env} root directory needs to be supplied, either as "
@@ -101,7 +225,7 @@ def _get_model_dir(source="FES2014", model_dir: Union[str, Path] = None) -> Path
 
  if not model_dir.exists():
  raise FileNotFoundError(
- f"{source} model directory ({model_dir.as_posix()}) does not exist"
+ f"{fes_version} model directory ({model_dir.as_posix()}) does not exist"
  )
 
  return model_dir
@@ -177,6 +301,10 @@ def get_constituents(
  if extractor is None:
  extractor = _default_extractor
 
+ # source is a pyTMD model key (e.g. FES2022_extrapolated)
+ model_key = source
+ fes_version = _fes_version_from_model_key(model_key)
+
  coordinates = _normalise_coordinates(coordinates)
 
  cons_dict = dict()
@@ -189,28 +317,48 @@ def get_constituents(
  chainage = 0
  nx = 1
 
- model_dir = _get_model_dir(source, model_dir)
- srcdir = model_dir / ".."
+ # Validate the provided model_dir exists (based on FES VERSION, not model key)
+ raw_model_dir = _get_model_dir(fes_version, model_dir)
+
+ # Choose the directory containing elevation constituents
+ constituent_dir = _select_constituent_dir(raw_model_dir, model_key)
+
+ # Resolve pyTMD root for database-relative paths
+ srcdir = _pytmd_root_from_any_model_path(constituent_dir)
+
+ # List constituent files. Exclude land/mask/grid helper files.
+ files = []
+ for x in constituent_dir.glob("*.nc"):
+ stem = x.stem.lower()
+ if any(k in stem for k in ("mask", "land", "grid")):
+ continue
+ files.append(x)
 
- files = [x for x in model_dir.rglob("*.nc")]
  file_cons = [f.stem for f in files]
  files = [files[i] for i in np.argsort(file_cons)]
 
- model = pyTMD.io.model(srcdir).elevation(source)
+ try:
+ model = pyTMD.io.model(srcdir).elevation(model_key)
+ except Exception:
+ # Fallback if pyTMD database does not contain the extrapolated key
+ model = pyTMD.io.model(srcdir).elevation(fes_version)
  cons = model.constituents
 
- if source in ("FES2014", "FES2022"):
+ # Expect 34 constituents for elevation for these FES solutions
+ if fes_version in ("FES2014", "FES2022"):
  if len(files) != 34:
- raise ValueError(f"Cannot find 34 .nc files for {source}")
+ raise ValueError(
+ f"Cannot find 34 .nc files for {model_key} "
+ f"(found {len(files)}) in {constituent_dir.as_posix()}"
+ )
 
  print("... extracting constituents from database")
-
  amp, ph = extractor.extract_fes_constants(
- coords, files, source, interpolate_method
+ coords, files, fes_version, interpolate_method
  )
 
  cons_dict[bnd_id] = dict(
- cons=(amp, ph, cons), geo=(coords, chainage, nx), source=source
+ cons=(amp, ph, cons), geo=(coords, chainage, nx), source=model_key
  )
 
  if save_cons:
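The `save_cons` branch at the end of this loop is what enables the README's "slow first time, fast afterwards" workflow: the extracted amplitude/phase constituents are pickled once and reloaded on later runs. A minimal sketch of that caching pattern (the file name and payload are illustrative, not the module's exact format):

```python
import pickle
from pathlib import Path

def load_or_compute(cache: str, compute):
    # Reuse a previous extraction if the pickle exists; otherwise compute and save.
    path = Path(cache)
    if path.exists():
        return pickle.loads(path.read_bytes())
    result = compute()
    path.write_bytes(pickle.dumps(result))
    return result

cons = load_or_compute("boundary_constituents.pkl", lambda: {"bnd_1": "..."})
```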
@@ -364,6 +512,12 @@ def ExtractTide(
  "Could not detect tidal model source from model directory"
  )
 
+ if source not in VALID_MODEL_KEYS:
+ raise ValueError(
+ f"Requested source {source} not supported. "
+ f"Valid sources: {VALID_MODEL_KEYS}"
+ )
+
  if shapefile is None:
  raise ValueError("Shapefile required when constituents not provided")
 
@@ -379,7 +533,7 @@ def ExtractTide(
  print("Confirming Request:")
  print(f"...Time Start: {time_start.strftime('%Y-%m-%d %H:%M')}")
  print(f"...Time End: {time_end.strftime('%Y-%m-%d %H:%M')}")
- print(f"...Model Dir: {model_dir.absolute().as_posix()}")
+ print(f"...Model Dir: {Path(model_dir).absolute().as_posix()}")
  print(f"...Tidal Data Source: {source}")
  print(f"...Nodestring Name: {shapefile.name}")
  print(f"...Nodestring Folder: {shapefile.parent.absolute().as_posix()}")
@@ -488,8 +642,6 @@ def _netcdf_writer(
  freq=freq,
  )
 
- ns = list(ns_wlev.keys())
-
  nc = xr.Dataset(
  coords=dict(time=timevec), attrs=attrs.copy() # Copy to avoid mutable default
  )
@@ -20,12 +20,18 @@ def horizontal_pad(ds):
  for var in ds.data_vars:
  if set(ds[var].dims) >= {'latitude', 'longitude', 'time'}:
  # Compute nearest indices for each depth level
+ ny = ds.sizes["latitude"]
+ nx = ds.sizes["longitude"]
+
  nearest_indices = xr.apply_ufunc(
  _compute_nearest_indices,
  ds[var].isel(time=0),
  input_core_dims=[['latitude', 'longitude']],
  output_core_dims=[['latitude', 'longitude']],
- vectorize=True
+ vectorize=True,
+ dask='parallelized',
+ output_dtypes=[np.int64],
+ dask_gufunc_kwargs={"output_sizes": {"latitude": ny, "longitude": nx}},
  )
 
  # Apply filling to all time steps for each depth
@@ -35,7 +41,10 @@ def horizontal_pad(ds):
  nearest_indices,
  input_core_dims=[['latitude', 'longitude'], ['latitude', 'longitude']],
  output_core_dims=[['latitude', 'longitude']],
- vectorize=True
+ vectorize=True,
+ dask='parallelized',
+ output_dtypes=[ds[var].dtype],
+ dask_gufunc_kwargs={"output_sizes": {"latitude": ny, "longitude": nx}},
  )
 
  ds[var] = filled_var
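For reference, `dask='parallelized'` makes `apply_ufunc` wrap the NumPy kernel lazily, chunk by chunk, instead of forcing the whole array into memory; the core dimensions must not be chunked, and `output_dtypes` tells dask what to expect. A self-contained sketch (requires dask; `output_sizes` is only strictly needed when an output core dimension is not already present on the inputs):

```python
import numpy as np
import xarray as xr

def negate(block: np.ndarray) -> np.ndarray:
    # Stand-in for a per-slice kernel like _fill_nans_with_precomputed_indices.
    return -block

da = xr.DataArray(np.arange(24.0).reshape(6, 2, 2),
                  dims=("time", "latitude", "longitude")).chunk({"time": 2})

out = xr.apply_ufunc(
    negate, da,
    input_core_dims=[["latitude", "longitude"]],
    output_core_dims=[["latitude", "longitude"]],
    dask="parallelized",        # build a lazy dask graph, evaluated per chunk
    output_dtypes=[da.dtype],
)
print(out.compute().shape)  # (6, 2, 2)
```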
@@ -80,10 +89,12 @@ def _fill_nans_with_precomputed_indices(values: np.ndarray, nearest_indices: np.
  np.ndarray: filled variable values
  """
  valid_values = values[~np.isnan(values)]
+ if valid_values.size == 0:
+ return values
 
  # Apply the precomputed indices to fill NaN values
  filled_values = values.copy()
  nan_mask = np.isnan(filled_values)
  filled_values[nan_mask] = valid_values[nearest_indices[nan_mask]]
 
- return filled_values
+ return filled_values
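The fill itself is plain fancy indexing: `nearest_indices` holds, for every cell, the position of its nearest valid value within the compressed (NaN-free) array, and the new guard simply skips slices that contain no valid values at all. A tiny worked example:

```python
import numpy as np

values = np.array([1.0, np.nan, np.nan, 4.0])
valid = values[~np.isnan(values)]        # [1., 4.]
# Precomputed elsewhere: index into `valid` of each cell's nearest valid cell.
nearest_indices = np.array([0, 0, 1, 1])

filled = values.copy()
mask = np.isnan(filled)
filled[mask] = valid[nearest_indices[mask]]
print(filled)  # [1. 1. 4. 4.]
```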
@@ -1,8 +1,8 @@
  Metadata-Version: 2.4
  Name: tfv_get_tools
- Version: 0.2.4
+ Version: 0.2.5
  Summary: Tool for downloading and processing data for TUFLOW FV modelling
- Author-email: Alex Waterhouse <alex.waterhouse@apac.bmt.org>, Mitchell Smith <mitchell.smith@apac.bmt.org>, TUFLOW Support <support@tuflow.com>
+ Author-email: TUFLOW Support <support@tuflow.com>
  License: MIT
  Project-URL: Homepage, https://gitlab.com/tuflow-user-group/tuflow-fv/data-pre-processing/tfv-get-tools
  Project-URL: Repository, https://gitlab.com/tuflow-user-group/tuflow-fv/data-pre-processing/tfv-get-tools
@@ -28,7 +28,7 @@ Requires-Dist: siphon>=0.9
  Requires-Dist: cdsapi>=0.7.0
  Requires-Dist: copernicusmarine>=1.2.3
  Requires-Dist: bottleneck>=1.3.5
- Requires-Dist: pyTMD>=2.1.5
+ Requires-Dist: pyTMD<3,>=2.1.7
  Requires-Dist: shapely>=2.0.6
  Requires-Dist: geopandas>=1.0.1
  Provides-Extra: dev
@@ -104,29 +104,30 @@ The package provides command-line tools for downloading and processing data:
 
  **Ocean Data Example:**
  ```bash
- # Download HYCOM data for January 2011 in southeast Queensland
- GetOcean A 2011-01-01 2011-02-01 153 156 -29 -24
+ # Download HYCOM data for January 2011 on the east coast of Australia
+ GetOcean A 2011-01-01 2011-02-01 153 154 -29 -28
 
- # Download with custom options (top 20m, 3-hourly data, custom prefix)
- GetOcean A -p raw_data -pf SEQ_HYCOM -ts 3 -z 0 20 2011-01-01 2011-02-01 153 156 -29 -24
+ # Download with options (output directory path (-p), custom filename prefix (-pf), 3-hourly data (-ts), top 20 m (-z))
+ GetOcean A -p raw_data -pf studysite -ts 3 -z 0 20 2011-01-01 2011-02-01 153 154 -29 -28
 
- # Merge downloaded files
- GetOcean B -i raw_data -o output -rp 28350 -tz 8 -ltz AWST -n merged_hycom.nc
+ # Merge downloaded files with options (input directory (-i), output directory (-o), timezone conversion (-tz), timezone attribute metadata (-ltz) and custom filename (-f))
+ GetOcean B -i raw_data -o output -tz 10 -ltz AEST -f merged_hycom.nc
  ```
 
  **Atmospheric Data Example:**
  ```bash
- # Download ERA5 atmospheric data
- GetAtmos A 2011-01-01 2011-02-01 152 154 -28 -26
+ # Download ERA5 atmospheric data with options (output directory path (-p))
+ GetAtmos A -p raw_data 2010-03-01 2010-04-01 153 154 -29 -28
 
- # Merge with reprojection and timezone conversion
- GetAtmos B -i raw_data -o output -rp 7856 -tz 10 -ltz AEST
+ # Merge downloaded files with options (input directory (-i), output directory (-o), reprojection to EPSG:28356 (-rp), timezone conversion (-tz) and timezone attribute metadata (-ltz))
+ GetAtmos B -i raw_data -o output -rp 28356 -tz 10 -ltz AEST
  ```
 
  **Tidal Data Example:**
  ```bash
- # Extract tidal data using a boundary nodestring shapefile
- GetTide output/tide_data.nc 2011-01-01 2012-01-01 boundaries/nodestring.shp path/to/fes2022/
+ # Extract tidal data from the FES2022 extrapolated ocean tide using a boundary nodestring shapefile
+ GetTide output/tide_data.nc 2010-03-01 2010-04-01 nodestrings/2d_ns_Open_Boundary_001_L.shp -s FES2022_extrapolated fes2022b/ocean_tide_extrapolated
+ # GetTide supports multiple FES tide models and each requires a specific directory structure. Refer to the wiki: (https://fvwiki.tuflow.com/TUFLOW_FV_Get_Tide_draft#Mandatory_FES_Folder_Structure)
  ```
 
  ### Python API
@@ -139,20 +140,20 @@ from tfv_get_tools import DownloadOcean, MergeOcean
  result = DownloadOcean(
  start_date='2011-01-01',
  end_date='2011-02-01',
- xlims=(153, 156),
- ylims=(-29, -24),
+ xlims=(153, 154),
+ ylims=(-29, -28),
  out_path='./raw_data',
  source='HYCOM',
  time_interval=24
  )
 
- # Merge downloaded files
+ # Merge downloaded files and shift time +10h with timezone attribute set to 'AEST'
  MergeOcean(
  in_path='./raw_data',
  out_path='./output',
- source='HYCOM',
- reproject=28350,
- local_tz=(8, 'AWST')
+ source='HYCOM',
+ local_tz=(10, 'AEST'),
+ fname='HYCOM_20110101_20110201_AEST.nc',
  )
  ```
 
@@ -162,21 +163,22 @@ from tfv_get_tools import DownloadAtmos, MergeAtmos
 
  # Download BARRA2 data
  result = DownloadAtmos(
- start_date='2020-01-01',
- end_date='2020-02-01',
- xlims=(152.72, 153.78),
- ylims=(-27.49, -25.39),
+ start_date='2022-12-01',
+ end_date='2023-01-01',
+ xlims=(153, 154),
+ ylims=(-29, -28),
  out_path='./raw_data',
  source='BARRA2',
  model='C2'
  )
 
- # Merge with reprojection to GDA2020 MGA56
+ # Merge downloaded files, reproject to GDA2020 MGA56, and shift time +10h with timezone attribute set to 'AEST'
  MergeAtmos(
  in_path='./raw_data',
  out_path='./output',
+ fname='BARRA2_C2_20221201_20230101_EPSG7856_AEST.nc',
  source='BARRA2',
- model='C2',
+ model='C2',
  reproject=7856,
  local_tz=(10.0, 'AEST')
  )
@@ -187,38 +189,50 @@ MergeAtmos(
  from pathlib import Path
  from tfv_get_tools.tide import ExtractTide
 
+ # User input
+ t_start = '2023-01-01'
+ t_end = '2023-02-01'
+ fes_dir = './fes2022b/ocean_tide_extrapolated' # GetTide supports multiple FES tide models and each requires a specific directory structure. Refer to the wiki: (https://fvwiki.tuflow.com/TUFLOW_FV_Get_Tide_draft#Mandatory_FES_Folder_Structure)
+ output_dir = './output'
+ shp_file = './2d_ns_Open_Boundary_001_L.shp'
+ output_name = 'GOC_FES2022_extrapolated_20230101_20230125.nc'
+ model = 'FES2022_extrapolated'
+
  # Basic tidal extraction
  ExtractTide(
- start_date='2011-01-01',
- end_date='2012-01-01',
- filename='tide_data.nc',
- out_path='./output',
+ time_start=t_start,
+ time_end=t_end,
+ model_dir=fes_dir,
+ source=model,
+ fname=output_name,
+ out_path=output_dir,
  freq='15min',
- shapefile='boundaries/nodestring.shv'
+ shapefile=shp_file,
  )
 
  # Advanced usage with constituent caching
  from tfv_get_tools.tide import load_nodestring_shapefile, process_nodestring_gdf, get_constituents
 
  # Load and process boundary shapefile
- gdf = load_nodestring_shapefile('boundaries/nodestring.shp', process_ids=[1])
- coordinates = process_nodestring_gdf(gdf, spacing=5000)
+ gdf = load_nodestring_shapefile(shp_file)
+ coordinates = process_nodestring_gdf(gdf, spacing=2500)
 
  # Extract constituents once (slow first time, fast afterwards)
  constituents = get_constituents(
  coordinates,
- source='FES2022',
- save_cons='boundary_constituents.pkl'
+ model_dir=fes_dir,
+ source=model,
+ save_cons='boundary_constituents.pkl',
  )
 
  # Use cached constituents for faster extraction
  ExtractTide(
- start_date='2011-01-01',
- end_date='2012-01-01',
- filename='tide_data.nc',
- out_path='./output',
+ time_start=t_start,
+ time_end=t_end,
+ fname=output_name,
+ out_path=output_dir,
  freq='15min',
- constituents='boundary_constituents.pkl'
+ constituents='boundary_constituents.pkl',
  )
  ```
 
@@ -276,7 +290,7 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 
  ## Authors
 
- Developed by [TUFLOW](https://www.tuflow.com/), 2025
+ Developed by [TUFLOW](https://www.tuflow.com/), 2026
 
  ## Project Status
 
@@ -284,4 +298,4 @@ Developed by [TUFLOW](https://www.tuflow.com/), 2025
 
  ---
 
- *Last updated: July 2025*
+ *Last updated: January 2026*
@@ -1,15 +1,15 @@
  tfv_get_tools/__init__.py,sha256=HmxXSx__5cKkdENL8hyxppPUSR1tZ7bsye9sp63qX2s,225
- tfv_get_tools/_standard_attrs.py,sha256=mhmheUm49e-G8ePMLGnIHvWHYW3ZHjCXQlokEIBEgXM,3247
+ tfv_get_tools/_standard_attrs.py,sha256=1fKqM_b8PXZ9kUPiCBSb2ID7ZAKF-ajeen7KmsV-Z7c,3249
  tfv_get_tools/atmos.py,sha256=vmga1ZC3cWM8eejt-lluYOaQpp2lNn0M59s61u9B3Zw,5799
  tfv_get_tools/ocean.py,sha256=27EsEP52u-JrT-TOxtH5rqS7ROgEporZ52d0jdbvJH0,6588
  tfv_get_tools/wave.py,sha256=5OKU4BENNR5y0boEH0nHc0ZCgcPx5gcqS9eKr8jAwFY,6779
- tfv_get_tools/cli/_cli_base.py,sha256=foc3Jr_PiN3gmouy4NHLCQYJYolupxx2Q-kFbiNgNoE,5996
- tfv_get_tools/cli/atmos_cli.py,sha256=Jvc8JxYTURx3PCOkeqMoWtt5jsIKfpRnlH7FNBNqEs8,6581
- tfv_get_tools/cli/ocean_cli.py,sha256=nyNIQN7qwDvME2Ts9oWUCaOtANrFDaQZAmr48v5k5D0,6912
- tfv_get_tools/cli/tide_cli.py,sha256=ML7l1-HEyxg1xaEVmclN09nY5D4kkSVAgFZ6SiBYJok,3334
+ tfv_get_tools/cli/_cli_base.py,sha256=9JZrgM5dH2ODaBkeuaJdQMNrCNfwe0qCrNPkD2H6ZYA,6165
+ tfv_get_tools/cli/atmos_cli.py,sha256=YIzcBkPcJ2b5M1pQUu3JK8wAHDSyZB4gbZF325zCFig,6548
+ tfv_get_tools/cli/ocean_cli.py,sha256=MfW_hhnELV-gVXRtYO-JAEctnYjR5xocBsupkBJ-qPE,6853
+ tfv_get_tools/cli/tide_cli.py,sha256=mXG3tidaSmeOJCAl251KYI7xniM9cVsfUMNE1SSdqsg,3470
  tfv_get_tools/cli/wave_cli.py,sha256=kKMY93SUKcDRbrOWJb3bNJUuMMJ9kTrLRIdc-ATAEtY,6132
  tfv_get_tools/fvc/__init__.py,sha256=33GxmA5OwA-I9k79ClTwZDH7gG2O7ZQLYd-gNytE43k,157
- tfv_get_tools/fvc/_atmos.py,sha256=NrMiTnH9HImktcMxdkvJDXQtovlXewdsOzBlCNUmIDA,8081
+ tfv_get_tools/fvc/_atmos.py,sha256=EmqRTjDuGJC-myEE3IBfrwW3vBXdlN29IFgXsOrDZLw,8989
  tfv_get_tools/fvc/_fvc.py,sha256=iMqWJ6RgWcVnpI-J_ZsZtScgYYtchLy9nfo2XL2UaqU,7933
  tfv_get_tools/fvc/_ocean.py,sha256=ZXtDj7HZe2S4yuaM3RLoES3p-kVGE2HA2D_PBZZdIXk,5734
  tfv_get_tools/fvc/_tide.py,sha256=tjs4GTg4NRIa7cFcBNFqMqo-yMzOZ6BKgdixSWZafTo,6397
@@ -18,8 +18,8 @@ tfv_get_tools/providers/_custom_conversions.py,sha256=0w9wUkq2k4yireErfLwKKRaj6R
  tfv_get_tools/providers/_downloader.py,sha256=zLwYSweTKYJ_OmtikwzEQRw1s8Rc4Ma1aCIqY1jIxXM,19786
  tfv_get_tools/providers/_merger.py,sha256=gj6NkEsdx2QIUT8QmhaOzEXaFKEjpqBEmf_g2qaufzo,18367
  tfv_get_tools/providers/_utilities.py,sha256=8JiiadD8OGsMjZhGn8ce8s7vqtYZpDB-oQFieGAvKeA,7873
- tfv_get_tools/providers/atmos/barra2.py,sha256=8k_NJdlHwNgc2FMElabWaycuAdJBRARGc9QfzzDxQXc,6820
- tfv_get_tools/providers/atmos/cfsr.py,sha256=Cf_6mCBiPi1SCUd3NDX_psmDdITU1lqwqf5vNhj7ttY,8055
+ tfv_get_tools/providers/atmos/barra2.py,sha256=IM5iV11p9RD_rcRdikL6UWrGnfH8fnmAv9G-_ph6iuk,6838
+ tfv_get_tools/providers/atmos/cfsr.py,sha256=t4T1apZFf-XIxh7FuddYdwHZBm5QY_48tC-wjnC1UCY,8068
  tfv_get_tools/providers/atmos/era5.py,sha256=4CiPu6voEEHlrrqR9W1NN6T_CeR8b9ZiHdo1EWsb_lc,562
  tfv_get_tools/providers/atmos/era5_gcp.py,sha256=Fr4oFek9klZfhXxtZdBhbDRXhiQo53qxxyvzBWKndco,569
  tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml,sha256=5LZpl1vz6FOQ3t55uN4V0Me1naToy2peHNI9HJ0vHrI,1655
@@ -29,14 +29,14 @@ tfv_get_tools/providers/atmos/cfgs/cfsr.yaml,sha256=8HgskImaZHoW7Xm6rP5RgpFdTK_q
  tfv_get_tools/providers/atmos/cfgs/era5.yaml,sha256=_a34TT3CUWNGvtKHFVbNFS2_0YFZKQU1Sqnu3TC9fwg,1654
  tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml,sha256=CY-y4Uxq3fPFKtqCcsJcL0_j6Vu-hahNqK8HsTGS3Bc,1719
  tfv_get_tools/providers/ocean/copernicus_ocean.py,sha256=hPnxONvQ3YEAI65pG0KMaGImqu2POBYvW7-Q6xhjf3s,17848
- tfv_get_tools/providers/ocean/hycom.py,sha256=fqIJGDVAwNB6A5k4Tu18mQs5BxwWkaeECOqYknOr5LI,26809
+ tfv_get_tools/providers/ocean/hycom.py,sha256=wle3Wq9EayTLWAzBs6R41tx0g4dFgxPgXOoMqq0JDaI,28123
  tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml,sha256=XGCj8_rDLNHSq6kQnvgjrahcPGaiLInxqB_9LoMvlAY,1482
  tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml,sha256=IMETXv3u783YzxB3fRF_uQ5hYBN28aHAH3iJzXcuDhU,1443
  tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml,sha256=rQhSEynvKgypohDhyIqizjZDriETbBiZPBtDp8dFLOY,1346
  tfv_get_tools/providers/ocean/cfgs/hycom.yaml,sha256=GQoEcyLYmF_PRPXeY2IO7jiqFcZad7y8bzPjsBn9PS4,1599
  tfv_get_tools/providers/wave/cawcr.py,sha256=95YZCewImgtldiDj6qJ6lkcIo_QIz2rFTYDB0qwkwVk,6028
  tfv_get_tools/providers/wave/copernicus_wave.py,sha256=FeoJUSDMH_tuo29VoZYSDXalra-lcfm9Mue--fJ-E7U,1031
- tfv_get_tools/providers/wave/era5.py,sha256=4e1l_h5aZR-1LlvajDVAqG8jsEk8qdXHSm_NHK8wsA4,9190
+ tfv_get_tools/providers/wave/era5.py,sha256=EqoucdeV06AHnwPWUUJRV11M5dI79ULSxX9ar1EMy8A,9204
  tfv_get_tools/providers/wave/era5_gcp.py,sha256=hLY_4YqQxhQ-P9uPC30Knv_LJ4vFMu1ZJJbGccR4xb8,6165
  tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml,sha256=PCk6fkYHTD1uMv1oZP_7M4BuFWMxLaUdZmbbrJ4zA4w,936
  tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml,sha256=LJ-8C8PFzVO4nmIMM4CUfhHsnnb1cTdKeYRr_DLIFKI,935
@@ -49,15 +49,15 @@ tfv_get_tools/providers/wave/cfgs/era5.yaml,sha256=vT24g2J2MmlMEJ3GNzpt44T42Zf97
  tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml,sha256=HM1C2AlZ-RFJuUbPaQ2Eqn4N4D-OxIhzgyfCa4wh9wM,1138
  tfv_get_tools/tide/__init__.py,sha256=qt_L_3ONraItnQL0IVLwOGfrV-z49nEAV3MjmAEPKTo,160
  tfv_get_tools/tide/_nodestring.py,sha256=7oSteM7nMRFF_UGiArdlh4HaEAmjD0ItWIzzuh1WEsk,7535
- tfv_get_tools/tide/_tidal_base.py,sha256=Yo0ZiYiW0OoL-DrNTDBj8xFxE10Z17nB6Yq3HhE6ty8,18150
+ tfv_get_tools/tide/_tidal_base.py,sha256=AQ7DsjpfzsM9kezLka0_Sjz1FOvDYDCWn8bh5bbQRGc,24240
  tfv_get_tools/utilities/_tfv_bc.py,sha256=Mf31N8cKnfooika-BNPk3DXzW3I3_G3xQMXFeQKLh0E,2664
- tfv_get_tools/utilities/horizontal_padding.py,sha256=-bqLDzqm17fOZqYrjJPXYwdVYwoudCPFtKYfvzxG8D4,2898
+ tfv_get_tools/utilities/horizontal_padding.py,sha256=MIllkOOz1f3nP_gXDShynqd6gEVlKcftwkJ_z_SE6WA,3381
  tfv_get_tools/utilities/land_masking.py,sha256=19r9iiMMqxXChGlfmmXPzEM5VyhsnR-nqTsSjaKzP34,2743
  tfv_get_tools/utilities/parsers.py,sha256=V4ZBcpLPtSbkM3k5XoZS_xpauJVEzHUs9woNHznbHI4,1284
  tfv_get_tools/utilities/warnings.py,sha256=GWrj7Jh2gU3b9u2kzSfaqYPk8cL9aeMbkJgspn0a9W8,1146
- tfv_get_tools-0.2.4.dist-info/licenses/LICENSE,sha256=ALmu4D6vRZ-Xxz6IjzUIc_XyZGfVIWCOxIA1qe3tnVY,1059
- tfv_get_tools-0.2.4.dist-info/METADATA,sha256=nOj4d7OVrA2ZmmZO_9bJN9CxiXCKDzEty2q3duaI7cA,10701
- tfv_get_tools-0.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- tfv_get_tools-0.2.4.dist-info/entry_points.txt,sha256=pTCS55WUArvFH-Z_EsjTpICz9p1rqqVJ10e7aX3S2MA,194
- tfv_get_tools-0.2.4.dist-info/top_level.txt,sha256=K_ewT8V9jhtf59kUDU5YqahUczoInqs7ZFlc4Ho3IjE,14
- tfv_get_tools-0.2.4.dist-info/RECORD,,
+ tfv_get_tools-0.2.5.dist-info/licenses/LICENSE,sha256=ALmu4D6vRZ-Xxz6IjzUIc_XyZGfVIWCOxIA1qe3tnVY,1059
+ tfv_get_tools-0.2.5.dist-info/METADATA,sha256=8pqCzkxyT47DDTFvZmw1Dk7gbPjbkDEHIxBITob2Ios,11921
+ tfv_get_tools-0.2.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ tfv_get_tools-0.2.5.dist-info/entry_points.txt,sha256=pTCS55WUArvFH-Z_EsjTpICz9p1rqqVJ10e7aX3S2MA,194
+ tfv_get_tools-0.2.5.dist-info/top_level.txt,sha256=K_ewT8V9jhtf59kUDU5YqahUczoInqs7ZFlc4Ho3IjE,14
+ tfv_get_tools-0.2.5.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any