tfv_get_tools-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. tfv_get_tools/__init__.py +4 -0
  2. tfv_get_tools/_standard_attrs.py +107 -0
  3. tfv_get_tools/atmos.py +167 -0
  4. tfv_get_tools/cli/_cli_base.py +173 -0
  5. tfv_get_tools/cli/atmos_cli.py +192 -0
  6. tfv_get_tools/cli/ocean_cli.py +204 -0
  7. tfv_get_tools/cli/tide_cli.py +118 -0
  8. tfv_get_tools/cli/wave_cli.py +183 -0
  9. tfv_get_tools/fvc/__init__.py +3 -0
  10. tfv_get_tools/fvc/_atmos.py +230 -0
  11. tfv_get_tools/fvc/_fvc.py +218 -0
  12. tfv_get_tools/fvc/_ocean.py +171 -0
  13. tfv_get_tools/fvc/_tide.py +195 -0
  14. tfv_get_tools/ocean.py +170 -0
  15. tfv_get_tools/providers/__init__.py +0 -0
  16. tfv_get_tools/providers/_custom_conversions.py +34 -0
  17. tfv_get_tools/providers/_downloader.py +566 -0
  18. tfv_get_tools/providers/_merger.py +520 -0
  19. tfv_get_tools/providers/_utilities.py +255 -0
  20. tfv_get_tools/providers/atmos/barra2.py +209 -0
  21. tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
  22. tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
  23. tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
  24. tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
  25. tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
  26. tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
  27. tfv_get_tools/providers/atmos/cfsr.py +207 -0
  28. tfv_get_tools/providers/atmos/era5.py +20 -0
  29. tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
  30. tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
  31. tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
  32. tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
  33. tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
  34. tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
  35. tfv_get_tools/providers/ocean/hycom.py +611 -0
  36. tfv_get_tools/providers/wave/cawcr.py +166 -0
  37. tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
  38. tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
  39. tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
  40. tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
  41. tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
  42. tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
  43. tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
  44. tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
  45. tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
  46. tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
  47. tfv_get_tools/providers/wave/era5.py +232 -0
  48. tfv_get_tools/providers/wave/era5_gcp.py +169 -0
  49. tfv_get_tools/tide/__init__.py +2 -0
  50. tfv_get_tools/tide/_nodestring.py +214 -0
  51. tfv_get_tools/tide/_tidal_base.py +568 -0
  52. tfv_get_tools/utilities/_tfv_bc.py +78 -0
  53. tfv_get_tools/utilities/horizontal_padding.py +89 -0
  54. tfv_get_tools/utilities/land_masking.py +93 -0
  55. tfv_get_tools/utilities/parsers.py +44 -0
  56. tfv_get_tools/utilities/warnings.py +38 -0
  57. tfv_get_tools/wave.py +179 -0
  58. tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
  59. tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
  60. tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
  61. tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
  62. tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
tfv_get_tools/providers/wave/cawcr.py
@@ -0,0 +1,166 @@
+ from pathlib import Path
+ from typing import List, Tuple
+
+ import numpy as np
+ import pandas as pd
+ import xarray as xr
+ from pandas.tseries.offsets import MonthEnd
+ from tqdm import tqdm
+
+ from tfv_get_tools.providers._downloader import BaseDownloader
+ from tfv_get_tools.providers._merger import BaseMerger
+ from tfv_get_tools.providers._utilities import todstr
+
+
+ class DownloadCAWCR(BaseDownloader):
+     """CAWCR Wave downloader"""
+
+     def _init_specific(self, **kwargs):
+         """Set source and mode - matches original interface"""
+         if self.model == 'default':
+             self.log("Default model has been selected == 'glob_24m'")
+             self.model = 'glob_24m'
+
+         self.source = "CAWCR"
+         self.mode = "WAVE"
+
+         # Validate model selection
+         MODELS = {"pac_4m", "pac_10m", "glob_24m", "aus_4m", "aus_10m"}
+         if self.model not in MODELS:
+             raise ValueError(f"Model must be one of {MODELS}")
+
+         self._load_config()
+
+         # CAWCR-specific: track whether we've checked for valid data
+         self.checked_data = False
+
+     def _get_output_filename(self, ts: pd.Timestamp, te: pd.Timestamp) -> Path:
+         """CAWCR filename pattern includes model name"""
+         return self.outdir / f"{self.prefix}_{self.model}_{todstr(ts)}_{todstr(te)}.nc"
+
+     def _construct_opendap_url(self, date: pd.Timestamp) -> str:
+         """Construct the OPeNDAP URL for the given date"""
+         date_str = date.strftime("%Y%m")
+         url = f"{self.base_url}/ww3.{self.model}.{date_str}.nc"
+         return url
+
+     def _check_data_validity(self, ds: xr.Dataset) -> None:
+         """Check if the dataset contains valid data (CAWCR-specific)"""
+         if not self.checked_data:
+             # Check first variable for valid data
+             v = next(iter(self.variables))
+             nonnan = np.nansum(ds[v][0].values)
+             if nonnan == 0.0:
+                 raise ValueError(
+                     "No valid data detected in netcdf - please check requested extents against the data source"
+                 )
+             else:
+                 self.checked_data = True
+
+     def _download_single_file(self, fname: Path, url: str) -> bool:
+         """Download single file from CAWCR OPeNDAP server"""
+         try:
+             ds = xr.open_dataset(url)
+
+             # Handle longitude selection (CAWCR-specific logic)
+             if self.xlims[0] < self.xlims[1]:
+                 lon_idx = np.hstack(
+                     np.where(
+                         (self.xlims[0] <= ds["longitude"])
+                         & (ds["longitude"] <= self.xlims[1])
+                     )
+                 )
+             else:
+                 lon_idx = np.hstack(
+                     (
+                         np.where(ds["longitude"] <= self.xlims[1])[0],
+                         np.where(ds["longitude"] >= self.xlims[0])[0],
+                     )
+                 )
+
+             if lon_idx.size <= 1:
+                 raise ValueError("No longitude points selected! You may need to increase your grid extents")
+
+             # Select latitude indices
+             lat_idx = np.hstack(
+                 np.where(
+                     (self.ylims[0] <= ds["latitude"]) & (ds["latitude"] <= self.ylims[1])
+                 )
+             )
+
+             if lat_idx.size <= 1:
+                 raise ValueError("No latitude points selected! You may need to increase your grid extents")
+
+             # Subset dataset by latitude and longitude
+             ds = ds.isel(longitude=lon_idx, latitude=lat_idx)
+
+             # Subset dataset to requested variables
+             ds = ds[self.variables]
+
+             # CAWCR-specific: Check for valid data on first download
+             self._check_data_validity(ds)
+
+             # Save to file
+             ds.to_netcdf(fname)
+             return True
+
+         except Exception as e:
+             if self.verbose:
+                 print(f"Failed to download {url}: {e}")
+             return False
+
+     def download(self):
+         """CAWCR-specific download loop - yields tasks for new base class"""
+         for ts in self.times:
+             te = ts + MonthEnd() + pd.Timedelta("23.9h")
+
+             output_file = self._get_output_filename(ts, te)
+             url = self._construct_opendap_url(ts)
+
+             yield {
+                 'file_path': output_file,
+                 'url': url,
+                 'timestamp': ts,
+                 'variable': f"{len(self.variables)}_vars",
+                 'download_func': lambda f=output_file, u=url: self._download_single_file(f, u)
+             }
+
+ class MergeCAWCR(BaseMerger):
+     def _init_specific(self) -> None:
+         self.source = "CAWCR"
+         self.mode = "WAVE"
+
+         if self.model == 'default':
+             self.model = 'glob_24m'
+
+         self._load_config()
+
+     def merge_files(self, file_list: List[Path]) -> Tuple[xr.Dataset, List[Path]]:
+         """Merge CAWCR wave files using time concatenation."""
+         if not file_list:
+             raise ValueError("No files provided for merging")
+
+         datasets = []
+         skipped_files = []
+
+         for file_path in tqdm(file_list, disable=not self.verbose):
+             ds = self._open_subset_netcdf(file_path)
+             if ds is not None:
+                 datasets.append(ds)
+             else:
+                 skipped_files.append(file_path)
+
+         if not datasets:
+             raise ValueError("No valid datasets could be loaded")
+
+         # Concatenate and clean up
+         merged = xr.concat(datasets, dim="time", combine_attrs="override",
+                            data_vars="minimal", coords="minimal", compat="override")
+
+         # Remove duplicates and sort
+         merged = merged.sortby("time")
+         _, unique_idx = np.unique(merged["time"], return_index=True)
+         merged = merged.isel(time=np.sort(unique_idx))
+
+         return merged, skipped_files
+
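The trickiest part of `_download_single_file` above is the longitude selection: when `xlims[0] < xlims[1]` it is a plain range select, otherwise the request is treated as wrapping across the dateline seam and the two index blocks are stitched together. A minimal, self-contained sketch of that selection pattern (toy coordinate values, not the CAWCR grids) is:

```python
import numpy as np
import xarray as xr

# Toy global longitude axis, 0..359 degrees at 1 degree spacing (illustrative only).
ds = xr.Dataset(coords={"longitude": ("longitude", np.arange(0.0, 360.0, 1.0))})

xlims = (350.0, 10.0)  # a wrapped request: xmin > xmax
if xlims[0] < xlims[1]:
    # Ordinary case: one contiguous block of indices.
    lon_idx = np.where(
        (xlims[0] <= ds["longitude"]) & (ds["longitude"] <= xlims[1])
    )[0]
else:
    # Wrapped case: take the blocks either side of the seam and concatenate.
    lon_idx = np.hstack(
        (
            np.where(ds["longitude"] <= xlims[1])[0],
            np.where(ds["longitude"] >= xlims[0])[0],
        )
    )

subset = ds.isel(longitude=lon_idx)
print(subset["longitude"].values)  # 0..10 followed by 350..359
```

The `lon_idx.size <= 1` guard in the downloader is then a cheap way to catch requested extents that fall entirely between grid points.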
tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml
@@ -0,0 +1,39 @@
+ _BASE_URL: "https://data-cbr.csiro.au/thredds/dodsC/catch_all/CMAR_CAWCR-Wave_archive/CAWCR_Wave_Hindcast_aggregate/gridded"
+ _INFO_URL: https://data.csiro.au/collection/csiro:39819
+
+ _SOURCE_XLIMS: [100., 175.]
+ _SOURCE_YLIMS: [-50., 3.]
+ _SOURCE_TIMELIMS: ["1979-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+ _VARIABLES: ["hs", "fp", "dir", "t02", "spr"]
+ _DATASETS: { 1979-01-01: { ww3.aus_10m.%y%m: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "hs"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "fp"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+   pre_process: freq_to_period(fp)
+
+ tm02:
+   source_var: "t02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "dir"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
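The `pre_process: freq_to_period(fp)` entry flags that CAWCR stores peak frequency `fp` (in Hz) rather than peak period, so the merged `tp` variable needs a conversion before it can be written as a `WVPER`-style quantity. The conversion itself is just Tp = 1 / fp; a hedged sketch of what such a helper presumably does (the package's actual implementation lives elsewhere and may differ):

```python
import xarray as xr

def freq_to_period(fp: xr.DataArray) -> xr.DataArray:
    """Illustrative sketch: convert peak wave frequency fp (Hz) to peak period Tp (s).

    Tp = 1 / fp, with non-positive or missing frequencies mapped to NaN
    rather than producing infinities.
    """
    tp = 1.0 / fp.where(fp > 0)
    tp.attrs.update({"long_name": "peak wave period", "units": "s"})
    return tp

# A 0.1 Hz peak frequency corresponds to a 10 s peak period.
fp = xr.DataArray([0.1, 0.05, 0.0], dims="time", name="fp")
print(freq_to_period(fp).values)  # [10. 20. nan]
```

The remaining CAWCR configs below differ only in their spatial extents and dataset name, so the same mapping and pre-process apply to each grid.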
tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml
@@ -0,0 +1,39 @@
+ _BASE_URL: "https://data-cbr.csiro.au/thredds/dodsC/catch_all/CMAR_CAWCR-Wave_archive/CAWCR_Wave_Hindcast_aggregate/gridded"
+ _INFO_URL: https://data.csiro.au/collection/csiro:39819
+
+ _SOURCE_XLIMS: [100., 175.]
+ _SOURCE_YLIMS: [-50., 3.]
+ _SOURCE_TIMELIMS: ["1979-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+ _VARIABLES: ["hs", "fp", "dir", "t02", "spr"]
+ _DATASETS: { 1979-01-01: { ww3.aus_4m.%y%m: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "hs"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "fp"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+   pre_process: freq_to_period(fp)
+
+ tm02:
+   source_var: "t02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "dir"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml
@@ -0,0 +1,39 @@
+ _BASE_URL: "https://data-cbr.csiro.au/thredds/dodsC/catch_all/CMAR_CAWCR-Wave_archive/CAWCR_Wave_Hindcast_aggregate/gridded"
+ _INFO_URL: https://data.csiro.au/collection/csiro:39819
+
+ _SOURCE_XLIMS: [-180, 360]
+ _SOURCE_YLIMS: [-78, 78]
+ _SOURCE_TIMELIMS: ["1979-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+ _VARIABLES: ["hs", "fp", "dir", "t02", "spr"]
+ _DATASETS: { 1979-01-01: { ww3.glob_24.%y%m: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "hs"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "fp"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+   pre_process: freq_to_period(fp)
+
+ tm02:
+   source_var: "t02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "dir"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml
@@ -0,0 +1,39 @@
+ _BASE_URL: "https://data-cbr.csiro.au/thredds/dodsC/catch_all/CMAR_CAWCR-Wave_archive/CAWCR_Wave_Hindcast_aggregate/gridded"
+ _INFO_URL: https://data.csiro.au/collection/csiro:39819
+
+ _SOURCE_XLIMS: [125., 210.]
+ _SOURCE_YLIMS: [-30., 20.]
+ _SOURCE_TIMELIMS: ["1979-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+ _VARIABLES: ["hs", "fp", "dir", "t02", "spr"]
+ _DATASETS: { 1979-01-01: { ww3.pac_10m.%y%m: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "hs"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "fp"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+   pre_process: freq_to_period(fp)
+
+ tm02:
+   source_var: "t02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "dir"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml
@@ -0,0 +1,39 @@
+ _BASE_URL: "https://data-cbr.csiro.au/thredds/dodsC/catch_all/CMAR_CAWCR-Wave_archive/CAWCR_Wave_Hindcast_aggregate/gridded"
+ _INFO_URL: https://data.csiro.au/collection/csiro:39819
+
+ _SOURCE_XLIMS: [125., 210.]
+ _SOURCE_YLIMS: [-30., 20.]
+ _SOURCE_TIMELIMS: ["1979-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+ _VARIABLES: ["hs", "fp", "dir", "t02", "spr"]
+ _DATASETS: { 1979-01-01: { ww3.pac_4m.%y%m: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "hs"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "fp"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+   pre_process: freq_to_period(fp)
+
+ tm02:
+   source_var: "t02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "dir"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml
@@ -0,0 +1,56 @@
+ _BASE_URL: "N/A"
+
+ _SOURCE_XLIMS: [-180, 360]
+ _SOURCE_YLIMS: [-90, 90]
+ _SOURCE_TIMELIMS: ["1993-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+
+ # Default variables to extract if no `variables` arg is provided.
+ _VARIABLES: ["VHM0", "VTPK", "VMDR", "VTM02"]
+
+ # This is the map of datasets and what variables to snatch from each.
+ # default == take the _VARIABLES above.
+ _DATASETS:
+   {
+     1993-01-01:
+       { cmems_mod_glo_wav_my_0.2deg_PT3H-i: ["VHM0", "VTPK", "VMDR", "VTM02"] },
+     2023-05-01:
+       {
+         cmems_mod_glo_phy_myint_0.083deg_P1D-m:
+           ["VHM0", "VTPK", "VMDR", "VTM02"],
+       },
+     forecast:
+       {
+         cmems_mod_glo_wav_anfc_0.083deg_PT3H-i:
+           ["VHM0", "VTPK", "VMDR", "VTM02"],
+       },
+   }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "VHM0"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "VTPK"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ tm02:
+   source_var: "VTM02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "VMDR"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
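The `_DATASETS` block is a date-keyed map: each key is the first date from which a Copernicus dataset ID applies, with a special `forecast` key for the near-real-time stream. A minimal sketch of how such a map could be resolved for a requested date (a hypothetical helper for illustration, not the package's actual resolver):

```python
from datetime import date

# Keys mirror the _DATASETS block in cfgs/copernicus_glo.yaml above.
DATASETS = {
    date(1993, 1, 1): "cmems_mod_glo_wav_my_0.2deg_PT3H-i",
    date(2023, 5, 1): "cmems_mod_glo_phy_myint_0.083deg_P1D-m",
    "forecast": "cmems_mod_glo_wav_anfc_0.083deg_PT3H-i",
}

def resolve_dataset(request_date: date, forecast: bool = False) -> str:
    """Pick the dataset whose start date is the latest one not after request_date."""
    if forecast:
        return DATASETS["forecast"]
    chosen = None
    for start in sorted(k for k in DATASETS if isinstance(k, date)):
        if start <= request_date:
            chosen = start
    if chosen is None:
        raise ValueError(f"No dataset covers {request_date}")
    return DATASETS[chosen]

print(resolve_dataset(date(2010, 6, 1)))  # multi-year reanalysis dataset
print(resolve_dataset(date(2024, 1, 1)))  # interim dataset from 2023-05-01 onwards
```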
tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml
@@ -0,0 +1,51 @@
+ _BASE_URL: "N/A"
+
+ _SOURCE_XLIMS: [-16.0, 13.0]
+ _SOURCE_YLIMS: [46.0, 62.74]
+ _SOURCE_ZLIMS: [null, null]
+ _SOURCE_TIMELIMS: ["1980-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+
+ # Default variables to extract if no `variables` arg is provided.
+ _VARIABLES: ["VHM0", "VTPK", "VMDR", "VTM02"]
+
+ # This is the map of datasets and what variables to snatch from each.
+ # default == take the _VARIABLES above.
+ _DATASETS:
+   {
+     1993-01-01: { MetO-NWS-WAV-RAN: ["VHM0", "VTPK", "VMDR", "VTM02"] },
+     forecast:
+       {
+         cmems_mod_nws_wav_anfc_0.05deg_PT1H-i:
+           ["VHM0", "VTPK", "VMDR", "VTM02"],
+       },
+   }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "VHM0"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "VTPK"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ tm02:
+   source_var: "VTM02"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "VMDR"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
tfv_get_tools/providers/wave/cfgs/era5.yaml
@@ -0,0 +1,48 @@
+ _BASE_URL: "N/A"
+ _INFO_URL: https://cds.climate.copernicus.eu/datasets/reanalysis-era5-single-levels
+
+ _SOURCE_XLIMS: [-180, 360]
+ _SOURCE_YLIMS: [-90, 90]
+ _SOURCE_TIMELIMS: ["1940-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+
+ # Default variables to extract if no `variables` arg is provided.
+ _VARIABLES:
+   [
+     "significant_height_of_combined_wind_waves_and_swell",
+     "peak_wave_period",
+     "mean_wave_direction",
+     "wave_spectral_directional_width",
+   ]
+
+ # ERA5 has only the one dataset: reanalysis-era5-single-levels.
+ _DATASETS: { 1940-01-01: { reanalysis-era5-single-levels: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "swh"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "pp1d"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "mwd"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
+
+ spr:
+   source_var: "wdw"
+   tfv_var: null
+   bc_scale: 1
+   bc_offset: 0
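In these mapping blocks, each top-level key (`hs`, `tp`, `mwd`, ...) is the standardised variable name written to the merged NetCDF, `source_var` is the name in the provider's files (`swh`, `pp1d`, ...), and `bc_scale`/`bc_offset` describe the linear transform applied when the value is passed to the TUFLOW FV boundary-condition variable named by `tfv_var`. A hedged sketch of how such a map might be applied to a raw dataset (illustrative only; names taken from the config above):

```python
import xarray as xr

# Subset of cfgs/era5.yaml above, expressed as a Python dict for illustration.
VAR_MAP = {
    "hs":  {"source_var": "swh",  "tfv_var": "WVHT",  "bc_scale": 1, "bc_offset": 0},
    "tp":  {"source_var": "pp1d", "tfv_var": "WVPER", "bc_scale": 1, "bc_offset": 0},
    "mwd": {"source_var": "mwd",  "tfv_var": "WVDIR", "bc_scale": 1, "bc_offset": 0},
}

def standardise(ds: xr.Dataset) -> xr.Dataset:
    """Rename source variables to standard names and apply value = raw * bc_scale + bc_offset."""
    out = {}
    for std_name, cfg in VAR_MAP.items():
        if cfg["source_var"] in ds:
            out[std_name] = ds[cfg["source_var"]] * cfg["bc_scale"] + cfg["bc_offset"]
    return xr.Dataset(out)

# Usage (hypothetical file): standardise(xr.open_dataset("era5_waves.nc"))
```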
tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml
@@ -0,0 +1,48 @@
+ _BASE_URL: "gs://gcp-public-data-arco-era5/ar/full_37-1h-0p25deg-chunk-1.zarr-v3"
+ _INFO_URL: https://cds.climate.copernicus.eu/datasets/reanalysis-era5-single-levels
+
+ _SOURCE_XLIMS: [-180, 360]
+ _SOURCE_YLIMS: [-90, 90]
+ _SOURCE_TIMELIMS: ["1940-01-01 00:00:00", null]
+
+ _DOWNLOAD_INTERVAL: monthly
+
+ # Default variables to extract if no `variables` arg is provided.
+ _VARIABLES:
+   [
+     "significant_height_of_combined_wind_waves_and_swell",
+     "peak_wave_period",
+     "mean_wave_direction",
+     "wave_spectral_directional_width",
+   ]
+
+ # ERA5 has only the one dataset: reanalysis-era5-single-levels.
+ _DATASETS: { 1940-01-01: { reanalysis-era5-single-levels: default } }
+
+ # KEY: The standard NC Variable for merged data (and per `_standard_attrs.py`)
+ # source var == var as in the original dataset
+ # tfv_var == standard tuflow fv var name (for FVC writer template)
+
+ hs:
+   source_var: "swh"
+   tfv_var: "WVHT"
+   bc_scale: 1
+   bc_offset: 0
+
+ tp:
+   source_var: "pp1d"
+   tfv_var: "WVPER"
+   bc_scale: 1
+   bc_offset: 0
+
+ mwd:
+   source_var: "mwd"
+   tfv_var: "WVDIR"
+   bc_scale: 1
+   bc_offset: 0
+
+ spr:
+   source_var: "wdw"
+   tfv_var: null
+   bc_scale: 1
+   bc_offset: 0
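Unlike the CDS variant, `_BASE_URL` here points at Google's public ARCO-ERA5 Zarr store, so the GCP source can be read directly with xarray instead of going through the CDS API. A hedged sketch of opening that store and lazily subsetting the wave variables listed in `_VARIABLES` (assumes `gcsfs` and `zarr` are installed; the chunking and time window are illustrative, not the package's settings):

```python
import xarray as xr

STORE = "gs://gcp-public-data-arco-era5/ar/full_37-1h-0p25deg-chunk-1.zarr-v3"
WAVE_VARS = [
    "significant_height_of_combined_wind_waves_and_swell",
    "peak_wave_period",
    "mean_wave_direction",
    "wave_spectral_directional_width",
]

# Anonymous access to the public bucket; the open is lazy, nothing is downloaded yet.
ds = xr.open_zarr(STORE, chunks=None, storage_options={"token": "anon"})

# Restrict to the wave variables and one month before pulling any data.
subset = ds[WAVE_VARS].sel(time=slice("2010-01-01", "2010-01-31"))
print(subset)
```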
tfv_get_tools/providers/wave/copernicus_wave.py
@@ -0,0 +1,38 @@
+ """
+ This Copernicus wave downloader is branched from the Copernicus ocean downloader.
+ """
+
+ from tfv_get_tools.providers.ocean.copernicus_ocean import (
+     DownloadCopernicusOcean,
+     MergeCopernicusOcean,
+ )
+ import logging
+
+
+ class DownloadCopernicusWave(DownloadCopernicusOcean):
+     def _init_specific(self):
+         if self.model == "default":
+             self.log("Default model has been selected == 'GLO'")
+             self.model = "GLO"
+
+         self.source = "COPERNICUS"
+         self.mode = "WAVE"
+         self._load_config()
+
+         # User login check not yet performed
+         self._logged_in = False
+
+         # Cache for temporal extents
+         self._temporal_extents_cache = {}
+
+         if not self.verbose:
+             logging.getLogger("copernicusmarine").setLevel(logging.WARNING)
+
+
+ class MergeCopernicusWave(MergeCopernicusOcean):
+     def _init_specific(self):
+         self.source = "COPERNICUS"
+         self.mode = "WAVE"
+         if self.model == "default":
+             self.model = "GLO"
+         self._load_config()