tfv-get-tools 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tfv_get_tools/__init__.py +4 -0
- tfv_get_tools/_standard_attrs.py +107 -0
- tfv_get_tools/atmos.py +167 -0
- tfv_get_tools/cli/_cli_base.py +173 -0
- tfv_get_tools/cli/atmos_cli.py +192 -0
- tfv_get_tools/cli/ocean_cli.py +204 -0
- tfv_get_tools/cli/tide_cli.py +118 -0
- tfv_get_tools/cli/wave_cli.py +183 -0
- tfv_get_tools/fvc/__init__.py +3 -0
- tfv_get_tools/fvc/_atmos.py +230 -0
- tfv_get_tools/fvc/_fvc.py +218 -0
- tfv_get_tools/fvc/_ocean.py +171 -0
- tfv_get_tools/fvc/_tide.py +195 -0
- tfv_get_tools/ocean.py +170 -0
- tfv_get_tools/providers/__init__.py +0 -0
- tfv_get_tools/providers/_custom_conversions.py +34 -0
- tfv_get_tools/providers/_downloader.py +566 -0
- tfv_get_tools/providers/_merger.py +520 -0
- tfv_get_tools/providers/_utilities.py +255 -0
- tfv_get_tools/providers/atmos/barra2.py +209 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_c2.yaml +52 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_r2.yaml +85 -0
- tfv_get_tools/providers/atmos/cfgs/barra2_re2.yaml +70 -0
- tfv_get_tools/providers/atmos/cfgs/cfsr.yaml +68 -0
- tfv_get_tools/providers/atmos/cfgs/era5.yaml +77 -0
- tfv_get_tools/providers/atmos/cfgs/era5_gcp.yaml +77 -0
- tfv_get_tools/providers/atmos/cfsr.py +207 -0
- tfv_get_tools/providers/atmos/era5.py +20 -0
- tfv_get_tools/providers/atmos/era5_gcp.py +20 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_blk.yaml +64 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_glo.yaml +67 -0
- tfv_get_tools/providers/ocean/cfgs/copernicus_nws.yaml +62 -0
- tfv_get_tools/providers/ocean/cfgs/hycom.yaml +73 -0
- tfv_get_tools/providers/ocean/copernicus_ocean.py +457 -0
- tfv_get_tools/providers/ocean/hycom.py +611 -0
- tfv_get_tools/providers/wave/cawcr.py +166 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_aus_10m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_aus_4m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_glob_24m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_pac_10m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/cawcr_pac_4m.yaml +39 -0
- tfv_get_tools/providers/wave/cfgs/copernicus_glo.yaml +56 -0
- tfv_get_tools/providers/wave/cfgs/copernicus_nws.yaml +51 -0
- tfv_get_tools/providers/wave/cfgs/era5.yaml +48 -0
- tfv_get_tools/providers/wave/cfgs/era5_gcp.yaml +48 -0
- tfv_get_tools/providers/wave/copernicus_wave.py +38 -0
- tfv_get_tools/providers/wave/era5.py +232 -0
- tfv_get_tools/providers/wave/era5_gcp.py +169 -0
- tfv_get_tools/tide/__init__.py +2 -0
- tfv_get_tools/tide/_nodestring.py +214 -0
- tfv_get_tools/tide/_tidal_base.py +568 -0
- tfv_get_tools/utilities/_tfv_bc.py +78 -0
- tfv_get_tools/utilities/horizontal_padding.py +89 -0
- tfv_get_tools/utilities/land_masking.py +93 -0
- tfv_get_tools/utilities/parsers.py +44 -0
- tfv_get_tools/utilities/warnings.py +38 -0
- tfv_get_tools/wave.py +179 -0
- tfv_get_tools-0.2.0.dist-info/METADATA +286 -0
- tfv_get_tools-0.2.0.dist-info/RECORD +62 -0
- tfv_get_tools-0.2.0.dist-info/WHEEL +5 -0
- tfv_get_tools-0.2.0.dist-info/entry_points.txt +5 -0
- tfv_get_tools-0.2.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
import re
from pathlib import Path
from typing import Dict, List, Optional, Union

import xarray as xr

from tfv_get_tools.fvc._fvc import FVCWriter
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class TideFVCWriter(FVCWriter):
    """Writer for TIDE profile FVC files.

    Scans a dataset for per-nodestring variable pairs (``nsX_chainage`` /
    ``nsX_wl``) and emits one ``WL_CURT`` boundary-condition block per
    detected nodestring.
    """

    def __init__(self, source=None, info_url=None):
        """Initialize tide FVC writer.

        Args:
            source: Optional source label forwarded to the base writer.
            info_url: Optional source-information URL forwarded to the base writer.
        """
        super().__init__(requires_coordinates=False, source=source, info_url=info_url)
        # Maps nodestring prefix (e.g. 'ns1') to its identifier (e.g. '1').
        self.nodestrings: Dict[str, str] = {}

    def detect_nodestrings(self, ds: xr.Dataset) -> None:
        """Detect nodestrings from dataset variables.

        Looks for variables with pattern 'nsX_*' where X is the nodestring identifier.

        Args:
            ds: Input xarray Dataset

        Raises:
            ValueError: If no nodestring variables are found
        """
        # Capture everything between the leading 'ns' and the final '_suffix'.
        # The greedy group keeps identifiers that themselves contain
        # underscores intact (e.g. 'nsA_b_wl' -> id 'A_b').
        ns_pattern = re.compile(r"^ns(.+)_[^_]*$")

        # Get unique nodestring identifiers
        self.nodestrings = {}
        for var_name in ds.data_vars:
            match = ns_pattern.match(var_name)
            if match:
                ns_id = match.group(1)
                ns_prefix = f"ns{ns_id}"
                self.nodestrings[ns_prefix] = ns_id

        if not self.nodestrings:
            raise ValueError(
                "No nodestring variables found in dataset. "
                "Expected variables with pattern 'nsX_*' where X is the nodestring identifier."
            )

    def validate_nodestring_variables(self, ds: xr.Dataset) -> None:
        """Validate dataset has required variables for each nodestring.

        Args:
            ds: Input xarray Dataset

        Raises:
            ValueError: If required variables are missing
        """
        # Every detected nodestring must supply both a chainage and a water
        # level variable; anything else is reported in a single error.
        required_suffixes = ["_chainage", "_wl"]
        missing = []

        for ns_prefix in self.nodestrings:
            for suffix in required_suffixes:
                var_name = f"{ns_prefix}{suffix}"
                if var_name not in ds:
                    missing.append(var_name)

        if missing:
            raise ValueError(
                f"Missing required variables for tide configuration: {', '.join(missing)}"
            )

    def write_boundary_conditions(
        self,
        lines: List[str],
        nc_path: str,
    ) -> None:
        """Write boundary conditions block.

        One ``WL_CURT`` block is appended per detected nodestring, in sorted
        prefix order.

        NOTE(review): uses ``self.time_var``, which is assumed to be set by
        ``process_dataset()`` in the base class — call ``generate()`` rather
        than this method directly.

        Args:
            lines: List to append lines to
            nc_path: Path to the NetCDF file
        """
        for ns_prefix, ns_id in sorted(self.nodestrings.items()):
            lines.extend(
                [
                    f"BC == WL_CURT, {ns_id}, {nc_path}",
                    f"  BC Header == {self.time_var}, {ns_prefix}_chainage, dummy, {ns_prefix}_wl",
                    "  BC Update dt == 60.",
                    "  BC Time Units == days",
                    "  BC Reference Time == 01/01/1990 00:00",
                    "  BC Default == NaN",
                    "  Includes MSLP == 0",
                    "End BC",
                    "",
                ]
            )

    def generate(
        self,
        ds: xr.Dataset,
        nc_path: str = "tide_forcing.nc",
    ) -> List[str]:
        """Generate FVC configuration content for tide forcing.

        Args:
            ds: Input xarray Dataset containing tide variables
            nc_path: Path or filename for the NetCDF file (referenced in FVC content)

        Returns:
            List[str]: FVC configuration content as a list of strings

        Raises:
            ValueError: If required variables are missing for any nodestring
        """
        # Process dataset to detect time settings and nodestrings
        self.process_dataset(ds)
        self.detect_nodestrings(ds)
        self.validate_nodestring_variables(ds)

        lines = []

        # Write header with detected nodestrings
        source = ds.attrs.get("source", "")
        self.write_header(lines, f"{source} TIDE Dataset", ds)
        # NOTE: IDs are sorted lexicographically ('10' < '2'); this only
        # affects the informational comment line, not the BC blocks.
        lines.append(f"! Nodestrings: {', '.join(sorted(self.nodestrings.values()))}")
        lines.append("")

        # Write boundary conditions
        self.write_boundary_conditions(lines, nc_path)

        return lines

    def write_file(
        self,
        lines: List[str],
        output_path: Union[str, Path],
        filename: Optional[str] = None,
    ) -> None:
        """Write FVC content to a file.

        Args:
            lines: FVC content lines (written one per line)
            output_path: Directory to write into (created if absent)
            filename: Optional filename; a trailing '.nc' is swapped for
                '.fvc', and '.fvc' is appended if no extension is present.
        """
        output_path = Path(output_path)
        filename = filename or "tide_forcing.fvc"
        if not filename.endswith(".fvc"):
            # Swap only a *trailing* '.nc' for '.fvc'. The previous
            # str.replace() approach left extension-less names unchanged and
            # could corrupt names with '.nc' mid-string.
            if filename.endswith(".nc"):
                filename = filename[: -len(".nc")] + ".fvc"
            else:
                filename = filename + ".fvc"

        output_path.mkdir(parents=True, exist_ok=True)
        with open(output_path / filename, "w") as f:
            for line in lines:
                f.write(line + "\n")
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def write_tide_fvc(
    ds: xr.Dataset,
    nc_path: str = "tide_forcing.nc",
    output_path: Optional[Union[str, Path]] = None,
    filename: Optional[str] = None,
    source: Optional[str] = None,
    info_url: Optional[str] = None,
) -> List[str]:
    """Generate (and optionally write) TUFLOW-FV tide forcing configuration.

    Args:
        ds: Input xarray Dataset containing tide variables
        nc_path: Path or filename for the NetCDF file (referenced in FVC content)
        output_path: Optional path to write FVC file (if None, no file is written)
        filename: Optional filename for FVC file (if None, derives from nc_path)
        source: Optional source label printed in the FVC header
        info_url: Optional URL for source information to be printed in FVC

    Returns:
        List[str]: FVC configuration content as a list of strings

    Examples:
        # Dataset with numeric nodestring identifiers
        >>> ds = xr.Dataset({
        ...     'ns1_chainage': [...],
        ...     'ns1_wl': [...],
        ...     'ns2_chainage': [...],
        ...     'ns2_wl': [...]
        ... })
        >>> lines = write_tide_fvc(ds, nc_path="tide.nc")

        # Dataset with named nodestring identifiers
        >>> ds = xr.Dataset({
        ...     'nsEast_chainage': [...],
        ...     'nsEast_wl': [...],
        ...     'nsWest_chainage': [...],
        ...     'nsWest_wl': [...]
        ... })
        >>> lines = write_tide_fvc(ds, nc_path="tide.nc")
    """
    tide_writer = TideFVCWriter(source=source, info_url=info_url)
    fvc_lines = tide_writer.generate(ds, nc_path)

    # Persist to disk only when a destination directory was supplied.
    if output_path is not None:
        tide_writer.write_file(fvc_lines, output_path, filename)

    return fvc_lines
|
tfv_get_tools/ocean.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Union, Tuple, Optional, List, Literal
|
|
4
|
+
import pandas as pd
|
|
5
|
+
|
|
6
|
+
from tfv_get_tools.utilities._tfv_bc import write_tuflowfv_fvc
|
|
7
|
+
from tfv_get_tools.providers._downloader import BatchDownloadResult
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def DownloadOcean(
    start_date: Union[str, datetime, pd.Timestamp],
    end_date: Union[str, datetime, pd.Timestamp],
    xlims: Tuple[float, float],
    ylims: Tuple[float, float],
    zlims: Optional[Tuple[float, float]] = None,
    out_path: Union[str, Path] = Path("./raw"),
    source: str = "HYCOM",
    model: str = "default",
    prefix: Optional[str] = None,
    verbose: bool = False,
    variables: Optional[List[str]] = None,
    time_interval: Union[int, Literal["best"]] = 24,
    skip_check: bool = False,
    **kwargs,
) -> "BatchDownloadResult":
    """Download Ocean Data

    Users should call this function, not the individual downloader classes directly.

    This module will download ocean data from several possible sources to facilitate
    TUFLOW FV and SWAN modelling.

    The following sources have been implemented:
        - `HYCOM` - Naval Research Laboratory - Global Ocean Forecast System (GOFS) 3.1
        - `Copernicus` - Various models from the Copernicus Marine Service
            - `GLO` - Global model domain

    Args:
        start_date: Start date. The string format is `%Y-%m-%d` (e.g., '2011-01-01')
        end_date: End date. The string format is `%Y-%m-%d` (e.g., '2011-02-01')
        xlims: Minimum and maximum longitude, as floats. e.g., (115, 120)
        ylims: Minimum and maximum latitude, as floats. e.g., (-40, -35)
        zlims: Minimum and maximum depth, as floats. e.g., (50, 250).
            Defaults to the maximum per data source.
        out_path: Output directory for data files
        source: Data source to download. One of {'HYCOM', 'Copernicus'}
        model: Choice of model, depending on "source"
        prefix: Extra file name prefix
        verbose: Print extra program information
        variables: List of variables to download (surf_el, salinity, water_temp, water_u, water_v)
        time_interval: Time interval in hours for HYCOM only. Defaults to 24. Use "best" for the highest available.
        skip_check: Skip user confirmation
        **kwargs: Additional arguments

    Returns:
        BatchDownloadResult: Results of the download operation

    Raises:
        ValueError: If `source` is not one of the supported sources.
    """
    # Validate the source before doing any work so a typo fails immediately.
    src = source.lower()
    if src == "hycom":
        from tfv_get_tools.providers.ocean.hycom import DownloadHycom as downloader_cls
    elif src == "copernicus":
        from tfv_get_tools.providers.ocean.copernicus_ocean import DownloadCopernicusOcean as downloader_cls
    else:
        raise ValueError(f'Unrecognised source {source}. Must be one of: HYCOM, Copernicus')

    # Both providers take an identical argument set; build it once instead of
    # duplicating the call per branch.
    downloader = downloader_cls(
        start_date=start_date,
        end_date=end_date,
        xlims=xlims,
        ylims=ylims,
        zlims=zlims,
        out_path=out_path,
        model=model,
        prefix=prefix,
        time_interval=time_interval,
        verbose=verbose,
        variables=variables,
        skip_check=skip_check,
        **kwargs,
    )

    return downloader.execute_download()
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def MergeOcean(
    in_path: Path = Path("./raw"),
    out_path: Path = Path("."),
    source: str = 'HYCOM',
    model: str = 'default',
    fname: str = None,
    time_start: str = None,
    time_end: str = None,
    write_fvc=True,
    reproject: int = None,
    local_tz: Tuple[float, str] = None,
    pad_dry: bool = False,
    wrapto360=False,
    write=True,
):
    """
    Merge raw downloaded ocean datafiles into a single netcdf file, ready for TUFLOW-FV modelling
    Optionally create an accompanying .fvc file.

    **Use the same `source` and `model` that was supplied to the Downloader function**

    Args:
        in_path (Path, optional): Directory of the raw ocean data-files. Defaults to Path("./raw").
        out_path (Path, optional): Output directory for the merged ocean netcdf and (opt) the fvc. Defaults to Path(".").
        fname (str, optional): Merged ocean netcdf filename. Defaults to None.
        source (str, optional): Source to be merged. One of {'HYCOM', 'Copernicus'}. Defaults to 'HYCOM'.
        model (str, optional): Model for source to be merged. Defaults to 'default'.
        time_start (str, optional): Start time limit of the merged dataset (str: "YYYY-mm-dd HH:MM"). Defaults to None.
        time_end (str, optional): End time limit of the merged dataset (str: "YYYY-mm-dd HH:MM"). Defaults to None.
        write_fvc (bool, optional): Optionally write an accompanying .fvc file. Defaults to True.
            NOTE(review): currently accepted but not forwarded to the merger classes — confirm intended.
        reproject (int, optional): Optionally reproject, based on EPSG code. Defaults to None.
        local_tz (Tuple[float, str], optional): Add local timezone; a tuple of Offset[float] and Label[str].
        pad_dry (bool, optional): Optionally pad horizontally (i.e., fill nans with respect to depth). Defaults to False.
        wrapto360 (bool, optional): Optionally wrap longitude to (0, 360) rather than (-180, 180). Defaults to False.
        write (bool): Write the dataset. If False, the virtual merged dataset will be returned.

    Raises:
        ValueError: If `source` is not one of the supported sources.
    """
    # Validate the source up-front: previously an unknown value fell through
    # both branches, causing an UnboundLocalError at `mrg.ds` (write=False)
    # or a silent no-op (write=True).
    src = source.lower()
    if src not in ("hycom", "copernicus"):
        raise ValueError(f'Unrecognised source {source}. Must be one of: HYCOM, Copernicus')

    kwargs = dict(
        in_path=in_path,
        out_path=out_path,
        fname=fname,
        source=source,
        model=model,
        time_start=time_start,
        time_end=time_end,
        # write_fvc=write_fvc,  # NOTE(review): deliberately not forwarded? See docstring.
        reproject=reproject,
        local_tz=local_tz,
        pad_dry=pad_dry,
        wrapto360=wrapto360,
        write=write,
    )

    # Providers are imported lazily so only the requested source's
    # dependencies are needed.
    if src == "hycom":
        from tfv_get_tools.providers.ocean.hycom import MergeHYCOM as merger_cls
    else:
        from tfv_get_tools.providers.ocean.copernicus_ocean import MergeCopernicusOcean as merger_cls

    mrg = merger_cls(**kwargs)

    # If the user requested no-write, return the dataset object.
    if not write:
        return mrg.ds
|
|
File without changes
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import xarray as xr
|
|
2
|
+
import numpy as np
|
|
3
|
+
from xarray import DataArray
|
|
4
|
+
import dask.array as da
|
|
5
|
+
|
|
6
|
+
def freq_to_period(freq: xr.DataArray) -> xr.DataArray:
    """Convert frequency (Hz) to period (s) by taking the reciprocal."""
    period = 1 / freq
    return period
|
|
9
|
+
|
|
10
|
+
def dewpt_to_relhum(dew_temp: DataArray, sfc_temp: DataArray) -> DataArray:
    """Calculate relative humidity (%) from dew-point and surface temperature.

    RH (%) = (e/esat)*100, with the ECMWF Teten's-formula constants
    a1=611.21, a3=17.502, a4=32.19, T0=273.16.
    https://confluence.ecmwf.int/pages/viewpage.action?pageId=171411214

    Args:
        dew_temp (np.ndarray): dew point temperature in Kelvin
        sfc_temp (np.ndarray): surface temperature in Kelvin

    Returns:
        np.ndarray: Relative humidity (%)
    """

    def _vapour_pressure(temp_k):
        # Teten's formula over water with ECMWF IFS constants.
        return 611.21 * np.exp(17.502 * ((temp_k - 273.16) / (temp_k - 32.19)))

    e = _vapour_pressure(dew_temp)        # actual vapour pressure at dew point
    esat = _vapour_pressure(sfc_temp)     # saturation vapour pressure at surface temp

    return (e / esat) * 100
|