roms-tools 2.2.1__py3-none-any.whl → 2.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ci/environment.yml +1 -0
- roms_tools/__init__.py +2 -0
- roms_tools/analysis/roms_output.py +590 -0
- roms_tools/{setup/download.py → download.py} +3 -0
- roms_tools/{setup/plot.py → plot.py} +34 -28
- roms_tools/setup/boundary_forcing.py +199 -203
- roms_tools/setup/datasets.py +60 -136
- roms_tools/setup/grid.py +40 -67
- roms_tools/setup/initial_conditions.py +249 -247
- roms_tools/setup/nesting.py +6 -27
- roms_tools/setup/river_forcing.py +41 -76
- roms_tools/setup/surface_forcing.py +125 -75
- roms_tools/setup/tides.py +31 -51
- roms_tools/setup/topography.py +1 -1
- roms_tools/setup/utils.py +44 -224
- roms_tools/tests/test_analysis/test_roms_output.py +269 -0
- roms_tools/tests/{test_setup/test_regrid.py → test_regrid.py} +1 -1
- roms_tools/tests/test_setup/test_boundary_forcing.py +221 -58
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/.zattrs +5 -3
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/.zmetadata +156 -121
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/abs_time/.zarray +2 -2
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/abs_time/.zattrs +2 -1
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/abs_time/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/bry_time/.zarray +2 -2
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/bry_time/.zattrs +1 -1
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/bry_time/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_east/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_north/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_south/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_west/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_east/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_north/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_south/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_west/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_east/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_north/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_south/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_west/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_east/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_north/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_south/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_west/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_east/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_north/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_south/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_west/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_east/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_north/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_south/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_west/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_east/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_east/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_east/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_north/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_north/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_north/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_south/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_south/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_south/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_west/.zarray +4 -4
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_west/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zeta_west/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zattrs +4 -4
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zmetadata +4 -4
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/angle/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/angle_coarse/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/f/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/h/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_coarse/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_rho/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_u/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_v/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_coarse/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_rho/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_u/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_v/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_coarse/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_rho/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_u/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_v/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/pm/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/pn/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/.zattrs +2 -1
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/.zmetadata +6 -4
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/Cs_r/.zattrs +1 -1
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/Cs_w/.zattrs +1 -1
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/NH4/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/NO3/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/PO4/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/abs_time/.zattrs +1 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatSi/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/ocean_time/.zattrs +1 -1
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spC/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spCaCO3/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spFe/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/temp/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/u/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/ubar/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/v/0.0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/vbar/0.0.0 +0 -0
- roms_tools/tests/test_setup/test_data/river_forcing_no_climatology.zarr/.zmetadata +30 -0
- roms_tools/tests/test_setup/test_data/river_forcing_no_climatology.zarr/river_location/.zarray +22 -0
- roms_tools/tests/test_setup/test_data/river_forcing_no_climatology.zarr/river_location/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/river_forcing_no_climatology.zarr/river_location/0.0 +0 -0
- roms_tools/tests/test_setup/test_data/river_forcing_with_bgc.zarr/.zmetadata +30 -0
- roms_tools/tests/test_setup/test_data/river_forcing_with_bgc.zarr/river_location/.zarray +22 -0
- roms_tools/tests/test_setup/test_data/river_forcing_with_bgc.zarr/river_location/.zattrs +8 -0
- roms_tools/tests/test_setup/test_data/river_forcing_with_bgc.zarr/river_location/0.0 +0 -0
- roms_tools/tests/test_setup/test_datasets.py +1 -1
- roms_tools/tests/test_setup/test_grid.py +1 -14
- roms_tools/tests/test_setup/test_initial_conditions.py +205 -67
- roms_tools/tests/test_setup/test_nesting.py +0 -16
- roms_tools/tests/test_setup/test_river_forcing.py +9 -37
- roms_tools/tests/test_setup/test_surface_forcing.py +103 -74
- roms_tools/tests/test_setup/test_tides.py +5 -17
- roms_tools/tests/test_setup/test_topography.py +1 -1
- roms_tools/tests/test_setup/test_utils.py +57 -1
- roms_tools/tests/{test_utils.py → test_tiling/test_partition.py} +1 -1
- roms_tools/tiling/partition.py +338 -0
- roms_tools/utils.py +310 -276
- roms_tools/vertical_coordinate.py +227 -0
- {roms_tools-2.2.1.dist-info → roms_tools-2.4.0.dist-info}/METADATA +1 -1
- {roms_tools-2.2.1.dist-info → roms_tools-2.4.0.dist-info}/RECORD +151 -142
- roms_tools/setup/vertical_coordinate.py +0 -109
- /roms_tools/{setup/regrid.py → regrid.py} +0 -0
- {roms_tools-2.2.1.dist-info → roms_tools-2.4.0.dist-info}/LICENSE +0 -0
- {roms_tools-2.2.1.dist-info → roms_tools-2.4.0.dist-info}/WHEEL +0 -0
- {roms_tools-2.2.1.dist-info → roms_tools-2.4.0.dist-info}/top_level.txt +0 -0
roms_tools/setup/tides.py
CHANGED
@@ -3,9 +3,13 @@ import xarray as xr
 import numpy as np
 from typing import Dict, Union, List
 import importlib.metadata
+import matplotlib.pyplot as plt
+from pathlib import Path
 from dataclasses import dataclass, field
-from roms_tools
-from roms_tools.
+from roms_tools import Grid
+from roms_tools.plot import _plot
+from roms_tools.regrid import LateralRegrid
+from roms_tools.utils import save_datasets
 from roms_tools.setup.datasets import TPXODataset
 from roms_tools.setup.utils import (
     nan_check,
@@ -13,16 +17,12 @@ from roms_tools.setup.utils import (
     interpolate_from_rho_to_u,
     interpolate_from_rho_to_v,
     get_variable_metadata,
-    save_datasets,
     get_target_coords,
     rotate_velocities,
     get_vector_pairs,
     _to_yaml,
     _from_yaml,
 )
-from roms_tools.setup.regrid import LateralRegrid
-import matplotlib.pyplot as plt
-from pathlib import Path
 
 
 @dataclass(frozen=True, kw_only=True)
@@ -319,6 +319,8 @@ class TidalForcing:
         >>> tidal_forcing.plot("ssh_Re", nc=0)
         """
 
+        if var_name not in self.ds:
+            raise ValueError(f"Variable '{var_name}' is not found in dataset.")
         field = self.ds[var_name].isel(ntides=ntides)
 
         if self.use_dask:
@@ -328,25 +330,28 @@ class TidalForcing:
             field = field.load()
 
         if all(dim in field.dims for dim in ["eta_rho", "xi_rho"]):
-
-
-
-            )
+            lon_deg = self.grid.ds["lon_rho"]
+            lat_deg = self.grid.ds["lat_rho"]
+            mask = self.grid.ds["mask_rho"]
 
         elif all(dim in field.dims for dim in ["eta_rho", "xi_u"]):
-
-
-
-            )
+            lon_deg = self.grid.ds["lon_u"]
+            lat_deg = self.grid.ds["lat_u"]
+            mask = self.grid.ds["mask_u"]
 
         elif all(dim in field.dims for dim in ["eta_v", "xi_rho"]):
-
-
-
-
+            lon_deg = self.grid.ds["lon_v"]
+            lat_deg = self.grid.ds["lat_v"]
+            mask = self.grid.ds["mask_v"]
+
         else:
             ValueError("provided field does not have two horizontal dimension")
 
+        field = field.where(mask)
+        if self.grid.straddle:
+            lon_deg = xr.where(lon_deg > 180, lon_deg - 360, lon_deg)
+        field = field.assign_coords({"lon": lon_deg, "lat": lat_deg})
+
         title = "%s, ntides = %i" % (field.long_name, self.ds[var_name].ntides[ntides])
 
         vmax = max(field.max(), -field.min())
@@ -357,44 +362,25 @@ class TidalForcing:
         kwargs = {"vmax": vmax, "vmin": vmin, "cmap": cmap}
 
         _plot(
-            self.grid.ds,
             field=field,
-
+            title=title,
             c="g",
             kwargs=kwargs,
-            title=title,
         )
 
-    def save(
-        self, filepath: Union[str, Path], np_eta: int = None, np_xi: int = None
-    ) -> None:
+    def save(self, filepath: Union[str, Path]) -> None:
         """Save the tidal forcing information to a netCDF4 file.
 
-        This method supports saving the dataset in two modes:
-
-        1. **Single File Mode (default)**:
-
-        If both `np_eta` and `np_xi` are `None`, the entire dataset is saved as a single netCDF4 file
-        with the base filename specified by `filepath.nc`.
-
-        2. **Partitioned Mode**:
-
-        - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
-        - Each spatial tile is saved as a separate netCDF4 file.
-
         Parameters
         ----------
         filepath : Union[str, Path]
-            The
-
-            The number of partitions along the `eta` direction. If `None`, no spatial partitioning is performed.
-        np_xi : int, optional
-            The number of partitions along the `xi` direction. If `None`, no spatial partitioning is performed.
+            The path or filename where the dataset will be saved. If a directory is specified,
+            the file will be saved with a default name within that directory.
 
         Returns
         -------
-
-            A
+        Path
+            A `Path` object representing the location of the saved file.
         """
 
         # Ensure filepath is a Path object
@@ -404,17 +390,11 @@ class TidalForcing:
         if filepath.suffix == ".nc":
             filepath = filepath.with_suffix("")
 
-        if self.use_dask:
-            from dask.diagnostics import ProgressBar
-
-            with ProgressBar():
-                self.ds.load()
-
         dataset_list = [self.ds]
         output_filenames = [str(filepath)]
 
         saved_filenames = save_datasets(
-            dataset_list, output_filenames,
+            dataset_list, output_filenames, use_dask=self.use_dask
         )
 
         return saved_filenames
@@ -464,7 +444,7 @@ class TidalForcing:
             grid=grid,
             **tidal_forcing_params,
             use_dask=use_dask,
-            bypass_validation=bypass_validation
+            bypass_validation=bypass_validation,
         )
 
     def _correct_tides(self, data):
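The hunks above change the public `TidalForcing` API in two small ways: `plot` now rejects unknown variable names with a `ValueError`, and `save` drops the `np_eta`/`np_xi` arguments (spatial partitioning appears to move to the new `roms_tools/tiling/partition.py` listed at the top, while `save_datasets` now receives `use_dask`). A minimal, illustrative sketch of calling code against 2.4.0; the helper function and output filename are hypothetical, only the method signatures come from this diff:

```python
from pathlib import Path


def save_and_plot(tidal_forcing, var_name: str = "ssh_Re"):
    """Hypothetical helper: exercises the 2.4.0 TidalForcing API shown above."""
    # plot() now raises ValueError if var_name is missing from the dataset
    tidal_forcing.plot(var_name, ntides=0)

    # save() no longer accepts np_eta/np_xi; it writes a single netCDF4 file
    # and returns the list of saved Path objects
    return tidal_forcing.save(Path("tidal_forcing.nc"))
```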
roms_tools/setup/topography.py
CHANGED
@@ -6,8 +6,8 @@ import gcm_filters
 from roms_tools.setup.utils import handle_boundaries
 import warnings
 from itertools import count
+from roms_tools.regrid import LateralRegrid
 from roms_tools.setup.datasets import ETOPO5Dataset, SRTM15Dataset
-from roms_tools.setup.regrid import LateralRegrid
 
 
 def _add_topography(
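The topography change is the same relocation visible in tides.py and in the file list at the top: `regrid`, `plot`, and `download` move out of `roms_tools.setup`, and shared helpers such as `save_datasets` and the rho-to-u/v interpolators are now imported from `roms_tools.utils`. A sketch of the old versus new import paths, limited to the paths that actually appear in these diffs:

```python
# roms-tools 2.2.1 (paths removed in these diffs):
# from roms_tools.setup.regrid import LateralRegrid
# from roms_tools.setup.utils import save_datasets

# roms-tools 2.4.0 (paths added in these diffs):
from roms_tools import Grid
from roms_tools.plot import _plot
from roms_tools.regrid import LateralRegrid
from roms_tools.utils import (
    interpolate_from_rho_to_u,
    interpolate_from_rho_to_v,
    save_datasets,
)
```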
roms_tools/setup/utils.py
CHANGED
@@ -3,12 +3,12 @@ import numpy as np
 from typing import Union, Any, Dict, Type
 import pandas as pd
 import cftime
-from roms_tools.utils import partition
 from pathlib import Path
 from datetime import datetime
 from dataclasses import fields, asdict
 import importlib.metadata
 import yaml
+from roms_tools.utils import interpolate_from_rho_to_u, interpolate_from_rho_to_v
 
 
 def nan_check(field, mask, error_message=None) -> None:
@@ -71,100 +71,6 @@ def substitute_nans_by_fillvalue(field, fill_value=0.0) -> xr.DataArray:
     return field.fillna(fill_value)
 
 
-def interpolate_from_rho_to_u(field, method="additive"):
-    """Interpolates the given field from rho points to u points.
-
-    This function performs an interpolation from the rho grid (cell centers) to the u grid
-    (cell edges in the xi direction). Depending on the chosen method, it either averages
-    (additive) or multiplies (multiplicative) the field values between adjacent rho points
-    along the xi dimension. It also handles the removal of unnecessary coordinate variables
-    and updates the dimensions accordingly.
-
-    Parameters
-    ----------
-    field : xr.DataArray
-        The input data array on the rho grid to be interpolated. It is assumed to have a dimension
-        named "xi_rho".
-
-    method : str, optional, default='additive'
-        The method to use for interpolation. Options are:
-        - 'additive': Average the field values between adjacent rho points.
-        - 'multiplicative': Multiply the field values between adjacent rho points. Appropriate for
-          binary masks.
-
-    Returns
-    -------
-    field_interpolated : xr.DataArray
-        The interpolated data array on the u grid with the dimension "xi_u".
-    """
-
-    if method == "additive":
-        field_interpolated = 0.5 * (field + field.shift(xi_rho=1)).isel(
-            xi_rho=slice(1, None)
-        )
-    elif method == "multiplicative":
-        field_interpolated = (field * field.shift(xi_rho=1)).isel(xi_rho=slice(1, None))
-    else:
-        raise NotImplementedError(f"Unsupported method '{method}' specified.")
-
-    vars_to_drop = ["lat_rho", "lon_rho", "eta_rho", "xi_rho"]
-    for var in vars_to_drop:
-        if var in field_interpolated.coords:
-            field_interpolated = field_interpolated.drop_vars(var)
-
-    field_interpolated = field_interpolated.swap_dims({"xi_rho": "xi_u"})
-
-    return field_interpolated
-
-
-def interpolate_from_rho_to_v(field, method="additive"):
-    """Interpolates the given field from rho points to v points.
-
-    This function performs an interpolation from the rho grid (cell centers) to the v grid
-    (cell edges in the eta direction). Depending on the chosen method, it either averages
-    (additive) or multiplies (multiplicative) the field values between adjacent rho points
-    along the eta dimension. It also handles the removal of unnecessary coordinate variables
-    and updates the dimensions accordingly.
-
-    Parameters
-    ----------
-    field : xr.DataArray
-        The input data array on the rho grid to be interpolated. It is assumed to have a dimension
-        named "eta_rho".
-
-    method : str, optional, default='additive'
-        The method to use for interpolation. Options are:
-        - 'additive': Average the field values between adjacent rho points.
-        - 'multiplicative': Multiply the field values between adjacent rho points. Appropriate for
-          binary masks.
-
-    Returns
-    -------
-    field_interpolated : xr.DataArray
-        The interpolated data array on the v grid with the dimension "eta_v".
-    """
-
-    if method == "additive":
-        field_interpolated = 0.5 * (field + field.shift(eta_rho=1)).isel(
-            eta_rho=slice(1, None)
-        )
-    elif method == "multiplicative":
-        field_interpolated = (field * field.shift(eta_rho=1)).isel(
-            eta_rho=slice(1, None)
-        )
-    else:
-        raise NotImplementedError(f"Unsupported method '{method}' specified.")
-
-    vars_to_drop = ["lat_rho", "lon_rho", "eta_rho", "xi_rho"]
-    for var in vars_to_drop:
-        if var in field_interpolated.coords:
-            field_interpolated = field_interpolated.drop_vars(var)
-
-    field_interpolated = field_interpolated.swap_dims({"eta_rho": "eta_v"})
-
-    return field_interpolated
-
-
 def one_dim_fill(da: xr.DataArray, dim: str, direction="forward") -> xr.DataArray:
     """Fill NaN values in a DataArray along a specified dimension.
 
@@ -224,23 +130,36 @@ def interpolate_from_climatology(
     time_dim_name: str,
     time: Union[xr.DataArray, pd.DatetimeIndex],
 ) -> Union[xr.DataArray, xr.Dataset]:
-    """
+    """Temporally interpolates a field based on specified time points.
 
-
+    This function performs temporal interpolation on the input `field` to match the provided `time` values.
+    If the input `field` is an `xarray.Dataset`, the interpolation is applied to all its data variables individually.
 
     Parameters
    ----------
     field : xarray.DataArray or xarray.Dataset
-        The field
+        The input field to be interpolated.
+        - If `field` is an `xarray.DataArray`, it should have a time dimension identified by `time_dim_name`.
+        - If `field` is an `xarray.Dataset`, all variables within the dataset are interpolated along the specified time dimension.
+        The time dimension is assumed to represent `day_of_year` for climatological purposes.
     time_dim_name : str
-        The name of the dimension in `field
+        The name of the time dimension in the `field`. This dimension is used for interpolation.
     time : xarray.DataArray or pandas.DatetimeIndex
-        The target time points for interpolation.
+        The target time points for interpolation. The time values should be compatible with the time format used in the `field`.
 
     Returns
     -------
     xarray.DataArray or xarray.Dataset
-        The field
+        The interpolated field, with the same type as the input (`xarray.DataArray` or `xarray.Dataset`),
+        but aligned to the specified `time` values.
+
+    Notes
+    -----
+    - The interpolation assumes the time dimension in `field` corresponds to `day_of_year`.
+      If the input time values are in a datetime format, ensure they are converted to `day_of_year` before calling this function.
+      For example, you can preprocess the time as follows:
+
+    >>> field["time"] = field["time"].dt.dayofyear
     """
 
     def interpolate_single_field(data_array: xr.DataArray) -> xr.DataArray:
@@ -250,11 +169,11 @@ def interpolate_from_climatology(
             day_of_year = time.dt.dayofyear
         else:
             if np.size(time) == 1:
-
+                # Convert single datetime64 object to pandas.Timestamp
+                day_of_year = pd.Timestamp(time).dayofyear
             else:
-
-
-                data_array[time_dim_name] = data_array[time_dim_name].dt.days
+                # Convert each datetime64 object in the array to pandas.Timestamp
+                day_of_year = np.array([pd.Timestamp(t).dayofyear for t in time])
 
         # Concatenate across the beginning and end of the year
         time_concat = xr.concat(
@@ -294,6 +213,7 @@ def interpolate_from_climatology(
             for var, data_array in field.data_vars.items()
         }
         return xr.Dataset(interpolated_data_vars, attrs=field.attrs)
+
     else:
         raise TypeError("Input 'field' must be an xarray.DataArray or xarray.Dataset.")
 
@@ -661,59 +581,6 @@ def group_by_year(ds, filepath):
     return dataset_list, output_filenames
 
 
-def save_datasets(dataset_list, output_filenames, np_eta=None, np_xi=None):
-    """Save the list of datasets to netCDF4 files, with optional spatial partitioning.
-
-    Parameters
-    ----------
-    dataset_list : list
-        List of datasets to be saved.
-    output_filenames : list
-        List of filenames for the output files.
-    np_eta : int, optional
-        The number of partitions along the `eta` direction. If `None`, no spatial partitioning is performed.
-    np_xi : int, optional
-        The number of partitions along the `xi` direction. If `None`, no spatial partitioning is performed.
-
-    Returns
-    -------
-    List[Path]
-        A list of Path objects for the filenames that were saved.
-    """
-
-    saved_filenames = []
-
-    if np_eta is None and np_xi is None:
-        # Save the dataset as a single file
-        output_filenames = [f"{filename}.nc" for filename in output_filenames]
-        xr.save_mfdataset(dataset_list, output_filenames)
-
-        saved_filenames.extend(Path(f) for f in output_filenames)
-
-    else:
-        # Partition the dataset and save each partition as a separate file
-        np_eta = np_eta or 1
-        np_xi = np_xi or 1
-
-        partitioned_datasets = []
-        partitioned_filenames = []
-        for dataset, base_filename in zip(dataset_list, output_filenames):
-            partition_indices, partitions = partition(
-                dataset, np_eta=np_eta, np_xi=np_xi
-            )
-            partition_filenames = [
-                f"{base_filename}.{index}.nc" for index in partition_indices
-            ]
-            partitioned_datasets.extend(partitions)
-            partitioned_filenames.extend(partition_filenames)
-
-        xr.save_mfdataset(partitioned_datasets, partitioned_filenames)
-
-        saved_filenames.extend(Path(f) for f in partitioned_filenames)
-
-    return saved_filenames
-
-
 def get_target_coords(grid, use_coarse_grid=False):
     """Retrieves longitude and latitude coordinates from the grid, adjusting them based
     on longitude range.
@@ -745,8 +612,6 @@ def get_target_coords(grid, use_coarse_grid=False):
         mask = grid.ds.get("mask_coarse")
         if mask is not None:
             mask = mask.rename({"eta_coarse": "eta_rho", "xi_coarse": "xi_rho"})
-            mask_u = interpolate_from_rho_to_u(mask, method="multiplicative")
-            mask_v = interpolate_from_rho_to_v(mask, method="multiplicative")
 
         lat_psi = grid.ds.get("lat_psi_coarse")
         lon_psi = grid.ds.get("lon_psi_coarse")
@@ -756,8 +621,6 @@ def get_target_coords(grid, use_coarse_grid=False):
         lon = grid.ds.lon_rho
         angle = grid.ds.angle
         mask = grid.ds.get("mask_rho")
-        mask_u = grid.ds.get("mask_u")
-        mask_v = grid.ds.get("mask_v")
         lat_psi = grid.ds.get("lat_psi")
         lon_psi = grid.ds.get("lon_psi")
 
@@ -780,8 +643,6 @@ def get_target_coords(grid, use_coarse_grid=False):
         "lon_psi": lon_psi,
         "angle": angle,
         "mask": mask,
-        "mask_u": mask_u,
-        "mask_v": mask_v,
         "straddle": straddle,
     }
 
@@ -863,45 +724,6 @@ def compute_barotropic_velocity(
     return vel_bar
 
 
-def transpose_dimensions(da: xr.DataArray) -> xr.DataArray:
-    """Transpose the dimensions of an xarray.DataArray to ensure that 'time', any
-    dimension starting with 's_', 'eta_', and 'xi_' are ordered first, followed by the
-    remaining dimensions in their original order.
-
-    Parameters
-    ----------
-    da : xarray.DataArray
-        The input DataArray whose dimensions are to be reordered.
-
-    Returns
-    -------
-    xarray.DataArray
-        The DataArray with dimensions reordered so that 'time', 's_*', 'eta_*',
-        and 'xi_*' are first, in that order, if they exist.
-    """
-
-    # List of preferred dimension patterns
-    preferred_order = ["time", "s_", "eta_", "xi_"]
-
-    # Get the existing dimensions in the DataArray
-    dims = list(da.dims)
-
-    # Collect dimensions that match any of the preferred patterns
-    matched_dims = []
-    for pattern in preferred_order:
-        # Find dimensions that start with the pattern
-        matched_dims += [dim for dim in dims if dim.startswith(pattern)]
-
-    # Create a new order: first the matched dimensions, then the rest
-    remaining_dims = [dim for dim in dims if dim not in matched_dims]
-    new_order = matched_dims + remaining_dims
-
-    # Transpose the DataArray to the new order
-    transposed_da = da.transpose(*new_order)
-
-    return transposed_da
-
-
 def get_vector_pairs(variable_info):
     """Extracts all unique vector pairs from the variable_info dictionary.
 
@@ -1079,22 +901,20 @@ def _to_yaml(forcing_object, filepath: Union[str, Path]) -> None:
 
     grid_yaml_data = {**parent_grid_yaml_data, **child_grid_yaml_data}
 
-    #
-
-
-
-
-
-
-
-
-
-
-    )
-
-
-    # Step 2: Get ROMS Tools version
-    # Fetch the version of the 'roms-tools' package for inclusion in the YAML header
+    # Step 2: Ensure Paths are Strings
+    def ensure_paths_are_strings(obj, key):
+        attr = getattr(obj, key, None)
+        if attr is not None and "path" in attr:
+            paths = attr["path"]
+            if isinstance(paths, list):
+                attr["path"] = [str(p) if isinstance(p, Path) else p for p in paths]
+            elif isinstance(paths, Path):
+                attr["path"] = str(paths)
+
+    ensure_paths_are_strings(forcing_object, "source")
+    ensure_paths_are_strings(forcing_object, "bgc_source")
+
+    # Step 3: Get ROMS Tools version
     try:
         roms_tools_version = importlib.metadata.version("roms-tools")
     except importlib.metadata.PackageNotFoundError:
@@ -1103,8 +923,7 @@ def _to_yaml(forcing_object, filepath: Union[str, Path]) -> None:
     # Create YAML header with version information
     header = f"---\nroms_tools_version: {roms_tools_version}\n---\n"
 
-    # Step
-    # Prepare the forcing object fields, excluding 'grid' and 'ds'
+    # Step 4: Prepare Forcing Data
     forcing_data = {}
     field_names = [field.name for field in fields(forcing_object)]
     filtered_field_names = [
@@ -1133,14 +952,13 @@ def _to_yaml(forcing_object, filepath: Union[str, Path]) -> None:
         # Add the field and its value to the forcing_data dictionary
         forcing_data[field_name] = value
 
-    # Step
-    # Combine grid and forcing data into a single dictionary for the final YAML content
+    # Step 5: Combine Grid and Forcing Data into a single dictionary for the final YAML content
     yaml_data = {
         **grid_yaml_data,  # Add the grid data to the final YAML structure
        forcing_object.__class__.__name__: forcing_data,  # Include the serialized forcing object data
     }
 
-    # Step
+    # Step 6: Write to YAML file
     with filepath.open("w") as file:
         # Write the header first
         file.write(header)
@@ -1186,6 +1004,8 @@ def _from_yaml(forcing_object: Type, filepath: Union[str, Path]) -> Dict[str, An
     ValueError
         If no configuration for the specified class name is found in the YAML file.
     """
+    # Ensure filepath is a Path object
+    filepath = Path(filepath)
 
     # Read the entire file content
     with filepath.open("r") as file: