roms-tools 3.3.0__py3-none-any.whl → 3.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- roms_tools/__init__.py +1 -1
- roms_tools/analysis/cdr_ensemble.py +10 -13
- roms_tools/analysis/roms_output.py +5 -304
- roms_tools/{download.py → datasets/download.py} +1 -0
- roms_tools/{setup → datasets}/lat_lon_datasets.py +88 -64
- roms_tools/{setup → datasets}/river_datasets.py +9 -4
- roms_tools/datasets/roms_dataset.py +854 -0
- roms_tools/datasets/utils.py +487 -0
- roms_tools/{setup/fill.py → fill.py} +110 -13
- roms_tools/plot.py +4 -4
- roms_tools/regrid.py +76 -0
- roms_tools/setup/boundary_forcing.py +53 -45
- roms_tools/setup/cdr_release.py +2 -4
- roms_tools/setup/grid.py +46 -15
- roms_tools/setup/initial_conditions.py +330 -71
- roms_tools/setup/mask.py +2 -5
- roms_tools/setup/nesting.py +13 -6
- roms_tools/setup/river_forcing.py +4 -4
- roms_tools/setup/surface_forcing.py +15 -11
- roms_tools/setup/tides.py +7 -6
- roms_tools/setup/topography.py +10 -2
- roms_tools/setup/utils.py +292 -666
- roms_tools/tests/test_analysis/test_cdr_ensemble.py +4 -6
- roms_tools/tests/test_analysis/test_roms_output.py +1 -220
- roms_tools/tests/{test_setup → test_datasets}/test_lat_lon_datasets.py +4 -4
- roms_tools/tests/{test_setup → test_datasets}/test_river_datasets.py +1 -1
- roms_tools/tests/test_datasets/test_roms_dataset.py +743 -0
- roms_tools/tests/test_datasets/test_utils.py +527 -0
- roms_tools/tests/{test_setup/test_fill.py → test_fill.py} +72 -9
- roms_tools/tests/test_regrid.py +120 -1
- roms_tools/tests/test_setup/test_boundary_forcing.py +57 -138
- roms_tools/tests/test_setup/test_cdr_release.py +4 -5
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zarr.json +293 -2021
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/zarr.json +294 -2022
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ALK/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/ALK_west → initial_conditions_from_roms.zarr/ALK}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ALK_ALT_CO2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/ALK_ALT_CO2_west → initial_conditions_from_roms.zarr/ALK_ALT_CO2}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/Cs_r/c/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_unified_climatology.zarr/diatFe_west → initial_conditions_from_roms.zarr/Cs_r}/zarr.json +5 -12
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/Cs_w/c/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/diatFe_west → initial_conditions_from_roms.zarr/Cs_w}/zarr.json +3 -10
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DIC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DOCr_west → initial_conditions_from_roms.zarr/DIC}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DIC_ALT_CO2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DIC_ALT_CO2/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOC/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOCr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DIC_ALT_CO2_west → initial_conditions_from_roms.zarr/DOCr}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DON/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DON/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DONr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DONr/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOP/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOPr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/DOPr/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/Fe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/Fe/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/Lig/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DOP_west → initial_conditions_from_roms.zarr/Lig}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/NH4/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DON_west → initial_conditions_from_roms.zarr/NH4}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/NO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/NO3/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/O2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/Lig_west → initial_conditions_from_roms.zarr/O2}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/PO4/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/PO4/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/SiO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/SiO3/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/abs_time/zarr.json +47 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diatC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/diatC_west → initial_conditions_from_roms.zarr/diatC}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diatChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/diatChl_west → initial_conditions_from_roms.zarr/diatChl}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diatFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/O2_west → initial_conditions_from_roms.zarr/diatFe}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diatP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DIC_west → initial_conditions_from_roms.zarr/diatP}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diatSi/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/DOC_west → initial_conditions_from_roms.zarr/diatSi}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazC/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/diazChl_west → initial_conditions_from_roms.zarr/diazChl}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/Fe_west → initial_conditions_from_roms.zarr/diazFe}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/diazP/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ocean_time/c/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ocean_time/zarr.json +47 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/salt/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_unified_climatology.zarr/ALK_west → initial_conditions_from_roms.zarr/salt}/zarr.json +12 -9
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spC/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spCaCO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spCaCO3/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/spChl_west → initial_conditions_from_roms.zarr/spChl}/zarr.json +11 -8
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spFe/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/spP/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/temp/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/temp/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/u/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/{bgc_boundary_forcing_from_climatology.zarr/NH4_west → initial_conditions_from_roms.zarr/u}/zarr.json +12 -9
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ubar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/ubar/zarr.json +54 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/v/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/v/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/vbar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/vbar/zarr.json +54 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/w/zarr.json +57 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/zarr.json +2481 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/zeta/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/zeta/zarr.json +54 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/zooC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_from_roms.zarr/zooC/zarr.json +57 -0
- roms_tools/tests/test_setup/test_grid.py +66 -1
- roms_tools/tests/test_setup/test_initial_conditions.py +130 -104
- roms_tools/tests/test_setup/test_nesting.py +2 -1
- roms_tools/tests/test_setup/test_surface_forcing.py +1 -1
- roms_tools/tests/test_setup/test_tides.py +1 -1
- roms_tools/tests/test_setup/test_utils.py +100 -15
- roms_tools/tests/test_setup/test_validation.py +15 -0
- roms_tools/tests/test_tiling/test_partition.py +63 -15
- roms_tools/tests/test_utils.py +365 -0
- roms_tools/tiling/partition.py +81 -211
- roms_tools/utils.py +360 -62
- {roms_tools-3.3.0.dist-info → roms_tools-3.5.0.dist-info}/METADATA +2 -3
- {roms_tools-3.3.0.dist-info → roms_tools-3.5.0.dist-info}/RECORD +137 -174
- {roms_tools-3.3.0.dist-info → roms_tools-3.5.0.dist-info}/WHEEL +1 -1
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOCr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DON_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DONr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DONr_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOPr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOPr_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Fe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Lig_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NH4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/O2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/PO4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/PO4_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/SiO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/SiO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatSi_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatSi_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazFe_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spCaCO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spCaCO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spFe_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zooC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zooC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/ALK_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/ALK_ALT_CO2_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/ALK_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DIC_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DIC_ALT_CO2_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DIC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DIC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOCr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOCr_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DON_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DON_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DONr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DONr_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOPr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/DOPr_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/Fe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/Fe_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/Lig_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/Lig_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/NH4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/NH4_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/NO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/NO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/O2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/O2_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/PO4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/PO4_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/SiO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/SiO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatChl_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatSi_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diatSi_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazChl_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazFe_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/diazP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spC_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spCaCO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spCaCO3_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spChl_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spFe_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/spP_west/zarr.json +0 -54
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/zooC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_unified_climatology.zarr/zooC_west/zarr.json +0 -54
- {roms_tools-3.3.0.dist-info → roms_tools-3.5.0.dist-info}/licenses/LICENSE +0 -0
- {roms_tools-3.3.0.dist-info → roms_tools-3.5.0.dist-info}/top_level.txt +0 -0
roms_tools/setup/utils.py
CHANGED
@@ -3,13 +3,13 @@ import logging
 import time
 import typing
 from collections.abc import Sequence
+from copy import deepcopy
 from dataclasses import asdict, fields, is_dataclass
-from datetime import datetime
+from datetime import datetime
 from enum import StrEnum
 from pathlib import Path
 from typing import Any, Literal, TypeAlias
 
-import cftime
 import numba as nb
 import numpy as np
 import pandas as pd
@@ -18,7 +18,6 @@ import yaml
 from pydantic import BaseModel
 
 from roms_tools.constants import R_EARTH
-from roms_tools.utils import interpolate_from_rho_to_u, interpolate_from_rho_to_v
 
 if typing.TYPE_CHECKING:
     from roms_tools.setup.grid import Grid
@@ -128,32 +127,6 @@ def substitute_nans_by_fillvalue(field, fill_value=0.0) -> xr.DataArray:
     return field.fillna(fill_value)
 
 
-def one_dim_fill(da: xr.DataArray, dim: str, direction="forward") -> xr.DataArray:
-    """Fill NaN values in a DataArray along a specified dimension.
-
-    Parameters
-    ----------
-    da : xr.DataArray
-        The input DataArray with NaN values to be filled, which must include the specified dimension.
-    dim : str
-        The name of the dimension along which to fill NaN values (e.g., 'depth' or 'time').
-    direction : str, optional
-        The filling direction; either "forward" to propagate non-NaN values downward or "backward" to propagate them upward.
-        Defaults to "forward".
-
-    Returns
-    -------
-    xr.DataArray
-        A new DataArray with NaN values filled in the specified direction, leaving the original data unchanged.
-    """
-    if dim in da.dims:
-        if direction == "forward":
-            return da.ffill(dim=dim)
-        elif direction == "backward":
-            return da.bfill(dim=dim)
-    return da
-
-
 def assign_dates_to_climatology(ds: xr.Dataset, time_dim: str) -> xr.Dataset:
     """Assigns climatology dates to the dataset's time dimension.
 
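The removed `one_dim_fill` helper was a thin wrapper around xarray's built-in fill methods. A minimal sketch of the equivalent calls (note that xarray's `ffill`/`bfill` rely on an optional backend such as `bottleneck` or `numbagg` being installed):

    import numpy as np
    import xarray as xr

    da = xr.DataArray([np.nan, 1.0, np.nan, 3.0, np.nan], dims="depth")
    da.ffill(dim="depth")  # propagate values forward:  [nan, 1, 1, 3, 3]
    da.bfill(dim="depth")  # propagate values backward: [1, 1, 3, 3, nan]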
@@ -182,240 +155,6 @@ def assign_dates_to_climatology(ds: xr.Dataset, time_dim: str) -> xr.Dataset:
     return ds
 
 
-def interpolate_cyclic_time(
-    data_array: xr.DataArray,
-    time_dim_name: str,
-    day_of_year: int | float | np.ndarray | xr.DataArray | Sequence[int | float],
-) -> xr.DataArray:
-    """Interpolates a DataArray cyclically across the start and end of the year.
-
-    This function extends the data cyclically by appending the last time step
-    (shifted back by one year) at the beginning and the first time step
-    (shifted forward by one year) at the end. It then performs linear interpolation
-    to match the specified `day_of_year` values.
-
-    Parameters
-    ----------
-    data_array : xr.DataArray
-        The input data array containing a time-like dimension.
-    time_dim_name : str
-        The name of the time dimension in the dataset.
-    day_of_year : Union[int, float, np.ndarray, xr.DataArray, Sequence[Union[int, float]]]
-        The target day(s) of the year for interpolation. This can be:
-        - A single integer or float representing the day of the year.
-        - A NumPy array or xarray DataArray containing multiple days.
-        - A list or tuple of integers or floats for multiple target days.
-
-    Returns
-    -------
-    xr.DataArray
-        The interpolated DataArray, ensuring cyclic continuity across year boundaries.
-
-    Notes
-    -----
-    - This function is useful for interpolating climatological data, where the time axis
-      represents a repeating annual cycle.
-    - The `day_of_year` values should be within the range [1, 365] or [1, 366] for leap years.
-    """
-    # Concatenate across the beginning and end of the year
-    time_concat = xr.concat(
-        [
-            data_array[time_dim_name][-1] - 365.25,  # Shift last time backward
-            data_array[time_dim_name],
-            data_array[time_dim_name][0] + 365.25,  # Shift first time forward
-        ],
-        dim=time_dim_name,
-    )
-
-    data_array_concat = xr.concat(
-        [
-            data_array.isel(
-                **{time_dim_name: -1}
-            ),  # Append last value at the beginning
-            data_array,
-            data_array.isel(**{time_dim_name: 0}),  # Append first value at the end
-        ],
-        dim=time_dim_name,
-    )
-    data_array_concat[time_dim_name] = time_concat
-
-    # Interpolate to specified times
-    data_array_interpolated = data_array_concat.interp(
-        **{time_dim_name: day_of_year}, method="linear"
-    )
-
-    return data_array_interpolated
-
-
-def interpolate_from_climatology(
-    field: xr.DataArray | xr.Dataset,
-    time_dim_name: str,
-    time: xr.DataArray | pd.DatetimeIndex,
-) -> xr.DataArray | xr.Dataset:
-    """Interpolates a climatological field to specified time points.
-
-    This function interpolates the input `field` based on `day_of_year` values
-    extracted from the provided `time` points. If `field` is an `xarray.Dataset`,
-    interpolation is applied to all its data variables individually.
-
-    Parameters
-    ----------
-    field : xarray.DataArray or xarray.Dataset
-        The input field to be interpolated.
-        - If `field` is an `xarray.DataArray`, it must have a time dimension identified by `time_dim_name`.
-        - If `field` is an `xarray.Dataset`, all variables within the dataset are interpolated along `time_dim_name`.
-        The time dimension is assumed to represent `day_of_year` for climatological purposes.
-    time_dim_name : str
-        The name of the time dimension in `field`. This dimension is used for interpolation.
-    time : xarray.DataArray or pandas.DatetimeIndex
-        The target time points for interpolation. These are internally converted to `day_of_year`
-        before performing interpolation.
-
-    Returns
-    -------
-    xarray.DataArray or xarray.Dataset
-        The interpolated field, maintaining the same type (`xarray.DataArray` or `xarray.Dataset`)
-        but aligned to the specified `time` values.
-
-    Notes
-    -----
-    - This function assumes that `field` represents a climatological dataset, where time is expressed as `day_of_year` (1-365).
-    - The `time` input is automatically converted to `day_of_year`, so manual conversion is not required before calling this function.
-    """
-
-    def interpolate_single_field(data_array: xr.DataArray) -> xr.DataArray:
-        if isinstance(time, xr.DataArray):
-            # Extract day of year from xarray.DataArray
-            day_of_year = time.dt.dayofyear
-        else:
-            if np.size(time) == 1:
-                # Convert single datetime64 object to pandas.Timestamp
-                date = pd.Timestamp(time)
-                day_of_year = (
-                    date.dayofyear
-                    + (date.hour / 24)
-                    + (date.minute / 1440)
-                    + (date.second / 86400)
-                )
-            else:
-                # Convert each datetime64 object in the array to pandas.Timestamp and compute fractional day of year
-                day_of_year = np.array(
-                    [
-                        pd.Timestamp(t).dayofyear
-                        + (pd.Timestamp(t).hour / 24)
-                        + (pd.Timestamp(t).minute / 1440)
-                        + (pd.Timestamp(t).second / 86400)
-                        for t in time
-                    ]
-                )
-
-        data_array_interpolated = interpolate_cyclic_time(
-            data_array, time_dim_name, day_of_year
-        )
-
-        if np.size(time) == 1:
-            data_array_interpolated = data_array_interpolated.expand_dims(
-                {time_dim_name: 1}
-            )
-        return data_array_interpolated
-
-    if isinstance(field, xr.DataArray):
-        return interpolate_single_field(field)
-    elif isinstance(field, xr.Dataset):
-        interpolated_data_vars = {
-            var: interpolate_single_field(data_array)
-            for var, data_array in field.data_vars.items()
-        }
-        return xr.Dataset(interpolated_data_vars, attrs=field.attrs)
-
-    else:
-        raise TypeError("Input 'field' must be an xarray.DataArray or xarray.Dataset.")
-
-
-def get_time_type(data_array: xr.DataArray) -> str:
-    """Determines the type of time values in the xarray DataArray.
-
-    Parameters
-    ----------
-    data_array : xr.DataArray
-        The xarray DataArray to be checked for time data types.
-
-    Returns
-    -------
-    str
-        A string indicating the type of the time data: 'cftime', 'datetime', or 'int'.
-
-    Raises
-    ------
-    TypeError
-        If the values in the DataArray are not of type numpy.ndarray or list.
-    """
-    # List of cftime datetime types
-    cftime_types = (
-        cftime.DatetimeNoLeap,
-        cftime.DatetimeJulian,
-        cftime.DatetimeGregorian,
-        cftime.Datetime360Day,
-        cftime.DatetimeProlepticGregorian,
-    )
-
-    # Check if any of the coordinate values are of cftime, datetime, or integer type
-    if isinstance(data_array.values, np.ndarray | list):
-        # Check if the data type is numpy datetime64, indicating standard datetime objects
-        if data_array.values.dtype == "datetime64[ns]":
-            return "datetime"
-
-        # Check if any values in the array are instances of cftime types
-        if any(isinstance(value, cftime_types) for value in data_array.values):
-            return "cftime"
-
-        # Check if all values are of integer type (e.g., for indices or time steps)
-        if np.issubdtype(data_array.values.dtype, np.integer):
-            return "int"
-
-        # If none of the above conditions are met, raise a ValueError
-        raise ValueError("Unsupported data type for time values in input dataset.")
-
-    # Handle unexpected types
-    raise TypeError("DataArray values must be of type numpy.ndarray or list.")
-
-
-def convert_cftime_to_datetime(data_array: np.ndarray) -> np.ndarray:
-    """Converts cftime datetime objects to numpy datetime64 objects in a numpy ndarray.
-
-    Parameters
-    ----------
-    data_array : np.ndarray
-        The numpy ndarray containing cftime datetime objects to be converted.
-
-    Returns
-    -------
-    np.ndarray
-        The ndarray with cftime datetimes converted to numpy datetime64 objects.
-
-    Notes
-    -----
-    This function is intended to be used with numpy ndarrays. If you need to convert
-    cftime datetime objects in an xarray.DataArray, please use the appropriate function
-    to handle xarray.DataArray conversions.
-    """
-    # List of cftime datetime types
-    cftime_types = (
-        cftime.DatetimeNoLeap,
-        cftime.DatetimeJulian,
-        cftime.DatetimeGregorian,
-    )
-
-    # Define a conversion function for cftime to numpy datetime64
-    def convert_datetime(dt):
-        if isinstance(dt, cftime_types):
-            # Convert to ISO format and then to nanosecond precision
-            return np.datetime64(dt.isoformat(), "ns")
-        return np.datetime64(dt, "ns")
-
-    return np.vectorize(convert_datetime)(data_array)
-
-
 def get_variable_metadata():
     """Retrieves metadata for commonly used variables in the dataset.
 
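For reference, the padding pattern used by the removed `interpolate_cyclic_time` (append the last record shifted back one year and the first record shifted forward, then interpolate linearly) can be sketched as follows; like the removed helper, this assumes a numeric day-of-year time coordinate:

    import xarray as xr

    def interp_cyclic(da: xr.DataArray, time_dim: str, day_of_year) -> xr.DataArray:
        # Pad the annual cycle so interpolation works across the year boundary.
        time = xr.concat(
            [da[time_dim][-1] - 365.25, da[time_dim], da[time_dim][0] + 365.25],
            dim=time_dim,
        )
        padded = xr.concat(
            [da.isel({time_dim: -1}), da, da.isel({time_dim: 0})], dim=time_dim
        )
        padded[time_dim] = time
        return padded.interp({time_dim: day_of_year}, method="linear")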
@@ -1227,46 +966,6 @@ def get_target_coords(
     return target_coords
 
 
-def rotate_velocities(
-    u: xr.DataArray, v: xr.DataArray, angle: xr.DataArray, interpolate: bool = True
-) -> tuple[xr.DataArray, xr.DataArray]:
-    """Rotate and optionally interpolate velocity components to align with grid
-    orientation.
-
-    Parameters
-    ----------
-    u : xarray.DataArray
-        Zonal (east-west) velocity component at u-points.
-    v : xarray.DataArray
-        Meridional (north-south) velocity component at v-points.
-    angle : xarray.DataArray
-        Grid angle values for rotation.
-    interpolate : bool, optional
-        If True, interpolates rotated velocities to grid points (default is True).
-
-    Returns
-    -------
-    tuple of xarray.DataArray
-        Rotated velocity components (u_rot, v_rot).
-
-    Notes
-    -----
-    - Rotation formulas:
-      - u_rot = u * cos(angle) + v * sin(angle)
-      - v_rot = v * cos(angle) - u * sin(angle)
-    """
-    # Rotate velocities to grid orientation
-    u_rot = u * np.cos(angle) + v * np.sin(angle)
-    v_rot = v * np.cos(angle) - u * np.sin(angle)
-
-    # Interpolate to u- and v-points
-    if interpolate:
-        u_rot = interpolate_from_rho_to_u(u_rot)
-        v_rot = interpolate_from_rho_to_v(v_rot)
-
-    return u_rot, v_rot
-
-
 def compute_barotropic_velocity(
     vel: xr.DataArray, interface_depth: xr.DataArray
 ) -> xr.DataArray:
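The rotation documented in the removed `rotate_velocities` (before the optional regridding to u- and v-points) is a plain 2-D rotation by the grid angle; a minimal sketch:

    import numpy as np

    def rotate(u, v, angle):
        # u, v, angle may be xarray.DataArrays or NumPy arrays co-located on rho-points.
        u_rot = u * np.cos(angle) + v * np.sin(angle)
        v_rot = v * np.cos(angle) - u * np.sin(angle)
        return u_rot, v_rot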
@@ -1626,142 +1325,262 @@ def write_to_yaml(yaml_data, filepath: str | Path) -> None:
     )
 
 
+def serialize_paths(value: Any) -> Any:
+    """Recursively convert Path objects to strings."""
+    if isinstance(value, Path):
+        return str(value)
+    if isinstance(value, list):
+        return [serialize_paths(v) for v in value]
+    if isinstance(value, dict):
+        return {k: serialize_paths(v) for k, v in value.items()}
+    return value
+
+
+def normalize_paths(value: Any) -> Any:
+    """Recursively convert path-like strings back to Path objects.
+
+    Heuristic: strings containing '/' or ending with '.nc' are treated as paths.
+    """
+    if isinstance(value, str):
+        return Path(value) if "/" in value or value.endswith(".nc") else value
+    if isinstance(value, list):
+        return [normalize_paths(v) for v in value]
+    if isinstance(value, dict):
+        return {k: normalize_paths(v) for k, v in value.items()}
+    return value
+
+
+def serialize_datetime(value: datetime | list[datetime] | Any) -> Any:
+    """Convert datetime or list of datetimes to ISO 8601 strings."""
+    if isinstance(value, datetime):
+        return value.isoformat()
+    if isinstance(value, list) and all(isinstance(v, datetime) for v in value):
+        return [v.isoformat() for v in value]
+    return value
+
+
+def deserialize_datetime(
+    value: str | list[str] | datetime | Any,
+) -> datetime | list[datetime] | Any:
+    """Convert ISO 8601 string(s) to datetime object(s).
+
+    Returns:
+        datetime if input is string,
+        list of datetime if input is list of strings,
+        original value if parsing fails or input is already datetime.
+    """
+    if isinstance(value, list):
+        result: list[datetime | Any] = []
+        for v in value:
+            try:
+                result.append(datetime.fromisoformat(str(v)))
+            except ValueError:
+                result.append(v)
+        return result
+
+    if isinstance(value, str):
+        try:
+            return datetime.fromisoformat(value)
+        except ValueError:
+            return value
+
+    return value
+
+
+def serialize_source_dict(src: dict[str, Any] | None) -> dict[str, Any] | None:
+    """Serialize a source or BGC source dictionary for YAML or JSON output.
+
+    This function performs the following transformations:
+    - Converts any `Path` objects (including nested lists or dicts) to strings.
+    - Serializes any nested `Grid` objects using `serialize_grid`.
+    - Creates a deep copy of the input dictionary to avoid modifying the original.
+
+    Parameters
+    ----------
+    src : dict[str, Any] | None
+        The source or BGC source dictionary to serialize. Keys typically include:
+        - "path": path(s) to files
+        - "grid": a Grid object
+
+    Returns
+    -------
+    dict[str, Any] | None
+        A serialized dictionary suitable for saving to YAML or JSON, with:
+        - Paths converted to strings
+        - Nested Grid objects serialized
+        Returns `None` if input `src` is `None`.
+    """
+    if src is None:
+        return None
+
+    src = deepcopy(src)
+
+    # Serialize paths
+    if "path" in src:
+        src["path"] = serialize_paths(src["path"])
+
+    # Serialize nested grid
+    if "grid" in src and src["grid"] is not None:
+        src["grid"] = serialize_grid(src["grid"])
+
+    return src
+
+
+def deserialize_source_dict(src: dict[str, Any] | None) -> dict[str, Any] | None:
+    """Deserialize a source / bgc_source dictionary.
+
+    Converts string paths back to Path objects.
+
+    Parameters
+    ----------
+    src : dict[str, Any] | None
+        Serialized source or bgc_source dictionary.
+
+    Returns
+    -------
+    dict[str, Any] | None
+        Dictionary with paths converted to Path objects.
+    """
+    if src is None:
+        return None
+
+    src = deepcopy(src)
+
+    # Deserialize paths
+    if "path" in src:
+        src["path"] = normalize_paths(src["path"])
+
+    return src
+
+
+def serialize_grid(grid_obj: Any) -> dict[str, Any]:
+    """Serialize a Grid object to a dictionary, excluding non-serializable attributes."""
+    return pop_grid_data(asdict(grid_obj))
+
+
+def pop_grid_data(grid_data: dict[str, Any]) -> dict[str, Any]:
+    """Remove non-serializable or unnecessary keys from a Grid dictionary.
+
+    Removes 'ds', 'straddle', and 'verbose' keys if present.
+
+    Parameters
+    ----------
+    grid_data : dict
+        Dictionary representation of a Grid object.
+
+    Returns
+    -------
+    dict
+        Cleaned dictionary suitable for serialization.
+    """
+    for key in ("ds", "straddle", "verbose"):
+        grid_data.pop(key, None)
+    return grid_data
+
+
 def to_dict(forcing_object, exclude: list[str] | None = None) -> dict:
     """Serialize a forcing object (including its grid) into a dictionary.
 
-    This function serializes a
-
-    that are not serializable or meant to be excluded.
+    This function serializes a forcing object (dataclass or pydantic model),
+    including its associated grid(s), into a dictionary suitable for YAML output.
 
-
-
+    - Top-level grids (`grid`, `parent_grid`) are serialized consistently
+    - Nested grids inside `source` and `bgc_source` are also serialized
+    - Datetime objects are converted to ISO strings
+    - Path objects are converted to strings
 
     Parameters
     ----------
     forcing_object : object
-
-        such as `grid`, `start_time`, `end_time`, etc.
+        A dataclass or pydantic model representing a forcing configuration.
     exclude : list[str], optional
-        List of
+        List of field names to exclude from serialization. The fields
+        "grid", "parent_grid", and "ds" are always excluded.
 
     Returns
     -------
     dict
+        Serialized representation of the forcing object.
     """
-
+    exclude_list = exclude or []
+    exclude_set: set[str] = {"grid", "parent_grid", "ds", *exclude_list}
+
+    # --- Serialize top-level grid(s) ---
+    yaml_data = {}
+
     if hasattr(forcing_object, "grid") and forcing_object.grid is not None:
-
-
-
-
-
-
-
-
-
-        if attr is not None and "path" in attr:
-            paths = attr["path"]
-            if isinstance(paths, list):
-                attr["path"] = [str(p) if isinstance(p, Path) else p for p in paths]
-            elif isinstance(paths, Path):
-                attr["path"] = str(paths)
-            elif isinstance(paths, dict):
-                for key, path in paths.items():
-                    attr["path"][key] = str(path)
-
-    ensure_paths_are_strings(forcing_object, "source")
-    ensure_paths_are_strings(forcing_object, "bgc_source")
-
-    # Prepare Forcing Data
-    forcing_data = {}
+        yaml_data["Grid"] = serialize_grid(forcing_object.grid)
+
+    if (
+        hasattr(forcing_object, "parent_grid")
+        and forcing_object.parent_grid is not None
+    ):
+        yaml_data["ParentGrid"] = serialize_grid(forcing_object.parent_grid)
+
+    # --- Collect forcing fields ---
     if isinstance(forcing_object, BaseModel):
-        field_names = forcing_object.model_fields
+        field_names = forcing_object.model_fields.keys()
     elif is_dataclass(forcing_object):
-        field_names = [
+        field_names = [f.name for f in fields(forcing_object)]
     else:
-        raise TypeError("Forcing object
+        raise TypeError("Forcing object must be a dataclass or pydantic model")
 
-
-    exclude = []
-    exclude = ["grid", "parent_grid", "ds", *exclude]
-
-    filtered_field_names = [param for param in field_names if param not in exclude]
-
-    for field_name in filtered_field_names:
-        # Retrieve the value of each field using getattr
-        value = getattr(forcing_object, field_name)
+    forcing_data = {}
 
-
-        if
-
-        # Convert list of datetimes to list of ISO strings
-        elif isinstance(value, list) and all(isinstance(v, datetime) for v in value):
-            value = [v.isoformat() for v in value]
+    for name in field_names:
+        if name in exclude_set:
+            continue
 
-
-        forcing_data[field_name] = value
+        value = getattr(forcing_object, name)
 
-
-
-
-        forcing_object.__class__.__name__: forcing_data,  # Include the serialized forcing object data
-    }
+        if name in {"source", "bgc_source"}:
+            forcing_data[name] = serialize_source_dict(value)
+            continue
 
-
+        value = serialize_datetime(value)
+        value = serialize_paths(value)
 
+        forcing_data[name] = value
 
-
-
-    grid_data.pop("straddle", None)
-    grid_data.pop("verbose", None)
+    # --- Final YAML structure ---
+    yaml_data[forcing_object.__class__.__name__] = forcing_data
 
-    return
+    return yaml_data
 
 
 def from_yaml(forcing_object: type, filepath: str | Path) -> dict[str, Any]:
-    """
+    """Load configuration for a forcing object from a YAML file.
 
-
-
-
-
+    Searches for a dictionary keyed by the class name of `forcing_object` and
+    returns it, converting:
+    - ISO-format date strings to `datetime` objects
+    - Path-like strings back to `Path` objects
+    - `source` and `bgc_source` nested dictionaries back to proper Grid objects
 
     Parameters
     ----------
-
-    The
-
-
-        from the YAML file. The class name is used to locate the relevant data in
-        the YAML structure.
+    forcing_object : type
+        The class type whose configuration to load (e.g., `TidalForcing`).
+    filepath : str | Path
+        Path to the YAML file containing the configuration.
 
     Returns
     -------
-    dict
-
-        This dictionary contains key-value pairs where the keys are the parameter
-        names, and the values are the corresponding values from the YAML file.
-        Any date fields are converted from ISO format if necessary.
+    dict[str, Any]
+        Dictionary of configuration parameters with dates, paths, and nested grids restored.
 
     Raises
     ------
     ValueError
-        If no configuration for the specified class
+        If no configuration for the specified class is found in the YAML file.
     """
-    # Ensure filepath is a Path object
     filepath = Path(filepath)
-
-
-    with filepath.open("r") as file:
-        file_content = file.read()
-
-    # Split the content into YAML documents
-    documents = list(yaml.safe_load_all(file_content))
+    with filepath.open("r") as f:
+        documents = list(yaml.safe_load_all(f))
 
     forcing_data = None
     forcing_object_name = forcing_object.__name__
 
-    # Process the YAML documents to find the forcing data for the given object
     for doc in documents:
         if doc is None:
             continue
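Taken together, the new helpers give a symmetric serialize/deserialize round trip for the values stored in forcing YAML files. A sketch of the intended usage, assuming these helpers remain importable as module-level functions of `roms_tools.setup.utils` in 3.5.0 (the path and date values below are hypothetical):

    from datetime import datetime
    from pathlib import Path

    from roms_tools.setup.utils import (
        deserialize_datetime,
        normalize_paths,
        serialize_datetime,
        serialize_paths,
    )

    entry = {"path": [Path("/data/glorys.nc")], "start_time": datetime(2012, 1, 1)}

    # Serialize for YAML output: Path -> str, datetime -> ISO 8601 string.
    flat = {
        "path": serialize_paths(entry["path"]),
        "start_time": serialize_datetime(entry["start_time"]),
    }

    # Restore: strings containing '/' or ending in '.nc' -> Path, ISO strings -> datetime.
    restored = {
        "path": normalize_paths(flat["path"]),
        "start_time": deserialize_datetime(flat["start_time"]),
    }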
@@ -1774,21 +1593,19 @@ def from_yaml(forcing_object: type, filepath: str | Path) -> dict[str, Any]:
             f"No {forcing_object_name} configuration found in the YAML file."
         )
 
-    # Convert
+    # Convert ISO date strings to datetime objects
     for key, value in forcing_data.items():
-        forcing_data[key] =
+        forcing_data[key] = deserialize_datetime(value)
 
-    #
-
+    # Convert path-like strings back to Path objects
+    forcing_data = normalize_paths(forcing_data)
 
+    # Deserialize source and bgc_source nested dictionaries
+    for key in ["source", "bgc_source"]:
+        if key in forcing_data:
+            forcing_data[key] = deserialize_source_dict(forcing_data[key])
 
-
-    try:
-        # Return the parsed datetime object if successful
-        return datetime.fromisoformat(str(value))
-    except ValueError:
-        # Return None or raise an exception if parsing fails
-        return value
+    return forcing_data
 
 
 def handle_boundaries(field):
@@ -1861,38 +1678,6 @@ def get_boundary_coords():
     return bdry_coords
 
 
-def wrap_longitudes(grid_ds, straddle):
-    """Adjusts longitude values in a dataset to handle dateline crossing.
-
-    Parameters
-    ----------
-    grid_ds : xr.Dataset
-        The dataset containing longitude variables to adjust.
-    straddle : bool
-        If True, adjusts longitudes to the range [-180, 180] for datasets
-        that straddle the dateline. If False, adjusts longitudes to the
-        range [0, 360].
-
-    Returns
-    -------
-    xr.Dataset
-        The dataset with adjusted longitude values.
-    """
-    for lon_dim in ["lon_rho", "lon_u", "lon_v"]:
-        if straddle:
-            grid_ds[lon_dim] = xr.where(
-                grid_ds[lon_dim] > 180,
-                grid_ds[lon_dim] - 360,
-                grid_ds[lon_dim],
-            )
-        else:
-            grid_ds[lon_dim] = xr.where(
-                grid_ds[lon_dim] < 0, grid_ds[lon_dim] + 360, grid_ds[lon_dim]
-            )
-
-    return grid_ds
-
-
 def to_float(val):
     """Convert a value or list of values to float.
 
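The longitude convention used by the removed `wrap_longitudes` ([-180, 180] for grids that straddle the dateline, [0, 360] otherwise) amounts to a single `xr.where` per longitude variable; a minimal sketch:

    import xarray as xr

    def wrap(lon: xr.DataArray, straddle: bool) -> xr.DataArray:
        if straddle:
            return xr.where(lon > 180, lon - 360, lon)
        return xr.where(lon < 0, lon + 360, lon)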
@@ -1967,260 +1752,101 @@ def validate_names(
     return names
 
 
-def
-
-
-
-
-
-    """Check if the dataset contains the specified variables and dimensions.
+def check_and_set_boundaries(
+    boundaries: dict[str, bool] | None,
+    mask: xr.DataArray,
+) -> dict[str, bool]:
+    """
+    Validate and finalize the `boundaries` dictionary.
 
     Parameters
     ----------
-
-
-
-
-
-        Dictionary of variable names that are required in the dataset.
-    opt_var_names : Optional[Dict[str, str]], optional
-        Dictionary of optional variable names.
-        These variables are not strictly required, and the function will not raise an error if they are missing.
-        Default is None, meaning no optional variables are considered.
+    boundaries : dict[str, bool] or None
+        User-supplied dictionary controlling which boundaries are active.
+        Keys may include any subset of {"south", "east", "north", "west"}.
+        Missing keys will be filled from mask-based defaults.
+        If None, all boundaries are inferred from the land mask.
 
+    mask : xr.DataArray
+        2D land/sea mask on rho-points. Used to determine which boundaries
+        contain at least one ocean point.
 
-
-
-
-
+    Returns
+    -------
+    dict[str, bool]
+        Completed and validated boundary configuration.
     """
-
-
-
-
+    valid_keys = {"south", "east", "north", "west"}
+
+    # --------------------------------------------
+    # Case 1: boundaries not provided → infer them
+    # --------------------------------------------
+    if boundaries is None:
+        inferred = _infer_valid_boundaries_from_mask(mask)
+        logging.info(f"No `boundaries` provided. Using mask-based defaults: {inferred}")
+        return inferred
+
+    # --------------------------------------------
+    # Case 2: boundaries provided → validate
+    # --------------------------------------------
+    if not isinstance(boundaries, dict):
+        raise TypeError(
+            "`boundaries` must be a dict mapping boundary names to booleans."
         )
 
-
-
+    # Unknown keys?
+    unknown_keys = set(boundaries) - valid_keys
+    if unknown_keys:
         raise ValueError(
-            f"
+            f"`boundaries` contains invalid keys: {unknown_keys}. "
+            "Allowed keys are: 'south', 'east', 'north', 'west'."
         )
 
-
-
-
-
-
-
-
+    # Type-check provided values
+    for key, val in boundaries.items():
+        if not isinstance(val, bool):
+            raise TypeError(f"Boundary '{key}' must be a boolean.")
+
+    # Fill missing boundaries using defaults
+    inferred_defaults = _infer_valid_boundaries_from_mask(mask)
+    completed = boundaries.copy()
+
+    for key in valid_keys:
+        if key not in completed:
+            completed[key] = inferred_defaults[key]
+            logging.info(
+                f"`boundaries[{key!r}]` not provided — defaulting to "
+                f"{inferred_defaults[key]}"
            )
 
+    logging.info(f"Using boundary configuration: {completed}")
+    return completed
 
-def select_relevant_times(
-    ds: xr.Dataset,
-    time_dim: str,
-    start_time: datetime,
-    end_time: datetime | None = None,
-    climatology: bool = False,
-    allow_flex_time: bool = False,
-) -> xr.Dataset:
-    """
-    Select a subset of the dataset based on time constraints.
-
-    This function supports two main use cases:
2030
|
-
This function supports two main use cases:
|
|
2031
|
-
|
|
2032
|
-
1. **Time range selection (start_time + end_time provided):**
|
|
2033
|
-
- Returns all records strictly between `start_time` and `end_time`.
|
|
2034
|
-
- Ensures at least one record at or before `start_time` and one record at or
|
|
2035
|
-
after `end_time` are included, even if they fall outside the strict range.
|
|
2036
|
-
|
|
2037
|
-
2. **Initial condition selection (start_time provided, end_time=None):**
|
|
2038
|
-
- Delegates to `_select_initial_time`, which reduces the dataset to exactly one
|
|
2039
|
-
time entry.
|
|
2040
|
-
- If `allow_flex_time=True`, a +24-hour buffer around `start_time` is allowed,
|
|
2041
|
-
and the closest timestamp is chosen.
|
|
2042
|
-
- If `allow_flex_time=False`, requires an exact timestamp match.
|
|
2043
|
-
|
|
2044
|
-
Additional behavior:
|
|
2045
|
-
- If `climatology=True`, the dataset must contain exactly 12 time steps. If valid,
|
|
2046
|
-
the climatology dataset is returned without further filtering.
|
|
2047
|
-
- If the dataset uses `cftime` datetime objects, these are converted to
|
|
2048
|
-
`np.datetime64` before filtering.
|
|
2049
1825
|
|
|
2050
|
-
|
|
2051
|
-
----------
|
|
2052
|
-
ds : xr.Dataset
|
|
2053
|
-
The dataset to filter. Must contain a valid time dimension.
|
|
2054
|
-
time_dim : str
|
|
2055
|
-
Name of the time dimension in `ds`.
|
|
2056
|
-
start_time : datetime
|
|
2057
|
-
Start time for filtering.
|
|
2058
|
-
end_time : datetime or None
|
|
2059
|
-
End time for filtering. If `None`, the function assumes an initial condition
|
|
2060
|
-
use case and selects exactly one timestamp.
|
|
2061
|
-
climatology : bool, optional
|
|
2062
|
-
If True, requires exactly 12 time steps and bypasses normal filtering.
|
|
2063
|
-
Defaults to False.
|
|
2064
|
-
allow_flex_time : bool, optional
|
|
2065
|
-
Whether to allow a +24h search window after `start_time` when `end_time`
|
|
2066
|
-
is None. If False (default), requires an exact match.
|
|
2067
|
-
|
|
2068
|
-
Returns
|
|
2069
|
-
-------
|
|
2070
|
-
xr.Dataset
|
|
2071
|
-
A filtered dataset containing only the selected time entries.
|
|
2072
|
-
|
|
2073
|
-
Raises
|
|
2074
|
-
------
|
|
2075
|
-
ValueError
|
|
2076
|
-
- If `climatology=True` but the dataset does not contain exactly 12 time steps.
|
|
2077
|
-
- If `climatology=False` and the dataset contains integer time values.
|
|
2078
|
-
- If no valid records are found within the requested range or window.
|
|
2079
|
-
|
|
2080
|
-
Warns
|
|
2081
|
-
-----
|
|
2082
|
-
UserWarning
|
|
2083
|
-
- If no records exist at or before `start_time` or at or after `end_time`.
|
|
2084
|
-
- If the specified time dimension does not exist in the dataset.
|
|
2085
|
-
|
|
2086
|
-
Notes
|
|
2087
|
-
-----
|
|
2088
|
-
- For initial conditions (end_time=None), see `_select_initial_time` for details
|
|
2089
|
-
on strict vs. flexible selection behavior.
|
|
2090
|
-
- Logs warnings instead of failing hard when boundary records are missing, and
|
|
2091
|
-
defaults to using the earliest or latest available time in such cases.
|
|
1826
|
+
def _infer_valid_boundaries_from_mask(mask: xr.DataArray) -> dict[str, bool]:
|
|
2092
1827
|
"""
|
|
2093
|
-
|
|
2094
|
-
logging.warning(
|
|
2095
|
-
f"Dataset does not contain time dimension '{time_dim}'. "
|
|
2096
|
-
"Please check variable naming or dataset structure."
|
|
2097
|
-
)
|
|
2098
|
-
return ds
|
|
2099
|
-
|
|
2100
|
-
time_type = get_time_type(ds[time_dim])
|
|
1828
|
+
Determine which grid boundaries contain at least one ocean point.
|
|
2101
1829
|
|
|
2102
|
-
|
|
2103
|
-
if len(ds[time_dim]) != 12:
|
|
2104
|
-
raise ValueError(
|
|
2105
|
-
f"The dataset contains {len(ds[time_dim])} time steps, but the climatology flag is set to True, which requires exactly 12 time steps."
|
|
2106
|
-
)
|
|
2107
|
-
else:
|
|
2108
|
-
if time_type == "int":
|
|
2109
|
-
raise ValueError(
|
|
2110
|
-
"The dataset contains integer time values, which are only supported when the climatology flag is set to True. However, your climatology flag is set to False."
|
|
2111
|
-
)
|
|
2112
|
-
if time_type == "cftime":
|
|
2113
|
-
ds = ds.assign_coords({time_dim: convert_cftime_to_datetime(ds[time_dim])})
|
|
2114
|
-
|
|
2115
|
-
if not end_time:
|
|
2116
|
-
# Assume we are looking for exactly one time record for initial conditions
|
|
2117
|
-
return _select_initial_time(
|
|
2118
|
-
ds, time_dim, start_time, climatology, allow_flex_time
|
|
2119
|
-
)
|
|
2120
|
-
|
|
2121
|
-
if climatology:
|
|
2122
|
-
return ds
|
|
2123
|
-
|
|
2124
|
-
# Identify records before or at start_time
|
|
2125
|
-
before_start = ds[time_dim] <= np.datetime64(start_time)
|
|
2126
|
-
if before_start.any():
|
|
2127
|
-
closest_before_start = ds[time_dim].where(before_start, drop=True)[-1]
|
|
2128
|
-
else:
|
|
2129
|
-
logging.warning(f"No records found at or before the start_time: {start_time}.")
|
|
2130
|
-
closest_before_start = ds[time_dim][0]
|
|
2131
|
-
|
|
2132
|
-
# Identify records after or at end_time
|
|
2133
|
-
after_end = ds[time_dim] >= np.datetime64(end_time)
|
|
2134
|
-
if after_end.any():
|
|
2135
|
-
closest_after_end = ds[time_dim].where(after_end, drop=True).min()
|
|
2136
|
-
else:
|
|
2137
|
-
logging.warning(f"No records found at or after the end_time: {end_time}.")
|
|
2138
|
-
closest_after_end = ds[time_dim].max()
|
|
2139
|
-
|
|
2140
|
-
# Select records within the time range and add the closest before/after
|
|
2141
|
-
within_range = (ds[time_dim] > np.datetime64(start_time)) & (
|
|
2142
|
-
ds[time_dim] < np.datetime64(end_time)
|
|
2143
|
-
)
|
|
2144
|
-
selected_times = ds[time_dim].where(
|
|
2145
|
-
within_range
|
|
2146
|
-
| (ds[time_dim] == closest_before_start)
|
|
2147
|
-
| (ds[time_dim] == closest_after_end),
|
|
2148
|
-
drop=True,
|
|
2149
|
-
)
|
|
2150
|
-
ds = ds.sel({time_dim: selected_times})
|
|
2151
|
-
|
|
2152
|
-
return ds
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
def _select_initial_time(
|
|
2156
|
-
ds: xr.Dataset,
|
|
2157
|
-
time_dim: str,
|
|
2158
|
-
ini_time: datetime,
|
|
2159
|
-
climatology: bool,
|
|
2160
|
-
allow_flex_time: bool = False,
|
|
2161
|
-
) -> xr.Dataset:
|
|
2162
|
-
"""Select exactly one initial time from dataset.
|
|
1830
|
+
Any boundary consisting entirely of land is considered inactive.
|
|
2163
1831
|
|
|
2164
1832
|
Parameters
|
|
2165
1833
|
----------
|
|
2166
|
-
|
|
2167
|
-
|
|
2168
|
-
time_dim : str
|
|
2169
|
-
Name of the time dimension.
|
|
2170
|
-
ini_time : datetime
|
|
2171
|
-
The desired initial time.
|
|
2172
|
-
allow_flex_time : bool
|
|
2173
|
-
- If True: allow a +24h window and pick the closest available timestamp.
|
|
2174
|
-
- If False (default): require an exact match, otherwise raise ValueError.
|
|
1834
|
+
mask : xr.DataArray
|
|
1835
|
+
2D mask array on rho-points where 1 = ocean, 0 = land.
|
|
2175
1836
|
|
|
2176
1837
|
Returns
|
|
2177
1838
|
-------
|
|
2178
|
-
|
|
2179
|
-
|
|
2180
|
-
|
|
2181
|
-
Raises
|
|
2182
|
-
------
|
|
2183
|
-
ValueError
|
|
2184
|
-
If no matching time is found (when `allow_flex_time=False`), or no entries are
|
|
2185
|
-
available within the +24h window (when `allow_flex_time=True`).
|
|
1839
|
+
dict[str, bool]
|
|
1840
|
+
Boolean availability for {south, east, north, west}.
|
|
2186
1841
|
"""
|
|
2187
|
-
|
|
2188
|
-
|
|
2189
|
-
ds["time"] = ds["time"] / np.timedelta64(1, "D")
|
|
2190
|
-
# Interpolate from climatology for initial conditions
|
|
2191
|
-
return interpolate_from_climatology(ds, time_dim, ini_time)
|
|
2192
|
-
|
|
2193
|
-
if allow_flex_time:
|
|
2194
|
-
# Look in time range [ini_time, ini_time + 24h)
|
|
2195
|
-
end_time = ini_time + timedelta(days=1)
|
|
2196
|
-
times = (np.datetime64(ini_time) <= ds[time_dim]) & (
|
|
2197
|
-
ds[time_dim] < np.datetime64(end_time)
|
|
2198
|
-
)
|
|
2199
|
-
|
|
2200
|
-
if np.all(~times):
|
|
2201
|
-
raise ValueError(
|
|
2202
|
-
f"No time entries found between {ini_time} and {end_time}."
|
|
2203
|
-
)
|
|
1842
|
+
bdry_coords = get_boundary_coords()
|
|
1843
|
+
boundaries = {}
|
|
2204
1844
|
|
|
2205
|
-
|
|
2206
|
-
|
|
2207
|
-
|
|
2208
|
-
ds = ds.isel({time_dim: 0})
|
|
1845
|
+
for direction in ["south", "east", "north", "west"]:
|
|
1846
|
+
coords = bdry_coords["rho"][direction]
|
|
1847
|
+
bdry_mask = mask.isel(**coords)
|
|
2209
1848
|
|
|
2210
|
-
|
|
2211
|
-
|
|
2212
|
-
)
|
|
2213
|
-
|
|
2214
|
-
else:
|
|
2215
|
-
# Strict match required
|
|
2216
|
-
if not (ds[time_dim].values == np.datetime64(ini_time)).any():
|
|
2217
|
-
raise ValueError(
|
|
2218
|
-
f"No exact match found for initial time {ini_time}. Consider setting allow_flex_time to True."
|
|
2219
|
-
)
|
|
2220
|
-
|
|
2221
|
-
ds = ds.sel({time_dim: np.datetime64(ini_time)})
|
|
1849
|
+
# Boundary is valid if ANY ocean point exists
|
|
1850
|
+
boundaries[direction] = bool(bdry_mask.values.any())
|
|
2222
1851
|
|
|
2223
|
-
|
|
2224
|
-
ds = ds.expand_dims(time_dim)
|
|
2225
|
-
|
|
2226
|
-
return ds
|
|
1852
|
+
return boundaries
|
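The new boundary handling infers which open boundaries exist from the land/sea mask and only asks the user to override the result. A self-contained toy sketch of the same edge inspection follows; the real `_infer_valid_boundaries_from_mask` goes through `get_boundary_coords()`, whereas here the four edges are indexed directly and the mask values are invented for illustration:

    import numpy as np
    import xarray as xr

    # Hypothetical 4x5 rho-point mask: 1 = ocean, 0 = land; the eastern column is all land.
    mask = xr.DataArray(
        np.array(
            [
                [1, 1, 1, 1, 0],
                [1, 1, 1, 1, 0],
                [1, 1, 0, 1, 0],
                [1, 1, 1, 1, 0],
            ]
        ),
        dims=("eta_rho", "xi_rho"),
    )

    edges = {
        "south": mask.isel(eta_rho=0),
        "north": mask.isel(eta_rho=-1),
        "west": mask.isel(xi_rho=0),
        "east": mask.isel(xi_rho=-1),
    }
    inferred = {name: bool(edge.values.any()) for name, edge in edges.items()}
    print(inferred)  # {'south': True, 'north': True, 'west': True, 'east': False}

User-supplied entries in `boundaries` override these defaults; any key left out falls back to the inferred value, as in the fill-in loop of `check_and_set_boundaries` above.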