roms-tools 3.2.0__py3-none-any.whl → 3.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- roms_tools/__init__.py +1 -1
- roms_tools/plot.py +38 -30
- roms_tools/setup/boundary_forcing.py +2 -2
- roms_tools/setup/grid.py +13 -5
- roms_tools/setup/initial_conditions.py +5 -5
- roms_tools/setup/{datasets.py → lat_lon_datasets.py} +23 -804
- roms_tools/setup/nesting.py +262 -90
- roms_tools/setup/river_datasets.py +527 -0
- roms_tools/setup/river_forcing.py +2 -2
- roms_tools/setup/surface_forcing.py +4 -4
- roms_tools/setup/tides.py +1 -1
- roms_tools/setup/topography.py +4 -6
- roms_tools/setup/utils.py +263 -2
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_ALT_CO2_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_ALT_CO2_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/ALK_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_ALT_CO2_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_ALT_CO2_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_ALT_CO2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DIC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOCr_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOCr_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOCr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DON_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DON_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DON_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DONr_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DONr_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DONr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOP_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOP_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOPr_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOPr_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/DOPr_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Fe_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Fe_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Fe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Lig_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Lig_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/Lig_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NH4_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NH4_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NH4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NO3_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NO3_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/NO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/O2_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/O2_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/O2_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/PO4_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/PO4_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/PO4_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/SiO3_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/SiO3_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/SiO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatChl_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatChl_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatFe_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatFe_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatP_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatP_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatSi_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatSi_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diatSi_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazChl_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazChl_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazFe_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazFe_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazP_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazP_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/diazP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spCaCO3_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spCaCO3_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spCaCO3_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spChl_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spChl_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spChl_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spFe_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spFe_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spFe_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spP_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spP_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/spP_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zarr.json +406 -406
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zooC_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zooC_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/bgc_boundary_forcing_from_climatology.zarr/zooC_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_south/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/salt_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_south/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/temp_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_south/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/u_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_east/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_north/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_south/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/ubar_west/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_east/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_north/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_south/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/v_west/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_east/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_north/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_south/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/vbar_west/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/boundary_forcing.zarr/zarr.json +182 -182
- roms_tools/tests/test_setup/test_data/grid.zarr/h/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/grid.zarr/zarr.json +191 -191
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/h/c/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/zarr.json +210 -210
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/ALK/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/ALK_ALT_CO2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DIC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DIC_ALT_CO2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DOC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DOCr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DON/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DONr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DOP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/DOPr/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/Fe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/Lig/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/NH4/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/NO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/O2/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/PO4/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/SiO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diatSi/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diazC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diazChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diazFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/diazP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/salt/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spCaCO3/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spChl/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spFe/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/spP/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/temp/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/u/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/ubar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/v/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/vbar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/zarr.json +182 -182
- roms_tools/tests/test_setup/test_data/initial_conditions_with_bgc_from_climatology.zarr/zooC/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/salt/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/temp/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/u/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/ubar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/v/c/0/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/vbar/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/initial_conditions_with_unified_bgc_from_climatology.zarr/zarr.json +187 -187
- roms_tools/tests/test_setup/test_data/tidal_forcing.zarr/u_Im/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/tidal_forcing.zarr/u_Re/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/tidal_forcing.zarr/v_Im/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/tidal_forcing.zarr/v_Re/c/0/0/0 +0 -0
- roms_tools/tests/test_setup/test_data/tidal_forcing.zarr/zarr.json +66 -66
- roms_tools/tests/test_setup/test_grid.py +14 -0
- roms_tools/tests/test_setup/test_initial_conditions.py +1 -1
- roms_tools/tests/test_setup/{test_datasets.py → test_lat_lon_datasets.py} +22 -61
- roms_tools/tests/test_setup/test_nesting.py +119 -31
- roms_tools/tests/test_setup/test_river_datasets.py +48 -0
- roms_tools/tests/test_setup/test_surface_forcing.py +1 -1
- roms_tools/tests/test_setup/test_utils.py +1 -1
- {roms_tools-3.2.0.dist-info → roms_tools-3.3.0.dist-info}/METADATA +1 -1
- {roms_tools-3.2.0.dist-info → roms_tools-3.3.0.dist-info}/RECORD +202 -200
- {roms_tools-3.2.0.dist-info → roms_tools-3.3.0.dist-info}/WHEEL +0 -0
- {roms_tools-3.2.0.dist-info → roms_tools-3.3.0.dist-info}/licenses/LICENSE +0 -0
- {roms_tools-3.2.0.dist-info → roms_tools-3.3.0.dist-info}/top_level.txt +0 -0
|
@@ -1,15 +1,13 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
import importlib.util
|
|
4
|
-
import logging
|
|
5
4
|
import typing
|
|
6
|
-
from collections import Counter, defaultdict
|
|
7
5
|
from collections.abc import Callable, Mapping
|
|
8
6
|
from dataclasses import dataclass, field
|
|
9
|
-
from datetime import datetime
|
|
7
|
+
from datetime import datetime
|
|
10
8
|
from pathlib import Path
|
|
11
9
|
from types import ModuleType
|
|
12
|
-
from typing import Any, ClassVar, Literal,
|
|
10
|
+
from typing import Any, ClassVar, Literal, cast
|
|
13
11
|
|
|
14
12
|
if typing.TYPE_CHECKING:
|
|
15
13
|
from roms_tools.setup.grid import Grid
|
|
@@ -20,7 +18,6 @@ import xarray as xr
|
|
|
20
18
|
from roms_tools.constants import R_EARTH
|
|
21
19
|
from roms_tools.download import (
|
|
22
20
|
download_correction_data,
|
|
23
|
-
download_river_data,
|
|
24
21
|
download_sal_data,
|
|
25
22
|
download_topo,
|
|
26
23
|
)
|
|
@@ -28,13 +25,11 @@ from roms_tools.setup.fill import LateralFill
|
|
|
28
25
|
from roms_tools.setup.utils import (
|
|
29
26
|
Timed,
|
|
30
27
|
assign_dates_to_climatology,
|
|
31
|
-
|
|
32
|
-
gc_dist,
|
|
28
|
+
check_dataset,
|
|
33
29
|
get_target_coords,
|
|
34
|
-
get_time_type,
|
|
35
30
|
interpolate_cyclic_time,
|
|
36
|
-
interpolate_from_climatology,
|
|
37
31
|
one_dim_fill,
|
|
32
|
+
select_relevant_times,
|
|
38
33
|
)
|
|
39
34
|
from roms_tools.utils import get_dask_chunks, get_pkg_error_msg, has_gcsfs, load_data
|
|
40
35
|
|
|
@@ -51,13 +46,10 @@ DEFAULT_NR_BUFFER_POINTS = (
|
|
|
51
46
|
# - Too few points → potential boundary artifacts when lateral refill is performed
|
|
52
47
|
# See discussion: https://github.com/CWorthy-ocean/roms-tools/issues/153
|
|
53
48
|
# This default will be applied consistently across all datasets requiring lateral fill.
|
|
54
|
-
RawDataSource: TypeAlias = dict[str, str | Path | list[str | Path] | bool]
|
|
55
|
-
|
|
56
|
-
# lat-lon datasets
|
|
57
49
|
|
|
58
50
|
|
|
59
51
|
@dataclass(kw_only=True)
|
|
60
|
-
class
|
|
52
|
+
class LatLonDataset:
|
|
61
53
|
"""Represents forcing data on original grid.
|
|
62
54
|
|
|
63
55
|
Parameters
|
|
@@ -247,7 +239,7 @@ class Dataset:
|
|
|
247
239
|
ValueError
|
|
248
240
|
If the dataset does not contain the specified variables or dimensions.
|
|
249
241
|
"""
|
|
250
|
-
|
|
242
|
+
check_dataset(ds, self.dim_names, self.var_names)
|
|
251
243
|
|
|
252
244
|
def select_relevant_fields(self, ds: xr.Dataset) -> xr.Dataset:
|
|
253
245
|
"""Selects and returns a subset of the dataset containing only the variables
|
|
@@ -343,7 +335,7 @@ class Dataset:
|
|
|
343
335
|
if self.start_time is None:
|
|
344
336
|
raise ValueError("select_relevant_times called but start_time is None.")
|
|
345
337
|
|
|
346
|
-
ds =
|
|
338
|
+
ds = select_relevant_times(
|
|
347
339
|
ds,
|
|
348
340
|
time_dim,
|
|
349
341
|
self.start_time,
|
|
@@ -563,7 +555,7 @@ class Dataset:
|
|
|
563
555
|
return_copy: bool = False,
|
|
564
556
|
return_coords_only: bool = False,
|
|
565
557
|
verbose: bool = False,
|
|
566
|
-
) -> xr.Dataset |
|
|
558
|
+
) -> xr.Dataset | LatLonDataset | None:
|
|
567
559
|
"""Selects a subdomain from the xarray Dataset based on specified target
|
|
568
560
|
coordinates, extending the selection by a defined buffer. Adjusts longitude
|
|
569
561
|
ranges as necessary to accommodate the dataset's expected range and handles
|
|
@@ -618,7 +610,7 @@ class Dataset:
|
|
|
618
610
|
return coords_ds
|
|
619
611
|
|
|
620
612
|
if return_copy:
|
|
621
|
-
return
|
|
613
|
+
return LatLonDataset.from_ds(self, subdomain)
|
|
622
614
|
else:
|
|
623
615
|
self.ds = subdomain
|
|
624
616
|
return None
|
|
@@ -687,8 +679,8 @@ class Dataset:
|
|
|
687
679
|
)
|
|
688
680
|
|
|
689
681
|
@classmethod
|
|
690
|
-
def from_ds(cls, original_dataset:
|
|
691
|
-
"""Substitute the internal dataset of a
|
|
682
|
+
def from_ds(cls, original_dataset: LatLonDataset, ds: xr.Dataset) -> LatLonDataset:
|
|
683
|
+
"""Substitute the internal dataset of a LatLonDataset object with a new xarray
|
|
692
684
|
Dataset.
|
|
693
685
|
|
|
694
686
|
This method creates a new Dataset instance, bypassing the usual `__init__`
|
|
@@ -698,18 +690,18 @@ class Dataset:
|
|
|
698
690
|
|
|
699
691
|
Parameters
|
|
700
692
|
----------
|
|
701
|
-
original_dataset :
|
|
702
|
-
The original
|
|
693
|
+
original_dataset : LatLonDataset
|
|
694
|
+
The original LatLonDataset instance from which attributes will be copied.
|
|
703
695
|
ds : xarray.Dataset
|
|
704
696
|
The new xarray Dataset to assign to the `ds` attribute of the new instance.
|
|
705
697
|
|
|
706
698
|
Returns
|
|
707
699
|
-------
|
|
708
|
-
|
|
700
|
+
LatLonDataset
|
|
709
701
|
A new Dataset instance with the `ds` attribute set to the provided dataset
|
|
710
702
|
and other attributes copied from the original instance.
|
|
711
703
|
"""
|
|
712
|
-
# Create a new
|
|
704
|
+
# Create a new LatLonDataset instance without calling __init__ or __post_init__
|
|
713
705
|
dataset = cls.__new__(cls)
|
|
714
706
|
|
|
715
707
|
# Directly set the provided dataset as the 'ds' attribute
|
|
@@ -724,7 +716,7 @@ class Dataset:
|
|
|
724
716
|
|
|
725
717
|
|
|
726
718
|
@dataclass(kw_only=True)
|
|
727
|
-
class TPXODataset(
|
|
719
|
+
class TPXODataset(LatLonDataset):
|
|
728
720
|
"""Represents tidal data on the original grid from the TPXO dataset.
|
|
729
721
|
|
|
730
722
|
Parameters
|
|
@@ -939,7 +931,7 @@ class TPXODataset(Dataset):
|
|
|
939
931
|
|
|
940
932
|
|
|
941
933
|
@dataclass(kw_only=True)
|
|
942
|
-
class GLORYSDataset(
|
|
934
|
+
class GLORYSDataset(LatLonDataset):
|
|
943
935
|
"""Represents GLORYS data on original grid."""
|
|
944
936
|
|
|
945
937
|
var_names: dict[str, str] = field(
|
|
@@ -1104,7 +1096,7 @@ class GLORYSDefaultDataset(GLORYSDataset):
|
|
|
1104
1096
|
|
|
1105
1097
|
|
|
1106
1098
|
@dataclass(kw_only=True)
|
|
1107
|
-
class UnifiedDataset(
|
|
1099
|
+
class UnifiedDataset(LatLonDataset):
|
|
1108
1100
|
"""Represents unified BGC data on original grid.
|
|
1109
1101
|
|
|
1110
1102
|
Notes
|
|
@@ -1253,7 +1245,7 @@ class UnifiedBGCSurfaceDataset(UnifiedDataset):
|
|
|
1253
1245
|
|
|
1254
1246
|
|
|
1255
1247
|
@dataclass(kw_only=True)
|
|
1256
|
-
class CESMDataset(
|
|
1248
|
+
class CESMDataset(LatLonDataset):
|
|
1257
1249
|
"""Represents CESM data on original grid."""
|
|
1258
1250
|
|
|
1259
1251
|
# overwrite clean_up method from parent class
|
|
@@ -1460,7 +1452,7 @@ class CESMBGCSurfaceForcingDataset(CESMDataset):
|
|
|
1460
1452
|
|
|
1461
1453
|
|
|
1462
1454
|
@dataclass(kw_only=True)
|
|
1463
|
-
class ERA5Dataset(
|
|
1455
|
+
class ERA5Dataset(LatLonDataset):
|
|
1464
1456
|
"""Represents ERA5 data on original grid."""
|
|
1465
1457
|
|
|
1466
1458
|
var_names: dict[str, str] = field(
|
|
@@ -1584,7 +1576,7 @@ class ERA5ARCODataset(ERA5Dataset):
|
|
|
1584
1576
|
|
|
1585
1577
|
|
|
1586
1578
|
@dataclass(kw_only=True)
|
|
1587
|
-
class ERA5Correction(
|
|
1579
|
+
class ERA5Correction(LatLonDataset):
|
|
1588
1580
|
"""Global dataset to correct ERA5 radiation.
|
|
1589
1581
|
|
|
1590
1582
|
The dataset contains multiplicative correction factors for the ERA5 shortwave
|
|
@@ -1667,7 +1659,7 @@ class ERA5Correction(Dataset):
|
|
|
1667
1659
|
|
|
1668
1660
|
|
|
1669
1661
|
@dataclass(kw_only=True)
|
|
1670
|
-
class ETOPO5Dataset(
|
|
1662
|
+
class ETOPO5Dataset(LatLonDataset):
|
|
1671
1663
|
"""Represents topography data on the original grid from the ETOPO5 dataset."""
|
|
1672
1664
|
|
|
1673
1665
|
filename: str = field(default_factory=lambda: download_topo("etopo5.nc"))
|
|
@@ -1703,7 +1695,7 @@ class ETOPO5Dataset(Dataset):
|
|
|
1703
1695
|
|
|
1704
1696
|
|
|
1705
1697
|
@dataclass(kw_only=True)
|
|
1706
|
-
class SRTM15Dataset(
|
|
1698
|
+
class SRTM15Dataset(LatLonDataset):
|
|
1707
1699
|
"""Represents topography data on the original grid from the SRTM15 dataset."""
|
|
1708
1700
|
|
|
1709
1701
|
var_names: dict[str, str] = field(
|
|
@@ -1716,428 +1708,6 @@ class SRTM15Dataset(Dataset):
|
|
|
1716
1708
|
)
|
|
1717
1709
|
|
|
1718
1710
|
|
|
1719
|
-
# river datasets
|
|
1720
|
-
@dataclass(kw_only=True)
|
|
1721
|
-
class RiverDataset:
|
|
1722
|
-
"""Represents river data.
|
|
1723
|
-
|
|
1724
|
-
Parameters
|
|
1725
|
-
----------
|
|
1726
|
-
filename : Union[str, Path, List[Union[str, Path]]]
|
|
1727
|
-
The path to the data file(s). Can be a single string (with or without wildcards), a single Path object,
|
|
1728
|
-
or a list of strings or Path objects containing multiple files.
|
|
1729
|
-
start_time : datetime
|
|
1730
|
-
The start time for selecting relevant data.
|
|
1731
|
-
end_time : datetime
|
|
1732
|
-
The end time for selecting relevant data.
|
|
1733
|
-
dim_names: Dict[str, str]
|
|
1734
|
-
Dictionary specifying the names of dimensions in the dataset.
|
|
1735
|
-
Requires "station" and "time" as keys.
|
|
1736
|
-
var_names: Dict[str, str]
|
|
1737
|
-
Dictionary of variable names that are required in the dataset.
|
|
1738
|
-
Requires the keys "latitude", "longitude", "flux", "ratio", and "name".
|
|
1739
|
-
opt_var_names: Dict[str, str], optional
|
|
1740
|
-
Dictionary of variable names that are optional in the dataset.
|
|
1741
|
-
Defaults to an empty dictionary.
|
|
1742
|
-
climatology : bool
|
|
1743
|
-
Indicates whether the dataset is climatological. Defaults to False.
|
|
1744
|
-
|
|
1745
|
-
Attributes
|
|
1746
|
-
----------
|
|
1747
|
-
ds : xr.Dataset
|
|
1748
|
-
The xarray Dataset containing the forcing data on its original grid.
|
|
1749
|
-
"""
|
|
1750
|
-
|
|
1751
|
-
filename: str | Path | list[str | Path]
|
|
1752
|
-
start_time: datetime
|
|
1753
|
-
end_time: datetime
|
|
1754
|
-
dim_names: dict[str, str]
|
|
1755
|
-
var_names: dict[str, str]
|
|
1756
|
-
opt_var_names: dict[str, str] | None = field(default_factory=dict)
|
|
1757
|
-
climatology: bool = False
|
|
1758
|
-
ds: xr.Dataset = field(init=False, repr=False)
|
|
1759
|
-
|
|
1760
|
-
def __post_init__(self):
|
|
1761
|
-
# Validate start_time and end_time
|
|
1762
|
-
if not isinstance(self.start_time, datetime):
|
|
1763
|
-
raise TypeError(
|
|
1764
|
-
f"start_time must be a datetime object, but got {type(self.start_time).__name__}."
|
|
1765
|
-
)
|
|
1766
|
-
if not isinstance(self.end_time, datetime):
|
|
1767
|
-
raise TypeError(
|
|
1768
|
-
f"end_time must be a datetime object, but got {type(self.end_time).__name__}."
|
|
1769
|
-
)
|
|
1770
|
-
|
|
1771
|
-
ds = self.load_data()
|
|
1772
|
-
ds = self.clean_up(ds)
|
|
1773
|
-
self.check_dataset(ds)
|
|
1774
|
-
ds = _deduplicate_river_names(
|
|
1775
|
-
ds, self.var_names["name"], self.dim_names["station"]
|
|
1776
|
-
)
|
|
1777
|
-
|
|
1778
|
-
# Select relevant times
|
|
1779
|
-
ds = self.add_time_info(ds)
|
|
1780
|
-
self.ds = ds
|
|
1781
|
-
|
|
1782
|
-
def load_data(self) -> xr.Dataset:
|
|
1783
|
-
"""Load dataset from the specified file.
|
|
1784
|
-
|
|
1785
|
-
Returns
|
|
1786
|
-
-------
|
|
1787
|
-
ds : xr.Dataset
|
|
1788
|
-
The loaded xarray Dataset containing the forcing data.
|
|
1789
|
-
"""
|
|
1790
|
-
ds = load_data(
|
|
1791
|
-
self.filename, self.dim_names, use_dask=False, decode_times=False
|
|
1792
|
-
)
|
|
1793
|
-
|
|
1794
|
-
return ds
|
|
1795
|
-
|
|
1796
|
-
def clean_up(self, ds: xr.Dataset) -> xr.Dataset:
|
|
1797
|
-
"""Decodes the 'name' variable (if byte-encoded) and updates the dataset.
|
|
1798
|
-
|
|
1799
|
-
This method checks if the 'name' variable is of dtype 'object' (i.e., byte-encoded),
|
|
1800
|
-
and if so, decodes each byte array to a string and updates the dataset.
|
|
1801
|
-
It also ensures that the 'station' dimension is of integer type.
|
|
1802
|
-
|
|
1803
|
-
|
|
1804
|
-
Parameters
|
|
1805
|
-
----------
|
|
1806
|
-
ds : xr.Dataset
|
|
1807
|
-
The dataset containing the 'name' variable to decode.
|
|
1808
|
-
|
|
1809
|
-
Returns
|
|
1810
|
-
-------
|
|
1811
|
-
ds : xr.Dataset
|
|
1812
|
-
The dataset with the decoded 'name' variable.
|
|
1813
|
-
"""
|
|
1814
|
-
if ds[self.var_names["name"]].dtype == "object":
|
|
1815
|
-
names = []
|
|
1816
|
-
for i in range(len(ds[self.dim_names["station"]])):
|
|
1817
|
-
byte_array = ds[self.var_names["name"]].isel(
|
|
1818
|
-
**{self.dim_names["station"]: i}
|
|
1819
|
-
)
|
|
1820
|
-
name = decode_string(byte_array)
|
|
1821
|
-
names.append(name)
|
|
1822
|
-
ds[self.var_names["name"]] = xr.DataArray(
|
|
1823
|
-
data=names, dims=self.dim_names["station"]
|
|
1824
|
-
)
|
|
1825
|
-
|
|
1826
|
-
if ds[self.dim_names["station"]].dtype == "float64":
|
|
1827
|
-
ds[self.dim_names["station"]] = ds[self.dim_names["station"]].astype(int)
|
|
1828
|
-
|
|
1829
|
-
# Drop all variables that have chars dim
|
|
1830
|
-
vars_to_drop = ["ocn_name", "stn_name", "ct_name", "cn_name", "chars"]
|
|
1831
|
-
existing_vars = [var for var in vars_to_drop if var in ds]
|
|
1832
|
-
ds = ds.drop_vars(existing_vars)
|
|
1833
|
-
|
|
1834
|
-
return ds
|
|
1835
|
-
|
|
1836
|
-
    def check_dataset(self, ds: xr.Dataset) -> None:
        """Validate required variables, dimensions, and uniqueness of river names.

        Parameters
        ----------
        ds : xr.Dataset
            The xarray Dataset to check.

        Raises
        ------
        ValueError
            If the dataset does not contain the specified variables or dimensions.
        """
        # Delegates to the module-level helper shared by the dataset classes.
        _check_dataset(ds, self.dim_names, self.var_names, self.opt_var_names)
|
|
1850
|
-
|
|
1851
|
-
def add_time_info(self, ds: xr.Dataset) -> xr.Dataset:
|
|
1852
|
-
"""Dummy method to be overridden by child classes to add time information to the
|
|
1853
|
-
dataset.
|
|
1854
|
-
|
|
1855
|
-
This method is intended as a placeholder and should be implemented in subclasses
|
|
1856
|
-
to provide specific functionality for adding time-related information to the dataset.
|
|
1857
|
-
|
|
1858
|
-
Parameters
|
|
1859
|
-
----------
|
|
1860
|
-
ds : xr.Dataset
|
|
1861
|
-
The xarray Dataset to which time information will be added.
|
|
1862
|
-
|
|
1863
|
-
Returns
|
|
1864
|
-
-------
|
|
1865
|
-
xr.Dataset
|
|
1866
|
-
The xarray Dataset with time information added (as implemented by child classes).
|
|
1867
|
-
"""
|
|
1868
|
-
return ds
|
|
1869
|
-
|
|
1870
|
-
    def select_relevant_times(self, ds) -> xr.Dataset:
        """Select a subset of the dataset based on the specified time range.

        Filters the dataset to all records between `start_time` and `end_time`,
        additionally keeping one record at or before `start_time` and one at or
        after `end_time` when available, even if they fall outside the strict
        range. Delegates to the module-level `_select_relevant_times` with the
        climatology flag disabled.

        Parameters
        ----------
        ds : xr.Dataset
            The input dataset to be filtered. Must contain a time dimension.

        Returns
        -------
        xr.Dataset
            A dataset filtered to the specified time range, including the
            closest entries at or before `start_time` and at or after
            `end_time` if applicable.

        Warns
        -----
        UserWarning
            If no records at or before `start_time` or at or after `end_time`
            are found, or if the time dimension is missing or misnamed.
        """
        time_dim = self.dim_names["time"]

        # climatology=False: this path always operates on real timestamps.
        ds = _select_relevant_times(ds, time_dim, self.start_time, self.end_time, False)

        return ds
|
|
1904
|
-
|
|
1905
|
-
    def compute_climatology(self):
        """Reduce the flux time series to a monthly climatology, in place.

        Averages the flux variable over calendar months, assigns climatology
        dates, swaps the "month" dimension for "time", updates the dim-name
        mapping accordingly, and sets ``self.climatology`` to True.
        """
        logging.info("Compute climatology for river forcing.")

        time_dim = self.dim_names["time"]

        # Monthly mean over all years collapses the time axis to 12 entries.
        flux = self.ds[self.var_names["flux"]].groupby(f"{time_dim}.month").mean()
        self.ds[self.var_names["flux"]] = flux

        ds = assign_dates_to_climatology(self.ds, "month")
        ds = ds.swap_dims({"month": "time"})
        self.ds = ds

        # From here on the time dimension is called "time", whatever the
        # source file named it.
        updated_dim_names = {**self.dim_names}
        updated_dim_names["time"] = "time"
        self.dim_names = updated_dim_names

        self.climatology = True
|
|
1922
|
-
|
|
1923
|
-
    def sort_by_river_volume(self, ds: xr.Dataset) -> xr.Dataset:
        """Sorts the dataset by river volume in descending order (largest rivers
        first), if the volume variable is available.

        If the volume variable is not present in `opt_var_names`, a warning is
        logged and the dataset is returned unchanged.

        Parameters
        ----------
        ds : xr.Dataset
            The xarray Dataset containing the river data to be sorted by volume.

        Returns
        -------
        xr.Dataset
            The dataset with rivers sorted by their volume in descending order.
            If the volume variable is not available, the original dataset is
            returned.
        """
        if self.opt_var_names is not None and "vol" in self.opt_var_names:
            volume_values = ds[self.opt_var_names["vol"]].values
            if isinstance(volume_values, np.ndarray):
                # Check if all volume values are the same
                if np.all(volume_values == volume_values[0]):
                    # All volumes equal: keep the plain ascending argsort so
                    # the original station (tie) order is preserved; reversing
                    # would flip the order for no benefit.
                    sorted_indices = np.argsort(volume_values)
                else:
                    # Volumes differ: reverse the ascending argsort to obtain
                    # a descending (largest-first) ordering.
                    sorted_indices = np.argsort(volume_values)[::-1]

                ds = ds.isel(**{self.dim_names["station"]: sorted_indices})

            else:
                logging.warning("The volume data is not in a valid array format.")
        else:
            logging.warning(
                "Cannot sort rivers by volume. 'vol' is missing in the variable names."
            )

        return ds
|
|
1967
|
-
|
|
1968
|
-
    def extract_relevant_rivers(self, target_coords, dx):
        """Extracts a subset of the dataset based on the proximity of river
        mouths to target coordinates.

        Computes the great-circle distance (via `gc_dist`) between every river
        mouth and the target grid points, keeps only stations whose minimum
        distance is below `dx`, sorts them by volume, and replaces ``self.ds``
        with the filtered dataset.

        Parameters
        ----------
        target_coords : dict
            Target coordinates for the comparison, with keys:
            - "lon" (float): target longitude(s) in degrees.
            - "lat" (float): target latitude(s) in degrees.
            - "straddle" (bool): if True, river longitudes > 180 are shifted
              by -360 (grid straddles the International Date Line); otherwise
              negative longitudes are shifted by +360.
        dx : float
            Maximum distance threshold (in meters); only river mouths within
            `dx` of the target coordinates are retained.

        Returns
        -------
        indices : dict[str, list[tuple]]
            Maps each retained river name to a list of (eta_rho, xi_rho)
            index tuples of its nearest grid cell. Empty dict if no station
            is within `dx`.
        """
        # Retrieve longitude and latitude of river mouths
        river_lon = self.ds[self.var_names["longitude"]]
        river_lat = self.ds[self.var_names["latitude"]]

        # Match the river longitudes to the grid's longitude convention.
        if target_coords["straddle"]:
            river_lon = xr.where(river_lon > 180, river_lon - 360, river_lon)
        else:
            river_lon = xr.where(river_lon < 0, river_lon + 360, river_lon)

        # Distance from each river mouth to every target grid point.
        dist = gc_dist(target_coords["lon"], target_coords["lat"], river_lon, river_lat)
        dist_min = dist.min(dim=["eta_rho", "xi_rho"])
        # Filter the dataset to include only stations within the distance threshold
        if (dist_min < dx).any():
            ds = self.ds.where(dist_min < dx, drop=True)
            ds = self.sort_by_river_volume(ds)
            dist = dist.where(dist_min < dx, drop=True).transpose(
                self.dim_names["station"], "eta_rho", "xi_rho"
            )

            # NOTE: uses self.ds (still unfiltered at this point); lookups are
            # by station label, so this is consistent with the filtered dist.
            river_indices = get_indices_of_nearest_grid_cell_for_rivers(dist, self)
        else:
            # No station close enough: reset to an empty dataset.
            ds = xr.Dataset()
            river_indices = {}

        self.ds = ds

        return river_indices
|
|
2026
|
-
|
|
2027
|
-
    def extract_named_rivers(self, indices):
        """Restrict ``self.ds`` to the rivers named in the `indices` dictionary.

        Parameters
        ----------
        indices : dict
            Keys are river names (strings); values carry river-related data
            (e.g. river indices, coordinates) and are not used here.

        Returns
        -------
        None
            Modifies ``self.ds`` in place, setting it to the filtered dataset
            containing only the requested rivers.

        Raises
        ------
        ValueError
            - If `indices` is not a dictionary.
            - If any of the requested river names are not found in the dataset.
        """
        if not isinstance(indices, dict):
            raise ValueError("`indices` must be a dictionary.")

        river_names = list(indices.keys())

        # Keep only the stations whose name appears in the requested list.
        ds_filtered = self.ds.where(
            self.ds[self.var_names["name"]].isin(river_names), drop=True
        )

        # Check that all requested rivers exist in the dataset
        filtered_river_names = set(ds_filtered[self.var_names["name"]].values)
        missing_rivers = set(river_names) - filtered_river_names

        if missing_rivers:
            raise ValueError(
                f"The following rivers were not found in the dataset: {missing_rivers}"
            )

        # Set the filtered dataset as the new `ds`
        self.ds = ds_filtered
|
|
2073
|
-
|
|
2074
|
-
|
|
2075
|
-
@dataclass(kw_only=True)
class DaiRiverDataset(RiverDataset):
    """Represents river data from the Dai river dataset.

    Defaults point at the ``dai_trenberth_may2019.nc`` file and map that
    file's dimension and variable names onto the generic RiverDataset roles.
    """

    # Downloaded (or fetched from cache) lazily via the default factory.
    filename: str | Path | list[str | Path] = field(
        default_factory=lambda: download_river_data("dai_trenberth_may2019.nc")
    )
    dim_names: dict[str, str] = field(
        default_factory=lambda: {
            "station": "station",
            "time": "time",
        }
    )
    var_names: dict[str, str] = field(
        default_factory=lambda: {
            "latitude": "lat_mou",
            "longitude": "lon_mou",
            "flux": "FLOW",
            "ratio": "ratio_m2s",
            "name": "riv_name",
        }
    )
    opt_var_names: dict[str, str] = field(
        default_factory=lambda: {
            "vol": "vol_stn",
        }
    )
    climatology: bool = False

    def add_time_info(self, ds: xr.Dataset) -> xr.Dataset:
        """Decode the Dai dataset's numeric time axis into datetime objects.

        Raw time values encode year and month as ``year * 100 + month``;
        each entry is mapped to the 15th of that month.

        Parameters
        ----------
        ds : xr.Dataset
            The input dataset to which time information will be added.

        Returns
        -------
        xr.Dataset
            The dataset with the time coordinate replaced by datetimes.
        """
        time_dim = self.dim_names["time"]

        # Extract the 'time' variable as a numpy array
        time_vals = ds[time_dim].values

        # Months 1-12 give fractional parts .01-.12, all below the 0.5
        # rounding threshold, so np.round recovers the year exactly.
        year = np.round(time_vals * 1e-2).astype(int)
        month = np.round((time_vals * 1e-2 - year) * 1e2).astype(int)

        # Day 15 is used as a representative mid-month timestamp.
        dates = [datetime(year=i, month=m, day=15) for i, m in zip(year, month)]

        ds[time_dim] = dates

        return ds
|
|
2139
|
-
|
|
2140
|
-
|
|
2141
1711
|
@dataclass
|
|
2142
1712
|
class TPXOManager:
|
|
2143
1713
|
"""Manages multiple TPXODataset instances and selects and processes tidal
|
|
@@ -2625,281 +2195,6 @@ class TPXOManager:
|
|
|
2625
2195
|
object.__setattr__(self.datasets["sal"], "var_names", var_names)
|
|
2626
2196
|
|
|
2627
2197
|
|
|
2628
|
-
# shared functions
|
|
2629
|
-
|
|
2630
|
-
|
|
2631
|
-
def _check_dataset(
|
|
2632
|
-
ds: xr.Dataset,
|
|
2633
|
-
dim_names: dict[str, str],
|
|
2634
|
-
var_names: dict[str, str],
|
|
2635
|
-
opt_var_names: dict[str, str] | None = None,
|
|
2636
|
-
) -> None:
|
|
2637
|
-
"""Check if the dataset contains the specified variables and dimensions.
|
|
2638
|
-
|
|
2639
|
-
Parameters
|
|
2640
|
-
----------
|
|
2641
|
-
ds : xr.Dataset
|
|
2642
|
-
The xarray Dataset to check.
|
|
2643
|
-
dim_names: Dict[str, str], optional
|
|
2644
|
-
Dictionary specifying the names of dimensions in the dataset.
|
|
2645
|
-
var_names: Dict[str, str]
|
|
2646
|
-
Dictionary of variable names that are required in the dataset.
|
|
2647
|
-
opt_var_names : Optional[Dict[str, str]], optional
|
|
2648
|
-
Dictionary of optional variable names.
|
|
2649
|
-
These variables are not strictly required, and the function will not raise an error if they are missing.
|
|
2650
|
-
Default is None, meaning no optional variables are considered.
|
|
2651
|
-
|
|
2652
|
-
|
|
2653
|
-
Raises
|
|
2654
|
-
------
|
|
2655
|
-
ValueError
|
|
2656
|
-
If the dataset does not contain the specified variables or dimensions.
|
|
2657
|
-
"""
|
|
2658
|
-
missing_dims = [dim for dim in dim_names.values() if dim not in ds.dims]
|
|
2659
|
-
if missing_dims:
|
|
2660
|
-
raise ValueError(
|
|
2661
|
-
f"Dataset does not contain all required dimensions. The following dimensions are missing: {missing_dims}"
|
|
2662
|
-
)
|
|
2663
|
-
|
|
2664
|
-
missing_vars = [var for var in var_names.values() if var not in ds.data_vars]
|
|
2665
|
-
if missing_vars:
|
|
2666
|
-
raise ValueError(
|
|
2667
|
-
f"Dataset does not contain all required variables. The following variables are missing: {missing_vars}"
|
|
2668
|
-
)
|
|
2669
|
-
|
|
2670
|
-
if opt_var_names:
|
|
2671
|
-
missing_optional_vars = [
|
|
2672
|
-
var for var in opt_var_names.values() if var not in ds.data_vars
|
|
2673
|
-
]
|
|
2674
|
-
if missing_optional_vars:
|
|
2675
|
-
logging.warning(
|
|
2676
|
-
f"Optional variables missing (but not critical): {missing_optional_vars}"
|
|
2677
|
-
)
|
|
2678
|
-
|
|
2679
|
-
|
|
2680
|
-
def _select_relevant_times(
    ds: xr.Dataset,
    time_dim: str,
    start_time: datetime,
    end_time: datetime | None = None,
    climatology: bool = False,
    allow_flex_time: bool = False,
) -> xr.Dataset:
    """
    Select a subset of the dataset based on time constraints.

    Two main use cases:

    1. **Time range selection (start_time + end_time provided):**
       Returns all records strictly between `start_time` and `end_time`,
       plus one record at or before `start_time` and one at or after
       `end_time` when available, even if they fall outside the strict range.

    2. **Initial condition selection (end_time=None):**
       Delegates to `_select_initial_time`, reducing the dataset to exactly
       one time entry. With `allow_flex_time=True` a +24-hour window after
       `start_time` is searched for the closest timestamp; otherwise an
       exact match is required.

    Additional behavior:
    - If `climatology=True`, the dataset must contain exactly 12 time steps;
      a valid climatology is returned without further filtering.
    - `cftime` datetimes are converted to `np.datetime64` before filtering.

    Parameters
    ----------
    ds : xr.Dataset
        The dataset to filter. Must contain a valid time dimension.
    time_dim : str
        Name of the time dimension in `ds`.
    start_time : datetime
        Start time for filtering.
    end_time : datetime or None
        End time for filtering. If `None`, exactly one timestamp is selected.
    climatology : bool, optional
        If True, requires exactly 12 time steps and bypasses normal
        filtering. Defaults to False.
    allow_flex_time : bool, optional
        Whether to allow a +24h search window after `start_time` when
        `end_time` is None. If False (default), requires an exact match.

    Returns
    -------
    xr.Dataset
        A filtered dataset containing only the selected time entries.

    Raises
    ------
    ValueError
        - If `climatology=True` but the dataset does not have 12 time steps.
        - If `climatology=False` and the dataset has integer time values.
        - If no valid records are found within the requested range or window.

    Warns
    -----
    UserWarning
        - If no records exist at or before `start_time` or at or after
          `end_time` (the earliest/latest available time is used instead).
        - If the specified time dimension does not exist in the dataset.
    """
    if time_dim not in ds.variables:
        logging.warning(
            f"Dataset does not contain time dimension '{time_dim}'. "
            "Please check variable naming or dataset structure."
        )
        return ds

    time_type = get_time_type(ds[time_dim])

    if climatology:
        if len(ds[time_dim]) != 12:
            raise ValueError(
                f"The dataset contains {len(ds[time_dim])} time steps, but the climatology flag is set to True, which requires exactly 12 time steps."
            )
    else:
        if time_type == "int":
            raise ValueError(
                "The dataset contains integer time values, which are only supported when the climatology flag is set to True. However, your climatology flag is set to False."
            )
        if time_type == "cftime":
            # Normalize to np.datetime64 so comparisons below work uniformly.
            ds = ds.assign_coords({time_dim: convert_cftime_to_datetime(ds[time_dim])})

    if not end_time:
        # Assume we are looking for exactly one time record for initial conditions
        return _select_initial_time(
            ds, time_dim, start_time, climatology, allow_flex_time
        )

    if climatology:
        # A validated climatology is used as-is; no range filtering.
        return ds

    # Identify records before or at start_time
    before_start = ds[time_dim] <= np.datetime64(start_time)
    if before_start.any():
        closest_before_start = ds[time_dim].where(before_start, drop=True)[-1]
    else:
        logging.warning(f"No records found at or before the start_time: {start_time}.")
        closest_before_start = ds[time_dim][0]

    # Identify records after or at end_time
    after_end = ds[time_dim] >= np.datetime64(end_time)
    if after_end.any():
        closest_after_end = ds[time_dim].where(after_end, drop=True).min()
    else:
        logging.warning(f"No records found at or after the end_time: {end_time}.")
        closest_after_end = ds[time_dim].max()

    # Select records within the time range and add the closest before/after
    within_range = (ds[time_dim] > np.datetime64(start_time)) & (
        ds[time_dim] < np.datetime64(end_time)
    )
    selected_times = ds[time_dim].where(
        within_range
        | (ds[time_dim] == closest_before_start)
        | (ds[time_dim] == closest_after_end),
        drop=True,
    )
    ds = ds.sel({time_dim: selected_times})

    return ds
|
|
2814
|
-
|
|
2815
|
-
|
|
2816
|
-
def _select_initial_time(
    ds: xr.Dataset,
    time_dim: str,
    ini_time: datetime,
    climatology: bool,
    allow_flex_time: bool = False,
) -> xr.Dataset:
    """Select exactly one initial time from dataset.

    Parameters
    ----------
    ds : xr.Dataset
        The input dataset with a time dimension.
    time_dim : str
        Name of the time dimension.
    ini_time : datetime
        The desired initial time.
    climatology : bool
        If True, interpolate from the monthly climatology at `ini_time`
        instead of selecting an existing timestamp.
    allow_flex_time : bool
        - If True: allow a +24h window and pick the closest available timestamp.
        - If False (default): require an exact match, otherwise raise ValueError.

    Returns
    -------
    xr.Dataset
        Dataset reduced to exactly one timestamp.

    Raises
    ------
    ValueError
        If no matching time is found (when `allow_flex_time=False`), or no
        entries are available within the +24h window (when
        `allow_flex_time=True`).
    """
    if climatology:
        # Convert from timedelta64[ns] to fractional days
        ds["time"] = ds["time"] / np.timedelta64(1, "D")
        # Interpolate from climatology for initial conditions
        return interpolate_from_climatology(ds, time_dim, ini_time)

    if allow_flex_time:
        # Look in time range [ini_time, ini_time + 24h)
        end_time = ini_time + timedelta(days=1)
        times = (np.datetime64(ini_time) <= ds[time_dim]) & (
            ds[time_dim] < np.datetime64(end_time)
        )

        if np.all(~times):
            raise ValueError(
                f"No time entries found between {ini_time} and {end_time}."
            )

        ds = ds.where(times, drop=True)
        if ds.sizes[time_dim] > 1:
            # Pick the time closest to start_time
            ds = ds.isel({time_dim: 0})

        logging.warning(
            f"Selected time entry closest to the specified start_time in +24 hour range: {ds[time_dim].values}"
        )

    else:
        # Strict match required
        if not (ds[time_dim].values == np.datetime64(ini_time)).any():
            raise ValueError(
                f"No exact match found for initial time {ini_time}. Consider setting allow_flex_time to True."
            )

        ds = ds.sel({time_dim: np.datetime64(ini_time)})

    # Scalar selection drops the time dimension; restore it for consistency.
    if time_dim not in ds.dims:
        ds = ds.expand_dims(time_dim)

    return ds
|
|
2888
|
-
|
|
2889
|
-
|
|
2890
|
-
def decode_string(byte_array):
    """Join the entries of *byte_array* into a single UTF-8 string.

    Entries that are not ``bytes``, that equal ``b" "``, or that are NaN are
    skipped; undecodable byte sequences within an entry are dropped via
    ``errors="ignore"``.
    """
    pieces = []
    for item in byte_array.values:
        # Skip non-bytes entries (e.g. NaN padding) and single-space fillers.
        if isinstance(item, bytes) and item != b" " and item is not np.nan:
            pieces.append(item.decode("utf-8", errors="ignore"))

    return "".join(pieces)
|
|
2901
|
-
|
|
2902
|
-
|
|
2903
2198
|
def modified_julian_days(year, month, day, hour=0):
|
|
2904
2199
|
"""Calculate the Modified Julian Day (MJD) for a given date and time.
|
|
2905
2200
|
|
|
@@ -2957,82 +2252,6 @@ def modified_julian_days(year, month, day, hour=0):
|
|
|
2957
2252
|
return mjd
|
|
2958
2253
|
|
|
2959
2254
|
|
|
2960
|
-
def get_indices_of_nearest_grid_cell_for_rivers(
    dist: xr.DataArray, data: RiverDataset
) -> dict[str, list[tuple[int, int]]]:
    """Get the indices of the nearest grid cell for each river based on distance.

    Parameters
    ----------
    dist : xr.DataArray
        A 2D or 3D array representing distances from each river to coastal
        grid cells, with dimensions including "eta_rho" and "xi_rho".
    data : RiverDataset
        An instance of RiverDataset containing river names and dimension
        metadata.

    Returns
    -------
    dict[str, list[tuple[int, int]]]
        Dictionary mapping each river name to a list containing the
        (eta_rho, xi_rho) index of the closest coastal grid cell.
    """
    # argmin over both spatial dims yields, per station, the index pair of
    # the minimum-distance grid cell.
    indices = dist.argmin(dim=["eta_rho", "xi_rho"])

    eta_rho_values = indices["eta_rho"].values
    xi_rho_values = indices["xi_rho"].values

    # Station labels ride along as a coordinate on the argmin result; use
    # them to look up the corresponding river names by label.
    stations = indices["eta_rho"][data.dim_names["station"]].values
    names = (
        data.ds[data.var_names["name"]]
        .sel({data.dim_names["station"]: stations})
        .values
    )

    # Build dictionary of river name to grid index
    river_indices = {
        str(names[i]): [(int(eta_rho_values[i]), int(xi_rho_values[i]))]
        for i in range(len(stations))
    }

    return river_indices
|
|
3000
|
-
|
|
3001
|
-
|
|
3002
|
-
def _deduplicate_river_names(
    ds: xr.Dataset, name_var: str, station_dim: str
) -> xr.Dataset:
    """Ensure river names are unique by appending _1, _2, ... to duplicates.

    Names that occur only once are left untouched; each occurrence of a
    duplicated name gets a 1-based suffix in order of appearance.

    Parameters
    ----------
    ds : xr.Dataset
        Dataset holding the river-name variable.
    name_var : str
        Name of the variable containing river names.
    station_dim : str
        Name of the station dimension (not used directly; kept for interface
        stability).

    Returns
    -------
    xr.Dataset
        The dataset with a deduplicated name variable; dims and attrs of the
        original variable are preserved.
    """
    original = ds[name_var]

    # Force cast to plain Python strings
    names = [str(name) for name in original.values]

    # Count all names
    name_counts = Counter(names)
    seen: defaultdict[str, int] = defaultdict(int)

    unique_names = []
    for name in names:
        if name_counts[name] > 1:
            seen[name] += 1
            unique_names.append(f"{name}_{seen[name]}")
        else:
            unique_names.append(name)

    # default=1 guards against an empty station dimension, where a bare
    # max() over an empty sequence would raise ValueError.
    width = max((len(n) for n in unique_names), default=1)

    # Replace with updated names while preserving dtype, dims, attrs
    updated_array = xr.DataArray(
        data=np.array(unique_names, dtype=f"<U{width}"),
        dims=original.dims,
        attrs=original.attrs,
    )
    ds[name_var] = updated_array

    return ds
|
|
3034
|
-
|
|
3035
|
-
|
|
3036
2255
|
def _concatenate_longitudes(
|
|
3037
2256
|
ds: xr.Dataset,
|
|
3038
2257
|
dim_names: Mapping[str, str],
|