roms-tools 1.7.0__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. roms_tools/_version.py +1 -1
  2. roms_tools/setup/boundary_forcing.py +253 -144
  3. roms_tools/setup/datasets.py +216 -48
  4. roms_tools/setup/download.py +13 -17
  5. roms_tools/setup/grid.py +561 -512
  6. roms_tools/setup/initial_conditions.py +148 -30
  7. roms_tools/setup/mask.py +69 -0
  8. roms_tools/setup/plot.py +4 -8
  9. roms_tools/setup/regrid.py +4 -2
  10. roms_tools/setup/surface_forcing.py +11 -18
  11. roms_tools/setup/tides.py +9 -12
  12. roms_tools/setup/topography.py +92 -128
  13. roms_tools/setup/utils.py +49 -25
  14. roms_tools/setup/vertical_coordinate.py +5 -16
  15. roms_tools/tests/test_setup/test_boundary_forcing.py +10 -5
  16. roms_tools/tests/test_setup/test_data/grid.zarr/.zattrs +0 -1
  17. roms_tools/tests/test_setup/test_data/grid.zarr/.zmetadata +56 -201
  18. roms_tools/tests/test_setup/test_data/grid.zarr/Cs_r/.zattrs +1 -1
  19. roms_tools/tests/test_setup/test_data/grid.zarr/Cs_w/.zattrs +1 -1
  20. roms_tools/tests/test_setup/test_data/grid.zarr/{interface_depth_rho → sigma_r}/.zarray +2 -6
  21. roms_tools/tests/test_setup/test_data/grid.zarr/sigma_r/.zattrs +7 -0
  22. roms_tools/tests/test_setup/test_data/grid.zarr/sigma_r/0 +0 -0
  23. roms_tools/tests/test_setup/test_data/grid.zarr/{interface_depth_u → sigma_w}/.zarray +2 -6
  24. roms_tools/tests/test_setup/test_data/grid.zarr/sigma_w/.zattrs +7 -0
  25. roms_tools/tests/test_setup/test_data/grid.zarr/sigma_w/0 +0 -0
  26. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zattrs +1 -2
  27. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zmetadata +58 -203
  28. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/Cs_r/.zattrs +1 -1
  29. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/Cs_w/.zattrs +1 -1
  30. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/h/.zattrs +1 -1
  31. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/h/0.0 +0 -0
  32. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_coarse/0.0 +0 -0
  33. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_rho/0.0 +0 -0
  34. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_u/0.0 +0 -0
  35. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/mask_v/0.0 +0 -0
  36. roms_tools/tests/test_setup/test_data/{grid.zarr/interface_depth_v → grid_that_straddles_dateline.zarr/sigma_r}/.zarray +2 -6
  37. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/sigma_r/.zattrs +7 -0
  38. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/sigma_r/0 +0 -0
  39. roms_tools/tests/test_setup/test_data/{grid.zarr/layer_depth_rho → grid_that_straddles_dateline.zarr/sigma_w}/.zarray +2 -6
  40. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/sigma_w/.zattrs +7 -0
  41. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/sigma_w/0 +0 -0
  42. roms_tools/tests/test_setup/test_grid.py +110 -12
  43. roms_tools/tests/test_setup/test_initial_conditions.py +2 -1
  44. roms_tools/tests/test_setup/test_river_forcing.py +3 -2
  45. roms_tools/tests/test_setup/test_surface_forcing.py +2 -22
  46. roms_tools/tests/test_setup/test_tides.py +2 -1
  47. roms_tools/tests/test_setup/test_topography.py +106 -1
  48. {roms_tools-1.7.0.dist-info → roms_tools-2.0.0.dist-info}/LICENSE +1 -1
  49. {roms_tools-1.7.0.dist-info → roms_tools-2.0.0.dist-info}/METADATA +2 -1
  50. {roms_tools-1.7.0.dist-info → roms_tools-2.0.0.dist-info}/RECORD +52 -76
  51. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_rho/.zattrs +0 -9
  52. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_rho/0.0.0 +0 -0
  53. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_u/.zattrs +0 -9
  54. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_u/0.0.0 +0 -0
  55. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_v/.zattrs +0 -9
  56. roms_tools/tests/test_setup/test_data/grid.zarr/interface_depth_v/0.0.0 +0 -0
  57. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_rho/.zattrs +0 -9
  58. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_rho/0.0.0 +0 -0
  59. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_u/.zarray +0 -24
  60. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_u/.zattrs +0 -9
  61. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_u/0.0.0 +0 -0
  62. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_v/.zarray +0 -24
  63. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_v/.zattrs +0 -9
  64. roms_tools/tests/test_setup/test_data/grid.zarr/layer_depth_v/0.0.0 +0 -0
  65. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_rho/.zarray +0 -24
  66. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_rho/.zattrs +0 -9
  67. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_rho/0.0.0 +0 -0
  68. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_u/.zarray +0 -24
  69. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_u/.zattrs +0 -9
  70. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_u/0.0.0 +0 -0
  71. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_v/.zarray +0 -24
  72. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_v/.zattrs +0 -9
  73. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/interface_depth_v/0.0.0 +0 -0
  74. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_rho/.zarray +0 -24
  75. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_rho/.zattrs +0 -9
  76. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_rho/0.0.0 +0 -0
  77. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_u/.zarray +0 -24
  78. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_u/.zattrs +0 -9
  79. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_u/0.0.0 +0 -0
  80. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_v/.zarray +0 -24
  81. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_v/.zattrs +0 -9
  82. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/layer_depth_v/0.0.0 +0 -0
  83. roms_tools/tests/test_setup/test_vertical_coordinate.py +0 -91
  84. {roms_tools-1.7.0.dist-info → roms_tools-2.0.0.dist-info}/WHEEL +0 -0
  85. {roms_tools-1.7.0.dist-info → roms_tools-2.0.0.dist-info}/top_level.txt +0 -0
roms_tools/setup/datasets.py
@@ -1,3 +1,4 @@
+import time
 import re
 import xarray as xr
 from dataclasses import dataclass, field
@@ -15,7 +16,11 @@ from roms_tools.setup.utils import (
     one_dim_fill,
     gc_dist,
 )
-from roms_tools.setup.download import download_correction_data, download_river_data
+from roms_tools.setup.download import (
+    download_correction_data,
+    download_topo,
+    download_river_data,
+)
 from roms_tools.setup.fill import LateralFill

 # lat-lon datasets
@@ -74,7 +79,7 @@ class Dataset:
     )
     var_names: Dict[str, str]
     climatology: Optional[bool] = False
-    use_dask: Optional[bool] = True
+    use_dask: Optional[bool] = False
     apply_post_processing: Optional[bool] = True

     is_global: bool = field(init=False, repr=False)
@@ -117,6 +122,8 @@ class Dataset:

         # Make sure that latitude is ascending
         ds = self.ensure_dimension_is_ascending(ds, dim="latitude")
+        # Make sure there are no 360 degree jumps in longitude
+        ds = self.ensure_dimension_is_ascending(ds, dim="longitude")

         if "depth" in self.dim_names:
             # Make sure that depth is ascending
@@ -126,11 +133,6 @@ class Dataset:

         # Check whether the data covers the entire globe
         object.__setattr__(self, "is_global", self.check_if_global(ds))
-
-        # If dataset is global concatenate three copies of field along longitude dimension
-        if self.is_global:
-            ds = self.concatenate_longitudes(ds)
-
         object.__setattr__(self, "ds", ds)

         if self.apply_post_processing:
@@ -289,7 +291,11 @@
     ) -> xr.Dataset:
         """Ensure that the specified dimension in the dataset is in ascending order.

-        If the values along the specified dimension are in descending order, this function reverses the order of the dimension to make it ascending.
+        This function checks the order of values along the specified dimension. If they
+        are in descending order, it reverses the dimension to make it ascending. For
+        the "longitude" dimension, if it has a discontinuity (e.g., [0, 180][-180, 0]),
+        the function adjusts values to eliminate the 360-degree jump, transforming
+        the range into a continuous [0, 360) span.

         Parameters
         ----------
@@ -303,14 +309,23 @@
         -------
         xr.Dataset
             A new `xarray.Dataset` with the specified dimension in ascending order.
-            If the dimension was already in ascending order, the original dataset is returned unchanged.
-            If the dimension was in descending order, the dataset is returned with the dimension reversed.
+            - If the dimension was already in ascending order, the original dataset is returned unchanged.
+            - If the dimension was in descending order, the dataset is returned with the dimension reversed.
+            - If the dimension is "longitude" with a discontinuity (e.g., [0, 180][-180, 0]), the values are adjusted to eliminate the 360-degree jump.
         """
-        # Make sure that latitude is ascending
+        # Check if the dimension is in descending order and reverse if needed
         diff = np.diff(ds[self.dim_names[dim]])
         if np.all(diff < 0):
             ds = ds.isel(**{self.dim_names[dim]: slice(None, None, -1)})

+        # Check for a discontinuity in longitude and adjust values if present
+        elif np.any(diff < 0) and dim == "longitude":
+            ds[self.dim_names[dim]] = xr.where(
+                ds[self.dim_names[dim]] < 0,
+                ds[self.dim_names[dim]] + 360,
+                ds[self.dim_names[dim]],
+            )
+
         return ds

     def infer_horizontal_resolution(self, ds: xr.Dataset):
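The new `elif` branch above targets coordinate axes stored as [0, 180] followed by [-180, 0]. As a minimal standalone sketch of that normalization (plain numpy/xarray on a toy axis, not a call into roms-tools):

    import numpy as np
    import xarray as xr

    # Toy longitude axis with a 360-degree jump: 0 ... 175, then -180 ... -5
    lon = xr.DataArray(
        np.concatenate([np.arange(0, 180, 5), np.arange(-180, 0, 5)]), dims="lon"
    )

    diff = np.diff(lon)
    if np.all(diff < 0):
        lon = lon[::-1]                          # purely descending axis: reverse it
    elif np.any(diff < 0):
        lon = xr.where(lon < 0, lon + 360, lon)  # map negative longitudes into [0, 360)

    print(float(lon.min()), float(lon.max()))    # 0.0 355.0 -- no 360-degree jump left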
@@ -364,43 +379,68 @@ class Dataset:

         return is_global

-    def concatenate_longitudes(self, ds):
-        """
-        Concatenates the field three times: with longitudes shifted by -360, original longitudes, and shifted by +360.
+    def concatenate_longitudes(self, ds, end="upper", verbose=False):
+        """Concatenates fields in dataset twice along the longitude dimension.

         Parameters
         ----------
-        field : xr.DataArray
-            The field to be concatenated.
+        ds: xr.Dataset
+            The dataset to be concatenated. The longitude dimension must be present in this dataset.
+        end : str, optional
+            Specifies which end to shift the longitudes.
+            Options are:
+              - "lower": shifts longitudes by -360 degrees and concatenates to the lower end.
+              - "upper": shifts longitudes by +360 degrees and concatenates to the upper end.
+              - "both": shifts longitudes by -360 degrees and 360 degrees and concatenates to both ends.
+            Default is "upper".
+        verbose : bool, optional
+            If True, print message if dataset is concatenated along longitude dimension.
+            Defaults to False.

         Returns
         -------
-        xr.DataArray
-            The concatenated field, with the longitude dimension extended.
+        ds_concatenated : xr.Dataset
+            The concatenated dataset.
+        """

-        Notes
-        -----
-        Concatenating three times may be overkill in most situations, but it is safe. Alternatively, we could refactor
-        to figure out whether concatenating on the lower end, upper end, or at all is needed.
+        if verbose:
+            start_time = time.time()

-        """
         ds_concatenated = xr.Dataset()

         lon = ds[self.dim_names["longitude"]]
-        lon_minus360 = lon - 360
-        lon_plus360 = lon + 360
-        lon_concatenated = xr.concat(
-            [lon_minus360, lon, lon_plus360], dim=self.dim_names["longitude"]
-        )
+        if end == "lower":
+            lon_minus360 = lon - 360
+            lon_concatenated = xr.concat(
+                [lon_minus360, lon], dim=self.dim_names["longitude"]
+            )

-        ds_concatenated[self.dim_names["longitude"]] = lon_concatenated
+        elif end == "upper":
+            lon_plus360 = lon + 360
+            lon_concatenated = xr.concat(
+                [lon, lon_plus360], dim=self.dim_names["longitude"]
+            )

-        for var in self.var_names.values():
+        elif end == "both":
+            lon_minus360 = lon - 360
+            lon_plus360 = lon + 360
+            lon_concatenated = xr.concat(
+                [lon_minus360, lon, lon_plus360], dim=self.dim_names["longitude"]
+            )
+
+        for var in ds.data_vars:
             if self.dim_names["longitude"] in ds[var].dims:
                 field = ds[var]
-                field_concatenated = xr.concat(
-                    [field, field, field], dim=self.dim_names["longitude"]
-                )
+
+                if end == "both":
+                    field_concatenated = xr.concat(
+                        [field, field, field], dim=self.dim_names["longitude"]
+                    )
+                else:
+                    field_concatenated = xr.concat(
+                        [field, field], dim=self.dim_names["longitude"]
+                    )
+
                 if self.use_dask:
                     field_concatenated = field_concatenated.chunk(
                         {self.dim_names["longitude"]: -1}
@@ -410,6 +450,13 @@ class Dataset:
             else:
                 ds_concatenated[var] = ds[var]

+        ds_concatenated[self.dim_names["longitude"]] = lon_concatenated
+
+        if verbose:
+            logging.info(
+                f"Concatenating the data along the longitude dimension: {time.time() - start_time:.3f} seconds"
+            )
+
         return ds_concatenated

     def post_process(self):
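For orientation, the `end="upper"` case of the new `concatenate_longitudes` amounts to appending a second copy of the data with the longitude coordinate shifted by +360. A rough standalone sketch of that idea with plain xarray (toy data, not the roms-tools API):

    import numpy as np
    import xarray as xr

    lon = xr.DataArray(np.arange(0.0, 360.0, 1.0), dims="lon", name="lon")
    ds = xr.Dataset(
        {"topo": ("lon", np.cos(np.deg2rad(lon.values)))}, coords={"lon": lon}
    )

    # "upper": duplicate the field and extend the coordinate by +360 degrees
    topo_concatenated = xr.concat([ds["topo"], ds["topo"]], dim="lon")
    lon_concatenated = xr.concat([lon, lon + 360], dim="lon")

    ds_concatenated = xr.Dataset({"topo": topo_concatenated})
    ds_concatenated["lon"] = lon_concatenated

    print(ds_concatenated.sizes["lon"])  # 720 points covering 0 ... 719 degrees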
@@ -423,7 +470,9 @@
         """
         pass

-    def choose_subdomain(self, target_coords, buffer_points=20, return_copy=False):
+    def choose_subdomain(
+        self, target_coords, buffer_points=20, return_copy=False, verbose=False
+    ):
         """Selects a subdomain from the xarray Dataset based on specified target
         coordinates, extending the selection by a defined buffer. Adjusts longitude
         ranges as necessary to accommodate the dataset's expected range and handles
@@ -440,6 +489,9 @@ class Dataset:
         return_subdomain : bool, optional
             If True, returns the subset of the original dataset representing the chosen
             subdomain. If False, assigns the subset to `self.ds`. Defaults to False.
+        verbose : bool, optional
+            If True, print message if dataset is concatenated along longitude dimension.
+            Defaults to False.

         Returns
         -------
@@ -462,9 +514,43 @@

         margin = self.resolution * buffer_points

-        if not self.is_global:
+        # Select the subdomain in latitude direction (so that we have to concatenate fewer latitudes below if concatenation is necessary)
+        subdomain = self.ds.sel(
+            **{
+                self.dim_names["latitude"]: slice(lat_min - margin, lat_max + margin),
+            }
+        )
+        lon = subdomain[self.dim_names["longitude"]]
+
+        if self.is_global:
+            # Concatenate only if necessary
+            if lon_max + margin > lon.max():
+                # See if shifting by +360 degrees helps
+                if (lon_min - margin > (lon + 360).min()) and (
+                    lon_max + margin < (lon + 360).max()
+                ):
+                    subdomain[self.dim_names["longitude"]] = lon + 360
+                    lon = subdomain[self.dim_names["longitude"]]
+                else:
+                    subdomain = self.concatenate_longitudes(
+                        subdomain, end="upper", verbose=verbose
+                    )
+                    lon = subdomain[self.dim_names["longitude"]]
+            if lon_min - margin < lon.min():
+                # See if shifting by -360 degrees helps
+                if (lon_min - margin > (lon - 360).min()) and (
+                    lon_max + margin < (lon - 360).max()
+                ):
+                    subdomain[self.dim_names["longitude"]] = lon - 360
+                    lon = subdomain[self.dim_names["longitude"]]
+                else:
+                    subdomain = self.concatenate_longitudes(
+                        subdomain, end="lower", verbose=verbose
+                    )
+                    lon = subdomain[self.dim_names["longitude"]]
+
+        else:
             # Adjust longitude range if needed to match the expected range
-            lon = self.ds[self.dim_names["longitude"]]
             if not target_coords["straddle"]:
                 if lon.min() < -180:
                     if lon_max + margin > 0:
@@ -484,12 +570,9 @@
                     if lon_min - margin < 0:
                         lon_min += 360
                         lon_max += 360
-
-        # Select the subdomain
-
-        subdomain = self.ds.sel(
+        # Select the subdomain in longitude direction
+        subdomain = subdomain.sel(
             **{
-                self.dim_names["latitude"]: slice(lat_min - margin, lat_max + margin),
                 self.dim_names["longitude"]: slice(lon_min - margin, lon_max + margin),
             }
         )
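The selection above keys off `margin = resolution * buffer_points`, i.e. the target extent is padded by a fixed number of source grid cells on each side before slicing. A toy sketch with made-up numbers (the variable names follow the diff; the values are purely illustrative):

    import numpy as np
    import xarray as xr

    resolution = 0.25                      # assumed source grid spacing in degrees
    buffer_points = 20
    margin = resolution * buffer_points    # 5 degrees of padding per side

    ds = xr.Dataset(
        coords={"lat": np.arange(-90, 90.25, 0.25), "lon": np.arange(0, 360, 0.25)}
    )

    lat_min, lat_max = 10.0, 30.0          # target domain extent
    lon_min, lon_max = 200.0, 240.0

    subdomain = ds.sel(
        lat=slice(lat_min - margin, lat_max + margin),
        lon=slice(lon_min - margin, lon_max + margin),
    )
    print(subdomain.sizes)                 # about 121 x 201 points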
@@ -1346,6 +1429,98 @@ class ERA5Correction(Dataset):
         object.__setattr__(self, "ds", subdomain)


+@dataclass(frozen=True, kw_only=True)
+class ETOPO5Dataset(Dataset):
+    """Represents topography data on the original grid from the ETOPO5 dataset.
+
+    Parameters
+    ----------
+    filename : str, optional
+        The path to the ETOPO5 dataset file. If not provided, the dataset will be downloaded
+        automatically via the `pooch` library.
+    var_names : Dict[str, str], optional
+        Dictionary of variable names required in the dataset. Defaults to:
+        {
+            "topo": "topo",
+        }
+    dim_names : Dict[str, str], optional
+        Dictionary specifying the names of dimensions in the dataset. Defaults to:
+        {"longitude": "lon", "latitude": "lat"}.
+
+    Attributes
+    ----------
+    ds : xr.Dataset
+        The xarray Dataset containing the ETOPO5 data, loaded from the specified file.
+    """
+
+    filename: str = field(default_factory=lambda: download_topo("etopo5.nc"))
+    var_names: Dict[str, str] = field(
+        default_factory=lambda: {
+            "topo": "topo",
+        }
+    )
+    dim_names: Dict[str, str] = field(
+        default_factory=lambda: {"longitude": "lon", "latitude": "lat"}
+    )
+    ds: xr.Dataset = field(init=False, repr=False)
+
+    def clean_up(self, ds: xr.Dataset) -> xr.Dataset:
+        """Assign lat and lon as coordinates.
+
+        Parameters
+        ----------
+        ds : xr.Dataset
+            The input dataset.
+
+        Returns
+        -------
+        ds : xr.Dataset
+            A cleaned `xarray.Dataset` with updated coordinates.
+        """
+        ds = ds.assign_coords(
+            {
+                "lon": ds["topo_lon"],
+                "lat": ds["topo_lat"],
+            }
+        )
+        return ds
+
+
+@dataclass(frozen=True, kw_only=True)
+class SRTM15Dataset(Dataset):
+    """Represents topography data on the original grid from the SRTM15 dataset.
+
+    Parameters
+    ----------
+    filename : str
+        The path to the SRTM15 dataset file.
+    var_names : Dict[str, str], optional
+        Dictionary of variable names required in the dataset. Defaults to:
+        {
+            "topo": "z",
+        }
+    dim_names : Dict[str, str], optional
+        Dictionary specifying the names of dimensions in the dataset. Defaults to:
+        {"longitude": "lon", "latitude": "lat"}.
+
+    Attributes
+    ----------
+    ds : xr.Dataset
+        The xarray Dataset containing the SRTM15 data, loaded from the specified file.
+    """
+
+    filename: str
+    var_names: Dict[str, str] = field(
+        default_factory=lambda: {
+            "topo": "z",
+        }
+    )
+    dim_names: Dict[str, str] = field(
+        default_factory=lambda: {"longitude": "lon", "latitude": "lat"}
+    )
+    ds: xr.Dataset = field(init=False, repr=False)
+
+
 # river datasets
 @dataclass(frozen=True, kw_only=True)
 class RiverDataset:
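The `clean_up` hook on `ETOPO5Dataset` promotes the raw `topo_lon`/`topo_lat` variables to coordinates so the generic `Dataset` machinery can slice on them. The standalone equivalent in xarray, on a guessed stand-in for the raw file layout:

    import numpy as np
    import xarray as xr

    # Stand-in for the raw ETOPO5 layout (assumed): lon/lat stored as plain variables
    raw = xr.Dataset(
        {
            "topo": (("lat", "lon"), np.zeros((3, 4))),
            "topo_lon": ("lon", np.array([0.0, 90.0, 180.0, 270.0])),
            "topo_lat": ("lat", np.array([-60.0, 0.0, 60.0])),
        }
    )

    clean = raw.assign_coords({"lon": raw["topo_lon"], "lat": raw["topo_lat"]})
    print(clean.coords)  # lon and lat are now proper, indexable coordinates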
@@ -1414,13 +1589,6 @@ class RiverDataset:
         -------
         ds : xr.Dataset
             The loaded xarray Dataset containing the forcing data.
-
-        Raises
-        ------
-        FileNotFoundError
-            If the specified file does not exist.
-        ValueError
-            If a list of files is provided but self.dim_names["time"] is not available or use_dask=False.
         """
         ds = _load_data(
             self.filename, self.dim_names, use_dask=False, decode_times=False
roms_tools/setup/download.py
@@ -1,5 +1,4 @@
 import pooch
-import xarray as xr

 # Create a Pooch object to manage the global topography data
 topo_data = pooch.create(
@@ -19,7 +18,6 @@ correction_data = pooch.create(
     base_url="https://github.com/CWorthy-ocean/roms-tools-data/raw/main/",
     # The registry specifies the files that can be fetched
     registry={
-        "etopo5.nc": "sha256:23600e422d59bbf7c3666090166a0d468c8ee16092f4f14e32c4e928fbcd627b",
         "SSR_correction.nc": "sha256:a170c1698e6cc2765b3f0bb51a18c6a979bc796ac3a4c014585aeede1f1f8ea0",
     },
 )
@@ -50,6 +48,7 @@ pup_test_data = pooch.create(
         "ERA5_global_test_data.nc": "8ed177ab64c02caf509b9fb121cf6713f286cc603b1f302f15f3f4eb0c21dc4f",
         "TPXO_global_test_data.nc": "457bfe87a7b247ec6e04e3c7d3e741ccf223020c41593f8ae33a14f2b5255e60",
         "TPXO_regional_test_data.nc": "11739245e2286d9c9d342dce5221e6435d2072b50028bef2e86a30287b3b4032",
+        "CESM_BGC_coarse_global_clim.nc": "20806e4e99285d6de168d3236e2d9245f4e9106474b1464beaa266a73e6ef79f",
         "CESM_BGC_2012.nc": "e374d5df3c1be742d564fd26fd861c2d40af73be50a432c51d258171d5638eb6",
         "CESM_regional_test_data_one_time_slice.nc": "43b578ecc067c85f95d6b97ed7b9dc8da7846f07c95331c6ba7f4a3161036a17",
         "CESM_regional_test_data_climatology.nc": "986a200029d9478fd43e6e4a8bc43e8a8f4407554893c59b5fcc2e86fd203272",
@@ -58,36 +57,33 @@ pup_test_data = pooch.create(
         "CESM_surface_global_test_data_climatology.nc": "a072757110c6f7b716a98f867688ef4195a5966741d2f368201ac24617254e35",
         "CESM_surface_global_test_data.nc": "874106ffbc8b1b220db09df1551bbb89d22439d795b4d1e5a24ee775e9a7bf6e",
         "grid_created_with_matlab.nc": "fd537ef8159fabb18e38495ec8d44e2fa1b7fb615fcb1417dd4c0e1bb5f4e41d",
+        "etopo5_coarsened_and_shifted.nc": "9a5cb4b38c779d22ddb0ad069b298b9722db34ca85a89273eccca691e89e6f96",
+        "srtm15_coarsened.nc": "48bc8f4beecfdca9c192b13f4cbeef1455f49d8261a82563aaec5757e100dff9",
     },
 )


-def fetch_topo(topography_source: str) -> xr.Dataset:
-    """Load the global topography data as an xarray Dataset.
+def download_topo(filename: str) -> str:
+    """Download simple topography file.

     Parameters
     ----------
-    topography_source : str
-        The source of the topography data to be loaded. Available options:
-        - "ETOPO5"
+    filename : str
+        The name of the test data file to be downloaded. Available options:
+        - "etopo5.nc"

     Returns
     -------
-    xr.Dataset
-        The global topography data as an xarray Dataset.
+    str
+        The path to the downloaded test data file.
     """
-    # Mapping from user-specified topography options to corresponding filenames in the registry
-    topo_dict = {"ETOPO5": "etopo5.nc"}
-
     # Fetch the file using Pooch, downloading if necessary
-    fname = topo_data.fetch(topo_dict[topography_source])
+    fname = topo_data.fetch(filename)

-    # Load the dataset using xarray and return it
-    ds = xr.open_dataset(fname)
-    return ds
+    return fname


-def download_river_data(filename: str) -> xr.Dataset:
+def download_river_data(filename: str) -> str:
     """Download river data file.

     Parameters
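With this change, `download_topo` hands back a local file path (via pooch) and leaves opening the file to the caller, for example the new `ETOPO5Dataset`. A generic sketch of that pattern with a placeholder registry (base URL, cache name, and missing hash are illustrative, not the real roms-tools registry):

    import pooch
    import xarray as xr

    # Placeholder registry; a None hash skips checksum verification
    topo_data = pooch.create(
        path=pooch.os_cache("my-tools"),
        base_url="https://example.org/data/",
        registry={"etopo5.nc": None},
    )

    def download_topo(filename: str) -> str:
        """Fetch the file with pooch (downloading if necessary) and return its local path."""
        return topo_data.fetch(filename)

    # The caller decides how (and whether) to open the file
    ds = xr.open_dataset(download_topo("etopo5.nc"))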