roms-tools 1.4.1__py3-none-any.whl → 1.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. roms_tools/_version.py +1 -1
  2. roms_tools/setup/boundary_forcing.py +77 -70
  3. roms_tools/setup/datasets.py +38 -59
  4. roms_tools/setup/download.py +3 -6
  5. roms_tools/setup/fill.py +8 -16
  6. roms_tools/setup/grid.py +74 -113
  7. roms_tools/setup/initial_conditions.py +43 -36
  8. roms_tools/setup/mixins.py +10 -14
  9. roms_tools/setup/surface_forcing.py +35 -33
  10. roms_tools/setup/tides.py +37 -41
  11. roms_tools/setup/topography.py +9 -17
  12. roms_tools/setup/utils.py +19 -40
  13. roms_tools/setup/vertical_coordinate.py +4 -6
  14. roms_tools/tests/test_setup/test_boundary_forcing.py +6 -13
  15. roms_tools/tests/test_setup/test_data/grid.zarr/.zattrs +2 -2
  16. roms_tools/tests/test_setup/test_data/grid.zarr/.zmetadata +2 -62
  17. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zattrs +2 -2
  18. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zmetadata +2 -62
  19. roms_tools/tests/test_setup/test_datasets.py +8 -18
  20. roms_tools/tests/test_setup/test_grid.py +9 -9
  21. roms_tools/tests/test_setup/test_initial_conditions.py +4 -7
  22. roms_tools/tests/test_setup/test_surface_forcing.py +56 -56
  23. roms_tools/tests/test_setup/test_tides.py +6 -10
  24. roms_tools/tests/test_setup/test_topography.py +2 -4
  25. roms_tools/tests/test_setup/test_vertical_coordinate.py +2 -6
  26. roms_tools/tests/test_utils.py +30 -30
  27. roms_tools/utils.py +6 -7
  28. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/METADATA +1 -1
  29. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/RECORD +32 -44
  30. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/.zarray +0 -22
  31. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/.zattrs +0 -8
  32. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/0.0 +0 -0
  33. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/.zarray +0 -22
  34. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/.zattrs +0 -8
  35. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/0.0 +0 -0
  36. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/.zarray +0 -22
  37. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/.zattrs +0 -8
  38. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/0.0 +0 -0
  39. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/.zarray +0 -22
  40. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/.zattrs +0 -8
  41. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/0.0 +0 -0
  42. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/LICENSE +0 -0
  43. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/WHEEL +0 -0
  44. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/top_level.txt +0 -0
roms_tools/_version.py CHANGED
@@ -1,2 +1,2 @@
  # Do not change! Do not track in version control!
- __version__ = "1.4.1"
+ __version__ = "1.4.2"
roms_tools/setup/boundary_forcing.py CHANGED
@@ -24,8 +24,7 @@ from pathlib import Path
24
24
 
25
25
  @dataclass(frozen=True, kw_only=True)
26
26
  class BoundaryForcing(ROMSToolsMixins):
27
- """
28
- Represents boundary forcing input data for ROMS.
27
+ """Represents boundary forcing input data for ROMS.
29
28
 
30
29
  Parameters
31
30
  ----------
@@ -38,21 +37,27 @@ class BoundaryForcing(ROMSToolsMixins):
38
37
  boundaries : Dict[str, bool], optional
39
38
  Dictionary specifying which boundaries are forced (south, east, north, west). Default is all True.
40
39
  source : Dict[str, Union[str, Path, List[Union[str, Path]]], bool]
41
- Dictionary specifying the source of the boundary forcing data:
42
- - "name" (str): Name of the data source (e.g., "GLORYS").
43
- - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). Can be a single string (with or without wildcards),
44
- a single Path object, or a list of strings or Path objects containing multiple files.
45
- - "climatology" (bool): Indicates if the data is climatology data. Defaults to False.
40
+ Dictionary specifying the source of the boundary forcing data. Keys include:
41
+
42
+ - "name" (str): Name of the data source (e.g., "GLORYS").
43
+ - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). This can be:
44
+
45
+ - A single string (with or without wildcards).
46
+ - A single Path object.
47
+ - A list of strings or Path objects containing multiple files.
48
+ - "climatology" (bool): Indicates if the data is climatology data. Defaults to False.
49
+
50
+ type : str
51
+ Specifies the type of forcing data. Options are:
52
+
53
+ - "physics": for physical atmospheric forcing.
54
+ - "bgc": for biogeochemical forcing.
55
+
46
56
  model_reference_date : datetime, optional
47
57
  Reference date for the model. Default is January 1, 2000.
48
58
  use_dask: bool, optional
49
59
  Indicates whether to use dask for processing. If True, data is processed with dask; if False, data is processed eagerly. Defaults to False.
50
60
 
51
- Attributes
52
- ----------
53
- ds : xr.Dataset
54
- Xarray Dataset containing the boundary forcing data.
55
-
56
61
  Examples
57
62
  --------
58
63
  >>> boundary_forcing = BoundaryForcing(
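For orientation, a minimal construction sketch based on the parameters documented above. The top-level import path, the Grid arguments, the GLORYS file path, and the grid/start_time/end_time keywords are illustrative assumptions, not values taken from this diff.

    from datetime import datetime
    from roms_tools import Grid, BoundaryForcing  # assumed top-level exports

    # Hypothetical grid; the Grid signature is assumed from the project documentation.
    grid = Grid(nx=100, ny=100, size_x=1800, size_y=2400,
                center_lon=-21, center_lat=61, rot=20)

    boundary_forcing = BoundaryForcing(
        grid=grid,
        start_time=datetime(2012, 1, 1),
        end_time=datetime(2012, 2, 1),
        boundaries={"south": True, "east": True, "north": True, "west": True},
        source={"name": "GLORYS", "path": "glorys_data.nc", "climatology": False},
        type="physics",  # or "bgc" for biogeochemical boundary forcing
        use_dask=True,
    )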
@@ -86,7 +91,7 @@ class BoundaryForcing(ROMSToolsMixins):
86
91
  def __post_init__(self):
87
92
 
88
93
  self._input_checks()
89
- lon, lat, angle, straddle = super().get_target_lon_lat()
94
+ lon, lat, angle, straddle = super()._get_target_lon_lat()
90
95
 
91
96
  data = self._get_data()
92
97
  data.choose_subdomain(
@@ -103,10 +108,10 @@ class BoundaryForcing(ROMSToolsMixins):
103
108
  vars_2d = []
104
109
  vars_3d = data.var_names.keys()
105
110
 
106
- data_vars = super().regrid_data(data, vars_2d, vars_3d, lon, lat)
111
+ data_vars = super()._regrid_data(data, vars_2d, vars_3d, lon, lat)
107
112
 
108
113
  if self.type == "physics":
109
- data_vars = super().process_velocities(data_vars, angle, "u", "v")
114
+ data_vars = super()._process_velocities(data_vars, angle, "u", "v")
110
115
  object.__setattr__(data, "data_vars", data_vars)
111
116
 
112
117
  d_meta = get_variable_metadata()
@@ -286,8 +291,7 @@ class BoundaryForcing(ROMSToolsMixins):
286
291
  return ds
287
292
 
288
293
  def _get_coordinates(self, direction, point):
289
- """
290
- Retrieve layer and interface depth coordinates for a specified grid boundary.
294
+ """Retrieve layer and interface depth coordinates for a specified grid boundary.
291
295
 
292
296
  This method extracts the layer depth and interface depth coordinates along
293
297
  a specified boundary (north, south, east, or west) and for a specified point
@@ -349,52 +353,55 @@ class BoundaryForcing(ROMSToolsMixins):
349
353
  time=0,
350
354
  layer_contours=False,
351
355
  ) -> None:
352
- """
353
- Plot the boundary forcing field for a given time-slice.
356
+ """Plot the boundary forcing field for a given time-slice.
354
357
 
355
358
  Parameters
356
359
  ----------
357
360
  varname : str
358
361
  The name of the boundary forcing field to plot. Options include:
359
- - "temp_{direction}": Potential temperature, where {direction} can be one of ["south", "east", "north", "west"].
360
- - "salt_{direction}": Salinity, where {direction} can be one of ["south", "east", "north", "west"].
361
- - "zeta_{direction}": Sea surface height, where {direction} can be one of ["south", "east", "north", "west"].
362
- - "u_{direction}": u-flux component, where {direction} can be one of ["south", "east", "north", "west"].
363
- - "v_{direction}": v-flux component, where {direction} can be one of ["south", "east", "north", "west"].
364
- - "ubar_{direction}": Vertically integrated u-flux component, where {direction} can be one of ["south", "east", "north", "west"].
365
- - "vbar_{direction}": Vertically integrated v-flux component, where {direction} can be one of ["south", "east", "north", "west"].
366
- - "PO4_{direction}": Dissolved Inorganic Phosphate (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
367
- - "NO3_{direction}": Dissolved Inorganic Nitrate (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
368
- - "SiO3_{direction}": Dissolved Inorganic Silicate (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
369
- - "NH4_{direction}": Dissolved Ammonia (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
370
- - "Fe_{direction}": Dissolved Inorganic Iron (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
371
- - "Lig_{direction}": Iron Binding Ligand (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
372
- - "O2_{direction}": Dissolved Oxygen (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
373
- - "DIC_{direction}": Dissolved Inorganic Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
374
- - "DIC_ALT_CO2_{direction}": Dissolved Inorganic Carbon, Alternative CO2 (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
375
- - "ALK_{direction}": Alkalinity (meq/m³), where {direction} can be one of ["south", "east", "north", "west"].
376
- - "ALK_ALT_CO2_{direction}": Alkalinity, Alternative CO2 (meq/m³), where {direction} can be one of ["south", "east", "north", "west"].
377
- - "DOC_{direction}": Dissolved Organic Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
378
- - "DON_{direction}": Dissolved Organic Nitrogen (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
379
- - "DOP_{direction}": Dissolved Organic Phosphorus (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
380
- - "DOPr_{direction}": Refractory Dissolved Organic Phosphorus (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
381
- - "DONr_{direction}": Refractory Dissolved Organic Nitrogen (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
382
- - "DOCr_{direction}": Refractory Dissolved Organic Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
383
- - "zooC_{direction}": Zooplankton Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
384
- - "spChl_{direction}": Small Phytoplankton Chlorophyll (mg/m³), where {direction} can be one of ["south", "east", "north", "west"].
385
- - "spC_{direction}": Small Phytoplankton Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
386
- - "spP_{direction}": Small Phytoplankton Phosphorous (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
387
- - "spFe_{direction}": Small Phytoplankton Iron (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
388
- - "spCaCO3_{direction}": Small Phytoplankton CaCO3 (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
389
- - "diatChl_{direction}": Diatom Chlorophyll (mg/m³), where {direction} can be one of ["south", "east", "north", "west"].
390
- - "diatC_{direction}": Diatom Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
391
- - "diatP_{direction}": Diatom Phosphorus (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
392
- - "diatFe_{direction}": Diatom Iron (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
393
- - "diatSi_{direction}": Diatom Silicate (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
394
- - "diazChl_{direction}": Diazotroph Chlorophyll (mg/m³), where {direction} can be one of ["south", "east", "north", "west"].
395
- - "diazC_{direction}": Diazotroph Carbon (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
396
- - "diazP_{direction}": Diazotroph Phosphorus (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
397
- - "diazFe_{direction}": Diazotroph Iron (mmol/m³), where {direction} can be one of ["south", "east", "north", "west"].
362
+
363
+ - "temp_{direction}": Potential temperature,
364
+ - "salt_{direction}": Salinity,
365
+ - "zeta_{direction}": Sea surface height,
366
+ - "u_{direction}": u-flux component,
367
+ - "v_{direction}": v-flux component,
368
+ - "ubar_{direction}": Vertically integrated u-flux component,
369
+ - "vbar_{direction}": Vertically integrated v-flux component,
370
+ - "PO4_{direction}": Dissolved Inorganic Phosphate (mmol/m³),
371
+ - "NO3_{direction}": Dissolved Inorganic Nitrate (mmol/m³),
372
+ - "SiO3_{direction}": Dissolved Inorganic Silicate (mmol/m³),
373
+ - "NH4_{direction}": Dissolved Ammonia (mmol/m³),
374
+ - "Fe_{direction}": Dissolved Inorganic Iron (mmol/m³),
375
+ - "Lig_{direction}": Iron Binding Ligand (mmol/m³),
376
+ - "O2_{direction}": Dissolved Oxygen (mmol/m³),
377
+ - "DIC_{direction}": Dissolved Inorganic Carbon (mmol/m³),
378
+ - "DIC_ALT_CO2_{direction}": Dissolved Inorganic Carbon, Alternative CO2 (mmol/m³),
379
+ - "ALK_{direction}": Alkalinity (meq/m³),
380
+ - "ALK_ALT_CO2_{direction}": Alkalinity, Alternative CO2 (meq/m³),
381
+ - "DOC_{direction}": Dissolved Organic Carbon (mmol/m³),
382
+ - "DON_{direction}": Dissolved Organic Nitrogen (mmol/m³),
383
+ - "DOP_{direction}": Dissolved Organic Phosphorus (mmol/m³),
384
+ - "DOPr_{direction}": Refractory Dissolved Organic Phosphorus (mmol/m³),
385
+ - "DONr_{direction}": Refractory Dissolved Organic Nitrogen (mmol/m³),
386
+ - "DOCr_{direction}": Refractory Dissolved Organic Carbon (mmol/m³),
387
+ - "zooC_{direction}": Zooplankton Carbon (mmol/m³),
388
+ - "spChl_{direction}": Small Phytoplankton Chlorophyll (mg/m³),
389
+ - "spC_{direction}": Small Phytoplankton Carbon (mmol/m³),
390
+ - "spP_{direction}": Small Phytoplankton Phosphorous (mmol/m³),
391
+ - "spFe_{direction}": Small Phytoplankton Iron (mmol/m³),
392
+ - "spCaCO3_{direction}": Small Phytoplankton CaCO3 (mmol/m³),
393
+ - "diatChl_{direction}": Diatom Chlorophyll (mg/m³),
394
+ - "diatC_{direction}": Diatom Carbon (mmol/m³),
395
+ - "diatP_{direction}": Diatom Phosphorus (mmol/m³),
396
+ - "diatFe_{direction}": Diatom Iron (mmol/m³),
397
+ - "diatSi_{direction}": Diatom Silicate (mmol/m³),
398
+ - "diazChl_{direction}": Diazotroph Chlorophyll (mg/m³),
399
+ - "diazC_{direction}": Diazotroph Carbon (mmol/m³),
400
+ - "diazP_{direction}": Diazotroph Phosphorus (mmol/m³),
401
+ - "diazFe_{direction}": Diazotroph Iron (mmol/m³),
402
+
403
+ where {direction} can be one of ["south", "east", "north", "west"].
404
+
398
405
  time : int, optional
399
406
  The time index to plot. Default is 0.
400
407
  layer_contours : bool, optional
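A usage sketch of the plotting interface documented above, continuing from the construction sketch earlier in this diff; the variable and direction names are illustrative, and any of the listed "{var}_{direction}" combinations could be substituted.

    # Potential temperature along the southern boundary at the first time index.
    boundary_forcing.plot("temp_south", time=0)

    # The u-flux component at the eastern boundary, with layer contours overlaid.
    boundary_forcing.plot("u_east", time=0, layer_contours=True)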
@@ -468,21 +475,22 @@ class BoundaryForcing(ROMSToolsMixins):
468
475
  def save(
469
476
  self, filepath: Union[str, Path], np_eta: int = None, np_xi: int = None
470
477
  ) -> None:
471
- """
472
- Save the boundary forcing fields to netCDF4 files.
478
+ """Save the boundary forcing fields to netCDF4 files.
473
479
 
474
480
  This method saves the dataset by grouping it into subsets based on the data frequency. The subsets are then written
475
481
  to one or more netCDF4 files. The filenames of the output files reflect the temporal coverage of the data.
476
482
 
477
483
  There are two modes of saving the dataset:
478
484
 
479
- 1. **Single File Mode (default)**:
480
- - If both `np_eta` and `np_xi` are `None`, the entire dataset, divided by temporal subsets, is saved as a single netCDF4 file
481
- with the base filename specified by `filepath.nc`.
485
+ 1. **Single File Mode (default)**:
482
486
 
483
- 2. **Partitioned Mode**:
484
- - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
485
- - Each spatial tile is saved as a separate netCDF4 file.
487
+ If both `np_eta` and `np_xi` are `None`, the entire dataset, divided by temporal subsets, is saved as a single netCDF4 file
488
+ with the base filename specified by `filepath.nc`.
489
+
490
+ 2. **Partitioned Mode**:
491
+
492
+ - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
493
+ - Each spatial tile is saved as a separate netCDF4 file.
486
494
 
487
495
  Parameters
488
496
  ----------
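A short sketch of the two saving modes described above, continuing from the earlier construction sketch; the output paths are hypothetical.

    # Single file mode: one netCDF4 file per temporal subset, named from the base path.
    boundary_forcing.save("output/bry_forcing")

    # Partitioned mode: the domain is split into 2 x 3 spatial tiles,
    # and each tile is written to its own netCDF4 file.
    boundary_forcing.save("output/bry_forcing", np_eta=2, np_xi=3)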
@@ -516,8 +524,8 @@ class BoundaryForcing(ROMSToolsMixins):
516
524
  return saved_filenames
517
525
 
518
526
  def to_yaml(self, filepath: Union[str, Path]) -> None:
519
- """
520
- Export the parameters of the class to a YAML file, including the version of roms-tools.
527
+ """Export the parameters of the class to a YAML file, including the version of
528
+ roms-tools.
521
529
 
522
530
  Parameters
523
531
  ----------
@@ -568,8 +576,7 @@ class BoundaryForcing(ROMSToolsMixins):
568
576
  def from_yaml(
569
577
  cls, filepath: Union[str, Path], use_dask: bool = False
570
578
  ) -> "BoundaryForcing":
571
- """
572
- Create an instance of the BoundaryForcing class from a YAML file.
579
+ """Create an instance of the BoundaryForcing class from a YAML file.
573
580
 
574
581
  Parameters
575
582
  ----------
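Taken together, to_yaml and from_yaml let a forcing setup be round-tripped through a YAML file; a minimal sketch with a hypothetical file name, continuing from the earlier construction sketch.

    # Export the parameters, including the roms-tools version, to YAML ...
    boundary_forcing.to_yaml("boundary_forcing.yaml")

    # ... and rebuild an equivalent object later, optionally with dask-backed processing.
    bf = BoundaryForcing.from_yaml("boundary_forcing.yaml", use_dask=True)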
roms_tools/setup/datasets.py CHANGED
@@ -18,8 +18,7 @@ from roms_tools.setup.download import download_correction_data
18
18
 
19
19
  @dataclass(frozen=True, kw_only=True)
20
20
  class Dataset:
21
- """
22
- Represents forcing data on original grid.
21
+ """Represents forcing data on original grid.
23
22
 
24
23
  Parameters
25
24
  ----------
@@ -131,8 +130,7 @@ class Dataset:
131
130
  self.post_process()
132
131
 
133
132
  def load_data(self) -> xr.Dataset:
134
- """
135
- Load dataset from the specified file.
133
+ """Load dataset from the specified file.
136
134
 
137
135
  Returns
138
136
  -------
@@ -246,8 +244,7 @@ class Dataset:
246
244
  return ds
247
245
 
248
246
  def clean_up(self, ds: xr.Dataset) -> xr.Dataset:
249
- """
250
- Dummy method to be overridden by child classes to clean up the dataset.
247
+ """Dummy method to be overridden by child classes to clean up the dataset.
251
248
 
252
249
  This method is intended as a placeholder and should be implemented in subclasses
253
250
  to provide specific functionality.
@@ -265,8 +262,7 @@ class Dataset:
265
262
  return ds
266
263
 
267
264
  def check_dataset(self, ds: xr.Dataset) -> None:
268
- """
269
- Check if the dataset contains the specified variables and dimensions.
265
+ """Check if the dataset contains the specified variables and dimensions.
270
266
 
271
267
  Parameters
272
268
  ----------
@@ -293,8 +289,8 @@ class Dataset:
293
289
  )
294
290
 
295
291
  def select_relevant_fields(self, ds) -> xr.Dataset:
296
- """
297
- Selects and returns a subset of the dataset containing only the variables specified in `self.var_names`.
292
+ """Selects and returns a subset of the dataset containing only the variables
293
+ specified in `self.var_names`.
298
294
 
299
295
  Parameters
300
296
  ----------
@@ -305,7 +301,6 @@ class Dataset:
305
301
  -------
306
302
  xr.Dataset
307
303
  A dataset containing only the variables specified in `self.var_names`.
308
-
309
304
  """
310
305
 
311
306
  for var in ds.data_vars:
@@ -315,8 +310,8 @@ class Dataset:
315
310
  return ds
316
311
 
317
312
  def add_time_info(self, ds: xr.Dataset) -> xr.Dataset:
318
- """
319
- Dummy method to be overridden by child classes to add time information to the dataset.
313
+ """Dummy method to be overridden by child classes to add time information to the
314
+ dataset.
320
315
 
321
316
  This method is intended as a placeholder and should be implemented in subclasses
322
317
  to provide specific functionality for adding time-related information to the dataset.
@@ -334,8 +329,7 @@ class Dataset:
334
329
  return ds
335
330
 
336
331
  def select_relevant_times(self, ds) -> xr.Dataset:
337
- """
338
- Select a subset of the dataset based on the specified time range.
332
+ """Select a subset of the dataset based on the specified time range.
339
333
 
340
334
  This method filters the dataset to include all records between `start_time` and `end_time`.
341
335
  Additionally, it ensures that one record at or before `start_time` and one record at or
@@ -467,8 +461,7 @@ class Dataset:
467
461
  def ensure_dimension_is_ascending(
468
462
  self, ds: xr.Dataset, dim="latitude"
469
463
  ) -> xr.Dataset:
470
- """
471
- Ensure that the specified dimension in the dataset is in ascending order.
464
+ """Ensure that the specified dimension in the dataset is in ascending order.
472
465
 
473
466
  If the values along the specified dimension are in descending order, this function reverses the order of the dimension to make it ascending.
474
467
 
@@ -486,7 +479,6 @@ class Dataset:
486
479
  A new `xarray.Dataset` with the specified dimension in ascending order.
487
480
  If the dimension was already in ascending order, the original dataset is returned unchanged.
488
481
  If the dimension was in descending order, the dataset is returned with the dimension reversed.
489
-
490
482
  """
491
483
  # Make sure that latitude is ascending
492
484
  diff = np.diff(ds[self.dim_names[dim]])
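The behaviour described above can be sketched in a few lines of plain xarray; this is an illustration of the idea, not the package's exact code.

    import numpy as np
    import xarray as xr

    def ensure_ascending(ds: xr.Dataset, dim: str) -> xr.Dataset:
        """Return ds with dim in ascending order, reversing it if it is descending."""
        diff = np.diff(ds[dim].values)
        if np.all(diff < 0):
            # Reverse the dimension so coordinate values increase monotonically.
            ds = ds.isel({dim: slice(None, None, -1)})
        return ds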
@@ -496,8 +488,7 @@ class Dataset:
496
488
  return ds
497
489
 
498
490
  def check_if_global(self, ds) -> bool:
499
- """
500
- Checks if the dataset covers the entire globe in the longitude dimension.
491
+ """Checks if the dataset covers the entire globe in the longitude dimension.
501
492
 
502
493
  This function calculates the mean difference between consecutive longitude values.
503
494
  It then checks if the difference between the first and last longitude values (plus 360 degrees)
@@ -508,7 +499,6 @@ class Dataset:
508
499
  -------
509
500
  bool
510
501
  True if the dataset covers the entire globe in the longitude dimension, False otherwise.
511
-
512
502
  """
513
503
  dlon_mean = (
514
504
  ds[self.dim_names["longitude"]].diff(dim=self.dim_names["longitude"]).mean()
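A standalone sketch of the global-coverage test described here; illustrative only, since the package keeps this logic inside the Dataset class and uses its own dimension-name mapping.

    import numpy as np
    import xarray as xr

    def is_global_in_longitude(ds: xr.Dataset, lon_dim: str = "longitude") -> bool:
        """True if the longitude axis wraps the full globe."""
        lon = ds[lon_dim]
        dlon_mean = lon.diff(dim=lon_dim).mean()
        # Gap between the first and last longitude values, measured across the wrap.
        wrap_gap = lon[0] + 360.0 - lon[-1]
        return bool(np.isclose(wrap_gap, dlon_mean))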
@@ -569,8 +559,8 @@ class Dataset:
569
559
  return ds_concatenated
570
560
 
571
561
  def post_process(self):
572
- """
573
- Placeholder method to be overridden by subclasses for dataset post-processing.
562
+ """Placeholder method to be overridden by subclasses for dataset post-
563
+ processing.
574
564
 
575
565
  Returns
576
566
  -------
@@ -582,10 +572,9 @@ class Dataset:
582
572
  def choose_subdomain(
583
573
  self, latitude_range, longitude_range, margin, straddle, return_subdomain=False
584
574
  ):
585
- """
586
- Selects a subdomain from the xarray Dataset based on specified latitude and longitude ranges,
587
- extending the selection by a specified margin. Handles longitude conversions to accommodate different
588
- longitude ranges.
575
+ """Selects a subdomain from the xarray Dataset based on specified latitude and
576
+ longitude ranges, extending the selection by a specified margin. Handles
577
+ longitude conversions to accommodate different longitude ranges.
589
578
 
590
579
  Parameters
591
580
  ----------
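A usage sketch based on the signature above; the latitude/longitude ranges and margin are illustrative, and `data` stands for any Dataset instance.

    # Select a sub-region with a 2-degree margin around the target domain.
    sub = data.choose_subdomain(
        latitude_range=(30.0, 45.0),
        longitude_range=(-80.0, -60.0),
        margin=2.0,
        straddle=False,          # True if the target grid straddles the dateline
        return_subdomain=True,   # return the subset rather than updating in place
    )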
@@ -679,8 +668,7 @@ class Dataset:
679
668
 
680
669
  @dataclass(frozen=True, kw_only=True)
681
670
  class TPXODataset(Dataset):
682
- """
683
- Represents tidal data on the original grid from the TPXO dataset.
671
+ """Represents tidal data on the original grid from the TPXO dataset.
684
672
 
685
673
  Parameters
686
674
  ----------
@@ -732,8 +720,8 @@ class TPXODataset(Dataset):
732
720
  reference_date: datetime = datetime(1992, 1, 1)
733
721
 
734
722
  def clean_up(self, ds: xr.Dataset) -> xr.Dataset:
735
- """
736
- Clean up and standardize the dimensions and coordinates of the dataset for further processing.
723
+ """Clean up and standardize the dimensions and coordinates of the dataset for
724
+ further processing.
737
725
 
738
726
  This method performs the following operations:
739
727
  - Assigns new coordinate variables for 'omega', 'longitude', and 'latitude' based on existing dataset variables.
@@ -782,8 +770,7 @@ class TPXODataset(Dataset):
782
770
  return ds
783
771
 
784
772
  def check_number_constituents(self, ntides: int):
785
- """
786
- Checks if the number of constituents in the dataset is at least `ntides`.
773
+ """Checks if the number of constituents in the dataset is at least `ntides`.
787
774
 
788
775
  Parameters
789
776
  ----------
@@ -801,8 +788,8 @@ class TPXODataset(Dataset):
801
788
  )
802
789
 
803
790
  def post_process(self):
804
- """
805
- Apply a depth-based mask to the dataset, ensuring only positive depths are retained.
791
+ """Apply a depth-based mask to the dataset, ensuring only positive depths are
792
+ retained.
806
793
 
807
794
  This method checks if the 'depth' variable is present in the dataset. If found, a mask is created where
808
795
  depths greater than 0 are considered valid (mask value of 1). This mask is applied to all data variables
@@ -826,8 +813,7 @@ class TPXODataset(Dataset):
826
813
 
827
814
  @dataclass(frozen=True, kw_only=True)
828
815
  class GLORYSDataset(Dataset):
829
- """
830
- Represents GLORYS data on original grid.
816
+ """Represents GLORYS data on original grid.
831
817
 
832
818
  Parameters
833
819
  ----------
@@ -873,8 +859,8 @@ class GLORYSDataset(Dataset):
873
859
  climatology: Optional[bool] = False
874
860
 
875
861
  def post_process(self):
876
- """
877
- Apply a mask to the dataset based on the 'zeta' variable, with 0 where 'zeta' is NaN.
862
+ """Apply a mask to the dataset based on the 'zeta' variable, with 0 where 'zeta'
863
+ is NaN.
878
864
 
879
865
  This method creates a mask based on the
880
866
  first time step (time=0) of 'zeta'. The mask has 1 for valid data and 0 where 'zeta' is NaN. This mask is applied
@@ -885,7 +871,6 @@ class GLORYSDataset(Dataset):
885
871
  -------
886
872
  None
887
873
  The dataset is modified in-place by applying the mask to each variable.
888
-
889
874
  """
890
875
 
891
876
  mask = xr.where(
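The masking logic described here can be sketched in plain xarray as follows; this illustrates the documented behaviour rather than the exact statements in the package, and `ds` stands for the loaded GLORYS dataset with its variables already renamed.

    import xarray as xr

    # 1 where zeta is valid at the first time step, 0 where it is NaN.
    mask = xr.where(ds["zeta"].isel(time=0).isnull(), 0, 1)

    # Apply the mask to every data variable so invalid (land) points are zeroed out.
    for var in ds.data_vars:
        ds[var] = ds[var] * mask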
@@ -902,8 +887,7 @@ class GLORYSDataset(Dataset):
902
887
 
903
888
  @dataclass(frozen=True, kw_only=True)
904
889
  class CESMDataset(Dataset):
905
- """
906
- Represents CESM data on original grid.
890
+ """Represents CESM data on original grid.
907
891
 
908
892
  Parameters
909
893
  ----------
@@ -929,8 +913,7 @@ class CESMDataset(Dataset):
929
913
 
930
914
  # overwrite clean_up method from parent class
931
915
  def clean_up(self, ds: xr.Dataset) -> xr.Dataset:
932
- """
933
- Ensure the dataset's time dimension is correctly defined and standardized.
916
+ """Ensure the dataset's time dimension is correctly defined and standardized.
934
917
 
935
918
  This method verifies that the time dimension exists in the dataset and assigns it appropriately. If the "time" dimension is missing, the method attempts to assign an existing "time" or "month" dimension. If neither exists, it expands the dataset to include a "time" dimension with a size of one.
936
919
 
@@ -938,7 +921,6 @@ class CESMDataset(Dataset):
938
921
  -------
939
922
  ds : xr.Dataset
940
923
  The xarray Dataset with the correct time dimension assigned or added.
941
-
942
924
  """
943
925
 
944
926
  if "time" not in self.dim_names:
@@ -954,8 +936,8 @@ class CESMDataset(Dataset):
954
936
  return ds
955
937
 
956
938
  def add_time_info(self, ds: xr.Dataset) -> xr.Dataset:
957
- """
958
- Adds time information to the dataset based on the climatology flag and dimension names.
939
+ """Adds time information to the dataset based on the climatology flag and
940
+ dimension names.
959
941
 
960
942
  This method processes the dataset to include time information according to the climatology
961
943
  setting. If the dataset represents climatology data and the time dimension is labeled as
@@ -991,8 +973,7 @@ class CESMDataset(Dataset):
991
973
 
992
974
  @dataclass(frozen=True, kw_only=True)
993
975
  class CESMBGCDataset(CESMDataset):
994
- """
995
- Represents CESM BGC data on original grid.
976
+ """Represents CESM BGC data on original grid.
996
977
 
997
978
  Parameters
998
979
  ----------
@@ -1115,8 +1096,7 @@ class CESMBGCDataset(CESMDataset):
1115
1096
 
1116
1097
  @dataclass(frozen=True, kw_only=True)
1117
1098
  class CESMBGCSurfaceForcingDataset(CESMDataset):
1118
- """
1119
- Represents CESM BGC surface forcing data on original grid.
1099
+ """Represents CESM BGC surface forcing data on original grid.
1120
1100
 
1121
1101
  Parameters
1122
1102
  ----------
@@ -1161,8 +1141,7 @@ class CESMBGCSurfaceForcingDataset(CESMDataset):
1161
1141
  climatology: Optional[bool] = False
1162
1142
 
1163
1143
  def post_process(self):
1164
- """
1165
- Perform post-processing on the dataset to remove specific variables.
1144
+ """Perform post-processing on the dataset to remove specific variables.
1166
1145
 
1167
1146
  This method checks if the variable "z_t" exists in the dataset. If it does,
1168
1147
  the variable is removed from the dataset. The modified dataset is then
@@ -1189,8 +1168,7 @@ class CESMBGCSurfaceForcingDataset(CESMDataset):
1189
1168
 
1190
1169
  @dataclass(frozen=True, kw_only=True)
1191
1170
  class ERA5Dataset(Dataset):
1192
- """
1193
- Represents ERA5 data on original grid.
1171
+ """Represents ERA5 data on original grid.
1194
1172
 
1195
1173
  Parameters
1196
1174
  ----------
@@ -1301,8 +1279,9 @@ class ERA5Dataset(Dataset):
1301
1279
 
1302
1280
  @dataclass(frozen=True, kw_only=True)
1303
1281
  class ERA5Correction(Dataset):
1304
- """
1305
- Global dataset to correct ERA5 radiation. The dataset contains multiplicative correction factors for the ERA5 shortwave radiation, obtained by comparing the COREv2 climatology to the ERA5 climatology.
1282
+ """Global dataset to correct ERA5 radiation. The dataset contains multiplicative
1283
+ correction factors for the ERA5 shortwave radiation, obtained by comparing the
1284
+ COREv2 climatology to the ERA5 climatology.
1306
1285
 
1307
1286
  Parameters
1308
1287
  ----------
@@ -1352,8 +1331,8 @@ class ERA5Correction(Dataset):
1352
1331
  super().__post_init__()
1353
1332
 
1354
1333
  def choose_subdomain(self, coords, straddle: bool):
1355
- """
1356
- Converts longitude values in the dataset if necessary and selects a subdomain based on the specified coordinates.
1334
+ """Converts longitude values in the dataset if necessary and selects a subdomain
1335
+ based on the specified coordinates.
1357
1336
 
1358
1337
  This method converts longitude values between different ranges if required and then extracts a subset of the
1359
1338
  dataset according to the given coordinates. It updates the dataset in place to reflect the selected subdomain.
roms_tools/setup/download.py CHANGED
@@ -53,8 +53,7 @@ pup_test_data = pooch.create(
53
53
 
54
54
 
55
55
  def fetch_topo(topography_source: str) -> xr.Dataset:
56
- """
57
- Load the global topography data as an xarray Dataset.
56
+ """Load the global topography data as an xarray Dataset.
58
57
 
59
58
  Parameters
60
59
  ----------
@@ -79,8 +78,7 @@ def fetch_topo(topography_source: str) -> xr.Dataset:
79
78
 
80
79
 
81
80
  def download_correction_data(filename: str) -> str:
82
- """
83
- Download the correction data file.
81
+ """Download the correction data file.
84
82
 
85
83
  Parameters
86
84
  ----------
@@ -100,8 +98,7 @@ def download_correction_data(filename: str) -> str:
100
98
 
101
99
 
102
100
  def download_test_data(filename: str) -> str:
103
- """
104
- Download the test data file.
101
+ """Download the test data file.
105
102
 
106
103
  Parameters
107
104
  ----------
roms_tools/setup/fill.py CHANGED
@@ -6,9 +6,8 @@ from scipy import sparse
6
6
 
7
7
  class LateralFill:
8
8
  def __init__(self, mask, dims, tol=1.0e-4):
9
- """
10
- Initializes the LateralFill class, which fills NaN values in a DataArray
11
- by iteratively solving a Poisson equation using a lateral diffusion approach.
9
+ """Initializes the LateralFill class, which fills NaN values in a DataArray by
10
+ iteratively solving a Poisson equation using a lateral diffusion approach.
12
11
 
13
12
  Parameters
14
13
  ----------
@@ -51,8 +50,7 @@ class LateralFill:
51
50
  self.tol = tol
52
51
 
53
52
  def apply(self, var):
54
- """
55
- Fills NaN values in an xarray DataArray using iterative lateral diffusion.
53
+ """Fills NaN values in an xarray DataArray using iterative lateral diffusion.
56
54
 
57
55
  Parameters
58
56
  ----------
@@ -65,7 +63,6 @@ class LateralFill:
65
63
  var_filled : xarray.DataArray
66
64
  A DataArray with NaN values filled by iterative smoothing, while preserving
67
65
  non-NaN values.
68
-
69
66
  """
70
67
  # Apply fill to anomaly field
71
68
  mean = var.mean(dim=self.dims, skipna=True)
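A usage sketch of the class, assuming a ROMS-style land/sea mask and dimension names (both illustrative).

    # mask: 1 over ocean, 0 over land; var: an xarray.DataArray with NaNs over land.
    filler = LateralFill(mask, dims=["eta_rho", "xi_rho"], tol=1.0e-4)
    filled = filler.apply(var)  # NaNs replaced by laterally diffused values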
@@ -96,11 +93,9 @@ class LateralFill:
96
93
 
97
94
 
98
95
  def _lateral_fill_np_array(x0, b, ml, tol=1.0e-4):
99
- """
100
- Fills all NaN values in a 2D NumPy array using an iterative solver,
101
- while preserving the existing non-NaN values.
102
- The filling process uses an AMG solver to efficiently perform smoothing
103
- based on the Laplace operator.
96
+ """Fills all NaN values in a 2D NumPy array using an iterative solver, while
97
+ preserving the existing non-NaN values. The filling process uses an AMG solver to
98
+ efficiently perform smoothing based on the Laplace operator.
104
99
 
105
100
  Parameters
106
101
  ----------
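For readers unfamiliar with the technique, here is a small self-contained sketch of the same idea: pin the known cells as Dirichlet data, assemble a 5-point Laplacian for the unknown cells, and solve with an algebraic-multigrid hierarchy from pyamg. It mirrors the approach described in the docstring rather than the module's exact implementation.

    import numpy as np
    import pyamg
    from scipy import sparse

    # Small 2D field with an interior block of NaNs to be filled via Laplace's equation.
    n = 20
    field = np.fromfunction(lambda i, j: np.sin(i / 3.0) + np.cos(j / 4.0), (n, n))
    field[5:12, 6:14] = np.nan
    known = ~np.isnan(field)

    # Assemble the system: identity rows pin known cells, 5-point Laplacian rows
    # force each unknown cell toward the average of its neighbours.
    A = sparse.lil_matrix((n * n, n * n))
    b = np.zeros(n * n)
    for i in range(n):
        for j in range(n):
            k = i * n + j
            if known[i, j]:
                A[k, k] = 1.0
                b[k] = field[i, j]
            else:
                A[k, k] = 4.0
                # The NaN block is interior, so all four neighbours exist.
                for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
                    A[k, (i + di) * n + (j + dj)] = -1.0

    # Ruge-Stuben AMG with a Krylov accelerator handles the mildly
    # nonsymmetric system produced by the pinned rows.
    ml = pyamg.ruge_stuben_solver(A.tocsr())
    filled = ml.solve(b, tol=1.0e-8, accel="bicgstab").reshape(n, n)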
@@ -136,8 +131,7 @@ def _lateral_fill_np_array(x0, b, ml, tol=1.0e-4):
136
131
 
137
132
 
138
133
  def laplacian(grid, mask, dtype=float, format=None):
139
- """
140
- Return a sparse matrix for solving a 2-dimensional Poisson problem.
134
+ """Return a sparse matrix for solving a 2-dimensional Poisson problem.
141
135
 
142
136
  This function generates a finite difference approximation of the Laplacian operator
143
137
  on a 2-dimensional grid with unit grid spacing and Dirichlet boundary conditions.
@@ -164,7 +158,6 @@ def laplacian(grid, mask, dtype=float, format=None):
164
158
  sparse matrix
165
159
  A sparse matrix representing the finite difference Laplacian operator for
166
160
  the given grid.
167
-
168
161
  """
169
162
  grid = tuple(grid)
170
163
 
@@ -180,8 +173,7 @@ def laplacian(grid, mask, dtype=float, format=None):
180
173
 
181
174
 
182
175
  def stencil_grid_mod(S, grid, msk, dtype=None, format=None):
183
- """
184
- Construct a sparse matrix from a local matrix stencil.
176
+ """Construct a sparse matrix from a local matrix stencil.
185
177
 
186
178
  This function generates a sparse matrix that represents an operator
187
179
  by applying the given stencil `S` at each vertex of a regular grid with