roms-tools 1.4.1__py3-none-any.whl → 1.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. roms_tools/_version.py +1 -1
  2. roms_tools/setup/boundary_forcing.py +77 -70
  3. roms_tools/setup/datasets.py +38 -59
  4. roms_tools/setup/download.py +3 -6
  5. roms_tools/setup/fill.py +8 -16
  6. roms_tools/setup/grid.py +74 -113
  7. roms_tools/setup/initial_conditions.py +43 -36
  8. roms_tools/setup/mixins.py +10 -14
  9. roms_tools/setup/surface_forcing.py +35 -33
  10. roms_tools/setup/tides.py +37 -41
  11. roms_tools/setup/topography.py +9 -17
  12. roms_tools/setup/utils.py +19 -40
  13. roms_tools/setup/vertical_coordinate.py +4 -6
  14. roms_tools/tests/test_setup/test_boundary_forcing.py +6 -13
  15. roms_tools/tests/test_setup/test_data/grid.zarr/.zattrs +2 -2
  16. roms_tools/tests/test_setup/test_data/grid.zarr/.zmetadata +2 -62
  17. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zattrs +2 -2
  18. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/.zmetadata +2 -62
  19. roms_tools/tests/test_setup/test_datasets.py +8 -18
  20. roms_tools/tests/test_setup/test_grid.py +9 -9
  21. roms_tools/tests/test_setup/test_initial_conditions.py +4 -7
  22. roms_tools/tests/test_setup/test_surface_forcing.py +56 -56
  23. roms_tools/tests/test_setup/test_tides.py +6 -10
  24. roms_tools/tests/test_setup/test_topography.py +2 -4
  25. roms_tools/tests/test_setup/test_vertical_coordinate.py +2 -6
  26. roms_tools/tests/test_utils.py +30 -30
  27. roms_tools/utils.py +6 -7
  28. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/METADATA +1 -1
  29. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/RECORD +32 -44
  30. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/.zarray +0 -22
  31. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/.zattrs +0 -8
  32. roms_tools/tests/test_setup/test_data/grid.zarr/lat_psi/0.0 +0 -0
  33. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/.zarray +0 -22
  34. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/.zattrs +0 -8
  35. roms_tools/tests/test_setup/test_data/grid.zarr/lon_psi/0.0 +0 -0
  36. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/.zarray +0 -22
  37. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/.zattrs +0 -8
  38. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lat_psi/0.0 +0 -0
  39. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/.zarray +0 -22
  40. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/.zattrs +0 -8
  41. roms_tools/tests/test_setup/test_data/grid_that_straddles_dateline.zarr/lon_psi/0.0 +0 -0
  42. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/LICENSE +0 -0
  43. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/WHEEL +0 -0
  44. {roms_tools-1.4.1.dist-info → roms_tools-1.4.2.dist-info}/top_level.txt +0 -0
roms_tools/setup/surface_forcing.py CHANGED
@@ -28,8 +28,7 @@ from pathlib import Path
 
  @dataclass(frozen=True, kw_only=True)
  class SurfaceForcing(ROMSToolsMixins):
- """
- Represents surface forcing input data for ROMS.
+ """Represents surface forcing input data for ROMS.
 
  Parameters
  ----------
@@ -40,14 +39,22 @@ class SurfaceForcing(ROMSToolsMixins):
  end_time : datetime
  End time of the desired surface forcing data.
  source : Dict[str, Union[str, Path, List[Union[str, Path]]], bool]
- Dictionary specifying the source of the surface forcing data:
- - "name" (str): Name of the data source (e.g., "ERA5").
- - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). Can be a single string (with or without wildcards),
- a single Path object, or a list of strings or Path objects containing multiple files.
- - "climatology" (bool): Indicates if the data is climatology data. Defaults to False.
+ Dictionary specifying the source of the surface forcing data. Keys include:
+
+ - "name" (str): Name of the data source (e.g., "ERA5").
+ - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). This can be:
+
+ - A single string (with or without wildcards).
+ - A single Path object.
+ - A list of strings or Path objects containing multiple files.
+ - "climatology" (bool): Indicates if the data is climatology data. Defaults to False.
+
  type : str
- Specifies the type of forcing data, either "physics" for physical
- atmospheric forcing or "bgc" for biogeochemical forcing.
+ Specifies the type of forcing data. Options are:
+
+ - "physics": for physical atmospheric forcing.
+ - "bgc": for biogeochemical forcing.
+
  correct_radiation : bool
  Whether to correct shortwave radiation. Default is False.
  use_coarse_grid: bool
@@ -57,12 +64,6 @@ class SurfaceForcing(ROMSToolsMixins):
  use_dask: bool, optional
  Indicates whether to use dask for processing. If True, data is processed with dask; if False, data is processed eagerly. Defaults to False.
 
- Attributes
- ----------
- ds : xr.Dataset
- Xarray Dataset containing the surface forcing data.
-
-
  Examples
  --------
  >>> surface_forcing = SurfaceForcing(
@@ -90,7 +91,7 @@ class SurfaceForcing(ROMSToolsMixins):
  def __post_init__(self):
 
  self._input_checks()
- lon, lat, angle, straddle = super().get_target_lon_lat(self.use_coarse_grid)
+ lon, lat, angle, straddle = super()._get_target_lon_lat(self.use_coarse_grid)
  object.__setattr__(self, "target_lon", lon)
  object.__setattr__(self, "target_lat", lat)
 
@@ -107,10 +108,10 @@ class SurfaceForcing(ROMSToolsMixins):
  vars_2d = data.var_names.keys()
  vars_3d = []
 
- data_vars = super().regrid_data(data, vars_2d, vars_3d, lon, lat)
+ data_vars = super()._regrid_data(data, vars_2d, vars_3d, lon, lat)
 
  if self.type == "physics":
- data_vars = super().process_velocities(
+ data_vars = super()._process_velocities(
  data_vars, angle, "uwnd", "vwnd", interpolate=False
  )
  if self.correct_radiation:
@@ -136,7 +137,7 @@ class SurfaceForcing(ROMSToolsMixins):
  vars_2d = ["swr_corr"]
  vars_3d = []
  # spatial interpolation
- data_vars_corr = super().regrid_data(
+ data_vars_corr = super()._regrid_data(
  correction_data, vars_2d, vars_3d, lon, lat
  )
  # temporal interpolation
@@ -333,13 +334,13 @@ class SurfaceForcing(ROMSToolsMixins):
  return ds
 
  def plot(self, varname, time=0) -> None:
- """
- Plot the specified surface forcing field for a given time slice.
+ """Plot the specified surface forcing field for a given time slice.
 
  Parameters
  ----------
  varname : str
  The name of the surface forcing field to plot. Options include:
+
  - "uwnd": 10 meter wind in x-direction.
  - "vwnd": 10 meter wind in y-direction.
  - "swrad": Downward short-wave (solar) radiation.
@@ -353,6 +354,7 @@ class SurfaceForcing(ROMSToolsMixins):
  - "dust": Dust decomposition.
  - "nox": NOx decomposition.
  - "nhy": NHy decomposition.
+
  time : int, optional
  The time index to plot. Default is 0, which corresponds to the first
  time slice.
@@ -416,21 +418,22 @@ class SurfaceForcing(ROMSToolsMixins):
  def save(
  self, filepath: Union[str, Path], np_eta: int = None, np_xi: int = None
  ) -> None:
- """
- Save the surface forcing fields to netCDF4 files.
+ """Save the surface forcing fields to netCDF4 files.
 
  This method saves the dataset by grouping it into subsets based on the data frequency. The subsets are then written
  to one or more netCDF4 files. The filenames of the output files reflect the temporal coverage of the data.
 
  There are two modes of saving the dataset:
 
- 1. **Single File Mode (default)**:
- - If both `np_eta` and `np_xi` are `None`, the entire dataset, divided by temporal subsets, is saved as a single netCDF4 file
- with the base filename specified by `filepath.nc`.
+ 1. **Single File Mode (default)**:
 
- 2. **Partitioned Mode**:
- - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
- - Each spatial tile is saved as a separate netCDF4 file.
+ If both `np_eta` and `np_xi` are `None`, the entire dataset, divided by temporal subsets, is saved as a single netCDF4 file
+ with the base filename specified by `filepath.nc`.
+
+ 2. **Partitioned Mode**:
+
+ - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
+ - Each spatial tile is saved as a separate netCDF4 file.
 
  Parameters
  ----------
@@ -464,8 +467,8 @@ class SurfaceForcing(ROMSToolsMixins):
  return saved_filenames
 
  def to_yaml(self, filepath: Union[str, Path]) -> None:
- """
- Export the parameters of the class to a YAML file, including the version of roms-tools.
+ """Export the parameters of the class to a YAML file, including the version of
+ roms-tools.
 
  Parameters
  ----------
@@ -520,8 +523,7 @@ class SurfaceForcing(ROMSToolsMixins):
  def from_yaml(
  cls, filepath: Union[str, Path], use_dask: bool = False
  ) -> "SurfaceForcing":
- """
- Create an instance of the SurfaceForcing class from a YAML file.
+ """Create an instance of the SurfaceForcing class from a YAML file.
 
  Parameters
  ----------
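The revised SurfaceForcing docstring above documents the `source` dictionary, the `type` options, and the two save modes. A minimal usage sketch based on that docstring follows; the top-level import path, the Grid arguments, and the data paths are illustrative assumptions, not taken from this diff.

```python
# Hedged sketch of the SurfaceForcing API as documented in the docstring above.
# The import location, Grid arguments, and file paths are assumptions.
from datetime import datetime
from roms_tools import Grid, SurfaceForcing  # assumed public import path

grid = Grid(nx=100, ny=100, size_x=1800, size_y=2400,
            center_lon=-21, center_lat=61, rot=20)  # hypothetical grid parameters

surface_forcing = SurfaceForcing(
    grid=grid,
    start_time=datetime(2022, 1, 1),
    end_time=datetime(2022, 1, 2),
    source={"name": "ERA5", "path": "era5_*.nc", "climatology": False},  # keys per the docstring
    type="physics",          # or "bgc" for biogeochemical forcing
    correct_radiation=True,
    use_dask=True,
)

surface_forcing.save("surface_forcing")                     # single-file mode
surface_forcing.save("surface_forcing", np_eta=2, np_xi=3)  # partitioned into spatial tiles
```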
roms_tools/setup/tides.py CHANGED
@@ -24,18 +24,22 @@ from pathlib import Path
 
  @dataclass(frozen=True, kw_only=True)
  class TidalForcing(ROMSToolsMixins):
- """
- Represents tidal forcing data used in ocean modeling.
+ """Represents tidal forcing data used in ocean modeling.
 
  Parameters
  ----------
  grid : Grid
  The grid object representing the ROMS grid associated with the tidal forcing data.
  source : Dict[str, Union[str, Path, List[Union[str, Path]]]]
- Dictionary specifying the source of the tidal data:
- - "name" (str): Name of the data source (e.g., "TPXO").
- - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). Can be a single string (with or without wildcards),
- a single Path object, or a list of strings or Path objects containing multiple files.
+ Dictionary specifying the source of the tidal data. Keys include:
+
+ - "name" (str): Name of the data source (e.g., "TPXO").
+ - "path" (Union[str, Path, List[Union[str, Path]]]): The path to the raw data file(s). This can be:
+
+ - A single string (with or without wildcards).
+ - A single Path object.
+ - A list of strings or Path objects containing multiple files.
+
  ntides : int, optional
  Number of constituents to consider. Maximum number is 14. Default is 10.
  allan_factor : float, optional
@@ -45,11 +49,6 @@ class TidalForcing(ROMSToolsMixins):
  use_dask: bool, optional
  Indicates whether to use dask for processing. If True, data is processed with dask; if False, data is processed eagerly. Defaults to False.
 
- Attributes
- ----------
- ds : xr.Dataset
- The xarray Dataset containing the tidal forcing data.
-
  Examples
  --------
  >>> tidal_forcing = TidalForcing(
@@ -69,7 +68,7 @@ class TidalForcing(ROMSToolsMixins):
  def __post_init__(self):
 
  self._input_checks()
- lon, lat, angle, straddle = super().get_target_lon_lat()
+ lon, lat, angle, straddle = super()._get_target_lon_lat()
 
  data = self._get_data()
 
@@ -98,12 +97,12 @@ class TidalForcing(ROMSToolsMixins):
  ]
  vars_3d = []
 
- data_vars = super().regrid_data(data, vars_2d, vars_3d, lon, lat)
+ data_vars = super()._regrid_data(data, vars_2d, vars_3d, lon, lat)
 
- data_vars = super().process_velocities(
+ data_vars = super()._process_velocities(
  data_vars, angle, "u_Re", "v_Re", interpolate=False
  )
- data_vars = super().process_velocities(
+ data_vars = super()._process_velocities(
  data_vars, angle, "u_Im", "v_Im", interpolate=False
  )
 
@@ -180,13 +179,13 @@ class TidalForcing(ROMSToolsMixins):
  return ds
 
  def plot(self, varname, ntides=0) -> None:
- """
- Plot the specified tidal forcing variable for a given tidal constituent.
+ """Plot the specified tidal forcing variable for a given tidal constituent.
 
  Parameters
  ----------
  varname : str
  The tidal forcing variable to plot. Options include:
+
  - "ssh_Re": Real part of tidal elevation.
  - "ssh_Im": Imaginary part of tidal elevation.
  - "pot_Re": Real part of tidal potential.
@@ -195,6 +194,7 @@ class TidalForcing(ROMSToolsMixins):
  - "u_Im": Imaginary part of tidal velocity in the x-direction.
  - "v_Re": Real part of tidal velocity in the y-direction.
  - "v_Im": Imaginary part of tidal velocity in the y-direction.
+
  ntides : int, optional
  The index of the tidal constituent to plot. Default is 0, which corresponds
  to the first constituent.
@@ -258,17 +258,19 @@ class TidalForcing(ROMSToolsMixins):
  def save(
  self, filepath: Union[str, Path], np_eta: int = None, np_xi: int = None
  ) -> None:
- """
- Save the tidal forcing information to a netCDF4 file.
+ """Save the tidal forcing information to a netCDF4 file.
 
  This method supports saving the dataset in two modes:
 
- 1. **Single File Mode (default)**:
- - If both `np_eta` and `np_xi` are `None`, the entire dataset is saved as a single file at the specified `filepath.nc`.
+ 1. **Single File Mode (default)**:
+
+ If both `np_eta` and `np_xi` are `None`, the entire dataset is saved as a single netCDF4 file
+ with the base filename specified by `filepath.nc`.
 
- 2. **Partitioned Mode**:
- - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
- - The files are saved as `filepath.0.nc`, `filepath.1.nc`, ..., where the numbering corresponds to the partition index.
+ 2. **Partitioned Mode**:
+
+ - If either `np_eta` or `np_xi` is specified, the dataset is divided into spatial tiles along the eta-axis and xi-axis.
+ - Each spatial tile is saved as a separate netCDF4 file.
 
  Parameters
  ----------
@@ -302,8 +304,8 @@ class TidalForcing(ROMSToolsMixins):
  return saved_filenames
 
  def to_yaml(self, filepath: Union[str, Path]) -> None:
- """
- Export the parameters of the class to a YAML file, including the version of roms-tools.
+ """Export the parameters of the class to a YAML file, including the version of
+ roms-tools.
 
  Parameters
  ----------
@@ -351,8 +353,7 @@ class TidalForcing(ROMSToolsMixins):
  def from_yaml(
  cls, filepath: Union[str, Path], use_dask: bool = False
  ) -> "TidalForcing":
- """
- Create an instance of the TidalForcing class from a YAML file.
+ """Create an instance of the TidalForcing class from a YAML file.
 
  Parameters
  ----------
@@ -400,10 +401,10 @@ class TidalForcing(ROMSToolsMixins):
  return cls(grid=grid, **tidal_forcing_params, use_dask=use_dask)
 
  def _correct_tides(self, data):
- """
- Apply tidal corrections to the dataset.
- This method corrects the dataset for equilibrium tides, self-attraction and loading (SAL) effects, and
- adjusts phases and amplitudes of tidal elevations and transports using Egbert's correction.
+ """Apply tidal corrections to the dataset. This method corrects the dataset for
+ equilibrium tides, self-attraction and loading (SAL) effects, and adjusts phases
+ and amplitudes of tidal elevations and transports using Egbert's correction.
+
  Parameters
  ----------
  data : Dataset
@@ -460,8 +461,7 @@ class TidalForcing(ROMSToolsMixins):
 
 
  def modified_julian_days(year, month, day, hour=0):
- """
- Calculate the Modified Julian Day (MJD) for a given date and time.
+ """Calculate the Modified Julian Day (MJD) for a given date and time.
 
  The Modified Julian Day (MJD) is a modified Julian day count starting from
  November 17, 1858 AD. It is commonly used in astronomy and geodesy.
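The docstring above defines the Modified Julian Day as a day count from November 17, 1858. A quick stand-alone check of that definition (an illustration, not the package's `modified_julian_days` function):

```python
# Stand-alone illustration of the MJD definition quoted above
# (days elapsed since 1858-11-17 00:00); not the package function.
from datetime import datetime

def mjd(dt: datetime) -> float:
    delta = dt - datetime(1858, 11, 17)
    return delta.days + delta.seconds / 86400.0

print(mjd(datetime(2000, 1, 1)))  # 51544.0
```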
@@ -519,9 +519,8 @@ def modified_julian_days(year, month, day, hour=0):
 
 
  def egbert_correction(date):
- """
- Correct phases and amplitudes for real-time runs using parts of the
- post-processing code from Egbert's & Erofeeva's (OSU) TPXO model.
+ """Correct phases and amplitudes for real-time runs using parts of the post-
+ processing code from Egbert's & Erofeeva's (OSU) TPXO model.
 
  Parameters
  ----------
@@ -541,7 +540,6 @@ def egbert_correction(date):
  ----------
  - Egbert, G.D., and S.Y. Erofeeva. "Efficient inverse modeling of barotropic ocean
  tides." Journal of Atmospheric and Oceanic Technology 19, no. 2 (2002): 183-204.
-
  """
 
  year = date.year
@@ -676,8 +674,7 @@ def egbert_correction(date):
 
 
  def compute_equilibrium_tide(lon, lat):
- """
- Compute equilibrium tide for given longitudes and latitudes.
+ """Compute equilibrium tide for given longitudes and latitudes.
 
  Parameters
  ----------
@@ -699,7 +696,6 @@ def compute_equilibrium_tide(lon, lat):
  - 2: semidiurnal
  - 1: diurnal
  - 0: long-term
-
  """
 
  # Amplitudes and elasticity factors for 15 tidal constituents
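The TidalForcing docstrings in this file describe the `source` dictionary, the constituent count, plotting, and the YAML round trip. A minimal sketch under the same assumptions as the SurfaceForcing example (import path, grid construction, and the TPXO file path are placeholders):

```python
# Hedged sketch of the TidalForcing API as documented above; the import path,
# grid construction, and TPXO file path are illustrative placeholders.
from roms_tools import Grid, TidalForcing  # assumed public import path

grid = Grid(nx=100, ny=100, size_x=1800, size_y=2400,
            center_lon=-21, center_lat=61, rot=20)  # hypothetical grid parameters

tidal_forcing = TidalForcing(
    grid=grid,
    source={"name": "TPXO", "path": "tpxo_atlas.nc"},  # keys per the docstring
    ntides=10,  # number of constituents; maximum is 14 per the docstring
)

tidal_forcing.plot("ssh_Re", ntides=0)       # real part of tidal elevation, first constituent
tidal_forcing.to_yaml("tidal_forcing.yaml")  # export parameters plus the roms-tools version
restored = TidalForcing.from_yaml("tidal_forcing.yaml")
```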
roms_tools/setup/topography.py CHANGED
@@ -12,8 +12,8 @@ from itertools import count
  def _add_topography_and_mask(
  ds, topography_source, hmin, smooth_factor=8.0, rmax=0.2
  ) -> xr.Dataset:
- """
- Adds topography and a land/water mask to the dataset based on the provided topography source.
+ """Adds topography and a land/water mask to the dataset based on the provided
+ topography source.
 
  This function performs the following operations:
  1. Interpolates topography data onto the desired grid.
@@ -86,9 +86,8 @@ def _add_topography_and_mask(
 
 
  def _make_raw_topography(lon, lat, topography_source) -> np.ndarray:
- """
- Given a grid of (lon, lat) points, fetch the topography file and interpolate height values onto the desired grid.
- """
+ """Given a grid of (lon, lat) points, fetch the topography file and interpolate
+ height values onto the desired grid."""
 
  topo_ds = fetch_topo(topography_source)
 
@@ -150,9 +149,7 @@ def _smooth_topography_globally(hraw, factor) -> xr.DataArray:
 
 
  def _fill_enclosed_basins(mask) -> np.ndarray:
- """
- Fills in enclosed basins with land
- """
+ """Fills in enclosed basins with land."""
 
  # Label connected regions in the mask
  reg, nreg = label(mask)
@@ -174,9 +171,7 @@ def _fill_enclosed_basins(mask) -> np.ndarray:
 
 
  def _smooth_topography_locally(h, hmin=5, rmax=0.2):
- """
- Smoothes topography locally to satisfy r < rmax
- """
+ """Smoothes topography locally to satisfy r < rmax."""
  # Compute rmax_log
  if rmax > 0.0:
  rmax_log = np.log((1.0 + rmax * 0.9) / (1.0 - rmax * 0.9))
@@ -254,8 +249,7 @@ def _smooth_topography_locally(h, hmin=5, rmax=0.2):
 
 
  def _handle_boundaries(field):
- """
- Adjust the boundaries of a 2D field by copying values from adjacent cells.
+ """Adjust the boundaries of a 2D field by copying values from adjacent cells.
 
  Parameters
  ----------
@@ -267,7 +261,6 @@ def _handle_boundaries(field):
  -------
  field : numpy.ndarray or xarray.DataArray
  The input field with adjusted boundary values.
-
  """
 
  field[0, :] = field[1, :]
@@ -279,9 +272,8 @@ def _handle_boundaries(field):
 
 
  def _compute_rfactor(h):
- """
- Computes slope parameter (or r-factor) r = |Delta h| / 2h in both horizontal grid directions.
- """
+ """Computes slope parameter (or r-factor) r = |Delta h| / 2h in both horizontal grid
+ directions."""
  # compute r_{i-1/2} = |h_i - h_{i-1}| / (h_i + h_{i+1})
  r_eta = np.abs(h.diff("eta_rho")) / (h + h.shift(eta_rho=1)).isel(
  eta_rho=slice(1, None)
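The `_compute_rfactor` docstring above defines the slope parameter r = |Delta h| / 2h. A simplified numpy illustration of that quantity along one grid direction (an illustration only, not the package's xarray implementation):

```python
# Simplified illustration of the slope parameter (r-factor) described above:
# r at each cell interface is |h_i - h_{i-1}| / (h_i + h_{i-1}), roughly |Delta h| / 2h.
import numpy as np

h = np.array([100.0, 120.0, 200.0, 210.0])   # water depths along one grid direction
r = np.abs(np.diff(h)) / (h[1:] + h[:-1])    # one value per cell interface
print(r.max())  # local smoothing (rmax=0.2 default above) would cap this value
```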
roms_tools/setup/utils.py CHANGED
@@ -8,8 +8,7 @@ from pathlib import Path
 
 
  def nan_check(field, mask) -> None:
- """
- Checks for NaN values at wet points in the field.
+ """Checks for NaN values at wet points in the field.
 
  This function examines the interpolated input field for NaN values at positions indicated as wet points by the mask.
  If any NaN values are found at these wet points, a ValueError is raised.
@@ -28,7 +27,6 @@ def nan_check(field, mask) -> None:
  ValueError
  If the field contains NaN values at any of the wet points indicated by the mask.
  The error message will explain the potential cause and suggest ensuring the dataset's coverage.
-
  """
 
  # Replace values in field with 0 where mask is not 1
@@ -44,8 +42,7 @@ def nan_check(field, mask) -> None:
 
 
  def substitute_nans_by_fillvalue(field, fill_value=0.0) -> xr.DataArray:
- """
- Replace NaN values in the field with a specified fill value.
+ """Replace NaN values in the field with a specified fill value.
 
  This function replaces any NaN values in the input field with the provided fill value.
 
@@ -66,9 +63,7 @@ def substitute_nans_by_fillvalue(field, fill_value=0.0) -> xr.DataArray:
 
 
  def interpolate_from_rho_to_u(field, method="additive"):
-
- """
- Interpolates the given field from rho points to u points.
+ """Interpolates the given field from rho points to u points.
 
  This function performs an interpolation from the rho grid (cell centers) to the u grid
  (cell edges in the xi direction). Depending on the chosen method, it either averages
@@ -114,9 +109,7 @@ def interpolate_from_rho_to_u(field, method="additive"):
 
 
  def interpolate_from_rho_to_v(field, method="additive"):
-
- """
- Interpolates the given field from rho points to v points.
+ """Interpolates the given field from rho points to v points.
 
  This function performs an interpolation from the rho grid (cell centers) to the v grid
  (cell edges in the eta direction). Depending on the chosen method, it either averages
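The two interpolation docstrings above describe moving a field from rho points (cell centers) to u or v points (cell edges) by averaging adjacent values. A toy numpy version of that averaging along the xi direction (an illustration only, not the package code, which operates on xarray objects):

```python
# Toy illustration of rho-to-u averaging along xi as described above.
import numpy as np

field_rho = np.arange(12.0).reshape(3, 4)               # (eta_rho, xi_rho)
field_u = 0.5 * (field_rho[:, :-1] + field_rho[:, 1:])  # (eta_rho, xi_u) = (3, 3)
```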
@@ -164,8 +157,8 @@ def interpolate_from_rho_to_v(field, method="additive"):
164
157
 
165
158
 
166
159
  def extrapolate_deepest_to_bottom(field: xr.DataArray, dim: str) -> xr.DataArray:
167
- """
168
- Extrapolates the deepest non-NaN values to the bottom along the specified dimension using forward fill.
160
+ """Extrapolates the deepest non-NaN values to the bottom along the specified
161
+ dimension using forward fill.
169
162
 
170
163
  This function assumes that the specified dimension is ordered from top to bottom (e.g., a vertical dimension like 'depth').
171
164
  It fills `NaN` values below the deepest valid (non-NaN) entry along the given dimension by carrying forward the last valid value.
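The docstring above describes carrying the deepest valid value downward with a forward fill, and the hunk that follows shows the implementation is `field.ffill(dim=dim)`. A toy profile showing that behaviour with xarray:

```python
# The forward-fill behaviour described above, on a toy vertical profile.
import numpy as np
import xarray as xr

profile = xr.DataArray([20.0, 18.5, np.nan, np.nan], dims="depth")
filled = profile.ffill(dim="depth")  # -> [20.0, 18.5, 18.5, 18.5]
```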
@@ -187,7 +180,6 @@ def extrapolate_deepest_to_bottom(field: xr.DataArray, dim: str) -> xr.DataArray
  A new `xarray.DataArray` with the `NaN` values along the specified dimension
  filled by forward filling the deepest valid values down to the bottom.
  The original input data remains unmodified.
-
  """
  field_interpolated = field.ffill(dim=dim)
 
@@ -195,8 +187,7 @@ def extrapolate_deepest_to_bottom(field: xr.DataArray, dim: str) -> xr.DataArray
 
 
  def assign_dates_to_climatology(ds: xr.Dataset, time_dim: str) -> xr.Dataset:
- """
- Assigns climatology dates to the dataset's time dimension.
+ """Assigns climatology dates to the dataset's time dimension.
 
  This function updates the dataset's time coordinates to reflect climatological dates.
  It defines fixed day increments for each month and assigns these to the specified time dimension.
@@ -213,7 +204,6 @@ def assign_dates_to_climatology(ds: xr.Dataset, time_dim: str) -> xr.Dataset:
  -------
  xr.Dataset
  The updated xarray Dataset with climatological dates assigned to the specified time dimension.
-
  """
  # Define the days in each month and convert to timedelta
  increments = [15, 30, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30]
@@ -229,8 +219,7 @@ def interpolate_from_climatology(
  time_dim_name: str,
  time: Union[xr.DataArray, pd.DatetimeIndex],
  ) -> Union[xr.DataArray, xr.Dataset]:
- """
- Interpolates the given field temporally based on the specified time points.
+ """Interpolates the given field temporally based on the specified time points.
 
  If `field` is an xarray.Dataset, this function applies the interpolation to all data variables in the dataset.
 
@@ -305,8 +294,7 @@
 
 
  def get_time_type(data_array: xr.DataArray) -> str:
- """
- Determines the type of time values in the xarray DataArray.
+ """Determines the type of time values in the xarray DataArray.
 
  Parameters
  ----------
@@ -354,8 +342,7 @@
 
 
  def convert_cftime_to_datetime(data_array: np.ndarray) -> np.ndarray:
- """
- Converts cftime datetime objects to numpy datetime64 objects in a numpy ndarray.
+ """Converts cftime datetime objects to numpy datetime64 objects in a numpy ndarray.
 
  Parameters
  ----------
@@ -391,8 +378,7 @@ def convert_cftime_to_datetime(data_array: np.ndarray) -> np.ndarray:
 
 
  def get_variable_metadata():
- """
- Retrieves metadata for commonly used variables in the dataset.
+ """Retrieves metadata for commonly used variables in the dataset.
 
  This function returns a dictionary containing the metadata for various variables, including long names
  and units for each variable.
@@ -401,7 +387,6 @@ def get_variable_metadata():
  -------
  dict of str: dict
  Dictionary where keys are variable names and values are dictionaries with "long_name" and "units" keys.
-
  """
 
  d = {
@@ -519,10 +504,8 @@ def get_variable_metadata():
 
 
  def get_boundary_info():
-
- """
- This function provides information about the boundary points for the rho, u, and
- variables on the grid, specifying the indices for the south, east, north, and west
+ """This function provides information about the boundary points for the rho, u, and
+ v variables on the grid, specifying the indices for the south, east, north, and west
  boundaries.
 
  Returns
@@ -559,8 +542,7 @@
 
 
  def extract_single_value(data):
- """
- Extracts a single value from an xarray.DataArray or numpy array.
+ """Extracts a single value from an xarray.DataArray or numpy array.
 
  Parameters
  ----------
@@ -589,8 +571,8 @@
 
 
  def group_dataset(ds, filepath):
- """
- Group the dataset into monthly or yearly subsets based on the frequency of the data.
+ """Group the dataset into monthly or yearly subsets based on the frequency of the
+ data.
 
  Parameters
  ----------
@@ -643,8 +625,7 @@
 
 
  def group_by_month(ds, filepath):
- """
- Group the dataset by month and generate filenames with 'YYYYMM' format.
+ """Group the dataset by month and generate filenames with 'YYYYMM' format.
 
  Parameters
  ----------
@@ -681,8 +662,7 @@ def group_by_month(ds, filepath):
 
 
  def group_by_year(ds, filepath):
- """
- Group the dataset by year and generate filenames with 'YYYY' format.
+ """Group the dataset by year and generate filenames with 'YYYY' format.
 
  Parameters
  ----------
@@ -715,8 +695,7 @@ def group_by_year(ds, filepath):
 
 
  def save_datasets(dataset_list, output_filenames, np_eta=None, np_xi=None):
- """
- Save the list of datasets to netCDF4 files, with optional spatial partitioning.
+ """Save the list of datasets to netCDF4 files, with optional spatial partitioning.
 
  Parameters
  ----------
roms_tools/setup/vertical_coordinate.py CHANGED
@@ -3,8 +3,8 @@ import xarray as xr
 
 
  def compute_cs(sigma, theta_s, theta_b):
- """
- Compute the S-coordinate stretching curves according to Shchepetkin and McWilliams (2009).
+ """Compute the S-coordinate stretching curves according to Shchepetkin and
+ McWilliams (2009).
 
  Parameters
  ----------
@@ -37,8 +37,7 @@ def compute_cs(sigma, theta_s, theta_b):
 
 
  def sigma_stretch(theta_s, theta_b, N, type):
- """
- Compute sigma and stretching curves based on the type and parameters.
+ """Compute sigma and stretching curves based on the type and parameters.
 
  Parameters
  ----------
@@ -80,8 +79,7 @@ def sigma_stretch(theta_s, theta_b, N, type):
 
 
  def compute_depth(zeta, h, hc, cs, sigma):
- """
- Compute the depth at different sigma levels.
+ """Compute the depth at different sigma levels.
 
  Parameters
  ----------
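The three docstrings above cover the S-coordinate stretching of Shchepetkin and McWilliams (2009) and the depth computation at sigma levels. A self-contained sketch of one common form of these formulas follows; it is a hedged illustration, and the package's exact conventions may differ in detail.

```python
# One common form of the Shchepetkin & McWilliams (2009) stretching and the
# sigma-level depth computation documented above; a hedged sketch, not the
# package implementation.
import numpy as np

def stretching(sigma, theta_s, theta_b):
    c = (1 - np.cosh(theta_s * sigma)) / (np.cosh(theta_s) - 1)  # surface refinement
    return (np.exp(theta_b * c) - 1) / (1 - np.exp(-theta_b))    # bottom refinement

N = 5
sigma = (np.arange(1, N + 1) - N - 0.5) / N   # layer centers in (-1, 0)
cs = stretching(sigma, theta_s=5.0, theta_b=2.0)

h, hc, zeta = 500.0, 250.0, 0.0               # depth, critical depth, free surface
S = (hc * sigma + h * cs) / (hc + h)
z = zeta + (zeta + h) * S                     # depth of each sigma level (negative downward)
print(z)
```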