roms-tools 0.1.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,118 @@
1
+ import pooch
2
+ import xarray as xr
3
+
4
# Pooch downloader for the global topography data.
# Files are cached in the OS-specific cache directory and verified against
# the SHA-256 digests below before being handed to callers.
topo_data = pooch.create(
    # Use the default cache folder for the operating system
    path=pooch.os_cache("roms-tools"),
    base_url="https://github.com/CWorthy-ocean/roms-tools-data/raw/main/",
    # The registry specifies the files that can be fetched
    registry={
        "etopo5.nc": "sha256:23600e422d59bbf7c3666090166a0d468c8ee16092f4f14e32c4e928fbcd627b",
    },
)

# Pooch downloader for the global shortwave-radiation (SWR) correction data.
# Note: it also lists etopo5.nc, so topography can be fetched from here too.
correction_data = pooch.create(
    # Use the default cache folder for the operating system
    path=pooch.os_cache("roms-tools"),
    base_url="https://github.com/CWorthy-ocean/roms-tools-data/raw/main/",
    # The registry specifies the files that can be fetched
    registry={
        "etopo5.nc": "sha256:23600e422d59bbf7c3666090166a0d468c8ee16092f4f14e32c4e928fbcd627b",
        "SSR_correction.nc": "sha256:a170c1698e6cc2765b3f0bb51a18c6a979bc796ac3a4c014585aeede1f1f8ea0",
    },
)

# Pooch downloader for the test datasets (separate repository from the
# production data above).  Hashes here are bare hex digests; pooch treats
# them as SHA-256 by default.
pup_test_data = pooch.create(
    # Use the default cache folder for the operating system
    path=pooch.os_cache("roms-tools"),
    base_url="https://github.com/CWorthy-ocean/roms-tools-test-data/raw/main/",
    # The registry specifies the files that can be fetched
    registry={
        "GLORYS_test_data.nc": "648f88ec29c433bcf65f257c1fb9497bd3d5d3880640186336b10ed54f7129d2",
        "ERA5_regional_test_data.nc": "bd12ce3b562fbea2a80a3b79ba74c724294043c28dc98ae092ad816d74eac794",
        "ERA5_global_test_data.nc": "8ed177ab64c02caf509b9fb121cf6713f286cc603b1f302f15f3f4eb0c21dc4f",
        "TPXO_global_test_data.nc": "457bfe87a7b247ec6e04e3c7d3e741ccf223020c41593f8ae33a14f2b5255e60",
        "TPXO_regional_test_data.nc": "11739245e2286d9c9d342dce5221e6435d2072b50028bef2e86a30287b3b4032",
        "CESM_regional_test_data_one_time_slice.nc": "43b578ecc067c85f95d6b97ed7b9dc8da7846f07c95331c6ba7f4a3161036a17",
        "CESM_regional_test_data_climatology.nc": "986a200029d9478fd43e6e4a8bc43e8a8f4407554893c59b5fcc2e86fd203272",
        "CESM_surface_global_test_data_climatology.nc": "a072757110c6f7b716a98f867688ef4195a5966741d2f368201ac24617254e35",
        "CESM_surface_global_test_data.nc": "874106ffbc8b1b220db09df1551bbb89d22439d795b4d1e5a24ee775e9a7bf6e",
    },
)
45
+
46
+
47
def fetch_topo(topography_source: str) -> xr.Dataset:
    """
    Load the global topography data as an xarray Dataset.

    Parameters
    ----------
    topography_source : str
        The source of the topography data to be loaded. Available options:
        - "ETOPO5"

    Returns
    -------
    xr.Dataset
        The global topography data as an xarray Dataset.

    Raises
    ------
    ValueError
        If `topography_source` is not one of the supported options.
    """
    # Mapping from user-specified topography options to corresponding filenames in the registry
    topo_dict = {"ETOPO5": "etopo5.nc"}

    # Fail early with an informative error instead of a bare KeyError
    if topography_source not in topo_dict:
        raise ValueError(
            f"Unknown topography source '{topography_source}'. "
            f"Available options: {list(topo_dict.keys())}"
        )

    # Fetch the file using Pooch, downloading and caching if necessary
    fname = topo_data.fetch(topo_dict[topography_source])

    # Load the dataset using xarray and return it
    ds = xr.open_dataset(fname)
    return ds
71
+
72
+
73
def download_correction_data(filename: str) -> str:
    """
    Ensure a correction data file is available locally and return its path.

    Parameters
    ----------
    filename : str
        The name of the correction data file to be downloaded. Available options:
        - "SSR_correction.nc"

    Returns
    -------
    str
        The path to the downloaded correction data file.
    """
    # Pooch downloads on first use and serves the cached copy afterwards.
    return correction_data.fetch(filename)
92
+
93
+
94
def download_test_data(filename: str) -> str:
    """
    Download the test data file.

    Parameters
    ----------
    filename : str
        The name of the test data file to be downloaded. Available options:
        - "GLORYS_test_data.nc"
        - "ERA5_regional_test_data.nc"
        - "ERA5_global_test_data.nc"
        - "TPXO_global_test_data.nc"
        - "TPXO_regional_test_data.nc"
        - "CESM_regional_test_data_one_time_slice.nc"
        - "CESM_regional_test_data_climatology.nc"
        - "CESM_surface_global_test_data_climatology.nc"
        - "CESM_surface_global_test_data.nc"

    Returns
    -------
    str
        The path to the downloaded test data file.
    """
    # Fetch the file using Pooch, downloading if necessary
    fname = pup_test_data.fetch(filename)

    return fname
roms_tools/setup/fill.py CHANGED
@@ -3,7 +3,103 @@ import xarray as xr
3
3
  from numba import jit
4
4
 
5
5
 
6
- def lateral_fill(var, land_mask, dims=["latitude", "longitude"]):
6
def fill_and_interpolate(
    field,
    mask,
    fill_dims,
    coords,
    method="linear",
    fillvalue_fill=0.0,
    fillvalue_interp=np.nan,
):
    """
    Fill land areas with nearby ocean values, then interpolate onto new coordinates.

    Parameters
    ----------
    field : xr.DataArray
        The data array to be interpolated (e.g. oceanographic or atmospheric data
        on latitude/longitude dimensions).
    mask : xr.DataArray
        Same spatial dimensions as `field`; `1` marks ocean points, `0` marks land.
    fill_dims : list of str
        Dimensions along which the lateral fill is performed, typically the
        horizontal ones, e.g. ["latitude", "longitude"].
    coords : dict
        Target coordinates for the interpolation; keys must be dimensions of
        `field` (e.g. {"longitude": ..., "latitude": ..., "depth": ...}).
    method : str, optional, default='linear'
        Interpolation method passed to `xarray.DataArray.interp`; one of
        'linear' or 'nearest'.
    fillvalue_fill : float, optional, default=0.0
        Used during the fill step when a slice along `fill_dims` is all NaN.
    fillvalue_interp : float, optional, default=np.nan
        Fill value for the interpolation step. `np.nan` disables extrapolation;
        `None` enables it (useful in the vertical to avoid surface NaNs when the
        shallowest source depth is below zero).

    Returns
    -------
    xr.DataArray
        `field` with land filled and values interpolated onto `coords`; the
        interpolated source coordinates are dropped from the result.

    Raises
    ------
    TypeError
        If `field`/`mask` are not DataArrays or `coords` is not a dict.
    ValueError
        If a key of `coords` is not a dimension of `field`, or `method` is
        unsupported.
    """
    # Validate inputs before doing any work; order of checks is part of the
    # established behavior.
    if not isinstance(field, xr.DataArray):
        raise TypeError("field must be an xarray.DataArray")
    if not isinstance(mask, xr.DataArray):
        raise TypeError("mask must be an xarray.DataArray")
    if not isinstance(coords, dict):
        raise TypeError("coords must be a dictionary")
    if not all(dim in field.dims for dim in coords.keys()):
        raise ValueError("All keys in coords must match dimensions of field")
    if method not in ("linear", "nearest"):
        raise ValueError(
            "Unsupported interpolation method. Choose from 'linear', 'nearest'"
        )

    # Blank out land so interpolation cannot blend across the coastline.
    masked = field.where(mask)

    # Extend ocean values into the land interior along the horizontal dims.
    filled = lateral_fill(masked, 1 - mask, fill_dims, fillvalue_fill)

    # Interpolate onto the target coordinates, then drop the leftover source
    # coordinates from the result.
    interpolated = filled.interp(
        coords, method=method, kwargs={"fill_value": fillvalue_interp}
    )
    return interpolated.drop_vars(list(coords.keys()))
100
+
101
+
102
+ def lateral_fill(var, land_mask, dims=["latitude", "longitude"], fillvalue=0.0):
7
103
  """
8
104
  Perform lateral fill on an xarray DataArray using a land mask.
9
105
 
@@ -20,6 +116,9 @@ def lateral_fill(var, land_mask, dims=["latitude", "longitude"]):
20
116
  dims : list of str, optional, default=['latitude', 'longitude']
21
117
  Dimensions along which to perform the fill. The default is ['latitude', 'longitude'].
22
118
 
119
+ fillvalue : float, optional, default=0.0
120
+ Value to use if an entire data slice along the dims contains only NaNs.
121
+
23
122
  Returns
24
123
  -------
25
124
  var_filled : xarray.DataArray
@@ -27,21 +126,25 @@ def lateral_fill(var, land_mask, dims=["latitude", "longitude"]):
27
126
  specified by `land_mask` where NaNs are preserved.
28
127
 
29
128
  """
129
+
30
130
  var_filled = xr.apply_ufunc(
31
131
  _lateral_fill_np_array,
32
132
  var,
33
133
  land_mask,
34
134
  input_core_dims=[dims, dims],
35
135
  output_core_dims=[dims],
36
- dask="parallelized",
37
136
  output_dtypes=[var.dtype],
137
+ dask="parallelized",
38
138
  vectorize=True,
139
+ kwargs={"fillvalue": fillvalue},
39
140
  )
40
141
 
41
142
  return var_filled
42
143
 
43
144
 
44
- def _lateral_fill_np_array(var, isvalid_mask, tol=1.0e-4, rc=1.8, max_iter=10000):
145
+ def _lateral_fill_np_array(
146
+ var, isvalid_mask, fillvalue=0.0, tol=1.0e-4, rc=1.8, max_iter=10000
147
+ ):
45
148
  """
46
149
  Perform lateral fill on a numpy array.
47
150
 
@@ -55,6 +158,9 @@ def _lateral_fill_np_array(var, isvalid_mask, tol=1.0e-4, rc=1.8, max_iter=10000
55
158
  Valid values mask: `True` where data should be filled. Must have same shape
56
159
  as `var`.
57
160
 
161
+ fillvalue: float
162
+ Value to use if the full field `var` contains only NaNs. Default is 0.0.
163
+
58
164
  tol : float, optional, default=1.0e-4
59
165
  Convergence criteria: stop filling when the value change is less than
60
166
  or equal to `tol * var`, i.e., `delta <= tol * np.abs(var[j, i])`.
@@ -90,14 +196,14 @@ def _lateral_fill_np_array(var, isvalid_mask, tol=1.0e-4, rc=1.8, max_iter=10000
90
196
 
91
197
  fillmask = np.isnan(var) # Fill all NaNs
92
198
  keepNaNs = ~isvalid_mask & np.isnan(var)
93
- var = _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter)
199
+ var = _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter, fillvalue)
94
200
  var[keepNaNs] = np.nan # Replace NaNs in areas not designated for filling
95
201
 
96
202
  return var
97
203
 
98
204
 
99
205
  @jit(nopython=True, parallel=True)
100
- def _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter):
206
+ def _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter, fillvalue=0.0):
101
207
  """
102
208
  Perform an iterative land fill algorithm using the Successive Over-Relaxation (SOR)
103
209
  solution of the Laplace Equation.
@@ -126,6 +232,9 @@ def _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter):
126
232
  max_iter : int
127
233
  Maximum number of iterations allowed before the process is terminated.
128
234
 
235
+ fillvalue: float
236
+ Value to use if the full field is NaNs. Default is 0.0.
237
+
129
238
  Returns
130
239
  -------
131
240
  None
@@ -155,6 +264,10 @@ def _iterative_fill_sor(nlat, nlon, var, fillmask, tol, rc, max_iter):
155
264
  if np.max(np.fabs(var)) == 0.0:
156
265
  var = np.zeros_like(var)
157
266
  return var
267
+ # If field consists only of NaNs, fill NaNs with fill value
268
+ if np.isnan(var).all():
269
+ var = fillvalue * np.ones_like(var)
270
+ return var
158
271
 
159
272
  # Compute a zonal mean to use as a first guess
160
273
  zoncnt = np.zeros(nlat)
roms_tools/setup/grid.py CHANGED
@@ -1,12 +1,16 @@
1
1
  import copy
2
- from dataclasses import dataclass, field
2
+ from dataclasses import dataclass, field, asdict
3
3
 
4
4
  import numpy as np
5
5
  import xarray as xr
6
+ import yaml
7
+ import importlib.metadata
6
8
 
7
-
8
- from roms_tools.setup.topography import _add_topography_and_mask
9
+ from roms_tools.setup.topography import _add_topography_and_mask, _add_velocity_masks
9
10
  from roms_tools.setup.plot import _plot
11
+ from roms_tools.setup.utils import interpolate_from_rho_to_u, interpolate_from_rho_to_v
12
+
13
+ import warnings
10
14
 
11
15
  RADIUS_OF_EARTH = 6371315.0 # in m
12
16
 
@@ -41,7 +45,7 @@ class Grid:
41
45
  The default is 0, which means that the x-direction of the grid is aligned with lines of constant latitude.
42
46
  topography_source : str, optional
43
47
  Specifies the data source to use for the topography. Options are
44
- "etopo5". The default is "etopo5".
48
+ "ETOPO5". The default is "ETOPO5".
45
49
  smooth_factor : float, optional
46
50
  The smoothing factor used in the domain-wide Gaussian smoothing of the
47
51
  topography. Smaller values result in less smoothing, while larger
@@ -97,7 +101,7 @@ class Grid:
97
101
  center_lon: float
98
102
  center_lat: float
99
103
  rot: float = 0
100
- topography_source: str = "etopo5"
104
+ topography_source: str = "ETOPO5"
101
105
  smooth_factor: int = 8
102
106
  hmin: float = 5.0
103
107
  rmax: float = 0.2
@@ -130,7 +134,7 @@ class Grid:
130
134
  self._straddle()
131
135
 
132
136
  def add_topography_and_mask(
133
- self, topography_source="etopo5", smooth_factor=8, hmin=5.0, rmax=0.2
137
+ self, topography_source="ETOPO5", smooth_factor=8, hmin=5.0, rmax=0.2
134
138
  ) -> None:
135
139
  """
136
140
  Add topography and mask to the grid dataset.
@@ -144,7 +148,7 @@ class Grid:
144
148
  ----------
145
149
  topography_source : str, optional
146
150
  Specifies the data source to use for the topography. Options are
147
- "etopo5". The default is "etopo5".
151
+ "ETOPO5". The default is "ETOPO5".
148
152
  smooth_factor : float, optional
149
153
  The smoothing factor used in the domain-wide Gaussian smoothing of the
150
154
  topography. Smaller values result in less smoothing, while larger
@@ -204,6 +208,37 @@ class Grid:
204
208
  """
205
209
  self.ds.to_netcdf(filepath)
206
210
 
211
+ def to_yaml(self, filepath: str) -> None:
212
+ """
213
+ Export the parameters of the class to a YAML file, including the version of roms-tools.
214
+
215
+ Parameters
216
+ ----------
217
+ filepath : str
218
+ The path to the YAML file where the parameters will be saved.
219
+ """
220
+ data = asdict(self)
221
+ data.pop("ds", None)
222
+ data.pop("straddle", None)
223
+
224
+ # Include the version of roms-tools
225
+ try:
226
+ roms_tools_version = importlib.metadata.version("roms-tools")
227
+ except importlib.metadata.PackageNotFoundError:
228
+ roms_tools_version = "unknown"
229
+
230
+ # Create header
231
+ header = f"---\nroms_tools_version: {roms_tools_version}\n---\n"
232
+
233
+ # Use the class name as the top-level key
234
+ yaml_data = {self.__class__.__name__: data}
235
+
236
+ with open(filepath, "w") as file:
237
+ # Write header
238
+ file.write(header)
239
+ # Write YAML data
240
+ yaml.dump(yaml_data, file, default_flow_style=False)
241
+
207
242
  @classmethod
208
243
  def from_file(cls, filepath: str) -> "Grid":
209
244
  """
@@ -222,6 +257,11 @@ class Grid:
222
257
  # Load the dataset from the file
223
258
  ds = xr.open_dataset(filepath)
224
259
 
260
+ if not all(mask in ds for mask in ["mask_u", "mask_v"]):
261
+ ds = _add_velocity_masks(ds)
262
+ if not all(coord in ds for coord in ["lat_u", "lon_u", "lat_v", "lon_v"]):
263
+ ds = _add_lat_lon_at_velocity_points(ds)
264
+
225
265
  # Create a new Grid instance without calling __init__ and __post_init__
226
266
  grid = cls.__new__(cls)
227
267
 
@@ -251,6 +291,62 @@ class Grid:
251
291
 
252
292
  return grid
253
293
 
294
+ @classmethod
295
+ def from_yaml(cls, filepath: str) -> "Grid":
296
+ """
297
+ Create an instance of the class from a YAML file.
298
+
299
+ Parameters
300
+ ----------
301
+ filepath : str
302
+ The path to the YAML file from which the parameters will be read.
303
+
304
+ Returns
305
+ -------
306
+ Grid
307
+ An instance of the Grid class.
308
+ """
309
+ # Read the entire file content
310
+ with open(filepath, "r") as file:
311
+ file_content = file.read()
312
+
313
+ # Split the content into YAML documents
314
+ documents = list(yaml.safe_load_all(file_content))
315
+
316
+ header_data = None
317
+ grid_data = None
318
+
319
+ # Iterate over documents to find the header and grid configuration
320
+ for doc in documents:
321
+ if doc is None:
322
+ continue
323
+ if "roms_tools_version" in doc:
324
+ header_data = doc
325
+ elif "Grid" in doc:
326
+ grid_data = doc["Grid"]
327
+
328
+ if header_data is None:
329
+ raise ValueError("Version of ROMS-Tools not found in the YAML file.")
330
+ else:
331
+ # Check the roms_tools_version
332
+ roms_tools_version_header = header_data.get("roms_tools_version")
333
+ # Get current version of roms-tools
334
+ try:
335
+ roms_tools_version_current = importlib.metadata.version("roms-tools")
336
+ except importlib.metadata.PackageNotFoundError:
337
+ roms_tools_version_current = "unknown"
338
+
339
+ if roms_tools_version_header != roms_tools_version_current:
340
+ warnings.warn(
341
+ f"Current roms-tools version ({roms_tools_version_current}) does not match the version in the YAML header ({roms_tools_version_header}).",
342
+ UserWarning,
343
+ )
344
+
345
+ if grid_data is None:
346
+ raise ValueError("No Grid configuration found in the YAML file.")
347
+
348
+ return cls(**grid_data)
349
+
254
350
  # override __repr__ method to only print attributes that are actually set
255
351
  def __repr__(self) -> str:
256
352
  cls = self.__class__
@@ -304,6 +400,7 @@ class Grid:
304
400
 
305
401
  if bathymetry:
306
402
  kwargs = {"cmap": "YlGnBu"}
403
+
307
404
  _plot(
308
405
  self.ds,
309
406
  field=self.ds.h.where(self.ds.mask_rho),
@@ -719,6 +816,18 @@ def _create_grid_ds(
719
816
  },
720
817
  )
721
818
 
819
+ ds["tra_lon"] = center_lon
820
+ ds["tra_lon"].attrs["long_name"] = "Longitudinal translation of base grid"
821
+ ds["tra_lon"].attrs["units"] = "degrees East"
822
+
823
+ ds["tra_lat"] = center_lat
824
+ ds["tra_lat"].attrs["long_name"] = "Latitudinal translation of base grid"
825
+ ds["tra_lat"].attrs["units"] = "degrees North"
826
+
827
+ ds["rotate"] = rot
828
+ ds["rotate"].attrs["long_name"] = "Rotation of base grid"
829
+ ds["rotate"].attrs["units"] = "degrees"
830
+
722
831
  ds["lon_rho"] = xr.Variable(
723
832
  data=lon * 180 / np.pi,
724
833
  dims=["eta_rho", "xi_rho"],
@@ -731,23 +840,21 @@ def _create_grid_ds(
731
840
  attrs={"long_name": "latitude of rho-points", "units": "degrees North"},
732
841
  )
733
842
 
734
- ds["tra_lon"] = center_lon
735
- ds["tra_lon"].attrs["long_name"] = "Longitudinal translation of base grid"
736
- ds["tra_lon"].attrs["units"] = "degrees East"
737
-
738
- ds["tra_lat"] = center_lat
739
- ds["tra_lat"].attrs["long_name"] = "Latitudinal translation of base grid"
740
- ds["tra_lat"].attrs["units"] = "degrees North"
741
-
742
- ds["rotate"] = rot
743
- ds["rotate"].attrs["long_name"] = "Rotation of base grid"
744
- ds["rotate"].attrs["units"] = "degrees"
843
+ ds = _add_lat_lon_at_velocity_points(ds)
745
844
 
746
845
  return ds
747
846
 
748
847
 
749
848
  def _add_global_metadata(ds, size_x, size_y):
750
- ds.attrs["Type"] = "ROMS grid produced by roms-tools"
849
+ ds.attrs["title"] = "ROMS grid created by ROMS-Tools"
850
+
851
+ # Include the version of roms-tools
852
+ try:
853
+ roms_tools_version = importlib.metadata.version("roms-tools")
854
+ except importlib.metadata.PackageNotFoundError:
855
+ roms_tools_version = "unknown"
856
+
857
+ ds.attrs["roms_tools_version"] = roms_tools_version
751
858
  ds.attrs["size_x"] = size_x
752
859
  ds.attrs["size_y"] = size_y
753
860
 
@@ -807,3 +914,22 @@ def _f2c_xdir(f):
807
914
  fc[-1, :] = f[-1, :] + 0.5 * (f[-1, :] - f[-2, :])
808
915
 
809
916
  return fc
917
+
918
+
919
+ def _add_lat_lon_at_velocity_points(ds):
920
+
921
+ lat_u = interpolate_from_rho_to_u(ds["lat_rho"])
922
+ lon_u = interpolate_from_rho_to_u(ds["lon_rho"])
923
+ lat_v = interpolate_from_rho_to_v(ds["lat_rho"])
924
+ lon_v = interpolate_from_rho_to_v(ds["lon_rho"])
925
+
926
+ lat_u.attrs = {"long_name": "latitude of u-points", "units": "degrees North"}
927
+ lon_u.attrs = {"long_name": "longitude of u-points", "units": "degrees East"}
928
+ lat_v.attrs = {"long_name": "latitude of v-points", "units": "degrees North"}
929
+ lon_v.attrs = {"long_name": "longitude of v-points", "units": "degrees East"}
930
+
931
+ ds = ds.assign_coords(
932
+ {"lat_u": lat_u, "lon_u": lon_u, "lat_v": lat_v, "lon_v": lon_v}
933
+ )
934
+
935
+ return ds