roms-tools 0.0.6__py3-none-any.whl → 0.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ci/environment.yml +29 -0
- roms_tools/__init__.py +6 -0
- roms_tools/_version.py +1 -1
- roms_tools/setup/atmospheric_forcing.py +935 -0
- roms_tools/setup/boundary_forcing.py +711 -0
- roms_tools/setup/datasets.py +457 -0
- roms_tools/setup/fill.py +376 -0
- roms_tools/setup/grid.py +610 -325
- roms_tools/setup/initial_conditions.py +528 -0
- roms_tools/setup/plot.py +203 -0
- roms_tools/setup/tides.py +809 -0
- roms_tools/setup/topography.py +257 -0
- roms_tools/setup/utils.py +162 -0
- roms_tools/setup/vertical_coordinate.py +494 -0
- roms_tools/tests/test_atmospheric_forcing.py +1645 -0
- roms_tools/tests/test_boundary_forcing.py +332 -0
- roms_tools/tests/test_datasets.py +306 -0
- roms_tools/tests/test_grid.py +226 -0
- roms_tools/tests/test_initial_conditions.py +300 -0
- roms_tools/tests/test_tides.py +366 -0
- roms_tools/tests/test_topography.py +78 -0
- roms_tools/tests/test_vertical_coordinate.py +337 -0
- roms_tools-0.20.dist-info/METADATA +90 -0
- roms_tools-0.20.dist-info/RECORD +28 -0
- {roms_tools-0.0.6.dist-info → roms_tools-0.20.dist-info}/WHEEL +1 -1
- {roms_tools-0.0.6.dist-info → roms_tools-0.20.dist-info}/top_level.txt +1 -0
- roms_tools/tests/test_setup.py +0 -54
- roms_tools-0.0.6.dist-info/METADATA +0 -134
- roms_tools-0.0.6.dist-info/RECORD +0 -10
- {roms_tools-0.0.6.dist-info → roms_tools-0.20.dist-info}/LICENSE +0 -0
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from roms_tools import BoundaryForcing, Grid, VerticalCoordinate
|
|
4
|
+
import xarray as xr
|
|
5
|
+
import numpy as np
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
import textwrap
|
|
9
|
+
from roms_tools.setup.datasets import download_test_data
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@pytest.fixture
def example_grid():
    """Provide a small 2x2 rotated Grid centered at (0E, 55N) for the tests below."""
    return Grid(
        nx=2, ny=2, size_x=500, size_y=1000, center_lon=0, center_lat=55, rot=10
    )
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@pytest.fixture
def example_vertical_coordinate(example_grid):
    """Provide a three-level VerticalCoordinate built on the example grid."""
    return VerticalCoordinate(
        grid=example_grid,
        N=3,  # number of vertical levels
        theta_s=5.0,  # surface control parameter
        theta_b=2.0,  # bottom control parameter
        hc=250.0,  # critical depth
    )
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@pytest.fixture
def boundary_forcing(example_grid, example_vertical_coordinate):
    """Provide a BoundaryForcing object regridded from the GLORYS test dataset."""
    # Fetch (or reuse a cached copy of) the GLORYS sample file.
    fname = download_test_data("GLORYS_test_data.nc")

    return BoundaryForcing(
        grid=example_grid,
        vertical_coordinate=example_vertical_coordinate,
        start_time=datetime(2021, 6, 29),
        end_time=datetime(2021, 6, 30),
        source="glorys",
        filename=fname,
    )
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def test_boundary_forcing_creation(boundary_forcing):
    """The constructor arguments must be stored verbatim on the object."""
    assert boundary_forcing.start_time == datetime(2021, 6, 29)
    assert boundary_forcing.end_time == datetime(2021, 6, 30)
    assert boundary_forcing.filename == download_test_data("GLORYS_test_data.nc")
    assert boundary_forcing.source == "glorys"
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def test_boundary_forcing_ds_attribute(boundary_forcing):
    """ds must be an xarray Dataset holding every boundary variable on all four sides."""
    assert isinstance(boundary_forcing.ds, xr.Dataset)
    for direction in ["south", "east", "north", "west"]:
        for field in ["temp", "salt", "u", "v", "zeta"]:
            assert f"{field}_{direction}" in boundary_forcing.ds
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def test_boundary_forcing_data_consistency_plot_save(boundary_forcing):
    """
    Check the regridded boundary values against reference numbers.

    The plot and save methods are exercised in the same test because the
    dask arrays have already been computed at that point.
    """
    boundary_forcing.ds.load()

    # Reference values for every boundary variable, keyed by dataset variable name.
    expected = {
        "zeta_south": [[-0.30468762, -0.29416865, -0.30391693, -0.32985148]],
        "zeta_east": [[-0.32985148, -0.36176518, -0.40663475, -0.40699923]],
        "zeta_north": [[-0.5534979, -0.5270749, -0.45107934, -0.40699923]],
        "zeta_west": [[-0.30468762, -0.34336275, -0.3699948, -0.5534979]],
        "temp_south": [
            [
                [16.84414, 16.905312, 16.967817],
                [18.088203, 18.121834, 18.315424],
                [18.431192, 18.496748, 18.718002],
                [19.294329, 19.30358, 19.439777],
            ]
        ],
        "temp_east": [
            [
                [19.294329, 19.30358, 19.439777],
                [18.633307, 18.637077, 18.667465],
                [8.710737, 11.25943, 13.111585],
                [9.20282, 10.667074, 11.752404],
            ]
        ],
        "temp_north": [
            [
                [10.233599, 10.546486, 10.671082],
                [10.147332, 10.502733, 10.68275],
                [10.458557, 11.209945, 11.377164],
                [9.20282, 10.667074, 11.752404],
            ]
        ],
        "temp_west": [
            [
                [16.84414, 16.905312, 16.967817],
                [12.639833, 13.479691, 14.426711],
                [11.027701, 11.650267, 12.200586],
                [10.233599, 10.546486, 10.671082],
            ]
        ],
        "u_south": [[[-0.0, -0.0, -0.0], [-0.0, -0.0, -0.0], [0.0, -0.0, -0.0]]],
        "u_east": [
            [
                [0.0, -0.0, -0.0],
                [-0.0, -0.0, -0.0],
                [0.06979556, 0.06167743, -0.02247071],
                [0.0211786, 0.03679834, 0.0274788],
            ]
        ],
        "u_north": [
            [
                [0.04268532, 0.03889201, 0.03351666],
                [0.04645353, 0.04914769, 0.03673013],
                [0.0211786, 0.03679834, 0.0274788],
            ]
        ],
        "u_west": [
            [
                [-0.0, -0.0, -0.0],
                [0.0, -0.0, -0.0],
                [0.0, 0.0, -0.0],
                [0.04268532, 0.03889201, 0.03351666],
            ]
        ],
        "v_south": [
            [[0.0, 0.0, 0.0], [0.0, 0.0, -0.0], [-0.0, -0.0, -0.0], [-0.0, -0.0, -0.0]]
        ],
        "v_east": [
            [
                [-0.0, -0.0, -0.0],
                [-0.0, -0.0, -0.0],
                [-0.06720348, -0.08354441, -0.13835917],
            ]
        ],
        "v_north": [
            [
                [-0.00951457, -0.00576979, -0.02147919],
                [-0.0, -0.0, -0.0],
                [0.01915873, 0.02625698, 0.01757628],
                [-0.06720348, -0.08354441, -0.13835917],
            ]
        ],
        "v_west": [
            [
                [0.0, 0.0, 0.0],
                [-0.0, -0.0, -0.0],
                [-0.00951457, -0.00576979, -0.02147919],
            ]
        ],
        "ubar_south": [[0.0], [0.0], [0.0]],
        "ubar_east": [[0.0], [0.0], [0.04028399], [0.02812303]],
        "ubar_north": [[0.03866891], [0.04446249], [0.02812303]],
        "ubar_west": [[0.0], [0.0], [0.0], [0.03866891]],
        "vbar_south": [[0.0], [0.0], [0.0], [0.0]],
        "vbar_east": [[0.0], [0.0], [-0.09326097]],
        "vbar_north": [[-0.01189703], [0.0], [0.02102064], [-0.09326097]],
        "vbar_west": [[0.0], [0.0], [-0.01189703]],
    }

    # Compare every variable in the dataset against its reference array.
    for varname, values in expected.items():
        reference = np.array(values, dtype=np.float32)
        assert np.allclose(boundary_forcing.ds[varname].values, reference)

    # Exercise the plotting code path for each boundary of temp and zeta.
    for field in ["temp", "zeta"]:
        for direction in ["south", "east", "north", "west"]:
            boundary_forcing.plot(varname=f"{field}_{direction}")

    # Create a temporary file; save() appends a date-range suffix to the path.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name

    boundary_forcing.save(filepath)
    extended_filepath = filepath + ".20210629-29.nc"

    try:
        assert os.path.exists(extended_filepath)
    finally:
        os.remove(extended_filepath)
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def test_roundtrip_yaml(boundary_forcing):
    """Writing parameters to YAML and re-reading them must reproduce an equal object."""
    # Create a temporary file to hold the YAML parameters.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name

    try:
        boundary_forcing.to_yaml(filepath)
        reloaded = BoundaryForcing.from_yaml(filepath)
        assert boundary_forcing == reloaded
    finally:
        os.remove(filepath)
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
def test_from_yaml_missing_boundary_forcing():
    """from_yaml must raise when the file holds a Grid section but no BoundaryForcing."""
    # YAML with only grid metadata — deliberately no BoundaryForcing section.
    yaml_content = textwrap.dedent(
        """\
        ---
        roms_tools_version: 0.0.0
        ---
        Grid:
          nx: 100
          ny: 100
          size_x: 1800
          size_y: 2400
          center_lon: -10
          center_lat: 61
          rot: -20
          topography_source: ETOPO5
          smooth_factor: 8
          hmin: 5.0
          rmax: 0.2
        """
    )

    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
        yaml_filepath = tmp_file.name
        tmp_file.write(yaml_content.encode())

    try:
        with pytest.raises(
            ValueError, match="No BoundaryForcing configuration found in the YAML file."
        ):
            BoundaryForcing.from_yaml(yaml_filepath)
    finally:
        os.remove(yaml_filepath)
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
import numpy as np
|
|
4
|
+
import xarray as xr
|
|
5
|
+
from roms_tools.setup.datasets import Dataset
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@pytest.fixture
def global_dataset():
    """Random global 1-degree dataset: 4 monthly snapshots x 10 depth levels."""
    lon = np.linspace(0, 359, 360)
    lat = np.linspace(-90, 90, 180)
    depth = np.linspace(0, 2000, 10)
    # Midnight on the first of January through April 2022.
    time = [np.datetime64(f"2022-{month:02d}-01T00:00:00") for month in range(1, 5)]
    values = np.random.rand(4, 10, 180, 360)
    return xr.Dataset(
        {"var": (["time", "depth", "latitude", "longitude"], values)},
        coords={
            "time": (["time"], time),
            "depth": (["depth"], depth),
            "latitude": (["latitude"], lat),
            "longitude": (["longitude"], lon),
        },
    )
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@pytest.fixture
def global_dataset_with_noon_times():
    """Like global_dataset but 2-D fields time-stamped at noon, with no depth axis."""
    lon = np.linspace(0, 359, 360)
    lat = np.linspace(-90, 90, 180)
    # Noon on the first of January through April 2022.
    time = [np.datetime64(f"2022-{month:02d}-01T12:00:00") for month in range(1, 5)]
    values = np.random.rand(4, 180, 360)
    return xr.Dataset(
        {"var": (["time", "latitude", "longitude"], values)},
        coords={
            "time": (["time"], time),
            "latitude": (["latitude"], lat),
            "longitude": (["longitude"], lon),
        },
    )
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@pytest.fixture
def global_dataset_with_multiple_times_per_day():
    """Global 2-D dataset carrying both a midnight and a noon record on each date."""
    lon = np.linspace(0, 359, 360)
    lat = np.linspace(-90, 90, 180)
    # 00:00 and 12:00 on the first of January through April 2022.
    time = [
        np.datetime64(f"2022-{month:02d}-01T{hour:02d}:00:00")
        for month in range(1, 5)
        for hour in (0, 12)
    ]
    values = np.random.rand(8, 180, 360)
    return xr.Dataset(
        {"var": (["time", "latitude", "longitude"], values)},
        coords={
            "time": (["time"], time),
            "latitude": (["latitude"], lat),
            "longitude": (["longitude"], lon),
        },
    )
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@pytest.fixture
def non_global_dataset():
    """Dataset covering only longitudes 0-180, i.e. not the whole globe."""
    lon = np.linspace(0, 180, 181)
    lat = np.linspace(-90, 90, 180)
    values = np.random.rand(180, 181)
    return xr.Dataset(
        {"var": (["latitude", "longitude"], values)},
        coords={"latitude": (["latitude"], lat), "longitude": (["longitude"], lon)},
    )
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@pytest.mark.parametrize(
    "data_fixture, expected_time_values",
    [
        ("global_dataset", [np.datetime64("2022-02-01T00:00:00")]),
        ("global_dataset_with_noon_times", [np.datetime64("2022-02-01T12:00:00")]),
        (
            "global_dataset_with_multiple_times_per_day",
            [
                np.datetime64("2022-02-01T00:00:00"),
                np.datetime64("2022-02-01T12:00:00"),
            ],
        ),
    ],
)
def test_select_times(data_fixture, expected_time_values, request):
    """Dataset must restrict the time axis to [start_time, end_time]."""
    start_time = datetime(2022, 2, 1)
    end_time = datetime(2022, 3, 1)

    # Resolve the fixture named by the parametrization.
    source_ds = request.getfixturevalue(data_fixture)

    # Write the fixture data to a temporary netCDF file.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        source_ds.to_netcdf(filepath)
    try:
        # Instantiate a Dataset object backed by the temporary file.
        dataset = Dataset(
            filename=filepath,
            var_names=["var"],
            start_time=start_time,
            end_time=end_time,
        )

        assert dataset.ds is not None
        assert len(dataset.ds.time) == len(expected_time_values)
        for expected_time in expected_time_values:
            assert expected_time in dataset.ds.time.values
    finally:
        os.remove(filepath)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
@pytest.mark.parametrize(
    "data_fixture, expected_time_values",
    [
        ("global_dataset", [np.datetime64("2022-02-01T00:00:00")]),
        ("global_dataset_with_noon_times", [np.datetime64("2022-02-01T12:00:00")]),
    ],
)
def test_select_times_no_end_time(data_fixture, expected_time_values, request):
    """With only start_time given, exactly one matching record must be selected."""
    start_time = datetime(2022, 2, 1)

    # Resolve the fixture named by the parametrization.
    source_ds = request.getfixturevalue(data_fixture)

    # Write the fixture data to a temporary netCDF file.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        source_ds.to_netcdf(filepath)
    try:
        # Instantiate a Dataset object backed by the temporary file.
        dataset = Dataset(filename=filepath, var_names=["var"], start_time=start_time)

        assert dataset.ds is not None
        assert len(dataset.ds.time) == len(expected_time_values)
        for expected_time in expected_time_values:
            assert expected_time in dataset.ds.time.values
    finally:
        os.remove(filepath)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def test_multiple_matching_times(global_dataset_with_multiple_times_per_day):
    """Omitting end_time must fail when start_time matches more than one record."""
    start_time = datetime(2022, 1, 1)

    # Write the fixture data to a temporary netCDF file.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        global_dataset_with_multiple_times_per_day.to_netcdf(filepath)
    try:
        # Construction must raise because two records fall on 2022-01-01.
        with pytest.raises(
            ValueError,
            match="There must be exactly one time matching the start_time. Found 2 matching times.",
        ):
            Dataset(filename=filepath, var_names=["var"], start_time=start_time)
    finally:
        os.remove(filepath)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def test_no_matching_times(global_dataset):
    """Construction must fail when the requested window contains no records."""
    # 2021 window — the fixture data only covers 2022.
    start_time = datetime(2021, 1, 1)
    end_time = datetime(2021, 2, 1)

    # Write the fixture data to a temporary netCDF file.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        global_dataset.to_netcdf(filepath)
    try:
        with pytest.raises(ValueError, match="No matching times found."):
            Dataset(
                filename=filepath,
                var_names=["var"],
                start_time=start_time,
                end_time=end_time,
            )
    finally:
        os.remove(filepath)
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def test_reverse_latitude_choose_subdomain_negative_depth(global_dataset):
    """
    Cover three Dataset behaviors: re-ordering a descending latitude axis,
    choose_subdomain (both straddling and not straddling the dateline), and
    convert_to_negative_depth.
    """
    start_time = datetime(2022, 1, 1)
    # All four dimensions map to their own names in the fixture file.
    dim_names = {
        "latitude": "latitude",
        "longitude": "longitude",
        "time": "time",
        "depth": "depth",
    }

    # Write the fixture (with latitude flipped to descending) to a temporary file.
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        global_dataset["latitude"] = global_dataset["latitude"][::-1]
        global_dataset.to_netcdf(filepath)
    try:
        dataset = Dataset(
            filename=filepath,
            var_names=["var"],
            dim_names=dict(dim_names),
            start_time=start_time,
        )

        # Latitude must come back in strictly ascending order.
        assert np.all(np.diff(dataset.ds["latitude"]) > 0)

        # Subdomain for a region that straddles the dateline.
        dataset.choose_subdomain(
            latitude_range=(-10, 10), longitude_range=(-10, 10), margin=1, straddle=True
        )
        assert -11 <= dataset.ds["latitude"].min() <= 11
        assert -11 <= dataset.ds["latitude"].max() <= 11
        assert -11 <= dataset.ds["longitude"].min() <= 11
        assert -11 <= dataset.ds["longitude"].max() <= 11

        # Fresh Dataset for a region that does not straddle the dateline.
        dataset = Dataset(
            filename=filepath,
            var_names=["var"],
            dim_names=dict(dim_names),
            start_time=start_time,
        )
        dataset.choose_subdomain(
            latitude_range=(-10, 10), longitude_range=(10, 20), margin=1, straddle=False
        )
        assert -11 <= dataset.ds["latitude"].min() <= 11
        assert -11 <= dataset.ds["latitude"].max() <= 11
        assert 9 <= dataset.ds["longitude"].min() <= 21
        assert 9 <= dataset.ds["longitude"].max() <= 21

        dataset.convert_to_negative_depth()
        assert (dataset.ds["depth"] <= 0).all()

    finally:
        os.remove(filepath)
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def test_check_if_global_with_global_dataset(global_dataset):
    """check_if_global must report True for full-globe longitude coverage."""
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        global_dataset.to_netcdf(filepath)
    try:
        dataset = Dataset(filename=filepath, var_names=["var"])
        assert dataset.check_if_global(dataset.ds)
    finally:
        os.remove(filepath)
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
def test_check_if_global_with_non_global_dataset(non_global_dataset):
    """check_if_global must report False for partial longitude coverage."""
    with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
        filepath = tmpfile.name
        non_global_dataset.to_netcdf(filepath)
    try:
        dataset = Dataset(filename=filepath, var_names=["var"])
        assert not dataset.check_if_global(dataset.ds)
    finally:
        os.remove(filepath)
|