anemoi-datasets 0.3.10__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anemoi/datasets/_version.py +2 -2
- anemoi/datasets/commands/inspect.py +6 -6
- anemoi/datasets/commands/scan.py +4 -4
- anemoi/datasets/compute/recentre.py +14 -9
- anemoi/datasets/create/config.py +1 -1
- anemoi/datasets/create/functions/__init__.py +3 -3
- anemoi/datasets/create/functions/filters/empty.py +4 -4
- anemoi/datasets/create/functions/filters/rename.py +6 -6
- anemoi/datasets/create/functions/filters/rotate_winds.py +6 -59
- anemoi/datasets/create/functions/filters/unrotate_winds.py +14 -64
- anemoi/datasets/create/functions/sources/accumulations.py +27 -15
- anemoi/datasets/create/functions/sources/constants.py +8 -4
- anemoi/datasets/create/functions/sources/empty.py +2 -2
- anemoi/datasets/create/functions/sources/forcings.py +3 -3
- anemoi/datasets/create/functions/sources/grib.py +4 -4
- anemoi/datasets/create/functions/sources/hindcasts.py +24 -11
- anemoi/datasets/create/functions/sources/mars.py +5 -5
- anemoi/datasets/create/functions/sources/netcdf.py +4 -4
- anemoi/datasets/create/functions/sources/source.py +3 -3
- anemoi/datasets/create/functions/sources/tendencies.py +7 -7
- anemoi/datasets/create/input.py +19 -19
- anemoi/datasets/create/loaders.py +35 -6
- anemoi/datasets/create/statistics/__init__.py +2 -1
- anemoi/datasets/create/utils.py +6 -5
- anemoi/datasets/data/dataset.py +4 -4
- anemoi/datasets/data/stores.py +13 -5
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/METADATA +8 -7
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/RECORD +32 -32
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/LICENSE +0 -0
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/WHEEL +0 -0
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/entry_points.txt +0 -0
- {anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/top_level.txt +0 -0
anemoi/datasets/_version.py
CHANGED

anemoi/datasets/commands/inspect.py
CHANGED

@@ -382,7 +382,7 @@ class NoVersion(Version):
@property
def last_date(self):
monthly = find(self.metadata, "monthly")
- time = max([int(t) for t in find(self.metadata["…
+ time = max([int(t) for t in find(self.metadata["earthkit-data"], "time")])
assert isinstance(time, int), (time, type(time))
if time > 100:
time = time // 100
@@ -390,7 +390,7 @@ class NoVersion(Version):
@property
def frequency(self):
- time = find(self.metadata["…
+ time = find(self.metadata["earthkit-data"], "time")
return 24 // len(time)
@property
@@ -444,9 +444,9 @@ class Version0_4(Version):
z = self.zarr
# for backward compatibility
- if "…
- …
- print(f"…
+ if "earthkit-data" in z.attrs:
+ ekd_version = z.attrs["earthkit-data"].get("versions", {}).get("earthkit-data", "unkwown")
+ print(f"earthkit-data version used to create this zarr: {ekd_version}. Not supported.")
return
version = z.attrs.get("version")
@@ -455,7 +455,7 @@ class Version0_4(Version):
print(" Cannot find metadata information about versions.")
else:
print(f"Zarr format (version {version})", end="")
- print(f" created by …
+ print(f" created by earthkit-data={versions.pop('earthkit-data')}", end="")
timestamp = z.attrs.get("creation_timestamp")
timestamp = datetime.datetime.fromisoformat(timestamp)
print(f" on {timestamp}", end="")
anemoi/datasets/commands/scan.py
CHANGED
@@ -3,7 +3,7 @@ import os
import sys
from collections import defaultdict
- import …
+ import earthkit.data as ekd
import tqdm
import yaml
@@ -50,9 +50,9 @@ class Scan(Command):
for path in tqdm.tqdm(paths, leave=False):
if not match(path):
continue
- for field in tqdm.tqdm(…
- dates.add(field.…
- mars = field.…
+ for field in tqdm.tqdm(ekd.from_source("file", path), leave=False):
+ dates.add(field.datetime()["valid_time"])
+ mars = field.metadata(namespace="mars")
keys = tuple(mars.get(k) for k in KEYS)
gribs[keys].add(path)
for k, v in mars.items():
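Note: the scan command switches from climetlab to earthkit-data for reading GRIB files. A minimal sketch of the new access pattern (the file name below is hypothetical):

    import earthkit.data as ekd

    # Iterate the fields of a GRIB file, collecting valid times and MARS keys,
    # mirroring what commands/scan.py does after the migration.
    for field in ekd.from_source("file", "data.grib"):
        valid_time = field.datetime()["valid_time"]    # datetime of the field
        mars = field.metadata(namespace="mars")        # dict-like view of the MARS keys
        print(valid_time, mars.get("param"), mars.get("levtype"))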
anemoi/datasets/compute/recentre.py
CHANGED

@@ -10,10 +10,10 @@
import logging
import numpy as np
- from …
- from …
+ from earthkit.data.core.temporary import temp_file
+ from earthkit.data.readers.grib.output import new_grib_output
- from anemoi.datasets.create.functions import …
+ from anemoi.datasets.create.functions import assert_is_fieldlist
LOG = logging.getLogger(__name__)
@@ -96,7 +96,7 @@ def recentre(
for i, centre_field in enumerate(centre):
param = centre_field.metadata("param")
- centre_field_as_mars = centre_field.…
+ centre_field_as_mars = centre_field.metadata(namespace="mars")
# load the centre field
centre_np = centre_field.to_numpy()
@@ -106,8 +106,13 @@ def recentre(
for j in range(n_numbers):
ensemble_field = members[i * n_numbers + j]
- ensemble_field_as_mars = ensemble_field.…
- check_compatible(…
+ ensemble_field_as_mars = ensemble_field.metadata(namespace="mars")
+ check_compatible(
+ centre_field,
+ ensemble_field,
+ centre_field_as_mars,
+ ensemble_field_as_mars,
+ )
members_np[j] = ensemble_field.to_numpy()
ensemble_field_as_mars = tuple(sorted(ensemble_field_as_mars.items()))
@@ -149,10 +154,10 @@ def recentre(
if output is not None:
return path
- from …
+ from earthkit.data import from_source
- ds = …
- …
+ ds = from_source("file", path)
+ assert_is_fieldlist(ds)
# save a reference to the tmp file so it is deleted
# only when the dataset is not used anymore
ds._tmp = tmp
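Note: recentre.py writes its output to a temporary GRIB file, reads it back with earthkit-data, and pins the temporary file to the resulting fieldlist so it is only removed once the fieldlist itself is no longer used. A minimal sketch of that pattern, assuming the GRIB fields have already been written to tmp.path:

    from earthkit.data import from_source
    from earthkit.data.core.temporary import temp_file

    tmp = temp_file()        # temporary file managed by earthkit-data
    path = tmp.path          # ... GRIB output is written to this path elsewhere ...

    ds = from_source("file", path)
    ds._tmp = tmp            # keep the temp file alive as long as ds is referenced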
anemoi/datasets/create/config.py
CHANGED
anemoi/datasets/create/functions/__init__.py
CHANGED

@@ -13,10 +13,10 @@ import importlib
import entrypoints
- def …
- from …
+ def assert_is_fieldlist(obj):
+ from earthkit.data.indexing.fieldlist import FieldList
- assert isinstance(obj, …
+ assert isinstance(obj, FieldList), type(obj)
def import_function(name, kind):
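Note: assert_is_fieldlist is a small guard used across the create pipeline; anything returned by a source or filter is expected to be an earthkit-data FieldList. Usage is just a guard call, in line with how the package itself uses it:

    from earthkit.data import from_source
    from anemoi.datasets.create.functions import assert_is_fieldlist

    ds = from_source("empty")
    assert_is_fieldlist(ds)   # AssertionError reporting the actual type otherwise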
anemoi/datasets/create/functions/filters/empty.py
CHANGED

@@ -7,10 +7,10 @@
# nor does it submit to any jurisdiction.
#
- import …
+ import earthkit.data as ekd
def execute(context, input, **kwargs):
- # …
- # So we can reference an earlier step in a function like '…
- return …
+ # Useful to create a pipeline that returns an empty result
+ # So we can reference an earlier step in a function like 'constants'
+ return ekd.from_source("empty")
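Note: returning ekd.from_source("empty") gives downstream steps an empty fieldlist they can concatenate onto; the grib, mars and netcdf sources further down build their results the same way. A minimal sketch, with hypothetical file names:

    import earthkit.data as ekd

    paths = ["2020-01.grib", "2020-02.grib"]   # hypothetical inputs
    ds = ekd.from_source("empty")              # start with an empty fieldlist
    for p in paths:
        ds = ds + ekd.from_source("file", p)   # fieldlists support concatenation with +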
anemoi/datasets/create/functions/filters/rename.py
CHANGED

@@ -9,7 +9,7 @@
import re
- from …
+ from earthkit.data.indexing.fieldlist import FieldArray
class RenamedFieldMapping:
@@ -26,8 +26,8 @@ class RenamedFieldMapping:
self.what = what
self.renaming = renaming
- def metadata(self, key):
- value = self.field.metadata(key)
+ def metadata(self, key, **kwargs):
+ value = self.field.metadata(key, **kwargs)
if key == self.what:
return self.renaming.get(value, value)
return value
@@ -48,10 +48,10 @@ class RenamedFieldFormat:
self.format = format
self.bits = re.findall(r"{(\w+)}", format)
- def metadata(self, key):
- value = self.field.metadata(key)
+ def metadata(self, key, **kwargs):
+ value = self.field.metadata(key, **kwargs)
if "{" + key + "}" in self.format:
- bits = {b: self.field.metadata(b) for b in self.bits}
+ bits = {b: self.field.metadata(b, **kwargs) for b in self.bits}
return self.format.format(**bits)
return value
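Note: the rename wrappers now accept and forward keyword arguments, so calls such as metadata(..., default=...) made by downstream earthkit-data-style code pass through the proxy to the real field. A reduced sketch of the mapping wrapper; the __getattr__ delegation and the example renaming are assumptions added here for completeness, not part of the diff:

    class RenamedFieldMapping:
        """Proxy a field, renaming the value of one metadata key (e.g. 'param')."""

        def __init__(self, field, what, renaming):
            self.field = field
            self.what = what          # the metadata key whose value is renamed
            self.renaming = renaming  # hypothetical example: {"2t": "t2m"}

        def metadata(self, key, **kwargs):
            value = self.field.metadata(key, **kwargs)   # kwargs forwarded to the real field
            if key == self.what:
                return self.renaming.get(value, value)
            return value

        def __getattr__(self, name):
            # assumed delegation: everything else goes to the wrapped field
            return getattr(self.field, name)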
anemoi/datasets/create/functions/filters/rotate_winds.py
CHANGED

@@ -9,61 +9,8 @@
from collections import defaultdict
- from …
-
-
- def rotate_winds(lats, lons, x_wind, y_wind, source_projection, target_projection):
- """Code provided by MetNO"""
- import numpy as np
- import pyproj
-
- if source_projection == target_projection:
- return x_wind, x_wind
-
- source_projection = pyproj.Proj(source_projection)
- target_projection = pyproj.Proj(target_projection)
-
- transformer = pyproj.transformer.Transformer.from_proj(source_projection, target_projection)
-
- # To compute the new vector components:
- # 1) perturb each position in the direction of the winds
- # 2) convert the perturbed positions into the new coordinate system
- # 3) measure the new x/y components.
- #
- # A complication occurs when using the longlat "projections", since this is not a cartesian grid
- # (i.e. distances in each direction is not consistent), we need to deal with the fact that the
- # width of a longitude varies with latitude
- orig_speed = np.sqrt(x_wind**2 + y_wind**2)
-
- x0, y0 = source_projection(lons, lats)
-
- if source_projection.name != "longlat":
- x1 = x0 + x_wind
- y1 = y0 + y_wind
- else:
- # Reduce the perturbation, since x_wind and y_wind are in meters, which would create
- # large perturbations in lat, lon. Also, deal with the fact that the width of longitude
- # varies with latitude.
- factor = 3600000.0
- x1 = x0 + x_wind / factor / np.cos(np.deg2rad(lats))
- y1 = y0 + y_wind / factor
-
- X0, Y0 = transformer.transform(x0, y0)
- X1, Y1 = transformer.transform(x1, y1)
-
- new_x_wind = X1 - X0
- new_y_wind = Y1 - Y0
- if target_projection.name == "longlat":
- new_x_wind *= np.cos(np.deg2rad(lats))
-
- if target_projection.name == "longlat" or source_projection.name == "longlat":
- # Ensure the wind speed is not changed (which might not the case since the units in longlat
- # is degrees, not meters)
- curr_speed = np.sqrt(new_x_wind**2 + new_y_wind**2)
- new_x_wind *= orig_speed / curr_speed
- new_y_wind *= orig_speed / curr_speed
-
- return new_x_wind, new_y_wind
+ from earthkit.data.indexing.fieldlist import FieldArray
+ from earthkit.geo.rotate import rotate_vector
class NewDataField:
@@ -94,7 +41,7 @@ def execute(
wind_pairs = defaultdict(dict)
for f in input:
- key = f.…
+ key = f.metadata(namespace="mars")
param = key.pop("param")
if param not in wind_params:
@@ -118,11 +65,11 @@ def execute(
assert x.grid_mapping == y.grid_mapping
lats, lons = x.grid_points()
- x_new, y_new = …
+ x_new, y_new = rotate_vector(
lats,
lons,
- x.to_numpy(…
- y.to_numpy(…
+ x.to_numpy(flatten=True),
+ y.to_numpy(flatten=True),
(source_projection if source_projection is not None else CRS.from_cf(x.grid_mapping)),
target_projection,
)
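Note: the hand-written MetNO projection code above is replaced by earthkit.geo.rotate.rotate_vector. A sketch of the call as the new filter uses it; the argument order is taken from the diff and the projections shown here are hypothetical, so the exact earthkit-geo signature should be checked against the installed version:

    import numpy as np
    from earthkit.geo.rotate import rotate_vector

    lats = np.array([60.0, 61.0])     # flattened grid-point latitudes
    lons = np.array([10.0, 11.0])
    x_wind = np.array([1.0, 2.0])     # wind components on the source grid
    y_wind = np.array([0.5, 0.5])

    source_projection = "+proj=lcc +lat_1=63 +lat_2=63 +lat_0=63 +lon_0=15"   # hypothetical
    target_projection = "+proj=longlat"

    x_new, y_new = rotate_vector(lats, lons, x_wind, y_wind, source_projection, target_projection)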
anemoi/datasets/create/functions/filters/unrotate_winds.py
CHANGED

@@ -9,60 +9,9 @@
from collections import defaultdict
- import numpy as np
- from …
-
-
- def normalise(x):
- return max(min(x, 1.0), -1.0)
-
-
- def normalise_longitude(lon, minimum):
- while lon < minimum:
- lon += 360
-
- while lon >= minimum + 360:
- lon -= 360
-
- return lon
-
-
- def rotate_winds(
- lats,
- lons,
- raw_lats,
- raw_lons,
- x_wind,
- y_wind,
- south_pole_latitude,
- south_pole_longitude,
- south_pole_rotation_angle=0,
- ):
- # Code from MIR
- assert south_pole_rotation_angle == 0
- C = np.deg2rad(90 - south_pole_latitude)
- cos_C = np.cos(C)
- sin_C = np.sin(C)
-
- new_x = np.zeros_like(x_wind)
- new_y = np.zeros_like(y_wind)
-
- for i, (vx, vy, lat, lon, raw_lat, raw_lon) in enumerate(zip(x_wind, y_wind, lats, lons, raw_lats, raw_lons)):
- lonRotated = south_pole_longitude - lon
- lon_rotated = normalise_longitude(lonRotated, -180)
- lon_unrotated = raw_lon
-
- a = np.deg2rad(lon_rotated)
- b = np.deg2rad(lon_unrotated)
- q = 1 if (sin_C * lon_rotated < 0.0) else -1.0  # correct quadrant
-
- cos_c = normalise(np.cos(a) * np.cos(b) + np.sin(a) * np.sin(b) * cos_C)
- sin_c = q * np.sqrt(1.0 - cos_c * cos_c)
-
- new_x[i] = cos_c * vx + sin_c * vy
- new_y[i] = -sin_c * vx + cos_c * vy
-
- return new_x, new_y
+ # import numpy as np
+ from earthkit.data.indexing.fieldlist import FieldArray
+ from earthkit.geo.rotate import unrotate_vector
class NewDataField:
@@ -85,7 +34,7 @@ def execute(context, input, u, v):
wind_pairs = defaultdict(dict)
for f in input:
- key = f.…
+ key = f.metadata(namespace="mars")
param = key.pop("param")
if param not in wind_params:
@@ -107,18 +56,19 @@ def execute(context, input, u, v):
y = pairs[v]
lats, lons = x.grid_points()
- raw_lats, raw_longs = x.…
+ raw_lats, raw_longs = x.grid_points_unrotated()
assert x.rotation == y.rotation
- u_new, v_new = …
+ u_new, v_new = unrotate_vector(
lats,
lons,
- …
- …
- x.…
- …
- …
+ x.to_numpy(flatten=True),
+ y.to_numpy(flatten=True),
+ *x.rotation[:2],
+ south_pole_rotation_angle=x.rotation[2],
+ lat_unrotated=raw_lats,
+ lon_unrotated=raw_longs,
)
result.append(NewDataField(x, u_new))
@@ -128,9 +78,9 @@ def execute(context, input, u, v):
if __name__ == "__main__":
- from …
+ from earthkit.data import from_source
- source = …
+ source = from_source(
"mars",
date=-1,
param="10u/10v",
anemoi/datasets/create/functions/sources/accumulations.py
CHANGED

@@ -11,11 +11,11 @@ import logging
import warnings
from copy import deepcopy
- import …
+ import earthkit.data as ekd
import numpy as np
- from …
- from …
- from …
+ from earthkit.data.core.temporary import temp_file
+ from earthkit.data.readers.grib.output import new_grib_output
+ from earthkit.data.utils.availability import Availability
from anemoi.datasets.create.utils import to_datetime_list
@@ -26,7 +26,7 @@ LOG = logging.getLogger(__name__)
def member(field):
# Bug in eccodes has number=0 randomly
- number = field.metadata("number")
+ number = field.metadata("number", default=0)
if number is None:
number = 0
return number
@@ -54,16 +54,25 @@ class Accumulation:
def check(self, field):
if self._check is None:
- self._check = field.…
+ self._check = field.metadata(namespace="mars")
- assert self.param == field.metadata("param"), (…
- …
- …
+ assert self.param == field.metadata("param"), (
+ self.param,
+ field.metadata("param"),
+ )
+ assert self.date == field.metadata("date"), (
+ self.date,
+ field.metadata("date"),
+ )
+ assert self.time == field.metadata("time"), (
+ self.time,
+ field.metadata("time"),
+ )
assert self.number == member(field), (self.number, member(field))
return
- mars = field.…
+ mars = field.metadata(namespace="mars")
keys1 = sorted(self._check.keys())
keys2 = sorted(mars.keys())
@@ -196,7 +205,11 @@ class AccumulationFromLastStep(Accumulation):
def compute(self, values, startStep, endStep):
- assert endStep - startStep == self.frequency, (…
+ assert endStep - startStep == self.frequency, (
+ startStep,
+ endStep,
+ self.frequency,
+ )
if self.startStep is None:
self.startStep = startStep
@@ -307,14 +320,13 @@ def compute_accumulations(
)
compressed = Availability(requests)
- ds = …
+ ds = ekd.from_source("empty")
for r in compressed.iterate():
request.update(r)
if context.use_grib_paramid and "param" in request:
request = use_grib_paramid(request)
print("🌧️", request)
-
- ds = ds + cml.load_source("mars", **request)
+ ds = ds + ekd.from_source("mars", **request)
accumulations = {}
for a in [AccumulationClass(out, frequency=frequency, **r) for r in requests]:
@@ -341,7 +353,7 @@ def compute_accumulations(
out.close()
- ds = …
+ ds = ekd.from_source("file", path)
assert len(ds) / len(param) / len(number) == len(dates), (
len(ds),
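Note: member() now asks earthkit-data for the ensemble number with an explicit default, so deterministic fields (which carry no "number" key) and the eccodes quirk that sometimes reports number=0 both resolve to member 0. The helper as it reads after the change, with comments added:

    def member(field):
        # "number" may be absent (deterministic data) or spuriously 0 (eccodes bug);
        # default=0 covers the missing-key case, the None check covers the rest
        number = field.metadata("number", default=0)
        if number is None:
            number = 0
        return number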
anemoi/datasets/create/functions/sources/constants.py
CHANGED

@@ -6,15 +6,19 @@
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
- from …
+ from earthkit.data import from_source
def constants(context, dates, template, param):
from warnings import warn
- warn(…
- …
- …
+ warn(
+ "The source `constants` is deprecated, use `forcings` instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ context.trace("✅", f"from_source(constants, {template}, {param}")
+ return from_source("forcings", source_or_dataset=template, date=dates, param=param)
execute = constants
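Note: `constants` stays available as a deprecated alias of `forcings`; the warning uses stacklevel=2 so it points at the recipe that called the source rather than at this module. The idiom in isolation, with a stand-in for the real source:

    import warnings

    def forcings(*args, **kwargs):          # stand-in for the real forcings source
        return ("forcings", args, kwargs)

    def constants(*args, **kwargs):
        warnings.warn(
            "The source `constants` is deprecated, use `forcings` instead.",
            DeprecationWarning,
            stacklevel=2,   # attribute the warning to the caller, not to this wrapper
        )
        return forcings(*args, **kwargs)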
anemoi/datasets/create/functions/sources/forcings.py
CHANGED

@@ -6,12 +6,12 @@
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
- from …
+ from earthkit.data import from_source
def forcings(context, dates, template, param):
- context.trace("✅", f"…
- return …
+ context.trace("✅", f"from_source(forcings, {template}, {param}")
+ return from_source("forcings", source_or_dataset=template, date=dates, param=param)
execute = forcings
anemoi/datasets/create/functions/sources/grib.py
CHANGED

@@ -10,8 +10,8 @@
import glob
- from …
- from …
+ from earthkit.data import from_source
+ from earthkit.data.utils.patterns import Pattern
def check(ds, paths, **kwargs):
@@ -33,7 +33,7 @@ def _expand(paths):
def execute(context, dates, path, *args, **kwargs):
given_paths = path if isinstance(path, list) else [path]
- ds = …
+ ds = from_source("empty")
dates = [d.isoformat() for d in dates]
for path in given_paths:
@@ -45,7 +45,7 @@ def execute(context, dates, path, *args, **kwargs):
for path in _expand(paths):
context.trace("📁", "PATH", path)
- s = …
+ s = from_source("file", path)
s = s.sel(valid_datetime=dates, **kwargs)
ds = ds + s
anemoi/datasets/create/functions/sources/hindcasts.py
CHANGED

@@ -10,11 +10,11 @@ import datetime
import warnings
from copy import deepcopy
- import …
+ import earthkit.data as ekd
import numpy as np
- from …
- from …
- from …
+ from earthkit.data.core.temporary import temp_file
+ from earthkit.data.readers.grib.output import new_grib_output
+ from earthkit.utils.availability import Availability
from anemoi.datasets.create.functions.sources.mars import mars
@@ -53,9 +53,18 @@ class Accumulation:
if self._check is None:
self._check = field.as_mars()
- assert self.param == field.metadata("param"), (…
- …
- …
+ assert self.param == field.metadata("param"), (
+ self.param,
+ field.metadata("param"),
+ )
+ assert self.date == field.metadata("date"), (
+ self.date,
+ field.metadata("date"),
+ )
+ assert self.time == field.metadata("time"), (
+ self.time,
+ field.metadata("time"),
+ )
assert self.number == member(field), (self.number, member(field))
return
@@ -189,7 +198,11 @@ class AccumulationFromLastStep(Accumulation):
def compute(self, values, startStep, endStep):
- assert endStep - startStep == self.frequency, (…
+ assert endStep - startStep == self.frequency, (
+ startStep,
+ endStep,
+ self.frequency,
+ )
if self.startStep is None:
self.startStep = startStep
@@ -299,11 +312,11 @@ def compute_accumulations(
)
compressed = Availability(requests)
- ds = …
+ ds = ekd.from_source("empty")
for r in compressed.iterate():
request.update(r)
print("🌧️", request)
- ds = ds + …
+ ds = ds + ekd.from_source("mars", **request)
accumulations = {}
for a in [AccumulationClass(out, frequency=frequency, **r) for r in requests]:
@@ -330,7 +343,7 @@ def compute_accumulations(
out.close()
- ds = …
+ ds = ekd.from_source("file", path)
assert len(ds) / len(param) / len(number) == len(dates), (
len(ds),
anemoi/datasets/create/functions/sources/mars.py
CHANGED

@@ -10,8 +10,8 @@ import datetime
from copy import deepcopy
from anemoi.utils.humanize import did_you_mean
- from …
- from …
+ from earthkit.data import from_source
+ from earthkit.data.utils.availability import Availability
from anemoi.datasets.create.utils import to_datetime_list
@@ -176,7 +176,7 @@ def mars(context, dates, *requests, date_key="date", **kwargs):
requests = [kwargs]
requests = factorise_requests(dates, *requests, date_key=date_key)
- ds = …
+ ds = from_source("empty")
for r in requests:
r = {k: v for k, v in r.items() if v != ("-",)}
@@ -184,14 +184,14 @@ def mars(context, dates, *requests, date_key="date", **kwargs):
r = use_grib_paramid(r)
if DEBUG:
- context.trace("✅", f"…
+ context.trace("✅", f"from_source(mars, {r}")
for k, v in r.items():
if k not in MARS_KEYS:
raise ValueError(
f"⚠️ Unknown key {k}={v} in MARS request. Did you mean '{did_you_mean(k, MARS_KEYS)}' ?"
)
- ds = ds + …
+ ds = ds + from_source("mars", **r)
return ds
anemoi/datasets/create/functions/sources/netcdf.py
CHANGED

@@ -9,8 +9,8 @@
import glob
- from …
- from …
+ from earthkit.data import from_source
+ from earthkit.data.utils.patterns import Pattern
def _expand(paths):
@@ -44,7 +44,7 @@ def load_netcdfs(emoji, what, context, dates, path, *args, **kwargs):
given_paths = path if isinstance(path, list) else [path]
dates = [d.isoformat() for d in dates]
- ds = …
+ ds = from_source("empty")
for path in given_paths:
paths = Pattern(path, ignore_missing_keys=True).substitute(*args, date=dates, **kwargs)
@@ -53,7 +53,7 @@ def load_netcdfs(emoji, what, context, dates, path, *args, **kwargs):
for path in _expand(paths):
context.trace(emoji, what.upper(), path)
- s = …
+ s = from_source("opendap", path)
s = s.sel(
valid_datetime=dates,
param=kwargs["param"],
anemoi/datasets/create/functions/sources/source.py
CHANGED

@@ -6,7 +6,7 @@
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
- from …
+ from earthkit.data import from_source
from anemoi.datasets.create.utils import to_datetime_list
@@ -15,12 +15,12 @@ DEBUG = True
def source(context, dates, **kwargs):
name = kwargs.pop("name")
- context.trace("✅", f"…
+ context.trace("✅", f"from_source({name}, {dates}, {kwargs}")
if kwargs["date"] == "$from_dates":
kwargs["date"] = list({d.strftime("%Y%m%d") for d in dates})
if kwargs["time"] == "$from_dates":
kwargs["time"] = list({d.strftime("%H%M") for d in dates})
- return …
+ return from_source(name, **kwargs)
execute = source
anemoi/datasets/create/functions/sources/tendencies.py
CHANGED

@@ -9,10 +9,10 @@
import datetime
from collections import defaultdict
- from …
- from …
+ from earthkit.data.core.temporary import temp_file
+ from earthkit.data.readers.grib.output import new_grib_output
- from anemoi.datasets.create.functions import …
+ from anemoi.datasets.create.functions import assert_is_fieldlist
from anemoi.datasets.create.utils import to_datetime_list
@@ -36,7 +36,7 @@ def normalise_time_delta(t):
def group_by_field(ds):
d = defaultdict(list)
for field in ds.order_by("valid_datetime"):
- m = field.…
+ m = field.metadata(namespace="mars")
for k in ("date", "time", "step"):
m.pop(k, None)
keys = tuple(m.items())
@@ -103,10 +103,10 @@ def tendencies(dates, time_increment, **kwargs):
out.close()
- from …
+ from earthkit.data import from_source
- ds = …
- …
+ ds = from_source("file", path)
+ assert_is_fieldlist(ds)
# save a reference to the tmp file so it is deleted
# only when the dataset is not used anymore
ds._tmp = tmp
anemoi/datasets/create/input.py
CHANGED
@@ -15,8 +15,8 @@ from functools import cached_property
from functools import wraps
import numpy as np
- from …
- from …
+ from earthkit.data.core.fieldlist import FieldList
+ from earthkit.data.core.order import build_remapping
from anemoi.datasets.dates import Dates
@@ -75,18 +75,18 @@ def is_function(name, kind):
return False
- def …
+ def assert_fieldlist(method):
@wraps(method)
def wrapper(self, *args, **kwargs):
result = method(self, *args, **kwargs)
- assert isinstance(result, …
+ assert isinstance(result, FieldList), type(result)
return result
return wrapper
- def …
- assert isinstance(obj, …
+ def assert_is_fieldlist(obj):
+ assert isinstance(obj, FieldList), type(obj)
def _data_request(data):
@@ -101,12 +101,12 @@ def _data_request(data):
continue
if date is None:
- date = field.…
+ date = field.datetime()["valid_time"]
- if field.…
+ if field.datetime()["valid_time"] != date:
continue
- as_mars = field.…
+ as_mars = field.metadata(namespace="mars")
step = as_mars.get("step")
levtype = as_mars.get("levtype", "sfc")
param = as_mars["param"]
@@ -399,12 +399,12 @@ class EmptyResult(Result):
super().__init__(context, action_path + ["empty"], dates)
@cached_property
- @…
+ @assert_fieldlist
@trace_datasource
def datasource(self):
- from …
+ from earthkit.data import from_source
- return …
+ return from_source("empty")
@property
def variables(self):
@@ -423,7 +423,7 @@ class FunctionResult(Result):
return f"{self.action.name}({shorten(self.dates)})"
@cached_property
- @…
+ @assert_fieldlist
@notify_result
@trace_datasource
def datasource(self):
@@ -452,7 +452,7 @@ class JoinResult(Result):
self.results = [r for r in results if not r.empty]
@cached_property
- @…
+ @assert_fieldlist
@notify_result
@trace_datasource
def datasource(self):
@@ -504,11 +504,11 @@ class UnShiftResult(Result):
return f"{self.action.delta}({shorten(self.dates)})"
@cached_property
- @…
+ @assert_fieldlist
@notify_result
@trace_datasource
def datasource(self):
- from …
+ from earthkit.data.indexing.fieldlist import FieldArray
class DateShiftedField:
def __init__(self, field, delta):
@@ -615,7 +615,7 @@ class StepAction(Action):
class StepFunctionResult(StepResult):
@cached_property
- @…
+ @assert_fieldlist
@notify_result
@trace_datasource
def datasource(self):
@@ -638,7 +638,7 @@ class StepFunctionResult(StepResult):
class FilterStepResult(StepResult):
@property
@notify_result
- @…
+ @assert_fieldlist
@trace_datasource
def datasource(self):
ds = self.upstream_result.datasource
@@ -665,7 +665,7 @@ class ConcatResult(Result):
self.results = [r for r in results if not r.empty]
@cached_property
- @…
+ @assert_fieldlist
@notify_result
@trace_datasource
def datasource(self):
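Note: assert_fieldlist is the decorator applied to each datasource property above; it simply checks that every step of the input pipeline hands back an earthkit-data FieldList. As it appears in the diff, with a comment added:

    from functools import wraps
    from earthkit.data.core.fieldlist import FieldList

    def assert_fieldlist(method):
        @wraps(method)
        def wrapper(self, *args, **kwargs):
            result = method(self, *args, **kwargs)
            # fail loudly, reporting the offending type, if a step returns something else
            assert isinstance(result, FieldList), type(result)
            return result
        return wrapper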
anemoi/datasets/create/loaders.py
CHANGED

@@ -176,7 +176,7 @@ class DatasetHandlerWithStatistics(GenericDatasetHandler):
class Loader(DatasetHandlerWithStatistics):
def build_input(self):
- from …
+ from earthkit.data.core.order import build_remapping
builder = build_input(
self.main_config.input,
@@ -581,7 +581,16 @@ class StatisticsAdder(DatasetHandlerWithStatistics):
if not all(self.registry.get_flags(sync=False)):
raise Exception(f"❗Zarr {self.path} is not fully built, not writting statistics into dataset.")
- for k in […
+ for k in [
+ "mean",
+ "stdev",
+ "minimum",
+ "maximum",
+ "sums",
+ "squares",
+ "count",
+ "has_nans",
+ ]:
self._add_dataset(name=k, array=stats[k])
self.registry.add_to_history("compute_statistics_end")
@@ -643,8 +652,16 @@ class GenericAdditions(GenericDatasetHandler):
for k in ["minimum", "maximum", "sums", "squares", "count", "has_nans"]:
agg[k][i, ...] = stats[k]
- assert len(found) + len(missing) == len(self.dates), (…
- …
+ assert len(found) + len(missing) == len(self.dates), (
+ len(found),
+ len(missing),
+ len(self.dates),
+ )
+ assert found.union(missing) == set(self.dates), (
+ found,
+ missing,
+ set(self.dates),
+ )
if len(ifound) < 2:
LOG.warn(f"Not enough data found in {self.path} to compute {self.__class__.__name__}. Skipped.")
@@ -656,7 +673,10 @@ class GenericAdditions(GenericDatasetHandler):
agg[k] = agg[k][mask, ...]
for k in ["minimum", "maximum", "sums", "squares", "count", "has_nans"]:
- assert agg[k].shape == agg["count"].shape, (…
+ assert agg[k].shape == agg["count"].shape, (
+ agg[k].shape,
+ agg["count"].shape,
+ )
minimum = np.nanmin(agg["minimum"], axis=0)
maximum = np.nanmax(agg["maximum"], axis=0)
@@ -699,7 +719,16 @@ class GenericAdditions(GenericDatasetHandler):
self.tmp_storage.delete()
def _write(self, summary):
- for k in […
+ for k in [
+ "mean",
+ "stdev",
+ "minimum",
+ "maximum",
+ "sums",
+ "squares",
+ "count",
+ "has_nans",
+ ]:
name = self.final_storage_name(k)
self._add_dataset(name=name, array=summary[k])
self.registry.add_to_history(f"compute_statistics_{self.__class__.__name__.lower()}_end")
anemoi/datasets/create/statistics/__init__.py
CHANGED

@@ -71,7 +71,7 @@ def to_datetime(date):
if isinstance(date, str):
return np.datetime64(date)
if isinstance(date, datetime.datetime):
- return np.datetime64(date)
+ return np.datetime64(date, "s")
return date
@@ -242,6 +242,7 @@ class StatAggregator:
found = set()
offset = 0
+
for _, _dates, stats in self.owner._gather_data():
assert isinstance(stats, dict), stats
assert stats["minimum"].shape[0] == len(_dates), (stats["minimum"].shape, len(_dates))
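Note: for datetime.datetime inputs, to_datetime now builds a second-precision datetime64 instead of the default microsecond precision, presumably to match the precision used for the dataset dates. For example:

    import datetime
    import numpy as np

    d = datetime.datetime(2020, 1, 1, 6, 0)
    np.datetime64(d)         # datetime64 with microsecond precision ('us')
    np.datetime64(d, "s")    # datetime64 truncated to second precision, as used here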
anemoi/datasets/create/utils.py
CHANGED
@@ -13,8 +13,8 @@ from contextlib import contextmanager
import numpy as np
import yaml
- from …
- from …
+ from earthkit.data import settings
+ from earthkit.data.utils.humanize import seconds  # noqa: F401
from tqdm.auto import tqdm
@@ -27,7 +27,8 @@ def cache_context(dirname):
return no_cache_context()
os.makedirs(dirname, exist_ok=True)
- return settings.temporary("cache-directory", dirname)
+ # return settings.temporary("cache-directory", dirname)
+ return settings.temporary({"cache-policy": "user", "user-cache-directory": dirname})
def bytes(n):
@@ -51,13 +52,13 @@ def bytes(n):
def to_datetime_list(*args, **kwargs):
- from …
+ from earthkit.data.utils.dates import to_datetime_list as to_datetime_list_
return to_datetime_list_(*args, **kwargs)
def to_datetime(*args, **kwargs):
- from …
+ from earthkit.data.utils.dates import to_datetime as to_datetime_
return to_datetime_(*args, **kwargs)
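Note: earthkit-data configures caching through a cache policy plus a user cache directory rather than a single "cache-directory" setting, and settings.temporary takes a dict of settings, as cache_context now does above. A sketch of the context-manager form, assuming earthkit-data's settings API and a hypothetical directory:

    from earthkit.data import settings

    # temporarily route earthkit-data's cache into a job-specific directory
    with settings.temporary({"cache-policy": "user", "user-cache-directory": "/tmp/anemoi-cache"}):
        ...  # from_source() calls made here cache under /tmp/anemoi-cache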
anemoi/datasets/data/dataset.py
CHANGED
@@ -187,8 +187,8 @@ class Dataset:
specific=self.metadata_specific(),
frequency=self.frequency,
variables=self.variables,
- start_date=self.dates[0],
- end_date=self.dates[-1],
+ start_date=self.dates[0].astype(str),
+ end_date=self.dates[-1].astype(str),
)
)
@@ -200,8 +200,8 @@ class Dataset:
variables=self.variables,
shape=self.shape,
frequency=self.frequency,
- start_date=self.dates[0],
- end_date=self.dates[-1],
+ start_date=self.dates[0].astype(str),
+ end_date=self.dates[-1].astype(str),
**kwargs,
)
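Note: self.dates is a numpy datetime64 array, and datetime64 scalars are not directly serialisable to JSON/YAML, so the start and end dates are converted to ISO-style strings before going into the metadata. For example:

    import numpy as np

    dates = np.array(["2020-01-01T00:00:00", "2020-12-31T18:00:00"], dtype="datetime64[s]")
    dates[0].astype(str)     # '2020-01-01T00:00:00'
    dates[-1].astype(str)    # '2020-12-31T18:00:00'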
anemoi/datasets/data/stores.py
CHANGED
@@ -58,15 +58,17 @@ class HTTPStore(ReadOnlyStore):
class S3Store(ReadOnlyStore):
- """We write our own S3Store because the one used by zarr (fsspec)…
+ """We write our own S3Store because the one used by zarr (fsspec)
+ does not play well with fork() and multiprocessing. Also, we get
+ to control the s3 client.
+ """
def __init__(self, url):
- import …
+ from anemoi.utils.s3 import s3_client
- self.bucket, self.key = url…
+ _, _, self.bucket, self.key = url.split("/", 3)
- …
- self.s3 = boto3.Session(profile_name=None).client("s3")
+ self.s3 = s3_client(self.bucket)
def __getitem__(self, key):
try:
@@ -111,6 +113,12 @@ def open_zarr(path, dont_fail=False, cache=None):
if DEBUG_ZARR_LOADING:
if isinstance(store, str):
+ import os
+
+ if not os.path.isdir(store):
+ raise NotImplementedError(
+ "DEBUG_ZARR_LOADING is only implemented for DirectoryStore. Please disable it for other backends."
+ )
store = zarr.storage.DirectoryStore(store)
store = DebugStore(store)
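Note: the rewritten S3Store derives the bucket and key from the store URL and delegates client creation to anemoi.utils.s3, instead of building a boto3 session itself. The split relies on the s3://bucket/key layout:

    url = "s3://my-bucket/some/prefix/dataset.zarr"   # hypothetical URL
    _, _, bucket, key = url.split("/", 3)
    assert (bucket, key) == ("my-bucket", "some/prefix/dataset.zarr")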
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: anemoi-datasets
- Version: 0.…
+ Version: 0.4.0
Summary: A package to hold various functions to support training of ML models on ECMWF data.
Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
License: Apache License
@@ -223,7 +223,7 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.9
License-File: LICENSE
- Requires-Dist: anemoi-utils[provenance] >=0.3.…
+ Requires-Dist: anemoi-utils[provenance] >=0.3.5
Requires-Dist: numpy
Requires-Dist: pyyaml
Requires-Dist: semantic-version
@@ -231,7 +231,8 @@ Requires-Dist: tqdm
Requires-Dist: zarr <=2.17
Provides-Extra: all
Requires-Dist: boto3 ; extra == 'all'
- Requires-Dist: …
+ Requires-Dist: earthkit-data[mars] >=0.9 ; extra == 'all'
+ Requires-Dist: earthkit-geo >=0.2 ; extra == 'all'
Requires-Dist: earthkit-meteo ; extra == 'all'
Requires-Dist: ecmwflibs >=0.6.3 ; extra == 'all'
Requires-Dist: entrypoints ; extra == 'all'
@@ -239,14 +240,16 @@ Requires-Dist: pyproj ; extra == 'all'
Requires-Dist: requests ; extra == 'all'
Requires-Dist: s3fs ; extra == 'all'
Provides-Extra: create
- Requires-Dist: …
+ Requires-Dist: earthkit-data[mars] >=0.9 ; extra == 'create'
+ Requires-Dist: earthkit-geo >=0.2 ; extra == 'create'
Requires-Dist: earthkit-meteo ; extra == 'create'
Requires-Dist: ecmwflibs >=0.6.3 ; extra == 'create'
Requires-Dist: entrypoints ; extra == 'create'
Requires-Dist: pyproj ; extra == 'create'
Provides-Extra: dev
Requires-Dist: boto3 ; extra == 'dev'
- Requires-Dist: …
+ Requires-Dist: earthkit-data[mars] >=0.9 ; extra == 'dev'
+ Requires-Dist: earthkit-geo >=0.2 ; extra == 'dev'
Requires-Dist: earthkit-meteo ; extra == 'dev'
Requires-Dist: ecmwflibs >=0.6.3 ; extra == 'dev'
Requires-Dist: entrypoints ; extra == 'dev'
@@ -257,13 +260,11 @@ Requires-Dist: pytest ; extra == 'dev'
Requires-Dist: requests ; extra == 'dev'
Requires-Dist: s3fs ; extra == 'dev'
Requires-Dist: sphinx ; extra == 'dev'
- Requires-Dist: sphinx-argparse ; extra == 'dev'
Requires-Dist: sphinx-rtd-theme ; extra == 'dev'
Provides-Extra: docs
Requires-Dist: nbsphinx ; extra == 'docs'
Requires-Dist: pandoc ; extra == 'docs'
Requires-Dist: sphinx ; extra == 'docs'
- Requires-Dist: sphinx-argparse ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
Provides-Extra: remote
Requires-Dist: boto3 ; extra == 'remote'
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/RECORD
CHANGED

@@ -1,53 +1,53 @@
anemoi/datasets/__init__.py,sha256=Z1gqZWhecLcT0RZQqYBLlz01MUlUZd0kWEj_RavbITM,782
anemoi/datasets/__main__.py,sha256=cLA2PidDTOUHaDGzd0_E5iioKYNe-PSTv567Y2fuwQk,723
- anemoi/datasets/_version.py,sha256=…
+ anemoi/datasets/_version.py,sha256=j90u3VVU4UrJf1fgMUhaZarHK_Do2XGYXr-vZvOFzVo,411
anemoi/datasets/grids.py,sha256=3YBMMJodgYhavarXPAlMZHaMtDT9v2IbTmAXZTqf8Qo,8481
anemoi/datasets/commands/__init__.py,sha256=qAybFZPBBQs0dyx7dZ3X5JsLpE90pwrqt1vSV7cqEIw,706
anemoi/datasets/commands/compare.py,sha256=p2jQOAC3JhScCLF0GjTCO8goYLWLN8p7vzy_gf5fFcI,1473
anemoi/datasets/commands/copy.py,sha256=SxAeN51owyN5gwtwpt30xhJSIJRlJb9YOUt_4K4m-D8,11780
anemoi/datasets/commands/create.py,sha256=POdOsVDlvRrHFFkI3SNXNgNIbSxkVUUPMoo660x7Ma0,987
- anemoi/datasets/commands/inspect.py,sha256=…
- anemoi/datasets/commands/scan.py,sha256=…
+ anemoi/datasets/commands/inspect.py,sha256=NipFE2SDxuc6uzjZJT1wDJSFO24yVh95YJpiHz0WBmw,18666
+ anemoi/datasets/commands/scan.py,sha256=MaTdne4JrtlqO3LhOUr43DZhZ6O-RZwC7uQ7C6PG7Os,2910
anemoi/datasets/compute/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- anemoi/datasets/compute/recentre.py,sha256=…
+ anemoi/datasets/compute/recentre.py,sha256=oIqhENljB9ad-wnMJCJ3P0Xf1v76zQjqYCu1TuySgSI,4919
anemoi/datasets/create/__init__.py,sha256=Q8uXUdbE-SRYYaZd5cPQ2RVbSoHnGX7-eKdOJHYVhDk,5951
anemoi/datasets/create/check.py,sha256=DLjw-eyaCNxPhoKFsP4Yn_l3SIr57YHdyPR-tE5vx80,5791
anemoi/datasets/create/chunks.py,sha256=YEDcr0K2KiiceSTiBuZzj0TbRbzZ9J546XO7rrrTFQw,2441
- anemoi/datasets/create/config.py,sha256=…
- anemoi/datasets/create/input.py,sha256=…
- anemoi/datasets/create/loaders.py,sha256…
+ anemoi/datasets/create/config.py,sha256=Dek5SlifOie8U3TNnWgLKmyOATomO3SQisEJ663lKPI,7687
+ anemoi/datasets/create/input.py,sha256=s7agDYIB8XzmEOVm6Kq1B5lpv749u_xvzt6XHHM2kwg,27800
+ anemoi/datasets/create/loaders.py,sha256=mT3dSUHEfHrhlM-AiYRKs_zQxGyMeQ2zCkuLJW5gB4s,30953
anemoi/datasets/create/patch.py,sha256=xjCLhvIQKRqmypsKInRU1CvFh1uoaB3YGSQP1UVZZik,3682
anemoi/datasets/create/persistent.py,sha256=nT8gvhVPdI1H3zW_F7uViGKIlQQ94jCDrMSWTmhQ2_A,4290
anemoi/datasets/create/size.py,sha256=A1w6RkaL0L9IlwIdmYsCTJTecmY_QtvbkGf__jvQle0,1068
anemoi/datasets/create/template.py,sha256=2roItOYJzjGB0bKS28f6EjfpomP0ppT4v6T9fYzjRxQ,4263
- anemoi/datasets/create/utils.py,sha256=…
+ anemoi/datasets/create/utils.py,sha256=Z1d8A_q0hg0yvbdnR7hw-Ia-zj4wE6jEc9YUI3C06yg,3783
anemoi/datasets/create/writer.py,sha256=G1qAPvdn8anGnpWYhvSSP4u3Km_tHKPdMXm0G4skKSk,1379
anemoi/datasets/create/zarr.py,sha256=hwM_PaYTa_IgFY1VC7qdYTWQ5MXCWWlMrzXsV_eAY0Q,4776
- anemoi/datasets/create/functions/__init__.py,sha256=…
+ anemoi/datasets/create/functions/__init__.py,sha256=5HmelLkXDjFOhNhX0Z78aV3ZlW2txiJliJwT4jfLEN4,945
anemoi/datasets/create/functions/filters/__init__.py,sha256=Xe9G54CKvCI3ji-7k0R5l0WZZdhlydRgawsXuBcX_hg,379
- anemoi/datasets/create/functions/filters/empty.py,sha256=…
+ anemoi/datasets/create/functions/filters/empty.py,sha256=QGj7YEfbo3gwlmwHi1lPATjST0332TH2-uc6_wKENjI,621
anemoi/datasets/create/functions/filters/noop.py,sha256=ZP434Z1rFlqdgXse_1ZzqC2XAqRQlYlXlVfGLx7rK8g,444
- anemoi/datasets/create/functions/filters/rename.py,sha256=…
- anemoi/datasets/create/functions/filters/rotate_winds.py,sha256=…
- anemoi/datasets/create/functions/filters/unrotate_winds.py,sha256=…
+ anemoi/datasets/create/functions/filters/rename.py,sha256=U0jbZe7GYqbvw85NpJrsNkVEm3SO5RR661yCnMLS3jE,2160
+ anemoi/datasets/create/functions/filters/rotate_winds.py,sha256=Li3Xgf92yRkIm_2XDPOhStcSLMQei-bjh63DtDhPNDA,2065
+ anemoi/datasets/create/functions/filters/unrotate_winds.py,sha256=hiIwgWi_2lk_ntxsPFMyZ6Ku8_5p91ht36VN_2kHYDA,2414
anemoi/datasets/create/functions/sources/__init__.py,sha256=Xe9G54CKvCI3ji-7k0R5l0WZZdhlydRgawsXuBcX_hg,379
- anemoi/datasets/create/functions/sources/accumulations.py,sha256=…
- anemoi/datasets/create/functions/sources/constants.py,sha256=…
- anemoi/datasets/create/functions/sources/empty.py,sha256=…
- anemoi/datasets/create/functions/sources/forcings.py,sha256=…
- anemoi/datasets/create/functions/sources/grib.py,sha256=…
- anemoi/datasets/create/functions/sources/hindcasts.py,sha256=…
- anemoi/datasets/create/functions/sources/mars.py,sha256=…
- anemoi/datasets/create/functions/sources/netcdf.py,sha256=…
+ anemoi/datasets/create/functions/sources/accumulations.py,sha256=aFDgVQQjSPRbI5K_HLo3v1IBZzCp31gkCPqfRSETJ9E,13191
+ anemoi/datasets/create/functions/sources/constants.py,sha256=QscxBGAKYeSRhhzLcsK5Yy3kM4s3rltS2eDgDE6OmQA,819
+ anemoi/datasets/create/functions/sources/empty.py,sha256=ZrXGs8Y3VrLSV8C8YlJTJcHV7Bmi7xPiUlrq8R0JZQY,485
+ anemoi/datasets/create/functions/sources/forcings.py,sha256=tF3EyIs5AGF1Ppvp6dIExONM-kGF-wcnMO1sZc_wDuo,646
+ anemoi/datasets/create/functions/sources/grib.py,sha256=XOq1Efn-r9rX5bJj0ARhmKtMgKP6rraq_YRMXALxP54,1669
+ anemoi/datasets/create/functions/sources/hindcasts.py,sha256=_O94pLbn-ShD9KBU5sBlppVm4wojkkz8iwsckndIXyw,13241
+ anemoi/datasets/create/functions/sources/mars.py,sha256=Am9jHddLEHxsnKQ-0Ug0kgQknkEJ2UwU_v-Yy9yv_8c,5293
+ anemoi/datasets/create/functions/sources/netcdf.py,sha256=hsRqNIakef5AQCk4aPHxCW6U5qLJmW3DLfN3WSXTWzY,2092
anemoi/datasets/create/functions/sources/opendap.py,sha256=T0CPinscfafrVLaye5ue-PbiCNbcNqf_3m6pphN9rCU,543
anemoi/datasets/create/functions/sources/recentre.py,sha256=t07LIXG3Hp9gmPkPriILVt86TxubsHyS1EL1lzwgtXY,1810
- anemoi/datasets/create/functions/sources/source.py,sha256=…
- anemoi/datasets/create/functions/sources/tendencies.py,sha256=…
- anemoi/datasets/create/statistics/__init__.py,sha256=…
+ anemoi/datasets/create/functions/sources/source.py,sha256=J3O4M0nB1a-67IJuY_aWqDDqyNGXB_uzxVbicFldO4U,1422
+ anemoi/datasets/create/functions/sources/tendencies.py,sha256=OIYKdRTabDvxnBXCcffWWBcorVlhZVNj3VkcVuQjIYA,4088
+ anemoi/datasets/create/statistics/__init__.py,sha256=0_cUKLeYRD19wpSdx-N5SBl4KvdFnKEo9kT2_Lg_Pxo,11016
anemoi/datasets/create/statistics/summary.py,sha256=sgmhA24y3VRyjmDUgTnPIqcHSlWBbFA0qynx6gJ9Xw8,3370
anemoi/datasets/data/__init__.py,sha256=to9L_RZVQ4OgyHUpX6lcvt4GqJdZjBa5HCTaWx1aGKo,1046
anemoi/datasets/data/concat.py,sha256=AkpyOs16OjW7X0cdyYFQfWSCV6dteXBp-x9WlokO-DI,3550
- anemoi/datasets/data/dataset.py,sha256=…
+ anemoi/datasets/data/dataset.py,sha256=hcspK-Fjp-rYdOYZo4qlnL_GUeGpXk4NDINWjEpeSNc,7671
anemoi/datasets/data/debug.css,sha256=z2X_ZDSnZ9C3pyZPWnQiEyAxuMxUaxJxET4oaCImTAQ,211
anemoi/datasets/data/debug.py,sha256=PcyrjgxaLzeb_vf12pvUtPPVvBRHNm1SimythZvqsP4,6303
anemoi/datasets/data/ensemble.py,sha256=AsP7Xx0ZHLoZs6a4EC0jtyGYIcOvZvvKXhgNsIvqIN8,1137
@@ -59,15 +59,15 @@ anemoi/datasets/data/masked.py,sha256=czAv1ZfZ9q6Wr4RqI2Xj8SEm7yoCgJrwMl-CPDs_wS
anemoi/datasets/data/misc.py,sha256=tuNsUY06nWh3Raf_RCi8bzCXsMB4t2hOuIkNGV4epj8,10501
anemoi/datasets/data/select.py,sha256=Oje3KG1shRawjuBy2-GM8s_Nk_68l-uujvx5SGW0tUM,3781
anemoi/datasets/data/statistics.py,sha256=lZCcKw9s7ttMBEp6ANyxtbXoZZvchhE7SClq-D4AUR8,1645
- anemoi/datasets/data/stores.py,sha256=…
+ anemoi/datasets/data/stores.py,sha256=MBl81LUUWic2xJS8l8nbdm3aHvSZ1zCEC8HNEzGv-q4,11460
anemoi/datasets/data/subset.py,sha256=9urVTXdnwCgqn0_BRYquMi8oiXn4ubAf0n4586hWfKw,3814
anemoi/datasets/data/unchecked.py,sha256=xhdMg-ToI1UfBWHNsWyn1y2meZWngZtHx-33L0KqKp8,4037
anemoi/datasets/dates/__init__.py,sha256=4ItowfLLh90T8L_JOjtv98lE6M7gAaWt7dV3niUrFvk,4473
anemoi/datasets/dates/groups.py,sha256=iq310Pi7ullglOhcNblv14MmcT8FPgYCD5s45qAfV_s,3383
anemoi/datasets/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- anemoi_datasets-0.…
- anemoi_datasets-0.…
- anemoi_datasets-0.…
- anemoi_datasets-0.…
- anemoi_datasets-0.…
- anemoi_datasets-0.…
+ anemoi_datasets-0.4.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ anemoi_datasets-0.4.0.dist-info/METADATA,sha256=IZd5nok8kl49TlacMsgs7VcucD3YVF6Otf6u5AKscOw,16099
+ anemoi_datasets-0.4.0.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
+ anemoi_datasets-0.4.0.dist-info/entry_points.txt,sha256=yR-o-4uiPEA_GLBL81SkMYnUoxq3CAV3hHulQiRtGG0,66
+ anemoi_datasets-0.4.0.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
+ anemoi_datasets-0.4.0.dist-info/RECORD,,
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/LICENSE: file without changes
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/WHEEL: file without changes
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/entry_points.txt: file without changes
{anemoi_datasets-0.3.10.dist-info → anemoi_datasets-0.4.0.dist-info}/top_level.txt: file without changes