anemoi-datasets 0.5.19__py3-none-any.whl → 0.5.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anemoi/datasets/_version.py +2 -2
- anemoi/datasets/commands/compare-lam.py +401 -0
- anemoi/datasets/commands/grib-index.py +114 -0
- anemoi/datasets/create/filters/pressure_level_relative_humidity_to_specific_humidity.py +3 -1
- anemoi/datasets/create/filters/pressure_level_specific_humidity_to_relative_humidity.py +3 -1
- anemoi/datasets/create/filters/wz_to_w.py +3 -2
- anemoi/datasets/create/input/action.py +2 -0
- anemoi/datasets/create/input/result.py +1 -1
- anemoi/datasets/create/sources/anemoi_dataset.py +73 -0
- anemoi/datasets/create/sources/grib.py +7 -0
- anemoi/datasets/create/sources/grib_index.py +614 -0
- anemoi/datasets/create/sources/xarray_support/__init__.py +1 -1
- anemoi/datasets/create/sources/xarray_support/fieldlist.py +2 -2
- anemoi/datasets/create/sources/xarray_support/flavour.py +6 -0
- anemoi/datasets/data/__init__.py +16 -0
- anemoi/datasets/data/complement.py +4 -1
- anemoi/datasets/data/dataset.py +14 -0
- anemoi/datasets/data/interpolate.py +76 -0
- anemoi/datasets/data/masked.py +77 -0
- anemoi/datasets/data/misc.py +159 -0
- anemoi/datasets/grids.py +8 -2
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/METADATA +10 -4
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/RECORD +27 -23
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/WHEEL +0 -0
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/entry_points.txt +0 -0
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/licenses/LICENSE +0 -0
- {anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/top_level.txt +0 -0
anemoi/datasets/data/__init__.py
CHANGED
@@ -16,6 +16,7 @@ from typing import Set
 # from .dataset import Shape
 # from .dataset import TupleIndex
 from .misc import _open_dataset
+from .misc import _save_dataset
 from .misc import add_dataset_path
 from .misc import add_named_dataset
 
@@ -92,6 +93,21 @@ def open_dataset(*args: Any, **kwargs: Any) -> "Dataset":
     return ds
 
 
+def save_dataset(recipe: dict, zarr_path: str, n_workers: int = 1) -> None:
+    """Open a dataset and save it to disk.
+
+    Parameters
+    ----------
+    recipe : dict
+        Recipe used with open_dataset (not a dataset creation recipe).
+    zarr_path : str
+        Path to store the obtained anemoi dataset to disk.
+    n_workers : int
+        Number of workers to use for parallel processing. If none, sequential processing will be performed.
+    """
+    _save_dataset(recipe, zarr_path, n_workers)
+
+
 def list_dataset_names(*args: Any, **kwargs: Any) -> list[str]:
     """List the names of datasets.
 
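
A minimal usage sketch of the new public save_dataset entry point (the recipe keys and output path below are illustrative, not taken from this diff; the recipe is any set of open_dataset-style arguments):

    from anemoi.datasets import save_dataset

    # Hypothetical recipe: the same keyword arguments one would pass to open_dataset,
    # e.g. a source store plus a date sub-selection.
    recipe = {"dataset": "source-dataset.zarr", "start": "2020-01-01", "end": "2020-12-31"}

    # Opens the dataset described by the recipe and writes it to a new Zarr store;
    # n_workers=1 keeps the extraction sequential.
    save_dataset(recipe, "subset-2020.zarr", n_workers=1)
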
anemoi/datasets/data/complement.py
CHANGED
@@ -207,7 +207,7 @@ class ComplementNone(Complement):
 class ComplementNearest(Complement):
     """A class to complement a target dataset with variables from a source dataset using nearest neighbor interpolation."""
 
-    def __init__(self, target: Any, source: Any) -> None:
+    def __init__(self, target: Any, source: Any, max_distance: float = None) -> None:
         """Initializes the ComplementNearest class.
 
         Parameters
@@ -216,6 +216,8 @@
             The target dataset.
         source : Any
             The source dataset.
+        max_distance : float, optional
+            The maximum distance for nearest neighbor interpolation, default is None.
         """
         super().__init__(target, source)
 
@@ -224,6 +226,7 @@
             self._source.longitudes,
             self._target.latitudes,
             self._target.longitudes,
+            max_distance=max_distance,
         )
 
     def check_compatibility(self, d1: Dataset, d2: Dataset) -> None:
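
For orientation, ComplementNearest is the class behind the "complement" option of open_dataset when nearest-neighbour interpolation is selected. A hedged sketch of how the new cap could be used, assuming max_distance is forwarded to this constructor in the same way the interpolate_variables path in dataset.py forwards it (dataset paths are placeholders):

    from anemoi.datasets import open_dataset

    ds = open_dataset(
        complement="lam-target.zarr",     # placeholder target dataset
        source="global-source.zarr",      # placeholder source dataset
        interpolation="nearest",
        max_distance=1e-3,                # same units as nearest_grid_points (see grids.py below)
    )
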
anemoi/datasets/data/dataset.py
CHANGED
@@ -237,6 +237,13 @@ class Dataset(ABC, Sized):
 
             return Statistics(self, open_dataset(statistics))._subset(**kwargs).mutate()
 
+        # Note: trim_edge should go before thinning
+        if "trim_edge" in kwargs:
+            from .masked import TrimEdge
+
+            edge = kwargs.pop("trim_edge")
+            return TrimEdge(self, edge)._subset(**kwargs).mutate()
+
         if "thinning" in kwargs:
             from .masked import Thinning
 
@@ -284,6 +291,13 @@ class Dataset(ABC, Sized):
             interpolate_frequency = kwargs.pop("interpolate_frequency")
             return InterpolateFrequency(self, interpolate_frequency)._subset(**kwargs).mutate()
 
+        if "interpolate_variables" in kwargs:
+            from .interpolate import InterpolateNearest
+
+            interpolate_variables = kwargs.pop("interpolate_variables")
+            max_distance = kwargs.pop("max_distance", None)
+            return InterpolateNearest(self, interpolate_variables, max_distance=max_distance)._subset(**kwargs).mutate()
+
         # Keep last
         if "shuffle" in kwargs:
             from .subset import Subset
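
Both additions are handled in Dataset._subset, so they are available directly as open_dataset keywords; a minimal sketch with a placeholder dataset path and variable name:

    from anemoi.datasets import open_dataset

    ds = open_dataset(
        "regional-dataset.zarr",            # placeholder: any dataset on a regular (2D field_shape) grid
        trim_edge=(8, 8, 8, 8),             # drop 8 rows/columns from each side of the field (see masked.py below)
        interpolate_variables=["sst"],      # fill NaN grid points of these variables from the nearest valid point
        max_distance=1e-3,                  # optional cap on the nearest-neighbour search (see grids.py below)
    )
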
anemoi/datasets/data/interpolate.py
CHANGED
@@ -13,7 +13,11 @@ import logging
 from functools import cached_property
 from typing import Any
 from typing import Dict
+from typing import List
+from typing import Optional
 from typing import Set
+from typing import Tuple
+from typing import Union
 
 import numpy as np
 from anemoi.utils.dates import frequency_to_timedelta
@@ -214,3 +218,75 @@ class InterpolateFrequency(Forwards):
         return {
             # "frequency": frequency_to_string(self._frequency),
         }
+
+
+class InterpolateNearest(Forwards):
+    def __init__(
+        self, dataset: Dataset, interpolate_variables: List[str], max_distance: Optional[float] = None
+    ) -> None:
+        """Initialize the InterpolateNearest class.
+
+        Parameters
+        ----------
+        dataset : Dataset
+            The dataset to be interpolated.
+        interpolate_variables : List[str]
+            The variables to be interpolated.
+        max_distance : Optional[float], optional
+            The maximum distance for nearest neighbor search, by default None.
+        """
+        from ..grids import nearest_grid_points
+
+        super().__init__(dataset)
+        self.vars = interpolate_variables
+        self.var_mask = {
+            v: np.where(~np.isnan(dataset[0, dataset.name_to_index[v]].squeeze()))[0] for v in interpolate_variables
+        }
+        self.dataset = dataset
+        self.ngp_var = {
+            v: nearest_grid_points(
+                dataset.latitudes[self.var_mask[v]],
+                dataset.longitudes[self.var_mask[v]],
+                dataset.latitudes,
+                dataset.longitudes,
+                max_distance=max_distance,
+            )
+            for v in interpolate_variables
+        }
+
+    def tree(self) -> Node:
+        return Node(self, [self.forward.tree()], interpolate_variables=self.vars)
+
+    @property
+    def shape(self) -> Shape:
+        return self.forward.shape
+
+    @property
+    def metadata(self) -> Dict[str, Any]:
+        return self.forward.metadata()
+
+    @staticmethod
+    def slice_len(slice_obj: slice) -> int:
+        # Compute the length of the slice
+        return max(0, (slice_obj.stop - slice_obj.start + (slice_obj.step or 1) - 1) // (slice_obj.step or 1))
+
+    @expand_list_indexing
+    def _get_tuple(self, index: TupleIndex) -> NDArray[Any]:
+        index, changes = index_to_slices(index, self.shape)
+        source_data = self.forward[index[0]]
+        target_data = source_data.copy()
+        vars_to_interpolate = [self.forward.name_to_index[v] for v in self.vars]
+        for v, i in zip(self.vars, vars_to_interpolate):
+            target_data[:, i, ...] = source_data[:, i][..., self.var_mask[v][self.ngp_var[v]]]
+        result = target_data[(slice(None),) + index[1:]]
+        return apply_index_to_slices_changes(result, changes)
+
+    def __getitem__(self, index: Union[int, slice, Tuple[Union[int, slice], ...]]) -> NDArray[Any]:
+        if isinstance(index, (int, slice)):
+            index = (index, slice(None), slice(None), slice(None))
+        return self._get_tuple(index)
+
+    def subclass_metadata_specific(self) -> Dict[str, Any]:
+        return {
+            "interpolate_variables": self.vars,
+        }
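
The core of InterpolateNearest is precomputed indexing: for each listed variable it records the non-NaN grid points (var_mask) and, for every grid point, the index of its nearest valid point (ngp_var), then fills values by fancy indexing. A standalone illustration of that idea on a toy 1D grid (synthetic values, not the class itself):

    import numpy as np
    from scipy.spatial import cKDTree

    values = np.array([np.nan, 2.0, np.nan, np.nan, 5.0])    # one field on a flattened grid
    points = np.arange(values.size, dtype=float)[:, None]    # stand-in for the grid coordinates

    valid = np.where(~np.isnan(values))[0]                   # var_mask: indices of valid points
    _, nearest = cKDTree(points[valid]).query(points, k=1)   # ngp_var: nearest valid point for every grid point

    filled = values[valid[nearest]]                          # each point takes the value of its nearest valid point
    print(filled)                                            # [2. 2. 2. 5. 5.]
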
anemoi/datasets/data/masked.py
CHANGED
@@ -254,3 +254,80 @@ class Cropping(Masked):
             The metadata specific to the Cropping subclass.
         """
         return dict(area=self.area)
+
+
+class TrimEdge(Masked):
+    """A class that removes the boundary of a dataset."""
+
+    def __init__(self, forward, edge):
+        if isinstance(edge, int):
+            self.edge = [edge] * 4
+        elif isinstance(edge, (list, tuple)) and len(edge) == 4:
+            self.edge = edge
+        else:
+            raise ValueError("'edge' must be an integer or a list of 4 integers")
+
+        for e in self.edge:
+            if not isinstance(e, int) or e < 0:
+                raise ValueError("'edge' must be integer(s) 0 or greater")
+
+        shape = forward.field_shape
+        if len(shape) != 2:
+            raise ValueError("TrimEdge only works on regular grids")
+
+        if self.edge[0] + self.edge[1] >= shape[0]:
+            raise ValueError("Too much triming of the first grid dimension, resulting in an empty dataset")
+        if self.edge[2] + self.edge[3] >= shape[1]:
+            raise ValueError("Too much triming of the second grid dimension, resulting in an empty dataset")
+
+        mask = np.full(shape, True, dtype=bool)
+        mask[0 : self.edge[0], :] = False
+        mask[:, 0 : self.edge[2]] = False
+        if self.edge[1] != 0:
+            mask[-self.edge[1] :, :] = False
+        if self.edge[3] != 0:
+            mask[:, -self.edge[3] :] = False
+
+        mask = mask.flatten()
+
+        super().__init__(forward, mask)
+
+    def mutate(self) -> Dataset:
+        """Mutate the dataset.
+
+        Returns
+        -------
+        Dataset
+            The mutated dataset.
+        """
+        if self.edge is None:
+            return self.forward.mutate()
+        return super().mutate()
+
+    def tree(self) -> Node:
+        """Get the tree representation of the dataset.
+
+        Returns
+        -------
+        Node
+            The tree representation of the dataset.
+        """
+        return Node(self, [self.forward.tree()], edge=self.edge)
+
+    def forwards_subclass_metadata_specific(self) -> Dict[str, Any]:
+        """Get the metadata specific to the TrimEdge subclass.
+
+        Returns
+        -------
+        Dict[str, Any]
+            The metadata specific to the TrimEdge subclass.
+        """
+        return dict(edge=self.edge)
+
+    @property
+    def field_shape(self) -> Shape:
+        """Returns the field shape of the dataset."""
+        x, y = self.forward.field_shape
+        x -= self.edge[0] + self.edge[1]
+        y -= self.edge[2] + self.edge[3]
+        return x, y
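
A small numpy sketch of the boolean mask TrimEdge builds for a 2D field_shape, with edge interpreted as in the constructor above (points trimmed at the start and end of the first grid dimension, then of the second):

    import numpy as np

    shape = (6, 5)              # field_shape of a regular grid
    edge = (1, 1, 2, 0)         # (first-dim start, first-dim end, second-dim start, second-dim end)

    mask = np.full(shape, True, dtype=bool)
    mask[: edge[0], :] = False
    mask[:, : edge[2]] = False
    if edge[1]:
        mask[-edge[1]:, :] = False
    if edge[3]:
        mask[:, -edge[3]:] = False

    print(mask.sum(), "of", mask.size, "grid points kept")   # 12 of 30 grid points kept
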
anemoi/datasets/data/misc.py
CHANGED
@@ -539,3 +539,162 @@ def _open_dataset(*args: Any, **kwargs: Any) -> "Dataset":
         return dataset._subset(**kwargs)
 
     return sets[0]._subset(**kwargs)
+
+
+def append_to_zarr(new_data: np.ndarray, new_dates: np.ndarray, zarr_path: str) -> None:
+    """Append data from a subset (for one date) to the Zarr store.
+
+    Parameters
+    ----------
+    new_data : np.ndarray
+        The new data to append.
+    new_dates : np.ndarray
+        The new dates to append.
+    zarr_path : str
+        The path to the Zarr store.
+
+    Notes
+    -----
+    - "dates" dataset is created with chunks equal to len(big_dataset.dates).
+    - "data" dataset is created with chunk size 1 along the first (time) dimension.
+    """
+    print("Appending data for", new_dates, flush=True)
+    # Re-open the zarr store to avoid root object accumulating memory.
+    root = zarr.open(zarr_path, mode="a")
+    # Convert new dates to strings (using str) regardless of input dtype.
+    new_dates = np.array(new_dates, dtype="datetime64[ns]")
+    dates_ds = root["dates"]
+    old_len = dates_ds.shape[0]
+    dates_ds.resize((old_len + len(new_dates),))
+    dates_ds[old_len:] = new_dates
+    # Append to "data" dataset.
+    data_ds = root["data"]
+    old_shape = data_ds.shape  # (time, n_vars, ensembles, gridpoints)
+    new_shape = (old_shape[0] + len(new_dates),) + old_shape[1:]
+    data_ds.resize(new_shape)
+    data_ds[old_shape[0] :] = new_data
+
+
+def process_date(date: Any, big_dataset: Any) -> Tuple[np.ndarray, np.ndarray]:
+    """Open the subset corresponding to the given date and return (date, subset).
+
+    Parameters
+    ----------
+    date : Any
+        The date to process.
+    big_dataset : Any
+        The dataset to process.
+
+    Returns
+    -------
+    Tuple[np.ndarray, np.ndarray]
+        The subset and the date.
+    """
+    print("Processing:", date, flush=True)
+    subset = _open_dataset(big_dataset, start=date, end=date).mutate()
+    s = subset[:]
+    date = subset.dates
+    return s, date
+
+
+def initialize_zarr_store(root: Any, big_dataset: Any, recipe: Dict[str, Any]) -> None:
+    """Initialize the Zarr store with the given dataset and recipe.
+
+    Parameters
+    ----------
+    root : Any
+        The root of the Zarr store.
+    big_dataset : Any
+        The dataset to initialize the store with.
+    recipe : Dict[str, Any]
+        The recipe for initializing the store.
+    """
+    ensembles = big_dataset.shape[1]
+    # Create or append to "dates" dataset.
+    if "dates" not in root:
+        full_length = len(big_dataset.dates)
+        root.create_dataset("dates", data=np.array([], dtype="datetime64[s]"), chunks=(full_length,))
+
+    if "data" not in root:
+        dims = (1, len(big_dataset.variables), ensembles, big_dataset.grids[0])
+        root.create_dataset(
+            "data",
+            shape=dims,
+            dtype=np.float64,
+            chunks=dims,
+        )
+
+    for k, v in big_dataset.statistics.items():
+        if k not in root:
+            root.create_dataset(
+                k,
+                data=v,
+                compressor=None,
+            )
+
+    # Create spatial coordinate datasets if missing.
+    if "latitudes" not in root or "longitudes" not in root:
+        root.create_dataset("latitudes", data=big_dataset.latitudes, compressor=None)
+        root.create_dataset("longitudes", data=big_dataset.longitudes, compressor=None)
+
+    # Set store-wide attributes if not already set.
+    if "frequency" not in root.attrs:
+        root.attrs["frequency"] = "10m"
+        root.attrs["resolution"] = "1km"
+        root.attrs["name_to_index"] = {k: i for i, k in enumerate(big_dataset.variables)}
+        root.attrs["ensemble_dimension"] = 1
+        root.attrs["field_shape"] = big_dataset.field_shape
+        root.attrs["flatten_grid"] = True
+        root.attrs["recipe"] = recipe
+
+
+def _save_dataset(recipe: Dict[str, Any], zarr_path: str, n_workers: int = 1) -> None:
+    """Incrementally create (or update) a Zarr store from an Anemoi dataset.
+
+    Parameters
+    ----------
+    recipe : Dict[str, Any]
+        The recipe for creating the dataset.
+    zarr_path : str
+        The path to the Zarr store.
+    n_workers : int, optional
+        The number of worker processes to use, by default 1.
+
+    Notes
+    -----
+    Worker processes extract data for each date in parallel, but all writes
+    to the store happen sequentially in the main process (i.e. single-writer).
+
+    The "dates" dataset is created with chunking equal to the full length of
+    big_dataset.dates, while "data" is chunked with 1 in the time dimension.
+    """
+    from concurrent.futures import ProcessPoolExecutor
+
+    full_ds = _open_dataset(recipe).mutate()
+    print("Opened full dataset.", flush=True)
+
+    # Use ProcessPoolExecutor for parallel data extraction.
+    # Workers return (date, subset) tuples.
+    root = zarr.open(zarr_path, mode="a")
+    initialize_zarr_store(root, full_ds, recipe)
+    print("Zarr store initialized.", flush=True)
+
+    existing_dates = np.array(sorted(root["dates"]), dtype="datetime64[s]")
+    all_dates = full_ds.dates
+    # To resume creation of the Zarr store in case the job is interrupted.
+    dates_to_process = np.array(sorted(set(all_dates).difference(existing_dates)), dtype="datetime64[s]")
+
+    use_pool = False
+
+    if use_pool:
+        with ProcessPoolExecutor(n_workers) as pool:
+            futures = [pool.submit(process_date, date, full_ds) for date in dates_to_process]
+            for future in futures:
+                subset, date = future.result()
+                # All appends happen sequentially here to
+                # avoid dates being added in a random order.
+                append_to_zarr(subset, date, zarr_path)
+    else:
+        for date in dates_to_process:
+            subset, date = process_date(date, full_ds)
+            append_to_zarr(subset, date, zarr_path)
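
The incremental write in append_to_zarr relies on the standard zarr v2 resize-then-assign pattern (zarr is pinned to <=2.18.4 in the package metadata); a self-contained sketch of just that pattern, with a hypothetical store path and toy data:

    import numpy as np
    import zarr

    root = zarr.open("example.zarr", mode="a")                # hypothetical path
    if "data" not in root:
        root.create_dataset("data", shape=(0, 3), chunks=(1, 3), dtype="f8")

    data = root["data"]
    new_rows = np.random.rand(2, 3)                           # stand-in for one date's fields
    old_len = data.shape[0]
    data.resize((old_len + new_rows.shape[0],) + data.shape[1:])
    data[old_len:] = new_rows                                 # write only the appended slice
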
anemoi/datasets/grids.py
CHANGED
@@ -604,6 +604,7 @@ def nearest_grid_points(
     source_longitudes: NDArray[Any],
     target_latitudes: NDArray[Any],
     target_longitudes: NDArray[Any],
+    max_distance: float = None,
 ) -> NDArray[Any]:
     """Find the nearest grid points from source to target coordinates.
 
@@ -617,6 +618,9 @@
         Target latitude coordinates.
     target_longitudes : NDArray[Any]
         Target longitude coordinates.
+    max_distance: float, optional
+        Maximum distance between nearest point and point to interpolate. Defaults to None.
+        For example, 1e-3 is 1 km.
 
     Returns
     -------
@@ -632,8 +636,10 @@
 
     target_xyz = latlon_to_xyz(target_latitudes, target_longitudes)
     target_points = np.array(target_xyz).transpose()
-
-
+    if max_distance is None:
+        _, indices = cKDTree(source_points).query(target_points, k=1)
+    else:
+        _, indices = cKDTree(source_points).query(target_points, k=1, distance_upper_bound=max_distance)
     return indices
 
 
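
One point worth keeping in mind with distance_upper_bound (standard scipy.spatial.cKDTree semantics, not specific to this package): targets with no source point within the bound are reported with an infinite distance and an index equal to len(source_points), so downstream code indexing with the result may need to treat that value as "no neighbour". A standalone illustration:

    import numpy as np
    from scipy.spatial import cKDTree

    source = np.array([[0.0, 0.0], [1.0, 0.0]])
    target = np.array([[0.1, 0.0], [5.0, 0.0]])

    dist, idx = cKDTree(source).query(target, k=1, distance_upper_bound=0.5)
    print(idx)    # [0 2]  -> 2 == len(source) marks "no neighbour within 0.5"
    print(dist)   # [0.1 inf]
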
{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: anemoi-datasets
-Version: 0.5.19
+Version: 0.5.20
 Summary: A package to hold various functions to support training of ML models on ECMWF data.
 Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
 License: Apache License
@@ -205,6 +205,7 @@ License: Apache License
         See the License for the specific language governing permissions and
         limitations under the License.
 
+Project-URL: Changelog, https://github.com/ecmwf/anemoi-datasets/CHANGELOG.md
 Project-URL: Documentation, https://anemoi-datasets.readthedocs.io/
 Project-URL: Homepage, https://github.com/ecmwf/anemoi-datasets/
 Project-URL: Issues, https://github.com/ecmwf/anemoi-datasets/issues
@@ -224,8 +225,8 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: >=3.9
 License-File: LICENSE
-Requires-Dist: anemoi-transform>=0.1.
-Requires-Dist: anemoi-utils[provenance]>=0.4.
+Requires-Dist: anemoi-transform>=0.1.6
+Requires-Dist: anemoi-utils[provenance]>=0.4.18
 Requires-Dist: cfunits
 Requires-Dist: numpy
 Requires-Dist: pyyaml
@@ -233,8 +234,12 @@ Requires-Dist: semantic-version
 Requires-Dist: tqdm
 Requires-Dist: zarr<=2.18.4
 Provides-Extra: all
-Requires-Dist: anemoi-datasets[create,remote,xarray]; extra == "all"
+Requires-Dist: anemoi-datasets[comparelam,create,remote,xarray]; extra == "all"
+Provides-Extra: comparelam
+Requires-Dist: prettytable; extra == "comparelam"
+Requires-Dist: termcolor; extra == "comparelam"
 Provides-Extra: create
+Requires-Dist: cachetools; extra == "create"
 Requires-Dist: earthkit-data[mars]>=0.12.4; extra == "create"
 Requires-Dist: earthkit-geo>=0.3; extra == "create"
 Requires-Dist: earthkit-meteo>=0.3; extra == "create"
@@ -244,6 +249,7 @@ Requires-Dist: pyproj>=3; extra == "create"
 Provides-Extra: dev
 Requires-Dist: anemoi-datasets[all,docs,tests]; extra == "dev"
 Provides-Extra: docs
+Requires-Dist: anemoi-datasets[all]; extra == "docs"
 Requires-Dist: nbsphinx; extra == "docs"
 Requires-Dist: pandoc; extra == "docs"
 Requires-Dist: sphinx; extra == "docs"
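
The new "comparelam" extra (prettytable and termcolor) appears to back the compare-lam command added in this release; it is also pulled in by the "all" extra, and can be installed on its own with the usual extras syntax, pip install "anemoi-datasets[comparelam]".
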
{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/RECORD
CHANGED
@@ -1,15 +1,17 @@
 anemoi/datasets/__init__.py,sha256=i_wsAT3ezEYF7o5dpqGrpoG4wmLS-QIBug18uJbSYMs,1065
 anemoi/datasets/__main__.py,sha256=ErwAqE3rBc7OaNO2JRsEOhWpB8ldjAt7BFSuRhbnlqQ,936
-anemoi/datasets/_version.py,sha256=
-anemoi/datasets/grids.py,sha256=
+anemoi/datasets/_version.py,sha256=0Ib1b9ijC26N78gsH6oTzFbmTu3c8De1ac8TlFGaEZU,513
+anemoi/datasets/grids.py,sha256=Hhj1aOXHvDjmI46M_UlLSjCs1qYqxH-uqd_kapDSdbU,18134
 anemoi/datasets/testing.py,sha256=fy_JzavUwLlK_2rtXAT-UGUyo5gjyQW2y826zf334Wg,2645
 anemoi/datasets/commands/__init__.py,sha256=O5W3yHZywRoAqmRUioAr3zMCh0hGVV18wZYGvc00ioM,698
 anemoi/datasets/commands/cleanup.py,sha256=FX082xkHKCSd8d-FUN5zDBSiKA-QYQEeUZ6dCUD-Ob8,1816
+anemoi/datasets/commands/compare-lam.py,sha256=F5GYRsKOtdhDePhifgf1TCj5L2T8EVIA2N8AdO-AKKY,14857
 anemoi/datasets/commands/compare.py,sha256=jzhjbbt1U-YANTVRBhrwSh2CcYgk4qX2IiTMJtcn82s,3678
 anemoi/datasets/commands/copy.py,sha256=UlvW9YIlP7jwKAY7TikVAfhkrjaQ9Kkxqfx8jEut-Jg,16010
 anemoi/datasets/commands/create.py,sha256=5BXdPZMO-ZULBnEVgyeSS-IMy4p84HSyFVG855gqj3k,6598
 anemoi/datasets/commands/finalise-additions.py,sha256=2LqU7ke3i-yRQbjkgldX6e2QlyE-tKqp0b6QOhJF19g,1985
 anemoi/datasets/commands/finalise.py,sha256=-YtN9wFFDrM_i_V9YHoXZsajF3eAax-73Zsi4uHAFCI,1709
+anemoi/datasets/commands/grib-index.py,sha256=1aO4Lm_7ZpRF_lRRF-OEMZ37ByYPZfUAIFW7jOsIf7Q,3161
 anemoi/datasets/commands/init-additions.py,sha256=2vMom5L38UvLLopzP2z-R_Fq31fU2uMvKXoDq5d8oI4,1931
 anemoi/datasets/commands/init.py,sha256=5IKyJ_hJA4lLIbpT88XtcGzXccHLSGwSoqVSvVJGxPg,2852
 anemoi/datasets/commands/inspect.py,sha256=dGx_3IQTNaV3iGfwhHp_G1fIecBWeRbOLU9ckrZ7epg,26578
@@ -39,8 +41,8 @@ anemoi/datasets/create/filters/empty.py,sha256=Dw1kUnAlFt6b5ds0kmrw9Gak09XjSqF8m
 anemoi/datasets/create/filters/legacy.py,sha256=6JY6uX7m-8NZjoZ1sqs0EAqT-uorvnZ-eFOwMU3LmRU,2536
 anemoi/datasets/create/filters/noop.py,sha256=WHl-k3NojGJMX4iNYxQ6Ln21pM8ERP4z8pQ5zLRDvXs,1019
 anemoi/datasets/create/filters/orog_to_z.py,sha256=vnZ1hD9LXoOfHCIbzkurMuBl_NSfXSiiHS2yZt8ndeQ,1784
-anemoi/datasets/create/filters/pressure_level_relative_humidity_to_specific_humidity.py,sha256=
-anemoi/datasets/create/filters/pressure_level_specific_humidity_to_relative_humidity.py,sha256=
+anemoi/datasets/create/filters/pressure_level_relative_humidity_to_specific_humidity.py,sha256=dBAQFNAc3GEZ_HwyDrcctFaKZQYddh2ldnDA2XSfSRg,2646
+anemoi/datasets/create/filters/pressure_level_specific_humidity_to_relative_humidity.py,sha256=51t6kbNZsXK87H0HVR9j0a54siokID47ve9r1a8rOLE,2663
 anemoi/datasets/create/filters/rename.py,sha256=pKi3CU6fvox2sPH7szXdA79NjdIcSz59IB7HsiS_9Co,5779
 anemoi/datasets/create/filters/rotate_winds.py,sha256=fVyAbypO_EsENHjQCujbEXp2gUEb97sMoG0s4YiPXfc,3102
 anemoi/datasets/create/filters/single_level_dewpoint_to_relative_humidity.py,sha256=hCS3yiN9nZf-P6shQmBm5Or9rMOwU1fTwHw_qFIjT9s,2378
@@ -52,9 +54,9 @@ anemoi/datasets/create/filters/sum.py,sha256=aGT6JkdHJ3i2SKzklqiyJ4ZFV3bVMYhHOSo
 anemoi/datasets/create/filters/transform.py,sha256=gIDLvaJlnn3Nc6P29aPOvNYM6yBWcIGrR2e_1bM6_Nw,1418
 anemoi/datasets/create/filters/unrotate_winds.py,sha256=3AJf0crnVVySLlXLIdfEUxRRlQeKgheUuD-UCrSrgo8,2798
 anemoi/datasets/create/filters/uv_to_speeddir.py,sha256=Zdc34AG5Bsz-Z7JGuznyRJr6F-BnWKXPiI3mjmOpbek,2883
-anemoi/datasets/create/filters/wz_to_w.py,sha256=
+anemoi/datasets/create/filters/wz_to_w.py,sha256=slOiX5RibG48Zrkss8Qjpb-8ZTnvSvmKlk1Hy45_wzU,2812
 anemoi/datasets/create/input/__init__.py,sha256=XeURpmbReQvpELltGFKzg3oZFXWRdUxW9SK3K662SBQ,3364
-anemoi/datasets/create/input/action.py,sha256=
+anemoi/datasets/create/input/action.py,sha256=WmRPe5ZYQz8vxAtOr6hLYGLzikcldyps6dG3BwHiBp8,7709
 anemoi/datasets/create/input/concat.py,sha256=bU8SWfBVfK8bRAmmN4UO9zpIGxwQvRUk9_vwrKPOTE4,5355
 anemoi/datasets/create/input/context.py,sha256=qrLccxMe9UkyQxsNuR6JSK7oLzZq21dt38AxZ9kYzsY,2714
 anemoi/datasets/create/input/data_sources.py,sha256=4xUUShM0pCXIZVPJW_cSNMUwCO_wLx996MLFpTLChm0,4385
@@ -65,18 +67,20 @@ anemoi/datasets/create/input/join.py,sha256=RAdgE4lVcC71_J47dNa1weJuWdTXSQIvo06G
 anemoi/datasets/create/input/misc.py,sha256=FVaH_ym52RZI_fnLSMM_dKTQmWTrInucP780E3gGqvw,3357
 anemoi/datasets/create/input/pipe.py,sha256=-tCz161IwXoI8pl1hilA9T_j5eHSr-sgbijFLp9HHNc,2083
 anemoi/datasets/create/input/repeated_dates.py,sha256=HaPzDCNHQBY1VVp6gvd3drwjWjYpSBh-GLgHqBRJTz0,12012
-anemoi/datasets/create/input/result.py,sha256=
+anemoi/datasets/create/input/result.py,sha256=wX5iAAtm50ezLn2QsIDIQnGEGmgE08lWltRwnkFWSLw,24220
 anemoi/datasets/create/input/step.py,sha256=WcR9NgRvUKF60Fo5veLvRCAQMrOd55x1gOEAmd2t2r4,5948
 anemoi/datasets/create/input/template.py,sha256=Iycw9VmfA0WEIDP_Of8bp-8HsV0EUfwbnm0WjxiO4GA,4092
 anemoi/datasets/create/input/trace.py,sha256=dakPYMmwKq6s17Scww1CN-xYBD3btJTGeDknOhAcnEM,3320
 anemoi/datasets/create/sources/__init__.py,sha256=XNiiGaC6NbxnGfl6glPw-gTJASi3vsGKwVlfkMqYGk4,950
 anemoi/datasets/create/sources/accumulations.py,sha256=ZA8F8RJPMHok5RpIHH4x-txwiSll8zuWwqJ3rn95JHk,20295
 anemoi/datasets/create/sources/accumulations2.py,sha256=iBORRrH0N7r3gMWm3mCkJ6XmB-dO_lEckHPwvmk9fu0,20673
+anemoi/datasets/create/sources/anemoi_dataset.py,sha256=2xJJTmKlv87F_2ECMKeehaeW7_oWLlDcLt8C_Prp1RI,2017
 anemoi/datasets/create/sources/constants.py,sha256=5O6d9tEuAmVjl5vNkNfmkaAjKXFlw1UjeueTsF1GZCI,1528
 anemoi/datasets/create/sources/eccc_fstd.py,sha256=8HK38f444HcWMvBhooP0XqTfMXYoCbN_8G9RI_Ne5rc,659
 anemoi/datasets/create/sources/empty.py,sha256=5mVIVRUwnBfE3zp-bvNA_imXCSpyds-4mewcI8HXAiY,1020
 anemoi/datasets/create/sources/forcings.py,sha256=877OZoXUoJncQ2_AAGSijwWqM-4kJJdxdIa6SFvZBUw,1216
-anemoi/datasets/create/sources/grib.py,sha256=
+anemoi/datasets/create/sources/grib.py,sha256=nGjrKTQZB8GQCvfX7SgfdmcVJVSXF9kpb2GmSX7H7bM,8731
+anemoi/datasets/create/sources/grib_index.py,sha256=Pnm0RLga9lpD4MqVaZr7IqXMBlw1DtTIWZRfz7fq30Q,19026
 anemoi/datasets/create/sources/hindcasts.py,sha256=_4880rgd4AsRxlDXVi6dkh8mlKXrz2i27btVlmlMFjY,2611
 anemoi/datasets/create/sources/legacy.py,sha256=RJce-9TwmUUCFbgC8A3Dp61nSBfB8_lWti8WNoOMIcU,2652
 anemoi/datasets/create/sources/mars.py,sha256=tesQz7Ne6SLBChE_cNJU6Sxr6e0LXFlUKQ8gCdRiCMw,13155
@@ -91,11 +95,11 @@ anemoi/datasets/create/sources/xarray_kerchunk.py,sha256=vdFaFzze8VLjYUgIX8Lc39E
 anemoi/datasets/create/sources/xarray_zarr.py,sha256=McY-vgXmUbGAkBViAfYwBUeVmGUU-Qr8UW-jUGu5-9s,1209
 anemoi/datasets/create/sources/zenodo.py,sha256=KEetFEk5GzGFpoos8rbBQBTa2XElWG7oTYjfZXgbu0Q,2065
 anemoi/datasets/create/sources/xarray_support/README.md,sha256=56olM9Jh0vI0_bU9GI-IqbBcz4DZXWONqvdzN_VeAFE,78
-anemoi/datasets/create/sources/xarray_support/__init__.py,sha256=
+anemoi/datasets/create/sources/xarray_support/__init__.py,sha256=8Dv7KQW8O3VvHOsSbqYdjaaomYIhXIKgSGatnNEweNU,5564
 anemoi/datasets/create/sources/xarray_support/coordinates.py,sha256=rPEuijS77mQ9V9tpN7wjg-w9rBxj7bZf_c30lLgSscE,11029
 anemoi/datasets/create/sources/xarray_support/field.py,sha256=YRxx6kh1qO2qQ6I_VyR51h3dwNiiFM7CNwQNfpp-p-E,6375
-anemoi/datasets/create/sources/xarray_support/fieldlist.py,sha256=
-anemoi/datasets/create/sources/xarray_support/flavour.py,sha256=
+anemoi/datasets/create/sources/xarray_support/fieldlist.py,sha256=UyUljq2Ax-PpQ-bvG4Dsi_lkZucuPgCy120EadDeUMU,8271
+anemoi/datasets/create/sources/xarray_support/flavour.py,sha256=UyfzBjYMNfugMCq-r5Ie3qDuorLwaalPi_0oZHckZcg,32073
 anemoi/datasets/create/sources/xarray_support/grid.py,sha256=lsE8bQwBH9pflzvsJ89Z6ExYPdHJd54xorMNzL2gTd0,6181
 anemoi/datasets/create/sources/xarray_support/metadata.py,sha256=WRO86l-ZB7iJ7pG5Vz9kVv5h1MokfF0fuy0bNSNBRIc,10687
 anemoi/datasets/create/sources/xarray_support/patch.py,sha256=Snk8bz7gp0HrG0MrY5hrXu7VC0tKgtoiWXByi2sBYJc,2037
@@ -103,10 +107,10 @@ anemoi/datasets/create/sources/xarray_support/time.py,sha256=Y_lZTUOXWJH4jcSgyL4
 anemoi/datasets/create/sources/xarray_support/variable.py,sha256=fcazws9vuizmx55JCXwbkwffg4WxJllPrEg2US1VysE,9163
 anemoi/datasets/create/statistics/__init__.py,sha256=_BuPcuUrwQAEcMQVds93EV9M5ys2ao8jCWKV4OVoSSA,18291
 anemoi/datasets/create/statistics/summary.py,sha256=JdtChTmsr1Y958_nka36HltTbeZkawuGbprbfZD7Ux8,4790
-anemoi/datasets/data/__init__.py,sha256=
-anemoi/datasets/data/complement.py,sha256=
+anemoi/datasets/data/__init__.py,sha256=wzhk_7VQImge12Xkg99xuiFOC7DAjBW1mu446y0Iq60,3057
+anemoi/datasets/data/complement.py,sha256=QcbcAW3HaG1g4QBrEzH6Lzk8YEdnK9hJP4OpxW_oU2E,9835
 anemoi/datasets/data/concat.py,sha256=eY5rujcdal00BJCv00mKSlxp0FKVvPQd7uqrBnL9fj4,8996
-anemoi/datasets/data/dataset.py,sha256=
+anemoi/datasets/data/dataset.py,sha256=d-LZPCczG2-ZAua29FGP3_QWzwdnWPHhKpG1dHFQKio,31290
 anemoi/datasets/data/debug.css,sha256=z2X_ZDSnZ9C3pyZPWnQiEyAxuMxUaxJxET4oaCImTAQ,211
 anemoi/datasets/data/debug.py,sha256=hVa1jAQ-TK7CoKJNyyUC0eZPobFG-FpkVXEaO_3B-MA,10796
 anemoi/datasets/data/ensemble.py,sha256=-36kMjuT2y5jUeSnjCRTCyE4um6DLAADBVSKSTkHZZg,5352
@@ -114,11 +118,11 @@ anemoi/datasets/data/fill_missing.py,sha256=ceONpzD-PWLMTtG4WOw6USw-Cd1O55VYzfpA
 anemoi/datasets/data/forwards.py,sha256=d0LL0AORQApGmuPIfMWevOOZuePlgZ6GldHUKmqnTtg,19732
 anemoi/datasets/data/grids.py,sha256=vTAfGq3SaTU4tQzzfeRpFAKlmfL-JVvpjP4e3nGWO3s,22045
 anemoi/datasets/data/indexing.py,sha256=DasVd1j0FB0iTw6eqvhiLka4ztf2zJcI5NgWxmtxzCw,7526
-anemoi/datasets/data/interpolate.py,sha256
+anemoi/datasets/data/interpolate.py,sha256=-kSYwdjKH7zJtfITdbqdH6KyOFGVZDyHg4TaFk9shEI,9279
 anemoi/datasets/data/join.py,sha256=ZEHOsCecKBkKKH-vki404Sm7r7cV368ECO7PXPpay3s,9212
-anemoi/datasets/data/masked.py,sha256=
+anemoi/datasets/data/masked.py,sha256=giOvHLcGbLf6mZPqZjAxQd1kvydmkepDFh2EqchXLTQ,10213
 anemoi/datasets/data/merge.py,sha256=SvQhJHf-C-Kn7hEjFqomienk-epPPjMtoccRNCJpMtw,8733
-anemoi/datasets/data/misc.py,sha256=
+anemoi/datasets/data/misc.py,sha256=l8ZJwEjUf9CoPn-RhnilVDVTGypkVIzJ5xV77h8zzkc,20951
 anemoi/datasets/data/missing.py,sha256=ogfVDponbs0bGHMxps32Fj_fq4gT26R70yEMco5gdK8,12593
 anemoi/datasets/data/rescale.py,sha256=nGfJ5tWCncMJ7NMXkLbmt6z0ELrD6FxpbjJreQ3W91g,7004
 anemoi/datasets/data/select.py,sha256=Xs6uOzJL0CoOGeWA_E5_ukr8Jav2kXbZ41vhk7Vr8PE,8277
@@ -130,9 +134,9 @@ anemoi/datasets/data/xy.py,sha256=-jWzYismrK3eI3YCKIBpU1BCmraRncmVn0_2IUY--lk,75
 anemoi/datasets/dates/__init__.py,sha256=pEArHDQ7w5E0WC8Vvf9ypyKSdm6gnhoN9TmooITB7C4,13617
 anemoi/datasets/dates/groups.py,sha256=IOveL6IyTXZwEdXZEnRAnpu9pINY95VN7LzcpLfJ09E,10105
 anemoi/datasets/utils/__init__.py,sha256=hCW0QcLHJmE-C1r38P27_ZOvCLNewex5iQEtZqx2ckI,393
-anemoi_datasets-0.5.
-anemoi_datasets-0.5.
-anemoi_datasets-0.5.
-anemoi_datasets-0.5.
-anemoi_datasets-0.5.
-anemoi_datasets-0.5.
+anemoi_datasets-0.5.20.dist-info/licenses/LICENSE,sha256=8HznKF1Vi2IvfLsKNE5A2iVyiri3pRjRPvPC9kxs6qk,11354
+anemoi_datasets-0.5.20.dist-info/METADATA,sha256=iGcUiyvv_JjdtsXo_oq_WgwweD1nPE0xAM8n5d0qeQU,16039
+anemoi_datasets-0.5.20.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+anemoi_datasets-0.5.20.dist-info/entry_points.txt,sha256=yR-o-4uiPEA_GLBL81SkMYnUoxq3CAV3hHulQiRtGG0,66
+anemoi_datasets-0.5.20.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
+anemoi_datasets-0.5.20.dist-info/RECORD,,
{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/WHEEL
File without changes

{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/entry_points.txt
File without changes

{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/licenses/LICENSE
File without changes

{anemoi_datasets-0.5.19.dist-info → anemoi_datasets-0.5.20.dist-info}/top_level.txt
File without changes