anemoi-datasets 0.3.3__py3-none-any.whl → 0.3.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anemoi/datasets/_version.py +2 -2
- anemoi/datasets/commands/{inspect/zarr.py → inspect.py} +27 -56
- anemoi/datasets/create/functions/sources/grib.py +9 -1
- anemoi/datasets/create/functions/sources/netcdf.py +20 -1
- anemoi/datasets/data/concat.py +1 -1
- anemoi/datasets/data/ensemble.py +1 -1
- anemoi/datasets/data/{forewards.py → forwards.py} +6 -0
- anemoi/datasets/data/grids.py +6 -6
- anemoi/datasets/data/join.py +1 -1
- anemoi/datasets/data/masked.py +7 -1
- anemoi/datasets/data/select.py +8 -4
- anemoi/datasets/data/statistics.py +3 -6
- anemoi/datasets/data/stores.py +5 -2
- anemoi/datasets/data/subset.py +7 -1
- anemoi/datasets/data/unchecked.py +1 -1
- anemoi/datasets/dates/__init__.py +2 -2
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/METADATA +4 -1
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/RECORD +22 -23
- anemoi/datasets/commands/inspect/__init__.py +0 -37
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/LICENSE +0 -0
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/WHEEL +0 -0
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/entry_points.txt +0 -0
- {anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/top_level.txt +0 -0
anemoi/datasets/_version.py
CHANGED
anemoi/datasets/commands/{inspect/zarr.py → inspect.py}
RENAMED

@@ -24,6 +24,9 @@ from anemoi.utils.text import table
 
 from anemoi.datasets import open_dataset
 from anemoi.datasets.data.stores import open_zarr
+from anemoi.datasets.data.stores import zarr_lookup
+
+from . import Command
 
 LOG = logging.getLogger(__name__)
 
@@ -78,46 +81,12 @@ class Version:
         self.metadata = metadata
         self.version = version
         self.dataset = None
-        # try:
         self.dataset = open_dataset(self.path)
-        # except Exception as e:
-        #     LOG.error("Error opening dataset '%s': %s", self.path, e)
 
     def describe(self):
         print(f"📦 Path : {self.path}")
        print(f"🔢 Format version: {self.version}")
 
-    def probe(self):
-        if "cos_local_time" not in self.name_to_index:
-            print("⚠️ probe: no cos_local_time")
-            return
-
-        try:
-            lon = self.longitudes
-        except AttributeError:
-            print("⚠️ probe: no longitudes")
-            return
-        # print(json.dumps(self.metadata, indent=4))
-        cos_local_time = self.name_to_index["cos_local_time"]
-        data = self.data
-        start, end, frequency = self.first_date, self.last_date, self.frequency
-        date = start
-        same = 0
-        for i in range(10):
-            field = data[i, cos_local_time]
-            buggy = cos_local_time_bug(lon, date).reshape(field.shape)
-            diff = np.abs(field - buggy)
-            if np.max(diff) < 1e-5:
-                same += 1
-            date += datetime.timedelta(hours=frequency)
-            if date > end:
-                break
-        if same > 1:
-            print("❌ probe: cos_local_time is buggy")
-            return
-
-        print("✅ probe: cos_local_time is fixed")
-
     @property
     def name_to_index(self):
         return find(self.metadata, "name_to_index")
@@ -586,29 +555,35 @@ VERSIONS = {
 }
 
 
-class InspectZarr:
-    """Inspect a
+class InspectZarr(Command):
+    """Inspect a zarr dataset."""
 
-    def
-
+    def add_arguments(self, command_parser):
+        command_parser.add_argument("path", metavar="DATASET")
+        command_parser.add_argument("--detailed", action="store_true")
+
+        command_parser.add_argument("--progress", action="store_true")
+        command_parser.add_argument("--statistics", action="store_true")
+        command_parser.add_argument("--size", action="store_true", help="Print size")
 
-
-
-
-
-
+    def run(self, args):
+        self.inspect_zarr(**vars(args))
+
+    def inspect_zarr(self, path, progress=False, statistics=False, detailed=False, size=False, **kwargs):
+        version = self._info(path)
 
         dotted_line()
         version.describe()
 
         try:
-            if
-                return version.probe()
-            if kwargs.get("progress"):
+            if progress:
                 return version.progress()
-
+
+            if statistics:
                 return version.brute_force_statistics()
-
+
+            version.info(detailed, size)
+
         except Exception as e:
             LOG.error("Error inspecting zarr file '%s': %s", path, e)
 
@@ -616,14 +591,7 @@ class InspectZarr:
             raise
 
     def _info(self, path):
-
-        path = path[:-1]
-
-        try:
-            z = open_zarr(path)
-        except Exception as e:
-            LOG.error("Error opening zarr file '%s': %s", path, e)
-            raise
+        z = open_zarr(zarr_lookup(path))
 
         metadata = dict(z.attrs)
         version = metadata.get("version", "0.0.0")
@@ -640,3 +608,6 @@ class InspectZarr:
             candidate = klass
 
         return candidate(path, z, metadata, version)
+
+
+command = InspectZarr
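The hunk above folds the old Inspect wrapper into InspectZarr itself and registers it with command = InspectZarr; run() simply forwards the parsed arguments into inspect_zarr() as keywords. A minimal sketch of that flow using plain argparse (the parser setup and the dataset name are illustrative, not the package's own Command machinery):

    import argparse

    # Build a parser with the same options the new add_arguments() registers.
    parser = argparse.ArgumentParser("inspect")
    parser.add_argument("path", metavar="DATASET")
    parser.add_argument("--detailed", action="store_true")
    parser.add_argument("--progress", action="store_true")
    parser.add_argument("--statistics", action="store_true")
    parser.add_argument("--size", action="store_true")

    args = parser.parse_args(["some-dataset", "--statistics"])

    # run(args) does self.inspect_zarr(**vars(args)), so these keywords line up
    # with inspect_zarr(self, path, progress=False, statistics=False, detailed=False, size=False, ...).
    print(vars(args))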
anemoi/datasets/create/functions/sources/grib.py
CHANGED

@@ -8,6 +8,8 @@
 #
 
 
+import glob
+
 from climetlab import load_source
 from climetlab.utils.patterns import Pattern
 
@@ -22,6 +24,12 @@ def check(ds, paths, **kwargs):
         raise ValueError(f"Expected {count} fields, got {len(ds)} (kwargs={kwargs}, paths={paths})")
 
 
+def _expand(paths):
+    for path in paths:
+        for p in glob.glob(path):
+            yield p
+
+
 def execute(context, dates, path, *args, **kwargs):
     given_paths = path if isinstance(path, list) else [path]
 
@@ -35,7 +43,7 @@ def execute(context, dates, path, *args, **kwargs):
         if name in kwargs:
             raise ValueError(f"MARS interpolation parameter '{name}' not supported")
 
-    for path in paths:
+    for path in _expand(paths):
         context.trace("📁", "PATH", path)
         s = load_source("file", path)
         s = s.sel(valid_datetime=dates, **kwargs)
anemoi/datasets/create/functions/sources/netcdf.py
CHANGED

@@ -7,10 +7,29 @@
 # nor does it submit to any jurisdiction.
 #
 
+import glob
+
 from climetlab import load_source
 from climetlab.utils.patterns import Pattern
 
 
+def _expand(paths):
+    for path in paths:
+        if path.startswith("file://"):
+            path = path[7:]
+
+        if path.startswith("http://"):
+            yield path
+            continue
+
+        if path.startswith("https://"):
+            yield path
+            continue
+
+        for p in glob.glob(path):
+            yield p
+
+
 def check(what, ds, paths, **kwargs):
     count = 1
     for k, v in kwargs.items():
@@ -32,7 +51,7 @@ def load_netcdfs(emoji, what, context, dates, path, *args, **kwargs):
 
     levels = kwargs.get("level", kwargs.get("levelist"))
 
-    for path in paths:
+    for path in _expand(paths):
         context.trace(emoji, what.upper(), path)
         s = load_source("opendap", path)
         s = s.sel(
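The _expand() helper added above (and its simpler glob-only counterpart in grib.py) decides, per configured path, whether to pass it through as a URL or expand it as a glob pattern. A condensed, standalone sketch of the same behaviour, with placeholder paths:

    import glob

    def _expand(paths):
        for path in paths:
            # Local file URIs are turned back into plain paths.
            if path.startswith("file://"):
                path = path[7:]
            # Remote URLs are passed through untouched (no globbing).
            if path.startswith("http://") or path.startswith("https://"):
                yield path
                continue
            # Everything else is expanded as a glob pattern on the local filesystem.
            for p in glob.glob(path):
                yield p

    print(list(_expand(["https://example.org/data.nc", "/tmp/*.nc"])))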
anemoi/datasets/data/concat.py
CHANGED
@@ -12,7 +12,7 @@ import numpy as np
 
 from .debug import Node
 from .debug import debug_indexing
-from .forewards import Combined
+from .forwards import Combined
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
anemoi/datasets/data/ensemble.py
CHANGED
anemoi/datasets/data/{forewards.py → forwards.py}
RENAMED

@@ -91,12 +91,18 @@ class Forwards(Dataset):
     def metadata_specific(self, **kwargs):
         return super().metadata_specific(
             forward=self.forward.metadata_specific(),
+            **self.subclass_metadata_specific(),
             **kwargs,
         )
 
     def source(self, index):
         return self.forward.source(index)
 
+    def subclass_metadata_specific(self):
+        raise NotImplementedError(
+            f"subclass_metadata_specific() must be implemented in derived class {self.__class__.__name__}"
+        )
+
 
 class Combined(Forwards):
     def __init__(self, datasets):
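The change above routes each subclass's own metadata through the new subclass_metadata_specific() hook, which the other data/ modules in this diff (masked.py, select.py, statistics.py, subset.py) implement. A stripped-down sketch of the pattern, with illustrative class names rather than the package's own:

    class Forwards:
        """Base class: merges forwarded metadata with the subclass's own contribution."""

        def __init__(self, forward):
            self.forward = forward

        def metadata_specific(self, **kwargs):
            return dict(
                forward={},  # stand-in for self.forward.metadata_specific()
                **self.subclass_metadata_specific(),
                **kwargs,
            )

        def subclass_metadata_specific(self):
            raise NotImplementedError(
                f"subclass_metadata_specific() must be implemented in derived class {self.__class__.__name__}"
            )

    class Thinned(Forwards):  # illustrative subclass, not the package's Thinning
        def __init__(self, forward, thinning):
            super().__init__(forward)
            self.thinning = thinning

        def subclass_metadata_specific(self):
            return dict(thinning=self.thinning)

    print(Thinned(object(), 4).metadata_specific())
    # {'forward': {}, 'thinning': 4}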
anemoi/datasets/data/grids.py
CHANGED
@@ -12,8 +12,8 @@ import numpy as np
 
 from .debug import Node
 from .debug import debug_indexing
-from .forewards import Combined
-from .forewards import GivenAxis
+from .forwards import Combined
+from .forwards import GivenAxis
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
@@ -91,7 +91,7 @@ class Concat(Combined):
         return Node(self, [d.tree() for d in self.datasets])
 
 
-class
+class GridsBase(GivenAxis):
     def __init__(self, datasets, axis):
         super().__init__(datasets, axis)
         # Shape: (dates, variables, ensemble, 1d-values)
@@ -106,7 +106,7 @@ class Grids(GivenAxis):
         pass
 
 
-class
+class Grids(GridsBase):
     # TODO: select the statistics of the most global grid?
     @property
     def latitudes(self):
@@ -127,7 +127,7 @@ class ConcatGrids(Grids):
         return Node(self, [d.tree() for d in self.datasets], mode="concat")
 
 
-class Cutout(
+class Cutout(GridsBase):
     def __init__(self, datasets, axis):
         from anemoi.datasets.grids import cutout_mask
 
@@ -220,7 +220,7 @@ def grids_factory(args, kwargs):
     datasets = [_open(e) for e in grids]
     datasets, kwargs = _auto_adjust(datasets, kwargs)
 
-    return
+    return Grids(datasets, axis=axis)._subset(**kwargs)
 
 
 def cutout_factory(args, kwargs):
anemoi/datasets/data/join.py
CHANGED
@@ -13,7 +13,7 @@ import numpy as np
 from .debug import Node
 from .debug import Source
 from .debug import debug_indexing
-from .forewards import Combined
+from .forwards import Combined
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
anemoi/datasets/data/masked.py
CHANGED
@@ -14,7 +14,7 @@ from ..grids import cropping_mask
 from .dataset import Dataset
 from .debug import Node
 from .debug import debug_indexing
-from .forewards import Forwards
+from .forwards import Forwards
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
@@ -83,6 +83,9 @@ class Thinning(Masked):
     def tree(self):
         return Node(self, [self.forward.tree()], thinning=self.thinning, method=self.method)
 
+    def subclass_metadata_specific(self):
+        return dict(thinning=self.thinning, method=self.method)
+
 
 class Cropping(Masked):
     def __init__(self, forward, area):
@@ -104,3 +107,6 @@ class Cropping(Masked):
 
     def tree(self):
         return Node(self, [self.forward.tree()], area=self.area)
+
+    def metadata_specific(self, **kwargs):
+        return super().metadata_specific(area=self.area, **kwargs)
anemoi/datasets/data/select.py
CHANGED
@@ -11,7 +11,7 @@ from functools import cached_property
 from .debug import Node
 from .debug import Source
 from .debug import debug_indexing
-from .forewards import Forwards
+from .forwards import Forwards
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
@@ -88,6 +88,10 @@ class Select(Forwards):
     def tree(self):
         return Node(self, [self.dataset.tree()], **self.title)
 
+    def subclass_metadata_specific(self):
+        # return dict(indices=self.indices)
+        return {}
+
 
 class Rename(Forwards):
     def __init__(self, dataset, rename):
@@ -105,8 +109,8 @@ class Rename(Forwards):
     def name_to_index(self):
         return {k: i for i, k in enumerate(self.variables)}
 
-    def metadata_specific(self, **kwargs):
-        return super().metadata_specific(rename=self.rename, **kwargs)
-
     def tree(self):
         return Node(self, [self.forward.tree()], rename=self.rename)
+
+    def subclass_metadata_specific(self):
+        return dict(rename=self.rename)
anemoi/datasets/data/statistics.py
CHANGED

@@ -10,7 +10,7 @@ from functools import cached_property
 
 from . import open_dataset
 from .debug import Node
-from .forewards import Forwards
+from .forwards import Forwards
 
 LOG = logging.getLogger(__name__)
 
@@ -34,11 +34,8 @@ class Statistics(Forwards):
         delta = self.frequency
         return self._statistic.statistics_tendencies(delta)
 
-    def metadata_specific(self, **kwargs):
-        return super().metadata_specific(
-            statistics=self._statistic.metadata_specific(),
-            **kwargs,
-        )
+    def subclass_metadata_specific(self):
+        return dict(statistics=self._statistic.metadata_specific())
 
     def tree(self):
         return Node(self, [self.forward.tree()])
anemoi/datasets/data/stores.py
CHANGED
@@ -347,7 +347,7 @@ class ZarrWithMissingDates(Zarr):
         return "zarr*"
 
 
-def zarr_lookup(name):
+def zarr_lookup(name, fail=True):
 
     if name.endswith(".zarr") or name.endswith(".zip"):
         return name
@@ -372,4 +372,7 @@ def zarr_lookup(name):
         except zarr.errors.PathNotFoundError:
             pass
 
-
+    if fail:
+        raise ValueError(f"Cannot find a dataset that matched '{name}'. Tried: {tried}")
+
+    return None
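With the new fail keyword, zarr_lookup() can return None instead of raising when no candidate location matches. A hedged usage sketch; the import path is taken from the diff above, and the dataset name is a placeholder:

    from anemoi.datasets.data.stores import zarr_lookup

    # fail=False: get None back instead of a ValueError when nothing matches.
    path = zarr_lookup("my-dataset", fail=False)
    if path is None:
        print("dataset not found in any configured location")
    else:
        print(f"resolved to {path}")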
anemoi/datasets/data/subset.py
CHANGED
@@ -13,7 +13,7 @@ import numpy as np
 from .debug import Node
 from .debug import Source
 from .debug import debug_indexing
-from .forewards import Forwards
+from .forwards import Forwards
 from .indexing import apply_index_to_slices_changes
 from .indexing import expand_list_indexing
 from .indexing import index_to_slices
@@ -109,3 +109,9 @@ class Subset(Forwards):
 
     def tree(self):
         return Node(self, [self.dataset.tree()], **self.reason)
+
+    def subclass_metadata_specific(self):
+        return {
+            # "indices": self.indices,
+            "reason": self.reason,
+        }
anemoi/datasets/dates/__init__.py
CHANGED

@@ -116,8 +116,8 @@ class StartEndDates(Dates):
         start = no_time_zone(start)
         end = no_time_zone(end)
 
-        if end <= start:
-            raise ValueError(f"End date {end} must be after start date {start}")
+        # if end <= start:
+        #     raise ValueError(f"End date {end} must be after start date {start}")
 
         increment = datetime.timedelta(hours=frequency)
 
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: anemoi-datasets
-Version: 0.3.3
+Version: 0.3.5
 Summary: A package to hold various functions to support training of ML models on ECMWF data.
 Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
 License: Apache License
@@ -238,6 +238,7 @@ Requires-Dist: ecmwflibs >=0.6.3 ; extra == 'all'
 Requires-Dist: entrypoints ; extra == 'all'
 Requires-Dist: numpy ; extra == 'all'
 Requires-Dist: pyproj ; extra == 'all'
+Requires-Dist: pytest ; extra == 'all'
 Requires-Dist: pyyaml ; extra == 'all'
 Requires-Dist: requests ; extra == 'all'
 Requires-Dist: s3fs ; extra == 'all'
@@ -272,4 +273,6 @@ Provides-Extra: remote
 Requires-Dist: boto3 ; extra == 'remote'
 Requires-Dist: requests ; extra == 'remote'
 Requires-Dist: s3fs ; extra == 'remote'
+Provides-Extra: test
+Requires-Dist: pytest ; extra == 'test'
 
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/RECORD
CHANGED

@@ -1,14 +1,13 @@
 anemoi/datasets/__init__.py,sha256=DC7ttKT--pmhBQALX_Cn7P28dngsJucKi5y-Ydm28QM,700
 anemoi/datasets/__main__.py,sha256=cLA2PidDTOUHaDGzd0_E5iioKYNe-PSTv567Y2fuwQk,723
-anemoi/datasets/_version.py,sha256=
+anemoi/datasets/_version.py,sha256=3eLsZPTwWh0zKBhR6n3c4iAL9geCCdJGMojIL6dF0IA,411
 anemoi/datasets/grids.py,sha256=3YBMMJodgYhavarXPAlMZHaMtDT9v2IbTmAXZTqf8Qo,8481
 anemoi/datasets/commands/__init__.py,sha256=qAybFZPBBQs0dyx7dZ3X5JsLpE90pwrqt1vSV7cqEIw,706
 anemoi/datasets/commands/compare.py,sha256=tN3eqihvnZ0rFc0OUzrfI34PHDlYfc2l90ZIQBE1TDQ,1300
 anemoi/datasets/commands/copy.py,sha256=fba-zjD0iTHHXHhPEcm8VhDzsXQXDUxlbtTA1TovyT0,9991
 anemoi/datasets/commands/create.py,sha256=POdOsVDlvRrHFFkI3SNXNgNIbSxkVUUPMoo660x7Ma0,987
+anemoi/datasets/commands/inspect.py,sha256=G3fzcgiLaU8jln7GKvgamN7Y06-qC_JnFw2SbNn1_E4,18646
 anemoi/datasets/commands/scan.py,sha256=HxsLdCgBMSdEXjlJfPq5M_9LxXHHQIoZ1ZEHO_AoPgA,2881
-anemoi/datasets/commands/inspect/__init__.py,sha256=v6fPUTdMRdmUiEUUs0F74QlzPr-x5XEEOql3mkFme7E,1500
-anemoi/datasets/commands/inspect/zarr.py,sha256=Q1waDTgdJZwJXNST4jkO4DCIbqbf2T_2Us2k6yKGToo,19684
 anemoi/datasets/compute/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 anemoi/datasets/compute/recentre.py,sha256=j8LdC8kq1t4PW7WFTXf93hSxok10un8ENIPwCehzbP8,4768
 anemoi/datasets/create/__init__.py,sha256=jji65Zni5aPTvS269fAMix4pN9ukmSoK0z5SVsbpr5E,5807
@@ -36,10 +35,10 @@ anemoi/datasets/create/functions/sources/accumulations.py,sha256=li1tpEew1XUv4sJ
 anemoi/datasets/create/functions/sources/constants.py,sha256=aqquu6HDc8t-zsF9KRFLaj0eV4S0UPZ59BVna8E3bU8,785
 anemoi/datasets/create/functions/sources/empty.py,sha256=SBuAfC33imbfcRnFnnOR44y8Q3KSQcqx3juIcXfCa3c,481
 anemoi/datasets/create/functions/sources/forcings.py,sha256=EVcdu8puMSW451qj3LKCWWXaSf2LlmF8YXVs8hSMxkU,643
-anemoi/datasets/create/functions/sources/grib.py,sha256=
+anemoi/datasets/create/functions/sources/grib.py,sha256=YQNuGnlh2EYb2NIHYpzlipwUTmOhrmyQtP3zgk8MAUU,1661
 anemoi/datasets/create/functions/sources/hindcasts.py,sha256=0Psnsx2J0cRLMpJuNN-gESm1xJFC1gmQzI8sdnXCoYE,13042
 anemoi/datasets/create/functions/sources/mars.py,sha256=Jau-ceN_cI3Z2-uql92iS4-Emh9Pie7omdRkFB5oe1I,4025
-anemoi/datasets/create/functions/sources/netcdf.py,sha256=
+anemoi/datasets/create/functions/sources/netcdf.py,sha256=kic6PH7SAK3gseXChD38IDXw6Zcg2zhF4SeDXB2LQ8Q,2084
 anemoi/datasets/create/functions/sources/opendap.py,sha256=T0CPinscfafrVLaye5ue-PbiCNbcNqf_3m6pphN9rCU,543
 anemoi/datasets/create/functions/sources/recentre.py,sha256=t07LIXG3Hp9gmPkPriILVt86TxubsHyS1EL1lzwgtXY,1810
 anemoi/datasets/create/functions/sources/source.py,sha256=hPQnV_6UIxFw97uRKcTA8TplcgG1kC8NlFHoEaaLet4,1418
@@ -47,28 +46,28 @@ anemoi/datasets/create/functions/sources/tendencies.py,sha256=kwS_GZt8R9kpfs5Rrv
 anemoi/datasets/create/statistics/__init__.py,sha256=X50drgE-ltuNe7bSIyvyeC4GeTqGTQGbglh2-2aVWKE,15445
 anemoi/datasets/create/statistics/summary.py,sha256=sgmhA24y3VRyjmDUgTnPIqcHSlWBbFA0qynx6gJ9Xw8,3370
 anemoi/datasets/data/__init__.py,sha256=tacn6K_VZ-pYhLmGePG5sze8kmqGpqscYb-bMyQnWtk,888
-anemoi/datasets/data/concat.py,sha256=
+anemoi/datasets/data/concat.py,sha256=AkpyOs16OjW7X0cdyYFQfWSCV6dteXBp-x9WlokO-DI,3550
 anemoi/datasets/data/dataset.py,sha256=UDnidq2amyCT2COH05pGfDCJcmkdMj1ubtHk9cl-qcE,7384
 anemoi/datasets/data/debug.css,sha256=z2X_ZDSnZ9C3pyZPWnQiEyAxuMxUaxJxET4oaCImTAQ,211
 anemoi/datasets/data/debug.py,sha256=PcyrjgxaLzeb_vf12pvUtPPVvBRHNm1SimythZvqsP4,6303
-anemoi/datasets/data/ensemble.py,sha256=
-anemoi/datasets/data/
-anemoi/datasets/data/grids.py,sha256=
+anemoi/datasets/data/ensemble.py,sha256=AsP7Xx0ZHLoZs6a4EC0jtyGYIcOvZvvKXhgNsIvqIN8,1137
+anemoi/datasets/data/forwards.py,sha256=t9YQCN7j75VMInt0uP9JUJoh1klF1Z1xnwy5_kDMDQs,7700
+anemoi/datasets/data/grids.py,sha256=rooOeR6rvjl4U8B4LO3N23fcgxvGE7ZUmhVryk1QS4M,7493
 anemoi/datasets/data/indexing.py,sha256=625m__JG5m_tDMrkz1hB6Vydenwt0oHuyAlc-o3Zwos,4799
-anemoi/datasets/data/join.py,sha256=
-anemoi/datasets/data/masked.py,sha256=
+anemoi/datasets/data/join.py,sha256=dtCBbMTicqrRPxfBULi3RwEcQBLhQpIcvCjdN5A3XUU,4892
+anemoi/datasets/data/masked.py,sha256=Fzkehyka70CiS0LYSy_uyVYu2gKLwDSxlbm8GiC_pYs,3742
 anemoi/datasets/data/misc.py,sha256=m_28VIhX546RIoVfGpimPOThl5EwOhkun2UgWMAUxqw,10355
-anemoi/datasets/data/select.py,sha256=
-anemoi/datasets/data/statistics.py,sha256=
-anemoi/datasets/data/stores.py,sha256=
-anemoi/datasets/data/subset.py,sha256=
-anemoi/datasets/data/unchecked.py,sha256=
-anemoi/datasets/dates/__init__.py,sha256=
+anemoi/datasets/data/select.py,sha256=U3AEid80mrJKu0SF4lLc-bRWMVcAZwHNUHUHRehvuHU,3680
+anemoi/datasets/data/statistics.py,sha256=PKRgcCiZEb1HjkIveVGhE3TzUy9Qe3AYWGFD72Urah8,1514
+anemoi/datasets/data/stores.py,sha256=oEjUra0zzIysiUvh-RBQRzcbviggejEQiMO5RfpjPyM,10896
+anemoi/datasets/data/subset.py,sha256=9urVTXdnwCgqn0_BRYquMi8oiXn4ubAf0n4586hWfKw,3814
+anemoi/datasets/data/unchecked.py,sha256=xhdMg-ToI1UfBWHNsWyn1y2meZWngZtHx-33L0KqKp8,4037
+anemoi/datasets/dates/__init__.py,sha256=4ItowfLLh90T8L_JOjtv98lE6M7gAaWt7dV3niUrFvk,4473
 anemoi/datasets/dates/groups.py,sha256=iq310Pi7ullglOhcNblv14MmcT8FPgYCD5s45qAfV_s,3383
 anemoi/datasets/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-anemoi_datasets-0.3.
-anemoi_datasets-0.3.
-anemoi_datasets-0.3.
-anemoi_datasets-0.3.
-anemoi_datasets-0.3.
-anemoi_datasets-0.3.
+anemoi_datasets-0.3.5.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+anemoi_datasets-0.3.5.dist-info/METADATA,sha256=rrQ3_Oz3ToYC03NGG2unQIwQ9kCVW6tQOvGwnZzTJ-0,16190
+anemoi_datasets-0.3.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+anemoi_datasets-0.3.5.dist-info/entry_points.txt,sha256=yR-o-4uiPEA_GLBL81SkMYnUoxq3CAV3hHulQiRtGG0,66
+anemoi_datasets-0.3.5.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
+anemoi_datasets-0.3.5.dist-info/RECORD,,
anemoi/datasets/commands/inspect/__init__.py
DELETED

@@ -1,37 +0,0 @@
-# (C) Copyright 2023 European Centre for Medium-Range Weather Forecasts.
-# This software is licensed under the terms of the Apache Licence Version 2.0
-# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
-# In applying this licence, ECMWF does not waive the privileges and immunities
-# granted to it by virtue of its status as an intergovernmental organisation
-# nor does it submit to any jurisdiction.
-
-
-import os
-
-from .. import Command
-from .zarr import InspectZarr
-
-
-class Inspect(Command, InspectZarr):
-    """Inspect a zarr dataset."""
-
-    def add_arguments(self, command_parser):
-        # g = command_parser.add_mutually_exclusive_group()
-        command_parser.add_argument("path", metavar="PATH", nargs="+")
-        command_parser.add_argument("--detailed", action="store_true")
-        # command_parser.add_argument("--probe", action="store_true")
-        command_parser.add_argument("--progress", action="store_true")
-        command_parser.add_argument("--statistics", action="store_true")
-        command_parser.add_argument("--size", action="store_true", help="Print size")
-
-    def run(self, args):
-        dic = vars(args)
-        for path in dic.pop("path"):
-            if os.path.isdir(path) or path.endswith(".zarr.zip") or path.endswith(".zarr"):
-                self.inspect_zarr(path=path, **dic)
-            else:
-                raise ValueError(f"Unknown file type: {path}")
-                # self.inspect_checkpoint(path=path, **dic)
-
-
-command = Inspect
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/LICENSE
File without changes
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/WHEEL
File without changes
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/entry_points.txt
File without changes
{anemoi_datasets-0.3.3.dist-info → anemoi_datasets-0.3.5.dist-info}/top_level.txt
File without changes