ocf-data-sampler 0.5.3__py3-none-any.whl → 0.5.5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Note: this version of ocf-data-sampler has been flagged as potentially problematic.
- ocf_data_sampler/load/nwp/providers/utils.py +1 -1
- ocf_data_sampler/load/open_tensorstore_zarrs.py +2 -1
- ocf_data_sampler/load/satellite.py +1 -1
- ocf_data_sampler/load/xarray_tensorstore.py +299 -0
- {ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/METADATA +5 -4
- {ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/RECORD +8 -7
- {ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/WHEEL +0 -0
- {ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/top_level.txt +0 -0
ocf_data_sampler/load/satellite.py

```diff
@@ -1,13 +1,13 @@
 """Satellite loader."""
 import numpy as np
 import xarray as xr
-from xarray_tensorstore import open_zarr
 
 from ocf_data_sampler.load.utils import (
     check_time_unique_increasing,
     get_xr_data_array_from_xr_dataset,
     make_spatial_coords_increasing,
 )
+from ocf_data_sampler.load.xarray_tensorstore import open_zarr
 
 from .open_tensorstore_zarrs import open_zarrs
 
```
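The change to the satellite loader is purely where `open_zarr` comes from: the external `xarray-tensorstore` dependency is swapped for the vendored copy added below, so call sites are unchanged. A minimal sketch of the (unchanged) usage, with a hypothetical path:

```python
from ocf_data_sampler.load.xarray_tensorstore import open_zarr

# Same signature and behaviour as xarray_tensorstore.open_zarr, plus the
# zarr v3 handling added in this release. The path is illustrative only.
ds = open_zarr("/data/satellite.zarr")
```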
ocf_data_sampler/load/xarray_tensorstore.py (new file, @@ -0,0 +1,299 @@)

```python
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for loading TensorStore data into Xarray.

Copied from https://github.com/google-research/tensorstore/blob/main/tensorstore/xarray.py
but with small changes added so that it works with zarr v3:
https://github.com/google/xarray-tensorstore/pull/22
"""
from __future__ import annotations

import dataclasses
import math
import os.path
import re
from typing import TypeVar

import numpy as np
import tensorstore
import xarray
import zarr
from xarray.core import indexing

__version__ = "0.1.5"  # keep in sync with setup.py


Index = TypeVar("Index", int, slice, np.ndarray, None)
XarrayData = TypeVar("XarrayData", xarray.Dataset, xarray.DataArray)


def _numpy_to_tensorstore_index(index: Index, size: int) -> Index:
    """Switch from NumPy to TensorStore indexing conventions."""
    # https://google.github.io/tensorstore/python/indexing.html#differences-compared-to-numpy-indexing
    if index is None:
        return None
    elif isinstance(index, int):
        # Negative integers do not count from the end in TensorStore
        return index + size if index < 0 else index
    elif isinstance(index, slice):
        start = _numpy_to_tensorstore_index(index.start, size)
        stop = _numpy_to_tensorstore_index(index.stop, size)
        if stop is not None:
            # TensorStore does not allow out of bounds slicing
            stop = min(stop, size)
        return slice(start, stop, index.step)
    else:
        assert isinstance(index, np.ndarray)  # noqa S101
        return np.where(index < 0, index + size, index)
```
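To make the convention gap concrete, here is a small illustrative check of the helper above. This snippet is not part of the package; only the import path and function are from the vendored module:

```python
import numpy as np

from ocf_data_sampler.load.xarray_tensorstore import _numpy_to_tensorstore_index

# NumPy counts negative indices from the end; TensorStore does not, so -1 in
# an axis of size 10 must be rewritten as 9 before being handed to TensorStore.
assert _numpy_to_tensorstore_index(-1, size=10) == 9

# TensorStore rejects out-of-bounds slice stops, so they are clamped to size.
assert _numpy_to_tensorstore_index(slice(0, 99), size=10) == slice(0, 10, None)

# Integer-array indices are shifted elementwise.
np.testing.assert_array_equal(
    _numpy_to_tensorstore_index(np.array([-1, 2]), size=10),
    np.array([9, 2]),
)
```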
(ocf_data_sampler/load/xarray_tensorstore.py, continued)

```python
@dataclasses.dataclass(frozen=True)
class _TensorStoreAdapter(indexing.ExplicitlyIndexed):
    """TensorStore array that can be wrapped by xarray.Variable.

    We use Xarray's semi-internal ExplicitlyIndexed API so that Xarray will not
    attempt to load our array into memory as a NumPy array. In the future, this
    should be supported by public Xarray APIs, as part of the refactor discussed
    in: https://github.com/pydata/xarray/issues/3981
    """

    array: tensorstore.TensorStore
    future: tensorstore.Future | None = None

    @property
    def shape(self) -> tuple[int, ...]:
        return self.array.shape

    @property
    def dtype(self) -> np.dtype:
        return self.array.dtype.numpy_dtype

    @property
    def ndim(self) -> int:
        return len(self.shape)

    @property
    def size(self) -> int:
        return math.prod(self.shape)

    def __getitem__(self, key: indexing.ExplicitIndexer) -> _TensorStoreAdapter:
        index_tuple = tuple(map(_numpy_to_tensorstore_index, key.tuple, self.shape))
        if isinstance(key, indexing.OuterIndexer):
            # TODO(shoyer): fix this for newer versions of Xarray.
            # We get the error message:
            # AttributeError: '_TensorStoreAdapter' object has no attribute 'oindex'
            indexed = self.array.oindex[index_tuple]
        elif isinstance(key, indexing.VectorizedIndexer):
            indexed = self.array.vindex[index_tuple]
        else:
            assert isinstance(key, indexing.BasicIndexer)  # noqa S101
            indexed = self.array[index_tuple]
        # Translate to the origin so repeated indexing is relative to the new bounds
        # like NumPy, not absolute like TensorStore
        translated = indexed[tensorstore.d[:].translate_to[0]]
        return type(self)(translated)

    def __setitem__(self, key: indexing.ExplicitIndexer, value) -> None:  # noqa ANN001
        index_tuple = tuple(map(_numpy_to_tensorstore_index, key.tuple, self.shape))
        if isinstance(key, indexing.OuterIndexer):
            self.array.oindex[index_tuple] = value
        elif isinstance(key, indexing.VectorizedIndexer):
            self.array.vindex[index_tuple] = value
        else:
            assert isinstance(key, indexing.BasicIndexer)  # noqa S101
            self.array[index_tuple] = value
        # Invalidate the future so that the next read will pick up the new value
        object.__setattr__(self, "future", None)

    # xarray>2024.02.0 uses oindex and vindex properties, which are expected to
    # return objects whose __getitem__ method supports the appropriate form of
    # indexing.
    @property
    def oindex(self) -> _TensorStoreAdapter:
        return self

    @property
    def vindex(self) -> _TensorStoreAdapter:
        return self

    def transpose(self, order: tuple[int, ...]) -> _TensorStoreAdapter:
        transposed = self.array[tensorstore.d[order].transpose[:]]
        return type(self)(transposed)

    def read(self) -> _TensorStoreAdapter:
        future = self.array.read()
        return type(self)(self.array, future)

    def __array__(self, dtype: np.dtype | None = None) -> np.ndarray:  # type: ignore
        future = self.array.read() if self.future is None else self.future
        return np.asarray(future.result(), dtype=dtype)

    def get_duck_array(self) -> np.ndarray:
        # special method for xarray to return an in-memory (computed) representation
        return np.asarray(self)

    # Work around the missing __copy__ and __deepcopy__ methods from TensorStore,
    # which are needed for Xarray:
    # https://github.com/google/tensorstore/issues/109
    # TensorStore objects are immutable, so there's no need to actually copy them.

    def __copy__(self) -> _TensorStoreAdapter:
        return type(self)(self.array, self.future)

    def __deepcopy__(self, memo) -> _TensorStoreAdapter:  # noqa ANN001
        return self.__copy__()
```
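The `translate_to[0]` step in `__getitem__` deserves a note: a TensorStore view keeps its absolute origin after slicing, unlike a NumPy view. A sketch of the difference using an in-memory TensorStore array (illustrative only, not from the package):

```python
import tensorstore

ts = tensorstore.array([[1, 2, 3], [4, 5, 6]])

# Slicing keeps the absolute origin: the view's domain starts at (1, 0).
sliced = ts[1:2, :]
print(sliced.domain)

# Translating back to the origin makes subsequent indexing relative to the
# new bounds, matching NumPy semantics -- which is what the adapter relies on.
reset = sliced[tensorstore.d[:].translate_to[0]]
print(reset.domain)
```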
(ocf_data_sampler/load/xarray_tensorstore.py, continued)

```python
def _read_tensorstore(
    array: indexing.ExplicitlyIndexed,
) -> indexing.ExplicitlyIndexed:
    """Starts async reading on a TensorStore array."""
    return array.read() if isinstance(array, _TensorStoreAdapter) else array


def read(xarraydata: XarrayData, /) -> XarrayData:
    """Starts async reads on all TensorStore arrays."""
    # pylint: disable=protected-access
    if isinstance(xarraydata, xarray.Dataset):
        data = {
            name: _read_tensorstore(var.variable._data)
            for name, var in xarraydata.data_vars.items()
        }
    elif isinstance(xarraydata, xarray.DataArray):
        data = _read_tensorstore(xarraydata.variable._data)
    else:
        raise TypeError(f"argument is not a DataArray or Dataset: {xarraydata}")
    # pylint: enable=protected-access
    return xarraydata.copy(data=data)
```
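In practice, `read()` is what makes TensorStore-backed datasets fast to load: it starts reads for all variables at once instead of letting `.compute()` fetch them one at a time. A hedged usage sketch (the path is hypothetical):

```python
from ocf_data_sampler.load.xarray_tensorstore import open_zarr, read

ds = open_zarr("/data/satellite.zarr")   # lazy, TensorStore-backed
subset = ds.isel(time=slice(0, 12))      # indexing stays lazy

started = read(subset)                   # async reads begin for every variable
in_memory = started.compute()            # blocks until all reads complete
```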
(ocf_data_sampler/load/xarray_tensorstore.py, continued)

```python
_DEFAULT_STORAGE_DRIVER = "file"


def _zarr_spec_from_path(path: str, zarr_format: int) -> ...:
    if re.match(r"\w+\://", path):  # path is a URI
        kv_store = path
    else:
        kv_store = {"driver": _DEFAULT_STORAGE_DRIVER, "path": path}

    if zarr_format == 2:
        return {"driver": "zarr2", "kvstore": kv_store}
    else:
        return {"driver": "zarr3", "kvstore": kv_store}


def _raise_if_mask_and_scale_used_for_data_vars(ds: xarray.Dataset) -> None:
    """Check a dataset for data variables that would need masking or scaling."""
    advice = (
        "Consider re-opening with xarray_tensorstore.open_zarr(..., "
        "mask_and_scale=False), or falling back to use xarray.open_zarr()."
    )
    for k in ds:
        encoding = ds[k].encoding
        for attr in ["_FillValue", "missing_value"]:
            fill_value = encoding.get(attr, np.nan)
            if fill_value == fill_value:  # pylint: disable=comparison-with-itself
                raise ValueError(
                    f"variable {k} has non-NaN fill value, which is not supported by"
                    f" xarray-tensorstore: {fill_value}. {advice}",
                )
        for attr in ["scale_factor", "add_offset"]:
            if attr in encoding:
                raise ValueError(
                    f"variable {k} uses scale/offset encoding, which is not supported"
                    f" by xarray-tensorstore: {encoding}. {advice}",
                )
```
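For reference, the specs this helper hands to `tensorstore.open` look like the following. The paths are hypothetical; the shapes follow directly from the code above:

```python
from ocf_data_sampler.load.xarray_tensorstore import _zarr_spec_from_path

# Local paths get an explicit kvstore dict using the "file" driver:
_zarr_spec_from_path("/data/era5.zarr/temperature", zarr_format=2)
# -> {"driver": "zarr2",
#     "kvstore": {"driver": "file", "path": "/data/era5.zarr/temperature"}}

# URI-style paths (anything matching r"\w+\://") pass straight through:
_zarr_spec_from_path("gs://my-bucket/era5.zarr/temperature", zarr_format=3)
# -> {"driver": "zarr3", "kvstore": "gs://my-bucket/era5.zarr/temperature"}
```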
(ocf_data_sampler/load/xarray_tensorstore.py, continued)

```python
def open_zarr(
    path: str,
    *,
    context: tensorstore.Context | None = None,
    mask_and_scale: bool = True,
    write: bool = False,
) -> xarray.Dataset:
    """Open an xarray.Dataset from Zarr using TensorStore.

    For best performance, explicitly call `read()` to asynchronously load data
    in parallel. Otherwise, xarray's `.compute()` method will load each variable's
    data in sequence.

    Example usage:

        import xarray_tensorstore

        ds = xarray_tensorstore.open_zarr(path)

        # indexing & transposing is lazy
        example = ds.sel(time='2020-01-01').transpose('longitude', 'latitude', ...)

        # start reading data asynchronously
        read_example = xarray_tensorstore.read(example)

        # blocking conversion of the data into NumPy arrays
        numpy_example = read_example.compute()

    Args:
        path: path or URI to Zarr group to open.
        context: TensorStore configuration options to use when opening arrays.
        mask_and_scale: if True (default), attempt to apply masking and scaling like
            xarray.open_zarr(). This is only supported for coordinate variables and
            otherwise will raise an error.
        write: Allow write access. Defaults to False.

    Returns:
        Dataset with all data variables opened via TensorStore.
    """
    # We use xarray.open_zarr (which uses Zarr Python internally) to open the
    # initial version of the dataset for a few reasons:
    # 1. TensorStore does not support Zarr groups or array attributes, which we
    #    need to open in the xarray.Dataset. We use Zarr Python instead of
    #    parsing the raw Zarr metadata files ourselves.
    # 2. TensorStore doesn't support non-standard Zarr dtypes like UTF-8 strings.
    # 3. Xarray's open_zarr machinery does some pre-processing (e.g., from numeric
    #    to datetime64 dtypes) that we would otherwise need to invoke explicitly
    #    via xarray.decode_cf().
    #
    # Fortunately (2) and (3) are most commonly encountered on small coordinate
    # arrays, for which the performance advantages of TensorStore are irrelevant.

    if context is None:
        context = tensorstore.Context()

    # chunks=None means avoid using dask
    ds = xarray.open_zarr(path, chunks=None, mask_and_scale=mask_and_scale)

    # find out if the store is zarr format 2 or 3
    try:
        # this should work with zarr>=3 - https://github.com/zarr-developers/zarr-python
        zarr_format = zarr.open(path).metadata.zarr_format
    except:  # noqa E722
        # try to open it, but if it fails, assume zarr_format 2
        zarr_format = 2

    if mask_and_scale:
        # Data variables get replaced below with _TensorStoreAdapter arrays, which
        # don't get masked or scaled. Raising an error avoids surprising users with
        # incorrect data values.
        _raise_if_mask_and_scale_used_for_data_vars(ds)

    specs = {k: _zarr_spec_from_path(os.path.join(path, k), zarr_format) for k in ds}
    array_futures = {
        k: tensorstore.open(spec, read=True, write=write, context=context)
        for k, spec in specs.items()
    }
    arrays = {k: v.result() for k, v in array_futures.items()}
    new_data = {k: _TensorStoreAdapter(v) for k, v in arrays.items()}

    return ds.copy(data=new_data)
```
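If a data variable needs masking or scaling, `open_zarr` raises rather than silently returning wrong values. A sketch of the two fallback paths its error message suggests (paths hypothetical):

```python
import xarray

from ocf_data_sampler.load.xarray_tensorstore import open_zarr

try:
    ds = open_zarr("/data/nwp.zarr")
except ValueError:
    # Option 1: keep the TensorStore fast path but skip mask/scale decoding.
    ds = open_zarr("/data/nwp.zarr", mask_and_scale=False)
    # Option 2: give up the fast path and let xarray decode everything.
    # ds = xarray.open_zarr("/data/nwp.zarr")
```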
{ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/METADATA

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ocf-data-sampler
-Version: 0.5.3
+Version: 0.5.5
 Author: James Fulton, Peter Dudfield
 Author-email: Open Climate Fix team <info@openclimatefix.org>
 License: MIT License
@@ -28,14 +28,14 @@ License: MIT License
 Project-URL: repository, https://github.com/openclimatefix/ocf-data-sampler
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
-Requires-Python: >=3.
+Requires-Python: >=3.11
 Description-Content-Type: text/markdown
 Requires-Dist: torch
 Requires-Dist: numpy
 Requires-Dist: pandas
 Requires-Dist: xarray
 Requires-Dist: zarr
-Requires-Dist: numcodecs
+Requires-Dist: numcodecs
 Requires-Dist: dask
 Requires-Dist: matplotlib
 Requires-Dist: pvlib
@@ -44,7 +44,8 @@ Requires-Dist: pyproj
 Requires-Dist: pyaml_env
 Requires-Dist: pyresample
 Requires-Dist: h5netcdf
-Requires-Dist:
+Requires-Dist: tensorstore
+Requires-Dist: zarr>=3
 
 # ocf-data-sampler
 
```
{ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/RECORD

```diff
@@ -9,10 +9,11 @@ ocf_data_sampler/data/uk_gsp_locations_20250109.csv,sha256=XZISFatnbpO9j8LwaxNKF
 ocf_data_sampler/load/__init__.py,sha256=-vQP9g0UOWdVbjEGyVX_ipa7R1btmiETIKAf6aw4d78,201
 ocf_data_sampler/load/gsp.py,sha256=d30jQWnwFaLj6rKNMHdz1qD8fzF8q--RNnEXT7bGiX0,2981
 ocf_data_sampler/load/load_dataset.py,sha256=K8rWykjII-3g127If7WRRFivzHNx3SshCvZj4uQlf28,2089
-ocf_data_sampler/load/open_tensorstore_zarrs.py,sha256=
-ocf_data_sampler/load/satellite.py,sha256=
+ocf_data_sampler/load/open_tensorstore_zarrs.py,sha256=ElXmW7GhYDpsHZr7KjM-KIDNJMc4lmgzVIBwHx5Wl0Q,2748
+ocf_data_sampler/load/satellite.py,sha256=X5ZqFfMgab_WDwI7w1ZmdyMeh3GwV1g7mBd8tFgr8dM,1862
 ocf_data_sampler/load/site.py,sha256=WtOy20VMHJIY0IwEemCdcecSDUGcVaLUown-4ixJw90,2147
 ocf_data_sampler/load/utils.py,sha256=AGL0aOOQPrgqNBTjlBtR7Qg1PyQov3DFJo-y198u8pY,2044
+ocf_data_sampler/load/xarray_tensorstore.py,sha256=DSZl364Hn3QjcVxxPmBKU9rsc5BlJBdzL_SMrv-9os0,10997
 ocf_data_sampler/load/nwp/__init__.py,sha256=SmcrnbygO5xtCKmGR4wtHrj-HI7nOAvnAtfuvRufBGQ,25
 ocf_data_sampler/load/nwp/nwp.py,sha256=0E9shei3Mq1N7F-fBlEKY5Hm0_kI7ysY_rffnWIshvk,3612
 ocf_data_sampler/load/nwp/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -21,7 +22,7 @@ ocf_data_sampler/load/nwp/providers/ecmwf.py,sha256=P7JqfssmQq8eHKKXaBexsxts325A
 ocf_data_sampler/load/nwp/providers/gfs.py,sha256=h6vm-Rfz1JGOE4P_fP1_XQJ3bugNbeNAIyt56N8B1Dc,1066
 ocf_data_sampler/load/nwp/providers/icon.py,sha256=iVZwLKRr_D74_kAu5MHir6pRKEfbTmIxFRZAxzmiYdI,1257
 ocf_data_sampler/load/nwp/providers/ukv.py,sha256=2i32VM9gnmWUpbL0qBSp_AKzuyKucXZPS8yklbcGlbc,1039
-ocf_data_sampler/load/nwp/providers/utils.py,sha256=
+ocf_data_sampler/load/nwp/providers/utils.py,sha256=5LrLmy74AVY5uLwL2qEhy-yPqSYLoxOgN8W1v8FmaQA,2355
 ocf_data_sampler/numpy_sample/__init__.py,sha256=5bdpzM8hMAEe0XRSZ9AZFQdqEeBsEPhaF79Y8bDx3GQ,407
 ocf_data_sampler/numpy_sample/collate.py,sha256=hoxIc5SoHoIs3Nx37aRZzWChpswjy9lHUgaKgHIoo80,2039
 ocf_data_sampler/numpy_sample/common_types.py,sha256=9CjYHkUTx0ObduWh43fhsybZCTXvexql7qC2ptMDoek,377
@@ -56,7 +57,7 @@ ocf_data_sampler/torch_datasets/utils/validation_utils.py,sha256=YqmT-lExWlI8_ul
 scripts/download_gsp_location_data.py,sha256=rRDXMoqX-RYY4jPdxhdlxJGhWdl6r245F5UARgKV6P4,3121
 scripts/refactor_site.py,sha256=skzvsPP0Cn9yTKndzkilyNcGz4DZ88ctvCJ0XrBdc2A,3135
 utils/compute_icon_mean_stddev.py,sha256=a1oWMRMnny39rV-dvu8rcx85sb4bXzPFrR1gkUr4Jpg,2296
-ocf_data_sampler-0.5.
-ocf_data_sampler-0.5.
-ocf_data_sampler-0.5.
-ocf_data_sampler-0.5.
+ocf_data_sampler-0.5.5.dist-info/METADATA,sha256=R9MPrxfVGCnkBbUehSjd3taDZxeREDo_YaIv5ccqnyg,12581
+ocf_data_sampler-0.5.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ocf_data_sampler-0.5.5.dist-info/top_level.txt,sha256=LEFU4Uk-PEo72QGLAfnVZIUEm37Q8mKuMeg_Xk-p33g,31
+ocf_data_sampler-0.5.5.dist-info/RECORD,,
```
{ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/WHEEL: file without changes
{ocf_data_sampler-0.5.3.dist-info → ocf_data_sampler-0.5.5.dist-info}/top_level.txt: file without changes