ocf-data-sampler 0.2.37__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff shows the content changes between publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

This version of ocf-data-sampler has been flagged as potentially problematic.

@@ -25,7 +25,7 @@ def get_dataset_dict(
         zarr_path=input_config.gsp.zarr_path,
         boundaries_version=input_config.gsp.boundaries_version,
         public=input_config.gsp.public,
-    ).compute()
+    )
 
     if gsp_ids is None:
         # Remove national (gsp_id=0)
@@ -28,7 +28,7 @@ def open_cloudcasting(zarr_path: str | list[str]) -> xr.DataArray:
     [3] https://github.com/openclimatefix/sat_pred
     """
     # Open the data
-    ds = open_zarr_paths(zarr_path)
+    ds = open_zarr_paths(zarr_path, backend="tensorstore")
 
     # Rename
     ds = ds.rename(
@@ -19,7 +19,7 @@ def open_ifs(zarr_path: str | list[str]) -> xr.DataArray:
     Returns:
         Xarray DataArray of the NWP data
     """
-    ds = open_zarr_paths(zarr_path)
+    ds = open_zarr_paths(zarr_path, backend="tensorstore")
 
     # LEGACY SUPPORT - rename variable to channel if it exists
     ds = ds.rename({"init_time": "init_time_utc", "variable": "channel"})
@@ -23,7 +23,12 @@ def open_gfs(zarr_path: str | list[str], public: bool = False) -> xr.DataArray:
     _log.info("Loading NWP GFS data")
 
     # Open data
-    gfs: xr.Dataset = open_zarr_paths(zarr_path, time_dim="init_time_utc", public=public)
+    gfs: xr.Dataset = open_zarr_paths(
+        zarr_path,
+        time_dim="init_time_utc",
+        public=public,
+        backend="dask",
+    )
     nwp: xr.DataArray = gfs.to_array(dim="channel")
 
     del gfs
@@ -20,7 +20,7 @@ def open_icon_eu(zarr_path: str | list[str]) -> xr.DataArray:
         Xarray DataArray of the NWP data
     """
     # Open and check initially
-    ds = open_zarr_paths(zarr_path, time_dim="init_time_utc")
+    ds = open_zarr_paths(zarr_path, time_dim="init_time_utc", backend="dask")
 
     if "icon_eu_data" in ds.data_vars:
         nwp = ds["icon_eu_data"]
@@ -19,7 +19,7 @@ def open_ukv(zarr_path: str | list[str]) -> xr.DataArray:
     Returns:
         Xarray DataArray of the NWP data
     """
-    ds = open_zarr_paths(zarr_path)
+    ds = open_zarr_paths(zarr_path, backend="tensorstore")
 
     ds = ds.rename(
         {
@@ -1,21 +1,48 @@
 """Utility functions for the NWP data processing."""
 
+from glob import glob
+
 import xarray as xr
+from xarray_tensorstore import open_zarr
+
+from ocf_data_sampler.load.open_tensorstore_zarrs import open_zarrs
 
 
 def open_zarr_paths(
-    zarr_path: str | list[str], time_dim: str = "init_time", public: bool = False,
+    zarr_path: str | list[str],
+    time_dim: str = "init_time",
+    public: bool = False,
+    backend: str = "dask",
 ) -> xr.Dataset:
     """Opens the NWP data.
 
     Args:
         zarr_path: Path to the zarr(s) to open
         time_dim: Name of the time dimension
-        public: Whether the data is public or private
+        public: Whether the data is public or private. Only available for the dask backend.
+        backend: The xarray backend to use.
 
     Returns:
         The opened Xarray Dataset
     """
+    if backend not in ["dask", "tensorstore"]:
+        raise ValueError(
+            f"Unsupported backend: {backend}. Supported backends are 'dask' and 'tensorstore'.",
+        )
+
+    if public and backend == "tensorstore":
+        raise ValueError("Public data is only supported with the 'dask' backend.")
+
+    if backend == "tensorstore":
+        ds = _tensorstore_open_zarr_paths(zarr_path, time_dim)
+
+    elif backend == "dask":
+        ds = _dask_open_zarr_paths(zarr_path, time_dim, public)
+
+    return ds
+
+
+def _dask_open_zarr_paths(zarr_path: str | list[str], time_dim: str, public: bool) -> xr.Dataset:
     general_kwargs = {
         "engine": "zarr",
         "chunks": "auto",
@@ -26,7 +53,7 @@ def open_zarr_paths(
         # note this only works for s3 zarr paths at the moment
         general_kwargs["storage_options"] = {"anon": True}
 
-    if type(zarr_path) in [list, tuple] or "*" in str(zarr_path):  # Multi-file dataset
+    if isinstance(zarr_path, list | tuple) or "*" in str(zarr_path):  # Multi-file dataset
        ds = xr.open_mfdataset(
            zarr_path,
            concat_dim=time_dim,
@@ -41,3 +68,16 @@ def open_zarr_paths(
         **general_kwargs,
     )
     return ds
+
+
+def _tensorstore_open_zarr_paths(zarr_path: str | list[str], time_dim: str) -> xr.Dataset:
+
+    if "*" in str(zarr_path):
+        zarr_path = sorted(glob(zarr_path))
+
+    if isinstance(zarr_path, list | tuple):
+        ds = open_zarrs(zarr_path, concat_dim=time_dim).sortby(time_dim)
+    else:
+        ds = open_zarr(zarr_path)
+    return ds
+
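
The new `backend` argument dispatches between the existing dask reader and the TensorStore one. A minimal usage sketch, assuming hypothetical zarr paths (only the `open_zarr_paths` signature itself comes from this release):

    from ocf_data_sampler.load.nwp.providers.utils import open_zarr_paths

    # Default dask backend; required when reading anonymous/public S3 data
    ds = open_zarr_paths(
        "s3://bucket/nwp.zarr",  # hypothetical path
        time_dim="init_time_utc",
        public=True,
    )

    # TensorStore backend; a glob pattern is expanded and sorted, then the
    # stores are concatenated along time_dim
    ds = open_zarr_paths(
        "data/nwp_*.zarr",  # hypothetical local pattern
        time_dim="init_time_utc",
        backend="tensorstore",
    )

    # Combining public=True with backend="tensorstore" raises ValueError
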
@@ -0,0 +1,92 @@
+"""Open multiple zarrs with TensorStore.
+
+This extends the functionality of xarray_tensorstore to open multiple zarr stores.
+"""
+
+import os
+
+import tensorstore as ts
+import xarray as xr
+from xarray_tensorstore import (
+    _raise_if_mask_and_scale_used_for_data_vars,
+    _TensorStoreAdapter,
+    _zarr_spec_from_path,
+)
+
+
+def tensorstore_open_multi_zarrs(
+    paths: list[str],
+    data_vars: list[str],
+    concat_axes: list[int],
+    context: ts.Context,
+    write: bool,
+) -> dict[str, ts.TensorStore]:
+    """Open multiple zarrs with TensorStore.
+
+    Args:
+        paths: List of paths to zarr stores.
+        data_vars: List of data variable names to open.
+        concat_axes: List of axes along which to concatenate the data variables.
+        context: TensorStore context.
+        write: Whether to open the stores for writing.
+    """
+    arrays_list = []
+    for path in paths:
+        specs = {k: _zarr_spec_from_path(os.path.join(path, k)) for k in data_vars}
+        array_futures = {
+            k: ts.open(spec, read=True, write=write, context=context)
+            for k, spec in specs.items()
+        }
+        arrays_list.append({k: v.result() for k, v in array_futures.items()})
+
+    arrays = {}
+    for k, axis in zip(data_vars, concat_axes, strict=False):
+        datasets = [d[k] for d in arrays_list]
+        arrays[k] = ts.concat(datasets, axis=axis)
+
+    return arrays
+
+
+def open_zarrs(
+    paths: list[str],
+    concat_dim: str,
+    *,
+    context: ts.Context | None = None,
+    mask_and_scale: bool = True,
+    write: bool = False,
+) -> xr.Dataset:
+    """Open multiple zarrs with TensorStore.
+
+    Args:
+        paths: List of paths to zarr stores.
+        concat_dim: Dimension along which to concatenate the data variables.
+        context: TensorStore context.
+        mask_and_scale: Whether to mask and scale the data.
+        write: Whether to open the stores for writing.
+    """
+    if context is None:
+        context = ts.Context()
+
+    ds = xr.open_mfdataset(
+        paths,
+        concat_dim=concat_dim,
+        combine="nested",
+        mask_and_scale=mask_and_scale,
+        decode_timedelta=True,
+    )
+
+    if mask_and_scale:
+        # Data variables get replaced below with _TensorStoreAdapter arrays, which
+        # don't get masked or scaled. Raising an error avoids surprising users with
+        # incorrect data values.
+        _raise_if_mask_and_scale_used_for_data_vars(ds)
+
+    data_vars = list(ds.data_vars)
+
+    concat_axes = [ds[v].dims.index(concat_dim) for v in data_vars]
+
+    arrays = tensorstore_open_multi_zarrs(paths, data_vars, concat_axes, context, write)
+
+    new_data = {k: _TensorStoreAdapter(v) for k, v in arrays.items()}
+
+    return ds.copy(data=new_data)
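
`open_zarrs` builds the dataset's coordinates and metadata with `xr.open_mfdataset`, then swaps each data variable for a lazily concatenated TensorStore array, so bytes are only read when values are requested. A usage sketch under assumed store paths (the paths and the `.compute()` call are illustrative, not from this release):

    from ocf_data_sampler.load.open_tensorstore_zarrs import open_zarrs

    # Two hypothetical monthly stores sharing a "time" dimension
    ds = open_zarrs(["sat_2023-01.zarr", "sat_2023-02.zarr"], concat_dim="time")

    # Indexing stays lazy; the actual reads happen on compute
    first_frame = ds.isel(time=0).compute()
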
@@ -1,6 +1,7 @@
 """Satellite loader."""
 import numpy as np
 import xarray as xr
+from xarray_tensorstore import open_zarr
 
 from ocf_data_sampler.load.utils import (
     check_time_unique_increasing,
@@ -8,39 +9,7 @@ from ocf_data_sampler.load.utils import (
     make_spatial_coords_increasing,
 )
 
-
-def get_single_sat_data(zarr_path: str) -> xr.Dataset:
-    """Helper function to open a zarr from either a local or GCP path.
-
-    Args:
-        zarr_path: path to a zarr file. Wildcards (*) are supported only for local paths
-            GCS paths (gs://) do not support wildcards
-
-    Returns:
-        An xarray Dataset containing satellite data
-
-    Raises:
-        ValueError: If a wildcard (*) is used in a GCS (gs://) path
-    """
-    # Raise an error if a wildcard is used in a GCP path
-    if "gs://" in str(zarr_path) and "*" in str(zarr_path):
-        raise ValueError("Wildcard (*) paths are not supported for GCP (gs://) URLs")
-
-    # Handle multi-file dataset for local paths
-    if "*" in str(zarr_path):
-        ds = xr.open_mfdataset(
-            zarr_path,
-            engine="zarr",
-            concat_dim="time",
-            combine="nested",
-            chunks="auto",
-            join="override",
-        )
-        check_time_unique_increasing(ds.time)
-    else:
-        ds = xr.open_dataset(zarr_path, engine="zarr", chunks="auto")
-
-    return ds
+from .open_tensorstore_zarrs import open_zarrs
 
 
 def open_sat_data(zarr_path: str | list[str]) -> xr.DataArray:
@@ -52,14 +21,11 @@ def open_sat_data(zarr_path: str | list[str]) -> xr.DataArray:
     """
     # Open the data
     if isinstance(zarr_path, list | tuple):
-        ds = xr.combine_nested(
-            [get_single_sat_data(path) for path in zarr_path],
-            concat_dim="time",
-            combine_attrs="override",
-            join="override",
-        )
+        ds = open_zarrs(zarr_path, concat_dim="time")
     else:
-        ds = get_single_sat_data(zarr_path)
+        ds = open_zarr(zarr_path)
+
+    check_time_unique_increasing(ds.time)
 
     ds = ds.rename(
         {
@@ -47,7 +47,7 @@ def get_xr_data_array_from_xr_dataset(ds: xr.Dataset) -> xr.DataArray:
     Args:
         ds: xr.Dataset to extract xr.DataArray from
     """
-    datavars = list(ds.var())
+    datavars = list(ds.data_vars)
     if len(datavars) != 1:
         raise ValueError("Cannot open as xr.DataArray: dataset contains multiple variables")
     return ds[datavars[0]]
@@ -242,6 +242,11 @@ def intersection_of_2_dataframes_of_periods(a: pd.DataFrame, b: pd.DataFrame) ->
     if a.empty or b.empty:
         return pd.DataFrame(columns=["start_dt", "end_dt"])
 
+    # Swap the dataframes if needed so that the native Python loop below runs
+    # over the shorter of the two, for efficiency
+    if len(a) > len(b):
+        a, b = b, a
+
     all_intersecting_periods = []
     for a_period in a.itertuples():
         # Five ways in which two periods may overlap:
@@ -250,12 +255,12 @@ def intersection_of_2_dataframes_of_periods(a: pd.DataFrame, b: pd.DataFrame) ->
         # In all five, `a` must always start before (or equal to) where `b` ends,
         # and `a` must always end after (or equal to) where `b` starts.
 
-        overlapping_periods = b[(a_period.start_dt <= b.end_dt) & (a_period.end_dt >= b.start_dt)]
-
         # There are two ways in which two periods may *not* overlap:
         # a: |---|        or         |---|
         # b:        |---|     |---|
-        # `overlapping` will not include periods which do *not* overlap.
+        # `overlapping_periods` will not include periods which do *not* overlap.
+
+        overlapping_periods = b[(a_period.start_dt <= b.end_dt) & (a_period.end_dt >= b.start_dt)]
 
         # Now find the intersection of each period in `overlapping_periods` with
         # the period from `a` that starts at `a_start_dt` and ends at `a_end_dt`.
@@ -269,5 +274,6 @@ def intersection_of_2_dataframes_of_periods(a: pd.DataFrame, b: pd.DataFrame) ->
 
         all_intersecting_periods.append(intersection)
 
+
     all_intersecting_periods = pd.concat(all_intersecting_periods)
     return all_intersecting_periods.sort_values(by="start_dt").reset_index(drop=True)
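
After the swap, the O(n) Python loop iterates over the shorter dataframe while the vectorised mask handles the longer one. A small worked example, assuming the function is imported from this module (the period values are made up):

    import pandas as pd

    from ocf_data_sampler.select.find_contiguous_time_periods import (
        intersection_of_2_dataframes_of_periods,
    )

    a = pd.DataFrame(
        {"start_dt": pd.to_datetime(["2023-01-01 00:00"]),
         "end_dt": pd.to_datetime(["2023-01-01 12:00"])},
    )
    b = pd.DataFrame(
        {"start_dt": pd.to_datetime(["2023-01-01 06:00", "2023-01-01 18:00"]),
         "end_dt": pd.to_datetime(["2023-01-01 09:00", "2023-01-01 21:00"])},
    )

    # b's first period lies inside a's single period and b's second is
    # disjoint, so the result is one row: 06:00 to 09:00. Since len(a) < len(b),
    # the Python loop runs over a's single row.
    print(intersection_of_2_dataframes_of_periods(a, b))
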
@@ -270,6 +270,8 @@ class PVNetUKRegionalDataset(AbstractPVNetUKDataset):
     def __getitem__(self, idx: int) -> NumpySample:
         # Get the coordinates of the sample
 
+        idx = int(idx)
+
         if idx >= len(self):
             raise ValueError(f"Index {idx} out of range for dataset of length {len(self)}")
 
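The `int(idx)` cast presumably normalises numpy (and similar) integer indices, such as those a shuffling sampler produces, before the bounds check and any downstream use. A minimal sketch of the coercion itself (not taken from the release):

    import numpy as np

    idx = np.int64(3)  # e.g. produced by np.random.permutation
    idx = int(idx)     # the new cast: normalise to a built-in int
    assert isinstance(idx, int)
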
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ocf-data-sampler
-Version: 0.2.37
+Version: 0.3.0
 Author: James Fulton, Peter Dudfield
 Author-email: Open Climate Fix team <info@openclimatefix.org>
 License: MIT License
@@ -44,6 +44,7 @@ Requires-Dist: pyproj
 Requires-Dist: pyaml_env
 Requires-Dist: pyresample
 Requires-Dist: h5netcdf
+Requires-Dist: xarray-tensorstore==0.1.5
 
 # ocf-data-sampler
 
@@ -8,19 +8,20 @@ ocf_data_sampler/data/uk_gsp_locations_20220314.csv,sha256=RSh7DRh55E3n8lVAaWXGT
 ocf_data_sampler/data/uk_gsp_locations_20250109.csv,sha256=XZISFatnbpO9j8LwaxNKFzQSjs6hcHFsV8a9uDDpy2E,9055334
 ocf_data_sampler/load/__init__.py,sha256=-vQP9g0UOWdVbjEGyVX_ipa7R1btmiETIKAf6aw4d78,201
 ocf_data_sampler/load/gsp.py,sha256=IrTA6z9quN08imKGHJLf8gRktarxn1-utNMNFD0zWQs,2944
-ocf_data_sampler/load/load_dataset.py,sha256=WjB3DvHbDQQYYnPmDFOWg_TQPgARZ5pu8fiRZSGtIg0,2099
-ocf_data_sampler/load/satellite.py,sha256=Gsc3oyPydEZLy6slUDtIpBCYLxWy9P3pD1VyI4W9-2w,2944
+ocf_data_sampler/load/load_dataset.py,sha256=K8rWykjII-3g127If7WRRFivzHNx3SshCvZj4uQlf28,2089
+ocf_data_sampler/load/open_tensorstore_zarrs.py,sha256=_RHWe0GmrBSA9s1TH5I9VCMPpeZEsuRuhDt5Vyyx5Fo,2725
+ocf_data_sampler/load/satellite.py,sha256=RylkJz8avxdM5pK_liaTlD1DTboyPMgykXJ4_Ek9WBA,1840
 ocf_data_sampler/load/site.py,sha256=WtOy20VMHJIY0IwEemCdcecSDUGcVaLUown-4ixJw90,2147
-ocf_data_sampler/load/utils.py,sha256=sZ0-zzconcLkVQwAkCYrqKDo98Hrh5ChdiQJv5Bh91g,2040
+ocf_data_sampler/load/utils.py,sha256=AGL0aOOQPrgqNBTjlBtR7Qg1PyQov3DFJo-y198u8pY,2044
 ocf_data_sampler/load/nwp/__init__.py,sha256=SmcrnbygO5xtCKmGR4wtHrj-HI7nOAvnAtfuvRufBGQ,25
 ocf_data_sampler/load/nwp/nwp.py,sha256=0E9shei3Mq1N7F-fBlEKY5Hm0_kI7ysY_rffnWIshvk,3612
 ocf_data_sampler/load/nwp/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ocf_data_sampler/load/nwp/providers/cloudcasting.py,sha256=fozXpB3a2rNqQgnpRDC7xunxffh7Wwmc0kkCiYmDVJ4,1521
-ocf_data_sampler/load/nwp/providers/ecmwf.py,sha256=an-gXsZwkPQvRXeza1U_4MNU5yEnVm0_8tn03rxTudI,997
-ocf_data_sampler/load/nwp/providers/gfs.py,sha256=anNSITZ8MSCMnJhGdwUUJ1Ed-ArGKeqJ6HB6eJ814c8,1011
-ocf_data_sampler/load/nwp/providers/icon.py,sha256=BnY3vAa5pHn1cyrImj0ymaRFKHanNtfD9_JO-4p2IZY,1241
-ocf_data_sampler/load/nwp/providers/ukv.py,sha256=Ka1KFZcJYPwr5vuxo-xWGVQC0pudheqGBonUnbyJCMg,1016
-ocf_data_sampler/load/nwp/providers/utils.py,sha256=NrzE3JAtoc6oEywJHxPUdi_I4UJgJ_l5dxLZ4DLKvcg,1124
+ocf_data_sampler/load/nwp/providers/cloudcasting.py,sha256=EbhJO1QNtTtumuyh3I7HQa7xZ50WeD-SQe7AsMxZC84,1544
+ocf_data_sampler/load/nwp/providers/ecmwf.py,sha256=P7JqfssmQq8eHKKXaBexsxts325AnK-JF3bl5ymVCjY,1020
+ocf_data_sampler/load/nwp/providers/gfs.py,sha256=h6vm-Rfz1JGOE4P_fP1_XQJ3bugNbeNAIyt56N8B1Dc,1066
+ocf_data_sampler/load/nwp/providers/icon.py,sha256=iVZwLKRr_D74_kAu5MHir6pRKEfbTmIxFRZAxzmiYdI,1257
+ocf_data_sampler/load/nwp/providers/ukv.py,sha256=2i32VM9gnmWUpbL0qBSp_AKzuyKucXZPS8yklbcGlbc,1039
+ocf_data_sampler/load/nwp/providers/utils.py,sha256=cVwCiC8FqNpkZFSUGv1CRqIQlKdjx1sIsb2SIUlvWV8,2333
 ocf_data_sampler/numpy_sample/__init__.py,sha256=nY5C6CcuxiWZ_jrXRzWtN7WyKXhJImSiVTIG6Rz4B_4,401
 ocf_data_sampler/numpy_sample/collate.py,sha256=hoxIc5SoHoIs3Nx37aRZzWChpswjy9lHUgaKgHIoo80,2039
 ocf_data_sampler/numpy_sample/common_types.py,sha256=9CjYHkUTx0ObduWh43fhsybZCTXvexql7qC2ptMDoek,377
@@ -33,13 +34,13 @@ ocf_data_sampler/numpy_sample/sun_position.py,sha256=5tt-zNm6aRuZMsxZPaAxyg7Heik
 ocf_data_sampler/select/__init__.py,sha256=mK7Wu_-j9IXGTYrOuDf5yDDuU5a306b0iGKTAooNg_s,210
 ocf_data_sampler/select/dropout.py,sha256=9gPyDF7bGmvSoMjMPu1j0gTZFHNFqsT3ToIo9mFNA00,1565
 ocf_data_sampler/select/fill_time_periods.py,sha256=TlGxp1xiAqnhdWfLy0pv3FuZc00dtimjWdLzr4JoTGA,865
-ocf_data_sampler/select/find_contiguous_time_periods.py,sha256=8lkWsV5i7iLCVGqQ-PGZbvWxsz3wBvLO70GSf6WeR0k,11363
+ocf_data_sampler/select/find_contiguous_time_periods.py,sha256=etkr6LuB7zxkfzWJ6SgHiULdRuFzFlq5bOUNd257Qx4,11545
 ocf_data_sampler/select/geospatial.py,sha256=CDExkl36eZOKmdJPzUr_K0Wn3axHqv5nYo-EkSiINcc,5032
 ocf_data_sampler/select/location.py,sha256=AZvGR8y62opiW7zACGXjoOtBEWRfSLOZIA73O5Deu0c,1037
 ocf_data_sampler/select/select_spatial_slice.py,sha256=Hd4jGRUfIZRoWCirOQZeoLpaUnStB6KyFSTPX69wZLw,8790
 ocf_data_sampler/select/select_time_slice.py,sha256=HeHbwZ0CP03x0-LaJtpbSdtpLufwVTR73p6wH6O_PS8,5513
 ocf_data_sampler/torch_datasets/datasets/__init__.py,sha256=jfJSFcR0eO1AqeH7S3KnGjsBqVZT5w3oyi784PUR6Q0,146
-ocf_data_sampler/torch_datasets/datasets/pvnet_uk.py,sha256=GuwDB27Ja0o8OwLt0-7KzZBxg5AU1uIDjvKpLBUI90I,12233
+ocf_data_sampler/torch_datasets/datasets/pvnet_uk.py,sha256=v63goKEMI6UgBPnQCnIbxhFFdwuP_sxgcPYY6iNfGkc,12257
 ocf_data_sampler/torch_datasets/datasets/site.py,sha256=R9sYZz3e1zr8NAtlYQp8_DgI3wIfC7Zvaeo_73TyiA8,24936
 ocf_data_sampler/torch_datasets/sample/__init__.py,sha256=GL84vdZl_SjHDGVyh9Uekx2XhPYuZ0dnO3l6f6KXnHI,100
 ocf_data_sampler/torch_datasets/sample/base.py,sha256=cQ1oIyhdmlotejZK8B3Cw6MNvpdnBPD8G_o2h7Ye4Vc,2206
@@ -55,7 +56,7 @@ ocf_data_sampler/torch_datasets/utils/validation_utils.py,sha256=YqmT-lExWlI8_ul
 scripts/download_gsp_location_data.py,sha256=rRDXMoqX-RYY4jPdxhdlxJGhWdl6r245F5UARgKV6P4,3121
 scripts/refactor_site.py,sha256=skzvsPP0Cn9yTKndzkilyNcGz4DZ88ctvCJ0XrBdc2A,3135
 utils/compute_icon_mean_stddev.py,sha256=a1oWMRMnny39rV-dvu8rcx85sb4bXzPFrR1gkUr4Jpg,2296
-ocf_data_sampler-0.2.37.dist-info/METADATA,sha256=KejDSbMqRFuGL-XF8CVoM4QFqzgCbzzTEDrkY8C3VJc,12184
-ocf_data_sampler-0.2.37.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ocf_data_sampler-0.2.37.dist-info/top_level.txt,sha256=LEFU4Uk-PEo72QGLAfnVZIUEm37Q8mKuMeg_Xk-p33g,31
-ocf_data_sampler-0.2.37.dist-info/RECORD,,
+ocf_data_sampler-0.3.0.dist-info/METADATA,sha256=Kq7LhwcpxOpfu4S4NOq-JHFJYI7eeeuxPleNPx6UMLE,12224
+ocf_data_sampler-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ocf_data_sampler-0.3.0.dist-info/top_level.txt,sha256=LEFU4Uk-PEo72QGLAfnVZIUEm37Q8mKuMeg_Xk-p33g,31
+ocf_data_sampler-0.3.0.dist-info/RECORD,,