ocf-data-sampler 0.5.24__py3-none-any.whl → 0.5.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of ocf-data-sampler might be problematic.

@@ -75,7 +75,7 @@ def _tensostore_open_zarr_paths(zarr_path: str | list[str], time_dim: str) -> xr
         zarr_path = sorted(glob(zarr_path))
 
     if isinstance(zarr_path, list | tuple):
-        ds = open_zarrs(zarr_path, concat_dim=time_dim).sortby(time_dim)
+        ds = open_zarrs(zarr_path, concat_dim=time_dim, data_source="nwp").sortby(time_dim)
     else:
         ds = open_zarr(zarr_path)
     return ds
@@ -14,6 +14,7 @@ References:
     [2] https://www.apache.org/licenses/LICENSE-2.0
 """
 
+import logging
 import os.path
 import re
 
@@ -26,6 +27,7 @@ from xarray_tensorstore import (
     _TensorStoreAdapter,
 )
 
+logger = logging.getLogger(__name__)
 
 def _zarr_spec_from_path(path: str, zarr_format: int) -> ...:
     if re.match(r"\w+\://", path):  # path is a URI
@@ -127,6 +129,7 @@ def open_zarrs(
     concat_dim: str,
     context: ts.Context | None = None,
     mask_and_scale: bool = True,
+    data_source: str = "unknown",
 ) -> xr.Dataset:
     """Open multiple zarrs with TensorStore.
 
@@ -135,6 +138,7 @@ def open_zarrs(
         concat_dim: Dimension along which to concatenate the data variables.
         context: TensorStore context.
         mask_and_scale: Whether to mask and scale the data.
+        data_source: Which data source is being opened. Used for warning context.
 
     Returns:
         Concatenated Dataset with all data variables opened via TensorStore.
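As a rough usage sketch (the zarr paths and concat dimension below are hypothetical, not taken from the package), the new argument is simply threaded through by the loaders:

    # Hypothetical paths; the package's own loaders pass data_source="nwp" or "satellite"
    paths = ["nwp_2024_01.zarr", "nwp_2024_02.zarr"]
    ds = open_zarrs(paths, concat_dim="init_time", data_source="nwp")
    # If the stores' coordinates disagree, the warning added below names "nwp" as the source.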
@@ -143,13 +147,28 @@ def open_zarrs(
         context = ts.Context()
 
     ds_list = [xr.open_zarr(p, mask_and_scale=mask_and_scale, decode_timedelta=True) for p in paths]
-    ds = xr.concat(
-        ds_list,
-        dim=concat_dim,
-        data_vars="minimal",
-        compat="equals",
-        combine_attrs="drop_conflicts",
-    )
+    try:
+        ds = xr.concat(
+            ds_list,
+            dim=concat_dim,
+            data_vars="minimal",
+            compat="equals",
+            combine_attrs="drop_conflicts",
+            join="exact",
+        )
+    except ValueError:
+        logger.warning(f"Coordinate mismatch found in {data_source} input data. "
+                       f"The coordinates will be overwritten! "
+                       f"This might be fine for satellite data. "
+                       f"Proceed with caution.")
+        ds = xr.concat(
+            ds_list,
+            dim=concat_dim,
+            data_vars="minimal",
+            compat="equals",
+            combine_attrs="drop_conflicts",
+            join="override",
+        )
 
     if mask_and_scale:
         _raise_if_mask_and_scale_used_for_data_vars(ds)
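For context on the two join modes: xr.concat(..., join="exact") raises a ValueError when coordinates on non-concatenated dimensions differ, while join="override" keeps the coordinate values of the first dataset. A minimal, self-contained sketch of that behaviour (the toy datasets are illustrative only, not from the package):

    import numpy as np
    import xarray as xr

    # Two single-timestep datasets whose "x" coordinates disagree slightly
    ds_a = xr.Dataset({"v": ("x", [1.0, 2.0])}, coords={"x": [0.0, 1.0]})
    ds_b = xr.Dataset({"v": ("x", [3.0, 4.0])}, coords={"x": [0.0, 1.000001]})
    ds_a = ds_a.expand_dims(time=[np.datetime64("2024-01-01")])
    ds_b = ds_b.expand_dims(time=[np.datetime64("2024-01-02")])

    try:
        xr.concat([ds_a, ds_b], dim="time", join="exact")      # mismatch on "x" -> ValueError
    except ValueError as err:
        print(f"exact join failed: {err}")

    ds = xr.concat([ds_a, ds_b], dim="time", join="override")  # keeps ds_a's "x" values
    print(ds.x.values)                                          # [0. 1.]

The fallback therefore snaps every input onto the first store's coordinates, which is why the new warning urges caution outside the satellite case.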
@@ -19,7 +19,7 @@ def open_sat_data(zarr_path: str | list[str]) -> xr.DataArray:
     """
     # Open the data
    if isinstance(zarr_path, list | tuple):
-        ds = open_zarrs(zarr_path, concat_dim="time")
+        ds = open_zarrs(zarr_path, concat_dim="time", data_source="satellite")
     else:
         ds = open_zarr(zarr_path)
 
@@ -31,9 +31,10 @@ def open_site(generation_file_path: str, metadata_file_path: str) -> xr.DataArra
         capacity_kwp=("site_id", metadata_df["capacity_kwp"].values),
     )
 
-    # Sanity checks
-    if not np.isfinite(generation_ds.generation_kw.values).all():
-        raise ValueError("generation_kw contains non-finite values")
+    # Sanity checks, to prevent inf or negative values
+    # Note NaNs are allowed in generation_kw as can have non overlapping time periods for sites
+    if np.isinf(generation_ds.generation_kw.values).all():
+        raise ValueError("generation_kw contains infinite (+/- inf) values")
     if not (generation_ds.capacity_kwp.values > 0).all():
         raise ValueError("capacity_kwp contains non-positive values")
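For reference on the relaxed check: np.isfinite is False for NaN as well as +/-inf, whereas np.isinf flags only +/-inf, which is what now lets NaN gaps (non-overlapping site time periods) through. A small illustration:

    import numpy as np

    generation = np.array([1.2, np.nan, 3.4, np.inf])
    print(np.isfinite(generation))  # [ True False  True False] -- NaN failed the old check
    print(np.isinf(generation))     # [False False False  True] -- only +/-inf is flagged now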
 
@@ -43,17 +44,21 @@ def open_site(generation_file_path: str, metadata_file_path: str) -> xr.DataArra
     if not np.issubdtype(site_da.dtype, np.floating):
         raise TypeError(f"Generation data should be float, not {site_da.dtype}")
 
+
     coord_dtypes = {
-        "time_utc": np.datetime64,
-        "site_id": np.integer,
-        "capacity_kwp": np.floating,
-        "latitude": np.floating,
-        "longitude": np.floating,
-    }
-
-    for coord, expected_dtype in coord_dtypes.items():
-        if not np.issubdtype(site_da.coords[coord].dtype, expected_dtype):
+        "time_utc": (np.datetime64,),
+        "site_id": (np.integer,),
+        "capacity_kwp": (np.integer, np.floating),
+        "latitude": (np.floating,),
+        "longitude": (np.floating,),
+    }
+    for coord, expected_dtypes in coord_dtypes.items():
+        if not any(np.issubdtype(site_da.coords[coord].dtype, dt) for dt in expected_dtypes):
             dtype = site_da.coords[coord].dtype
-            raise TypeError(f"{coord} should be {expected_dtype.__name__}, not {dtype}")
+            allowed = ", ".join(dt.__name__ for dt in expected_dtypes)
+            raise TypeError(f"{coord} should be one of ({allowed}), not {dtype}")
 
-    return site_da
+    # Load the data eagerly into memory by calling compute
+    # this makes the dataset faster to sample from, but
+    # at the cost of a little extra memory usage
+    return site_da.compute()
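The coordinate check now accepts any of a tuple of NumPy abstract dtypes per coordinate (so capacity_kwp may be integer or float). A minimal sketch of the pattern, with illustrative dtypes:

    import numpy as np

    allowed = (np.integer, np.floating)  # e.g. the new entry for "capacity_kwp"
    for dtype in (np.dtype("int64"), np.dtype("float32"), np.dtype("object")):
        ok = any(np.issubdtype(dtype, dt) for dt in allowed)
        print(dtype, "accepted" if ok else "rejected")
    # int64 accepted, float32 accepted, object rejected

The final site_da.compute() change eagerly loads the otherwise lazily opened data into memory, trading a little extra RAM for faster repeated sampling, as the added comments note.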
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ocf-data-sampler
-Version: 0.5.24
+Version: 0.5.26
 Author: James Fulton, Peter Dudfield
 Author-email: Open Climate Fix team <info@openclimatefix.org>
 License: MIT License
@@ -9,9 +9,9 @@ ocf_data_sampler/data/uk_gsp_locations_20250109.csv,sha256=XZISFatnbpO9j8LwaxNKF
 ocf_data_sampler/load/__init__.py,sha256=-vQP9g0UOWdVbjEGyVX_ipa7R1btmiETIKAf6aw4d78,201
 ocf_data_sampler/load/gsp.py,sha256=zsQ39dZBS45qd86lGfCZUjheLRTtMzIUozj-j8c87UQ,3058
 ocf_data_sampler/load/load_dataset.py,sha256=K8rWykjII-3g127If7WRRFivzHNx3SshCvZj4uQlf28,2089
-ocf_data_sampler/load/open_xarray_tensorstore.py,sha256=kAqlIavGe1dcCPkzAtoZo2dFS-tW36E-wRE_3w1HMfg,5620
-ocf_data_sampler/load/satellite.py,sha256=B-m0_Py_D0GwzwX5o-ixyeXntV5Z4k4MbmMBHZLUWMM,1831
-ocf_data_sampler/load/site.py,sha256=WtOy20VMHJIY0IwEemCdcecSDUGcVaLUown-4ixJw90,2147
+ocf_data_sampler/load/open_xarray_tensorstore.py,sha256=YglCBeKa4mSjUU5qlcMOLZXUtFrPFWVKDeKHLjs_YbA,6353
+ocf_data_sampler/load/satellite.py,sha256=5o5SfcplQfZFlm3JJq73j8_m_cWKpFtKk0tTKGjjCuE,1856
+ocf_data_sampler/load/site.py,sha256=bpFABjpvlstn6yJ6OPVlPZms-CjJdxNwCkQafpnj0Ik,2539
 ocf_data_sampler/load/utils.py,sha256=AGL0aOOQPrgqNBTjlBtR7Qg1PyQov3DFJo-y198u8pY,2044
 ocf_data_sampler/load/nwp/__init__.py,sha256=SmcrnbygO5xtCKmGR4wtHrj-HI7nOAvnAtfuvRufBGQ,25
 ocf_data_sampler/load/nwp/nwp.py,sha256=0E9shei3Mq1N7F-fBlEKY5Hm0_kI7ysY_rffnWIshvk,3612
@@ -21,7 +21,7 @@ ocf_data_sampler/load/nwp/providers/ecmwf.py,sha256=P7JqfssmQq8eHKKXaBexsxts325A
 ocf_data_sampler/load/nwp/providers/gfs.py,sha256=h6vm-Rfz1JGOE4P_fP1_XQJ3bugNbeNAIyt56N8B1Dc,1066
 ocf_data_sampler/load/nwp/providers/icon.py,sha256=iVZwLKRr_D74_kAu5MHir6pRKEfbTmIxFRZAxzmiYdI,1257
 ocf_data_sampler/load/nwp/providers/ukv.py,sha256=2i32VM9gnmWUpbL0qBSp_AKzuyKucXZPS8yklbcGlbc,1039
-ocf_data_sampler/load/nwp/providers/utils.py,sha256=IjJ3w7zDgXNFaVa4TMk8yVCvdzfrIRu5tn1OaaQ7Zso,2304
+ocf_data_sampler/load/nwp/providers/utils.py,sha256=Pyp1U3kUpUoqv6ArEtYBfEfzjO2JGQFlUOL-ndsEmlY,2323
 ocf_data_sampler/numpy_sample/__init__.py,sha256=5bdpzM8hMAEe0XRSZ9AZFQdqEeBsEPhaF79Y8bDx3GQ,407
 ocf_data_sampler/numpy_sample/collate.py,sha256=hoxIc5SoHoIs3Nx37aRZzWChpswjy9lHUgaKgHIoo80,2039
 ocf_data_sampler/numpy_sample/common_types.py,sha256=9CjYHkUTx0ObduWh43fhsybZCTXvexql7qC2ptMDoek,377
@@ -59,7 +59,7 @@ ocf_data_sampler/torch_datasets/utils/valid_time_periods.py,sha256=xcy75cVxl0Wrg
 ocf_data_sampler/torch_datasets/utils/validation_utils.py,sha256=YqmT-lExWlI8_ul3l0EP73Ik002fStr_bhsZh9mQqEU,4735
 scripts/download_gsp_location_data.py,sha256=rRDXMoqX-RYY4jPdxhdlxJGhWdl6r245F5UARgKV6P4,3121
 scripts/refactor_site.py,sha256=skzvsPP0Cn9yTKndzkilyNcGz4DZ88ctvCJ0XrBdc2A,3135
-ocf_data_sampler-0.5.24.dist-info/METADATA,sha256=r5w9nbLOb6vB8rIALgrp02EY7HM6KJ43hE0xp3xLGEE,13541
-ocf_data_sampler-0.5.24.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ocf_data_sampler-0.5.24.dist-info/top_level.txt,sha256=deUxqmsONNAGZDNbsntbXH7BRA1MqWaUeAJrCo6q_xA,25
-ocf_data_sampler-0.5.24.dist-info/RECORD,,
+ocf_data_sampler-0.5.26.dist-info/METADATA,sha256=NQUuT_DW3MI5F3ZFyupsHs3nfTLLU2Fo2PYzn9PxNqM,13541
+ocf_data_sampler-0.5.26.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ocf_data_sampler-0.5.26.dist-info/top_level.txt,sha256=deUxqmsONNAGZDNbsntbXH7BRA1MqWaUeAJrCo6q_xA,25
+ocf_data_sampler-0.5.26.dist-info/RECORD,,