h5netcdf 1.7.1.tar.gz → 1.7.3.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/CHANGELOG.rst +11 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/PKG-INFO +1 -1
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/_version.py +3 -3
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/core.py +29 -26
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/dimensions.py +15 -9
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/tests/test_h5netcdf.py +36 -2
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf.egg-info/PKG-INFO +1 -1
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/.pre-commit-config.yaml +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/AUTHORS.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/LICENSE +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/README.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/Makefile +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/api.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/changelog.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/conf.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/devguide.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/feature.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/index.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/doc/legacyapi.rst +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/__init__.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/attrs.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/legacyapi.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/tests/conftest.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/tests/pytest.ini +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/utils.py +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf.egg-info/SOURCES.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf.egg-info/dependency_links.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf.egg-info/requires.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf.egg-info/top_level.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/licenses/H5PY_LICENSE.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/licenses/PSF_LICENSE.txt +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/pyproject.toml +0 -0
- {h5netcdf-1.7.1 → h5netcdf-1.7.3}/setup.cfg +0 -0
{h5netcdf-1.7.1 → h5netcdf-1.7.3}/CHANGELOG.rst

@@ -1,6 +1,17 @@
 Change Log
 ----------
 
+Version 1.7.3 (October 21st, 2025):
+
+- Fix indexing with empty arrays and lists (:pull:``).
+  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+
+Version 1.7.2 (October 17th, 2025):
+
+- Fix regression where format was requested from group instance instead of _root. Simplify logic to check and raise ``CompatibilityError``. (:issue:`293`, :pull:`294`).
+  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+
+
 Version 1.7.1 (October 16th, 2025):
 
 - Fix regression where attributes with list of strings were written with h5py low-level API instead of high-level API (:issue:`291`, :pull:`292`).

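For context on the 1.7.3 entry: indexing a variable with an empty list, an empty integer array, or a zero-length slice now returns an empty array instead of failing. A minimal sketch of the fixed behaviour (file and variable names are illustrative, not taken from the release):

# Sketch of the behaviour fixed in 1.7.3; "example.nc" and "hello" are made up.
import numpy as np
import h5netcdf

with h5netcdf.File("example.nc", "w") as f:
    f.dimensions = {"x": 10, "y": 10}
    v = f.create_variable("hello", ("x", "y"), float)
    v[:] = np.arange(100).reshape(10, 10)

with h5netcdf.File("example.nc", "r") as f:
    v = f["hello"]
    # An empty list, an empty integer array, and a zero-length slice
    # all yield a zero-length result along that axis.
    print(v[1, []].shape)                          # (0,)
    print(v[1, np.array([], dtype="int")].shape)   # (0,)
    print(v[1, slice(0, 0)].shape)                 # (0,)
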
{h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/_version.py

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '1.7.1'
-__version_tuple__ = version_tuple = (1, 7, 1)
+__version__ = version = '1.7.3'
+__version_tuple__ = version_tuple = (1, 7, 3)
 
-__commit_id__ = commit_id = '...'
+__commit_id__ = commit_id = 'g6bd08d40a'

{h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/core.py

@@ -12,7 +12,7 @@ from packaging import version
 
 from . import __version__
 from .attrs import Attributes
-from .dimensions import Dimension, Dimensions
+from .dimensions import Dimension, Dimensions, _check_classic_unlimited
 from .utils import (
     CompatibilityError,
     Frozen,

@@ -55,20 +55,13 @@ def _invalid_netcdf_feature(feature, allow):
 
 def _transform_1d_boolean_indexers(key):
     """Find and transform 1D boolean indexers to int"""
-    # ...
-    try:
-        ...
-            )
-            for k in key
-        ]
-    except TypeError:
-        return key
-
-    return tuple(key)
+    # Convert 1D boolean arrays/lists to integer indices,
+    # leaving all other types unchanged
+    return tuple(
+        np.flatnonzero(arr) if arr.dtype == bool else k
+        for k in key
+        for arr in [np.asanyarray(k)]
+    )
 
 
 def _expanded_indexer(key, ndim):

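The rewritten helper turns 1D boolean indexers into integer positions with np.flatnonzero and passes every other key through untouched. A standalone sketch of the same idea (the function name and sample key are illustrative):

# Standalone sketch of the boolean-indexer transform used above.
import numpy as np

def transform_1d_boolean_indexers(key):
    # Convert 1D boolean arrays/lists to integer indices, keep other keys as-is.
    out = []
    for k in key:
        arr = np.asanyarray(k)
        out.append(np.flatnonzero(arr) if arr.dtype == bool else k)
    return tuple(out)

key = (slice(None), [True, False, True, False], 2)
print(transform_1d_boolean_indexers(key))
# -> (slice(None, None, None), array([0, 2]), 2)
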
@@ -526,13 +519,27 @@ class BaseVariable(BaseObject):
         h5ds_shape = self._h5ds.shape
         shape = self.shape
 
-        # check for ndarray and list
         # see https://github.com/pydata/xarray/issues/7154
+        # see https://github.com/pydata/xarray/issues/10867
         # first get maximum index
-        ...
+        def _get_max_index(k):
+            # Return the maximum index for ndarray, list, slice, or int,
+            # handling empty arrays/lists safely
+            if isinstance(k, np.ndarray):
+                if k.size == 0:
+                    return None
+                return k.max() + 1
+            elif isinstance(k, list):
+                return max(k) + 1 if k else None
+            elif isinstance(k, slice):
+                return k.stop
+            elif isinstance(k, int):
+                return k + 1
+            else:
+                return None
+
+        max_index = [_get_max_index(k) for k in key0]
+
         # second convert to max shape
         # we take the minimum of shape vs max_index to not return
         # slices larger than expected data

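The new nested helper computes, per index, the largest position a read will touch and returns None when nothing can be inferred (empty arrays/lists, open slices). Calling .max() on an empty array would raise, which is what the guard avoids. A short sketch of the logic and the subsequent clamp against the dataset shape (helper mirrors the diff; sample keys are illustrative):

# Sketch of the max-index logic and shape clamp; sample keys are made up.
import numpy as np

def get_max_index(k):
    # None means "could not infer a maximum index" (empty array/list, open slice).
    if isinstance(k, np.ndarray):
        return None if k.size == 0 else k.max() + 1
    if isinstance(k, list):
        return max(k) + 1 if k else None
    if isinstance(k, slice):
        return k.stop
    if isinstance(k, int):
        return k + 1
    return None

shape = (10, 10)
key0 = (np.array([], dtype="int"), slice(4, 7))
max_index = [get_max_index(k) for k in key0]
# Clamp against the real shape so the read never requests more than exists.
print([s if m is None else min(s, m) for s, m in zip(shape, max_index)])
# -> [10, 7]
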
@@ -1030,12 +1037,8 @@ class Group(Mapping):
 
     @dimensions.setter
    def dimensions(self, value):
-        if self._format == "NETCDF4_CLASSIC":
-            ...
-            if len(unlimited_dims) > 1:
-                raise CompatibilityError(
-                    "NETCDF4_CLASSIC format only allows one unlimited dimension."
-                )
+        if self._root._format == "NETCDF4_CLASSIC":
+            _check_classic_unlimited(value)
 
         for k, v in self._all_dimensions.maps[0].items():
             if k in value:

{h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/dimensions.py

@@ -1,12 +1,24 @@
 import weakref
 from collections import OrderedDict
-from collections.abc import MutableMapping
+from collections.abc import Mapping, MutableMapping
 
 import numpy as np
 
 from .utils import CompatibilityError
 
 
+def _check_classic_unlimited(value, unlimited=None):
+    if isinstance(value, Mapping):
+        multiple_unlimited_dimensions = sum(v in (None, 0) for v in value.values()) > 1
+    else:
+        multiple_unlimited_dimensions = unlimited and value in (None, 0)
+
+    if multiple_unlimited_dimensions:
+        raise CompatibilityError(
+            "Only one unlimited dimension allowed in the NETCDF4_CLASSIC format."
+        )
+
+
 class Dimensions(MutableMapping):
     def __init__(self, group):
         self._group_ref = weakref.ref(group)

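The new module-level check treats a dimension size of None or 0 as unlimited: given a mapping it counts the unlimited entries, given a single size it combines it with whether an unlimited dimension already exists. A small sketch of calling it (the inputs are illustrative, and the scalar form here passes a made-up list for the existing unlimited dimensions):

# Illustrative calls against the new helper; the inputs are made up.
from h5netcdf.dimensions import _check_classic_unlimited
from h5netcdf.utils import CompatibilityError

# A mapping with a single unlimited dimension passes silently.
_check_classic_unlimited({"x": 3, "time": None})

# Two unlimited dimensions (size None or 0) raise.
try:
    _check_classic_unlimited({"x": 3, "time": None, "obs": 0})
except CompatibilityError as err:
    print(err)

# Scalar form: adding another unlimited dimension when one already exists.
try:
    _check_classic_unlimited(None, unlimited=["time"])
except CompatibilityError as err:
    print(err)
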
@@ -25,14 +37,8 @@ class Dimensions(MutableMapping):
             raise RuntimeError("H5NetCDF: Write to read only")
         if name in self._objects:
             raise ValueError(f"dimension {name!r} already exists")
-        if (
-            size
-            and self._unlimited()
-            and self._group._format == "NETCDF4_CLASSIC"
-        ):
-            raise CompatibilityError(
-                "Only one unlimited dimension allowed in the NETCDF4_CLASSIC format."
-            )
+        if self._group._root._format == "NETCDF4_CLASSIC":
+            _check_classic_unlimited(size, self._unlimited())
 
         self._objects[name] = Dimension(self._group, name, size, create_h5ds=True)
 

{h5netcdf-1.7.1 → h5netcdf-1.7.3}/h5netcdf/tests/test_h5netcdf.py

@@ -211,7 +211,7 @@ def write_h5netcdf(tmp_netcdf, compression="gzip", format="NETCDF4"):
     if ds.data_model == "NETCDF4_CLASSIC":
         with raises(
             CompatibilityError,
-            match="...",
+            match="Only one unlimited dimension allowed",
        ):
             ds.dimensions = {"x": 4, "y": 5, "z": 6, "unlimited": None, "empty": 0}
 

@@ -1973,6 +1973,13 @@ def test_fancy_indexing(tmp_local_or_remote_netcdf):
         np.testing.assert_array_equal(ds["hello"][[4, 5, 6], 1], [41, 0, 0])
         np.testing.assert_array_equal(ds["hello"][slice(4, 7), 1], [41, 0, 0])
 
+        # test empty slices
+        # regression test for https://github.com/pydata/xarray/pull/10870
+        empty = np.empty(0, dtype="int64")
+        np.testing.assert_array_equal(ds["hello"][1, []], empty)
+        np.testing.assert_array_equal(ds["hello"][1, np.array([], dtype="int")], empty)
+        np.testing.assert_array_equal(ds["hello"][1, slice(0, 0)], empty)
+
 
 def test_h5py_chunking(tmp_local_netcdf):
     with h5netcdf.File(tmp_local_netcdf, "w") as ds:

@@ -2903,7 +2910,7 @@ def test_raise_on_closed_file(tmp_local_netcdf):
     v = f.create_variable("hello", ("x",), float)
     v[:] = np.ones(5)
     f.close()
-    with ...
+    with raises(
         ValueError,
         match=f"I/O operation on <Closed h5netcdf.File>: '{tmp_local_netcdf}'",
     ):

@@ -3013,3 +3020,30 @@ def test_attributes_list(tmp_local_netcdf, attr):
         assert hf.attrs["foo"][0] == attr[0]
         assert hf.attrs["foo"][1] == attr[1]
         assert isinstance(hf.attrs["foo"], list)
+
+
+def test_group_dimensions(tmp_local_netcdf):
+    # regression test for https://github.com/h5netcdf/h5netcdf/issues/293
+    with h5netcdf.File(tmp_local_netcdf, mode="w") as f:
+        group = f.create_group("data")
+        dims = {"y": 3, "x": 3, "z": None, "z1": None}
+        group.dimensions = dims
+        assert list(group.dimensions) == ["y", "x", "z", "z1"]
+        group.dimensions["z2"] = None
+        assert list(group.dimensions) == ["y", "x", "z", "z1", "z2"]
+
+
+def test_group_dimensions_classic(tmp_local_netcdf):
+    # regression test for https://github.com/h5netcdf/h5netcdf/issues/293
+    with h5netcdf.File(tmp_local_netcdf, mode="w", format="NETCDF4_CLASSIC") as f:
+        group = f.create_group("data")
+        dims = {"y": 3, "x": 3, "z": None, "z1": None}
+        with raises(CompatibilityError, match=r"Only one unlimited dimension allowed"):
+            group.dimensions = dims
+        assert list(group.dimensions) == []
+        dims = {"y": 3, "x": 3, "z": None}
+        group.dimensions = dims
+        assert list(group.dimensions) == ["y", "x", "z"]
+        with raises(CompatibilityError, match=r"Only one unlimited dimension allowed"):
+            group.dimensions["z1"] = None
+        assert list(group.dimensions) == ["y", "x", "z"]