h5netcdf 1.6.1__tar.gz → 1.6.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (34)
  1. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/CHANGELOG.rst +32 -12
  2. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/PKG-INFO +4 -3
  3. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/README.rst +1 -1
  4. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/conf.py +1 -1
  5. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/devguide.rst +2 -0
  6. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/index.rst +1 -1
  7. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/_version.py +2 -2
  8. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/core.py +31 -12
  9. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/dimensions.py +1 -1
  10. h5netcdf-1.6.3/h5netcdf/tests/conftest.py +83 -0
  11. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/tests/test_h5netcdf.py +156 -112
  12. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/PKG-INFO +4 -3
  13. h5netcdf-1.6.1/h5netcdf/tests/conftest.py +0 -65
  14. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/.pre-commit-config.yaml +0 -0
  15. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/AUTHORS.txt +0 -0
  16. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/LICENSE +0 -0
  17. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/Makefile +0 -0
  18. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/api.rst +0 -0
  19. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/changelog.rst +0 -0
  20. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/feature.rst +0 -0
  21. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/legacyapi.rst +0 -0
  22. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/__init__.py +0 -0
  23. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/attrs.py +0 -0
  24. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/legacyapi.py +0 -0
  25. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/tests/pytest.ini +0 -0
  26. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/utils.py +0 -0
  27. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/SOURCES.txt +0 -0
  28. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/dependency_links.txt +0 -0
  29. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/requires.txt +0 -0
  30. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/top_level.txt +0 -0
  31. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/licenses/H5PY_LICENSE.txt +0 -0
  32. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/licenses/PSF_LICENSE.txt +0 -0
  33. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/pyproject.toml +0 -0
  34. {h5netcdf-1.6.1 → h5netcdf-1.6.3}/setup.cfg +0 -0
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/CHANGELOG.rst
@@ -1,50 +1,70 @@
  Change Log
  ----------

+ Version 1.6.3 (June 30th, 2025):
+
+ - fix invalid string format specifier, match raises/warns with messages in test suite,
+ remove tests for h5py < 3.7, fix sphinx issue and pr roles in CHANGELOG.rst (:issue:`269`, :pull:`270`).
+ By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+
+ Version 1.6.2 (June 26th, 2025):
+
+ - Codespell fixes (:pull:`261`).
+ By `Kurt Schwehr <https://github.com/schwehr>`_
+ - Fix hsds/h5pyd test fixture spinup issues (:pull:`265`).
+ By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+ - Fix and add circular referrer tests for Python 3.14 and update CI matrix (:pull:`264`).
+ By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+ - Avoid opening h5pyd file to check if there is a preexisting file,
+ instead remap mode "a" -> "r+", resort to "w" if file doesn't exist (:issue:`262`, :pull:`266`).
+ By `Jonas Grönberg <https://github.com/JonasGronberg>`_ and `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+ - Reduce CI time by installing available scientific-python-nightly-wheels and using pip cache (:pull:`267`).
+ By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+
  Version 1.6.1 (March 7th, 2025):

  - Let Variable.chunks return None for scalar variables, independent of what the underlying
- h5ds object returns ({pull}`259`).
+ h5ds object returns (:pull:`259`).
  By `Rickard Holmberg <https://github.com/rho-novatron>`_

  Version 1.6.0 (March 7th, 2025):

- - Allow specifying `h5netcdf.File(driver="h5pyd")` to force the use of h5pyd ({issue}`255`, {pull}`256`).
+ - Allow specifying `h5netcdf.File(driver="h5pyd")` to force the use of h5pyd (:issue:`255`, :pull:`256`).
  By `Rickard Holmberg <https://github.com/rho-novatron>`_
- - Add pytest-mypy-plugins for xarray nightly test ({pull}`257`).
+ - Add pytest-mypy-plugins for xarray nightly test (:pull:`257`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_

  Version 1.5.0 (January 26th, 2025):

- - Update CI to new versions (Python 3.13, 3.14 alpha), remove numpy 1 from h5pyd runs ({pull}`250`).
+ - Update CI to new versions (Python 3.13, 3.14 alpha), remove numpy 1 from h5pyd runs (:pull:`250`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
- - Update CI and reinstate h5pyd/hsds test runs ({pull}`247`).
+ - Update CI and reinstate h5pyd/hsds test runs (:pull:`247`).
  By `John Readey <https://github.com/jreadey>`_
  - Allow ``zlib`` to be used as an alias for ``gzip`` for enhanced compatibility with h5netcdf's API and xarray.
  By `Mark Harfouche <https://github.com/hmaarrfk>`_

  Version 1.4.1 (November 13th, 2024):

- - Add CI run for hdf5 1.10.6, fix complex tests, fix enum/user type tests ({pull}`244`).
+ - Add CI run for hdf5 1.10.6, fix complex tests, fix enum/user type tests (:pull:`244`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_


  Version 1.4.0 (October 7th, 2024):

- - Add UserType class, add EnumType ({pull}`229`).
+ - Add UserType class, add EnumType (:pull:`229`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
- - Refactor fillvalue and dtype handling for user types, enhance sanity checks and tests ({pull}`230`).
+ - Refactor fillvalue and dtype handling for user types, enhance sanity checks and tests (:pull:`230`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
- - Add VLType and CompoundType, commit complex compound type to file. Align with nc-complex ({pull}`227`).
+ - Add VLType and CompoundType, commit complex compound type to file. Align with nc-complex (:pull:`227`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
  - Update h5pyd testing.
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
- - CI and lint maintenance ({pull}`235`).
+ - CI and lint maintenance (:pull:`235`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
  - Support wrapping an h5py ``File`` object. Closing the h5netcdf file object
- does not close the h5py file ({pull}`238`).
+ does not close the h5py file (:pull:`238`).
  By `Thomas Kluyver <https://github.com/takluyver>`_
- - CI and lint maintenance (format README.rst, use more f-strings, change Python 3.9 to 3.10 in CI) ({pull}`239`).
+ - CI and lint maintenance (format README.rst, use more f-strings, change Python 3.9 to 3.10 in CI) (:pull:`239`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_

  Version 1.3.0 (November 7th, 2023):
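The 1.5.0 entry above notes that "zlib" is accepted as an alias for "gzip" compression. A minimal, hedged sketch of what that looks like from user code; the file and variable names are made up, and it assumes the new-API create_variable forwards its compression argument to h5py, where the alias is mapped to the "gzip" filter:

    import h5netcdf

    # sketch only: "zlib" is accepted and treated like h5py's "gzip" filter
    with h5netcdf.File("example.nc", "w") as ds:
        ds.dimensions["x"] = 10
        ds.create_variable("v", ("x",), dtype="f8", compression="zlib")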
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/PKG-INFO
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: h5netcdf
- Version: 1.6.1
+ Version: 1.6.3
  Summary: netCDF4 via h5py
  Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
  Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
@@ -56,6 +56,7 @@ Requires-Dist: packaging
  Provides-Extra: test
  Requires-Dist: netCDF4; extra == "test"
  Requires-Dist: pytest; extra == "test"
+ Dynamic: license-file

  h5netcdf
  ========
@@ -318,7 +319,7 @@ The following describes the behavior of h5netcdf with respect to order tracking
  for a few key versions:

  - Version 0.12.0 and earlier, the ``track_order`` parameter`order was missing
- and thus order tracking was implicitely set to ``False``.
+ and thus order tracking was implicitly set to ``False``.
  - Version 0.13.0 enabled order tracking by setting the parameter
  ``track_order`` to ``True`` by default without deprecation.
  - Versions 0.13.1 to 1.0.2 set ``track_order`` to ``False`` due to a bug in a
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/README.rst
@@ -259,7 +259,7 @@ The following describes the behavior of h5netcdf with respect to order tracking
  for a few key versions:

  - Version 0.12.0 and earlier, the ``track_order`` parameter`order was missing
- and thus order tracking was implicitely set to ``False``.
+ and thus order tracking was implicitly set to ``False``.
  - Version 0.13.0 enabled order tracking by setting the parameter
  ``track_order`` to ``True`` by default without deprecation.
  - Versions 0.13.1 to 1.0.2 set ``track_order`` to ``False`` due to a bug in a
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/conf.py
@@ -117,7 +117,7 @@ napoleon_type_aliases = {
  "Path": "~~pathlib.Path",
  }

- # handle release substition
+ # handle release substitution
  url = "https://github.com/h5netcdf"

  # get version
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/devguide.rst
@@ -19,6 +19,8 @@ Contributors
  - `Frédéric Laliberté <https://github.com/laliberte>`_
  - `Ghislain Vaillant <https://github.com/ghisvail>`_
  - `John Readey <https://github.com/jreadey>`_
+ - `Jonas Grönberg <https://github.com/JonasGronberg>`_
+ - `Kurt Schwehr <https://github.com/schwehr>`_
  - `Lion Krischer <https://github.com/krischer>`_
  - `Mark Harfouche <https://github.com/hmaarrfk>`_
  - `Martin Raspaud <https://github.com/mraspaud>`_
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/doc/index.rst
@@ -50,7 +50,7 @@ by Stephan Hoyer. The first `official` ``h5netcdf`` announcement was made by Ste
  `xarray issue tracker`_ only one day later.

  The library evolved constantly over the years (fixing bugs and adding enhancements)
- and gained contributions from 19 other :ref:`contributors` so far. The library is widely used,
+ and gained contributions from 21 other :ref:`contributors` so far. The library is widely used,
  especially as backend within `xarray`_.

  Early 2020 Kai Mühlbauer started to add contributions and after some time he volunteered
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/_version.py
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.6.1'
- __version_tuple__ = version_tuple = (1, 6, 1)
+ __version__ = version = '1.6.3'
+ __version_tuple__ = version_tuple = (1, 6, 3)
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/core.py
@@ -982,16 +982,16 @@ class Group(Mapping):
  for k, v in self._all_dimensions.maps[0].items():
  if k in value:
  if v != value[k]:
- raise ValueError(f"cannot modify existing dimension {k:!r}")
+ raise ValueError(f"cannot modify existing dimension {k!r}")
  else:
  raise ValueError(
- f"new dimensions do not include existing dimension {k:!r}"
+ f"new dimensions do not include existing dimension {k!r}"
  )
  self._dimensions.update(value)

  def _create_child_group(self, name):
  if name in self:
- raise ValueError(f"unable to create group {name:!r} (name already exists)")
+ raise ValueError(f"unable to create group {name!r} (name already exists)")
  kwargs = {}
  kwargs.update(track_order=self._track_order)

@@ -1035,7 +1035,7 @@
  ):
  if name in self:
  raise ValueError(
- f"unable to create variable {name:!r} (name already exists)"
+ f"unable to create variable {name!r} (name already exists)"
  )
  if data is not None:
  data = np.asarray(data)
@@ -1519,16 +1519,35 @@ class File(Group):
  "No module named 'h5pyd'. h5pyd is required for "
  f"opening urls: {path}"
  )
+ self._preexisting_file = mode in {"r", "r+", "a"}
+ # remap "a" -> "r+" to check file existence
+ # fallback to "w" if not
+ _mode = mode
+ if mode == "a":
+ mode = "r+"
+ self._h5py = h5pyd
  try:
- with h5pyd.File(path, "r", **kwargs) as f: # noqa
- pass
- self._preexisting_file = True
+ self._h5file = self._h5py.File(
+ path, mode, track_order=track_order, **kwargs
+ )
+ self._preexisting_file = mode != "w"
  except OSError:
- self._preexisting_file = False
- self._h5py = h5pyd
- self._h5file = self._h5py.File(
- path, mode, track_order=track_order, **kwargs
- )
+ # if file does not exist, create it
+ if _mode == "a":
+ mode = "w"
+ self._h5file = self._h5py.File(
+ path, mode, track_order=track_order, **kwargs
+ )
+ self._preexisting_file = False
+ msg = (
+ "Append mode for h5pyd now probes with 'r+' first and "
+ "only falls back to 'w' if the file is missing.\n"
+ "To silence this warning use 'r+' (open-existing) or 'w' "
+ "(create-new) directly."
+ )
+ warnings.warn(msg, UserWarning, stacklevel=2)
+ else:
+ raise
  else:
  self._preexisting_file = os.path.exists(path) and mode != "w"
  self._h5py = h5py
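The core.py hunk above reworks append mode for h5pyd/HSDS paths: instead of probing the domain with an extra read-only open, mode "a" is remapped to "r+", and only if that open fails with OSError does h5netcdf fall back to "w" and emit a UserWarning. A rough usage sketch of the new behavior; the domain path is hypothetical and assumes HS_ENDPOINT, HS_USERNAME and HS_PASSWORD point at a running HSDS instance:

    import h5netcdf

    path = "hdf5://home/someuser/example.nc"  # hypothetical HSDS domain

    # "a" now opens as "r+" first; if the domain does not exist yet, h5netcdf
    # creates it with "w" and warns ("Append mode for h5pyd now probes ...").
    with h5netcdf.File(path, "a", driver="h5pyd") as ds:
        ds.dimensions["x"] = 10

    # to avoid the warning, state the intent explicitly
    with h5netcdf.File(path, "r+", driver="h5pyd") as ds:  # open existing
        pass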
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/dimensions.py
@@ -22,7 +22,7 @@ class Dimensions(MutableMapping):
  if not self._group._root._writable:
  raise RuntimeError("H5NetCDF: Write to read only")
  if name in self._objects:
- raise ValueError(f"dimension {name:!r} already exists")
+ raise ValueError(f"dimension {name!r} already exists")

  self._objects[name] = Dimension(self._group, name, size, create_h5ds=True)

h5netcdf-1.6.3/h5netcdf/tests/conftest.py
@@ -0,0 +1,83 @@
+ import os
+ import tempfile
+ import time
+ from pathlib import Path
+ from shutil import rmtree
+
+ import pytest
+
+ try:
+ from h5pyd import Folder
+ from hsds.hsds_app import HsdsApp
+
+ with_reqd_pkgs = True
+ except ImportError:
+ with_reqd_pkgs = False
+
+
+ @pytest.fixture(scope="session")
+ def hsds_up():
+ """Provide HDF Highly Scalable Data Service (HSDS) for h5pyd testing."""
+ if not with_reqd_pkgs:
+ pytest.skip("Required packages h5pyd and hsds not available")
+
+ root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
+ bucket_name = "pytest"
+ os.environ["BUCKET_NAME"] = bucket_name
+ # need to create a directory for our bucket
+ (root_dir / bucket_name).mkdir()
+
+ kwargs = {
+ "username": "h5netcdf-pytest",
+ "password": "TestEarlyTestEverything",
+ "root_dir": str(root_dir),
+ "logfile": str(root_dir / "hsds.log"),
+ "log_level": "DEBUG",
+ "host": "localhost",
+ "sn_port": 5101,
+ }
+
+ os.environ.update(
+ {
+ "BUCKET_NAME": bucket_name,
+ "HS_USERNAME": kwargs["username"],
+ "HS_PASSWORD": kwargs["password"],
+ "HS_USE_HTTPS": "False",
+ }
+ )
+
+ hsds = HsdsApp(**kwargs)
+
+ try:
+ hsds.run()
+ timeout = time.time() + 60
+ while not hsds.ready:
+ if time.time() > timeout:
+ raise TimeoutError("HSDS server did not become ready in time")
+ time.sleep(1)
+
+ os.environ["HS_ENDPOINT"] = hsds.endpoint
+ # make folders expected by pytest
+ Folder("/home/", mode="w")
+ Folder("/home/h5netcdf-pytest/", mode="w")
+
+ yield True
+
+ except Exception as err:
+ log_path = kwargs["logfile"]
+ if os.path.exists(log_path):
+ with open(log_path) as f:
+ print("\n=== HSDS Log ===")
+ print(f.read())
+ else:
+ print(f"HSDS log not found at: {log_path}")
+ raise err
+
+ finally:
+ try:
+ hsds.check_processes()
+ hsds.stop()
+ except Exception:
+ pass
+
+ rmtree(root_dir, ignore_errors=True)
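For context, a hypothetical test only has to request the session-scoped fixture above to get a running HSDS service; the fixture either yields True once the service is ready or skips when h5pyd/hsds are not installed, and the existing tests additionally guard on its value:

    import pytest

    def test_remote_roundtrip(hsds_up):
        # mirrors the guard used in test_h5netcdf.py
        if not hsds_up:
            pytest.skip("HSDS service not running")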
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf/tests/test_h5netcdf.py
@@ -3,7 +3,9 @@ import io
  import random
  import re
  import string
+ import sys
  import tempfile
+ import weakref
  from os import environ as env

  import h5py
@@ -11,7 +13,7 @@ import netCDF4
  import numpy as np
  import pytest
  from packaging import version
- from pytest import raises
+ from pytest import raises, warns

  import h5netcdf
  from h5netcdf import legacyapi
@@ -30,6 +32,7 @@ except ImportError:


  remote_h5 = ("http:", "hdf5:")
+ python_version = version.parse(".".join(map(str, sys.version_info[:3])))


  @pytest.fixture
@@ -37,26 +40,30 @@ def tmp_local_netcdf(tmpdir):
  return str(tmpdir.join("testfile.nc"))


+ @pytest.fixture()
+ def setup_h5pyd_config(hsds_up):
+ env["HS_ENDPOINT"] = "http://127.0.0.1:5101"
+ env["HS_USERNAME"] = "h5netcdf-pytest"
+ env["HS_PASSWORD"] = "TestEarlyTestEverything"
+ env["HS_USE_HTTPS"] = "False"
+
+
  @pytest.fixture(params=["testfile.nc", "hdf5://testfile"])
- def tmp_local_or_remote_netcdf(request, tmpdir, hsds_up):
- if request.param.startswith(remote_h5):
- if without_h5pyd:
- pytest.skip("h5pyd package not available")
- elif not hsds_up:
- pytest.skip("HSDS service not running")
- rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
- return (
- "hdf5://"
- + "home"
- + "/"
- + env["HS_USERNAME"]
- + "/"
- + "testfile"
- + rnd
- + ".nc"
- )
+ def tmp_local_or_remote_netcdf(request, tmpdir):
+ param = request.param
+ if param.startswith(remote_h5):
+ try:
+ hsds_up = request.getfixturevalue("hsds_up")
+ except pytest.skip.Exception:
+ pytest.skip("HSDS not available")
+
+ if not hsds_up:
+ pytest.skip("HSDS fixture returned False (not running)")
+
+ rnd = "".join(random.choices(string.ascii_uppercase, k=5))
+ return f"hdf5://home/{env['HS_USERNAME']}/testfile{rnd}.nc"
  else:
- return str(tmpdir.join(request.param))
+ return str(tmpdir.join(param))


  @pytest.fixture(params=[True, False])
@@ -157,7 +164,10 @@ def write_legacy_netcdf(tmp_netcdf, write_module):
  v = ds.createVariable("foo_unlimited", float, ("x", "unlimited"))
  v[...] = 1

- with raises((h5netcdf.CompatibilityError, TypeError)):
+ with raises(
+ (h5netcdf.CompatibilityError, TypeError),
+ match=r"(?i)(boolean dtypes are not a supported NetCDF feature|illegal primitive data type)",
+ ):
  ds.createVariable("boolean", np.bool_, ("x"))

  g = ds.createGroup("subgroup")
@@ -250,7 +260,7 @@ def read_legacy_netcdf(tmp_netcdf, read_module, write_module):
  if write_module is not netCDF4:
  # skip for now: https://github.com/Unidata/netcdf4-python/issues/388
  assert ds.other_attr == "yes"
- with pytest.raises(AttributeError):
+ with raises(AttributeError, match="not found"):
  ds.does_not_exist
  assert set(ds.dimensions) == set(
  ["x", "y", "z", "empty", "string3", "mismatched_dim", "unlimited"]
@@ -646,25 +656,27 @@ def test_optional_netcdf4_attrs(tmp_local_or_remote_netcdf):
  def test_error_handling(tmp_local_or_remote_netcdf):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
  ds.dimensions["x"] = 1
- with raises(ValueError):
+ with raises(ValueError, match="already exists"):
  ds.dimensions["x"] = 2
- with raises(ValueError):
+ with raises(ValueError, match="cannot modify existing dimension"):
  ds.dimensions = {"x": 2}
- with raises(ValueError):
+ with raises(
+ ValueError, match="new dimensions do not include existing dimension"
+ ):
  ds.dimensions = {"y": 3}
  ds.create_variable("x", ("x",), dtype=float)
- with raises(ValueError):
+ with raises(ValueError, match="unable to create variable"):
  ds.create_variable("x", ("x",), dtype=float)
- with raises(ValueError):
+ with raises(ValueError, match="name parameter cannot be an empty string"):
  ds.create_variable("y/", ("x",), dtype=float)
  ds.create_group("subgroup")
- with raises(ValueError):
+ with raises(ValueError, match="unable to create group"):
  ds.create_group("subgroup")


  def test_decode_string_error(tmp_local_or_remote_netcdf):
  write_h5netcdf(tmp_local_or_remote_netcdf)
- with pytest.raises(TypeError):
+ with raises(TypeError, match="keyword argument is not allowed"):
  with h5netcdf.legacyapi.Dataset(
  tmp_local_or_remote_netcdf, "r", decode_vlen_strings=True
  ) as ds:
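The hunks above replace bare raises(ValueError) calls with message-matched assertions. As a reminder of the mechanics (a generic pytest illustration, not code from this package): the match argument is a regular expression searched against the string form of the raised exception or warning, so a partial message is enough:

    import pytest

    def test_match_example():
        # passes because "already exists" occurs in the exception message
        with pytest.raises(ValueError, match="already exists"):
            raise ValueError("dimension 'x' already exists")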
@@ -731,10 +743,10 @@ def test_invalid_netcdf4(tmp_local_or_remote_netcdf):
  check_invalid_netcdf4(var, i)

  with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
- with raises(ValueError):
+ with raises(ValueError, match="has no dimension scale associated"):
  ds["bar"].variables["foo1"].dimensions

- with raises(ValueError):
+ with raises(ValueError, match="unknown value"):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "r", phony_dims="srt") as ds:
  pass

@@ -799,7 +811,7 @@ def test_invalid_netcdf4_mixed(tmp_local_or_remote_netcdf):
  check_invalid_netcdf4_mixed(var, 3)

  with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
- with raises(ValueError):
+ with raises(ValueError, match="has no dimension scale associated with"):
  ds.variables["foo1"].dimensions


@@ -817,12 +829,12 @@ def test_invalid_netcdf_malformed_dimension_scales(tmp_local_or_remote_netcdf):
  f["z"].make_scale()
  f["foo1"].dims[0].attach_scale(f["x"])

- with raises(ValueError):
+ with raises(ValueError, match="has mixing of labeled and unlabeled dimensions"):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
  assert ds
  print(ds)

- with raises(ValueError):
+ with raises(ValueError, match="has mixing of labeled and unlabeled dimensions"):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "r", phony_dims="sort") as ds:
  assert ds
  print(ds)
@@ -936,14 +948,17 @@ def test_invalid_netcdf_error(tmp_local_or_remote_netcdf):
  f.create_variable(
  "lzf_compressed", data=[1], dimensions=("x"), compression="lzf"
  )
- with pytest.raises(h5netcdf.CompatibilityError):
+ with raises(
+ h5netcdf.CompatibilityError,
+ match="scale-offset filters are not a supported NetCDF feature",
+ ):
  f.create_variable("scaleoffset", data=[1], dimensions=("x",), scaleoffset=0)


  def test_invalid_netcdf_okay(tmp_local_or_remote_netcdf):
  if tmp_local_or_remote_netcdf.startswith(remote_h5):
  pytest.skip("h5pyd does not support NumPy complex dtype yet")
- with pytest.warns(UserWarning, match="invalid netcdf features"):
+ with warns(UserWarning, match="invalid netcdf features"):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "w", invalid_netcdf=True) as f:
  f.create_variable(
  "lzf_compressed", data=[1], dimensions=("x"), compression="lzf"
@@ -965,7 +980,7 @@ def test_invalid_netcdf_overwrite_valid(tmp_local_netcdf):
  # https://github.com/h5netcdf/h5netcdf/issues/165
  with netCDF4.Dataset(tmp_local_netcdf, mode="w"):
  pass
- with pytest.warns(UserWarning):
+ with warns(UserWarning, match="You are writing invalid netcdf features"):
  with h5netcdf.File(tmp_local_netcdf, "a", invalid_netcdf=True) as f:
  f.create_variable(
  "lzf_compressed", data=[1], dimensions=("x"), compression="lzf"
@@ -994,7 +1009,7 @@ def test_reopen_file_different_dimension_sizes(tmp_local_netcdf):


  def test_invalid_then_valid_no_ncproperties(tmp_local_or_remote_netcdf):
- with pytest.warns(UserWarning, match="invalid netcdf features"):
+ with warns(UserWarning, match="invalid netcdf features"):
  with h5netcdf.File(tmp_local_or_remote_netcdf, "w", invalid_netcdf=True):
  pass
  with h5netcdf.File(tmp_local_or_remote_netcdf, "a"):
@@ -1012,11 +1027,8 @@ def test_creating_and_resizing_unlimited_dimensions(tmp_local_or_remote_netcdf):
  f.dimensions["z"] = None
  f.resize_dimension("z", 20)

- with pytest.raises(ValueError) as e:
+ with raises(ValueError, match="is not unlimited and thus cannot be resized"):
  f.resize_dimension("y", 20)
- assert e.value.args[0] == (
- "Dimension 'y' is not unlimited and thus cannot be resized."
- )

  h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
  # Assert some behavior observed by using the C netCDF bindings.
@@ -1042,11 +1054,10 @@ def test_creating_variables_with_unlimited_dimensions(tmp_local_or_remote_netcdf

  # Trying to create a variable while the current size of the dimension
  # is still zero will fail.
- with pytest.raises(ValueError) as e:
+ with raises(ValueError, match="Shape tuple is incompatible with data"):
  f.create_variable(
  "dummy2", data=np.array([[1, 2], [3, 4]]), dimensions=("x", "y")
  )
- assert e.value.args[0] == "Shape tuple is incompatible with data"

  # Creating a coordinate variable
  f.create_variable("x", dimensions=("x",), dtype=np.int64)
@@ -1071,7 +1082,7 @@ def test_creating_variables_with_unlimited_dimensions(tmp_local_or_remote_netcdf
  # We don't expect any errors. This is effectively a void context manager
  expected_errors = memoryview(b"")
  else:
- expected_errors = pytest.raises(TypeError)
+ expected_errors = raises(TypeError, match="Can't broadcast")
  with expected_errors as e:
  f.variables["dummy3"][:] = np.ones((5, 2))
  if not tmp_local_or_remote_netcdf.startswith(remote_h5):
@@ -1108,11 +1119,10 @@ def test_writing_to_an_unlimited_dimension(tmp_local_or_remote_netcdf):
  f.dimensions["z"] = None

  # Cannot create it without first resizing it.
- with pytest.raises(ValueError) as e:
+ with raises(ValueError, match="Shape tuple is incompatible with data"):
  f.create_variable(
  "dummy1", data=np.array([[1, 2, 3]]), dimensions=("x", "y")
  )
- assert e.value.args[0] == "Shape tuple is incompatible with data"

  # Without data.
  f.create_variable("dummy1", dimensions=("x", "y"), dtype=np.int64)
@@ -1141,7 +1151,9 @@ def test_writing_to_an_unlimited_dimension(tmp_local_or_remote_netcdf):

  # broadcast writing
  if tmp_local_or_remote_netcdf.startswith(remote_h5):
- expected_errors = pytest.raises(OSError)
+ expected_errors = raises(
+ OSError, match="Got asyncio.IncompleteReadError during binary read"
+ )
  else:
  # We don't expect any errors. This is effectively a void context manager
  expected_errors = memoryview(b"")
@@ -1300,6 +1312,24 @@ def test_overwrite_existing_file(tmp_local_netcdf):
  assert ds.attrs._h5attrs.get("_NCProperties", False)


+ def test_overwrite_existing_remote_file(tmp_local_or_remote_netcdf):
+ # create file with legacyapi
+ with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+ ds.createDimension("x", 10)
+
+ # check attribute
+ with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+ assert ds.attrs._h5attrs.get("_NCProperties", False)
+
+ # overwrite file with new api
+ with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
+ ds.dimensions["x"] = 10
+
+ # check attribute
+ with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+ assert ds.attrs._h5attrs.get("_NCProperties", False)
+
+
  def test_scales_on_append(tmp_local_netcdf):
  # create file with _NCProperties attribute
  with netCDF4.Dataset(tmp_local_netcdf, "w") as ds:
@@ -1555,7 +1585,38 @@ def test_no_circular_references(tmp_local_or_remote_netcdf):
  refs = gc.get_referrers(ds)
  for ref in refs:
  print(ref)
- assert len(refs) == 1
+ if python_version >= version.parse("3.14"):
+ assert len(refs) == 0
+ else:
+ assert len(refs) == 1
+
+
+ def test_no_circular_references_py314(tmp_local_or_remote_netcdf):
+ # https://github.com/h5py/h5py/issues/2019
+ with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
+ ds.dimensions["x"] = 2
+ ds.dimensions["y"] = 2
+
+ # clean up everything
+ gc.collect()
+ gc.garbage.clear()
+
+ # use weakref to hold on object
+ file_ref = None
+ with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+ file_ref = weakref.ref(ds)
+
+ # clean up
+ gc.collect()
+
+ # check garbage list
+ if file_ref() is not None:
+ print("Uncollectable object:", file_ref())
+ print("Potential GC garbage:")
+ for obj in gc.garbage:
+ print(repr(obj))
+
+ assert file_ref() is None or "<Closed h5netcdf.File>"


  def test_expanded_variables_netcdf4(tmp_local_netcdf, netcdf_write_module):
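The new test_no_circular_references_py314 above relies on a weak reference to show that the closed file object is actually collectable. A minimal standalone illustration of that idea (not h5netcdf-specific):

    import gc
    import weakref

    class Resource:
        pass

    obj = Resource()
    ref = weakref.ref(obj)
    del obj
    gc.collect()
    # if nothing keeps a strong reference (no cycles holding it alive),
    # the weak reference is dead after collection
    assert ref() is None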
@@ -1693,7 +1754,7 @@ def test_track_order_specification(tmp_local_netcdf):
  # While netcdf4-c has historically only allowed track_order to be True
  # There doesn't seem to be a good reason for this
  # https://github.com/Unidata/netcdf-c/issues/2054 historically, h5netcdf
- # has not specified this parameter (leaving it implicitely as False)
+ # has not specified this parameter (leaving it implicitly as False)
  # We want to make sure we allow both here
  with h5netcdf.File(tmp_local_netcdf, "w", track_order=False):
  pass
@@ -1717,18 +1778,10 @@ def test_more_than_7_attr_creation(tmp_local_netcdf):
  # https://github.com/h5netcdf/h5netcdf/issues/136#issuecomment-1017457067
  @pytest.mark.parametrize("track_order", [False, True])
  def test_more_than_7_attr_creation_track_order(tmp_local_netcdf, track_order):
- h5py_version = version.parse(h5py.__version__)
- if track_order and h5py_version < version.parse("3.7.0"):
- expected_errors = pytest.raises(KeyError)
- else:
- # We don't expect any errors. This is effectively a void context manager
- expected_errors = memoryview(b"")
-
  with h5netcdf.File(tmp_local_netcdf, "w", track_order=track_order) as h5file:
- with expected_errors:
- for i in range(100):
- h5file.attrs[f"key{i}"] = i
- h5file.attrs[f"key{i}"] = 0
+ for i in range(100):
+ h5file.attrs[f"key{i}"] = i
+ h5file.attrs[f"key{i}"] = 0


  def test_group_names(tmp_local_netcdf):
@@ -1815,18 +1868,11 @@ def test_bool_slicing_length_one_dim(tmp_local_netcdf):
  data = ds["hello"][bool_slice, :]
  np.testing.assert_equal(data, np.zeros((1, 2)))

- # should raise for h5py >= 3.0.0 and h5py < 3.7.0
+ # regression test
  # https://github.com/h5py/h5py/pull/2079
  # https://github.com/h5netcdf/h5netcdf/pull/125/
  with h5netcdf.File(tmp_local_netcdf, "r") as ds:
- h5py_version = version.parse(h5py.__version__)
- if version.parse("3.0.0") <= h5py_version < version.parse("3.7.0"):
- error = "Indexing arrays must have integer dtypes"
- with pytest.raises(TypeError) as e:
- ds["hello"][bool_slice, :]
- assert error == str(e.value)
- else:
- ds["hello"][bool_slice, :]
+ ds["hello"][bool_slice, :]


  def test_fancy_indexing(tmp_local_or_remote_netcdf):
@@ -2247,38 +2293,36 @@ def test_user_type_errors_new_api(tmp_local_or_remote_netcdf):
  enum_type = ds.create_enumtype(np.uint8, "enum_t", enum_dict1)

  if tmp_local_or_remote_netcdf.startswith(remote_h5):
- testcontext = pytest.raises(RuntimeError, match="Conflict")
+ testcontext = raises(RuntimeError, match="Conflict")
  else:
- testcontext = pytest.raises(
- (KeyError, TypeError), match="name already exists"
- )
+ testcontext = raises((KeyError, TypeError), match="name already exists")
  with testcontext:
  ds.create_enumtype(np.uint8, "enum_t", enum_dict2)

  enum_type2 = g.create_enumtype(np.uint8, "enum_t2", enum_dict2)
  g.create_enumtype(np.uint8, "enum_t", enum_dict2)
- with pytest.raises(TypeError, match="Please provide h5netcdf user type"):
+ with raises(TypeError, match="Please provide h5netcdf user type"):
  ds.create_variable(
  "enum_var1",
  ("enum_dim",),
  dtype=enum_type._h5ds,
  fillvalue=enum_dict1["missing"],
  )
- with pytest.raises(TypeError, match="is not committed into current file"):
+ with raises(TypeError, match="is not committed into current file"):
  ds.create_variable(
  "enum_var2",
  ("enum_dim",),
  dtype=enum_type_ext,
  fillvalue=enum_dict1["missing"],
  )
- with pytest.raises(TypeError, match="is not accessible in current group"):
+ with raises(TypeError, match="is not accessible in current group"):
  ds.create_variable(
  "enum_var3",
  ("enum_dim",),
  dtype=enum_type2,
  fillvalue=enum_dict2["missing"],
  )
- with pytest.raises(TypeError, match="Another dtype with same name"):
+ with raises(TypeError, match="Another dtype with same name"):
  g.create_variable(
  "enum_var4",
  ("enum_dim",),
@@ -2297,38 +2341,36 @@ def test_user_type_errors_legacyapi(tmp_local_or_remote_netcdf):
  g = ds.createGroup("subgroup")
  enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict1)
  if tmp_local_or_remote_netcdf.startswith(remote_h5):
- testcontext = pytest.raises(RuntimeError, match="Conflict")
+ testcontext = raises(RuntimeError, match="Conflict")
  else:
- testcontext = pytest.raises(
- (KeyError, TypeError), match="name already exists"
- )
+ testcontext = raises((KeyError, TypeError), match="name already exists")
  with testcontext:
  ds.createEnumType(np.uint8, "enum_t", enum_dict1)

  enum_type2 = g.createEnumType(np.uint8, "enum_t2", enum_dict2)
  g.create_enumtype(np.uint8, "enum_t", enum_dict2)
- with pytest.raises(TypeError, match="Please provide h5netcdf user type"):
+ with raises(TypeError, match="Please provide h5netcdf user type"):
  ds.createVariable(
  "enum_var1",
  enum_type._h5ds,
  ("enum_dim",),
  fill_value=enum_dict1["missing"],
  )
- with pytest.raises(TypeError, match="is not committed into current file"):
+ with raises(TypeError, match="is not committed into current file"):
  ds.createVariable(
  "enum_var2",
  enum_type_ext,
  ("enum_dim",),
  fill_value=enum_dict1["missing"],
  )
- with pytest.raises(TypeError, match="is not accessible in current group"):
+ with raises(TypeError, match="is not accessible in current group"):
  ds.createVariable(
  "enum_var3",
  enum_type2,
  ("enum_dim",),
  fill_value=enum_dict2["missing"],
  )
- with pytest.raises(TypeError, match="Another dtype with same name"):
+ with raises(TypeError, match="Another dtype with same name"):
  g.createVariable(
  "enum_var4",
  enum_type,
@@ -2346,7 +2388,7 @@ def test_enum_type_errors_new_api(tmp_local_or_remote_netcdf):
  enum_type2 = ds.create_enumtype(np.uint8, "enum_t2", enum_dict2)

  # 1.
- with pytest.warns(UserWarning, match="default fill_value 0 which IS defined"):
+ with warns(UserWarning, match="default fill_value 0 which IS defined"):
  ds.create_variable(
  "enum_var1",
  ("enum_dim",),
@@ -2354,18 +2396,14 @@ def test_enum_type_errors_new_api(tmp_local_or_remote_netcdf):
  )
  # 2. is for legacyapi only
  # 3.
- with pytest.warns(
- UserWarning, match="default fill_value 0 which IS NOT defined"
- ):
+ with warns(UserWarning, match="default fill_value 0 which IS NOT defined"):
  ds.create_variable(
  "enum_var2",
  ("enum_dim",),
  dtype=enum_type,
  )
  # 4.
- with pytest.warns(
- UserWarning, match="with specified fill_value 0 which IS NOT"
- ):
+ with warns(UserWarning, match="with specified fill_value 0 which IS NOT"):
  ds.create_variable(
  "enum_var3",
  ("enum_dim",),
@@ -2373,9 +2411,7 @@ def test_enum_type_errors_new_api(tmp_local_or_remote_netcdf):
  fillvalue=0,
  )
  # 5.
- with pytest.raises(
- ValueError, match="with specified fill_value 100 which IS NOT"
- ):
+ with raises(ValueError, match="with specified fill_value 100 which IS NOT"):
  ds.create_variable(
  "enum_var4",
  ("enum_dim",),
@@ -2393,14 +2429,14 @@ def test_enum_type_errors_legacyapi(tmp_local_or_remote_netcdf):
  enum_type2 = ds.createEnumType(np.uint8, "enum_t2", enum_dict2)

  # 1.
- with pytest.warns(UserWarning, match="default fill_value 255 which IS defined"):
+ with warns(UserWarning, match="default fill_value 255 which IS defined"):
  ds.createVariable(
  "enum_var1",
  enum_type2,
  ("enum_dim",),
  )
  # 2.
- with pytest.raises(ValueError, match="default fill_value 255 which IS NOT"):
+ with raises(ValueError, match="default fill_value 255 which IS NOT"):
  ds.createVariable(
  "enum_var2",
  enum_type,
@@ -2408,9 +2444,7 @@ def test_enum_type_errors_legacyapi(tmp_local_or_remote_netcdf):
  )
  # 3. is only for new api
  # 4.
- with pytest.warns(
- UserWarning, match="interpreted as '_UNDEFINED' by netcdf-c."
- ):
+ with warns(UserWarning, match="interpreted as '_UNDEFINED' by netcdf-c."):
  ds.createVariable(
  "enum_var3",
  enum_type,
@@ -2418,9 +2452,7 @@ def test_enum_type_errors_legacyapi(tmp_local_or_remote_netcdf):
  fill_value=0,
  )
  # 5.
- with pytest.raises(
- ValueError, match="with specified fill_value 100 which IS NOT"
- ):
+ with raises(ValueError, match="with specified fill_value 100 which IS NOT"):
  ds.createVariable("enum_var4", enum_type, ("enum_dim",), fill_value=100)


@@ -2438,9 +2470,8 @@ def test_enum_type(tmp_local_or_remote_netcdf):
  "enum_var", ("enum_dim",), dtype=enum_type, fillvalue=enum_dict["missing"]
  )
  v[0:3] = [1, 2, 3]
- with pytest.raises(ValueError) as e:
+ with raises(ValueError, match="assign illegal value"):
  v[3] = 5
- assert "assign illegal value(s)" in e.value.args[0]

  # check, if new API can read them
  with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
@@ -2481,9 +2512,8 @@ def test_enum_type(tmp_local_or_remote_netcdf):
  "enum_var", enum_type, ("enum_dim",), fill_value=enum_dict["missing"]
  )
  v[0:3] = [1, 2, 3]
- with pytest.raises(ValueError) as e:
+ with raises(ValueError, match="assign illegal value"):
  v[3] = 5
- assert "assign illegal value(s)" in e.value.args[0]

  # check, if new API can read them
  with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
@@ -2525,9 +2555,7 @@ def test_enum_type(tmp_local_or_remote_netcdf):
  "enum_var", enum_type, ("enum_dim",), fill_value=enum_dict["missing"]
  )
  v[0:3] = [1, 2, 3]
- with pytest.raises(
- ValueError, match="assign illegal value to Enum variable"
- ):
+ with raises(ValueError, match="assign illegal value to Enum variable"):
  v[3] = 5

  # check, if new API can read them
@@ -2714,14 +2742,14 @@ def test_complex_type_creation_errors(tmp_local_netcdf):

  with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
  ds.createDimension("x", size=len(complex_array))
- with pytest.raises(TypeError, match="data type 'c4' not understood"):
+ with raises(TypeError, match="data type 'c4' not understood"):
  ds.createVariable("data", "c4", ("x",))

  if "complex256" not in np.sctypeDict:
  pytest.skip("numpy 'complex256' dtype not available")
  with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
  ds.createDimension("x", size=len(complex_array))
- with pytest.raises(
+ with raises(
  TypeError,
  match="Currently only 'complex64' and 'complex128' dtypes are allowed.",
  ):
@@ -2773,3 +2801,19 @@ def test_h5pyd_nonchunked_scalars(hsds_up):
  assert ds["foo"]._h5ds.chunks == (1,)
  # However, since it is a scalar dataset, we should not expose the chunking
  assert ds["foo"].chunks is None
+
+
+ def test_h5pyd_append(hsds_up):
+ if without_h5pyd:
+ pytest.skip("h5pyd package not available")
+ elif not hsds_up:
+ pytest.skip("HSDS service not running")
+ rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
+ fname = f"hdf5://testfile{rnd}.nc"
+
+ with warns(UserWarning, match="Append mode for h5pyd"):
+ with h5netcdf.File(fname, "a", driver="h5pyd") as ds:
+ assert not ds._preexisting_file
+
+ with h5netcdf.File(fname, "a", driver="h5pyd") as ds:
+ assert ds._preexisting_file
{h5netcdf-1.6.1 → h5netcdf-1.6.3}/h5netcdf.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: h5netcdf
- Version: 1.6.1
+ Version: 1.6.3
  Summary: netCDF4 via h5py
  Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
  Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
@@ -56,6 +56,7 @@ Requires-Dist: packaging
  Provides-Extra: test
  Requires-Dist: netCDF4; extra == "test"
  Requires-Dist: pytest; extra == "test"
+ Dynamic: license-file

  h5netcdf
  ========
@@ -318,7 +319,7 @@ The following describes the behavior of h5netcdf with respect to order tracking
  for a few key versions:

  - Version 0.12.0 and earlier, the ``track_order`` parameter`order was missing
- and thus order tracking was implicitely set to ``False``.
+ and thus order tracking was implicitly set to ``False``.
  - Version 0.13.0 enabled order tracking by setting the parameter
  ``track_order`` to ``True`` by default without deprecation.
  - Versions 0.13.1 to 1.0.2 set ``track_order`` to ``False`` due to a bug in a
h5netcdf-1.6.1/h5netcdf/tests/conftest.py
@@ -1,65 +0,0 @@
- import os
- import tempfile
- from pathlib import Path
- from shutil import rmtree
-
- import pytest
-
- try:
- from h5pyd import Folder
- from hsds.hsds_app import HsdsApp
-
- with_reqd_pkgs = True
- except ImportError:
- with_reqd_pkgs = False
-
-
- @pytest.fixture(scope="session")
- def hsds_up():
- """Provide HDF Highly Scalabale Data Service (HSDS) for h5pyd testing."""
- if with_reqd_pkgs:
- root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
- bucket_name = "pytest"
- os.environ["BUCKET_NAME"] = bucket_name
- os.mkdir(
- f"{root_dir}/{bucket_name}"
- ) # need to create a directory for our bucket
-
- hs_username = "h5netcdf-pytest"
- hs_password = "TestEarlyTestEverything"
-
- kwargs = {}
- kwargs["username"] = hs_username
- kwargs["password"] = hs_password
- kwargs["root_dir"] = str(root_dir)
- kwargs["logfile"] = f"{root_dir}/hsds.log"
- kwargs["log_level"] = "DEBUG"
- kwargs["host"] = "localhost"
- kwargs["sn_port"] = 5101
-
- try:
- hsds = HsdsApp(**kwargs)
-
- hsds.run()
- is_up = hsds.ready
-
- if is_up:
- os.environ["HS_ENDPOINT"] = hsds.endpoint
- os.environ["HS_USERNAME"] = hs_username
- os.environ["HS_PASSWORD"] = hs_password
- # make folders expected by pytest
- # pytest/home/h5netcdf-pytest
- # Folder("/pytest/", mode='w')
- Folder("/home/", mode="w")
- Folder("/home/h5netcdf-pytest/", mode="w")
- except Exception:
- is_up = False
-
- yield is_up
- hsds.check_processes() # this will capture hsds log output
- hsds.stop()
-
- rmtree(root_dir, ignore_errors=True)
-
- else:
- yield False