h5netcdf 1.4.1__tar.gz → 1.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/CHANGELOG.rst +16 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/PKG-INFO +2 -2
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/_version.py +9 -4
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/core.py +15 -7
- h5netcdf-1.6.0/h5netcdf/tests/conftest.py +65 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/tests/test_h5netcdf.py +27 -6
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/PKG-INFO +2 -2
- h5netcdf-1.4.1/h5netcdf/tests/conftest.py +0 -181
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/.pre-commit-config.yaml +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/AUTHORS.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/LICENSE +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/README.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/Makefile +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/api.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/changelog.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/conf.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/devguide.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/feature.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/index.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/doc/legacyapi.rst +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/__init__.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/attrs.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/dimensions.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/legacyapi.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/tests/pytest.ini +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/utils.py +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/SOURCES.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/dependency_links.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/requires.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/top_level.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/licenses/H5PY_LICENSE.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/licenses/PSF_LICENSE.txt +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/pyproject.toml +0 -0
- {h5netcdf-1.4.1 → h5netcdf-1.6.0}/setup.cfg +0 -0
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/CHANGELOG.rst

@@ -1,6 +1,22 @@
 Change Log
 ----------

+Version 1.6.0 (March 7th, 2025):
+
+- Allow specifying `h5netcdf.File(driver="h5pyd")` to force the use of h5pyd ({issue}`255`, {pull}`256`).
+  By `Rickard Holmberg <https://github.com/rho-novatron>`_
+- Add pytest-mypy-plugins for xarray nightly test ({pull}`257`).
+  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+
+Version 1.5.0 (January 26th, 2025):
+
+- Update CI to new versions (Python 3.13, 3.14 alpha), remove numpy 1 from h5pyd runs ({pull}`250`).
+  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_
+- Update CI and reinstate h5pyd/hsds test runs ({pull}`247`).
+  By `John Readey <https://github.com/jreadey>`_
+- Allow ``zlib`` to be used as an alias for ``gzip`` for enhanced compatibility with h5netcdf's API and xarray.
+  By `Mark Harfouche <https://github.com/hmaarrfk>`_
+
 Version 1.4.1 (November 13th, 2024):

 - Add CI run for hdf5 1.10.6, fix complex tests, fix enum/user type tests ({pull}`244`).
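The two user-facing changes above can be exercised directly; a minimal sketch, assuming a reachable HSDS endpoint for the h5pyd case (file names are illustrative):

    import h5netcdf

    # New in 1.6.0: force the h5pyd backend even for a plain path that does
    # not start with http://, https://, or hdf5://.
    with h5netcdf.File("/home/user/testfile.nc", "w", driver="h5pyd") as ds:
        ds.dimensions = {"x": 4}

    # New in 1.5.0: "zlib" is accepted as an alias for "gzip".
    with h5netcdf.File("local.nc", "w") as ds:
        ds.dimensions = {"x": 4}
        v = ds.create_variable("foo", ("x",), float, compression="zlib")
        v[...] = 1.0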
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: h5netcdf
-Version: 1.4.1
+Version: 1.6.0
 Summary: netCDF4 via h5py
 Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
 Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/_version.py

@@ -1,8 +1,13 @@
-# file generated by setuptools_scm
+# file generated by setuptools-scm
 # don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
 TYPE_CHECKING = False
 if TYPE_CHECKING:
-    from typing import Tuple, Union
+    from typing import Tuple
+    from typing import Union
+
     VERSION_TUPLE = Tuple[Union[int, str], ...]
 else:
     VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '1.4.1'
-__version_tuple__ = version_tuple = (1, 4, 1)
+__version__ = version = '1.6.0'
+__version_tuple__ = version_tuple = (1, 6, 0)
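The generated module exposes the version as plain attributes, so it can be read without packaging helpers; for example:

    from h5netcdf._version import __version__, version_tuple

    print(__version__)    # "1.6.0"
    print(version_tuple)  # (1, 6, 0)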
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/core.py

@@ -140,6 +140,9 @@ class BaseObject:


 _h5type_mapping = {
+    "H5T_INTEGER": 0,
+    "H5T_FLOAT": 1,
+    "H5T_STRING": 3,
     "H5T_COMPOUND": 6,
     "H5T_ENUM": 8,
     "H5T_VLEN": 9,
@@ -985,8 +988,7 @@ class Group(Mapping):
         if name in self:
             raise ValueError(f"unable to create group {name:!r} (name already exists)")
         kwargs = {}
-
-        kwargs.update(track_order=self._track_order)
+        kwargs.update(track_order=self._track_order)

         self._h5group.create_group(name, **kwargs)
         self._groups[name] = self._group_cls(self, name)
@@ -1102,8 +1104,7 @@
             self._dimensions[name]._detach_scale()
             del self._h5group[name]

-
-        kwargs.update(dict(track_order=self._parent._track_order))
+        kwargs.update(dict(track_order=self._parent._track_order))

         # fill value handling
         fillvalue, h5fillvalue = _check_fillvalue(self, fillvalue, dtype)
@@ -1182,9 +1183,9 @@
         ``h5netcdf``. Discussion on ``h5netcdf`` chunking can be found in (:issue:`52`)
         and (:pull:`127`).
     compression : str, optional
-        Compression filter to apply, defaults to ``gzip``
+        Compression filter to apply, defaults to ``gzip``. ``zlib`` is an alias for ``gzip``.
     compression_opts : int
-        Parameter for compression filter. For ``compression="gzip"`` Integer from 1 to 9 specifying
+        Parameter for compression filter. For ``compression="gzip"``/``compression="zlib"`` Integer from 1 to 9 specifying
         the compression level. Defaults to 4.
     fletcher32 : bool
         If ``True``, HDF5 Fletcher32 checksum algorithm is applied. Defaults to ``False``.
@@ -1231,6 +1232,13 @@ class Group(Mapping):
         group = self
         for k in keys[:-1]:
             group = group._require_child_group(k)
+
+        # Allow zlib to be an alias for gzip
+        # but use getters and setters so as not to change the behavior
+        # of the default h5py functions
+        if kwargs.get("compression", None) == "zlib":
+            kwargs["compression"] = "gzip"
+
         return group._create_child_variable(
             keys[-1],
             dimensions,
@@ -1497,7 +1505,7 @@ class File(Group):
         self._close_h5file = True
         try:
             if isinstance(path, str):
-                if (
+                if kwargs.get("driver") == "h5pyd" or (
                     path.startswith(("http://", "https://", "hdf5://"))
                     and "driver" not in kwargs
                 ):
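The alias handling in ``create_variable`` is a plain kwargs rewrite before delegating to h5py; an equivalent standalone sketch (the helper name is hypothetical):

    def _normalize_compression(kwargs):
        # Map the netCDF4/xarray-style "zlib" onto h5py's "gzip" filter;
        # every other compression value passes through untouched.
        if kwargs.get("compression", None) == "zlib":
            kwargs["compression"] = "gzip"
        return kwargs

    assert _normalize_compression({"compression": "zlib"}) == {"compression": "gzip"}
    assert _normalize_compression({"compression": "lzf"}) == {"compression": "lzf"}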
h5netcdf-1.6.0/h5netcdf/tests/conftest.py (new file)

@@ -0,0 +1,65 @@
+import os
+import tempfile
+from pathlib import Path
+from shutil import rmtree
+
+import pytest
+
+try:
+    from h5pyd import Folder
+    from hsds.hsds_app import HsdsApp
+
+    with_reqd_pkgs = True
+except ImportError:
+    with_reqd_pkgs = False
+
+
+@pytest.fixture(scope="session")
+def hsds_up():
+    """Provide HDF Highly Scalabale Data Service (HSDS) for h5pyd testing."""
+    if with_reqd_pkgs:
+        root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
+        bucket_name = "pytest"
+        os.environ["BUCKET_NAME"] = bucket_name
+        os.mkdir(
+            f"{root_dir}/{bucket_name}"
+        )  # need to create a directory for our bucket
+
+        hs_username = "h5netcdf-pytest"
+        hs_password = "TestEarlyTestEverything"
+
+        kwargs = {}
+        kwargs["username"] = hs_username
+        kwargs["password"] = hs_password
+        kwargs["root_dir"] = str(root_dir)
+        kwargs["logfile"] = f"{root_dir}/hsds.log"
+        kwargs["log_level"] = "DEBUG"
+        kwargs["host"] = "localhost"
+        kwargs["sn_port"] = 5101
+
+        try:
+            hsds = HsdsApp(**kwargs)
+
+            hsds.run()
+            is_up = hsds.ready
+
+            if is_up:
+                os.environ["HS_ENDPOINT"] = hsds.endpoint
+                os.environ["HS_USERNAME"] = hs_username
+                os.environ["HS_PASSWORD"] = hs_password
+                # make folders expected by pytest
+                # pytest/home/h5netcdf-pytest
+                # Folder("/pytest/", mode='w')
+                Folder("/home/", mode="w")
+                Folder("/home/h5netcdf-pytest/", mode="w")
+        except Exception:
+            is_up = False
+
+        yield is_up
+        hsds.check_processes()  # this will capture hsds log output
+        hsds.stop()
+
+        rmtree(root_dir, ignore_errors=True)
+
+    else:
+        yield False
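Tests consume this session-scoped fixture by name and skip when the service could not be started, as ``test_hsds`` in the test module below does; a minimal consumer (test name hypothetical):

    import pytest

    def test_remote_feature(hsds_up):
        # hsds_up yields True only if HSDS started and the h5pyd Folders
        # were created; otherwise skip rather than fail.
        if not hsds_up:
            pytest.skip("HSDS service not running")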
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf/tests/test_h5netcdf.py

@@ -107,7 +107,9 @@ _vlen_string = "foo"


 def is_h5py_char_working(tmp_netcdf, name):
-    if not isinstance(tmp_netcdf, h5py.File):
+    if not isinstance(tmp_netcdf, h5py.File) and (
+        without_h5pyd or not isinstance(tmp_netcdf, h5pyd.File)
+    ):
         h5 = get_hdf5_module(tmp_netcdf)
         # https://github.com/Unidata/netcdf-c/issues/298
         with h5.File(tmp_netcdf, "r") as ds:
@@ -184,14 +186,14 @@ def write_legacy_netcdf(tmp_netcdf, write_module):
     ds.close()


-def write_h5netcdf(tmp_netcdf):
+def write_h5netcdf(tmp_netcdf, compression="gzip"):
     ds = h5netcdf.File(tmp_netcdf, "w")
     ds.attrs["global"] = 42
     ds.attrs["other_attr"] = "yes"
     ds.dimensions = {"x": 4, "y": 5, "z": 6, "empty": 0, "unlimited": None}

     v = ds.create_variable(
-        "foo", ("x", "y"), float, chunks=(4, 5), compression="gzip", shuffle=True
+        "foo", ("x", "y"), float, chunks=(4, 5), compression=compression, shuffle=True
     )
     v[...] = 1
     v.attrs["units"] = "meters"
@@ -515,6 +517,11 @@ def test_roundtrip_h5netcdf(tmp_local_or_remote_netcdf, decode_vlen_strings):
     read_h5netcdf(tmp_local_or_remote_netcdf, h5netcdf, decode_vlen_strings)


+def test_write_compression_as_zlib(tmp_local_netcdf):
+    write_h5netcdf(tmp_local_netcdf, compression="zlib")
+    read_legacy_netcdf(tmp_local_netcdf, netCDF4, h5netcdf)
+
+
 def test_write_netCDF4_read_h5netcdf(tmp_local_netcdf, decode_vlen_strings):
     write_legacy_netcdf(tmp_local_netcdf, netCDF4)
     read_h5netcdf(tmp_local_netcdf, netCDF4, decode_vlen_strings)
@@ -2649,6 +2656,8 @@ def test_compoundtype_creation(tmp_local_or_remote_netcdf, netcdf_write_module):
     reason="does not work before netCDF4 v1.7.0",
 )
 def test_nc_complex_compatibility(tmp_local_or_remote_netcdf, netcdf_write_module):
+    if tmp_local_or_remote_netcdf.startswith(remote_h5):
+        pytest.skip("not yet implemented in h5pyd/hsds")
     # native complex
     complex_array = np.array([0 + 0j, 1 + 0j, 0 + 1j, 1 + 1j, 0.25 + 0.75j])
     # compound complex
@@ -2726,9 +2735,7 @@ def test_hsds(hsds_up):
     elif not hsds_up:
         pytest.skip("HSDS service not running")
     rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
-    fname = (
-        "hdf5://" + "home" + "/" + env["HS_USERNAME"] + "/" + "testfile" + rnd + ".nc"
-    )
+    fname = f"hdf5://testfile{rnd}.nc"
     with h5netcdf.File(fname, "w") as ds:
         g = ds.create_group("test")
         g.dimensions["x"] = None
@@ -2736,3 +2743,17 @@ def test_hsds(hsds_up):

     with h5netcdf.File(fname, "r") as ds:
         print(ds["test"]["var1"])
+
+
+def test_h5pyd_driver(hsds_up):
+    # test that specifying driver='h5pyd' forces use of h5pyd
+    if without_h5pyd:
+        pytest.skip("h5pyd package not available")
+    elif not hsds_up:
+        pytest.skip("HSDS service not running")
+    rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
+    for prefix in ("/", "hdf5://"):
+        fname = f"{prefix}testfile{rnd}.nc"
+        with h5netcdf.File(fname, "w", driver="h5pyd") as ds:
+            assert ds._h5py == h5pyd
+            assert isinstance(ds._h5file, h5pyd.File)
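To run only the HSDS-backed tests locally, a standard pytest keyword filter works, assuming ``h5pyd`` and ``hsds`` are installed so the session fixture can start the service:

    pytest h5netcdf/tests/test_h5netcdf.py -k "hsds or h5pyd_driver"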
{h5netcdf-1.4.1 → h5netcdf-1.6.0}/h5netcdf.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: h5netcdf
-Version: 1.4.1
+Version: 1.6.0
 Summary: netCDF4 via h5py
 Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
 Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
h5netcdf-1.4.1/h5netcdf/tests/conftest.py (deleted)

@@ -1,181 +0,0 @@
-import os
-import sys
-import tempfile
-from pathlib import Path
-from shutil import rmtree
-
-import pytest
-
-try:
-    from h5pyd._apps.hstouch import main as hstouch
-    from hsds.hsds_app import HsdsApp
-
-    with_reqd_pkgs = True
-except ImportError:
-    with_reqd_pkgs = False
-
-
-def set_hsds_root():
-    """Make required HSDS root directory."""
-    hsds_root = Path(os.environ["ROOT_DIR"]) / os.environ["BUCKET_NAME"] / "home"
-    if hsds_root.exists():
-        rmtree(hsds_root)
-
-    old_sysargv = sys.argv
-    sys.argv = [""]
-    sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
-    sys.argv.extend(["-u", "admin"])
-    sys.argv.extend(["-p", "admin"])
-    sys.argv.extend(["--bucket", os.environ["BUCKET_NAME"]])
-    sys.argv.append("/home/")
-    hstouch()
-
-    sys.argv = [""]
-    sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
-    sys.argv.extend(["-u", "admin"])
-    sys.argv.extend(["-p", "admin"])
-    sys.argv.extend(["--bucket", os.environ["BUCKET_NAME"]])
-    sys.argv.extend(["-o", os.environ["HS_USERNAME"]])
-    sys.argv.append(f'/home/{os.environ["HS_USERNAME"]}/')
-    hstouch()
-    sys.argv = old_sysargv
-
-
-@pytest.fixture(scope="session")
-def hsds_up():
-    """Provide HDF Highly Scalabale Data Service (HSDS) for h5pyd testing."""
-    if with_reqd_pkgs:
-        root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
-        os.environ["BUCKET_NAME"] = "data"
-        (root_dir / os.getenv("BUCKET_NAME")).mkdir(parents=True, exist_ok=True)
-        os.environ["ROOT_DIR"] = str(root_dir)
-        os.environ["HS_USERNAME"] = "h5netcdf-pytest"
-        os.environ["HS_PASSWORD"] = "TestEarlyTestEverything"
-
-        config = """allow_noauth: true
-auth_expiration: -1
-default_public: False
-aws_access_key_id: xxx
-aws_secret_access_key: xxx
-aws_iam_role: hsds_role
-aws_region: us-east-1
-hsds_endpoint: http://hsds.hdf.test
-aws_s3_gateway: null
-aws_dynamodb_gateway: null
-aws_dynamodb_users_table: null
-azure_connection_string: null
-azure_resource_id: null
-azure_storage_account: null
-azure_resource_group: null
-root_dir: null
-password_salt: null
-bucket_name: hsdstest
-head_port: 5100
-head_ram: 512m
-dn_port: 6101
-dn_ram: 3g
-sn_port: 5101
-sn_ram: 1g
-rangeget_port: 6900
-rangeget_ram: 2g
-target_sn_count: 0
-target_dn_count: 0
-log_level: INFO
-log_timestamps: false
-log_prefix: null
-max_tcp_connections: 100
-head_sleep_time: 10
-node_sleep_time: 10
-async_sleep_time: 10
-s3_sync_interval: 1
-s3_sync_task_timeout: 10
-store_read_timeout: 1
-store_read_sleep_interval: 0.1
-max_pending_write_requests: 20
-flush_sleep_interval: 1
-max_chunks_per_request: 1000
-min_chunk_size: 1m
-max_chunk_size: 4m
-max_request_size: 100m
-max_chunks_per_folder: 0
-max_task_count: 100
-max_tasks_per_node_per_request: 16
-aio_max_pool_connections: 64
-metadata_mem_cache_size: 128m
-metadata_mem_cache_expire: 3600
-chunk_mem_cache_size: 128m
-chunk_mem_cache_expire: 3600
-data_cache_size: 128m
-data_cache_max_req_size: 128k
-data_cache_expire_time: 3600
-data_cache_page_size: 4m
-data_cache_max_concurrent_read: 16
-timeout: 30
-password_file: /config/passwd.txt
-groups_file: /config/groups.txt
-server_name: Highly Scalable Data Service (HSDS)
-greeting: Welcome to HSDS!
-about: HSDS is a webservice for HDF data
-top_level_domains: []
-cors_domain: "*"
-admin_user: admin
-admin_group: null
-openid_provider: azure
-openid_url: null
-openid_audience: null
-openid_claims: unique_name,appid,roles
-chaos_die: 0
-standalone_app: false
-blosc_nthreads: 2
-http_compression: false
-http_max_url_length: 512
-k8s_app_label: hsds
-k8s_namespace: null
-restart_policy: on-failure
-domain_req_max_objects_limit: 500
-"""
-        tmp_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-"))
-        config_file = tmp_dir / "config.yml"
-        config_file.write_text(config)
-        passwd_file = tmp_dir / "passwd.txt"
-        passwd_file.write_text(
-            f'admin:admin\n{os.environ["HS_USERNAME"]}:{os.environ["HS_PASSWORD"]}\n'
-        )
-        log_file = str(tmp_dir / "hsds.log")
-        tmp_dir = str(tmp_dir)
-        if sys.platform == "darwin":
-            # macOS temp directory paths can be very long and break low-level
-            # socket comms code...
-            socket_dir = "/tmp/hsds"
-        else:
-            socket_dir = tmp_dir
-
-        try:
-            hsds = HsdsApp(
-                username=os.environ["HS_USERNAME"],
-                password=os.environ["HS_PASSWORD"],
-                password_file=str(passwd_file),
-                log_level=os.getenv("LOG_LEVEL", "DEBUG"),
-                logfile=log_file,
-                socket_dir=socket_dir,
-                config_dir=tmp_dir,
-                dn_count=2,
-            )
-            hsds.run()
-            is_up = hsds.ready
-
-            if is_up:
-                os.environ["HS_ENDPOINT"] = hsds.endpoint
-                set_hsds_root()
-        except Exception:
-            is_up = False
-
-        yield is_up
-
-        hsds.stop()
-        rmtree(tmp_dir, ignore_errors=True)
-        rmtree(socket_dir, ignore_errors=True)
-        rmtree(root_dir, ignore_errors=True)
-
-    else:
-        yield False