h5netcdf 1.4.0__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of h5netcdf might be problematic.

h5netcdf/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.4.0'
- __version_tuple__ = version_tuple = (1, 4, 0)
+ __version__ = version = '1.5.0'
+ __version_tuple__ = version_tuple = (1, 5, 0)
h5netcdf/core.py CHANGED
@@ -140,6 +140,9 @@ class BaseObject:


  _h5type_mapping = {
+     "H5T_INTEGER": 0,
+     "H5T_FLOAT": 1,
+     "H5T_STRING": 3,
      "H5T_COMPOUND": 6,
      "H5T_ENUM": 8,
      "H5T_VLEN": 9,
@@ -985,8 +988,7 @@ class Group(Mapping):
          if name in self:
              raise ValueError(f"unable to create group {name:!r} (name already exists)")
          kwargs = {}
-         if self._root._h5py.__name__ == "h5py":
-             kwargs.update(track_order=self._track_order)
+         kwargs.update(track_order=self._track_order)

          self._h5group.create_group(name, **kwargs)
          self._groups[name] = self._group_cls(self, name)
@@ -1102,8 +1104,7 @@ class Group(Mapping):
          self._dimensions[name]._detach_scale()
          del self._h5group[name]

-         if self._root._h5py.__name__ == "h5py":
-             kwargs.update(dict(track_order=self._parent._track_order))
+         kwargs.update(dict(track_order=self._parent._track_order))

          # fill value handling
          fillvalue, h5fillvalue = _check_fillvalue(self, fillvalue, dtype)
@@ -1182,9 +1183,9 @@ class Group(Mapping):
      ``h5netcdf``. Discussion on ``h5netcdf`` chunking can be found in (:issue:`52`)
      and (:pull:`127`).
      compression : str, optional
-         Compression filter to apply, defaults to ``gzip``
+         Compression filter to apply, defaults to ``gzip``. ``zlib`` is an alias for ``gzip``.
      compression_opts : int
-         Parameter for compression filter. For ``compression="gzip"`` Integer from 1 to 9 specifying
+         Parameter for compression filter. For ``compression="gzip"``/``compression="zlib"`` Integer from 1 to 9 specifying
          the compression level. Defaults to 4.
      fletcher32 : bool
          If ``True``, HDF5 Fletcher32 checksum algorithm is applied. Defaults to ``False``.
@@ -1231,6 +1232,13 @@ class Group(Mapping):
          group = self
          for k in keys[:-1]:
              group = group._require_child_group(k)
+
+         # Allow zlib to be an alias for gzip
+         # but use getters and setters so as not to change the behavior
+         # of the default h5py functions
+         if kwargs.get("compression", None) == "zlib":
+             kwargs["compression"] = "gzip"
+
          return group._create_child_variable(
              keys[-1],
              dimensions,
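
Taken together, the core.py changes make `compression="zlib"` a drop-in alias for `"gzip"` when creating variables. A minimal sketch of the new spelling (hypothetical file and variable names; assumes h5netcdf 1.5.0):

```python
import h5netcdf
import numpy as np

with h5netcdf.File("example.nc", "w") as ds:
    ds.dimensions = {"x": 4, "y": 5}
    # "zlib" is rewritten to "gzip" before the kwargs reach h5py
    v = ds.create_variable("foo", ("x", "y"), float, compression="zlib")
    v[...] = np.arange(20.0).reshape(4, 5)
```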
h5netcdf/tests/conftest.py CHANGED
@@ -1,5 +1,4 @@
  import os
- import sys
  import tempfile
  from pathlib import Path
  from shutil import rmtree
@@ -7,7 +6,7 @@ from shutil import rmtree
  import pytest

  try:
-     from h5pyd._apps.hstouch import main as hstouch
+     from h5pyd import Folder
      from hsds.hsds_app import HsdsApp

      with_reqd_pkgs = True
@@ -15,166 +14,51 @@ except ImportError:
  with_reqd_pkgs = False


- def set_hsds_root():
-     """Make required HSDS root directory."""
-     hsds_root = Path(os.environ["ROOT_DIR"]) / os.environ["BUCKET_NAME"] / "home"
-     if hsds_root.exists():
-         rmtree(hsds_root)
-
-     old_sysargv = sys.argv
-     sys.argv = [""]
-     sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
-     sys.argv.extend(["-u", "admin"])
-     sys.argv.extend(["-p", "admin"])
-     sys.argv.extend(["--bucket", os.environ["BUCKET_NAME"]])
-     sys.argv.append("/home/")
-     hstouch()
-
-     sys.argv = [""]
-     sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
-     sys.argv.extend(["-u", "admin"])
-     sys.argv.extend(["-p", "admin"])
-     sys.argv.extend(["--bucket", os.environ["BUCKET_NAME"]])
-     sys.argv.extend(["-o", os.environ["HS_USERNAME"]])
-     sys.argv.append(f'/home/{os.environ["HS_USERNAME"]}/')
-     hstouch()
-     sys.argv = old_sysargv
-
-
  @pytest.fixture(scope="session")
  def hsds_up():
      """Provide HDF Highly Scalabale Data Service (HSDS) for h5pyd testing."""
      if with_reqd_pkgs:
          root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
-         os.environ["BUCKET_NAME"] = "data"
-         (root_dir / os.getenv("BUCKET_NAME")).mkdir(parents=True, exist_ok=True)
-         os.environ["ROOT_DIR"] = str(root_dir)
-         os.environ["HS_USERNAME"] = "h5netcdf-pytest"
-         os.environ["HS_PASSWORD"] = "TestEarlyTestEverything"
+         bucket_name = "pytest"
+         os.environ["BUCKET_NAME"] = bucket_name
+         os.mkdir(
+             f"{root_dir}/{bucket_name}"
+         )  # need to create a directory for our bucket

-         config = """allow_noauth: true
- auth_expiration: -1
- default_public: False
- aws_access_key_id: xxx
- aws_secret_access_key: xxx
- aws_iam_role: hsds_role
- aws_region: us-east-1
- hsds_endpoint: http://hsds.hdf.test
- aws_s3_gateway: null
- aws_dynamodb_gateway: null
- aws_dynamodb_users_table: null
- azure_connection_string: null
- azure_resource_id: null
- azure_storage_account: null
- azure_resource_group: null
- root_dir: null
- password_salt: null
- bucket_name: hsdstest
- head_port: 5100
- head_ram: 512m
- dn_port: 6101
- dn_ram: 3g
- sn_port: 5101
- sn_ram: 1g
- rangeget_port: 6900
- rangeget_ram: 2g
- target_sn_count: 0
- target_dn_count: 0
- log_level: INFO
- log_timestamps: false
- log_prefix: null
- max_tcp_connections: 100
- head_sleep_time: 10
- node_sleep_time: 10
- async_sleep_time: 10
- s3_sync_interval: 1
- s3_sync_task_timeout: 10
- store_read_timeout: 1
- store_read_sleep_interval: 0.1
- max_pending_write_requests: 20
- flush_sleep_interval: 1
- max_chunks_per_request: 1000
- min_chunk_size: 1m
- max_chunk_size: 4m
- max_request_size: 100m
- max_chunks_per_folder: 0
- max_task_count: 100
- max_tasks_per_node_per_request: 16
- aio_max_pool_connections: 64
- metadata_mem_cache_size: 128m
- metadata_mem_cache_expire: 3600
- chunk_mem_cache_size: 128m
- chunk_mem_cache_expire: 3600
- data_cache_size: 128m
- data_cache_max_req_size: 128k
- data_cache_expire_time: 3600
- data_cache_page_size: 4m
- data_cache_max_concurrent_read: 16
- timeout: 30
- password_file: /config/passwd.txt
- groups_file: /config/groups.txt
- server_name: Highly Scalable Data Service (HSDS)
- greeting: Welcome to HSDS!
- about: HSDS is a webservice for HDF data
- top_level_domains: []
- cors_domain: "*"
- admin_user: admin
- admin_group: null
- openid_provider: azure
- openid_url: null
- openid_audience: null
- openid_claims: unique_name,appid,roles
- chaos_die: 0
- standalone_app: false
- blosc_nthreads: 2
- http_compression: false
- http_max_url_length: 512
- k8s_app_label: hsds
- k8s_namespace: null
- restart_policy: on-failure
- domain_req_max_objects_limit: 500
-         """
-         tmp_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-"))
-         config_file = tmp_dir / "config.yml"
-         config_file.write_text(config)
-         passwd_file = tmp_dir / "passwd.txt"
-         passwd_file.write_text(
-             f'admin:admin\n{os.environ["HS_USERNAME"]}:{os.environ["HS_PASSWORD"]}\n'
-         )
-         log_file = str(tmp_dir / "hsds.log")
-         tmp_dir = str(tmp_dir)
-         if sys.platform == "darwin":
-             # macOS temp directory paths can be very long and break low-level
-             # socket comms code...
-             socket_dir = "/tmp/hsds"
-         else:
-             socket_dir = tmp_dir
+         hs_username = "h5netcdf-pytest"
+         hs_password = "TestEarlyTestEverything"
+
+         kwargs = {}
+         kwargs["username"] = hs_username
+         kwargs["password"] = hs_password
+         kwargs["root_dir"] = str(root_dir)
+         kwargs["logfile"] = f"{root_dir}/hsds.log"
+         kwargs["log_level"] = "DEBUG"
+         kwargs["host"] = "localhost"
+         kwargs["sn_port"] = 5101

          try:
-             hsds = HsdsApp(
-                 username=os.environ["HS_USERNAME"],
-                 password=os.environ["HS_PASSWORD"],
-                 password_file=str(passwd_file),
-                 log_level=os.getenv("LOG_LEVEL", "DEBUG"),
-                 logfile=log_file,
-                 socket_dir=socket_dir,
-                 config_dir=tmp_dir,
-                 dn_count=2,
-             )
+             hsds = HsdsApp(**kwargs)
+
              hsds.run()
              is_up = hsds.ready

              if is_up:
                  os.environ["HS_ENDPOINT"] = hsds.endpoint
-                 set_hsds_root()
+                 os.environ["HS_USERNAME"] = hs_username
+                 os.environ["HS_PASSWORD"] = hs_password
+                 # make folders expected by pytest
+                 # pytest/home/h5netcdf-pytest
+                 # Folder("/pytest/", mode='w')
+                 Folder("/home/", mode="w")
+                 Folder("/home/h5netcdf-pytest/", mode="w")
          except Exception:
              is_up = False

          yield is_up
-
+         hsds.check_processes()  # this will capture hsds log output
          hsds.stop()
-         rmtree(tmp_dir, ignore_errors=True)
-         rmtree(socket_dir, ignore_errors=True)
+
          rmtree(root_dir, ignore_errors=True)

      else:
h5netcdf/tests/test_h5netcdf.py CHANGED
@@ -107,7 +107,9 @@ _vlen_string = "foo"


  def is_h5py_char_working(tmp_netcdf, name):
-     if not isinstance(tmp_netcdf, h5py.File):
+     if not isinstance(tmp_netcdf, h5py.File) and (
+         without_h5pyd or not isinstance(tmp_netcdf, h5pyd.File)
+     ):
          h5 = get_hdf5_module(tmp_netcdf)
          # https://github.com/Unidata/netcdf-c/issues/298
          with h5.File(tmp_netcdf, "r") as ds:
@@ -184,14 +186,14 @@ def write_legacy_netcdf(tmp_netcdf, write_module):
      ds.close()


- def write_h5netcdf(tmp_netcdf):
+ def write_h5netcdf(tmp_netcdf, compression="gzip"):
      ds = h5netcdf.File(tmp_netcdf, "w")
      ds.attrs["global"] = 42
      ds.attrs["other_attr"] = "yes"
      ds.dimensions = {"x": 4, "y": 5, "z": 6, "empty": 0, "unlimited": None}

      v = ds.create_variable(
-         "foo", ("x", "y"), float, chunks=(4, 5), compression="gzip", shuffle=True
+         "foo", ("x", "y"), float, chunks=(4, 5), compression=compression, shuffle=True
      )
      v[...] = 1
      v.attrs["units"] = "meters"
@@ -515,6 +517,11 @@ def test_roundtrip_h5netcdf(tmp_local_or_remote_netcdf, decode_vlen_strings):
      read_h5netcdf(tmp_local_or_remote_netcdf, h5netcdf, decode_vlen_strings)


+ def test_write_compression_as_zlib(tmp_local_netcdf):
+     write_h5netcdf(tmp_local_netcdf, compression="zlib")
+     read_legacy_netcdf(tmp_local_netcdf, netCDF4, h5netcdf)
+
+
  def test_write_netCDF4_read_h5netcdf(tmp_local_netcdf, decode_vlen_strings):
      write_legacy_netcdf(tmp_local_netcdf, netCDF4)
      read_h5netcdf(tmp_local_netcdf, netCDF4, decode_vlen_strings)
@@ -2242,7 +2249,9 @@ def test_user_type_errors_new_api(tmp_local_or_remote_netcdf):
      if tmp_local_or_remote_netcdf.startswith(remote_h5):
          testcontext = pytest.raises(RuntimeError, match="Conflict")
      else:
-         testcontext = pytest.raises(KeyError, match="name already exists")
+         testcontext = pytest.raises(
+             (KeyError, TypeError), match="name already exists"
+         )
      with testcontext:
          ds.create_enumtype(np.uint8, "enum_t", enum_dict2)

@@ -2290,7 +2299,9 @@ def test_user_type_errors_legacyapi(tmp_local_or_remote_netcdf):
      if tmp_local_or_remote_netcdf.startswith(remote_h5):
          testcontext = pytest.raises(RuntimeError, match="Conflict")
      else:
-         testcontext = pytest.raises(KeyError, match="name already exists")
+         testcontext = pytest.raises(
+             (KeyError, TypeError), match="name already exists"
+         )
      with testcontext:
          ds.createEnumType(np.uint8, "enum_t", enum_dict1)

@@ -2645,6 +2656,8 @@ def test_compoundtype_creation(tmp_local_or_remote_netcdf, netcdf_write_module):
      reason="does not work before netCDF4 v1.7.0",
  )
  def test_nc_complex_compatibility(tmp_local_or_remote_netcdf, netcdf_write_module):
+     if tmp_local_or_remote_netcdf.startswith(remote_h5):
+         pytest.skip("not yet implemented in h5pyd/hsds")
      # native complex
      complex_array = np.array([0 + 0j, 1 + 0j, 0 + 1j, 1 + 1j, 0.25 + 0.75j])
      # compound complex
@@ -2704,6 +2717,8 @@ def test_complex_type_creation_errors(tmp_local_netcdf):
      with pytest.raises(TypeError, match="data type 'c4' not understood"):
          ds.createVariable("data", "c4", ("x",))

+     if "complex256" not in np.sctypeDict:
+         pytest.skip("numpy 'complex256' dtype not available")
      with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
          ds.createDimension("x", size=len(complex_array))
      with pytest.raises(
@@ -2720,9 +2735,7 @@ def test_hsds(hsds_up):
      elif not hsds_up:
          pytest.skip("HSDS service not running")
      rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
-     fname = (
-         "hdf5://" + "home" + "/" + env["HS_USERNAME"] + "/" + "testfile" + rnd + ".nc"
-     )
+     fname = f"hdf5://testfile{rnd}.nc"
      with h5netcdf.File(fname, "w") as ds:
          g = ds.create_group("test")
          g.dimensions["x"] = None
h5netcdf-1.4.0.dist-info/METADATA → h5netcdf-1.5.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: h5netcdf
- Version: 1.4.0
+ Version: 1.5.0
  Summary: netCDF4 via h5py
  Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
  Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
h5netcdf-1.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+ h5netcdf/__init__.py,sha256=Y0EBCcmlJctwl1kCmj7yLijTVy9AioBTr2091vInAtw,456
+ h5netcdf/_version.py,sha256=OYzqgMEgfFG0au4hzbEdgYI-c7Hxo3wdBtrpEjK1RoY,411
+ h5netcdf/attrs.py,sha256=4IvV4ULLWkz4igFsvu9S2LB745wgUKrIdIuSeO5kpX8,3581
+ h5netcdf/core.py,sha256=eRBedSrVW1XTi2VaQmnbkb5mmNStjXLKtMfKS2aXZ9M,62784
+ h5netcdf/dimensions.py,sha256=2g0p9DOAC0hhQ94spIAjWeKC1qyhzzO0s15xCFYSscM,7803
+ h5netcdf/legacyapi.py,sha256=MIZlht5Ad4hDFF1Slz2vXmKkgbv7Fhhf2YwNIe16Lfk,7682
+ h5netcdf/utils.py,sha256=6E-HAIE0ONMyL4SxI3oUyQvrDgDWifR5EPde91V9rT0,674
+ h5netcdf/tests/conftest.py,sha256=l6bOQyqe4gcdKHSNNijeWlFYTpEZse4QEUWbUntAIf4,1825
+ h5netcdf/tests/pytest.ini,sha256=ruJxrLdCIA4bCPVuPQjxsLSlvVxuIsIakK6iQOmz-ak,107
+ h5netcdf/tests/test_h5netcdf.py,sha256=0YEueWNipnriBEPCr1-WHc6yas2OhAxyKRYJPpdktfQ,107166
+ h5netcdf-1.5.0.dist-info/AUTHORS.txt,sha256=LTKzUh9o4Wc_oT3aFC48cyDCCP6tdm6VEV_6RrNy4uo,272
+ h5netcdf-1.5.0.dist-info/LICENSE,sha256=Xer1Jg8iL_n9Da0xt0S99blk6tsg9tee_JdgH1rWTjs,1505
+ h5netcdf-1.5.0.dist-info/METADATA,sha256=4nRbqYGrDkO_v8Pjp7Hn3qkSrfG3zXbtce_ZbjzY2lc,13366
+ h5netcdf-1.5.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ h5netcdf-1.5.0.dist-info/top_level.txt,sha256=Fb_KIpOE6MBqjSvxV1Ay7oYce1mdmQ1pO9JQJPDeGqg,9
+ h5netcdf-1.5.0.dist-info/RECORD,,
h5netcdf-1.4.0.dist-info/WHEEL → h5netcdf-1.5.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.1.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

h5netcdf-1.4.0.dist-info/RECORD DELETED
@@ -1,16 +0,0 @@
- h5netcdf/__init__.py,sha256=Y0EBCcmlJctwl1kCmj7yLijTVy9AioBTr2091vInAtw,456
- h5netcdf/_version.py,sha256=R8-T9fmURjcuoxYpHTAjyNAhgJPDtI2jogCjqYYkfCU,411
- h5netcdf/attrs.py,sha256=4IvV4ULLWkz4igFsvu9S2LB745wgUKrIdIuSeO5kpX8,3581
- h5netcdf/core.py,sha256=5rjGhCTIHZKJ_yMHyvZHIPndPr1Em9CVzyiV5F9H3E8,62511
- h5netcdf/dimensions.py,sha256=2g0p9DOAC0hhQ94spIAjWeKC1qyhzzO0s15xCFYSscM,7803
- h5netcdf/legacyapi.py,sha256=MIZlht5Ad4hDFF1Slz2vXmKkgbv7Fhhf2YwNIe16Lfk,7682
- h5netcdf/utils.py,sha256=6E-HAIE0ONMyL4SxI3oUyQvrDgDWifR5EPde91V9rT0,674
- h5netcdf/tests/conftest.py,sha256=cI0BXKM_LRdsQ8vAl3vJ0r1ShGpNUfj2xOH6KmgfZHw,5034
- h5netcdf/tests/pytest.ini,sha256=ruJxrLdCIA4bCPVuPQjxsLSlvVxuIsIakK6iQOmz-ak,107
- h5netcdf/tests/test_h5netcdf.py,sha256=ufQ4NSwrJlu2hQkVLzrkyntqWXpxSxRmBYoSiR_PMB4,106640
- h5netcdf-1.4.0.dist-info/AUTHORS.txt,sha256=LTKzUh9o4Wc_oT3aFC48cyDCCP6tdm6VEV_6RrNy4uo,272
- h5netcdf-1.4.0.dist-info/LICENSE,sha256=Xer1Jg8iL_n9Da0xt0S99blk6tsg9tee_JdgH1rWTjs,1505
- h5netcdf-1.4.0.dist-info/METADATA,sha256=PMKsb8Ehgh8_d-MMmVvBNP8tN6ugP54gagEAqjYTUos,13366
- h5netcdf-1.4.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- h5netcdf-1.4.0.dist-info/top_level.txt,sha256=Fb_KIpOE6MBqjSvxV1Ay7oYce1mdmQ1pO9JQJPDeGqg,9
- h5netcdf-1.4.0.dist-info/RECORD,,