h5netcdf 1.3.0__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.

@@ -15,7 +15,11 @@ from pytest import raises
 
 import h5netcdf
 from h5netcdf import legacyapi
-from h5netcdf.core import NOT_A_VARIABLE, CompatibilityError
+from h5netcdf.core import (
+    NOT_A_VARIABLE,
+    CompatibilityError,
+    VLType,
+)
 
 try:
     import h5pyd
@@ -103,18 +107,21 @@ _vlen_string = "foo"
 
 
 def is_h5py_char_working(tmp_netcdf, name):
-    h5 = get_hdf5_module(tmp_netcdf)
-    # https://github.com/Unidata/netcdf-c/issues/298
-    with h5.File(tmp_netcdf, "r") as ds:
-        v = ds[name]
-        try:
-            assert array_equal(v, _char_array)
-            return True
-        except Exception as e:
-            if re.match("^Can't read data", e.args[0]):
-                return False
-            else:
-                raise
+    if not isinstance(tmp_netcdf, h5py.File):
+        h5 = get_hdf5_module(tmp_netcdf)
+        # https://github.com/Unidata/netcdf-c/issues/298
+        with h5.File(tmp_netcdf, "r") as ds:
+            return is_h5py_char_working(ds, name)
+
+    v = tmp_netcdf[name]
+    try:
+        assert array_equal(v, _char_array)
+        return True
+    except Exception as e:
+        if re.match("^Can't read data", e.args[0]):
+            return False
+        else:
+            raise
 
 
 def write_legacy_netcdf(tmp_netcdf, write_module):
@@ -164,6 +171,16 @@ def write_legacy_netcdf(tmp_netcdf, write_module):
     v = ds.createVariable("var_len_str", str, ("x"))
     v[0] = "foo"
 
+    enum_dict = dict(one=1, two=2, three=3, missing=255)
+    enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict)
+    v = ds.createVariable(
+        "enum_var",
+        enum_type,
+        ("x",),
+        fill_value=enum_dict["missing"],
+    )
+    v[0:3] = [1, 2, 3]
+
     ds.close()
 
 
@@ -214,6 +231,13 @@ def write_h5netcdf(tmp_netcdf):
     v = ds.create_variable("var_len_str", ("x",), dtype=dt)
     v[0] = _vlen_string
 
+    enum_dict = dict(one=1, two=2, three=3, missing=255)
+    enum_type = ds.create_enumtype(np.uint8, "enum_t", enum_dict)
+    v = ds.create_variable(
+        "enum_var", ("x",), dtype=enum_type, fillvalue=enum_dict["missing"]
+    )
+    v[0:3] = [1, 2, 3]
+
     ds.close()
 
 
@@ -231,6 +255,7 @@ def read_legacy_netcdf(tmp_netcdf, read_module, write_module):
     )
     assert set(ds.variables) == set(
         [
+            "enum_var",
             "foo",
             "y",
             "z",
@@ -242,6 +267,8 @@ def read_legacy_netcdf(tmp_netcdf, read_module, write_module):
         ]
     )
 
+    assert set(ds.enumtypes) == set(["enum_t"])
+
     assert set(ds.groups) == set(["subgroup"])
     assert ds.parent is None
     v = ds.variables["foo"]
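
The enum round trip these hunks add can be reproduced in a few lines. A minimal sketch using the legacyapi calls shown in this diff (file name and values are illustrative, not taken from the test suite):

    import numpy as np
    from h5netcdf import legacyapi

    enum_dict = dict(one=1, two=2, three=3, missing=255)
    with legacyapi.Dataset("enum_demo.nc", "w") as ds:
        ds.createDimension("x", 4)
        enum_t = ds.createEnumType(np.uint8, "enum_t", enum_dict)
        # unset entries take the fill value and come back masked on read
        v = ds.createVariable("enum_var", enum_t, ("x",), fill_value=enum_dict["missing"])
        v[0:3] = [1, 2, 3]

    with legacyapi.Dataset("enum_demo.nc", "r") as ds:
        assert ds.enumtypes["enum_t"].enum_dict == enum_dict
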
@@ -325,6 +352,12 @@ def read_legacy_netcdf(tmp_netcdf, read_module, write_module):
     assert v.shape == (10,)
     assert "y" in ds.groups["subgroup"].dimensions
 
+    enum_dict = dict(one=1, two=2, three=3, missing=255)
+    enum_type = ds.enumtypes["enum_t"]
+    assert enum_type.enum_dict == enum_dict
+    v = ds.variables["enum_var"]
+    assert array_equal(v, np.ma.masked_equal([1, 2, 3, 255], 255))
+
     ds.close()
 
 
@@ -342,6 +375,7 @@ def read_h5netcdf(tmp_netcdf, write_module, decode_vlen_strings):
     )
     variables = set(
         [
+            "enum_var",
             "foo",
             "z",
             "intscalar",
@@ -418,7 +452,7 @@ def read_h5netcdf(tmp_netcdf, write_module, decode_vlen_strings):
     assert list(v.attrs) == []
 
     v = ds["var_len_str"]
-    assert h5py.check_dtype(vlen=v.dtype) == str
+    assert h5py.check_dtype(vlen=v.dtype) is str
     if getattr(ds, "decode_vlen_strings", True):
         assert v[0] == _vlen_string
     else:
@@ -440,6 +474,12 @@ def read_h5netcdf(tmp_netcdf, write_module, decode_vlen_strings):
     assert ds["/subgroup/y_var"].shape == (10,)
     assert ds["/subgroup"].dimensions["y"].size == 10
 
+    enum_dict = dict(one=1, two=2, three=3, missing=255)
+    enum_type = ds.enumtypes["enum_t"]
+    assert enum_type.enum_dict == enum_dict
+    v = ds.variables["enum_var"]
+    assert array_equal(v, np.ma.masked_equal([1, 2, 3, 255], 255))
+
     ds.close()
 
 
@@ -494,6 +534,16 @@ def test_fileobj(decode_vlen_strings):
     read_h5netcdf(fileobj, h5netcdf, decode_vlen_strings)
 
 
+def test_h5py_file_obj(tmp_local_netcdf, decode_vlen_strings):
+    with h5py.File(tmp_local_netcdf, "w") as h5py_f:
+        write_h5netcdf(h5py_f)
+        read_h5netcdf(h5py_f, h5netcdf, decode_vlen_strings)
+
+        # The h5py File object should still be open & usable, although the
+        # h5netcdf file object has been closed.
+        assert isinstance(h5py_f["foo"], h5py.Dataset)
+
+
 def test_repr(tmp_local_or_remote_netcdf):
     write_h5netcdf(tmp_local_or_remote_netcdf)
     f = h5netcdf.File(tmp_local_or_remote_netcdf, "a")
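
The new test_h5py_file_obj exercises handing an already-open h5py.File to the h5netcdf writer. A minimal sketch of that pattern, assuming h5netcdf 1.4's acceptance of pre-opened h5py handles (file name illustrative):

    import h5py
    import h5netcdf

    h5py_f = h5py.File("wrapped.nc", "w")
    with h5netcdf.File(h5py_f, "w") as ds:  # wrap the existing handle
        ds.dimensions["x"] = 2
        ds.create_variable("foo", ("x",), dtype="i8")

    # closing the h5netcdf wrapper leaves the h5py handle open and usable
    assert isinstance(h5py_f["foo"], h5py.Dataset)
    h5py_f.close()
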
@@ -645,8 +695,6 @@ def check_invalid_netcdf4(var, i):
 
 
 def test_invalid_netcdf4(tmp_local_or_remote_netcdf):
-    if tmp_local_or_remote_netcdf.startswith(remote_h5):
-        pytest.skip("netCDF4 package does not work with remote HDF5 files")
     h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
     with h5.File(tmp_local_or_remote_netcdf, "w") as f:
         var, var2 = create_invalid_netcdf_data()
@@ -714,8 +762,6 @@ def check_invalid_netcdf4_mixed(var, i):
 
 
 def test_invalid_netcdf4_mixed(tmp_local_or_remote_netcdf):
-    if tmp_local_or_remote_netcdf.startswith(remote_h5):
-        pytest.skip("netCDF4 package does not work with remote HDF5 files")
     h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
     with h5.File(tmp_local_or_remote_netcdf, "w") as f:
         var, var2 = create_invalid_netcdf_data()
@@ -790,15 +836,16 @@ def test_hierarchical_access_auto_create(tmp_local_or_remote_netcdf):
     ds.close()
 
 
-def test_Netcdf4Dimid(tmp_local_netcdf):
+def test_Netcdf4Dimid(tmp_local_or_remote_netcdf):
     # regression test for https://github.com/h5netcdf/h5netcdf/issues/53
-    with h5netcdf.File(tmp_local_netcdf, "w") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as f:
         f.dimensions["x"] = 1
         g = f.create_group("foo")
         g.dimensions["x"] = 2
         g.dimensions["y"] = 3
 
-    with h5py.File(tmp_local_netcdf, "r") as f:
+    h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
+    with h5.File(tmp_local_or_remote_netcdf, "r") as f:
         # all dimension IDs should be present exactly once
         dim_ids = {f[name].attrs["_Netcdf4Dimid"] for name in ["x", "foo/x", "foo/y"]}
         assert dim_ids == {0, 1, 2}
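
Several hunks in this release replace a hard-coded h5py.File with get_hdf5_module so the same test body runs against both local files and remote HSDS paths. The helper itself is not shown in this diff; a plausible sketch of its behavior, with remote_h5 assumed to be the tuple of remote URL prefixes used by the suite:

    import h5py

    try:
        import h5pyd
    except ImportError:
        h5pyd = None

    remote_h5 = ("http", "hdf5")  # assumed prefixes for remote HSDS resources

    def get_hdf5_module(resource):
        """Return h5pyd for remote resources, h5py otherwise (sketch)."""
        if isinstance(resource, str) and resource.startswith(remote_h5):
            return h5pyd
        return h5py
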
@@ -859,9 +906,6 @@ def test_failed_read_open_and_clean_delete(tmpdir):
 def test_create_variable_matching_saved_dimension(tmp_local_or_remote_netcdf):
     h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
 
-    # if h5 is not h5py:
-    #     pytest.xfail("https://github.com/h5netcdf/h5netcdf/issues/48")
-
     with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as f:
         f.dimensions["x"] = 2
         f.create_variable("y", data=[1, 2], dimensions=("x",))
@@ -885,11 +929,6 @@ def test_invalid_netcdf_error(tmp_local_or_remote_netcdf):
         f.create_variable(
             "lzf_compressed", data=[1], dimensions=("x"), compression="lzf"
         )
-        # invalid
-        with pytest.raises(h5netcdf.CompatibilityError):
-            f.create_variable("complex", data=1j)
-        with pytest.raises(h5netcdf.CompatibilityError):
-            f.attrs["complex_attr"] = 1j
         with pytest.raises(h5netcdf.CompatibilityError):
             f.create_variable("scaleoffset", data=[1], dimensions=("x",), scaleoffset=0)
 
@@ -1192,19 +1231,19 @@ def test_reading_special_datatype_created_with_c_api(tmp_local_netcdf):
         pass
 
 
-def test_nc4_non_coord(tmp_local_netcdf):
+def test_nc4_non_coord(tmp_local_or_remote_netcdf):
     # Here we generate a few variables and coordinates
     # The default should be to track the order of creation
     # Thus, on reopening the file, the order in which
     # the variables are listed should be maintained
     # y -- refers to the coordinate y
     # _nc4_non_coord_y -- refers to the data y
-    with h5netcdf.File(tmp_local_netcdf, "w") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as f:
         f.dimensions = {"x": None, "y": 2}
         f.create_variable("test", dimensions=("x",), dtype=np.int64)
         f.create_variable("y", dimensions=("x",), dtype=np.int64)
 
-    with h5netcdf.File(tmp_local_netcdf, "r") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as f:
         assert list(f.dimensions) == ["x", "y"]
         assert f.dimensions["x"].size == 0
         assert f.dimensions["x"].isunlimited()
@@ -1213,12 +1252,12 @@ def test_nc4_non_coord(tmp_local_netcdf):
         assert list(f.variables) == ["test", "y"]
         assert list(f._h5group.keys()) == ["x", "y", "test", "_nc4_non_coord_y"]
 
-    with h5netcdf.File(tmp_local_netcdf, "w") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as f:
         f.dimensions = {"x": None, "y": 2}
         f.create_variable("y", dimensions=("x",), dtype=np.int64)
         f.create_variable("test", dimensions=("x",), dtype=np.int64)
 
-    with h5netcdf.File(tmp_local_netcdf, "r") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as f:
         assert list(f.dimensions) == ["x", "y"]
         assert f.dimensions["x"].size == 0
         assert f.dimensions["x"].isunlimited()
@@ -1306,42 +1345,42 @@ def test_create_attach_scales_legacyapi(tmp_local_netcdf):
     create_attach_scales(tmp_local_netcdf, legacyapi)
 
 
-def test_detach_scale(tmp_local_netcdf):
-    with h5netcdf.File(tmp_local_netcdf, "w") as ds:
+def test_detach_scale(tmp_local_or_remote_netcdf):
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
         ds.dimensions["x"] = 2
         ds.dimensions["y"] = 2
 
-    with h5netcdf.File(tmp_local_netcdf, "a") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "a") as ds:
         ds.create_variable("test", dimensions=("x",), dtype=np.int64)
 
-    with h5netcdf.File(tmp_local_netcdf, "r") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
         refs = ds._h5group["x"].attrs.get("REFERENCE_LIST", False)
         assert len(refs) == 1
         for (ref, dim), name in zip(refs, ["/test"]):
             assert dim == 0
             assert ds._root._h5file[ref].name == name
 
-    with h5netcdf.File(tmp_local_netcdf, "a") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "a") as ds:
         ds.dimensions["x"]._detach_scale()
 
-    with h5netcdf.File(tmp_local_netcdf, "r") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
         refs = ds._h5group["x"].attrs.get("REFERENCE_LIST", False)
         assert not refs
 
 
-def test_is_scale(tmp_local_netcdf):
-    with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+def test_is_scale(tmp_local_or_remote_netcdf):
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
         ds.createDimension("x", 10)
-    with legacyapi.Dataset(tmp_local_netcdf, "r") as ds:
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
         assert ds.dimensions["x"]._isscale
 
 
-def test_get_dim_scale_refs(tmp_local_netcdf):
-    with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+def test_get_dim_scale_refs(tmp_local_or_remote_netcdf):
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
         ds.createDimension("x", 10)
         ds.createVariable("test0", "i8", ("x",))
         ds.createVariable("test1", "i8", ("x",))
-    with legacyapi.Dataset(tmp_local_netcdf, "r") as ds:
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
         refs = ds.dimensions["x"]._scale_refs
         assert ds._h5file[refs[0][0]] == ds["test0"]._h5ds
         assert ds._h5file[refs[1][0]] == ds["test1"]._h5ds
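
test_detach_scale above inspects the REFERENCE_LIST attribute that HDF5 maintains on dimension-scale datasets. The same mechanics can be observed with plain h5py (file name illustrative):

    import h5py
    import numpy as np

    with h5py.File("scales_demo.h5", "w") as f:
        x = f.create_dataset("x", data=np.arange(10))
        x.make_scale("x")  # mark "x" as a dimension scale
        var = f.create_dataset("var", data=np.zeros(10))
        var.dims[0].attach_scale(x)  # HDF5 records a REFERENCE_LIST on "x"

    with h5py.File("scales_demo.h5", "r") as f:
        assert "REFERENCE_LIST" in f["x"].attrs
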
@@ -1498,14 +1537,14 @@ def test_dimensions(tmp_local_netcdf, read_write_matrix):
     )
 
 
-def test_no_circular_references(tmp_local_netcdf):
+def test_no_circular_references(tmp_local_or_remote_netcdf):
     # https://github.com/h5py/h5py/issues/2019
-    with h5netcdf.File(tmp_local_netcdf, "w") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
         ds.dimensions["x"] = 2
         ds.dimensions["y"] = 2
 
     gc.collect()
-    with h5netcdf.File(tmp_local_netcdf, "r") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
         refs = gc.get_referrers(ds)
         for ref in refs:
             print(ref)
@@ -1713,13 +1752,13 @@ def test_group_names(tmp_local_netcdf):
         assert ds[name].name == name
 
 
-def test_legacyapi_endianess(tmp_local_netcdf):
+def test_legacyapi_endianess(tmp_local_or_remote_netcdf):
     # https://github.com/h5netcdf/h5netcdf/issues/15
     big = legacyapi._check_return_dtype_endianess("big")
     little = legacyapi._check_return_dtype_endianess("little")
     native = legacyapi._check_return_dtype_endianess("native")
 
-    with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
         ds.createDimension("x", 4)
         # test creating variable using endian keyword argument
         v = ds.createVariable("big", int, ("x"), endian="big")
@@ -1729,25 +1768,27 @@ def test_legacyapi_endianess(tmp_local_netcdf):
     v = ds.createVariable("native", int, ("x"), endian="native")
     v[...] = 65535
 
-    with h5py.File(tmp_local_netcdf, "r") as ds:
+    h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
+    with h5.File(tmp_local_or_remote_netcdf, "r") as ds:
         assert ds["big"].dtype.byteorder == big
         assert ds["little"].dtype.byteorder == little
         assert ds["native"].dtype.byteorder == native
 
-    with h5netcdf.File(tmp_local_netcdf, "r") as ds:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
         assert ds["big"].dtype.byteorder == big
         assert ds["little"].dtype.byteorder == little
         assert ds["native"].dtype.byteorder == native
 
-    with legacyapi.Dataset(tmp_local_netcdf, "r") as ds:
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
         assert ds["big"].dtype.byteorder == big
         assert ds["little"].dtype.byteorder == little
         assert ds["native"].dtype.byteorder == native
 
-    with netCDF4.Dataset(tmp_local_netcdf, "r") as ds:
-        assert ds["big"].dtype.byteorder == big
-        assert ds["little"].dtype.byteorder == little
-        assert ds["native"].dtype.byteorder == native
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            assert ds["big"].dtype.byteorder == big
+            assert ds["little"].dtype.byteorder == little
+            assert ds["native"].dtype.byteorder == native
 
 
 def test_bool_slicing_length_one_dim(tmp_local_netcdf):
@@ -1781,9 +1822,9 @@ def test_bool_slicing_length_one_dim(tmp_local_netcdf):
         ds["hello"][bool_slice, :]
 
 
-def test_fancy_indexing(tmp_local_netcdf):
+def test_fancy_indexing(tmp_local_or_remote_netcdf):
     # regression test for https://github.com/pydata/xarray/issues/7154
-    with h5netcdf.legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+    with h5netcdf.legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
         ds.createDimension("x", None)
         ds.createDimension("y", None)
         ds.createVariable("hello", int, ("x", "y"), fill_value=0)
@@ -1791,7 +1832,7 @@ def test_fancy_indexing(tmp_local_netcdf):
         ds.createVariable("hello2", int, ("x", "y"))
         ds["hello2"][:10, :20] = np.arange(10 * 20, dtype="int").reshape((10, 20))
 
-    with legacyapi.Dataset(tmp_local_netcdf, "a") as ds:
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "a") as ds:
         np.testing.assert_array_equal(ds["hello"][1, [7, 8, 9]], [17, 18, 19])
         np.testing.assert_array_equal(ds["hello"][1, [9, 10, 11]], [19, 0, 0])
         np.testing.assert_array_equal(ds["hello"][1, slice(9, 12)], [19, 0, 0])
@@ -1877,9 +1918,9 @@ def test_h5netcdf_chunking(tmp_local_netcdf):
     assert chunks_h5netcdf == (5, 5, 5, 10)
 
 
-def test_create_invalid_netcdf_catch_error(tmp_local_netcdf):
+def test_create_invalid_netcdf_catch_error(tmp_local_or_remote_netcdf):
     # see https://github.com/h5netcdf/h5netcdf/issues/138
-    with h5netcdf.File(tmp_local_netcdf, "w") as f:
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as f:
         try:
             f.create_variable("test", ("x", "y"), data=np.ones((10, 10), dtype="bool"))
         except CompatibilityError:
@@ -2186,3 +2227,512 @@ def test_ros3():
     f = h5netcdf.File(fname, "r", driver="ros3")
     assert "Temperature" in list(f)
     f.close()
+
+
+def test_user_type_errors_new_api(tmp_local_or_remote_netcdf):
+    enum_dict1 = dict(one=1, two=2, three=3, missing=254)
+    enum_dict2 = dict(one=0, two=2, three=3, missing=255)
+    with h5netcdf.File("test.nc", "w") as ds0:
+        enum_type_ext = ds0.create_enumtype(np.uint8, "enum_t", enum_dict1)
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.dimensions = {"enum_dim": 4}
+        g = ds.create_group("subgroup")
+        enum_type = ds.create_enumtype(np.uint8, "enum_t", enum_dict1)
+
+        if tmp_local_or_remote_netcdf.startswith(remote_h5):
+            testcontext = pytest.raises(RuntimeError, match="Conflict")
+        else:
+            testcontext = pytest.raises(
+                (KeyError, TypeError), match="name already exists"
+            )
+        with testcontext:
+            ds.create_enumtype(np.uint8, "enum_t", enum_dict2)
+
+        enum_type2 = g.create_enumtype(np.uint8, "enum_t2", enum_dict2)
+        g.create_enumtype(np.uint8, "enum_t", enum_dict2)
+        with pytest.raises(TypeError, match="Please provide h5netcdf user type"):
+            ds.create_variable(
+                "enum_var1",
+                ("enum_dim",),
+                dtype=enum_type._h5ds,
+                fillvalue=enum_dict1["missing"],
+            )
+        with pytest.raises(TypeError, match="is not committed into current file"):
+            ds.create_variable(
+                "enum_var2",
+                ("enum_dim",),
+                dtype=enum_type_ext,
+                fillvalue=enum_dict1["missing"],
+            )
+        with pytest.raises(TypeError, match="is not accessible in current group"):
+            ds.create_variable(
+                "enum_var3",
+                ("enum_dim",),
+                dtype=enum_type2,
+                fillvalue=enum_dict2["missing"],
+            )
+        with pytest.raises(TypeError, match="Another dtype with same name"):
+            g.create_variable(
+                "enum_var4",
+                ("enum_dim",),
+                dtype=enum_type,
+                fillvalue=enum_dict2["missing"],
+            )
+
+
+def test_user_type_errors_legacyapi(tmp_local_or_remote_netcdf):
+    enum_dict1 = dict(one=1, two=2, three=3, missing=254)
+    enum_dict2 = dict(one=0, two=2, three=3, missing=255)
+    with legacyapi.Dataset("test.nc", "w") as ds0:
+        enum_type_ext = ds0.createEnumType(np.uint8, "enum_t", enum_dict1)
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.createDimension("enum_dim", 4)
+        g = ds.createGroup("subgroup")
+        enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict1)
+        if tmp_local_or_remote_netcdf.startswith(remote_h5):
+            testcontext = pytest.raises(RuntimeError, match="Conflict")
+        else:
+            testcontext = pytest.raises(
+                (KeyError, TypeError), match="name already exists"
+            )
+        with testcontext:
+            ds.createEnumType(np.uint8, "enum_t", enum_dict1)
+
+        enum_type2 = g.createEnumType(np.uint8, "enum_t2", enum_dict2)
+        g.create_enumtype(np.uint8, "enum_t", enum_dict2)
+        with pytest.raises(TypeError, match="Please provide h5netcdf user type"):
+            ds.createVariable(
+                "enum_var1",
+                enum_type._h5ds,
+                ("enum_dim",),
+                fill_value=enum_dict1["missing"],
+            )
+        with pytest.raises(TypeError, match="is not committed into current file"):
+            ds.createVariable(
+                "enum_var2",
+                enum_type_ext,
+                ("enum_dim",),
+                fill_value=enum_dict1["missing"],
+            )
+        with pytest.raises(TypeError, match="is not accessible in current group"):
+            ds.createVariable(
+                "enum_var3",
+                enum_type2,
+                ("enum_dim",),
+                fill_value=enum_dict2["missing"],
+            )
+        with pytest.raises(TypeError, match="Another dtype with same name"):
+            g.createVariable(
+                "enum_var4",
+                enum_type,
+                ("enum_dim",),
+                fill_value=enum_dict2["missing"],
+            )
+
+
+def test_enum_type_errors_new_api(tmp_local_or_remote_netcdf):
+    enum_dict1 = dict(one=1, two=2, three=3, missing=254)
+    enum_dict2 = dict(one=0, two=2, three=3, missing=255)
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.dimensions = {"enum_dim": 4}
+        enum_type = ds.create_enumtype(np.uint8, "enum_t", enum_dict1)
+        enum_type2 = ds.create_enumtype(np.uint8, "enum_t2", enum_dict2)
+
+        # 1.
+        with pytest.warns(UserWarning, match="default fill_value 0 which IS defined"):
+            ds.create_variable(
+                "enum_var1",
+                ("enum_dim",),
+                dtype=enum_type2,
+            )
+        # 2. is for legacyapi only
+        # 3.
+        with pytest.warns(
+            UserWarning, match="default fill_value 0 which IS NOT defined"
+        ):
+            ds.create_variable(
+                "enum_var2",
+                ("enum_dim",),
+                dtype=enum_type,
+            )
+        # 4.
+        with pytest.warns(
+            UserWarning, match="with specified fill_value 0 which IS NOT"
+        ):
+            ds.create_variable(
+                "enum_var3",
+                ("enum_dim",),
+                dtype=enum_type,
+                fillvalue=0,
+            )
+        # 5.
+        with pytest.raises(
+            ValueError, match="with specified fill_value 100 which IS NOT"
+        ):
+            ds.create_variable(
+                "enum_var4",
+                ("enum_dim",),
+                dtype=enum_type,
+                fillvalue=100,
+            )
+
+
+def test_enum_type_errors_legacyapi(tmp_local_or_remote_netcdf):
+    enum_dict1 = dict(one=1, two=2, three=3, missing=254)
+    enum_dict2 = dict(one=0, two=2, three=3, missing=255)
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.createDimension("enum_dim", 4)
+        enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict1)
+        enum_type2 = ds.createEnumType(np.uint8, "enum_t2", enum_dict2)
+
+        # 1.
+        with pytest.warns(UserWarning, match="default fill_value 255 which IS defined"):
+            ds.createVariable(
+                "enum_var1",
+                enum_type2,
+                ("enum_dim",),
+            )
+        # 2.
+        with pytest.raises(ValueError, match="default fill_value 255 which IS NOT"):
+            ds.createVariable(
+                "enum_var2",
+                enum_type,
+                ("enum_dim",),
+            )
+        # 3. is only for new api
+        # 4.
+        with pytest.warns(
+            UserWarning, match="interpreted as '_UNDEFINED' by netcdf-c."
+        ):
+            ds.createVariable(
+                "enum_var3",
+                enum_type,
+                ("enum_dim",),
+                fill_value=0,
+            )
+        # 5.
+        with pytest.raises(
+            ValueError, match="with specified fill_value 100 which IS NOT"
+        ):
+            ds.createVariable("enum_var4", enum_type, ("enum_dim",), fill_value=100)
+
+
+def test_enum_type(tmp_local_or_remote_netcdf):
+    # test EnumType
+    enum_dict = dict(one=1, two=2, three=3, missing=255)
+    enum_dict2 = dict(one=1, two=2, three=3, missing=254)
+
+    # first with new API
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.dimensions = {"enum_dim": 4}
+        ds.create_enumtype(np.uint8, "enum_t2", enum_dict2)
+        enum_type = ds.create_enumtype(np.uint8, "enum_t", enum_dict)
+        v = ds.create_variable(
+            "enum_var", ("enum_dim",), dtype=enum_type, fillvalue=enum_dict["missing"]
+        )
+        v[0:3] = [1, 2, 3]
+        with pytest.raises(ValueError) as e:
+            v[3] = 5
+        assert "assign illegal value(s)" in e.value.args[0]
+
+    # check, if new API can read them
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+        enum_type = ds.enumtypes["enum_t"]
+        enum_var = ds["enum_var"]
+        assert enum_type.enum_dict == enum_dict
+        assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+        assert enum_var.attrs["_FillValue"] == 255
+        assert enum_var.datatype == enum_type
+        assert enum_var.datatype.name == "enum_t"
+
+    # check if legacyapi can read them
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+        enum_type = ds.enumtypes["enum_t"]
+        enum_var = ds["enum_var"]
+        assert enum_type.enum_dict == enum_dict
+        assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+        assert enum_var.attrs["_FillValue"] == 255
+        assert enum_var.datatype == enum_type
+        assert enum_var.datatype.name == "enum_t"
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        # check if netCDF4-python can read them
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            enum_type = ds.enumtypes["enum_t"]
+            enum_var = ds["enum_var"]
+            assert enum_type.enum_dict == enum_dict
+            assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+            assert enum_var._FillValue == 255
+            assert repr(enum_var.datatype) == repr(enum_type)
+            assert enum_var.datatype.name == "enum_t"
+
+    # second with legacyapi
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.createDimension("enum_dim", 4)
+        enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict)
+        v = ds.createVariable(
+            "enum_var", enum_type, ("enum_dim",), fill_value=enum_dict["missing"]
+        )
+        v[0:3] = [1, 2, 3]
+        with pytest.raises(ValueError) as e:
+            v[3] = 5
+        assert "assign illegal value(s)" in e.value.args[0]
+
+    # check, if new API can read them
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+        enum_type = ds.enumtypes["enum_t"]
+        enum_var = ds["enum_var"]
+        assert enum_type.enum_dict == enum_dict
+        assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+        assert enum_var.attrs["_FillValue"] == 255
+        assert enum_var.datatype == enum_type
+        assert enum_var.datatype.name == "enum_t"
+
+    # check if legacyapi can read them
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+        enum_type = ds.enumtypes["enum_t"]
+        enum_var = ds["enum_var"]
+        assert enum_type.enum_dict == enum_dict
+        assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+        assert enum_var.attrs["_FillValue"] == 255
+        assert enum_var.datatype == enum_type
+        assert enum_var.datatype.name == "enum_t"
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        # check if netCDF4-python can read them
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            enum_type = ds.enumtypes["enum_t"]
+            enum_var = ds["enum_var"]
+            assert enum_type.enum_dict == enum_dict
+            assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+            assert enum_var._FillValue == 255
+            assert repr(enum_var.datatype) == repr(enum_type)
+            assert enum_var.datatype.name == "enum_t"
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        # third with netCDF4 api
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+            ds.createDimension("enum_dim", 4)
+            enum_type = ds.createEnumType(np.uint8, "enum_t", enum_dict)
+            v = ds.createVariable(
+                "enum_var", enum_type, ("enum_dim",), fill_value=enum_dict["missing"]
+            )
+            v[0:3] = [1, 2, 3]
+            with pytest.raises(
+                ValueError, match="assign illegal value to Enum variable"
+            ):
+                v[3] = 5
+
+        # check, if new API can read them
+        with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+            enum_type = ds.enumtypes["enum_t"]
+            enum_var = ds["enum_var"]
+            assert enum_type.enum_dict == enum_dict
+            assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+            assert enum_var.attrs["_FillValue"] == 255
+            assert enum_var.datatype == enum_type
+            assert enum_var.datatype.name == "enum_t"
+
+        # check if legacyapi can read them
+        with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            enum_type = ds.enumtypes["enum_t"]
+            enum_var = ds["enum_var"]
+            assert enum_type.enum_dict == enum_dict
+            assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+            assert enum_var.attrs["_FillValue"] == 255
+            assert enum_var.datatype == enum_type
+            assert enum_var.datatype.name == "enum_t"
+
+        # check if netCDF4-python can read them
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            enum_type = ds.enumtypes["enum_t"]
+            enum_var = ds["enum_var"]
+            assert enum_type.enum_dict == enum_dict
+            assert array_equal(enum_var, np.ma.masked_equal([1, 2, 3, 255], 255))
+            assert enum_var._FillValue == 255
+            assert repr(enum_var.datatype) == repr(enum_type)
+            assert enum_var.datatype.name == "enum_t"
+
+
+@pytest.mark.parametrize("dtype", ["int", "int8", "uint16", "float32", "int64"])
+def test_vltype_creation(tmp_local_or_remote_netcdf, netcdf_write_module, dtype):
+    # skip for netCDF4 writer for remote hsds files
+    if netcdf_write_module == netCDF4 and tmp_local_or_remote_netcdf.startswith(
+        remote_h5
+    ):
+        pytest.skip()
+
+    with netcdf_write_module.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.createVLType(dtype, "vlen_t")
+
+    with h5netcdf.File(tmp_local_or_remote_netcdf, "r") as ds:
+        vlen_type = ds.vltypes["vlen_t"]
+        assert isinstance(vlen_type, VLType)
+        assert h5py.check_vlen_dtype(vlen_type.dtype) == np.dtype(dtype)
+        assert vlen_type.name == "vlen_t"
+
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+        vlen_type = ds.vltypes["vlen_t"]
+        assert isinstance(vlen_type, legacyapi.VLType)
+        assert h5py.check_vlen_dtype(vlen_type.dtype) == np.dtype(dtype)
+        assert vlen_type.name == "vlen_t"
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            vlen_type = ds.vltypes["vlen_t"]
+            assert isinstance(vlen_type, netCDF4.VLType)
+            assert vlen_type.dtype == np.dtype(dtype)
+            assert vlen_type.name == "vlen_t"
+
+
+def test_compoundtype_creation(tmp_local_or_remote_netcdf, netcdf_write_module):
+    # compound type is created with array of chars
+    compound = np.dtype(
+        [
+            ("time", np.int32),
+            ("station_name", "S1", 10),
+            ("temperature", np.float32),
+            ("pressure", np.float32),
+        ]
+    )
+
+    # data is filled with fixed strings
+    compound2 = np.dtype(
+        [
+            ("time", np.int32),
+            ("station_name", "S10"),
+            ("temperature", np.float32),
+            ("pressure", np.float32),
+        ]
+    )
+    cmp_array = np.array(
+        [
+            (0, *["Boulder"], 0.0, 0.0),
+            (1, *["New York"], 2.0, 3.0),
+            (2, *["Denver"], 4.0, 6.0),
+            (3, *["Washington"], 5.0, 7.0),
+            (4, *["Wachtberg"], 6.0, 8.0),
+        ],
+        dtype=compound2,
+    )
+    if (
+        netcdf_write_module.__name__ == "netCDF4"
+        and tmp_local_or_remote_netcdf.startswith(remote_h5)
+    ):
+        pytest.skip("does not work for netCDF4")
+    with netcdf_write_module.Dataset(tmp_local_or_remote_netcdf, "w") as ds:
+        ds.createDimension("x", 5)
+        ds.createGroup("test")
+        compound_t = ds.createCompoundType(compound, "cmp_t")
+        var = ds.createVariable("data", compound_t, ("x",))
+        var[:] = cmp_array
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+            cmptype = ds.cmptypes["cmp_t"]
+            assert isinstance(cmptype, netCDF4.CompoundType)
+            assert cmptype.name == "cmp_t"
+            assert array_equal(ds["data"][:], cmp_array)
+            assert ds["data"].datatype == cmptype.dtype
+
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+        cmptype = ds.cmptypes["cmp_t"]
+        assert isinstance(cmptype, h5netcdf.legacyapi.CompoundType)
+        assert cmptype.name == "cmp_t"
+        assert array_equal(ds["data"][:], cmp_array)
+        assert ds["data"].datatype == cmptype
+        assert ds["data"].dtype == cmptype.dtype
+
+
+@pytest.mark.skipif(
+    version.parse(netCDF4.__version__) < version.parse("1.7.0"),
+    reason="does not work before netCDF4 v1.7.0",
+)
+def test_nc_complex_compatibility(tmp_local_or_remote_netcdf, netcdf_write_module):
+    # native complex
+    complex_array = np.array([0 + 0j, 1 + 0j, 0 + 1j, 1 + 1j, 0.25 + 0.75j])
+    # compound complex
+    complex128 = np.dtype(
+        {
+            "names": ["r", "i"],
+            "formats": ["f8", "f8"],
+            "offsets": [0, 8],
+            "itemsize": 16,
+            "aligned": True,
+        }
+    )
+    cdata = np.array(
+        [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (1.0, 1.0), (0.25, 0.75)], dtype=complex128
+    )
+    kwargs = {}
+    if (
+        netcdf_write_module.__name__ == "netCDF4"
+        and tmp_local_or_remote_netcdf.startswith(remote_h5)
+    ):
+        pytest.skip("does not work for netCDF4")
+
+    if netcdf_write_module.__name__ == "netCDF4":
+        kwargs.update(auto_complex=True)
+    with netcdf_write_module.Dataset(tmp_local_or_remote_netcdf, "w", **kwargs) as ds:
+        ds.createDimension("x", size=len(complex_array))
+        var = ds.createVariable("data", "c16", ("x",))
+        var[:] = complex_array
+
+    with legacyapi.Dataset(tmp_local_or_remote_netcdf, "r") as ds:
+        dtype = ds.cmptypes["_PFNC_DOUBLE_COMPLEX_TYPE"]
+        assert isinstance(dtype, h5netcdf.legacyapi.CompoundType)
+        assert dtype.name == "_PFNC_DOUBLE_COMPLEX_TYPE"
+        assert array_equal(ds["data"][:], complex_array)
+
+    if not tmp_local_or_remote_netcdf.startswith(remote_h5):
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r", auto_complex=True) as ds:
+            dtype = ds.cmptypes["_PFNC_DOUBLE_COMPLEX_TYPE"]
+            assert isinstance(dtype, netCDF4._netCDF4.CompoundType)
+            assert array_equal(ds["data"][:], complex_array)
+
+        with netCDF4.Dataset(tmp_local_or_remote_netcdf, "r", auto_complex=False) as ds:
+            dtype = ds.cmptypes["_PFNC_DOUBLE_COMPLEX_TYPE"]
+            assert isinstance(dtype, netCDF4._netCDF4.CompoundType)
+            assert array_equal(ds["data"][:], cdata)
+
+
+@pytest.mark.skipif(
+    version.parse(netCDF4.__version__) < version.parse("1.7.0"),
+    reason="does not work before netCDF4 v1.7.0",
+)
+def test_complex_type_creation_errors(tmp_local_netcdf):
+    complex_array = np.array([0 + 0j, 1 + 0j, 0 + 1j, 1 + 1j, 0.25 + 0.75j])
+
+    with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+        ds.createDimension("x", size=len(complex_array))
+        with pytest.raises(TypeError, match="data type 'c4' not understood"):
+            ds.createVariable("data", "c4", ("x",))
+
+    if "complex256" not in np.sctypeDict:
+        pytest.skip("numpy 'complex256' dtype not available")
+    with legacyapi.Dataset(tmp_local_netcdf, "w") as ds:
+        ds.createDimension("x", size=len(complex_array))
+        with pytest.raises(
+            TypeError,
+            match="Currently only 'complex64' and 'complex128' dtypes are allowed.",
+        ):
+            ds.createVariable("data", "c32", ("x",))
+
+
+def test_hsds(hsds_up):
+    # test hsds setup/write
+    if without_h5pyd:
+        pytest.skip("h5pyd package not available")
+    elif not hsds_up:
+        pytest.skip("HSDS service not running")
+    rnd = "".join(random.choice(string.ascii_uppercase) for _ in range(5))
+    fname = (
+        "hdf5://" + "home" + "/" + env["HS_USERNAME"] + "/" + "testfile" + rnd + ".nc"
+    )
+    with h5netcdf.File(fname, "w") as ds:
+        g = ds.create_group("test")
+        g.dimensions["x"] = None
+        g.create_variable("var1", ("x",), dtype="i8")
+
+    with h5netcdf.File(fname, "r") as ds:
+        print(ds["test"]["var1"])
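
The complex-number tests above track netCDF4 1.7's auto_complex support; complex variables are stored as the compound type _PFNC_DOUBLE_COMPLEX_TYPE. A minimal round trip mirroring the calls in test_nc_complex_compatibility (file name illustrative):

    import numpy as np
    from h5netcdf import legacyapi

    complex_array = np.array([0 + 0j, 1 + 0j, 0 + 1j, 1 + 1j, 0.25 + 0.75j])

    with legacyapi.Dataset("complex_demo.nc", "w") as ds:
        ds.createDimension("x", size=len(complex_array))
        var = ds.createVariable("data", "c16", ("x",))  # complex128
        var[:] = complex_array

    with legacyapi.Dataset("complex_demo.nc", "r") as ds:
        # the data lands in a compound type named "_PFNC_DOUBLE_COMPLEX_TYPE"
        assert "_PFNC_DOUBLE_COMPLEX_TYPE" in ds.cmptypes
        assert np.array_equal(ds["data"][:], complex_array)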