h5netcdf 1.6.3__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of h5netcdf might be problematic. Click here for more details.

h5netcdf/utils.py CHANGED
@@ -1,5 +1,12 @@
1
1
  from collections.abc import Mapping
2
2
 
3
+ import h5py
4
+ import numpy as np
5
+
6
+
7
class CompatibilityError(Exception):
    """Error signalled when a feature outside the NetCDF4 API is used."""
9
+
3
10
 
4
11
  class Frozen(Mapping):
5
12
  """Wrapper around an object implementing the mapping interface to make it
@@ -24,3 +31,201 @@ class Frozen(Mapping):
24
31
 
25
32
    def __repr__(self):
        # Show the wrapper's class name around the wrapped mapping's repr,
        # e.g. "Frozen({'a': 1})".
        return f"{type(self).__name__}({self._mapping!r})"
34
+
35
+
36
def _create_classic_string_dataset(gid, name, value, shape, chunks):
    """Write a string dataset to an HDF5 object with control over the strpad.

    Uses the low-level h5py API so the string padding (strpad) can be set
    explicitly, which the high-level API does not expose.

    Parameters
    ----------
    gid : h5py.h5g.GroupID
        Group ID where to write the dataset.
    name : str
        Dataset name.
    value : str
        Dataset contents to be written.  If None, only the dataset is
        created and nothing is written.
    shape : tuple
        Dataset shape.  An empty tuple creates a scalar dataset.
    chunks : tuple or None
        Chunk shape.  A non-None value makes the dataset resizable along
        its first dimension.
    """
    # Fixed-size (1 byte) null-terminated C string element type.
    tid = h5py.h5t.C_S1.copy()
    tid.set_size(1)
    tid.set_strpad(h5py.h5t.STR_NULLTERM)
    kwargs = {}
    if len(shape) == 0:
        # Scalar dataset: a simple dataspace is not applicable.
        sid = h5py.h5s.create(h5py.h5s.SCALAR)
    else:
        if chunks is not None:
            # for resizing, we need to provide maxshape
            # with unlimited as the first dimension
            maxshape = (h5py.h5s.UNLIMITED,) + shape[1:]
            # and we also need to create a chunked dataset
            dcpl = h5py.h5p.create(h5py.h5p.DATASET_CREATE)
            # apply the caller-provided chunk layout
            dcpl.set_chunk(chunks)
            kwargs["dcpl"] = dcpl
        else:
            maxshape = shape
        sid = h5py.h5s.create_simple(shape, maxshape)
    did = h5py.h5d.create(gid, name.encode(), tid, sid, **kwargs)
    if value is not None:
        # np.bytes_ produces a fixed-width byte string compatible with the
        # dataset's string type.
        value = np.array(np.bytes_(value))
        did.write(h5py.h5s.ALL, h5py.h5s.ALL, value, mtype=did.get_type())
75
+
76
+
77
def _create_enum_dataset(group, name, shape, enum_type, fillvalue=None):
    """Create a dataset with a transient enum dtype.

    Parameters
    ----------
    group : h5netcdf.Group
    name : str
        dataset name
    shape : tuple
        dataset shape
    enum_type : h5netcdf.EnumType

    Keyword arguments
    -----------------
    fillvalue : optional scalar fill value
    """
    # Clone the committed enum type so the dataset carries a transient copy.
    dtype_id = enum_type._h5ds.id.copy()
    dataspace_id = h5py.h5s.create_simple(shape)

    create_plist = h5py.h5p.create(h5py.h5p.DATASET_CREATE)
    if fillvalue is not None:
        create_plist.set_fill_value(np.array(fillvalue, dtype=enum_type.dtype))

    h5py.h5d.create(
        group._h5group.id,
        name.encode("ascii"),
        dtype_id,
        dataspace_id,
        dcpl=create_plist,
    )
    dtype_id.close()
103
+
104
+
105
def _create_enum_dataset_attribute(ds, name, value, enum_type):
    """Attach an enum-typed attribute to a dataset.

    Parameters
    ----------
    ds : h5netcdf.Variable
    name : str
        attribute name
    value : array of ints
        enum values
    enum_type : h5netcdf.EnumType
    """
    # One-element dataspace; the attribute type is a copy of the committed enum.
    attr_space = h5py.h5s.create_simple((1,))
    attr_tid = enum_type._h5ds.id.copy()

    attr_id = h5py.h5a.create(ds._h5ds.id, name.encode("ascii"), attr_tid, attr_space)
    attr_id.write(value, mtype=attr_id.get_type())
122
+
123
+
124
def _make_enum_tid(enum_dict, basetype):
    """Build a transient HDF5 enum type identifier.

    Parameters
    ----------
    enum_dict : dict
        dictionary with Enum field/value pairs
    basetype : np.dtype
        basetype of the enum
    """
    base_tid = h5py.h5t.py_create(np.dtype(basetype))
    enum_tid = h5py.h5t.enum_create(base_tid)
    # Insert members ordered by their numeric value.
    for member, number in sorted(enum_dict.items(), key=lambda item: item[1]):
        enum_tid.enum_insert(member.encode("utf-8"), int(number))
    return enum_tid
140
+
141
+
142
def _commit_enum_type(group, name, enum_dict, basetype):
    """Commit an enum type under *name* in the given group.

    Parameters
    ----------
    group : h5netcdf.Group
    name : str
        name under which the type is committed
    enum_dict : dict
        dictionary with Enum field/value pairs
    basetype : np.dtype
        basetype of the enum
    """
    committed_tid = _make_enum_tid(enum_dict, basetype)
    committed_tid.commit(group._h5group.id, name.encode("ascii"))
    committed_tid.close()
158
+
159
+
160
def _create_string_attribute(gid, name, value):
    """Create a string attribute on an HDF5 object with control over the strpad.

    Parameters
    ----------
    gid : h5py.h5g.GroupID
        Group ID where to write the attribute.
    name : str
        Attribute name.
    value : str
        Attributes contents to be written.
    """
    # Encode str input, choosing the character set from its content:
    # pure-ASCII text is stored as ASCII, anything else as UTF-8.
    charset = h5py.h5t.CSET_ASCII
    if isinstance(value, str):
        if value.isascii():
            value = value.encode("ascii")
        else:
            value = value.encode("utf-8")
            charset = h5py.h5t.CSET_UTF8

    # Fixed-length, null-terminated string type sized to the payload.
    tid = h5py.h5t.C_S1.copy()
    tid.set_size(len(value))
    tid.set_strpad(h5py.h5t.STR_NULLTERM)
    tid.set_cset(charset)

    sid = h5py.h5s.create(h5py.h5s.SCALAR)

    # Attributes cannot be overwritten in place; drop any existing one first.
    encoded_name = name.encode()
    if h5py.h5a.exists(gid, encoded_name):
        h5py.h5a.delete(gid, encoded_name)

    aid = h5py.h5a.create(gid, encoded_name, tid, sid)
    aid.write(np.array(value), mtype=aid.get_type())
196
+
197
+
198
def h5dump(fn: str, dataset=None, strict=False):
    """Call h5dump on an h5netcdf file and return normalized output.

    Parameters
    ----------
    fn : str
        Path of the file to dump.
    dataset : str, optional
        If given, restrict the dump to this dataset (h5dump ``-d`` option).
    strict : bool, optional
        If False (default), STRPAD and CSET declarations are also masked
        so outputs written with different string settings compare equal.

    Returns
    -------
    str
        h5dump output with non-deterministic components masked out.
    """
    import re
    import subprocess

    arglist = ["h5dump", "-A"]
    if dataset is not None:
        # Pass the flag and its value as separate argv entries; a single
        # "-d <name>" token would reach h5dump verbatim (embedded space
        # included) and not be parsed as an option plus argument.
        arglist.extend(["-d", dataset])
    arglist.append(fn)

    out = subprocess.run(arglist, check=False, capture_output=True).stdout.decode()

    # Strip non-deterministic components (object ids differ between runs).
    out = re.sub(r"DATASET [0-9]+ ", "DATASET XXXX ", out)

    # Strip the _NCProperties header, which includes software versions which won't match.
    pattern = (
        r'ATTRIBUTE "_NCProperties"'  # match the attribute start
        r"\s*{"  # opening brace
        r"(?:[^{}]*{[^{}]*}[^{}]*)*"  # match multiple inner braces
        r"}"  # closing brace
    )
    out = re.sub(
        pattern,
        'ATTRIBUTE "_NCProperties" { ... }',
        out,
        flags=re.DOTALL,
    )

    if not strict:
        out = re.sub(r"STRPAD H5T_STR_NULL(?:TERM|PAD);", "STRPAD { ... };", out)
        out = re.sub(r"CSET H5T_CSET_(?:UTF8|ASCII);", "CSET { ... };", out)

    return out
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: h5netcdf
3
- Version: 1.6.3
3
+ Version: 1.7.0
4
4
  Summary: netCDF4 via h5py
5
5
  Author-email: Stephan Hoyer <shoyer@gmail.com>, Kai Mühlbauer <kmuehlbauer@wradlib.org>
6
6
  Maintainer-email: h5netcdf developers <devteam@h5netcdf.org>
@@ -124,10 +124,10 @@ Usage
124
124
  -----
125
125
 
126
126
  h5netcdf has two APIs, a new API and a legacy API. Both interfaces currently
127
- reproduce most of the features of the netCDF interface, with the notable
128
- exception of support for operations that rename or delete existing objects.
129
- We simply haven't gotten around to implementing this yet. Patches
130
- would be very welcome.
127
+ reproduce most of the features of the netCDF interface, including the ability
128
+ to write NETCDF4 and NETCDF4_CLASSIC formatted files. Support for operations
129
+ that rename or delete existing objects is still missing, and patches would be
130
+ very welcome.
131
131
 
132
132
  New API
133
133
  ~~~~~~~
@@ -0,0 +1,16 @@
1
+ h5netcdf/__init__.py,sha256=Y0EBCcmlJctwl1kCmj7yLijTVy9AioBTr2091vInAtw,456
2
+ h5netcdf/_version.py,sha256=oGRWiKvEGHesjf5wCNHGVlYfAA3dInDJeL5EiMaru6A,704
3
+ h5netcdf/attrs.py,sha256=yPwGhaEhr7TtihH--Tx24j7gKgRDQIXs-mtx6ngIZRo,4494
4
+ h5netcdf/core.py,sha256=Fg_68fF9vcyozlk-2vO1Qa0U_oRbvWfE976ax--CvxM,69698
5
+ h5netcdf/dimensions.py,sha256=pxtt3ID55bTMp6djIpAka3RgXzioIUs9qBBgMqd9HhU,8336
6
+ h5netcdf/legacyapi.py,sha256=MIZlht5Ad4hDFF1Slz2vXmKkgbv7Fhhf2YwNIe16Lfk,7682
7
+ h5netcdf/utils.py,sha256=btxKI-VZP-Wn0Rk_wmnhnUls-mrxO42w0s_uX_su4FI,6528
8
+ h5netcdf/tests/conftest.py,sha256=qS7XTZxos0NIRFtMCJwVEyx0paZw8Le1loPK1MtoQ_0,2350
9
+ h5netcdf/tests/pytest.ini,sha256=ruJxrLdCIA4bCPVuPQjxsLSlvVxuIsIakK6iQOmz-ak,107
10
+ h5netcdf/tests/test_h5netcdf.py,sha256=s5m3N3huMsxmIAN_W78-YnawB-lKWQPmQgSutO6bwzA,116652
11
+ h5netcdf-1.7.0.dist-info/licenses/AUTHORS.txt,sha256=LTKzUh9o4Wc_oT3aFC48cyDCCP6tdm6VEV_6RrNy4uo,272
12
+ h5netcdf-1.7.0.dist-info/licenses/LICENSE,sha256=Xer1Jg8iL_n9Da0xt0S99blk6tsg9tee_JdgH1rWTjs,1505
13
+ h5netcdf-1.7.0.dist-info/METADATA,sha256=lNPv7WhB1VSjMJYsmC0qJX_wRvRP_deMsIXidu0Ch3A,13396
14
+ h5netcdf-1.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
15
+ h5netcdf-1.7.0.dist-info/top_level.txt,sha256=Fb_KIpOE6MBqjSvxV1Ay7oYce1mdmQ1pO9JQJPDeGqg,9
16
+ h5netcdf-1.7.0.dist-info/RECORD,,
@@ -1,16 +0,0 @@
1
- h5netcdf/__init__.py,sha256=Y0EBCcmlJctwl1kCmj7yLijTVy9AioBTr2091vInAtw,456
2
- h5netcdf/_version.py,sha256=RrM0JkMSEquWJIGj8RCqoST__VpnXFzYf3k-uS1FCko,511
3
- h5netcdf/attrs.py,sha256=4IvV4ULLWkz4igFsvu9S2LB745wgUKrIdIuSeO5kpX8,3581
4
- h5netcdf/core.py,sha256=xO62cSVvXXd-EQ7xuLjZC_gc99ou0naNmyorW7bKZxs,64150
5
- h5netcdf/dimensions.py,sha256=ln6n3J-BE2PlJDPlr9YiqPYHhXRUHVH3j_7_1O0VGS0,7802
6
- h5netcdf/legacyapi.py,sha256=MIZlht5Ad4hDFF1Slz2vXmKkgbv7Fhhf2YwNIe16Lfk,7682
7
- h5netcdf/utils.py,sha256=6E-HAIE0ONMyL4SxI3oUyQvrDgDWifR5EPde91V9rT0,674
8
- h5netcdf/tests/conftest.py,sha256=4fLa2qoB8br2UpokaOn1-mjHgqVUgVV0G3QLIUzfbZo,2133
9
- h5netcdf/tests/pytest.ini,sha256=ruJxrLdCIA4bCPVuPQjxsLSlvVxuIsIakK6iQOmz-ak,107
10
- h5netcdf/tests/test_h5netcdf.py,sha256=4VslXYKJNx6vOhhyc4_-mCc7M4W_l8fftZPoMk7x-50,110275
11
- h5netcdf-1.6.3.dist-info/licenses/AUTHORS.txt,sha256=LTKzUh9o4Wc_oT3aFC48cyDCCP6tdm6VEV_6RrNy4uo,272
12
- h5netcdf-1.6.3.dist-info/licenses/LICENSE,sha256=Xer1Jg8iL_n9Da0xt0S99blk6tsg9tee_JdgH1rWTjs,1505
13
- h5netcdf-1.6.3.dist-info/METADATA,sha256=SctpwYrbXsezPM2YQwNsV2dkXn3mEB5PSR7zwdvWRtI,13387
14
- h5netcdf-1.6.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
15
- h5netcdf-1.6.3.dist-info/top_level.txt,sha256=Fb_KIpOE6MBqjSvxV1Ay7oYce1mdmQ1pO9JQJPDeGqg,9
16
- h5netcdf-1.6.3.dist-info/RECORD,,