pninexus 3.3.0 (cp310-cp310-manylinux_2_35_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pninexus/__init__.py +6 -0
- pninexus/filters/__init__.py +7 -0
- pninexus/filters/libh5LZF.so +0 -0
- pninexus/filters/libh5blosc.so +0 -0
- pninexus/filters/libh5bshuf.so +0 -0
- pninexus/filters/libh5bz2.so +0 -0
- pninexus/filters/libh5jpeg.so +0 -0
- pninexus/filters/libh5lz4.so +0 -0
- pninexus/filters/libh5mafisc.so +0 -0
- pninexus/filters/libh5zfp.so +0 -0
- pninexus/filters/liblzf_filter.so +0 -0
- pninexus/h5cpp/__init__.py +28 -0
- pninexus/h5cpp/_attribute.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_dataspace.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_datatype.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_file.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_filter.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_h5cpp.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_node.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/_property.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus/h5cpp/attribute/__init__.py +65 -0
- pninexus/h5cpp/dataspace/__init__.py +18 -0
- pninexus/h5cpp/datatype/__init__.py +183 -0
- pninexus/h5cpp/file/__init__.py +15 -0
- pninexus/h5cpp/filter/__init__.py +37 -0
- pninexus/h5cpp/node/__init__.py +466 -0
- pninexus/h5cpp/property/__init__.py +71 -0
- pninexus/nexus/__init__.py +192 -0
- pninexus/nexus/_nexus.cpython-310-x86_64-linux-gnu.so +0 -0
- pninexus-3.3.0.dist-info/LICENSE +339 -0
- pninexus-3.3.0.dist-info/METADATA +184 -0
- pninexus-3.3.0.dist-info/RECORD +79 -0
- pninexus-3.3.0.dist-info/WHEEL +5 -0
- pninexus-3.3.0.dist-info/top_level.txt +1 -0
- pninexus.libs/libaec-df4ff8a4.so.0.0.12 +0 -0
- pninexus.libs/libblosc-1a5323ad.so.1.21.1 +0 -0
- pninexus.libs/libboost_python310-80079748.so.1.74.0 +0 -0
- pninexus.libs/libboost_regex-e2b8b679.so.1.74.0 +0 -0
- pninexus.libs/libbrotlicommon-abf86ae9.so.1.0.9 +0 -0
- pninexus.libs/libbrotlidec-db9dbda7.so.1.0.9 +0 -0
- pninexus.libs/libbz2-5e516f77.so.1.0.4 +0 -0
- pninexus.libs/libcom_err-f196091d.so.2.1 +0 -0
- pninexus.libs/libcrypto-ee446395.so.3 +0 -0
- pninexus.libs/libcurl-4a475a33.so.4.7.0 +0 -0
- pninexus.libs/libffi-247da4d5.so.8.1.0 +0 -0
- pninexus.libs/libgmp-4dc20a90.so.10.4.1 +0 -0
- pninexus.libs/libgnutls-029add8e.so.30.31.0 +0 -0
- pninexus.libs/libgssapi_krb5-ef9ec823.so.2.2 +0 -0
- pninexus.libs/libh5cpp-cf5d864b.so.0.7.1 +0 -0
- pninexus.libs/libhdf5_serial-12e4461a.so.103.3.0 +0 -0
- pninexus.libs/libhdf5_serial_hl-701cc4c7.so.100.1.4 +0 -0
- pninexus.libs/libhogweed-47d56894.so.6.4 +0 -0
- pninexus.libs/libicudata-c1404396.so.70.1 +0 -0
- pninexus.libs/libicui18n-24fa78ca.so.70.1 +0 -0
- pninexus.libs/libicuuc-4683f362.so.70.1 +0 -0
- pninexus.libs/libidn2-1420c60a.so.0.3.7 +0 -0
- pninexus.libs/libjpeg-2cfbcf67.so.8.2.2 +0 -0
- pninexus.libs/libk5crypto-6625483f.so.3.1 +0 -0
- pninexus.libs/libkeyutils-ad20d5fb.so.1.9 +0 -0
- pninexus.libs/libkrb5-8b125d47.so.3.3 +0 -0
- pninexus.libs/libkrb5support-9061fd80.so.0.1 +0 -0
- pninexus.libs/liblber-2-07024854.5.so.0.1.13 +0 -0
- pninexus.libs/libldap-2-9e9c4a26.5.so.0.1.13 +0 -0
- pninexus.libs/liblz4-65f47a3d.so.1.9.3 +0 -0
- pninexus.libs/liblzf-2b4a8e1a.so.1.5 +0 -0
- pninexus.libs/libnettle-2d3bda6c.so.8.4 +0 -0
- pninexus.libs/libnghttp2-1cc16764.so.14.20.1 +0 -0
- pninexus.libs/libp11-kit-d2b01eaa.so.0.3.0 +0 -0
- pninexus.libs/libpninexus-82078339.so.3.3.0 +0 -0
- pninexus.libs/libpsl-95ca960e.so.5.3.2 +0 -0
- pninexus.libs/librtmp-2401c4fc.so.1 +0 -0
- pninexus.libs/libsasl2-344870a9.so.2.0.25 +0 -0
- pninexus.libs/libsnappy-0b6e39db.so.1.1.8 +0 -0
- pninexus.libs/libssh-c00fffad.so.4.8.7 +0 -0
- pninexus.libs/libssl-4194be63.so.3 +0 -0
- pninexus.libs/libsz-53f3c9c7.so.2.0.1 +0 -0
- pninexus.libs/libtasn1-5982aae4.so.6.6.2 +0 -0
- pninexus.libs/libunistring-9c28d595.so.2.2.0 +0 -0
- pninexus.libs/libzstd-5df4f4df.so.1.4.8 +0 -0
--- /dev/null
+++ pninexus/h5cpp/node/__init__.py
@@ -0,0 +1,466 @@
+from __future__ import print_function
+from pninexus.h5cpp._h5cpp import Path
+from pninexus.h5cpp import property
+from pninexus.h5cpp import dataspace
+from pninexus.h5cpp import datatype
+from pninexus.h5cpp.filter import ExternalFilters
+import numpy
+import sys
+# from collections import OrderedDict
+
+#
+# import enumeration wrappers
+#
+from pninexus.h5cpp._node import Type
+from pninexus.h5cpp._node import LinkType
+
+#
+# import node classes
+#
+from pninexus.h5cpp._node import Node
+from pninexus.h5cpp._node import GroupView
+from pninexus.h5cpp._node import NodeView
+from pninexus.h5cpp._node import LinkView
+from pninexus.h5cpp._node import Group
+from pninexus.h5cpp._node import Dataset
+from pninexus.h5cpp._node import LinkTarget
+from pninexus.h5cpp._node import Link
+from pninexus.h5cpp._node import RecursiveNodeIterator
+
+#
+# import node related functions
+#
+from pninexus.h5cpp._node import is_dataset
+from pninexus.h5cpp._node import is_group
+from pninexus.h5cpp._node import get_node_
+
+from pninexus.h5cpp._node import _copy
+from pninexus.h5cpp._node import _link
+from pninexus.h5cpp._node import _move
+from pninexus.h5cpp._node import _remove
+
+try:
+    from pninexus.h5cpp._node import VirtualDataset
+    VDSAvailable = True
+except Exception:
+    VDSAvailable = False
+
+
+if sys.version_info > (3,):
+    unicode = str
+
+
+def copy(node, base, path=None, link_creation_list=property.LinkCreationList(),
+         object_copy_list=property.ObjectCopyList()):
+    """Copy an object within the HDF5 tree
+
+    Copies an existing object `node` to a new location. The new location is
+    determined by `base` and `path` argument. If `path` is not given a
+    copy of the original object will be created with the same name under
+    `base`. If `path` is given, it determines the new path and thus name
+    of the copied object relative to the `base` object.
+
+    The behavior of the copy operation can be controlled by a link and
+    object copy property list which can be passed as optional arguments.
+
+    :param Node node: the object to copy
+    :param Group base: the base group for the new location
+    :param pninexus.h5cpp.Path path: optional HDF5 path determining the final
+                                     location of the copied object
+    :param LinkCreationList link_creation_list: optional link creation
+                                                property list
+    :param ObjectCopyList object_copy_list: optional object copy property list
+    :raise RuntimeError: in case of errors
+    """
+
+    if path is not None:
+        _copy(node, base, path, object_copy_list, link_creation_list)
+    else:
+        _copy(node, base, object_copy_list, link_creation_list)
+
+
+def move(node, base, path=None, link_creation_list=property.LinkCreationList(),
+         link_access_list=property.LinkAccessList()):
+    """Moving a node within the HDF5 tree
+
+    Move an instance of :py:class:`Node` (dataset or group) to a new location.
+    The new location is determined by the `base` and `path` argument.
+    If `path` is not given `node` is moved below the `base` using the same
+    link name as it has under its original group.
+    If `path` is given the new location is determined by this path relative
+    to the `base` group.
+
+    Technically this function does not move any data but, like on a filesystem,
+    only links are altered.
+
+    :param Node node: the node (dataset or group) to move
+    :param Group base: the base group where to move the node
+    :param Path path: optional HDF5 path determining the new location of the
+                      node to be moved relative to the `base` group
+    :param LinkCreationList link_creation_list: optional link creation
+                                                property list
+    :param LinkAccessList link_access_list: optional link access property list
+    """
+
+    if path is not None:
+        _move(node, base, path, link_creation_list, link_access_list)
+    else:
+        _move(node, base, link_creation_list, link_access_list)
+
+
+def remove(node=None, base=None, path=None,
+           link_access_list=property.LinkAccessList()):
+    """Remove a node from the HDF5 node tree
+
+    This function can be used in two modes:
+
+    * either the node to remove is referenced directly by `node`
+    * or by `base` and `path`.
+
+    .. attention::
+
+        It is important that this function does not remove any data from the
+        file. It only removes the link to the given node. An object is
+        considered deleted if no link leading to this node exists. However,
+        the file size will not change. In order to remove all the data
+        associated with this node `h5repack` has to be used which will
+        simply not copy nodes without links to the new file.
+
+
+    :param Node node: the node to remove
+    :param Group base: base group from which to search
+    :param Path path: HDF5 path to the object to remove
+    :param LinkAccessList link_access_list: optional link access property list
+    :raises TypeError: if any of the arguments is not of appropriate type
+    :raises RuntimeError: in case of any other error
+    """
+
+    if not isinstance(link_access_list, property.LinkAccessList):
+        raise TypeError(
+            "The 'link_access_list' must be an instance of a link access "
+            "property list!")
+
+    if node is not None:
+        if not isinstance(node, Node):
+            raise TypeError(
+                "The 'node' argument must be an instance of `Node`!")
+
+        _remove(node, link_access_list)
+
+    elif base is not None and path is not None:
+
+        if not isinstance(base, Group):
+            raise TypeError("The 'base' argument must be a Group instance!")
+
+        if not isinstance(path, Path):
+            raise TypeError(
+                "The 'path' argument must be an instance of an HDF5 path!")
+
+        _remove(base, path, link_access_list)
+
+    else:
+        raise RuntimeError(
+            "You have to provide either `node` argument or the `base` "
+            "and `path` argument!")
+
+
+def link(target,
+         link_base,
+         link_path,
+         target_file=None,
+         link_creation_list=property.LinkCreationList(),
+         link_access_list=property.LinkAccessList()):
+    """Create a new link
+
+    Create a new soft link to a node referenced by `target` under `link_path`
+    relative to `link_base`. If `target_file` is given, an external link
+    is created instead of a soft link.
+
+    .. attention::
+
+        The target object does not need to exist at the time the link is
+        created. Only when the new link should be dereferenced the target
+        object has to exist.
+
+    :param Node/Path target: the target for the new link
+    :param Group link_base: the base for the new link
+    :param Path link_path: the path to the new link relative to the `link_base`
+    :param LinkCreationList link_creation_list: optional reference to
+                                                a link creation property list
+    :param LinkAccessList link_access_list: optional reference to a link
+                                            access property list
+    :raises TypeError: if any of the arguments does not match the required type
+    :raises RuntimeError: in the case of any other error
+    """
+
+    if not isinstance(link_creation_list, property.LinkCreationList):
+        raise TypeError(
+            "`link_creation_list` must be an instance of a link creation "
+            "property list!")
+
+    if not isinstance(link_access_list, property.LinkAccessList):
+        raise TypeError(
+            "`link_access_list` must be an instance of a link access "
+            "property list!")
+
+    if not isinstance(link_base, Group):
raise TypeError("`link_base` must be an instance of `Gruop`!")
+
+    if not isinstance(link_path, Path):
+        raise TypeError("`link_path` must be an instance of an HDF5 path!")
+
+    if target_file is not None:
+
+        _link(target_file, target, link_base, link_path, link_creation_list,
+              link_access_list)
+    else:
+
+        _link(target, link_base, link_path, link_creation_list,
+              link_access_list)
+
+
+def selection_to_shape(selection):
+    """Convert a selection to a numpy array shape
+
+    This utility function converts an HDF5 selection to a tuple which can
+    be used as a numpy array shape. This function performs some kind of index
+    reduction: the resulting shape is the minimum shape required to store the
+    data referenced by the selection. This means that all unnecessary
+    dimensions with only a single element are removed.
+
+    For instance
+    [1,1,1,1] -> [1]
+    [1,2,3,1] -> [2,3]
+
+    """
+
+    if isinstance(selection, dataspace.Hyperslab):
+        shape = []
+        size = 1
+        for blocks, counts in zip(selection.block(), selection.count()):
+            size *= blocks * counts
+            shape.append(blocks * counts)
+
+        if size == 1:
+            #
+            # if the total number of elements in the selection is 1
+            # the shape is always (1,)
+            # no matter how many dimensions are in the selection.
+            #
+            return (1,)
+        elif len(shape) > 1:
+            shape = [s for s in shape if s != 1]
+
+        return shape
+    elif isinstance(selection, dataspace.Points):
+        return [selection.points]
+    else:
+        raise TypeError(
+            "Shape conversion currently only works for Hyperslabs or Points")
+
+
+def dataset_write(self, data, selection=None):
+    """ write data to a dataset
+
+    Writes `data` to a dataset
+
+    :param object data: Python object with data to write
+    :param pninexus.h5cpp.dataspace.Selection selection: an optional selection
+    :raises RuntimeError: in case of a failure
+    """
+
+    #
+    # in case that the parameter passed is not a numpy array we
+    # have to create one from it
+    #
+    if not isinstance(data, numpy.ndarray):
+        data = numpy.array(data)
+
+    #
+    # if the data is a unicode numpy array we have to convert it to a
+    # simple string array
+    if data.dtype.kind == 'U':
+        try:
+            data = data.astype('S')
+        except Exception:
+            if isinstance(data, numpy.ndarray) and data.shape:
+                shape = data.shape
+                if len(shape) > 1:
+                    data = data.flatten()
+                data = numpy.array(
+                    [bytes(unicode(dt).encode('utf-8')) for dt in data])
+                if len(shape) > 1:
+                    data = data.reshape(shape)
+            else:
+                data = numpy.array(unicode(data).encode('utf-8'))
+    #
+    # determine memory datatype and dataspace
+    # - if the file type is a variable length string we have to adjust the
+    # memory type accordingly
+    memory_space = dataspace.Simple(data.shape)
+    memory_type = datatype.kFactory.create(data.dtype)
+
+    if isinstance(self.datatype, datatype.String):
+        if self.datatype.is_variable_length:
+            memory_type = self.datatype
+
+    # if the data is a bool numpy array we have to convert it to an
+    # int array
+    #
+    if data.dtype == 'bool':
+        data = data.astype("int8")
+
+    #
+    # get the file dataspace
+    #
+    file_space = self.dataspace
+
+    if selection is not None:
+        file_space.selection(dataspace.SelectionOperation.SET, selection)
+
+    self._write(data, memory_type, memory_space, file_space)
+
+
+def dataset_read(self, data=None, selection=None):
+    """ read data from a dataset
+
+    Reads `data` from a dataset
+
+    :param object data: Python object with data to read
+    :param pninexus.h5cpp.dataspace.Selection selection: an optional selection
+    :returns: read data
+    :raises RuntimeError: in case of a failure
+    """
+
+    memory_space = None
+    memory_type = None
+    file_space = self.dataspace
+
+    if selection is not None:
+        file_space.selection(dataspace.SelectionOperation.SET, selection)
+
+    if data is not None:
+        #
+        # if data has been provided by the user we have to determine the
+        # datatype and dataspace for the memory representation
+        #
+        if not isinstance(data, numpy.ndarray):
+            raise TypeError(
+                "Inplace reading is only supported for numpy arrays!")
+
+        memory_space = dataspace.Simple(data.shape)
+        memory_type = datatype.kFactory.create(data.dtype)
+
+        if isinstance(self.datatype, datatype.String):
+            if self.datatype.is_variable_length:
+                memory_type = datatype.String.variable()
+
+    else:
+        #
+        # if no data was provided by the user we can safely take the
+        # dataspace and datatype from the dataset in the file
+        #
+        memory_type = self.datatype
+
+        if selection is not None:
+            shape = selection_to_shape(selection)
+            memory_space = dataspace.Simple(shape)
+        else:
+            memory_space = file_space
+            shape = (1,)
+            if file_space.type == dataspace.Type.SIMPLE:
+                shape = dataspace.Simple(file_space).current_dimensions
+
+        #
+        # create an empty numpy array to which we read the data
+        #
+        data = numpy.empty(shape, dtype=datatype.to_numpy(memory_type))
+
+    data = self._read(data, memory_type, memory_space, file_space)
+
+    if data.dtype.kind == 'S':
+        try:
+            data = data.astype('U')
+        except Exception:
+            print(data)
+
+    return data
+
+
+def dataset_filters(self):
+    """ read filters from a dataset
+
+    Reads filters from a dataset
+
+    :returns: a list of filters
+
+    :raises RuntimeError: in case of a failure
+    """
+
+    efilters = ExternalFilters()
+    efilters.fill(self.creation_list)
+    return efilters
+
+
+def group_get_dataset(self, path, lapl=None):
+    if isinstance(path, str):
+        path = Path(path)
+    if lapl is None:
+        return self.get_dataset_(path)
+    else:
+        return self.get_dataset_(path, lapl)
+
+
+def group_has_dataset(self, path, lapl=None):
+    if isinstance(path, str):
+        path = Path(path)
+    if lapl is None:
+        return self.has_dataset_(path)
+    else:
+        return self.has_dataset_(path, lapl)
+
+
+def group_get_group(self, path, lapl=None):
+    if isinstance(path, str):
+        path = Path(path)
+    if lapl is None:
+        return self.get_group_(path)
+    else:
+        return self.get_group_(path, lapl)
+
+
+def group_has_group(self, path, lapl=None):
+    if isinstance(path, str):
+        path = Path(path)
+    if lapl is None:
+        return self.has_group_(path)
+    else:
+        return self.has_group_(path, lapl)
+
+
+def get_node(base, path, lapl=None):
+    if isinstance(path, str):
+        path = Path(path)
+    if lapl is None:
+        return get_node_(base, path)
+    else:
+        return get_node_(base, path, lapl)
+
+
+Dataset.write = dataset_write
+Dataset.read = dataset_read
+Dataset.filters = dataset_filters
+
+Group.get_dataset = group_get_dataset
+Group.has_dataset = group_has_dataset
+Group.get_group = group_get_group
+Group.has_group = group_has_group
+
+
+__all__ = ["Type", "LinkType", "Node", "GroupView", "NodeView", "LinkView",
+           "Group", "Dataset", "LinkTarget", "Link", "RecursiveNodeIterator",
+           "is_dataset", "is_group",
+           "get_node", "copy", "move", "remove", "link", "selection_to_shape"]
+
+if VDSAvailable:
+    __all__.extend(["VirtualDataset"])
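
The hunk above re-exports the `_node` extension classes and then patches convenience helpers onto them: module-level `copy`, `move`, `remove`, `link`, `get_node` and `selection_to_shape`, plus `Dataset.write`/`Dataset.read`/`Dataset.filters` and the `Group.get_dataset`/`has_dataset`/`get_group`/`has_group` lookups. A minimal usage sketch follows; everything outside this diff (`pninexus.h5cpp.file.create`, `AccessFlags.TRUNCATE`, `File.root`, `datatype.kInt32` and the `Dataset`/`Hyperslab` constructor signatures) is assumed from the wider pninexus API rather than shown here, and the file name `example.h5` is hypothetical.

```python
# Illustrative sketch only. The file-level entry points (file.create,
# AccessFlags.TRUNCATE, File.root), datatype.kInt32 and the Dataset/Hyperslab
# constructor signatures are assumptions about the wider pninexus.h5cpp API;
# they are not part of the diff above. "example.h5" is a hypothetical name.
import numpy
from pninexus.h5cpp import file as h5file
from pninexus.h5cpp import node, dataspace, datatype
from pninexus.h5cpp._h5cpp import Path

# selection_to_shape drops singleton dimensions: [1, 2, 3, 1] -> [2, 3]
slab = dataspace.Hyperslab(offset=(0, 0, 0, 0), block=(1, 2, 3, 1))
print(node.selection_to_shape(slab))

f = h5file.create("example.h5", h5file.AccessFlags.TRUNCATE)
root = f.root()

# round trip through the patched Dataset.write / Dataset.read helpers
dset = node.Dataset(root, Path("counts"),
                    datatype.kInt32, dataspace.Simple((10,)))
dset.write(numpy.arange(10, dtype="int32"))
print(dset.read())

# node-level helpers defined in this module
grp = node.Group(root, Path("measurement"))
node.copy(dset, grp)                                     # -> /measurement/counts
node.link(Path("/counts"), root, Path("counts_link"))    # soft link
print(root.has_dataset("counts"), grp.has_dataset("counts"))

f.close()
```
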
--- /dev/null
+++ pninexus/h5cpp/property/__init__.py
@@ -0,0 +1,71 @@
+#
+# import enumerations
+#
+
+from pninexus.h5cpp._property import DatasetFillValueStatus
+from pninexus.h5cpp._property import DatasetFillTime
+from pninexus.h5cpp._property import DatasetAllocTime
+from pninexus.h5cpp._property import DatasetLayout
+from pninexus.h5cpp._property import LibVersion
+from pninexus.h5cpp._property import CloseDegree
+from pninexus.h5cpp._property import CopyFlag
+
+#
+# import utility classes
+#
+from pninexus.h5cpp._property import CopyFlags
+from pninexus.h5cpp._property import ChunkCacheParameters
+from pninexus.h5cpp._property import CreationOrder
+
+#
+# import property list classes
+#
+from pninexus.h5cpp._property import List
+from pninexus.h5cpp._property import DatasetTransferList
+from pninexus.h5cpp._property import FileAccessList
+from pninexus.h5cpp._property import FileCreationList
+from pninexus.h5cpp._property import FileMountList
+from pninexus.h5cpp._property import LinkAccessList
+from pninexus.h5cpp._property import ObjectCopyList
+from pninexus.h5cpp._property import ObjectCreationList
+from pninexus.h5cpp._property import StringCreationList
+from pninexus.h5cpp._property import DatasetAccessList
+from pninexus.h5cpp._property import DatatypeAccessList
+from pninexus.h5cpp._property import GroupAccessList
+from pninexus.h5cpp._property import DatasetCreationList
+from pninexus.h5cpp._property import GroupCreationList
+from pninexus.h5cpp._property import TypeCreationList
+from pninexus.h5cpp._property import AttributeCreationList
+from pninexus.h5cpp._property import LinkCreationList
+
+try:
+    from pninexus.h5cpp._property import VirtualDataView
+    from pninexus.h5cpp._property import VirtualDataMap
+    from pninexus.h5cpp._property import VirtualDataMaps
+    VDSAvailable = True
+except Exception:
+    VDSAvailable = False
+
+
+def CopyFlag_or(self, b):
+    if isinstance(b, (CopyFlag, CopyFlags)):
+        return CopyFlags(self) | b
+    else:
+        raise TypeError("RHS of | operator must be a CopyFlag instance!")
+
+
+CopyFlag.__or__ = CopyFlag_or
+
+
+__all__ = ["CopyFlag", "DatasetFillValueStatus", "DatasetFillTime",
+           "DatasetAllocTime", "DatasetLayout", "LibVersion", "CopyFlag",
+           "CopyFlags", "ChunkCacheParameters", "CreationOrder", "List",
+           "DatasetTransferList", "FileAccessList", "FileCreationList",
+           "FileMountList", "LinkAccessList", "ObjectCopyList", "CloseDegree",
+           "ObjectCreationList", "StringCreationList", "DatasetAccessList",
+           "DatatypeAccessList", "GroupAccessList", "DatasetCreationList",
+           "GroupCreationList", "TypeCreationList", "AttributeCreationList",
+           "LinkCreationList", "VDSAvailable"]
+
+if VDSAvailable:
+    __all__.extend(["VirtualDataView", "VirtualDataMap", "VirtualDataMaps"])
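
The property hunk mostly re-exports the `_property` bindings; its only Python-side addition is the `CopyFlag.__or__` overload, which lets individual `CopyFlag` values be combined into a `CopyFlags` set. A short sketch follows; the flag member names used here (`SHALLOW_HIERARCHY`, `WITHOUT_ATTRIBUTES`) are assumed to exist in the `_property` extension and are not shown in this diff.

```python
# Sketch of the CopyFlag.__or__ overload added above. The enum member names
# SHALLOW_HIERARCHY and WITHOUT_ATTRIBUTES are assumptions taken from the
# underlying h5cpp CopyFlag enumeration, not from this diff.
from pninexus.h5cpp import property

flags = (property.CopyFlag.SHALLOW_HIERARCHY
         | property.CopyFlag.WITHOUT_ATTRIBUTES)
print(isinstance(flags, property.CopyFlags))   # the | overload promotes to CopyFlags

# property lists from this module are what the node helpers accept, e.g.
# pninexus.h5cpp.node.copy(..., object_copy_list=property.ObjectCopyList())
lcpl = property.LinkCreationList()
lapl = property.LinkAccessList()
```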