h5yaml 0.2.0__tar.gz → 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {h5yaml-0.2.0 → h5yaml-0.2.1}/.coverage +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/.github/workflows/python-package.yml +2 -1
- {h5yaml-0.2.0 → h5yaml-0.2.1}/PKG-INFO +15 -3
- {h5yaml-0.2.0 → h5yaml-0.2.1}/README.md +14 -2
- {h5yaml-0.2.0 → h5yaml-0.2.1}/coverage.xml +136 -163
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/h5_testing.yaml +23 -1
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/nc_testing.yaml +13 -1
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/conf_from_yaml.py +3 -3
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/yaml_h5.py +16 -18
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/yaml_nc.py +19 -16
- {h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_from_yaml.py +16 -1
- {h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_yaml_h5.py +10 -10
- {h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_yaml_nc.py +16 -12
- h5yaml-0.2.0/klad.h5 +0 -0
- h5yaml-0.2.0/src/h5yaml/lib/chunksizes.py +0 -73
- {h5yaml-0.2.0 → h5yaml-0.2.1}/.gitignore +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/.pre-commit-config.yaml +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/LICENSE +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/MANIFEST.in +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/pyproject.toml +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/h5_compound.yaml +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/h5_unsupported.yaml +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/__init__.py +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/lib/__init__.py +0 -0
- {h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/lib/adjust_attr.py +0 -0
{h5yaml-0.2.0 → h5yaml-0.2.1}/.coverage
Binary file
{h5yaml-0.2.0 → h5yaml-0.2.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: h5yaml
-Version: 0.2.0
+Version: 0.2.1
 Summary: Use YAML configuration file to generate HDF5/netCDF4 formated files.
 Project-URL: Homepage, https://github.com/rmvanhees/h5_yaml
 Project-URL: Source, https://github.com/rmvanhees/h5_yaml
@@ -61,7 +61,19 @@ However, package `netCDF4` has some limitations, which `h5py` has not, for examp
 not allow variable-length variables to have a compound data-type.
 
 ## Installation
-
+The package `h5yaml` is available from PyPI. To install it use `pip`:
+
+> $ pip install [--user] h5yaml
+
+The module `h5yaml` requires Python3.10+ and Python modules: h5py (v3.14+), netCDF4 (v1.7+) and numpy (v2.0+).
+
+**Note**: the packages `h5py` and `netCDF4` come with their own HDF5 libraries. If these are different then they may
+collide and result in a *''HDF5 error''*.
+If this is the case then you have to install the development packages of HDF5 and netCDF4 (or compile them from source).
+And reinstall `h5py` and `netCDF4` using the commands:
+
+> $ pip uninstall h5py; pip install --no-binary=h5py h5py
+> $ pip uninstall netCDF4; pip install --no-binary=netCDF4 netCDF4
 
 ## Usage
 
@@ -176,7 +188,7 @@ The YAML file should be structured as follows:
 
 ## Support [TBW]
 
-##
+## Road map
 
 * Release v0.1 : stable API to read your YAML files and generate the HDF5/netCDF4 file
 
{h5yaml-0.2.0 → h5yaml-0.2.1}/README.md

@@ -29,7 +29,19 @@ However, package `netCDF4` has some limitations, which `h5py` has not, for examp
 not allow variable-length variables to have a compound data-type.
 
 ## Installation
-
+The package `h5yaml` is available from PyPI. To install it use `pip`:
+
+> $ pip install [--user] h5yaml
+
+The module `h5yaml` requires Python3.10+ and Python modules: h5py (v3.14+), netCDF4 (v1.7+) and numpy (v2.0+).
+
+**Note**: the packages `h5py` and `netCDF4` come with their own HDF5 libraries. If these are different then they may
+collide and result in a *''HDF5 error''*.
+If this is the case then you have to install the development packages of HDF5 and netCDF4 (or compile them from source).
+And reinstall `h5py` and `netCDF4` using the commands:
+
+> $ pip uninstall h5py; pip install --no-binary=h5py h5py
+> $ pip uninstall netCDF4; pip install --no-binary=netCDF4 netCDF4
 
 ## Usage
 
@@ -144,7 +156,7 @@ The YAML file should be structured as follows:
 
 ## Support [TBW]
 
-##
+## Road map
 
 * Release v0.1 : stable API to read your YAML files and generate the HDF5/netCDF4 file
 
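For orientation, the API exercised by the updated tests further down comes down to a few calls; a minimal usage sketch is given below. The import paths are inferred from the module layout and the output file names are hypothetical, so treat this as an illustration rather than quoted documentation.

```python
from importlib.resources import files

# Import paths inferred from src/h5yaml/yaml_h5.py and src/h5yaml/yaml_nc.py;
# these are an assumption, not quoted from the README.
from h5yaml.yaml_h5 import H5Yaml
from h5yaml.yaml_nc import NcYaml

# Write an HDF5 file from the bundled test definition ("example.h5" is an
# arbitrary output name).
H5Yaml(files("h5yaml.Data") / "h5_testing.yaml").create("example.h5")

# Write a netCDF4 file from the matching netCDF4 definition.
NcYaml(files("h5yaml.Data") / "nc_testing.yaml").create("example.nc")
```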
{h5yaml-0.2.0 → h5yaml-0.2.1}/coverage.xml
Regenerated coverage.py 7.12.0 report (timestamp 1764076997420): overall line-rate 0.911 with 297 of 326 lines covered; package src.h5yaml at line-rate 0.9007, with conf_from_yaml.py at 1, yaml_h5.py at 0.9556 and yaml_nc.py at 0.838; the class entry for the deleted src/h5yaml/lib/chunksizes.py is dropped and package src.h5yaml.lib is now at line-rate 1. The per-line hit counts follow the source changes in this release.
{h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/h5_testing.yaml

@@ -25,6 +25,8 @@ groups:
   - group_01
   - group_02
   - group_03
+  - processing_control
+  - /processing_control/input_parameters
 
 # Define dimensions
 # Note dimensions with an attribute 'long_name' will also be generated as variable
@@ -91,6 +93,26 @@ variables:
     units: '1'
     valid_min: 0
     valid_max: 65534
+  /group_00/detector_images_chunked:
+    _dtype: u2
+    _dims: [number_of_images, column, row]
+    _FillValue: 65535
+    _chunks: [1, 640, 512]
+    long_name: Detector pixel values
+    comment: unbinned full-frame data
+    units: '1'
+    valid_min: 0
+    valid_max: 65534
+  /group_00/detector_images_autochunk:
+    _dtype: u2
+    _dims: [number_of_images, column, row]
+    _chunks: True
+    _FillValue: 65535
+    long_name: Detector pixel values
+    comment: unbinned full-frame data
+    units: '1'
+    valid_min: 0
+    valid_max: 65534
   # ---------- GROUP 01 ----------
   /group_01/detector_images:
     _dtype: u2
@@ -199,7 +221,7 @@ variables:
     valid_max: 999.9
   /group_03/ds_10:
     _dtype: f4
-    _dims: [number_of_images]
+    _dims: [number_of_images, /group_03/viewport]
     long_name: float dataset
     units: '1'
     valid_min: -999.9
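The two new variables exercise explicit and automatic chunking. In plain h5py the corresponding dataset creation looks roughly like the sketch below; it is not the package's own code, and the dataset shapes are illustrative (the chunk shapes come from the `_chunks` entries added above).

```python
import h5py
import numpy as np

# Sketch only: shapes are illustrative, chunk settings mirror the YAML above.
with h5py.File("example.h5", "w") as fid:
    # Explicit chunk shape, as in detector_images_chunked (_chunks: [1, 640, 512])
    fid.create_dataset(
        "detector_images_chunked",
        shape=(0, 640, 512),
        maxshape=(None, 640, 512),
        dtype="u2",
        chunks=(1, 640, 512),
        fillvalue=np.uint16(65535),
    )
    # _chunks: True lets h5py pick a chunk shape (auto-chunking),
    # as in detector_images_autochunk
    fid.create_dataset(
        "detector_images_autochunk",
        shape=(0, 640, 512),
        maxshape=(None, 640, 512),
        dtype="u2",
        chunks=True,
        fillvalue=np.uint16(65535),
    )
```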
{h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/Data/nc_testing.yaml

@@ -25,6 +25,8 @@ groups:
   - group_01
   - group_02
   - group_03
+  - processing_control
+  - /processing_control/input_parameters
 
 # Define dimensions
 # Note dimensions with an attribute 'long_name' will also be generated as variable
@@ -87,6 +89,16 @@ variables:
     units: '1'
     valid_min: 0
     valid_max: 65534
+  /group_00/detector_images_chunked:
+    _dtype: u2
+    _dims: [number_of_images, column, row]
+    _FillValue: 65535
+    _chunks: [1, 640, 512]
+    long_name: Detector pixel values
+    comment: unbinned full-frame data
+    units: '1'
+    valid_min: 0
+    valid_max: 65534
   # ---------- GROUP 01 ----------
   /group_01/detector_images:
     _dtype: u2
@@ -190,7 +202,7 @@ variables:
     # valid_max: 999.9
   /group_03/ds_10:
     _dtype: f4
-    _dims: [number_of_images]
+    _dims: [number_of_images, /group_03/viewport]
     long_name: float dataset
     units: '1'
     valid_min: -999.9
{h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/conf_from_yaml.py

@@ -45,7 +45,7 @@ def conf_from_yaml(file_path: Path | str) -> dict:
 
     """
     if isinstance(file_path, str):
-        file_path = Path(
+        file_path = Path(file_path)
 
     if not file_path.is_file():
         raise FileNotFoundError(f"{file_path} not found")
@@ -53,7 +53,7 @@ def conf_from_yaml(file_path: Path | str) -> dict:
     with file_path.open("r", encoding="ascii") as fid:
         try:
             settings = yaml.safe_load(fid)
-        except yaml.
-            raise RuntimeError from exc
+        except yaml.parser.ParserError as exc:
+            raise RuntimeError(f"Failed to parse {file_path}") from exc
 
     return settings
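Assembled from the two hunks above, the loader now behaves like the stand-alone sketch below (PyYAML only; the function name here is illustrative, not the package's own).

```python
from pathlib import Path

import yaml


def load_yaml_config(file_path: Path | str) -> dict:
    """Sketch of the revised conf_from_yaml behaviour."""
    if isinstance(file_path, str):
        file_path = Path(file_path)
    if not file_path.is_file():
        raise FileNotFoundError(f"{file_path} not found")

    with file_path.open("r", encoding="ascii") as fid:
        try:
            return yaml.safe_load(fid)
        except yaml.parser.ParserError as exc:
            # re-raise with a message that names the offending file
            raise RuntimeError(f"Failed to parse {file_path}") from exc
```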
{h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/yaml_h5.py

@@ -32,7 +32,6 @@ import numpy as np
 
 from .conf_from_yaml import conf_from_yaml
 from .lib.adjust_attr import adjust_attr
-from .lib.chunksizes import guess_chunks
 
 
 # - class definition -----------------------------------
@@ -41,12 +40,12 @@ class H5Yaml:
 
     Parameters
     ----------
-    h5_yaml_fl : Path
-        YAML
+    h5_yaml_fl : Path | str
+        YAML file with the HDF5 format definition
 
     """
 
-    def __init__(self: H5Yaml, h5_yaml_fl: Path) -> None:
+    def __init__(self: H5Yaml, h5_yaml_fl: Path | str) -> None:
         """Construct a H5Yaml instance."""
         self.logger = logging.getLogger("h5yaml.H5Yaml")
 
@@ -72,12 +71,14 @@ class H5Yaml:
             )
 
             if val["_size"] == 0:
-                ds_chunk = val.get("_chunks"
+                ds_chunk = val.get("_chunks")
+                if ds_chunk is not None and not isinstance(ds_chunk, bool):
+                    ds_chunk = tuple(ds_chunk)
                 dset = fid.create_dataset(
                     key,
                     shape=(0,),
                     dtype="T" if val["_dtype"] == "str" else val["_dtype"],
-                    chunks=ds_chunk
+                    chunks=ds_chunk,
                     maxshape=(None,),
                     fillvalue=fillvalue,
                 )
@@ -153,10 +154,8 @@ class H5Yaml:
         for key, val in self.h5_def["variables"].items():
             if val["_dtype"] in fid:
                 ds_dtype = fid[val["_dtype"]]
-                dtype_size = fid[val["_dtype"]].dtype.itemsize
             else:
                 ds_dtype = "T" if val["_dtype"] == "str" else val["_dtype"]
-                dtype_size = np.dtype(val["_dtype"]).itemsize
 
             fillvalue = None
             if "_FillValue" in val:
@@ -190,15 +189,13 @@ class H5Yaml:
             if n_udim > 1:
                 raise ValueError(f"{key} has more than one unlimited dimension")
 
-
-
-
-
-                else guess_chunks(ds_shape, dtype_size)
-            )
+            if None in ds_maxshape and val.get("_chunks") == "contiguous":
+                raise KeyError(
+                    "you can not create a contiguous dataset with unlimited dimensions."
+                )
 
             # create the variable
-            if
+            if val.get("_chunks") == "contiguous":
                 dset = fid.create_dataset(
                     key,
                     ds_shape,
@@ -208,6 +205,9 @@ class H5Yaml:
                     fillvalue=fillvalue,
                 )
             else:
+                ds_chunk = val.get("_chunks")
+                if ds_chunk is not None and not isinstance(ds_chunk, bool):
+                    ds_chunk = tuple(ds_chunk)
                 compression = None
                 shuffle = False
                 # currently only gzip compression is supported
@@ -225,14 +225,12 @@ class H5Yaml:
                     fid[ds_name] = h5py.vlen_dtype(ds_dtype)
                     ds_dtype = fid[ds_name]
                     fillvalue = None
-                    if ds_maxshape == (None,):
-                        ds_chunk = (16,)
 
                 dset = fid.create_dataset(
                     key,
                     ds_shape,
                     dtype=ds_dtype,
-                    chunks=ds_chunk
+                    chunks=ds_chunk,
                     maxshape=ds_maxshape,
                     fillvalue=fillvalue,
                     compression=compression,
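The recurring `_chunks` handling added in both branches can be read as a small normalization step; a sketch of just that logic follows (the helper name is not in the package).

```python
def normalize_chunks(chunks_setting):
    """Normalize a YAML `_chunks` value for h5py's `chunks=` keyword.

    In yaml_h5.py the string value "contiguous" is intercepted earlier and
    never reaches this step, so only three cases remain here:
      None        -> no explicit chunking requested
      True/False  -> passed through (True lets h5py auto-chunk)
      list of int -> explicit chunk shape, converted to a tuple
    """
    if chunks_setting is not None and not isinstance(chunks_setting, bool):
        return tuple(chunks_setting)
    return chunks_setting
```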
{h5yaml-0.2.0 → h5yaml-0.2.1}/src/h5yaml/yaml_nc.py

@@ -35,7 +35,6 @@ from netCDF4 import Dataset
 
 from .conf_from_yaml import conf_from_yaml
 from .lib.adjust_attr import adjust_attr
-from .lib.chunksizes import guess_chunks
 
 if TYPE_CHECKING:
     from pathlib import Path
@@ -43,9 +42,16 @@ if TYPE_CHECKING:
 
 # - class definition -----------------------------------
 class NcYaml:
-    """Class to create a HDF5/netCDF4 formated file from a YAML configuration file.
+    """Class to create a HDF5/netCDF4 formated file from a YAML configuration file.
 
-
+    Parameters
+    ----------
+    nc_yaml_fl : Path | str
+        YAML file with the netCDF4 format definition
+
+    """
+
+    def __init__(self: NcYaml, nc_yaml_fl: Path | str) -> None:
         """Construct a NcYaml instance."""
         self.logger = logging.getLogger("h5yaml.NcYaml")
 
@@ -172,10 +178,8 @@ class NcYaml:
 
             if val["_dtype"] in fid.cmptypes:
                 ds_dtype = fid.cmptypes[val["_dtype"]].dtype
-                sz_dtype = ds_dtype.itemsize
             else:
                 ds_dtype = val["_dtype"]
-                sz_dtype = np.dtype(val["_dtype"]).itemsize
 
             fillvalue = None
             if "_FillValue" in val:
@@ -226,16 +230,16 @@ class NcYaml:
             if n_udim > 1:
                 raise ValueError("more than one unlimited dimension")
 
-
-
-
-
+            if None in ds_maxshape and val.get("_chunks") == "contiguous":
+                raise KeyError(
+                    "you can not create a contiguous dataset with unlimited dimensions."
+                )
 
             if val["_dtype"] in fid.cmptypes:
                 val["_dtype"] = fid.cmptypes[val["_dtype"]]
 
             # create the variable
-            if
+            if val.get("_chunks") == "contiguous":
                 dset = var_grp.createVariable(
                     var_name,
                     val["_dtype"],
@@ -244,25 +248,24 @@ class NcYaml:
                     contiguous=True,
                 )
             else:
+                ds_chunk = val.get("_chunks")
+                if ds_chunk is not None and not isinstance(ds_chunk, bool):
+                    ds_chunk = tuple(ds_chunk)
                 if val.get("_vlen"):
                     if val["_dtype"] in fid.cmptypes:
                         raise ValueError("can not have vlen with compounds")
                     val["_dtype"] = fid.createVLType(ds_dtype, val["_dtype"])
                     fillvalue = None
-                    if ds_maxshape == (None,):
-                        ds_chunk = (16,)
 
                 dset = var_grp.createVariable(
                     var_name,
                     str if val["_dtype"] == "str" else val["_dtype"],
                     dimensions=var_dims,
                     fill_value=fillvalue,
-                    contiguous=False,
                     compression=compression,
                     complevel=complevel,
-                    chunksizes=
-
-                    ),
+                    chunksizes=ds_chunk,
+                    contiguous=False,
                 )
                 dset.setncatts(
                     {
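On the netCDF4 side the normalized `_chunks` value is passed straight to `createVariable(chunksizes=...)` with `contiguous=False`, instead of being computed by the removed guess_chunks helper. A stand-alone sketch with the netCDF4 library is shown below; the file name, dimension sizes and compression settings are illustrative, not taken from the package.

```python
from netCDF4 import Dataset

with Dataset("example.nc", "w") as fid:              # illustrative file name
    fid.createDimension("number_of_images", None)    # unlimited dimension
    fid.createDimension("column", 640)
    fid.createDimension("row", 512)
    fid.createVariable(
        "detector_images_chunked",
        "u2",
        dimensions=("number_of_images", "column", "row"),
        fill_value=65535,
        compression="zlib",           # illustrative compression settings
        complevel=1,
        chunksizes=(1, 640, 512),     # explicit chunk shape from the YAML `_chunks` entry
        contiguous=False,
    )
```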
{h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_from_yaml.py

@@ -22,7 +22,22 @@
 
 from __future__ import annotations
 
+from importlib.resources import files
+
+import pytest
+
+from h5yaml.conf_from_yaml import conf_from_yaml
+
 
 def test_from_yaml() -> None:
     """..."""
-
+    with pytest.raises(FileNotFoundError, match="not found") as excinfo:
+        _ = conf_from_yaml(files("h5yaml.Data") / "not_existing.yaml")
+    assert f"{files('h5yaml.Data') / 'not_existing.yaml'} not found" in str(excinfo)
+
+    with pytest.raises(RuntimeError, match=r"Failed to parse .*") as excinfo:
+        _ = conf_from_yaml("README.md")
+    assert "Failed to parse" in str(excinfo)
+
+    res = conf_from_yaml(files("h5yaml.Data") / "nc_testing.yaml")
+    assert isinstance(res, dict)
{h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_yaml_h5.py

@@ -38,21 +38,21 @@ class TestH5Yaml:
     FID = H5Yaml(files("h5yaml.Data") / "h5_testing.yaml").diskless()
 
     def test_exceptions(self: TestH5Yaml) -> None:
-        """
+        """Unit-test for class exeptions."""
         # tests which should raise an exception because the file can not be created
         l1a_name = "/this/folder/does/not/exists/test.h5"
         with pytest.raises(FileNotFoundError, match=r"[Errno 2] .*") as excinfo:
-
+            H5Yaml(files("h5yaml.Data") / "h5_testing.yaml").create(l1a_name)
         assert "'No such file or directory" in str(excinfo)
 
         l1a_name = Path("/this/folder/does/not/exists/test.h5")
         with pytest.raises(FileNotFoundError, match=r"[Errno 2] .*") as excinfo:
-
+            H5Yaml(files("h5yaml.Data") / "h5_testing.yaml").create(l1a_name)
         assert "'No such file or directory" in str(excinfo)
 
         l1a_name = "/test.h5"
         with pytest.raises(RuntimeError, match=r"failed create .*") as excinfo:
-
+            H5Yaml(files("h5yaml.Data") / "h5_testing.yaml").create(l1a_name)
         assert f"failed create {l1a_name}" in str(excinfo.value)
 
         # tests which raise an exception because the YAML file is corrupted
@@ -62,11 +62,11 @@ class TestH5Yaml:
         assert f"{file_path} not found" in str(excinfo.value)
 
         with pytest.raises(ValueError, match=r".* unlimited dimension") as excinfo:
-
+            H5Yaml(files("h5yaml.Data") / "h5_unsupported.yaml").create("klad.h5")
         assert "has more than one unlimited dimension" in str(excinfo.value)
 
     def test_groups(self: TestH5Yaml) -> None:
-        """
+        """Unit-test to check the groups."""
         if "groups" not in self.H5_DEF:
             return
 
@@ -74,7 +74,7 @@ class TestH5Yaml:
             assert key in self.FID
 
     def test_dimensions(self: TestH5Yaml) -> None:
-        """
+        """Unit-test to check the dimensions."""
         if "dimensions" not in self.H5_DEF:
             return
 
@@ -101,7 +101,7 @@ class TestH5Yaml:
                 assert self.FID[key].attrs[attr] == self.H5_DEF["dimensions"][key][attr]
 
     def test_compounds(self: TestH5Yaml) -> None:
-        """
+        """Unit-test to check the compounds."""
         if "compounds" not in self.H5_DEF:
             return
 
@@ -109,7 +109,7 @@ class TestH5Yaml:
             assert key in self.FID
 
     def test_variables(self: TestH5Yaml) -> None:
-        """
+        """Unit-test to check the variables."""
         if "variables" not in self.H5_DEF:
             return
 
@@ -130,5 +130,5 @@ class TestH5Yaml:
                 assert self.FID[key].attrs[attr] == dset[key][attr]
 
     def test_close(self: TestH5Yaml) -> None:
-        """
+        """Close the in-memory HDF5 file."""
         self.FID.close()
{h5yaml-0.2.0 → h5yaml-0.2.1}/tests/test_yaml_nc.py

@@ -38,21 +38,21 @@ class TestNcYaml:
     FID = NcYaml(files("h5yaml.Data") / "nc_testing.yaml").diskless()
 
     def test_exceptions(self: TestNcYaml) -> None:
-        """
+        """Unit-test for the class exdeptions."""
         # tests which should raise an exception because the file can not be created
         l1a_name = "/this/folder/does/not/exists/test.nc"
         with pytest.raises(RuntimeError, match=r"failed to create .*") as excinfo:
-
+            NcYaml(files("h5yaml.Data") / "nc_testing.yaml").create(l1a_name)
         assert f"failed to create {l1a_name}" in str(excinfo)
 
         l1a_name = Path("/this/folder/does/not/exists/test.nc")
         with pytest.raises(RuntimeError, match=r"failed to create .*") as excinfo:
-
+            NcYaml(files("h5yaml.Data") / "nc_testing.yaml").create(l1a_name)
         assert f"failed to create {l1a_name}" in str(excinfo)
 
         l1a_name = "/test.nc"
         with pytest.raises(RuntimeError, match=r"failed to create .*") as excinfo:
-
+            NcYaml(files("h5yaml.Data") / "nc_testing.yaml").create(l1a_name)
         assert f"failed to create {l1a_name}" in str(excinfo.value)
 
         # tests which raise an exception because the YAML file is corrupted
@@ -62,15 +62,19 @@ class TestNcYaml:
         assert f"{file_path} not found" in str(excinfo.value)
 
     def test_groups(self: TestNcYaml) -> None:
-        """
+        """Unit-test to check the groups."""
         if "groups" not in self.NC_DEF:
             return
 
         for key in self.NC_DEF["groups"]:
-
+            pkey = PurePosixPath(key)
+            if pkey.is_absolute():
+                assert pkey.name in self.FID[pkey.parent].groups
+            else:
+                assert key in self.FID.groups
 
     def test_dimensions(self: TestNcYaml) -> None:
-        """
+        """Unit-test to check the dimensions."""
         if "dimensions" not in self.NC_DEF:
             return
 
@@ -102,7 +106,7 @@ class TestNcYaml:
                 assert getattr(nc_dim, attr) == self.NC_DEF["dimensions"][key][attr]
 
     def test_compounds(self: TestNcYaml) -> None:
-        """
+        """Unit-test to check the compounds."""
         if "compounds" not in self.NC_DEF:
             return
 
@@ -110,7 +114,7 @@ class TestNcYaml:
             assert key in self.FID.cmptypes
 
     def test_variables(self: TestNcYaml) -> None:
-        """
+        """Unit-test to check the variables."""
         if "variables" not in self.NC_DEF:
             return
 
@@ -137,6 +141,6 @@ class TestNcYaml:
             else:
                 assert getattr(nc_var, attr) == self.NC_DEF["variables"][key][attr]
 
-
-
-
+    def test_close(self: TestNcYaml) -> None:
+        """Close the in-memory netCDF4 file."""
+        self.FID.close()
h5yaml-0.2.0/klad.h5 (DELETED)
Binary file
h5yaml-0.2.0/src/h5yaml/lib/chunksizes.py (DELETED)

@@ -1,73 +0,0 @@
-#
-# This file is part of Python package: `h5yaml`
-#
-#     https://github.com/rmvanhees/pyxarr.git
-#
-# Copyright (c) 2025 - R.M. van Hees (SRON)
-#    All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Obtain chunksizes for HDF5 datasets."""
-
-from __future__ import annotations
-
-__all__ = ["guess_chunks"]
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from numpy.typing import ArrayLike
-
-
-def guess_chunks(dims: ArrayLike[int], dtype_sz: int) -> str | tuple[int]:
-    """Perform an educated guess for the dataset chunk sizes.
-
-    Parameters
-    ----------
-    dims : ArrayLike[int]
-        Dimensions of the variable
-    dtype_sz : int
-        The element size of the data-type of the variable
-
-    Returns
-    -------
-    "contiguous" or tuple with chunk-sizes
-
-    """
-    fixed_size = dtype_sz
-    if len(dims) > 1:
-        for val in [x for x in dims[1:] if x > 0]:
-            fixed_size *= val
-
-    # first variables without an unlimited dimension
-    if 0 not in dims:
-        if fixed_size < 400000:
-            return "contiguous"
-
-        res = list(dims)
-        res[0] = max(1, 2048000 // fixed_size)
-        return tuple(res)
-
-    # then variables with an unlimited dimension
-    if len(dims) == 1:
-        return (1024,)
-
-    udim = dims.index(0)
-    res = list(dims)
-    if fixed_size < 400000:
-        res[udim] = 1024
-    else:
-        res[udim] = max(1, 2048000 // fixed_size)
-
-    return tuple(res)
All remaining files listed above with +0 -0 are unchanged between h5yaml-0.2.0 and h5yaml-0.2.1.