h5yaml 0.2.1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -19,18 +19,19 @@
19
19
  # See the License for the specific language governing permissions and
20
20
  # limitations under the License.
21
21
  #
22
- stats_dtype:
23
- time:
24
- - u8
25
- - seconds since 1970-01-01T00:00:00
26
- - timestamp
27
- index: [u2, '1', index]
28
- tbl_id: [u1, '1', binning id]
29
- saa: [u1, '1', saa-flag]
30
- coad: [u1, '1', co-addings]
31
- texp: [f4, ms, exposure time]
32
- lat: [f4, degree, latitude]
33
- lon: [f4, degree, longitude]
34
- avg: [f4, '1', '$S - S_{ref}$']
35
- unc: [f4, '1', '\u03c3($S - S_{ref}$)']
36
- dark_offs: [f4, '1', dark-offset]
22
+ compounds:
23
+ stats_dtype:
24
+ time:
25
+ - u8
26
+ - seconds since 1970-01-01T00:00:00
27
+ - timestamp
28
+ index: [u2, '1', index]
29
+ tbl_id: [u1, '1', binning id]
30
+ saa: [u1, '1', saa-flag]
31
+ coad: [u1, '1', co-addings]
32
+ texp: [f4, ms, exposure time]
33
+ lat: [f4, degree, latitude]
34
+ lon: [f4, degree, longitude]
35
+ avg: [f4, '1', '$S - S_{ref}$']
36
+ unc: [f4, '1', '\u03c3($S - S_{ref}$)']
37
+ dark_offs: [f4, '1', dark-offset]
@@ -26,7 +26,7 @@ groups:
26
26
  - group_02
27
27
  - group_03
28
28
  - processing_control
29
- - /processing_control/input_parameters
29
+ - processing_control/input_parameters
30
30
 
31
31
  # Define dimensions
32
32
  # Note dimensions with an attribute 'long_name' will also be generated as variable
@@ -39,12 +39,6 @@ dimensions:
39
39
  valid_min: 0
40
40
  valid_max: 92400
41
41
 
42
- # Define compound types
43
- # - compound elements must have a data-type, and can have a unit and long_name
44
- compounds:
45
- - h5_compound.yaml
46
- - h5_nocompound.yaml
47
-
48
42
  # Define variables
49
43
  variables:
50
44
  ds_01:
@@ -26,7 +26,7 @@ groups:
26
26
  - group_02
27
27
  - group_03
28
28
  - processing_control
29
- - /processing_control/input_parameters
29
+ - processing_control/input_parameters
30
30
 
31
31
  # Define dimensions
32
32
  # Note dimensions with an attribute 'long_name' will also be generated as variable
h5yaml/yaml_h5.py CHANGED
@@ -40,30 +40,74 @@ class H5Yaml:
40
40
 
41
41
  Parameters
42
42
  ----------
43
- h5_yaml_fl : Path | str
44
- YAML file with the HDF5 format definition
43
+ h5_yaml_fl : Path | str | list[Path | str]
44
+ YAML files with the HDF5 format definition
45
45
 
46
46
  """
47
47
 
48
- def __init__(self: H5Yaml, h5_yaml_fl: Path | str) -> None:
48
+ def __init__(self: H5Yaml, h5_yaml_fl: Path | str | list[Path | str]) -> None:
49
49
  """Construct a H5Yaml instance."""
50
50
  self.logger = logging.getLogger("h5yaml.H5Yaml")
51
+ self._h5_def = {
52
+ "groups": set(),
53
+ "attrs_global": {},
54
+ "attrs_groups": {},
55
+ "compounds": {},
56
+ "dimensions": {},
57
+ "variables": {},
58
+ }
59
+
60
+ for yaml_fl in h5_yaml_fl if isinstance(h5_yaml_fl, list) else [h5_yaml_fl]:
61
+ try:
62
+ config = conf_from_yaml(yaml_fl)
63
+ except RuntimeError as exc:
64
+ raise RuntimeError from exc
65
+
66
+ for key in self._h5_def:
67
+ if key in config:
68
+ self._h5_def[key] |= (
69
+ set(config[key]) if key == "groups" else config[key]
70
+ )
71
+
72
+ def __attrs(self: H5Yaml, fid: h5py.File) -> None:
73
+ """Create global and group attributes.
51
74
 
52
- try:
53
- self._h5_def = conf_from_yaml(h5_yaml_fl)
54
- except RuntimeError as exc:
55
- raise RuntimeError from exc
75
+ Parameters
76
+ ----------
77
+ fid : h5py.File
78
+ HDF5 file pointer (mode 'r+')
79
+
80
+ """
81
+ for key, value in self._h5_def["attrs_global"].items():
82
+ if key not in fid.attrs and value != "TBW":
83
+ fid.attrs[key] = value
56
84
 
57
- self.yaml_dir = h5_yaml_fl.parent
85
+ for key, value in self._h5_def["attrs_groups"].items():
86
+ if key not in fid.attrs and value != "TBW":
87
+ fid[str(Path(key).parent)].attrs[Path(key).name] = value
58
88
 
59
89
  def __groups(self: H5Yaml, fid: h5py.File) -> None:
60
- """Create groups in HDF5 product."""
61
- for key in self.h5_def["groups"]:
62
- _ = fid.create_group(key)
90
+ """Create groups in HDF5 product.
91
+
92
+ Parameters
93
+ ----------
94
+ fid : h5py.File
95
+ HDF5 file pointer (mode 'r+')
96
+
97
+ """
98
+ for key in self._h5_def["groups"]:
99
+ _ = fid.require_group(key)
63
100
 
64
101
  def __dimensions(self: H5Yaml, fid: h5py.File) -> None:
65
- """Add dimensions to HDF5 product."""
66
- for key, val in self.h5_def["dimensions"].items():
102
+ """Add dimensions to HDF5 product.
103
+
104
+ Parameters
105
+ ----------
106
+ fid : h5py.File
107
+ HDF5 file pointer (mode 'r+')
108
+
109
+ """
110
+ for key, val in self._h5_def["dimensions"].items():
67
111
  fillvalue = None
68
112
  if "_FillValue" in val:
69
113
  fillvalue = (
@@ -102,56 +146,28 @@ class H5Yaml:
102
146
  if not attr.startswith("_"):
103
147
  dset.attrs[attr] = adjust_attr(val["_dtype"], attr, attr_val)
104
148
 
105
- def __compounds(self: H5Yaml, fid: h5py.File) -> dict[str, str | int | float]:
106
- """Add compound datatypes to HDF5 product."""
107
- if "compounds" not in self.h5_def:
108
- return {}
109
-
110
- compounds = {}
111
- if isinstance(self.h5_def["compounds"], list):
112
- file_list = self.h5_def["compounds"].copy()
113
- self.h5_def["compounds"] = {}
114
- for name in file_list:
115
- if not (yaml_fl := self.yaml_dir / name).is_file():
116
- continue
117
- try:
118
- res = conf_from_yaml(yaml_fl)
119
- except RuntimeError as exc:
120
- raise RuntimeError from exc
121
- for key, value in res.items():
122
- self.h5_def["compounds"][key] = value
123
-
124
- for key, val in self.h5_def["compounds"].items():
125
- compounds[key] = {
126
- "dtype": [],
127
- "units": [],
128
- "names": [],
129
- }
130
-
131
- for _key, _val in val.items():
132
- compounds[key]["dtype"].append((_key, _val[0]))
133
- if len(_val) == 3:
134
- compounds[key]["units"].append(_val[1])
135
- compounds[key]["names"].append(_val[2] if len(_val) == 3 else _val[1])
136
-
137
- fid[key] = np.dtype(compounds[key]["dtype"])
138
-
139
- return compounds
140
-
141
- def __variables(
142
- self: H5Yaml, fid: h5py.File, compounds: dict[str, str | int | float] | None
143
- ) -> None:
149
+ def __compounds(self: H5Yaml, fid: h5py.File) -> None:
150
+ """Add compound datatypes to HDF5 product.
151
+
152
+ Parameters
153
+ ----------
154
+ fid : h5py.File
155
+ HDF5 file pointer (mode 'r+')
156
+
157
+ """
158
+ for key, val in self._h5_def["compounds"].items():
159
+ fid[key] = np.dtype([(k, v[0]) for k, v in val.items()])
160
+
161
+ def __variables(self: H5Yaml, fid: h5py.File) -> None:
144
162
  """Add datasets to HDF5 product.
145
163
 
146
164
  Parameters
147
165
  ----------
148
166
  fid : h5py.File
149
167
  HDF5 file pointer (mode 'r+')
150
- compounds : dict[str, str | int | float]
151
- Definition of the compound(s) in the product
152
168
 
153
169
  """
154
- for key, val in self.h5_def["variables"].items():
170
+ for key, val in self._h5_def["variables"].items():
155
171
  if val["_dtype"] in fid:
156
172
  ds_dtype = fid[val["_dtype"]]
157
173
  else:
@@ -244,11 +260,14 @@ class H5Yaml:
244
260
  if not attr.startswith("_"):
245
261
  dset.attrs[attr] = adjust_attr(val["_dtype"], attr, attr_val)
246
262
 
247
- if compounds is not None and val["_dtype"] in compounds:
248
- if compounds[val["_dtype"]]["units"]:
249
- dset.attrs["units"] = compounds[val["_dtype"]]["units"]
250
- if compounds[val["_dtype"]]["names"]:
251
- dset.attrs["long_name"] = compounds[val["_dtype"]]["names"]
263
+ if val["_dtype"] in self._h5_def["compounds"]:
264
+ compound = self._h5_def["compounds"][val["_dtype"]]
265
+ res = [v[2] for k, v in compound.items() if len(v) == 3]
266
+ if res:
267
+ dset.attrs["units"] = [v[1] for k, v in compound.items()]
268
+ dset.attrs["long_name"] = res
269
+ else:
270
+ dset.attrs["long_name"] = [v[1] for k, v in compound.items()]
252
271
 
253
272
  @property
254
273
  def h5_def(self: H5Yaml) -> dict:
@@ -258,10 +277,11 @@ class H5Yaml:
258
277
  def diskless(self: H5Yaml) -> h5py.File:
259
278
  """Create a HDF5/netCDF4 file in memory."""
260
279
  fid = h5py.File.in_memory()
261
- if "groups" in self.h5_def:
262
- self.__groups(fid)
280
+ self.__groups(fid)
263
281
  self.__dimensions(fid)
264
- self.__variables(fid, self.__compounds(fid))
282
+ self.__compounds(fid)
283
+ self.__variables(fid)
284
+ self.__attrs(fid)
265
285
  return fid
266
286
 
267
287
  def create(self: H5Yaml, l1a_name: Path | str) -> None:
@@ -275,9 +295,10 @@ class H5Yaml:
275
295
  """
276
296
  try:
277
297
  with h5py.File(l1a_name, "w") as fid:
278
- if "groups" in self.h5_def:
279
- self.__groups(fid)
298
+ self.__groups(fid)
280
299
  self.__dimensions(fid)
281
- self.__variables(fid, self.__compounds(fid))
300
+ self.__compounds(fid)
301
+ self.__variables(fid)
302
+ self.__attrs(fid)
282
303
  except PermissionError as exc:
283
304
  raise RuntimeError(f"failed create {l1a_name}") from exc
h5yaml/yaml_nc.py CHANGED
@@ -18,15 +18,14 @@
18
18
  # See the License for the specific language governing permissions and
19
19
  # limitations under the License.
20
20
  #
21
- """Create HDF5/netCDF4 formatted file from a YAML configuration file using netCDF4."""
21
+ """Create netCDF4 formatted file from a YAML configuration file using netCDF4."""
22
22
 
23
23
  from __future__ import annotations
24
24
 
25
25
  __all__ = ["NcYaml"]
26
26
 
27
27
  import logging
28
- from pathlib import PurePosixPath
29
- from typing import TYPE_CHECKING
28
+ from pathlib import Path, PurePosixPath
30
29
 
31
30
  import numpy as np
32
31
 
@@ -36,43 +35,90 @@ from netCDF4 import Dataset
36
35
  from .conf_from_yaml import conf_from_yaml
37
36
  from .lib.adjust_attr import adjust_attr
38
37
 
39
- if TYPE_CHECKING:
40
- from pathlib import Path
41
-
42
38
 
43
39
  # - class definition -----------------------------------
44
40
  class NcYaml:
45
- """Class to create a HDF5/netCDF4 formated file from a YAML configuration file.
41
+ """Class to create a netCDF4 formated file from a YAML configuration file.
46
42
 
47
43
  Parameters
48
44
  ----------
49
- nc_yaml_fl : Path | str
50
- YAML file with the netCDF4 format definition
45
+ nc_yaml_fl : Path | str | list[Path | str]
46
+ YAML files with the netCDF4 format definition
51
47
 
52
48
  """
53
49
 
54
- def __init__(self: NcYaml, nc_yaml_fl: Path | str) -> None:
50
+ def __init__(self: NcYaml, nc_yaml_fl: Path | str | list[Path | str]) -> None:
55
51
  """Construct a NcYaml instance."""
56
52
  self.logger = logging.getLogger("h5yaml.NcYaml")
53
+ self._nc_def = {
54
+ "groups": set(),
55
+ "attrs_global": {},
56
+ "attrs_groups": {},
57
+ "compounds": {},
58
+ "dimensions": {},
59
+ "variables": {},
60
+ }
61
+
62
+ for yaml_fl in nc_yaml_fl if isinstance(nc_yaml_fl, list) else [nc_yaml_fl]:
63
+ try:
64
+ config = conf_from_yaml(yaml_fl)
65
+ except RuntimeError as exc:
66
+ raise RuntimeError from exc
67
+
68
+ for key in self._nc_def:
69
+ if key in config:
70
+ self._nc_def[key] |= (
71
+ set(config[key]) if key == "groups" else config[key]
72
+ )
73
+
74
+ def __attrs(self: NcYaml, fid: Dataset) -> None:
75
+ """Create global and group attributes.
57
76
 
58
- try:
59
- self._nc_def = conf_from_yaml(nc_yaml_fl)
60
- except RuntimeError as exc:
61
- raise RuntimeError from exc
77
+ Parameters
78
+ ----------
79
+ fid : netCDF4.Dataset
80
+ netCDF4 Dataset (mode 'r+')
81
+
82
+ """
83
+ for key, val in self.nc_def["attrs_global"].items():
84
+ if val == "TBW":
85
+ continue
86
+
87
+ if isinstance(val, str):
88
+ fid.setncattr_string(key, val)
89
+ else:
90
+ fid.setncattr(key, val)
62
91
 
63
- self.yaml_dir = nc_yaml_fl.parent
92
+ for key, val in self.nc_def["attrs_groups"].items():
93
+ if val == "TBW":
94
+ continue
95
+
96
+ if isinstance(val, str):
97
+ fid[str(Path(key).parent)].setncattr_string(Path(key).name, val)
98
+ else:
99
+ fid[str(Path(key).parent)].setncattr(Path(key).name, val)
64
100
 
65
101
  def __groups(self: NcYaml, fid: Dataset) -> None:
66
- """Create groups in HDF5 product."""
102
+ """Create groups in a netCDF4 product.
103
+
104
+ Parameters
105
+ ----------
106
+ fid : netCDF4.Dataset
107
+ netCDF4 Dataset (mode 'r+')
108
+
109
+ """
67
110
  for key in self.nc_def["groups"]:
68
- pkey = PurePosixPath(key)
69
- if pkey.is_absolute():
70
- _ = fid[pkey.parent].createGroup(pkey.name)
71
- else:
72
- _ = fid.createGroup(key)
111
+ _ = fid.createGroup(key)
73
112
 
74
113
  def __dimensions(self: NcYaml, fid: Dataset) -> None:
75
- """Add dimensions to HDF5 product."""
114
+ """Add dimensions to a netCDF4 product.
115
+
116
+ Parameters
117
+ ----------
118
+ fid : netCDF4.Dataset
119
+ netCDF4 Dataset (mode 'r+')
120
+
121
+ """
76
122
  for key, value in self.nc_def["dimensions"].items():
77
123
  pkey = PurePosixPath(key)
78
124
  if pkey.is_absolute():
@@ -119,56 +165,26 @@ class NcYaml:
119
165
  }
120
166
  )
121
167
 
122
- def __compounds(self: NcYaml, fid: Dataset) -> dict[str, str | int | float]:
123
- """Add compound datatypes to HDF5 product."""
124
- if "compounds" not in self.nc_def:
125
- return {}
126
-
127
- compounds = {}
128
- if isinstance(self.nc_def["compounds"], list):
129
- file_list = self.nc_def["compounds"].copy()
130
- self.nc_def["compounds"] = {}
131
- for name in file_list:
132
- if not (yaml_fl := self.yaml_dir / name).is_file():
133
- continue
134
- try:
135
- res = conf_from_yaml(yaml_fl)
136
- except RuntimeError as exc:
137
- raise RuntimeError from exc
138
- for key, value in res.items():
139
- self.nc_def["compounds"][key] = value
140
-
141
- for key, value in self.nc_def["compounds"].items():
142
- compounds[key] = {
143
- "dtype": [],
144
- "units": [],
145
- "names": [],
146
- }
147
-
148
- for _key, _val in value.items():
149
- compounds[key]["dtype"].append((_key, _val[0]))
150
- if len(_val) == 3:
151
- compounds[key]["units"].append(_val[1])
152
- compounds[key]["names"].append(_val[2] if len(_val) == 3 else _val[1])
153
-
154
- comp_t = np.dtype(compounds[key]["dtype"])
155
- _ = fid.createCompoundType(comp_t, key)
168
+ def __compounds(self: NcYaml, fid: Dataset) -> None:
169
+ """Add compound datatypes to a netCDF4 product.
156
170
 
157
- return compounds
171
+ Parameters
172
+ ----------
173
+ fid : netCDF4.Dataset
174
+ netCDF4 Dataset (mode 'r+')
158
175
 
159
- def __variables(
160
- self: NcYaml,
161
- fid: Dataset,
162
- compounds: dict[str, str | int | float] | None,
163
- ) -> None:
164
- """Add datasets to HDF5 product.
176
+ """
177
+ for key, val in self.nc_def["compounds"].items():
178
+ comp_t = np.dtype([(k, v[0]) for k, v in val.items()])
179
+ _ = fid.createCompoundType(comp_t, key)
180
+
181
+ def __variables(self: NcYaml, fid: Dataset) -> None:
182
+ """Add datasets to a netCDF4 product.
165
183
 
166
184
  Parameters
167
185
  ----------
168
186
  fid : netCDF4.Dataset
169
- HDF5 file pointer (mode 'r+')
170
- compounds : dict[str, str | int | float]
171
- Definition of the compound(s) in the product
187
+ netCDF4 Dataset (mode 'r+')
172
188
 
173
189
  """
174
190
  for key, val in self.nc_def["variables"].items():
@@ -275,38 +291,45 @@ class NcYaml:
275
291
  }
276
292
  )
277
293
 
278
- if compounds is not None and val["_dtype"] in compounds:
279
- if compounds[val["_dtype"]]["units"]:
280
- dset.attrs["units"] = compounds[val["_dtype"]]["units"]
281
- if compounds[val["_dtype"]]["names"]:
282
- dset.attrs["long_name"] = compounds[val["_dtype"]]["names"]
294
+ if val["_dtype"] in self._nc_def["compounds"]:
295
+ compound = self._nc_def["compounds"][val["_dtype"]]
296
+ res = [v[2] for k, v in compound.items() if len(v) == 3]
297
+ if res:
298
+ dset.attrs["units"] = [v[1] for k, v in compound.items()]
299
+ dset.attrs["long_name"] = res
300
+ else:
301
+ dset.attrs["long_name"] = [v[1] for k, v in compound.items()]
283
302
 
284
303
  @property
285
304
  def nc_def(self: NcYaml) -> dict:
286
- """Return definition of the HDF5/netCDF4 product."""
305
+ """Return definition of the netCDF4 product."""
287
306
  return self._nc_def
288
307
 
289
308
  def diskless(self: NcYaml) -> Dataset:
290
- """Create a HDF5/netCDF4 file in memory."""
309
+ """Create a netCDF4 file in memory."""
291
310
  fid = Dataset("diskless_test.nc", "w", diskless=True, persistent=False)
292
311
  self.__groups(fid)
293
312
  self.__dimensions(fid)
294
- self.__variables(fid, self.__compounds(fid))
313
+ self.__compounds(fid)
314
+ self.__variables(fid)
315
+ self.__attrs(fid)
295
316
  return fid
296
317
 
297
318
  def create(self: NcYaml, l1a_name: Path | str) -> None:
298
- """Create a HDF5/netCDF4 file (overwrite if exist).
319
+ """Create a netCDF4 file (overwrite if exist).
299
320
 
300
321
  Parameters
301
322
  ----------
302
323
  l1a_name : Path | str
303
- Full name of the HDF5/netCDF4 file to be generated
324
+ Full name of the netCDF4 file to be generated
304
325
 
305
326
  """
306
327
  try:
307
328
  with Dataset(l1a_name, "w") as fid:
308
329
  self.__groups(fid)
309
330
  self.__dimensions(fid)
310
- self.__variables(fid, self.__compounds(fid))
331
+ self.__compounds(fid)
332
+ self.__variables(fid)
333
+ self.__attrs(fid)
311
334
  except PermissionError as exc:
312
335
  raise RuntimeError(f"failed to create {l1a_name}") from exc
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: h5yaml
3
- Version: 0.2.1
3
+ Version: 0.3.0
4
4
  Summary: Use YAML configuration file to generate HDF5/netCDF4 formated files.
5
5
  Project-URL: Homepage, https://github.com/rmvanhees/h5_yaml
6
6
  Project-URL: Source, https://github.com/rmvanhees/h5_yaml
@@ -79,17 +79,21 @@ And reinstall `h5py` and `netCDF4` using the commands:
79
79
 
80
80
  The YAML file should be structured as follows:
81
81
 
82
- * The top level are: 'groups', 'dimensions', 'compounds' and 'variables'
82
+ * The top level are: 'groups', 'dimensions', 'compounds', 'variables', 'attrs\_global' and 'attrs\_groups'.
83
+ * > 'attrs\_global' and 'attrs\_groups' are added in version 0.3.0
84
+ * The names of the attributes, groups, dimensions, compounds and variables should be specified as PosixPaths, however:
85
+ * The names of groups should never start with a slash (always relative to root);
86
+ * All other elements which are stored in root should also not start with a slash;
87
+ * But these elements require a starting slash (absolute paths) when they are not stored in the root.
83
88
  * The section 'groups' are optional, but you should provide each group you want to use
84
89
  in your file. The 'groups' section in the YAML file may look like this:
85
-
86
90
  ```
87
91
  groups:
88
92
  - engineering_data
89
93
  - image_attributes
90
94
  - navigation_data
91
- - processing_control
92
95
  - science_data
96
+ - processing_control/input_data
93
97
  ```
94
98
 
95
99
  * The section 'dimensions' is obligatory, you should define the dimensions for each
@@ -145,14 +149,6 @@ The YAML file should be structured as follows:
145
149
  dark_offs: [f4, '1', dark-offset]
146
150
  ```
147
151
 
148
- Alternatively, provide a list with names of YAML files which contain the definitions
149
- of the compounds.
150
-
151
- ```
152
- compounds:
153
- - h5_nomhk_tm.yaml
154
- - h5_science_hk.yaml
155
- ```
156
152
  * The 'variables' are defined by their data-type ('_dtype') and dimensions ('_dims'),
157
153
  and optionally chunk sizes ('_chunks'), compression ('_compression'), variable length
158
154
  ('_vlen'). In addition, each variable can have as many attributes as you like,
@@ -160,6 +156,16 @@ The YAML file should be structured as follows:
160
156
 
161
157
  ```
162
158
  variables:
159
+ /science_data/detector_images:
160
+ _dtype: u2
161
+ _dims: [number_of_images, samples_per_image]
162
+ _compression: 3
163
+ _FillValue: 65535
164
+ long_name: Detector pixel values
165
+ coverage_content_type: image
166
+ units: '1'
167
+ valid_min: 0
168
+ valid_max: 65534
163
169
  /image_attributes/nr_coadditions:
164
170
  _dtype: u2
165
171
  _dims: [number_of_images]
@@ -175,16 +181,14 @@ The YAML file should be structured as follows:
175
181
  units: seconds
176
182
  stats_163:
177
183
  _dtype: stats_dtype
178
- _vlen: True
179
184
  _dims: [days]
185
+ _vlen: True
180
186
  comment: detector map statistics (MPS=163)
181
187
  ```
182
188
 
183
- ### Notes and ToDo:
189
+ ### Notes and ToDo
184
190
 
185
- * The usage of older versions of h5py may result in broken netCDF4 files
186
- * Explain usage of parameter '_chunks', which is currently not correctly implemented.
187
- * Explain that the usage of variable length data-sets may break netCDF4 compatibility
191
+ * The layout of an HDF5 or netCDF4 file can be complex. From version 0.3.0, you can split the file definition over several YAML files and provide a list with the names of YAML files as input to H5Yaml and NcYaml.
188
192
 
189
193
  ## Support [TBW]
190
194
 
@@ -0,0 +1,14 @@
1
+ h5yaml/__init__.py,sha256=NdNciPgYnbq-aVM6QqNGNZtdL72rTGLAMrDy0Yw7ckk,751
2
+ h5yaml/conf_from_yaml.py,sha256=GVbWR-I0_sKRxrXmgLxbnTJvAXz5OtFtNYu4Pp3LaaI,1607
3
+ h5yaml/yaml_h5.py,sha256=R_WqvK2korrR_nCY7MlmycRcD5Hc4yeJC6B4rvuwulk,10399
4
+ h5yaml/yaml_nc.py,sha256=G4kvn_Ec72ND8phXgNbFWY7EML-pgiMcBVwZroPtrQY,11330
5
+ h5yaml/Data/h5_compound.yaml,sha256=pAVGyhGpbbFgsb1NoTQZsttPLK1zktTZRufFdSaX78U,1172
6
+ h5yaml/Data/h5_testing.yaml,sha256=s-kUjHiXKr4IOVf2vqz8mUr1vcU61wxKwZFuQCUbemA,6246
7
+ h5yaml/Data/h5_unsupported.yaml,sha256=EfFztuUpuXDl_7wgwIqelwE_gdvu35zKT-YtsUfGQeM,1342
8
+ h5yaml/Data/nc_testing.yaml,sha256=C30hXo73GG4BifIr7oymkbJ4Bh8hxKKDDMnFOO5VED8,5806
9
+ h5yaml/lib/__init__.py,sha256=NdNciPgYnbq-aVM6QqNGNZtdL72rTGLAMrDy0Yw7ckk,751
10
+ h5yaml/lib/adjust_attr.py,sha256=4dHEGwwIa3a3hihyuSX8jCsC08fYcz_9XWA1pBwiwfc,2284
11
+ h5yaml-0.3.0.dist-info/METADATA,sha256=jFRcsa-2kTu8QIFtKmKWaURD3DcQwrfyiXzNvzsQXnU,8446
12
+ h5yaml-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
13
+ h5yaml-0.3.0.dist-info/licenses/LICENSE,sha256=rLarIZOYK5jHuUjMnFbgdI_Tb_4_HAAKSOOIhwiWlE4,11356
14
+ h5yaml-0.3.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: hatchling 1.27.0
2
+ Generator: hatchling 1.28.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,14 +0,0 @@
1
- h5yaml/__init__.py,sha256=NdNciPgYnbq-aVM6QqNGNZtdL72rTGLAMrDy0Yw7ckk,751
2
- h5yaml/conf_from_yaml.py,sha256=GVbWR-I0_sKRxrXmgLxbnTJvAXz5OtFtNYu4Pp3LaaI,1607
3
- h5yaml/yaml_h5.py,sha256=K9KkdHNDadrrwlR5EJWuzf1yFZcycJLm3EaiM4nLHkw,10112
4
- h5yaml/yaml_nc.py,sha256=0xJ0dYnIXEhUVZGEkv9GZZu4Xo7K4zXPGiG49j8RHKs,10977
5
- h5yaml/Data/h5_compound.yaml,sha256=z3dMCJDRAw14boRp0zT74bz_oFi21yu8coUoKOW-d2Q,1131
6
- h5yaml/Data/h5_testing.yaml,sha256=BSrjenq_L5g8GGoldtU_NbWzLSymwAcvFOh26jtoynM,6247
7
- h5yaml/Data/h5_unsupported.yaml,sha256=v4HYhiTikFt6UoEUJBnmSse_WeHbmBgqF2e1bCJEfLw,1502
8
- h5yaml/Data/nc_testing.yaml,sha256=zKDRkYpWVM3_vDpsu-ZxHAGDlITICb_nwjDKsFzPgcQ,5807
9
- h5yaml/lib/__init__.py,sha256=NdNciPgYnbq-aVM6QqNGNZtdL72rTGLAMrDy0Yw7ckk,751
10
- h5yaml/lib/adjust_attr.py,sha256=4dHEGwwIa3a3hihyuSX8jCsC08fYcz_9XWA1pBwiwfc,2284
11
- h5yaml-0.2.1.dist-info/METADATA,sha256=Z-be-4WMz4o1RY4WiY3o-pnfEq72cdvTkDnmTQUnaJY,7876
12
- h5yaml-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
13
- h5yaml-0.2.1.dist-info/licenses/LICENSE,sha256=rLarIZOYK5jHuUjMnFbgdI_Tb_4_HAAKSOOIhwiWlE4,11356
14
- h5yaml-0.2.1.dist-info/RECORD,,