legend-pydataobj 1.11.5__tar.gz → 1.11.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/PKG-INFO +4 -2
  2. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/pyproject.toml +2 -1
  3. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/PKG-INFO +4 -2
  4. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/SOURCES.txt +2 -0
  5. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/entry_points.txt +1 -1
  6. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/requires.txt +1 -0
  7. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/_version.py +9 -4
  8. legend_pydataobj-1.11.7/src/lgdo/cli.py +183 -0
  9. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/__init__.py +1 -0
  10. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/utils.py +1 -1
  11. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/vector_of_vectors.py +1 -1
  12. legend_pydataobj-1.11.7/src/lgdo/lh5/concat.py +225 -0
  13. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/table.py +31 -19
  14. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/vectorofvectors.py +1 -1
  15. legend_pydataobj-1.11.5/tests/test_cli.py → legend_pydataobj-1.11.7/tests/lh5/test_concat.py +29 -33
  16. legend_pydataobj-1.11.7/tests/test_cli.py +36 -0
  17. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_table_eval.py +14 -0
  18. legend_pydataobj-1.11.5/src/lgdo/cli.py +0 -328
  19. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/LICENSE +0 -0
  20. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/README.md +0 -0
  21. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/setup.cfg +0 -0
  22. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/dependency_links.txt +0 -0
  23. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/not-zip-safe +0 -0
  24. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/legend_pydataobj.egg-info/top_level.txt +0 -0
  25. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/__init__.py +0 -0
  26. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/__init__.py +0 -0
  27. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/base.py +0 -0
  28. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/generic.py +0 -0
  29. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/radware.py +0 -0
  30. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/utils.py +0 -0
  31. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/compression/varlen.py +0 -0
  32. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lgdo_utils.py +0 -0
  33. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/__init__.py +0 -0
  34. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/__init__.py +0 -0
  35. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/array.py +0 -0
  36. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/composite.py +0 -0
  37. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/encoded.py +0 -0
  38. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/ndarray.py +0 -0
  39. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/read/scalar.py +0 -0
  40. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/write/__init__.py +0 -0
  41. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/write/array.py +0 -0
  42. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/write/composite.py +0 -0
  43. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/write/scalar.py +0 -0
  44. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/_serializers/write/vector_of_vectors.py +0 -0
  45. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/core.py +0 -0
  46. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/datatype.py +0 -0
  47. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/exceptions.py +0 -0
  48. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/iterator.py +0 -0
  49. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/store.py +0 -0
  50. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/tools.py +0 -0
  51. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5/utils.py +0 -0
  52. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/lh5_store.py +0 -0
  53. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/logging.py +0 -0
  54. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/__init__.py +0 -0
  55. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/array.py +0 -0
  56. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/arrayofequalsizedarrays.py +0 -0
  57. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/encoded.py +0 -0
  58. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/fixedsizearray.py +0 -0
  59. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/histogram.py +0 -0
  60. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/lgdo.py +0 -0
  61. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/scalar.py +0 -0
  62. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/struct.py +0 -0
  63. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/vovutils.py +0 -0
  64. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/types/waveformtable.py +0 -0
  65. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/units.py +0 -0
  66. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/src/lgdo/utils.py +0 -0
  67. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/conftest.py +0 -0
  68. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/sigcompress/LDQTA_r117_20200110T105115Z_cal_geds_raw-0.dat +0 -0
  69. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/sigcompress/special-wf-clipped.dat +0 -0
  70. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/test_compression.py +0 -0
  71. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/test_radware_sigcompress.py +0 -0
  72. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/test_str2wfcodec.py +0 -0
  73. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/compression/test_uleb128_zigzag_diff.py +0 -0
  74. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/conftest.py +0 -0
  75. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/conftest.py +0 -0
  76. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_core.py +0 -0
  77. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_exceptions.py +0 -0
  78. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_datatype.py +0 -0
  79. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_iterator.py +0 -0
  80. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_store.py +0 -0
  81. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_tools.py +0 -0
  82. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_utils.py +0 -0
  83. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/lh5/test_lh5_write.py +0 -0
  84. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/test_lgdo_utils.py +0 -0
  85. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_array.py +0 -0
  86. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_arrayofequalsizedarrays.py +0 -0
  87. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_encoded.py +0 -0
  88. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_fixedsizearray.py +0 -0
  89. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_histogram.py +0 -0
  90. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_representations.py +0 -0
  91. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_scalar.py +0 -0
  92. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_struct.py +0 -0
  93. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_table.py +0 -0
  94. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_vectorofvectors.py +0 -0
  95. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_vovutils.py +0 -0
  96. {legend_pydataobj-1.11.5 → legend_pydataobj-1.11.7}/tests/types/test_waveformtable.py +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: legend_pydataobj
3
- Version: 1.11.5
3
+ Version: 1.11.7
4
4
  Summary: LEGEND Python Data Objects
5
5
  Author: The LEGEND Collaboration
6
6
  Maintainer: The LEGEND Collaboration
@@ -725,6 +725,8 @@ Requires-Dist: pre-commit; extra == "test"
725
725
  Requires-Dist: pylegendtestdata; extra == "test"
726
726
  Requires-Dist: pytest>=6.0; extra == "test"
727
727
  Requires-Dist: pytest-cov; extra == "test"
728
+ Requires-Dist: dbetto; extra == "test"
729
+ Dynamic: license-file
728
730
 
729
731
  # legend-pydataobj
730
732
 
@@ -76,11 +76,12 @@ test = [
76
76
  "pylegendtestdata",
77
77
  "pytest>=6.0",
78
78
  "pytest-cov",
79
+ "dbetto",
79
80
  ]
80
81
 
81
82
  [project.scripts]
82
83
  lh5ls = "lgdo.cli:lh5ls"
83
- lh5concat = "lgdo.cli:lh5concat"
84
+ lh5concat = "lgdo.cli:lh5concat_cli"
84
85
 
85
86
  [tool.setuptools]
86
87
  include-package-data = true
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: legend_pydataobj
3
- Version: 1.11.5
3
+ Version: 1.11.7
4
4
  Summary: LEGEND Python Data Objects
5
5
  Author: The LEGEND Collaboration
6
6
  Maintainer: The LEGEND Collaboration
@@ -725,6 +725,8 @@ Requires-Dist: pre-commit; extra == "test"
725
725
  Requires-Dist: pylegendtestdata; extra == "test"
726
726
  Requires-Dist: pytest>=6.0; extra == "test"
727
727
  Requires-Dist: pytest-cov; extra == "test"
728
+ Requires-Dist: dbetto; extra == "test"
729
+ Dynamic: license-file
728
730
 
729
731
  # legend-pydataobj
730
732
 
@@ -23,6 +23,7 @@ src/lgdo/compression/radware.py
23
23
  src/lgdo/compression/utils.py
24
24
  src/lgdo/compression/varlen.py
25
25
  src/lgdo/lh5/__init__.py
26
+ src/lgdo/lh5/concat.py
26
27
  src/lgdo/lh5/core.py
27
28
  src/lgdo/lh5/datatype.py
28
29
  src/lgdo/lh5/exceptions.py
@@ -68,6 +69,7 @@ tests/compression/test_uleb128_zigzag_diff.py
68
69
  tests/compression/sigcompress/LDQTA_r117_20200110T105115Z_cal_geds_raw-0.dat
69
70
  tests/compression/sigcompress/special-wf-clipped.dat
70
71
  tests/lh5/conftest.py
72
+ tests/lh5/test_concat.py
71
73
  tests/lh5/test_core.py
72
74
  tests/lh5/test_exceptions.py
73
75
  tests/lh5/test_lh5_datatype.py
@@ -1,3 +1,3 @@
1
1
  [console_scripts]
2
- lh5concat = lgdo.cli:lh5concat
2
+ lh5concat = lgdo.cli:lh5concat_cli
3
3
  lh5ls = lgdo.cli:lh5ls
@@ -29,3 +29,4 @@ pre-commit
29
29
  pylegendtestdata
30
30
  pytest>=6.0
31
31
  pytest-cov
32
+ dbetto
@@ -1,8 +1,13 @@
1
- # file generated by setuptools_scm
1
+ # file generated by setuptools-scm
2
2
  # don't change, don't track in version control
3
+
4
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
5
+
3
6
  TYPE_CHECKING = False
4
7
  if TYPE_CHECKING:
5
- from typing import Tuple, Union
8
+ from typing import Tuple
9
+ from typing import Union
10
+
6
11
  VERSION_TUPLE = Tuple[Union[int, str], ...]
7
12
  else:
8
13
  VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
12
17
  __version_tuple__: VERSION_TUPLE
13
18
  version_tuple: VERSION_TUPLE
14
19
 
15
- __version__ = version = '1.11.5'
16
- __version_tuple__ = version_tuple = (1, 11, 5)
20
+ __version__ = version = '1.11.7'
21
+ __version_tuple__ = version_tuple = (1, 11, 7)
@@ -0,0 +1,183 @@
1
+ """legend-pydataobj's command line interface utilities."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+ import logging
7
+ import sys
8
+
9
+ from . import __version__, lh5
10
+ from . import logging as lgdogging # eheheh
11
+ from .lh5.concat import lh5concat
12
+
13
+ log = logging.getLogger(__name__)
14
+
15
+
16
+ def lh5ls(args=None):
17
+ """:func:`.lh5.show` command line interface."""
18
+ parser = argparse.ArgumentParser(
19
+ prog="lh5ls", description="Inspect LEGEND HDF5 (LH5) file contents"
20
+ )
21
+
22
+ # global options
23
+ parser.add_argument(
24
+ "--version",
25
+ action="store_true",
26
+ help="""Print legend-pydataobj version and exit""",
27
+ )
28
+ parser.add_argument(
29
+ "--verbose",
30
+ "-v",
31
+ action="store_true",
32
+ help="""Increase the program verbosity""",
33
+ )
34
+ parser.add_argument(
35
+ "--debug",
36
+ action="store_true",
37
+ help="""Increase the program verbosity to maximum""",
38
+ )
39
+
40
+ parser.add_argument(
41
+ "lh5_file",
42
+ help="""Input LH5 file""",
43
+ )
44
+ parser.add_argument("lh5_group", nargs="?", help="""LH5 group.""", default="/")
45
+ parser.add_argument(
46
+ "--attributes", "-a", action="store_true", help="""Print HDF5 attributes too"""
47
+ )
48
+ parser.add_argument(
49
+ "--depth",
50
+ "-d",
51
+ type=int,
52
+ default=None,
53
+ help="""Maximum tree depth of groups to print""",
54
+ )
55
+ parser.add_argument(
56
+ "--detail",
57
+ action="store_true",
58
+ help="""Print details about datasets""",
59
+ )
60
+
61
+ args = parser.parse_args(args)
62
+
63
+ if args.verbose:
64
+ lgdogging.setup(logging.DEBUG)
65
+ elif args.debug:
66
+ lgdogging.setup(logging.DEBUG, logging.root)
67
+ else:
68
+ lgdogging.setup()
69
+
70
+ if args.version:
71
+ print(__version__) # noqa: T201
72
+ sys.exit()
73
+
74
+ lh5.show(
75
+ args.lh5_file,
76
+ args.lh5_group,
77
+ attrs=args.attributes,
78
+ depth=args.depth,
79
+ detail=args.detail,
80
+ )
81
+
82
+
83
+ def lh5concat_cli(args=None):
84
+ """Command line interface for concatenating array-like LGDOs in LH5 files."""
85
+ parser = argparse.ArgumentParser(
86
+ prog="lh5concat",
87
+ description="""
88
+ Concatenate LGDO Arrays, VectorOfVectors and Tables in LH5 files.
89
+
90
+ Examples
91
+ --------
92
+
93
+ Concatenate all eligible objects in file{1,2}.lh5 into concat.lh5:
94
+
95
+ $ lh5concat -o concat.lh5 file1.lh5 file2.lh5
96
+
97
+ Include only the /data/table1 Table:
98
+
99
+ $ lh5concat -o concat.lh5 -i /data/table1/* file1.lh5 file2.lh5
100
+
101
+ Exclude the /data/table1/col1 Table column:
102
+
103
+ $ lh5concat -o concat.lh5 -e /data/table1/col1 file1.lh5 file2.lh5
104
+ """,
105
+ formatter_class=argparse.RawTextHelpFormatter,
106
+ )
107
+
108
+ # global options
109
+ parser.add_argument(
110
+ "--version",
111
+ action="store_true",
112
+ help="""Print legend-pydataobj version and exit""",
113
+ )
114
+ parser.add_argument(
115
+ "--verbose",
116
+ "-v",
117
+ action="store_true",
118
+ help="""Increase the program verbosity""",
119
+ )
120
+ parser.add_argument(
121
+ "--debug",
122
+ action="store_true",
123
+ help="""Increase the program verbosity to maximum""",
124
+ )
125
+
126
+ parser.add_argument(
127
+ "lh5_file",
128
+ nargs="+",
129
+ help="""Input LH5 files""",
130
+ )
131
+ parser.add_argument(
132
+ "--output",
133
+ "-o",
134
+ help="""Output file""",
135
+ default="lh5concat-output.lh5",
136
+ )
137
+ parser.add_argument(
138
+ "--overwrite",
139
+ "-w",
140
+ action="store_true",
141
+ help="""Overwrite output file""",
142
+ )
143
+ parser.add_argument(
144
+ "--include",
145
+ "-i",
146
+ help="""Regular expression (fnmatch style) for object names that should
147
+ be concatenated. To include full tables, you need to explicitly include
148
+ all its columns with e.g. '/path/to/table/*'. The option can be passed
149
+ multiple times to provide a list of patterns.
150
+ """,
151
+ action="append",
152
+ default=None,
153
+ )
154
+ parser.add_argument(
155
+ "--exclude",
156
+ "-e",
157
+ help="""List of object names that should be excluded. Takes priority
158
+ over --include. See --include help for more details.
159
+ """,
160
+ action="append",
161
+ default=None,
162
+ )
163
+
164
+ args = parser.parse_args(args)
165
+
166
+ if args.verbose:
167
+ lgdogging.setup(logging.INFO, log)
168
+ elif args.debug:
169
+ lgdogging.setup(logging.DEBUG, logging.root)
170
+ else:
171
+ lgdogging.setup()
172
+
173
+ if args.version:
174
+ print(__version__) # noqa: T201
175
+ sys.exit()
176
+
177
+ lh5concat(
178
+ lh5_files=args.lh5_file,
179
+ overwrite=args.overwrite,
180
+ output=args.output,
181
+ include_list=args.include,
182
+ exclude_list=args.exclude,
183
+ )
@@ -18,6 +18,7 @@ __all__ = [
18
18
  "DEFAULT_HDF5_SETTINGS",
19
19
  "LH5Iterator",
20
20
  "LH5Store",
21
+ "concat",
21
22
  "load_dfs",
22
23
  "load_nda",
23
24
  "ls",
@@ -34,7 +34,7 @@ def build_field_mask(field_mask: Mapping[str, bool] | Collection[str]) -> defaul
34
34
  default = not field_mask[next(iter(field_mask.keys()))]
35
35
  return defaultdict(lambda: default, field_mask)
36
36
  if isinstance(field_mask, (list, tuple, set)):
37
- return defaultdict(bool, {field: True for field in field_mask})
37
+ return defaultdict(bool, dict.fromkeys(field_mask, True))
38
38
  if isinstance(field_mask, defaultdict):
39
39
  return field_mask
40
40
  msg = "bad field_mask type"
@@ -123,7 +123,7 @@ def _h5_read_vector_of_vectors(
123
123
  )
124
124
  msg = (
125
125
  f"cumulative_length non-increasing between entries "
126
- f"{start_row} and {start_row+n_rows_read}"
126
+ f"{start_row} and {start_row + n_rows_read}"
127
127
  )
128
128
  raise LH5DecodeError(msg, fname, oname)
129
129
 
@@ -0,0 +1,225 @@
1
+ from __future__ import annotations
2
+
3
+ import fnmatch
4
+ import logging
5
+
6
+ from lgdo.lh5 import LH5Iterator
7
+
8
+ from .. import Array, Scalar, Struct, Table, VectorOfVectors, lh5
9
+
10
+ log = logging.getLogger(__name__)
11
+
12
+
13
+ def _get_obj_list(
14
+ lh5_files: list, include_list: list | None = None, exclude_list: list | None = None
15
+ ) -> list[str]:
16
+ """Extract a list of lh5 objects to concatenate.
17
+
18
+ Parameters
19
+ ----------
20
+ lh5_files
21
+ list of input files to concatenate.
22
+ include_list
23
+ patterns for tables to include.
24
+ exclude_list
25
+ patterns for tables to exclude.
26
+
27
+ """
28
+ file0 = lh5_files[0]
29
+ obj_list_full = set(lh5.ls(file0, recursive=True))
30
+
31
+ # let's remove objects with nested LGDOs inside
32
+ to_remove = set()
33
+ for name in obj_list_full:
34
+ if len(fnmatch.filter(obj_list_full, f"{name}/*")) > 1:
35
+ to_remove.add(name)
36
+ obj_list_full -= to_remove
37
+
38
+ obj_list = set()
39
+ # now first remove excluded stuff
40
+ if exclude_list is not None:
41
+ for exc in exclude_list:
42
+ obj_list_full -= set(fnmatch.filter(obj_list_full, exc.strip("/")))
43
+
44
+ # then make list of included, based on latest list
45
+ if include_list is not None:
46
+ for inc in include_list:
47
+ obj_list |= set(fnmatch.filter(obj_list_full, inc.strip("/")))
48
+ else:
49
+ obj_list = obj_list_full
50
+
51
+ # sort
52
+ return sorted(obj_list)
53
+
54
+
55
+ def _get_lgdos(file, obj_list):
56
+ """Get name of LGDO objects."""
57
+
58
+ store = lh5.LH5Store()
59
+ h5f0 = store.gimme_file(file)
60
+
61
+ lgdos = []
62
+ lgdo_structs = {}
63
+
64
+ # loop over object list in the first file
65
+ for name in obj_list:
66
+ # now loop over groups starting from root
67
+ current = ""
68
+ for item in name.split("/"):
69
+ current = f"{current}/{item}".strip("/")
70
+
71
+ if current in lgdos:
72
+ break
73
+
74
+ # not even an LGDO (i.e. a plain HDF5 group)!
75
+ if "datatype" not in h5f0[current].attrs:
76
+ continue
77
+
78
+ # read as little as possible
79
+ obj, _ = store.read(current, h5f0, n_rows=1)
80
+ if isinstance(obj, (Table, Array, VectorOfVectors)):
81
+ lgdos.append(current)
82
+
83
+ elif isinstance(obj, Struct):
84
+ # structs might be used in a "group-like" fashion (i.e. they might only
85
+ # contain array-like objects).
86
+ # note: handle after handling tables, as tables also satisfy this check.
87
+ lgdo_structs[current] = obj.attrs["datatype"]
88
+ continue
89
+
90
+ elif isinstance(obj, Scalar):
91
+ msg = f"cannot concat scalar field {current}"
92
+ log.warning(msg)
93
+
94
+ break
95
+
96
+ msg = f"first-level, array-like objects: {lgdos}"
97
+ log.info(msg)
98
+
99
+ msg = f"nested structs: {lgdo_structs}"
100
+ log.info(msg)
101
+
102
+ h5f0.close()
103
+
104
+ if lgdos == []:
105
+ msg = "did not find any field to concatenate, exit"
106
+ raise RuntimeError(msg)
107
+
108
+ return lgdos, lgdo_structs
109
+
110
+
111
+ def _inplace_table_filter(name, table, obj_list):
112
+ """filter objects nested in this LGDO"""
113
+ skm = fnmatch.filter(obj_list, f"{name}/*")
114
+ kept = {it.removeprefix(name).strip("/").split("/")[0] for it in skm}
115
+
116
+ # now remove fields
117
+ for k in list(table.keys()):
118
+ if k not in kept:
119
+ table.remove_column(k)
120
+
121
+ msg = f"fields left in table '{name}': {table.keys()}"
122
+ log.debug(msg)
123
+
124
+ # recurse!
125
+ for k2, v2 in table.items():
126
+ if not isinstance(v2, Table):
127
+ continue
128
+
129
+ _inplace_table_filter(f"{name}/{k2}", v2, obj_list)
130
+
131
+
132
+ def _remove_nested_fields(lgdos: dict, obj_list: list):
133
+ """Remove (nested) table fields based on obj_list."""
134
+
135
+ for key, val in lgdos.items():
136
+ if not isinstance(val, Table):
137
+ continue
138
+
139
+ _inplace_table_filter(key, val, obj_list)
140
+
141
+
142
+ def _slice(obj, n_rows):
143
+ ak_obj = obj.view_as("ak")[:n_rows]
144
+ obj_type = type(obj)
145
+ return obj_type(ak_obj)
146
+
147
+
148
+ def lh5concat(
149
+ lh5_files: list,
150
+ output: str,
151
+ overwrite: bool = False,
152
+ *,
153
+ include_list: list | None = None,
154
+ exclude_list: list | None = None,
155
+ ) -> None:
156
+ """Concatenate LGDO Arrays, VectorOfVectors and Tables in LH5 files.
157
+
158
+ Parameters
159
+ ----------
160
+ lh5_files
161
+ list of input files to concatenate.
162
+ output
163
+ path to the output file
164
+ include_list
165
+ patterns for tables to include.
166
+ exclude_list
167
+ patterns for tables to exclude.
168
+ """
169
+
170
+ if len(lh5_files) < 2:
171
+ msg = "you must provide at least two input files"
172
+ raise RuntimeError(msg)
173
+
174
+ # determine list of objects by recursively ls'ing first file
175
+ obj_list = _get_obj_list(
176
+ lh5_files, include_list=include_list, exclude_list=exclude_list
177
+ )
178
+
179
+ msg = f"objects matching include patterns {include_list} in {lh5_files[0]}: {obj_list}"
180
+ log.info(msg)
181
+
182
+ lgdos, lgdo_structs = _get_lgdos(lh5_files[0], obj_list)
183
+ first_done = False
184
+ store = lh5.LH5Store()
185
+
186
+ # loop over lgdo objects
187
+ for lgdo in lgdos:
188
+ # iterate over the files
189
+ for lh5_obj, _, n_rows in LH5Iterator(lh5_files, lgdo):
190
+ data = {lgdo: _slice(lh5_obj, n_rows)}
191
+
192
+ # remove the nested fields
193
+ _remove_nested_fields(data, obj_list)
194
+
195
+ if first_done is False:
196
+ msg = f"creating output file {output}"
197
+ log.info(msg)
198
+
199
+ store.write(
200
+ data[lgdo],
201
+ lgdo,
202
+ output,
203
+ wo_mode="overwrite_file"
204
+ if (overwrite and not first_done)
205
+ else "write_safe",
206
+ )
207
+ first_done = True
208
+
209
+ else:
210
+ msg = f"appending to {output}"
211
+ log.info(msg)
212
+
213
+ if isinstance(data[lgdo], Table):
214
+ _inplace_table_filter(lgdo, data[lgdo], obj_list)
215
+
216
+ store.write(data[lgdo], lgdo, output, wo_mode="append")
217
+
218
+ if lgdo_structs != {}:
219
+ output_file = store.gimme_file(output, mode="a")
220
+ for struct, struct_dtype in lgdo_structs.items():
221
+ msg = f"reset datatype of struct {struct} to {struct_dtype}"
222
+ log.debug(msg)
223
+
224
+ output_file[struct].attrs["datatype"] = struct_dtype
225
+ output_file.close()
@@ -351,31 +351,39 @@ class Table(Struct):
351
351
  msg = f"evaluating {expr!r} with locals={(self_unwrap | parameters)} and {has_ak=}"
352
352
  log.debug(msg)
353
353
 
354
- # use numexpr if we are only dealing with numpy data types (and no global dictionary)
355
- if not has_ak and modules is None:
356
- out_data = ne.evaluate(
357
- expr,
358
- local_dict=(self_unwrap | parameters),
359
- )
360
-
361
- msg = f"...the result is {out_data!r}"
362
- log.debug(msg)
363
-
364
- # need to convert back to LGDO
365
- # np.evaluate should always return a numpy thing?
366
- if out_data.ndim == 0:
367
- return Scalar(out_data.item())
368
- if out_data.ndim == 1:
369
- return Array(out_data)
370
- if out_data.ndim == 2:
371
- return ArrayOfEqualSizedArrays(nda=out_data)
354
+ def _make_lgdo(data):
355
+ if data.ndim == 0:
356
+ return Scalar(data.item())
357
+ if data.ndim == 1:
358
+ return Array(data)
359
+ if data.ndim == 2:
360
+ return ArrayOfEqualSizedArrays(nda=data)
372
361
 
373
362
  msg = (
374
- f"evaluation resulted in {out_data.ndim}-dimensional data, "
363
+ f"evaluation resulted in {data.ndim}-dimensional data, "
375
364
  "I don't know which LGDO this corresponds to"
376
365
  )
377
366
  raise RuntimeError(msg)
378
367
 
368
+ # use numexpr if we are only dealing with numpy data types (and no global dictionary)
369
+ if not has_ak and modules is None:
370
+ try:
371
+ out_data = ne.evaluate(
372
+ expr,
373
+ local_dict=(self_unwrap | parameters),
374
+ )
375
+
376
+ msg = f"...the result is {out_data!r}"
377
+ log.debug(msg)
378
+
379
+ # need to convert back to LGDO
380
+ # np.evaluate should always return a numpy thing?
381
+ return _make_lgdo(out_data)
382
+
383
+ except Exception:
384
+ msg = f"Warning {expr} could not be evaluated with numexpr probably due to some not allowed characters, trying with eval()."
385
+ log.debug(msg)
386
+
379
387
  # resort to good ol' eval()
380
388
  globs = {"ak": ak, "np": np}
381
389
  if modules is not None:
@@ -392,6 +400,10 @@ class Table(Struct):
392
400
  return Array(out_data.to_numpy())
393
401
  return VectorOfVectors(out_data)
394
402
 
403
+ # modules can still produce numpy array
404
+ if isinstance(out_data, np.ndarray):
405
+ return _make_lgdo(out_data)
406
+
395
407
  if np.isscalar(out_data):
396
408
  return Scalar(out_data)
397
409
 
@@ -138,7 +138,7 @@ class VectorOfVectors(LGDO):
138
138
  # FIXME: have to copy the buffers, otherwise self will not own the
139
139
  # data and self.resize() will fail. Is it possible to avoid this?
140
140
  flattened_data = np.copy(
141
- container.pop(f"node{data.ndim-1}-data", np.empty(0, dtype=dtype))
141
+ container.pop(f"node{data.ndim - 1}-data", np.empty(0, dtype=dtype))
142
142
  )
143
143
 
144
144
  # if user-provided dtype is different than dtype from Awkward, cast