legend_dataflow_scripts-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. legend_dataflow_scripts-0.1.0.dist-info/METADATA +57 -0
  2. legend_dataflow_scripts-0.1.0.dist-info/RECORD +36 -0
  3. legend_dataflow_scripts-0.1.0.dist-info/WHEEL +5 -0
  4. legend_dataflow_scripts-0.1.0.dist-info/entry_points.txt +18 -0
  5. legend_dataflow_scripts-0.1.0.dist-info/top_level.txt +1 -0
  6. legenddataflowscripts/__init__.py +17 -0
  7. legenddataflowscripts/_version.py +21 -0
  8. legenddataflowscripts/par/__init__.py +0 -0
  9. legenddataflowscripts/par/geds/__init__.py +0 -0
  10. legenddataflowscripts/par/geds/dsp/__init__.py +0 -0
  11. legenddataflowscripts/par/geds/dsp/dplms.py +145 -0
  12. legenddataflowscripts/par/geds/dsp/eopt.py +398 -0
  13. legenddataflowscripts/par/geds/dsp/evtsel.py +400 -0
  14. legenddataflowscripts/par/geds/dsp/nopt.py +120 -0
  15. legenddataflowscripts/par/geds/dsp/pz.py +217 -0
  16. legenddataflowscripts/par/geds/dsp/svm.py +28 -0
  17. legenddataflowscripts/par/geds/dsp/svm_build.py +69 -0
  18. legenddataflowscripts/par/geds/hit/__init__.py +0 -0
  19. legenddataflowscripts/par/geds/hit/aoe.py +245 -0
  20. legenddataflowscripts/par/geds/hit/ecal.py +778 -0
  21. legenddataflowscripts/par/geds/hit/lq.py +213 -0
  22. legenddataflowscripts/par/geds/hit/qc.py +326 -0
  23. legenddataflowscripts/tier/__init__.py +0 -0
  24. legenddataflowscripts/tier/dsp.py +263 -0
  25. legenddataflowscripts/tier/hit.py +148 -0
  26. legenddataflowscripts/utils/__init__.py +15 -0
  27. legenddataflowscripts/utils/alias_table.py +28 -0
  28. legenddataflowscripts/utils/cfgtools.py +14 -0
  29. legenddataflowscripts/utils/convert_np.py +31 -0
  30. legenddataflowscripts/utils/log.py +77 -0
  31. legenddataflowscripts/utils/pulser_removal.py +16 -0
  32. legenddataflowscripts/workflow/__init__.py +20 -0
  33. legenddataflowscripts/workflow/execenv.py +327 -0
  34. legenddataflowscripts/workflow/filedb.py +107 -0
  35. legenddataflowscripts/workflow/pre_compile_catalog.py +24 -0
  36. legenddataflowscripts/workflow/utils.py +113 -0
legenddataflowscripts/tier/dsp.py
@@ -0,0 +1,263 @@
+ from __future__ import annotations
+
+ import argparse
+ import json
+ import time
+ import warnings
+ from multiprocessing import Pool
+ from pathlib import Path
+
+ import numpy as np
+ from dbetto import TextDB
+ from dbetto.catalog import Props
+ from dspeed import build_dsp
+ from lgdo import lh5
+
+ from ..utils import alias_table, build_log
+
+ warnings.filterwarnings(action="ignore", category=RuntimeWarning)
+
+
+ def _replace_list_with_array(dic):
+     # recursively convert any list in the (nested) dictionary to a numpy array
+     for key, value in dic.items():
+         if isinstance(value, dict):
+             dic[key] = _replace_list_with_array(value)
+         elif isinstance(value, list):
+             dic[key] = np.array(value, dtype="float32")
+     return dic
+
+
+ def build_dsp_wrapper(kwargs):
+     # thin wrapper so build_dsp() can be dispatched through Pool.map()
+     build_dsp(**kwargs)
+
+
+ def build_tier_dsp() -> None:
+     # CLI config
+     argparser = argparse.ArgumentParser()
+     argparser.add_argument(
+         "--configs", help="path to dataflow config files", required=True
+     )
+     argparser.add_argument(
+         "--table-map",
+         help="mapping from channel to table name",
+         required=False,
+         type=str,
+     )
+     argparser.add_argument("--log", help="log file name")
+     argparser.add_argument("--alias-table", help="Alias table", type=str, default=None)
+
+     argparser.add_argument(
+         "--n-processes", help="number of processes", default=1, type=int
+     )
+
+     argparser.add_argument("--datatype", help="datatype", required=True)
+     argparser.add_argument("--timestamp", help="timestamp", required=True)
+     argparser.add_argument("--tier", help="tier", required=True)
+
+     argparser.add_argument(
+         "--pars-file", help="database file for HPGes", nargs="*", default=[]
+     )
+     argparser.add_argument("--input", help="input file")
+
+     argparser.add_argument("--output", help="output file")
+     args = argparser.parse_args()
+
+     # set number of threads to use
+     # set_num_threads(1)
+
+     table_map = json.loads(args.table_map) if args.table_map is not None else None
+
+     df_configs = TextDB(args.configs, lazy=True)
+     config_dict = df_configs.on(args.timestamp, system=args.datatype).snakemake_rules
+     config_dict = config_dict[f"tier_{args.tier}"]
+
+     log = build_log(config_dict, args.log, fallback=__name__)
+
+     settings_dict = config_dict.options.get("settings", {})
+     if isinstance(settings_dict, str):
+         settings_dict = Props.read_from(settings_dict)
+
+     chan_cfg_map = config_dict.inputs.processing_chain
+
+     # if the dictionary only contains one __default__ key, build the channel
+     # list from the (processable) channel map and assign the default config
+     if list(chan_cfg_map.keys()) == ["__default__"]:
+         chan_cfg_map = dict.fromkeys(table_map, chan_cfg_map.__default__)
+
+     # now construct the dictionary of DSP configs for build_dsp()
+     dsp_cfg_tbl_dict = {}
+     for chan, file in chan_cfg_map.items():
+         if table_map is None:
+             input_tbl_name = chan + "/raw"
+         elif chan in table_map:
+             input_tbl_name = table_map[chan]
+         else:
+             continue
+
+         # check that the raw table actually exists in the input file
+         if len(lh5.ls(args.input, input_tbl_name)) > 0:
+             dsp_cfg_tbl_dict[input_tbl_name] = Props.read_from(file)
+         else:
+             msg = f"table {input_tbl_name} not found in {args.input}, skipping"
+             log.info(msg)
+
+     if len(dsp_cfg_tbl_dict) == 0:
+         msg = f"could not find any of the requested channels in {args.input}"
+         raise RuntimeError(msg)
+
+     # par files
+     db_files = [
+         par_file
+         for par_file in args.pars_file
+         if Path(par_file).suffix in (".json", ".yaml", ".yml")
+     ]
+
+     database_dict = _replace_list_with_array(
+         Props.read_from(db_files, subst_pathvar=True)
+     )
+     database_dict = {
+         (table_map[chan].split("/")[0] if table_map and chan in table_map else chan): dic
+         for chan, dic in database_dict.items()
+     }
+     log.info("loaded database files")
+
+     Path(args.output).parent.mkdir(parents=True, exist_ok=True)
+
+     start = time.time()
+     if args.n_processes > 1:
+         # sort by table lengths, longest tables first
+         dsp_cfg_tbl_dict = dict(
+             sorted(
+                 dsp_cfg_tbl_dict.items(),
+                 key=lambda item: lh5.read_n_rows(item[0], args.input),
+                 reverse=True,
+             )
+         )
+
+         # deal the tables out round-robin so the worker loads stay balanced
+         chan_configs = [{} for _ in range(args.n_processes)]
+         for i, key in enumerate(dsp_cfg_tbl_dict):
+             chan_configs[i % args.n_processes][key] = dsp_cfg_tbl_dict[key]
+
+         dsp_files = [
+             f"{args.output}{i}" if i > 0 else args.output
+             for i in range(args.n_processes)
+         ]
+
+         # process arguments for each worker
+         process_kwargs_list = []
+         for i, config in enumerate(chan_configs):
+             kwargs = {
+                 "f_raw": args.input,
+                 "f_dsp": dsp_files[i],
+                 "chan_config": config,
+                 "database": database_dict,
+                 "write_mode": "r",
+                 "buffer_len": settings_dict.get("buffer_len", 1000),
+                 "block_width": settings_dict.get("block_width", 16),
+             }
+             process_kwargs_list.append(kwargs)
+
+         # run one build_dsp() call per worker in a multiprocessing pool
+         with Pool(processes=args.n_processes) as pool:
+             pool.map(build_dsp_wrapper, process_kwargs_list)
+
+         # merge the per-worker DSP files into the final output
+         log.info("Merging DSPs")
+         for i, file in enumerate(dsp_files[1:]):
+             chans = chan_configs[i + 1]
+             for chan in chans:
+                 tbl = lh5.read(chan.replace("raw", "dsp"), file)
+                 lh5.write(tbl, chan.replace("raw", "dsp"), args.output, wo_mode="a")
+             Path(file).unlink()
+
+     else:
+         build_dsp(
+             args.input,
+             args.output,
+             database=database_dict,
+             chan_config=dsp_cfg_tbl_dict,
+             write_mode="r",
+             buffer_len=settings_dict.get("buffer_len", 1000),
+             block_width=settings_dict.get("block_width", 16),
+         )
+
+     msg = f"Finished building DSP in {time.time() - start:.2f} seconds"
+     log.info(msg)
+     if args.alias_table is not None:
+         log.info("Creating alias table")
+         alias_table(args.output, args.alias_table)
+
+
+ def build_tier_dsp_single_channel() -> None:
+     # CLI config
+     argparser = argparse.ArgumentParser()
+     argparser.add_argument(
+         "--configs", help="path to dataflow config files", required=True
+     )
+     argparser.add_argument(
+         "--channel",
+         help="channel to process",
+         required=False,
+         type=str,
+     )
+     argparser.add_argument("--log", help="log file name")
+
+     argparser.add_argument("--datatype", help="datatype", required=True)
+     argparser.add_argument("--timestamp", help="timestamp", required=True)
+     argparser.add_argument("--tier", help="tier", required=True)
+
+     argparser.add_argument(
+         "--pars-file", help="database file for HPGes", nargs="*", default=[]
+     )
+     argparser.add_argument("--input", help="input file")
+
+     argparser.add_argument("--output", help="output file")
+     args = argparser.parse_args()
+
+     df_configs = TextDB(args.configs, lazy=True)
+     config_dict = df_configs.on(args.timestamp, system=args.datatype).snakemake_rules
+     config_dict = config_dict[f"tier_{args.tier}"]
+     config_dict = (
+         config_dict[args.channel]
+         if args.channel is not None and args.channel in config_dict
+         else config_dict
+     )
+
+     log = build_log(config_dict, args.log, fallback=__name__)
+
+     settings_dict = config_dict.options.get("settings", {})
+     if isinstance(settings_dict, str):
+         settings_dict = Props.read_from(settings_dict)
+
+     proc_chain = config_dict.inputs.processing_chain
+
+     # par files
+     db_files = [
+         par_file
+         for par_file in args.pars_file
+         if Path(par_file).suffix in (".json", ".yaml", ".yml")
+     ]
+
+     database_dict = _replace_list_with_array(
+         Props.read_from(db_files, subst_pathvar=True)
+     )
+     database_dict = (
+         database_dict[args.channel]
+         if args.channel is not None and args.channel in database_dict
+         else database_dict
+     )
+
+     Path(args.output).parent.mkdir(parents=True, exist_ok=True)
+
+     start = time.time()
+
+     build_dsp(
+         args.input,
+         args.output,
+         proc_chain,
+         database=database_dict,
+         write_mode="r",
+         buffer_len=settings_dict.get("buffer_len", 1000),
+         block_width=settings_dict.get("block_width", 16),
+     )
+     msg = f"Finished building DSP in {time.time() - start:.2f} seconds"
+     log.info(msg)
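Note (editorial, not part of the package): the parallel branch above first sorts the tables by row count and then deals them out round-robin, so each worker receives a roughly balanced share of the total rows. A minimal self-contained sketch of that scheme, with made-up table names and row counts standing in for lh5.read_n_rows() results:

    # hypothetical row counts per raw table
    row_counts = {"ch0/raw": 5000, "ch1/raw": 200, "ch2/raw": 3000, "ch3/raw": 100}
    n_processes = 2

    # longest tables first, then deal them out one worker at a time
    ordered = sorted(row_counts, key=row_counts.get, reverse=True)
    chan_configs = [{} for _ in range(n_processes)]
    for i, tbl in enumerate(ordered):
        chan_configs[i % n_processes][tbl] = f"dsp-config-for-{tbl}"

    print(chan_configs)
    # [{'ch0/raw': ..., 'ch1/raw': ...}, {'ch2/raw': ..., 'ch3/raw': ...}]

Here worker 0 ends up with 5200 rows and worker 1 with 3100; without the sort, insertion order could have put the two largest tables (8000 rows) on the same worker.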
legenddataflowscripts/tier/hit.py
@@ -0,0 +1,148 @@
+ from __future__ import annotations
+
+ import argparse
+ import json
+ import time
+ from pathlib import Path
+
+ from dbetto.catalog import Props
+ from legendmeta import TextDB
+ from lgdo import lh5
+ from pygama.hit.build_hit import build_hit
+
+ from ..utils import alias_table, build_log
+
+
+ def build_tier_hit() -> None:
+     argparser = argparse.ArgumentParser()
+     argparser.add_argument("--input")
+     argparser.add_argument("--pars-file", nargs="*")
+
+     argparser.add_argument("--configs", required=True)
+     argparser.add_argument("--table-map", required=False, type=str)
+     argparser.add_argument("--log")
+     argparser.add_argument("--alias-table", help="Alias table", type=str, default=None)
+
+     argparser.add_argument("--datatype", required=True)
+     argparser.add_argument("--timestamp", required=True)
+     argparser.add_argument("--tier", required=True)
+
+     argparser.add_argument("--output")
+     args = argparser.parse_args()
+
+     table_map = json.loads(args.table_map) if args.table_map is not None else None
+
+     df_config = (
+         TextDB(args.configs, lazy=True)
+         .on(args.timestamp, system=args.datatype)
+         .snakemake_rules[f"tier_{args.tier}"]
+     )
+     log = build_log(df_config, args.log, fallback=__name__)
+     log.info("initializing")
+
+     settings_dict = df_config.options.get("settings", {})
+
+     if isinstance(settings_dict, str):
+         settings_dict = Props.read_from(settings_dict)
+
+     # mapping channel -> hit config file
+     chan_cfg_map = df_config.inputs.hit_config
+
+     log.info("building the build_hit config")
+     # if the mapping only contains one __default__ key, build the channel
+     # list from the (processable) channel map and assign the default config
+     if list(chan_cfg_map.keys()) == ["__default__"]:
+         chan_cfg_map = dict.fromkeys(table_map, chan_cfg_map.__default__)
+
+     # now construct the dictionary of hit configs for build_hit()
+     channel_dict = {}
+     pars_dict = {ch: chd["pars"] for ch, chd in Props.read_from(args.pars_file).items()}
+     for chan, file in chan_cfg_map.items():
+         hit_cfg = Props.read_from(file)
+
+         # get pars (overriding the hit config)
+         Props.add_to(hit_cfg, pars_dict.get(chan, {}).copy())
+
+         if table_map is None:
+             input_tbl_name = chan + "/dsp"
+         elif chan in table_map:
+             input_tbl_name = table_map[chan]
+         else:
+             continue
+
+         # check that the dsp table actually exists in the input file
+         if len(lh5.ls(args.input, input_tbl_name)) > 0:
+             channel_dict[input_tbl_name] = hit_cfg
+         else:
+             msg = f"table {input_tbl_name} not found in {args.input}, skipping"
+             log.warning(msg)
+
+     log.info("running build_hit()...")
+     start = time.time()
+     Path(args.output).parent.mkdir(parents=True, exist_ok=True)
+     build_hit(args.input, lh5_tables_config=channel_dict, outfile=args.output)
+     msg = f"Hit built in {time.time() - start:.2f} seconds"
+     log.info(msg)
+     if args.alias_table is not None:
+         log.info("Creating alias table")
+         alias_table(args.output, args.alias_table)
+
+
+ def build_tier_hit_single_channel() -> None:
+     argparser = argparse.ArgumentParser()
+     argparser.add_argument("--input")
+     argparser.add_argument("--pars-file", nargs="*")
+
+     argparser.add_argument("--configs", required=True)
+     argparser.add_argument("--log")
+
+     argparser.add_argument(
+         "--channel",
+         help="channel to process",
+         required=False,
+         type=str,
+     )
+     argparser.add_argument("--datatype", required=True)
+     argparser.add_argument("--timestamp", required=True)
+     argparser.add_argument("--tier", required=True)
+
+     argparser.add_argument("--output")
+     args = argparser.parse_args()
+
+     df_config = (
+         TextDB(args.configs, lazy=True)
+         .on(args.timestamp, system=args.datatype)
+         .snakemake_rules[f"tier_{args.tier}"]
+     )
+     log = build_log(df_config, args.log, fallback=__name__)
+     log.info("initializing")
+
+     settings_dict = df_config.options.get("settings", {})
+
+     if isinstance(settings_dict, str):
+         settings_dict = Props.read_from(settings_dict)
+
+     # mapping channel -> hit config file
+     chan_cfg_map = df_config.inputs.hit_config
+     chan_cfg_map = (
+         chan_cfg_map[args.channel]
+         if args.channel is not None and args.channel in chan_cfg_map
+         else chan_cfg_map
+     )
+
+     # now construct the hit config for build_hit()
+     pars_dict = Props.read_from(args.pars_file)
+     pars_dict = (
+         pars_dict[args.channel]
+         if args.channel is not None and args.channel in pars_dict
+         else pars_dict
+     )
+
+     hit_cfg = Props.read_from(chan_cfg_map)
+     Props.add_to(hit_cfg, pars_dict.copy())
+
+     log.info("running build_hit()...")
+     start = time.time()
+     Path(args.output).parent.mkdir(parents=True, exist_ok=True)
+     build_hit(args.input, hit_config=hit_cfg, outfile=args.output)
+     msg = f"Hit built in {time.time() - start:.2f} seconds"
+     log.info(msg)
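Note (editorial): both tier builders expand a lone __default__ entry into one entry per mapped channel via dict.fromkeys(). A sketch with plain dicts and hypothetical channel names (in the package, chan_cfg_map is a dbetto mapping, so the default is accessed as chan_cfg_map.__default__):

    table_map = {"ch1027200": "ch1027200/dsp", "ch1027201": "ch1027201/dsp"}
    chan_cfg_map = {"__default__": "hit-config.yaml"}

    if list(chan_cfg_map.keys()) == ["__default__"]:
        chan_cfg_map = dict.fromkeys(table_map, chan_cfg_map["__default__"])

    # {'ch1027200': 'hit-config.yaml', 'ch1027201': 'hit-config.yaml'}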
legenddataflowscripts/utils/__init__.py
@@ -0,0 +1,15 @@
+ from __future__ import annotations
+
+ from .alias_table import alias_table
+ from .cfgtools import get_channel_config
+ from .convert_np import convert_dict_np_to_float
+ from .log import build_log
+ from .pulser_removal import get_pulser_mask
+
+ __all__ = [
+     "alias_table",
+     "build_log",
+     "convert_dict_np_to_float",
+     "get_channel_config",
+     "get_pulser_mask",
+ ]
legenddataflowscripts/utils/alias_table.py
@@ -0,0 +1,28 @@
+ from __future__ import annotations
+
+ import json
+
+ import h5py
+
+
+ def alias_table(file, mapping):
+     """
+     Create aliases for the tables in the given file.
+
+     Args:
+         file (str): Path to the input file.
+         mapping (dict or str): Mapping from current table name to alias name
+             (or list of alias names); a string is parsed as JSON.
+     """
+     if isinstance(mapping, str):
+         mapping = json.loads(mapping)
+     with h5py.File(file, "a") as f:
+         for raw_id, alias in mapping.items():
+             if raw_id in f:
+                 if isinstance(alias, list | tuple):
+                     for a in alias:
+                         f[a] = f[raw_id]
+                 else:
+                     f[alias] = f[raw_id]
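Note (editorial): because the aliases are created with f[a] = f[raw_id], they are HDF5 hard links: additional names for the same underlying data, not copies. A usage sketch with a hypothetical file and mapping:

    # alias raw channel ids to detector names; "ch1027200" must already
    # exist as a group or dataset in the file
    alias_table(
        "hit-file.lh5",
        {"ch1027200": "det001/hit", "ch1027201": ["det002/hit", "det002b/hit"]},
    )

As done by the tier scripts above, the mapping may also be passed as a JSON string.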
legenddataflowscripts/utils/cfgtools.py
@@ -0,0 +1,14 @@
+ from __future__ import annotations
+
+ from collections.abc import Mapping
+
+
+ def get_channel_config(
+     mapping: Mapping, channel: str, default_key: str = "__default__"
+ ):
+     """Get channel key from mapping with default.
+
+     Returns the value at key `channel` if present, otherwise the value at
+     `default_key`.
+     """
+     return mapping.get(channel, mapping[default_key])
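Behavior sketch (hypothetical mapping): an explicit per-channel entry wins over the default, and a mapping without a `__default__` key raises KeyError for unknown channels:

    cfg = {"__default__": "default.yaml", "ch1": "special.yaml"}
    get_channel_config(cfg, "ch1")  # -> "special.yaml"
    get_channel_config(cfg, "ch2")  # -> "default.yaml"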
legenddataflowscripts/utils/convert_np.py
@@ -0,0 +1,31 @@
+ from __future__ import annotations
+
+ from collections.abc import Mapping, Sequence
+
+ import numpy as np
+
+
+ def convert_dict_np_to_float(dic: dict) -> dict:
+     """
+     Convert all numpy floats in a dictionary to Python floats.
+
+     Parameters
+     ----------
+     dic : dict
+         The dictionary to convert.
+
+     Returns
+     -------
+     dict
+         The dictionary with all numpy floats converted to Python floats.
+     """
+     for key, value in dic.items():
+         if isinstance(value, Mapping):
+             convert_dict_np_to_float(value)
+         elif isinstance(value, np.float32 | np.float64):
+             dic[key] = float(value)
+         # strings are Sequences too, but must not be iterated element-wise
+         elif isinstance(value, Sequence) and not isinstance(value, str):
+             dic[key] = [
+                 float(x) if isinstance(x, np.float32 | np.float64) else x for x in value
+             ]
+     return dic
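A quick demonstration: the function mutates nested mappings in place, and plain strings pass through untouched:

    import numpy as np

    d = {
        "a": np.float32(1.5),
        "b": {"c": np.float64(2.5)},
        "d": [np.float32(3.5), "keep"],
    }
    convert_dict_np_to_float(d)
    # {'a': 1.5, 'b': {'c': 2.5}, 'd': [3.5, 'keep']}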
legenddataflowscripts/utils/log.py
@@ -0,0 +1,77 @@
+ from __future__ import annotations
+
+ import logging
+ import sys
+ import traceback
+ from logging.config import dictConfig
+ from pathlib import Path
+
+ from dbetto import Props
+
+
+ class StreamToLogger:
+     """File-like stream object that redirects writes to a logger instance."""
+
+     def __init__(self, logger, log_level=logging.ERROR):
+         self.logger = logger
+         self.log_level = log_level
+         self.linebuf = ""
+
+     def write(self, buf):
+         for line in buf.rstrip().splitlines():
+             self.logger.log(self.log_level, line.rstrip())
+
+     def flush(self):
+         pass
+
+
+ def build_log(
+     config_dict: dict, log_file: str | None = None, fallback: str = "prod"
+ ) -> logging.Logger:
+     """Build a logger from a configuration dictionary.
+
+     If a log file is provided, the logger will write to that file.
+
+     Parameters
+     ----------
+     config_dict
+         A dictionary containing the logging configuration.
+     log_file
+         The path to the log file.
+     fallback
+         Name of the logger to use if no logging configuration is present.
+     """
+     if "logging" in config_dict["options"]:
+         log_config = config_dict["options"]["logging"]
+         log_config = Props.read_from(log_config)
+
+         if log_file is not None:
+             Path(log_file).parent.mkdir(parents=True, exist_ok=True)
+             log_config["handlers"]["dataflow"]["filename"] = log_file
+
+         dictConfig(log_config)
+         log = logging.getLogger(config_dict["options"].get("logger", "prod"))
+
+     else:
+         if log_file is not None:
+             Path(log_file).parent.mkdir(parents=True, exist_ok=True)
+             logging.basicConfig(level=logging.INFO, filename=log_file, filemode="w")
+
+         log = logging.getLogger(fallback)
+
+     # redirect stderr to the logger (using the error level)
+     sys.stderr = StreamToLogger(log, logging.ERROR)
+
+     # extract the stream from the logger's file handler
+     log_stream = None
+     for handler in log.handlers:
+         if hasattr(handler, "stream"):
+             log_stream = handler.stream
+             break
+     if log_stream is None:
+         log_stream = sys.stdout
+
+     def excepthook(exc_type, exc_value, exc_traceback):
+         traceback.print_exception(exc_type, exc_value, exc_traceback, file=log_stream)
+
+     sys.excepthook = excepthook
+
+     return log
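Standalone sketch of the stderr redirection (logger name invented for illustration): once sys.stderr is replaced, anything written to it is emitted as ERROR records through the logger, while the logging handlers keep writing to the original stream they captured at setup time, so no recursion occurs:

    import logging
    import sys

    logging.basicConfig(level=logging.INFO)  # handler captures the real stderr here
    log = logging.getLogger("demo")
    sys.stderr = StreamToLogger(log, logging.ERROR)
    print("uncaught output", file=sys.stderr)  # -> ERROR:demo:uncaught output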
legenddataflowscripts/utils/pulser_removal.py
@@ -0,0 +1,16 @@
+ from __future__ import annotations
+
+ import numpy as np
+ from dbetto.catalog import Props
+
+
+ def get_pulser_mask(pulser_file):
+     if not isinstance(pulser_file, list):
+         pulser_file = [pulser_file]
+     mask = np.array([], dtype=bool)
+     for file in pulser_file:
+         pulser_dict = Props.read_from(file)
+         pulser_mask = np.array(pulser_dict["mask"])
+         mask = np.append(mask, pulser_mask)
+
+     return mask
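Note (editorial): each input file is expected to carry a boolean array under the key "mask", and masks from multiple files are concatenated in the order given. Equivalent by hand, with made-up masks standing in for the Props.read_from() results:

    import numpy as np

    mask_run1 = np.array([False, True, False])
    mask_run2 = np.array([True, False])
    np.append(mask_run1, mask_run2)
    # array([False,  True, False,  True, False])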
legenddataflowscripts/workflow/__init__.py
@@ -0,0 +1,20 @@
+ from __future__ import annotations
+
+ from .execenv import execenv_prefix, execenv_pyexe
+ from .utils import (
+     as_ro,
+     set_last_rule_name,
+     subst_vars,
+     subst_vars_impl,
+     subst_vars_in_snakemake_config,
+ )
+
+ __all__ = [
+     "as_ro",
+     "execenv_prefix",
+     "execenv_pyexe",
+     "set_last_rule_name",
+     "subst_vars",
+     "subst_vars_impl",
+     "subst_vars_in_snakemake_config",
+ ]