triggerflow 0.2.1__tar.gz → 0.2.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {triggerflow-0.2.1/src/triggerflow.egg-info → triggerflow-0.2.3}/PKG-INFO +76 -7
- {triggerflow-0.2.1 → triggerflow-0.2.3}/README.md +75 -6
- {triggerflow-0.2.1 → triggerflow-0.2.3}/pyproject.toml +2 -2
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_loader/cluster_manager.py +3 -3
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_loader/loader.py +55 -3
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/core.py +107 -73
- triggerflow-0.2.3/src/triggerflow/interfaces/uGT.py +127 -0
- triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py +0 -0
- triggerflow-0.2.3/src/triggerflow/templates/build_ugt.tcl +46 -0
- triggerflow-0.2.3/src/triggerflow/templates/data_types.h +524 -0
- triggerflow-0.2.3/src/triggerflow/templates/model-gt.cpp +104 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3/src/triggerflow.egg-info}/PKG-INFO +76 -7
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow.egg-info/SOURCES.txt +5 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/tests/test.py +9 -13
- {triggerflow-0.2.1 → triggerflow-0.2.3}/MANIFEST.in +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/setup.cfg +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_dataset/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_dataset/core.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_loader/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/trigger_loader/processor.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/cli.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models → triggerflow-0.2.3/src/triggerflow/interfaces}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/mlflow_wrapper.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/.gitignore +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/README.md +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/cookiecutter.json +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/prompts.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/README.md +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests}/__init__.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines}/__init__.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py +0 -0
- {triggerflow-0.2.1/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training}/__init__.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/templates/makefile +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/templates/makefile_version +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/templates/model_template.cpp +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow/templates/scales.h +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow.egg-info/dependency_links.txt +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow.egg-info/entry_points.txt +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow.egg-info/requires.txt +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/src/triggerflow.egg-info/top_level.txt +0 -0
- {triggerflow-0.2.1 → triggerflow-0.2.3}/tests/test_loader.py +0 -0
````diff
--- triggerflow-0.2.1/src/triggerflow.egg-info/PKG-INFO
+++ triggerflow-0.2.3/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: triggerflow
-Version: 0.2.1
+Version: 0.2.3
 Summary: Utilities for ML models targeting hardware triggers
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -35,22 +35,91 @@ pip install triggerflow
 
 from triggerflow.core import TriggerModel
 
-
-
+
+scales = {'offsets': np.array([18, 0, 72, 7, 0, 73, 4, 0, 73, 4, 0, 72, 3, 0, 72, 6, -0, 286, 3, -2, 285, 3, -2, 282, 3, -2, 286, 29, 0, 72, 22, 0, 72, 18, 0, 72, 14, 0, 72, 11, 0, 72, 10, 0, 72, 10, 0, 73, 9, 0], dtype='int'),
+          'shifts': np.array([3, 0, 6, 2, 5, 6, 0, 5, 6, 0, 5, 6, -1, 5, 6, 2, 7, 8, 0, 7, 8, 0, 7, 8, 0, 7, 8, 4, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6], dtype='int')}
+
+
+trigger_model = TriggerModel(
+    config="triggermodel_config.yaml",
+    native_model=model, #Native XGboost/Keras model
+    scales=scales
+)
+
+trigger_model() #Vivado requird on $PATH for Firmware build.
 
 # then:
-output_software =
-output_firmware =
-output_qonnx =
+output_software = trigger_model.software_predict(input_data)
+output_firmware = trigger_model.firmware_predict(input_data)
+output_qonnx = trigger_model.qonnx_predict(input_data)
 
 # save and load trigger models:
-
+trigger_model.save("triggerflow.tar.xz")
 
 # in a separate session:
 from triggerflow.core import TriggerModel
 triggerflow = TriggerModel.load("triggerflow.tar.xz")
 ```
 
+## The Config file:
+
+Use this `.yaml` template and change as needed.
+
+```yaml
+compiler:
+  name: "AXO"
+  ml_backend: "keras"
+  compiler: "hls4ml"
+  fpga_part: "xc7vx690t-ffg1927-2"
+  clock_period: 25
+  n_outputs: 1
+  project_name: "AXO_project"
+  namespace: "AXO"
+  io_type: "io_parallel"
+  backend: "Vitis"
+  write_weights_txt: false
+
+subsystem:
+  name: "uGT"
+  n_inputs: 50
+  offset_type: "ap_fixed<10,10>"
+  shift_type: "ap_fixed<10,10>"
+
+  objects:
+    muons:
+      size: 4
+      features: [pt, eta_extrapolated, phi_extrapolated]
+
+    jets:
+      size: 4
+      features: [et, eta, phi]
+
+    egammas:
+      size: 4
+      features: [et, eta, phi]
+
+    taus:
+      size: 4
+      features: [et, eta, phi]
+
+  global_features:
+    #- et.et
+    #- ht.et
+    - etmiss.et
+    - etmiss.phi
+    #- htmiss.et
+    #- htmiss.phi
+    #- ethfmiss.et
+    #- ethfmiss.phi
+    #- hthfmiss.et
+    #- hthfmiss.phi
+
+  muon_size: 4
+  jet_size: 4
+  egamma_size: 4
+  tau_size: 4
+```
+
 ## Logging with MLflow
 
 ```python
````
README.md, `@@ -15,22 +15,91 @@ pip install triggerflow`: this hunk is identical to the PKG-INFO hunk above (PKG-INFO embeds the README verbatim), shifted up by 20 lines; it adds the same usage example and 'The Config file' section to README.md.
````diff
--- triggerflow-0.2.1/pyproject.toml
+++ triggerflow-0.2.3/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "triggerflow"
-version = "0.2.1"
+version = "0.2.3"
 description = "Utilities for ML models targeting hardware triggers"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -13,7 +13,7 @@ dependencies = [
     "PyYAML>=6",
     "Jinja2>=3",
     "mlflow>=2.0",
-    "kedro==1.0.0"
+    "kedro==1.0.0"
 ]
 classifiers = [
     "Programming Language :: Python :: 3",
````

(The `-`/`+` pair for the `kedro` pin shows identical text; the underlying change is presumably whitespace-only, with the pinned version unchanged.)
````diff
--- triggerflow-0.2.1/src/trigger_loader/cluster_manager.py
+++ triggerflow-0.2.3/src/trigger_loader/cluster_manager.py
@@ -4,9 +4,6 @@ import logging
 from typing import Any
 
 from dask.distributed import Client, LocalCluster
-from dask_cuda import LocalCUDACluster
-from dask_jobqueue import HTCondorCluster
-from dask_kubernetes import KubeCluster
 
 logger = logging.getLogger(__name__)
 
@@ -63,15 +60,18 @@ class ClusterManager:
             self.cluster = LocalCluster(**self.cluster_config)
 
         elif ct == "condor":
+            from dask_jobqueue import HTCondorCluster
             self.cluster = HTCondorCluster(**self.cluster_config)
             if self.jobs and self.jobs > 0:
                 # Scale to the requested number of jobs
                 self.cluster.scale(jobs=self.jobs)
 
         elif ct == "cuda":
+            from dask_cuda import LocalCUDACluster
             self.cluster = LocalCUDACluster(**self.cluster_config)
 
         elif ct == "kubernetes":
+            from dask_kubernetes import KubeCluster
             self.cluster = KubeCluster(**self.cluster_config)
             if self.jobs and self.jobs > 0:
                 try:
````
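Together these two hunks apply the standard deferred-import pattern: the scheduler-specific Dask extras are now imported only inside the branch that uses them, so `cluster_manager` can be imported without `dask-cuda`, `dask-jobqueue`, or `dask-kubernetes` installed. A minimal sketch of the pattern (`make_cluster` is an illustrative helper, not triggerflow API):

```python
from dask.distributed import LocalCluster

def make_cluster(cluster_type: str, **config):
    """Create a Dask cluster, importing optional backends only on demand."""
    if cluster_type == "local":
        return LocalCluster(**config)
    if cluster_type == "condor":
        # Import deferred: only required when an HTCondor cluster is requested.
        from dask_jobqueue import HTCondorCluster
        return HTCondorCluster(**config)
    if cluster_type == "cuda":
        from dask_cuda import LocalCUDACluster
        return LocalCUDACluster(**config)
    raise ValueError(f"unknown cluster type: {cluster_type!r}")
```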
````diff
--- triggerflow-0.2.1/src/trigger_loader/loader.py
+++ triggerflow-0.2.3/src/trigger_loader/loader.py
@@ -45,8 +45,58 @@ class TriggerLoader:
         )
 
     def _load_sample_json(self, sample_json: str) -> dict:
+        """
+        Loads the JSON and resolves file paths using the priority:
+        1. Explicit 'files' list or directory path (Local/Explicit)
+        2. 'DAS' query (Remote Fallback)
+
+        Returns the canonical coffea fileset format: {dataset_name: [file_path_list]}.
+        """
+        import glob
+        import os
+
+        # Helper function definition needed here if it's not imported:
+        # def _fetch_files_from_das(das_query: str) -> list[str]: ... (placeholder or actual implementation)
+
         with open(sample_json) as f:
-
+            full_data = json.load(f)
+        dataset_metadata = full_data.get("samples", full_data)
+
+        fileset = {}
+        for ds_name, ds_info in dataset_metadata.items():
+            files = []
+
+            if "files" in ds_info:
+                file_info = ds_info["files"]
+
+                if isinstance(file_info, list):
+                    files = file_info
+
+                elif isinstance(file_info, str):
+                    if os.path.isdir(file_info):
+                        path_glob = os.path.join(file_info, "*.root")
+                        files = glob.glob(path_glob)
+                        logger.info(f"Resolved {len(files)} files from directory {file_info}.")
+                    else:
+                        files = [file_info]
+
+            if files:
+                logger.info(f"Using {len(files)} local/explicit files for {ds_name}.")
+
+            if not files and "DAS" in ds_info:
+                try:
+                    files = _fetch_files_from_das(ds_info["DAS"])
+                    logger.info(f"Resolved {len(files)} files via DAS for {ds_name}.")
+                except NameError:
+                    logger.error("DAS fetching skipped: _fetch_files_from_das is not defined.")
+
+            if not files:
+                logger.warning(f"No files found for dataset: {ds_name}. Skipping.")
+                continue
+
+            fileset[ds_name] = files
+
+        return fileset
 
     def _write_run_metadata_file(self, path: str, duration_s: float | None = None):
         meta_path = f"{path}/run_metadata.json"
````
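For reference, a minimal samples JSON that the new resolver accepts, exercising all three resolution paths; the dataset names and paths here are invented for illustration:

```python
import json

# Mirrors the logic of _load_sample_json above: an explicit list is used as-is,
# a directory string is globbed for *.root, and a "DAS" query is the fallback.
sample_spec = {
    "samples": {
        "zerobias_explicit": {
            "files": ["/data/zb/file1.root", "/data/zb/file2.root"],
        },
        "zerobias_directory": {
            "files": "/data/zb_run/",
        },
        "zerobias_remote": {
            "DAS": "/ZeroBias/Run2024-example/NANOAOD",
        },
    }
}

with open("samples.json", "w") as f:
    json.dump(sample_spec, f, indent=2)
```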
````diff
@@ -58,9 +108,11 @@ class TriggerLoader:
             json.dump(data, f, indent=2)
 
     def _run(self, runner: processor.Runner, label: str):
-        logger.log(f"Starting processing ({label})...")
+        logger.log(logging.INFO, f"Starting processing ({label})...")
         start = time.time()
         proc = self._build_processor()
+        print(self.fileset)
+
         acc = runner(
             self.fileset,
             treename="Events",
@@ -68,7 +120,7 @@
         )
         elapsed = time.time() - start
         self._write_run_metadata_file(self.output_path, elapsed)
-        logger.log(f"Finished in {elapsed:.2f}s (run_uuid={self.run_uuid})")
+        logger.log(logging.INFO, f"Finished in {elapsed:.2f}s (run_uuid={self.run_uuid})")
         return acc
 
     def run_distributed(self, cluster_type: str, cluster_config: dict,
````
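These two hunks also fix a latent crash: `logging.Logger.log` takes the level as its first argument, so the old `logger.log(f"...")` calls would raise a `TypeError` the moment `_run` executed. A quick illustration:

```python
import logging

logger = logging.getLogger(__name__)

logger.log(logging.INFO, "Starting processing...")  # correct: level comes first
logger.info("Starting processing...")               # equivalent shorthand
# logger.log("Starting processing...")              # TypeError: level must be an integer
```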
````diff
--- triggerflow-0.2.1/src/triggerflow/core.py
+++ triggerflow-0.2.3/src/triggerflow/core.py
@@ -1,5 +1,6 @@
 from pathlib import Path
 import json
+import yaml
 import numpy as np
 import tarfile
 import importlib
@@ -8,6 +9,7 @@ from typing import Optional, Dict, Any, Union
 import shutil, warnings
 import importlib.resources as pkg_resources
 import triggerflow.templates
+from triggerflow.interfaces.uGT import build_ugt_model
 
 
 class ModelConverter(ABC):
````
````diff
@@ -73,37 +75,31 @@ class NoOpConverter(ModelConverter):
 
 
 class HLS4MLStrategy(CompilerStrategy):
-
-
-    def compile(self, model, workspace: Path, config: Optional[Dict] = None, **kwargs) -> Any:
+    def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
         import hls4ml
-
+
         firmware_dir = workspace / "firmware"
         firmware_dir.mkdir(exist_ok=True)
-
-        cfg = config or hls4ml.utils.config_from_keras_model(model, granularity="name")
 
-
-
-
-
-
-
-        hls_kwargs.update(kwargs)
+        hls_config = hls4ml.utils.config_from_keras_model(model, granularity="name")
+        hls_kwargs = {}
+
+        for key in ["project_name", "namespace", "io_type", "backend", "write_weights_txt"]:
+            if key in config:
+                hls_kwargs[key] = config[key]
 
         firmware_model = hls4ml.converters.convert_from_keras_model(
             model,
+            hls_config=hls_config,
+            output_dir=str(firmware_dir),
             **hls_kwargs
         )
 
         firmware_model.compile()
-        if shutil.which("vivado") is not None:
-            firmware_model.build()
-        else:
-            warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
         firmware_model.save(workspace / "firmware_model.fml")
         return firmware_model
-
+
+
     def load_compiled_model(self, workspace: Path) -> Any:
         from hls4ml.converters import link_existing_project
 
````
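Note that the strategy no longer gates `firmware_model.build()` on a local `shutil.which("vivado")` check; the same gate is dropped from `ConiferStrategy` below, and the Vivado check now lives once in `TriggerModel.__call__` (see the later core.py hunks). For orientation, a standalone sketch of the new Keras path (assumes `hls4ml` is installed and `model` is a Keras model; `compile_keras_firmware` is an illustrative helper, not triggerflow API):

```python
import hls4ml

def compile_keras_firmware(model, cfg: dict, out_dir: str):
    # Per-layer precision/config derived from the model itself.
    hls_config = hls4ml.utils.config_from_keras_model(model, granularity="name")
    # Only the whitelisted keys from the YAML compiler: block are forwarded.
    keys = ("project_name", "namespace", "io_type", "backend", "write_weights_txt")
    hls_kwargs = {k: cfg[k] for k in keys if k in cfg}
    fw = hls4ml.converters.convert_from_keras_model(
        model, hls_config=hls_config, output_dir=out_dir, **hls_kwargs
    )
    fw.compile()  # C-simulation build only; Vivado synthesis happens elsewhere
    return fw
```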
````diff
@@ -113,46 +109,31 @@ class HLS4MLStrategy:
 
 
 class ConiferStrategy(CompilerStrategy):
-    """Conifer compilation strategy for XGBoost models"""
+    """Conifer compilation strategy for XGBoost models, unified config/workspace handling."""
 
-    def compile(self, model, workspace: Path, config: Optional[Dict] = None
+    def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
         import conifer
-        import shutil
-        import warnings
         import os
 
         firmware_dir = workspace / "firmware"
         firmware_dir.mkdir(exist_ok=True)
-        os.environ['JSON_ROOT'] = '/eos/user/m/maglowac/TriggerModel/json'
-        os.environ['XILINX_AP_INCLUDE'] = '/eos/user/m/maglowac/TriggerModel/HLS_arbitrary_Precision_Types/include'
 
-
-        cfg = conifer.backends.xilinxhls.auto_config()#config or conifer.backends.cpp.auto_config()
+        cfg = conifer.backends.xilinxhls.auto_config()
         cfg['OutputDir'] = str(firmware_dir)
-
-
-
+        cfg['ProjectName'] = config['project_name']
+        cfg['XilinxPart'] = config['fpga_part']
+        cfg['ClockPeriod'] = config['clock_period']
 
-
-
-
-            config=cfg
-        )
+        if config:
+            for key, value in config.items():
+                cfg[key] = value
 
-        firmware_model.
-        proj_name = cfg.get('ProjectName', 'my_prj')
-        bridge_file = firmware_dir / "bridge.cpp"
-        text = bridge_file.read_text()
-        text = text.replace("my_prj.h", f"{proj_name}.h")
-        bridge_file.write_text(text)
+        firmware_model = conifer.converters.convert_from_xgboost(model, config=cfg)
         firmware_model.compile()
-        if shutil.which("vivado") is not None:
-            firmware_model.build()
-        else:
-            warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
-
         firmware_model.save(firmware_dir / "firmware_model.fml")
+
         return firmware_model
+
 
     def load_compiled_model(self, workspace: Path) -> Any:
         from conifer import load_model
````
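The hard-coded `/eos/user/...` environment variables and the `bridge.cpp` header patching are gone; the project name, FPGA part, and clock period now come from the YAML `compiler:` block. A standalone sketch of the equivalent conifer flow (assumes `conifer` is installed and `model` is a trained XGBoost model; `compile_xgboost_firmware` is an illustrative helper, not triggerflow API):

```python
import conifer

def compile_xgboost_firmware(model, cfg: dict, out_dir: str):
    # Start from conifer's Xilinx HLS defaults, then overlay the YAML settings.
    backend_cfg = conifer.backends.xilinxhls.auto_config()
    backend_cfg['OutputDir'] = out_dir
    backend_cfg['ProjectName'] = cfg['project_name']
    backend_cfg['XilinxPart'] = cfg['fpga_part']
    backend_cfg['ClockPeriod'] = cfg['clock_period']

    fw = conifer.converters.convert_from_xgboost(model, config=backend_cfg)
    fw.compile()  # C++ emulation; HLS synthesis still requires Vivado separately
    return fw
```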
````diff
@@ -384,33 +365,45 @@ class ModelSerializer:
         return model, input_name
         return None, None
 
+
 class TriggerModel:
-
-
-
-
-
-
-
-
-        self.name = name
-        self.ml_backend = ml_backend.lower()
-        self.scales = scales
-        self.unscaled_type = unscaled_type
-        self.n_outputs = n_outputs
-        self.compiler = compiler.lower()
+    def __init__(self, config: Union[str, Path, Dict], native_model, scales):
+        if isinstance(config, (str, Path)):
+            with open(config, "r") as f:
+                config = yaml.safe_load(f)
+        elif not isinstance(config, dict):
+            raise TypeError("config must be a dict or path to a YAML file")
+
         self.native_model = native_model
-        self.
-
+        self.scales = scales
+
+        self.compiler_cfg = config.get("compiler", {})
+        self.subsystem_cfg = config.get("subsystem", {})
+
+        self.name = self.compiler_cfg.get("name", "model")
+        self.ml_backend = self.compiler_cfg.get("ml_backend", "").lower()
+        self.compiler = self.compiler_cfg.get("compiler", "").lower()
+
+        self.n_outputs = self.compiler_cfg.get("n_outputs")
+        self.unscaled_type = self.subsystem_cfg.get("unscaled_type", "ap_fixed<16,6>")
+
+        if self.ml_backend not in ("keras", "xgboost"):
+            raise ValueError("Unsupported backend")
+
         self.workspace_manager = WorkspaceManager()
-        self.converter = ConverterFactory.create_converter(ml_backend, compiler)
-        self.compiler_strategy = CompilerFactory.create_compiler(ml_backend, compiler)
-
+        self.converter = ConverterFactory.create_converter(self.ml_backend, self.compiler)
+        self.compiler_strategy = CompilerFactory.create_compiler(self.ml_backend, self.compiler)
+
         self.firmware_model = None
         self.model_qonnx = None
         self.input_name = None
 
-
+
+        self.workspace_manager.setup_workspace(
+            self.name,
+            self.ml_backend,
+            self.compiler
+        )
 
     @property
     def workspace(self) -> Path:
````
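Because the new constructor accepts either a YAML path or a plain dict (the `isinstance` check above), a config can also be built inline. A minimal sketch, reusing `model` and `scales` from the README example and an abbreviated config:

```python
from triggerflow.core import TriggerModel

cfg = {
    "compiler": {
        "name": "AXO",
        "ml_backend": "keras",
        "compiler": "hls4ml",
        "n_outputs": 1,
    },
    "subsystem": {
        "name": "uGT",
        "n_inputs": 50,
    },
}

trigger_model = TriggerModel(config=cfg, native_model=model, scales=scales)
```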
````diff
@@ -427,8 +420,8 @@ class TriggerModel:
         """Get metadata dictionary"""
         return self.workspace_manager.metadata
 
-    def __call__(self
-        """Execute
+    def __call__(self):
+        """Execute full model conversion and compilation pipeline using YAML config"""
         self.parse_dataset_object()
 
         # Save native model
@@ -445,22 +438,57 @@
         self.input_name = self.model_qonnx.graph.input[0].name
         self.workspace_manager.add_artifact("qonnx", qonnx_path)
         self.workspace_manager.add_version({"qonnx": str(qonnx_path)})
+
 
         # Compile model
         self.firmware_model = self.compiler_strategy.compile(
             self.native_model,
             self.workspace_manager.workspace,
-            self.
-            **
+            self.compiler_cfg,
+            **self.compiler_cfg.get("kwargs", {})
         )
 
         self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
+        if self.compiler != "conifer" and self.scales is not None:
+            self.build_emulator(
+                self.scales['shifts'],
+                self.scales['offsets'],
+                self.n_outputs,
+                self.unscaled_type
+            )
 
-        if self.compiler is not "conifer" and self.scales is not None:
-            self.build_emulator(self.scales['shifts'], self.scales['offsets'], self.n_outputs, self.unscaled_type)
 
+        if shutil.which("vivado") is not None:
+            build_ugt_model(
+                templates_dir=self.subsystem_cfg.get("templates_dir", Path("templates")),
+                firmware_dir=self.workspace_manager.workspace / "firmware",
+                compiler = self.compiler,
+                model_name=self.name,
+                n_inputs=self.subsystem_cfg["n_inputs"],
+                n_outputs=self.subsystem_cfg.get("n_outputs", self.n_outputs),
+                nn_offsets=self.scales["offsets"],
+                nn_shifts=self.scales["shifts"],
+                muon_size=self.subsystem_cfg.get("muon_size", 0),
+                jet_size=self.subsystem_cfg.get("jet_size", 0),
+                egamma_size=self.subsystem_cfg.get("egamma_size", 0),
+                tau_size=self.subsystem_cfg.get("tau_size", 0),
+                output_type=self.subsystem_cfg.get("output_type", "result_t"),
+                offset_type=self.subsystem_cfg.get("offset_type", "ap_fixed<10,10>"),
+                shift_type=self.subsystem_cfg.get("shift_type", "ap_fixed<10,10>"),
+                object_features=self.subsystem_cfg.get("object_features"),
+                global_features=self.subsystem_cfg.get("global_features")
+            )
+        else:
+            warnings.warn(
+                "Vivado executable not found on the system PATH. "
+                "Skipping FW build. ",
+                UserWarning
+            )
+
+
         self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
         self.workspace_manager.save_metadata()
+
 
     @staticmethod
     def parse_dataset_object():
````
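Besides wiring in the new `uGT` interface, this hunk fixes a real bug: the old guard compared strings with `is not`, which tests object identity rather than equality and emits a `SyntaxWarning` on modern CPython; the new code uses `!=`. It also centralizes the Vivado availability check here, instead of warning separately inside each compiler strategy.

```python
# Identity vs. equality on strings: the old guard only worked by accident.
compiler = "".join(["con", "ifer"])   # equals "conifer", but is a distinct object
print(compiler is not "conifer")      # True  -> the old guard misfires
print(compiler != "conifer")          # False -> the new guard behaves as intended
```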
````diff
@@ -482,10 +510,16 @@ class TriggerModel:
         predictor = SoftwarePredictor(self.native_model, self.ml_backend)
         return predictor.predict(input_data)
 
-    def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray:
+    def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray | None:
         """Make predictions using QONNX model"""
+
         if self.model_qonnx is None:
-
+            warnings.warn(
+                "QONNX model is not available. Prediction skipped.",
+                UserWarning
+            )
+            return None
+
         predictor = QONNXPredictor(self.model_qonnx, self.input_name)
         return predictor.predict(input_data)
 
````
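Callers should account for the widened return type: when no QONNX model has been built yet, `qonnx_predict` now warns and returns `None` instead of failing. A minimal guard, assuming `trigger_model` and `input_data` from the README example:

```python
output_qonnx = trigger_model.qonnx_predict(input_data)
if output_qonnx is None:
    # No QONNX model yet: run the conversion pipeline first, then retry.
    trigger_model()
    output_qonnx = trigger_model.qonnx_predict(input_data)
```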