triggerflow 0.2.2__tar.gz → 0.2.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {triggerflow-0.2.2/src/triggerflow.egg-info → triggerflow-0.2.3}/PKG-INFO +76 -7
- {triggerflow-0.2.2 → triggerflow-0.2.3}/README.md +75 -6
- {triggerflow-0.2.2 → triggerflow-0.2.3}/pyproject.toml +2 -2
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/core.py +107 -73
- triggerflow-0.2.3/src/triggerflow/interfaces/uGT.py +127 -0
- triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py +0 -0
- triggerflow-0.2.3/src/triggerflow/templates/build_ugt.tcl +46 -0
- triggerflow-0.2.3/src/triggerflow/templates/data_types.h +524 -0
- triggerflow-0.2.3/src/triggerflow/templates/model-gt.cpp +104 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3/src/triggerflow.egg-info}/PKG-INFO +76 -7
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow.egg-info/SOURCES.txt +5 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/tests/test.py +9 -13
- {triggerflow-0.2.2 → triggerflow-0.2.3}/MANIFEST.in +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/setup.cfg +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_dataset/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_dataset/core.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_loader/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_loader/cluster_manager.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_loader/loader.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/trigger_loader/processor.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/cli.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models → triggerflow-0.2.3/src/triggerflow/interfaces}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/mlflow_wrapper.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/.gitignore +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/README.md +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/cookiecutter.json +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/prompts.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/README.md +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests}/__init__.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines}/__init__.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py +0 -0
- {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation → triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training}/__init__.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/templates/makefile +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/templates/makefile_version +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/templates/model_template.cpp +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/templates/scales.h +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow.egg-info/dependency_links.txt +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow.egg-info/entry_points.txt +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow.egg-info/requires.txt +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow.egg-info/top_level.txt +0 -0
- {triggerflow-0.2.2 → triggerflow-0.2.3}/tests/test_loader.py +0 -0
{triggerflow-0.2.2/src/triggerflow.egg-info → triggerflow-0.2.3}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: triggerflow
-Version: 0.2.2
+Version: 0.2.3
 Summary: Utilities for ML models targeting hardware triggers
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
@@ -35,22 +35,91 @@ pip install triggerflow
 
 from triggerflow.core import TriggerModel
 
-
-
+
+scales = {'offsets': np.array([18, 0, 72, 7, 0, 73, 4, 0, 73, 4, 0, 72, 3, 0, 72, 6, -0, 286, 3, -2, 285, 3, -2, 282, 3, -2, 286, 29, 0, 72, 22, 0, 72, 18, 0, 72, 14, 0, 72, 11, 0, 72, 10, 0, 72, 10, 0, 73, 9, 0], dtype='int'),
+          'shifts': np.array([3, 0, 6, 2, 5, 6, 0, 5, 6, 0, 5, 6, -1, 5, 6, 2, 7, 8, 0, 7, 8, 0, 7, 8, 0, 7, 8, 4, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6], dtype='int')}
+
+
+trigger_model = TriggerModel(
+    config="triggermodel_config.yaml",
+    native_model=model, #Native XGboost/Keras model
+    scales=scales
+)
+
+trigger_model() #Vivado requird on $PATH for Firmware build.
 
 # then:
-output_software =
-output_firmware =
-output_qonnx =
+output_software = trigger_model.software_predict(input_data)
+output_firmware = trigger_model.firmware_predict(input_data)
+output_qonnx = trigger_model.qonnx_predict(input_data)
 
 # save and load trigger models:
-
+trigger_model.save("triggerflow.tar.xz")
 
 # in a separate session:
 from triggerflow.core import TriggerModel
 triggerflow = TriggerModel.load("triggerflow.tar.xz")
 ```
 
+## The Config file:
+
+Use this `.yaml` template and change as needed.
+
+```yaml
+compiler:
+  name: "AXO"
+  ml_backend: "keras"
+  compiler: "hls4ml"
+  fpga_part: "xc7vx690t-ffg1927-2"
+  clock_period: 25
+  n_outputs: 1
+  project_name: "AXO_project"
+  namespace: "AXO"
+  io_type: "io_parallel"
+  backend: "Vitis"
+  write_weights_txt: false
+
+subsystem:
+  name: "uGT"
+  n_inputs: 50
+  offset_type: "ap_fixed<10,10>"
+  shift_type: "ap_fixed<10,10>"
+
+  objects:
+    muons:
+      size: 4
+      features: [pt, eta_extrapolated, phi_extrapolated]
+
+    jets:
+      size: 4
+      features: [et, eta, phi]
+
+    egammas:
+      size: 4
+      features: [et, eta, phi]
+
+    taus:
+      size: 4
+      features: [et, eta, phi]
+
+  global_features:
+    #- et.et
+    #- ht.et
+    - etmiss.et
+    - etmiss.phi
+    #- htmiss.et
+    #- htmiss.phi
+    #- ethfmiss.et
+    #- ethfmiss.phi
+    #- hthfmiss.et
+    #- hthfmiss.phi
+
+  muon_size: 4
+  jet_size: 4
+  egamma_size: 4
+  tau_size: 4
+```
+
 ## Logging with MLflow
 
 ```python
{triggerflow-0.2.2 → triggerflow-0.2.3}/README.md

@@ -15,22 +15,91 @@ pip install triggerflow

The README.md hunk contains the same changes as the README content shown in the PKG-INFO diff above (PKG-INFO embeds the README after its metadata header), only offset by 20 lines.
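For reference, the new `TriggerModel.__init__` shown further down in `core.py` accepts either a path to the YAML file or an already-parsed dict, so the README workflow can also be driven from an inline config. A minimal sketch, assuming a Keras backend; the toy model, scales, and config values are purely illustrative, and Vivado is only needed once `trigger_model()` is called:

```python
import numpy as np
from tensorflow import keras
from triggerflow.core import TriggerModel

# Toy stand-in for the user's native model (illustrative only).
model = keras.Sequential([keras.layers.Input(shape=(50,)), keras.layers.Dense(1)])

# Same structure as the YAML template above, passed as a plain dict.
config = {
    "compiler": {
        "name": "AXO", "ml_backend": "keras", "compiler": "hls4ml",
        "fpga_part": "xc7vx690t-ffg1927-2", "clock_period": 25, "n_outputs": 1,
        "project_name": "AXO_project", "namespace": "AXO",
        "io_type": "io_parallel", "backend": "Vitis", "write_weights_txt": False,
    },
    "subsystem": {"name": "uGT", "n_inputs": 50},
}

scales = {"offsets": np.zeros(50, dtype="int"), "shifts": np.zeros(50, dtype="int")}

trigger_model = TriggerModel(config=config, native_model=model, scales=scales)

# Software-side prediction works without any firmware build.
output_software = trigger_model.software_predict(np.random.rand(8, 50))
```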
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "triggerflow"
|
|
7
|
-
version = "0.2.
|
|
7
|
+
version = "0.2.3"
|
|
8
8
|
description = "Utilities for ML models targeting hardware triggers"
|
|
9
9
|
readme = "README.md"
|
|
10
10
|
requires-python = ">=3.11"
|
|
@@ -13,7 +13,7 @@ dependencies = [
     "PyYAML>=6",
     "Jinja2>=3",
     "mlflow>=2.0",
-    "kedro==1.0.0"
+    "kedro==1.0.0"
 ]
 classifiers = [
     "Programming Language :: Python :: 3",
{triggerflow-0.2.2 → triggerflow-0.2.3}/src/triggerflow/core.py

@@ -1,5 +1,6 @@
 from pathlib import Path
 import json
+import yaml
 import numpy as np
 import tarfile
 import importlib
@@ -8,6 +9,7 @@ from typing import Optional, Dict, Any, Union
 import shutil, warnings
 import importlib.resources as pkg_resources
 import triggerflow.templates
+from triggerflow.interfaces.uGT import build_ugt_model
 
 
 class ModelConverter(ABC):
@@ -73,37 +75,31 @@ class NoOpConverter(ModelConverter):
 
 
 class HLS4MLStrategy(CompilerStrategy):
-
-
-    def compile(self, model, workspace: Path, config: Optional[Dict] = None, **kwargs) -> Any:
+    def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
         import hls4ml
-
+
         firmware_dir = workspace / "firmware"
         firmware_dir.mkdir(exist_ok=True)
-
-        cfg = config or hls4ml.utils.config_from_keras_model(model, granularity="name")
 
-
-
-
-
-
-
-        hls_kwargs.update(kwargs)
+        hls_config = hls4ml.utils.config_from_keras_model(model, granularity="name")
+        hls_kwargs = {}
+
+        for key in ["project_name", "namespace", "io_type", "backend", "write_weights_txt"]:
+            if key in config:
+                hls_kwargs[key] = config[key]
 
         firmware_model = hls4ml.converters.convert_from_keras_model(
             model,
+            hls_config=hls_config,
+            output_dir=str(firmware_dir),
             **hls_kwargs
         )
 
         firmware_model.compile()
-        if shutil.which("vivado") is not None:
-            firmware_model.build()
-        else:
-            warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
         firmware_model.save(workspace / "firmware_model.fml")
         return firmware_model
-
+
+
     def load_compiled_model(self, workspace: Path) -> Any:
         from hls4ml.converters import link_existing_project
 
@@ -113,46 +109,31 @@ class HLS4MLStrategy(CompilerStrategy):
 
 
 class ConiferStrategy(CompilerStrategy):
-    """Conifer compilation strategy for XGBoost models"""
+    """Conifer compilation strategy for XGBoost models, unified config/workspace handling."""
 
-    def compile(self, model, workspace: Path, config: Optional[Dict] = None
+    def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
         import conifer
-        import shutil
-        import warnings
         import os
 
         firmware_dir = workspace / "firmware"
         firmware_dir.mkdir(exist_ok=True)
-        os.environ['JSON_ROOT'] = '/eos/user/m/maglowac/TriggerModel/json'
-        os.environ['XILINX_AP_INCLUDE'] = '/eos/user/m/maglowac/TriggerModel/HLS_arbitrary_Precision_Types/include'
 
-
-        cfg = conifer.backends.xilinxhls.auto_config()#config or conifer.backends.cpp.auto_config()
+        cfg = conifer.backends.xilinxhls.auto_config()
         cfg['OutputDir'] = str(firmware_dir)
-
-
-
+        cfg['ProjectName'] = config['project_name']
+        cfg['XilinxPart'] = config['fpga_part']
+        cfg['ClockPeriod'] = config['clock_period']
 
-
-
-
-            config=cfg
-        )
+        if config:
+            for key, value in config.items():
+                cfg[key] = value
 
-        firmware_model.
-        proj_name = cfg.get('ProjectName', 'my_prj')
-        bridge_file = firmware_dir / "bridge.cpp"
-        text = bridge_file.read_text()
-        text = text.replace("my_prj.h", f"{proj_name}.h")
-        bridge_file.write_text(text)
+        firmware_model = conifer.converters.convert_from_xgboost(model, config=cfg)
         firmware_model.compile()
-        if shutil.which("vivado") is not None:
-            firmware_model.build()
-        else:
-            warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
-
         firmware_model.save(firmware_dir / "firmware_model.fml")
+
         return firmware_model
+
 
     def load_compiled_model(self, workspace: Path) -> Any:
         from conifer import load_model
@@ -384,33 +365,45 @@ class ModelSerializer:
             return model, input_name
         return None, None
 
+
 class TriggerModel:
-
-
-
-
-
-
-
-
-        self.name = name
-        self.ml_backend = ml_backend.lower()
-        self.scales = scales
-        self.unscaled_type = unscaled_type
-        self.n_outputs = n_outputs
-        self.compiler = compiler.lower()
+    def __init__(self, config: Union[str, Path, Dict], native_model, scales):
+        if isinstance(config, (str, Path)):
+            with open(config, "r") as f:
+                config = yaml.safe_load(f)
+        elif not isinstance(config, dict):
+            raise TypeError("config must be a dict or path to a YAML file")
+
         self.native_model = native_model
-        self.
-
+        self.scales = scales
+
+        self.compiler_cfg = config.get("compiler", {})
+        self.subsystem_cfg = config.get("subsystem", {})
+
+        self.name = self.compiler_cfg.get("name", "model")
+        self.ml_backend = self.compiler_cfg.get("ml_backend", "").lower()
+        self.compiler = self.compiler_cfg.get("compiler", "").lower()
+
+        self.n_outputs = self.compiler_cfg.get("n_outputs")
+        self.unscaled_type = self.subsystem_cfg.get("unscaled_type", "ap_fixed<16,6>")
+
+        if self.ml_backend not in ("keras", "xgboost"):
+            raise ValueError("Unsupported backend")
+
         self.workspace_manager = WorkspaceManager()
-        self.converter = ConverterFactory.create_converter(ml_backend, compiler)
-        self.compiler_strategy = CompilerFactory.create_compiler(ml_backend, compiler)
-
+        self.converter = ConverterFactory.create_converter(self.ml_backend, self.compiler)
+        self.compiler_strategy = CompilerFactory.create_compiler(self.ml_backend, self.compiler)
+
         self.firmware_model = None
         self.model_qonnx = None
         self.input_name = None
 
-
+
+        self.workspace_manager.setup_workspace(
+            self.name,
+            self.ml_backend,
+            self.compiler
+        )
 
     @property
     def workspace(self) -> Path:
@@ -427,8 +420,8 @@ class TriggerModel:
         """Get metadata dictionary"""
         return self.workspace_manager.metadata
 
-    def __call__(self
-        """Execute
+    def __call__(self):
+        """Execute full model conversion and compilation pipeline using YAML config"""
         self.parse_dataset_object()
 
         # Save native model
@@ -445,22 +438,57 @@ class TriggerModel:
         self.input_name = self.model_qonnx.graph.input[0].name
         self.workspace_manager.add_artifact("qonnx", qonnx_path)
         self.workspace_manager.add_version({"qonnx": str(qonnx_path)})
+
 
         # Compile model
         self.firmware_model = self.compiler_strategy.compile(
             self.native_model,
             self.workspace_manager.workspace,
-            self.
-            **
+            self.compiler_cfg,
+            **self.compiler_cfg.get("kwargs", {})
         )
 
         self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
+        if self.compiler != "conifer" and self.scales is not None:
+            self.build_emulator(
+                self.scales['shifts'],
+                self.scales['offsets'],
+                self.n_outputs,
+                self.unscaled_type
+            )
 
-        if self.compiler is not "conifer" and self.scales is not None:
-            self.build_emulator(self.scales['shifts'], self.scales['offsets'], self.n_outputs, self.unscaled_type)
 
+        if shutil.which("vivado") is not None:
+            build_ugt_model(
+                templates_dir=self.subsystem_cfg.get("templates_dir", Path("templates")),
+                firmware_dir=self.workspace_manager.workspace / "firmware",
+                compiler = self.compiler,
+                model_name=self.name,
+                n_inputs=self.subsystem_cfg["n_inputs"],
+                n_outputs=self.subsystem_cfg.get("n_outputs", self.n_outputs),
+                nn_offsets=self.scales["offsets"],
+                nn_shifts=self.scales["shifts"],
+                muon_size=self.subsystem_cfg.get("muon_size", 0),
+                jet_size=self.subsystem_cfg.get("jet_size", 0),
+                egamma_size=self.subsystem_cfg.get("egamma_size", 0),
+                tau_size=self.subsystem_cfg.get("tau_size", 0),
+                output_type=self.subsystem_cfg.get("output_type", "result_t"),
+                offset_type=self.subsystem_cfg.get("offset_type", "ap_fixed<10,10>"),
+                shift_type=self.subsystem_cfg.get("shift_type", "ap_fixed<10,10>"),
+                object_features=self.subsystem_cfg.get("object_features"),
+                global_features=self.subsystem_cfg.get("global_features")
+            )
+        else:
+            warnings.warn(
+                "Vivado executable not found on the system PATH. "
+                "Skipping FW build. ",
+                UserWarning
+            )
+
+
         self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
         self.workspace_manager.save_metadata()
+
 
     @staticmethod
     def parse_dataset_object():
@@ -482,10 +510,16 @@ class TriggerModel:
         predictor = SoftwarePredictor(self.native_model, self.ml_backend)
         return predictor.predict(input_data)
 
-    def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray:
+    def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray | None:
         """Make predictions using QONNX model"""
+
         if self.model_qonnx is None:
-
+            warnings.warn(
+                "QONNX model is not available. Prediction skipped.",
+                UserWarning
+            )
+            return None
+
         predictor = QONNXPredictor(self.model_qonnx, self.input_name)
         return predictor.predict(input_data)
 
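Since `qonnx_predict` now returns `None` (with a `UserWarning`) when no QONNX model has been produced, callers should guard the result. A small sketch, reusing the `trigger_model` and `input_data` names from the README example:

```python
# qonnx_predict may return None instead of raising when no QONNX model exists.
output_qonnx = trigger_model.qonnx_predict(input_data)
if output_qonnx is not None:
    print(output_qonnx.shape)
```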
triggerflow-0.2.3/src/triggerflow/interfaces/uGT.py (new file)

@@ -0,0 +1,127 @@
+from pathlib import Path
+import shutil
+import pkg_resources
+from jinja2 import Template
+import re
+
+def _render_template(template_file: str, output_file: Path, context: dict):
+    with open(template_file, "r") as f:
+        template_text = f.read()
+
+    template = Template(template_text)
+    rendered = template.render(**context)
+
+    with open(output_file, "w") as f:
+        f.write(rendered)
+
+def build_ugt_model(
+    templates_dir: Path,
+    firmware_dir: Path,
+    compiler: str,
+    model_name: str,
+    n_inputs: int,
+    n_outputs: int,
+    nn_offsets: list,
+    nn_shifts: list,
+    muon_size: int,
+    jet_size: int,
+    egamma_size: int,
+    tau_size: int,
+    output_type: str = "result_t",
+    offset_type: str = "ap_fixed<10,10>",
+    shift_type: str = "ap_fixed<10,10>",
+    object_features: dict = None,
+    global_features: list = None
+):
+    """
+    Render uGT top func.
+    """
+
+
+    if object_features is None:
+        object_features = {
+            "muons": ["pt", "eta_extrapolated", "phi_extrapolated"],
+            "jets": ["et", "eta", "phi"],
+            "egammas": ["et", "eta", "phi"],
+            "taus": ["et", "eta", "phi"]
+        }
+
+    if global_features is None:
+        global_features = [
+            "et.et",
+            "ht.et",
+            "etmiss.et", "etmiss.phi",
+            "htmiss.et", "htmiss.phi",
+            "ethfmiss.et", "ethfmiss.phi",
+            "hthfmiss.et", "hthfmiss.phi"
+        ]
+
+    header_path = firmware_dir / "firmware" / f"{model_name}_project.h"
+    if compiler.lower() == "conifer":
+        output_layer = "score"
+        output_type = "score_arr_t"
+        header_path = firmware_dir / "firmware" / f"{model_name}_project.h"
+        removal_pattern = re.compile(
+            r',\s*score_t\s+tree_scores\[BDT::fn_classes\(n_classes\)\s*\*\s*n_trees\]',
+            re.DOTALL
+        )
+        modified_content = removal_pattern.sub('', header_path.read_text(encoding='utf-8'))
+        header_path.write_text(modified_content, encoding='utf-8')
+        out = output_layer
+    else:
+        header_content = header_path.read_text(encoding='utf-8')
+        layer_pattern = re.compile(
+            r'result_t\s+(\w+)\[\d+\]\s*\)',
+            re.DOTALL
+        )
+        match = layer_pattern.search(header_content)
+        layer_name = match.group(1)
+        output_layer = f"{layer_name}[{n_outputs}]"
+        out = layer_name
+
+
+    context = {
+        "MODEL_NAME": model_name,
+        "N_INPUTS": n_inputs,
+        "N_OUTPUTS": n_outputs,
+        "NN_OFFSETS": ", ".join(map(str, nn_offsets)),
+        "NN_SHIFTS": ", ".join(map(str, nn_shifts)),
+        "MUON_SIZE": muon_size,
+        "JET_SIZE": jet_size,
+        "EGAMMA_SIZE": egamma_size,
+        "TAU_SIZE": tau_size,
+        "OUTPUT_TYPE": output_type,
+        "OUTPUT_LAYER": output_layer,
+        "OUT": out,
+        "OFFSET_TYPE": offset_type,
+        "SHIFT_TYPE": shift_type,
+        "MUON_FEATURES": object_features["muons"],
+        "JET_FEATURES": object_features["jets"],
+        "EGAMMA_FEATURES": object_features["egammas"],
+        "TAU_FEATURES": object_features["taus"],
+        "GLOBAL_FEATURES": global_features
+    }
+
+    context_tcl = {
+        "MODEL_NAME": model_name,
+    }
+
+    out_path = firmware_dir / "firmware/model-gt.cpp"
+
+    _render_template(f"{templates_dir}/model-gt.cpp", out_path, context)
+
+    out_path = firmware_dir / "firmware/build_ugt.tcl"
+    _render_template(f"{templates_dir}/build_ugt.tcl", out_path, context_tcl)
+
+    shutil.copy(f"{templates_dir}/data_types.h", firmware_dir / "firmware")
+
+
+    subprocess.run(
+        ["vitis_hls", "-f", "build_ugt.tcl"],
+        cwd=firmware_dir/"firmware",
+        check=True
+    )
+
+
+
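`build_ugt_model` renders `model-gt.cpp` and `build_ugt.tcl` from the templates directory, copies `data_types.h` next to the generated firmware sources, and then drives Vitis HLS. It is normally invoked from `TriggerModel.__call__`, but a direct call looks roughly like the sketch below; the paths are illustrative, `vitis_hls` must be on `PATH`, and an hls4ml/conifer project (including `<model_name>_project.h`) must already exist under `<firmware_dir>/firmware`. Note also that the hunk above calls `subprocess.run` without an `import subprocess`, so a direct call may additionally need that import available.

```python
from pathlib import Path
from triggerflow.interfaces.uGT import build_ugt_model

# Hypothetical direct invocation; values mirror the YAML template above.
build_ugt_model(
    templates_dir=Path("src/triggerflow/templates"),  # illustrative location
    firmware_dir=Path("workspace/firmware"),          # illustrative location
    compiler="hls4ml",
    model_name="AXO",
    n_inputs=50,
    n_outputs=1,
    nn_offsets=[0] * 50,  # illustrative; real values come from the dataset scales
    nn_shifts=[0] * 50,
    muon_size=4,
    jet_size=4,
    egamma_size=4,
    tau_size=4,
)
```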
triggerflow-0.2.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py

File without changes (new, empty file).
triggerflow-0.2.3/src/triggerflow/templates/build_ugt.tcl (new file)

@@ -0,0 +1,46 @@
+file mkdir prj_{{ MODEL_NAME }}
+
+open_project -reset prj_{{ MODEL_NAME }}
+
+set_top {{ MODEL_NAME }}_GT
+
+set core_files "model-gt.cpp {{ MODEL_NAME }}_project.cpp"
+
+if { [file exists "BDT.cpp"] } {
+    set all_files "$core_files BDT.cpp"
+} else {
+    set all_files "$core_files"
+}
+
+add_files $all_files -cflags "-std=c++11 -I../"
+
+open_solution -reset solution1
+set_part {xc7vx690t-ffg1927-2}
+
+create_clock -period 25
+set_clock_uncertainty 0
+
+
+config_array_partition -complete_threshold 2
+
+csynth_design
+
+file mkdir firmware
+file mkdir firmware/hdl
+file mkdir firmware/hdl/payload
+file mkdir firmware/hdl/payload/gtl
+file mkdir firmware/hdl/payload/gtl/model
+file mkdir firmware/cfg
+
+set f [open firmware/cfg/model.dep "w"]
+
+if {[file exists prj_{{ MODEL_NAME }}/solution1/syn/vhdl]} {
+    foreach filepath [glob -nocomplain prj_{{ MODEL_NAME }}/solution1/syn/vhdl/*] {
+        set filename [file tail $filepath]
+        file copy -force $filepath firmware/hdl/payload/gtl/model/$filename
+        puts $f "src payload/gtl/model/$filename"
+    }
+}
+
+close $f
+exit