triggerflow 0.1.4__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trigger_dataset/__init__.py +0 -0
- trigger_dataset/core.py +88 -0
- trigger_loader/__init__.py +0 -0
- trigger_loader/cluster_manager.py +107 -0
- trigger_loader/loader.py +147 -0
- trigger_loader/processor.py +211 -0
- triggerflow/cli.py +122 -0
- triggerflow/core.py +127 -69
- triggerflow/interfaces/__init__.py +0 -0
- triggerflow/interfaces/uGT.py +127 -0
- triggerflow/mlflow_wrapper.py +190 -19
- triggerflow/starter/.gitignore +143 -0
- triggerflow/starter/README.md +0 -0
- triggerflow/starter/cookiecutter.json +5 -0
- triggerflow/starter/prompts.yml +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore +3 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore +143 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml +56 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/README.md +29 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md +26 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml +84 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml +14 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml +8 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml +5 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml +5 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml +84 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml +14 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml +8 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml +5 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml +5 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml +43 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json +15 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json +26 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml +7 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml +21 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml +50 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py +3 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py +25 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py +20 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py +137 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +88 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py +35 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py +155 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py +16 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +17 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +50 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py +40 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py +28 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py +12 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py +20 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +31 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py +24 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py +10 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py +29 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py +24 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py +46 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py +4 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py +598 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py +0 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py +9 -0
- triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py +27 -0
- triggerflow/templates/build_ugt.tcl +46 -0
- triggerflow/templates/data_types.h +524 -0
- triggerflow/templates/makefile +3 -3
- triggerflow/templates/makefile_version +2 -2
- triggerflow/templates/model-gt.cpp +104 -0
- triggerflow/templates/model_template.cpp +19 -18
- triggerflow/templates/scales.h +1 -1
- triggerflow-0.2.4.dist-info/METADATA +192 -0
- triggerflow-0.2.4.dist-info/RECORD +102 -0
- triggerflow-0.2.4.dist-info/entry_points.txt +2 -0
- triggerflow-0.2.4.dist-info/top_level.txt +3 -0
- triggerflow-0.1.4.dist-info/METADATA +0 -61
- triggerflow-0.1.4.dist-info/RECORD +0 -11
- triggerflow-0.1.4.dist-info/top_level.txt +0 -1
- {triggerflow-0.1.4.dist-info → triggerflow-0.2.4.dist-info}/WHEEL +0 -0

triggerflow/templates/model-gt.cpp
ADDED
@@ -0,0 +1,104 @@
+#include "{{MODEL_NAME}}_project.h"
+#include "data_types.h"
+
+namespace {{MODEL_NAME}} {
+
+typedef {{OFFSET_TYPE}} offset_t;
+typedef {{SHIFT_TYPE}} shift_t;
+
+static const offset_t NN_OFFSETS[{{N_INPUTS}}] = { {{NN_OFFSETS}} };
+static const shift_t NN_SHIFTS[{{N_INPUTS}}] = { {{NN_SHIFTS}} };
+
+static void scaleNNInputs(
+    input_t unscaled[{{N_INPUTS}}],
+    input_t scaled[{{N_INPUTS}}]
+) {
+    #pragma HLS pipeline
+    for (int i = 0; i < {{N_INPUTS}}; i++) {
+        #pragma HLS unroll
+        input_t tmp0 = unscaled[i] - NN_OFFSETS[i];
+        input_t tmp1 = tmp0 >> NN_SHIFTS[i];
+        scaled[i] = tmp1;
+    }
+}
+
+void {{MODEL_NAME}}_GT(
+    Muon muons[{{MUON_SIZE}}],
+    Jet jets[{{JET_SIZE}}],
+    EGamma egammas[{{EGAMMA_SIZE}}],
+    Tau taus[{{TAU_SIZE}}],
+    ET et,
+    HT ht,
+    ETMiss etmiss,
+    HTMiss htmiss,
+    ETHFMiss ethfmiss,
+    HTHFMiss hthfmiss,
+    {{OUTPUT_TYPE}} {{OUTPUT_LAYER}}
+) {
+    #pragma HLS aggregate variable=muons compact=bit
+    #pragma HLS aggregate variable=jets compact=bit
+    #pragma HLS aggregate variable=egammas compact=bit
+    #pragma HLS aggregate variable=taus compact=bit
+    #pragma HLS aggregate variable=et compact=bit
+    #pragma HLS aggregate variable=ht compact=bit
+    #pragma HLS aggregate variable=etmiss compact=bit
+    #pragma HLS aggregate variable=htmiss compact=bit
+    #pragma HLS aggregate variable=ethfmiss compact=bit
+    #pragma HLS aggregate variable=hthfmiss compact=bit
+
+    #pragma HLS array_partition variable=muons complete
+    #pragma HLS array_partition variable=jets complete
+    #pragma HLS array_partition variable=egammas complete
+    #pragma HLS array_partition variable=taus complete
+
+    #pragma HLS pipeline II=1
+    #pragma HLS latency min=2 max=2
+    #pragma HLS inline recursive
+
+    input_t input_unscaled[{{N_INPUTS}}];
+    input_t input_scaled[{{N_INPUTS}}];
+    int idx = 0;
+
+    // Muons
+    for (int i = 0; i < {{MUON_SIZE}}; i++) {
+        #pragma HLS unroll
+        {% for f in MUON_FEATURES %}
+        input_unscaled[idx++] = muons[i].{{f}};
+        {% endfor %}
+    }
+
+    // Jets
+    for (int i = 0; i < {{JET_SIZE}}; i++) {
+        #pragma HLS unroll
+        {% for f in JET_FEATURES %}
+        input_unscaled[idx++] = jets[i].{{f}};
+        {% endfor %}
+    }
+
+    // EGammas
+    for (int i = 0; i < {{EGAMMA_SIZE}}; i++) {
+        #pragma HLS unroll
+        {% for f in EGAMMA_FEATURES %}
+        input_unscaled[idx++] = egammas[i].{{f}};
+        {% endfor %}
+    }
+
+    // Taus
+    for (int i = 0; i < {{TAU_SIZE}}; i++) {
+        #pragma HLS unroll
+        {% for f in TAU_FEATURES %}
+        input_unscaled[idx++] = taus[i].{{f}};
+        {% endfor %}
+    }
+
+    // Scalars / global objects
+    {% for f in GLOBAL_FEATURES %}
+    input_unscaled[idx++] = {{f}};
+    {% endfor %}
+
+    scaleNNInputs(input_unscaled, input_scaled);
+
+    {{MODEL_NAME}}_project(input_scaled, {{OUT}});
+}
+
+} // namespace {{MODEL_NAME}}
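The `{{ ... }}` and `{% for ... %}` markers in the new `model-gt.cpp` template are Jinja-style placeholders; the 0.2.4 metadata below adds a `Jinja2>=3` dependency, so a rendering step along these lines is plausible. This is only an illustrative sketch, with hypothetical variable values borrowed from the example config further down, not the package's actual code generation path in `triggerflow/core.py`:

```python
# Sketch: rendering a Jinja-templated HLS source like model-gt.cpp.
# All values below are hypothetical / truncated examples.
from jinja2 import Template

with open("model-gt.cpp") as f:
    tmpl = Template(f.read())

source = tmpl.render(
    MODEL_NAME="AXO",
    OFFSET_TYPE="ap_fixed<10,10>",
    SHIFT_TYPE="ap_fixed<10,10>",
    N_INPUTS=50,
    NN_OFFSETS=", ".join(str(v) for v in [18, 0, 72]),  # truncated example values
    NN_SHIFTS=", ".join(str(v) for v in [3, 0, 6]),
    MUON_SIZE=4, JET_SIZE=4, EGAMMA_SIZE=4, TAU_SIZE=4,
    MUON_FEATURES=["pt", "eta_extrapolated", "phi_extrapolated"],
    JET_FEATURES=["et", "eta", "phi"],
    EGAMMA_FEATURES=["et", "eta", "phi"],
    TAU_FEATURES=["et", "eta", "phi"],
    GLOBAL_FEATURES=["etmiss.et", "etmiss.phi"],
    OUTPUT_TYPE="result_t", OUTPUT_LAYER="score", OUT="score",
)

with open("AXO-gt.cpp", "w") as f:
    f.write(source)
```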
triggerflow/templates/model_template.cpp
CHANGED
@@ -1,4 +1,4 @@
-#include "NN/{{MODEL_NAME}}.h"
+#include "NN/{{MODEL_NAME}}_project.h"
 #include "emulator.h"
 #include "NN/nnet_utils/nnet_common.h"
 #include <any>
@@ -13,40 +13,41 @@ using namespace hls4ml_{{MODEL_NAME}};
 class {{MODEL_NAME}}_emulator : public hls4mlEmulator::Model {
 
 private:
-    unscaled_t
-
-    result_t _result[{{N_OUTPUTS}}];
+    typedef {{UNSCALED_TYPE}} unscaled_t;
+    static const int N_INPUT_SIZE = {{N_INPUTS}};
 
-
+    unscaled_t _unscaled_input[N_INPUT_SIZE];
+    {{MODEL_NAME}}::input_t _scaled_input;
+    {{MODEL_NAME}}::result_t _result;
+
+    // Scale the raw input array to the model input type
+    virtual void _scaleNNInputs(unscaled_t unscaled[N_INPUT_SIZE], {{MODEL_NAME}}::input_t &scaled)
     {
-        for (int i = 0; i <
+        for (int i = 0; i < N_INPUT_SIZE; i++)
         {
             unscaled_t tmp0 = unscaled[i] - hls4ml_{{MODEL_NAME}}::ad_offsets[i];
-
+            {{UNSCALED_TYPE}} tmp1 = tmp0 >> hls4ml_{{MODEL_NAME}}::ad_shift[i];
             scaled[i] = tmp1;
         }
     }
 
-public:
+public:
     virtual void prepare_input(std::any input) {
         unscaled_t *unscaled_input_p = std::any_cast<unscaled_t*>(input);
-
-
-        _unscaled_input[i] = std::any_cast<unscaled_t>(unscaled_input_p[i]);
+        for (int i = 0; i < N_INPUT_SIZE; i++) {
+            _unscaled_input[i] = unscaled_input_p[i];
         }
-
         _scaleNNInputs(_unscaled_input, _scaled_input);
     }
 
     virtual void predict() {
-
+        // Call the io_parallel model function; pass pointers
+        {{MODEL_NAME}}::{{MODEL_NAME}}_project(&_scaled_input, &_result);
     }
-
+
     virtual void read_result(std::any result) {
-        result_t *result_p = std::any_cast<result_t*>(result);
-
-        result_p[i] = _result[i];
-        }
+        {{MODEL_NAME}}::result_t *result_p = std::any_cast<{{MODEL_NAME}}::result_t*>(result);
+        *result_p = _result;
     }
 };
 
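Both templates apply the same integer scaling to each input: subtract a per-feature offset, then arithmetic-right-shift by a per-feature amount. A minimal NumPy sketch of the equivalent software-side transformation (an illustration only, not code from the package; the offsets and shifts here are hypothetical, in the same format as the README example below):

```python
import numpy as np

# Hypothetical per-feature scales (the README's 'offsets'/'shifts' arrays have 50 entries).
offsets = np.array([18, 0, 72], dtype=int)
shifts = np.array([3, 0, 6], dtype=int)

def scale_nn_inputs(unscaled: np.ndarray) -> np.ndarray:
    """Mirror of scaleNNInputs in the templates: (x - offset) >> shift, per feature."""
    return (unscaled.astype(int) - offsets) >> shifts

print(scale_nn_inputs(np.array([100, 5, 200])))  # -> [10  5  2]
```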
triggerflow/templates/scales.h
CHANGED

triggerflow-0.2.4.dist-info/METADATA
ADDED
@@ -0,0 +1,192 @@
+Metadata-Version: 2.4
+Name: triggerflow
+Version: 0.2.4
+Summary: Utilities for ML models targeting hardware triggers
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.11
+Description-Content-Type: text/markdown
+Requires-Dist: cookiecutter>=2.3
+Requires-Dist: PyYAML>=6
+Requires-Dist: Jinja2>=3
+Requires-Dist: mlflow>=2.0
+Requires-Dist: kedro==1.0.0
+Provides-Extra: dev
+Requires-Dist: pytest-cov~=3.0; extra == "dev"
+Requires-Dist: pytest-mock<2.0,>=1.7.1; extra == "dev"
+Requires-Dist: pytest~=7.2; extra == "dev"
+Requires-Dist: ruff~=0.1.8; extra == "dev"
+Provides-Extra: extended
+Requires-Dist: coffea; extra == "extended"
+Requires-Dist: dask; extra == "extended"
+Requires-Dist: pyarrow; extra == "extended"
+Requires-Dist: pandas==2.0.3; extra == "extended"
+Requires-Dist: uproot4==4.0.0; extra == "extended"
+Requires-Dist: pyarrow==17.0.0; extra == "extended"
+Requires-Dist: hist==2.7.3; extra == "extended"
+Requires-Dist: mlflow==2.21.0; extra == "extended"
+Requires-Dist: tensorflow==2.13.0; extra == "extended"
+Requires-Dist: QKeras==0.9.0; extra == "extended"
+Requires-Dist: keras==2.13.1; extra == "extended"
+Requires-Dist: numpy; extra == "extended"
+Requires-Dist: pyparsing; extra == "extended"
+Requires-Dist: PyYAML; extra == "extended"
+Requires-Dist: matplotlib; extra == "extended"
+Requires-Dist: mplhep; extra == "extended"
+Requires-Dist: h5py; extra == "extended"
+Requires-Dist: xxhash; extra == "extended"
+Requires-Dist: shap; extra == "extended"
+Requires-Dist: awkward-pandas; extra == "extended"
+Requires-Dist: qonnx==0.4.0; extra == "extended"
+Requires-Dist: tf_keras; extra == "extended"
+Requires-Dist: tf2onnx; extra == "extended"
+Requires-Dist: hls4ml; extra == "extended"
+Requires-Dist: conifer; extra == "extended"
+
+# Machine Learning for Hardware Triggers
+
+`triggerflow` provides a set of utilities for Machine Learning models targeting FPGA deployment.
+The `TriggerModel` class consolidates several Machine Learning frontends and compiler backends to construct a "trigger model". MLflow utilities are for logging, versioning, and loading of trigger models.
+
+## Installation
+
+```bash
+pip install triggerflow
+```
+
+## Usage
+
+```python
+
+from triggerflow.core import TriggerModel
+
+
+scales = {'offsets': np.array([18, 0, 72, 7, 0, 73, 4, 0, 73, 4, 0, 72, 3, 0, 72, 6, -0, 286, 3, -2, 285, 3, -2, 282, 3, -2, 286, 29, 0, 72, 22, 0, 72, 18, 0, 72, 14, 0, 72, 11, 0, 72, 10, 0, 72, 10, 0, 73, 9, 0], dtype='int'),
+          'shifts': np.array([3, 0, 6, 2, 5, 6, 0, 5, 6, 0, 5, 6, -1, 5, 6, 2, 7, 8, 0, 7, 8, 0, 7, 8, 0, 7, 8, 4, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6], dtype='int')}
+
+
+trigger_model = TriggerModel(
+    config="triggermodel_config.yaml",
+    native_model=model, #Native XGboost/Keras model
+    scales=scales
+)
+
+trigger_model() #Vivado requird on $PATH for Firmware build.
+
+# then:
+output_software = trigger_model.software_predict(input_data)
+output_firmware = trigger_model.firmware_predict(input_data)
+output_qonnx = trigger_model.qonnx_predict(input_data)
+
+# save and load trigger models:
+trigger_model.save("triggerflow.tar.xz")
+
+# in a separate session:
+from triggerflow.core import TriggerModel
+triggerflow = TriggerModel.load("triggerflow.tar.xz")
+```
+
+## The Config file:
+
+Use this `.yaml` template and change as needed.
+
+```yaml
+compiler:
+  name: "AXO"
+  ml_backend: "keras"
+  compiler: "hls4ml"
+  fpga_part: "xc7vx690t-ffg1927-2"
+  clock_period: 25
+  n_outputs: 1
+  project_name: "AXO_project"
+  namespace: "AXO"
+  io_type: "io_parallel"
+  backend: "Vitis"
+  write_weights_txt: false
+
+subsystem:
+  name: "uGT"
+  n_inputs: 50
+  offset_type: "ap_fixed<10,10>"
+  shift_type: "ap_fixed<10,10>"
+
+objects:
+  muons:
+    size: 4
+    features: [pt, eta_extrapolated, phi_extrapolated]
+
+  jets:
+    size: 4
+    features: [et, eta, phi]
+
+  egammas:
+    size: 4
+    features: [et, eta, phi]
+
+  taus:
+    size: 4
+    features: [et, eta, phi]
+
+global_features:
+  #- et.et
+  #- ht.et
+  - etmiss.et
+  - etmiss.phi
+  #- htmiss.et
+  #- htmiss.phi
+  #- ethfmiss.et
+  #- ethfmiss.phi
+  #- hthfmiss.et
+  #- hthfmiss.phi
+
+muon_size: 4
+jet_size: 4
+egamma_size: 4
+tau_size: 4
+```
+
+## Logging with MLflow
+
+```python
+# logging with MLFlow:
+import mlflow
+from triggerflow.mlflow_wrapper import log_model
+
+mlflow.set_tracking_uri("https://ngt.cern.ch/models")
+experiment_id = mlflow.create_experiment("example-experiment")
+
+with mlflow.start_run(run_name="trial-v1", experiment_id=experiment_id):
+    log_model(triggerflow, registered_model_name="TriggerModel")
+```
+
+### Note: This package doesn't install dependencies so it won't disrupt specific training environments or custom compilers. For a reference environment, see `environment.yml`.
+
+
+# Creating a kedro pipeline
+
+This repository also comes with a default pipeline for trigger models based on kedro.
+One can create a new pipeline via:
+
+NOTE: no "-" and upper cases!
+
+```bash
+# Create a conda environment & activate it
+conda create -n triggerflow python=3.11
+conda activate triggerflow
+
+# install triggerflow
+pip install triggerflow
+
+# Create a pipeline
+triggerflow new demo_pipeline
+
+# NOTE: since we dont install dependency one has to create a
+# conda env based on the environment.yml file of the pipeline
+# this file can be changed to the needs of the indiviual project
+cd demo_pipeline
+conda env update -n triggerflow --file environment.yml
+
+# Run Kedro
+kedro run
+```
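The README above registers the model with `log_model` under the name "TriggerModel". Reading it back could, for example, go through the standard MLflow model-registry interface; the following is only a sketch assuming the generic `mlflow.pyfunc` API and a hypothetical version number and input shape (`n_inputs: 50` from the example config), since `triggerflow.mlflow_wrapper` may expose its own loader:

```python
import mlflow
import numpy as np

mlflow.set_tracking_uri("https://ngt.cern.ch/models")

# "TriggerModel" matches the registered_model_name used above;
# the version ("1") and the input shape below are assumptions.
model = mlflow.pyfunc.load_model("models:/TriggerModel/1")

input_data = np.zeros((1, 50))      # placeholder batch of trigger inputs
predictions = model.predict(input_data)
```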

triggerflow-0.2.4.dist-info/RECORD
ADDED
@@ -0,0 +1,102 @@
+trigger_dataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+trigger_dataset/core.py,sha256=ZX96U6rWxxfCatDQbst6IRZvtlyDj1_2JA7stPydGTQ,2645
+trigger_loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+trigger_loader/cluster_manager.py,sha256=XgmY1xeW8zrpQDJqssKamWzjn6TQ60NGNzpcdZwL6NE,3617
+trigger_loader/loader.py,sha256=wMkeZ3k36wpxt-B8OpKOa6j7z0-fnJUqQ-5AbVjNpBM,5158
+trigger_loader/processor.py,sha256=cvBfYmvcr4FLzOHgGE50oy7EkFzFaV80Z_66amqfsEY,7724
+triggerflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/cli.py,sha256=ZNQb3XQN8Ir6Hp6KX_ugec9bm2kqxLNZ0KdVGJmnqFA,4498
+triggerflow/core.py,sha256=QMU_zuWrYXuZekB7z8Q90Cuaga3B_uuDnYOPthQGdps,22465
+triggerflow/mlflow_wrapper.py,sha256=yCaIS-H7oC2KxnExj24ka9ylF4A1wgzRIpc7Y43ervI,10667
+triggerflow/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/interfaces/uGT.py,sha256=UBCK0WtF-MUkI9mWrulOdExtDXgwKPsHDS4C-FXIgMs,3637
+triggerflow/starter/.gitignore,sha256=tH2z_M-tPM9MLWC2wPz1Z43Dq-wdVmb_kVYtrLT3tN4,2052
+triggerflow/starter/README.md,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/cookiecutter.json,sha256=neplzXvVe3g6OVqYD8M2olfmjRKNr1UKqmflzP3j1UY,259
+triggerflow/starter/prompts.yml,sha256=j-X_5sOsH2nvE1wg9awdNDtkNiZ7QjbvVKs9d9xyMaM,330
+triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore,sha256=HYBS6WcsRMYtwnpN9zztNW8gy5wY0ae1qgkkkwlglYM,139
+triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore,sha256=tH2z_M-tPM9MLWC2wPz1Z43Dq-wdVmb_kVYtrLT3tN4,2052
+triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml,sha256=lcahT5HExM3JsqXj3u44cNEmrEl-q97JUrGxPMG6jsQ,941
+triggerflow/starter/{{ cookiecutter.repo_name }}/README.md,sha256=4t7erte67zY0Cp6qZco04EkXSXk2lb0uFVXaAx0C154,955
+triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml,sha256=G9TOE23-awIEt3BmMH66OCN7x04aPqFdsJDC8VbApVg,74
+triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml,sha256=J84BmAfnK_TsW7v_WifTxW9Wc9v0gX_lzmh0-c0zutI,285
+triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml,sha256=2maFdFnLln3Q6z8YODT2Wpw-jEi5bKN2yoC-QG3rGVM,1378
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md,sha256=0jCfkFBPK5SqR2wJOKd7XTB9e1yvN0gw4D4Zi3nI8aI,1067
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml,sha256=SaMNpZDTQhhuyCV_x6DaW1Vb8BX3WxxcV3euK98XmGg,1038
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml,sha256=Bell27MJvByOdkQmRcOoG0aKAU3hCyiy1ivws1xZcJQ,2397
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml,sha256=I88osguJBIfLWyKJcMZg-TOUL-HOVo0sEbro8tTndBI,392
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml,sha256=YYWXiyHDzRnr2kIMytw_umfEpazLqPDYRG7USSPwLUo,273
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml,sha256=Jol2shPtxOEtVpju9e8xXsWkiuyuvVrdpLC0LSMezbU,235
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml,sha256=l4QPLJ66i73sZ7wT03r6aOFWzhhBIIulsBoMsBVOw-g,335
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml,sha256=6eBMmMoTJRp0dsTo0Tdjr6Mig3BrrBSlIs_ZN98hyPQ,242
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml,sha256=Bell27MJvByOdkQmRcOoG0aKAU3hCyiy1ivws1xZcJQ,2397
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml,sha256=yDB39m9OszZaqX67RgcUcwh37EHGWfJAp_WRblwNFI4,397
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml,sha256=ABcV4sNpIAwsSkkbTk27wIcE_5QEDpaGix1t34mMvjk,278
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml,sha256=fDn4tpNdgwSl-mC6-nUZP_bykj08eeEAt8XrEfHEuzY,240
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml,sha256=l4QPLJ66i73sZ7wT03r6aOFWzhhBIIulsBoMsBVOw-g,335
+triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml,sha256=zroINrWbzGvp1A2TiYdO_bp8LuPav0fIpVa1fbK8dGc,244
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json,sha256=-gLHehOyK7ziLMRNe2px7f56SlWr3n1du1L6XgdbI5o,375
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json,sha256=QX17MFlxXOObx5ghl-imT82jVHCojSZKlytO0uw1fJM,707
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py,sha256=a-MTXhipkVaDsY3XW0DVoRSmAxbE4_NRNQo7lT4IqsM,59
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py,sha256=YGS0Nx3OcMauvo68ReZhvznjKK_XoEUzLcnTdP1806g,659
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py,sha256=LZHowiYdVJu2N1I0G2u2l7IcTU44FM8a4u_EAXZBpTk,435
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py,sha256=TRMhMNzWwELn7LuO3Xswbrcidxb4uP-0-YhvCSgglFg,1648
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py,sha256=0y5nOkz17N8rr8b8U053eOQ2DUmPakAhdBfJmvd-npQ,367
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py,sha256=VZityUPdOigfGrqj9TMT5DtyYpMS5jXfoBmlfRGmF5Q,4972
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py,sha256=siI8gyGvIKMd4DCtKm2wJftl5a5zo1HlO5s3WOzLw5U,2785
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py,sha256=VLj4E7hsxZwswVKxyPCYrCzJ5ZCunSvNyijCWHzrPUo,936
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py,sha256=PJ1S_1qIk8UyMvoGEY7UiHE7nilE4xrnwWVY9gPRh_k,4788
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py,sha256=VkHps3wrtQcfM-CA_Ic6fsw-VSJv_FZcrSRfIKBJ1Xs,419
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py,sha256=X2jAlHHzleo1Pb9hKoh4aHOo3L6BqrnlsAxA4AVBfMM,168
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py,sha256=A9_Eoky7BWXTgvEiZUqUUdtX0mrUH7joNOof-en1MH8,1414
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py,sha256=yeWpTxOxSONbF-pUW1bipqSgMLabWIWIe9nVdTkzG0E,207
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py,sha256=Eflz9--0_L6PipS2SW9vmSKR3g0yZBjxbEnNoZ9bjQM,176
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py,sha256=bmn-GeSQ54V2Tz5iz9Cb8MnhQXAcDlGecOiWn847su0,1053
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py,sha256=PjFIWN0esIMUQ2-b1fz2FZNY7Tbn5tymBW6tF57Tm04,989
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py,sha256=M4_hXNbopG9IGnCLazqQLMT5yjMoNdUbyimcmaIFX0U,175
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py,sha256=so7UHCsivnIT6K2p-Gw9JBuiD_7BQOTRpKJ4eVMtVt8,319
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py,sha256=f2GVZNZWfKXaeHCJmBnoU4ljJ_wCfIc670z0r2dTSjU,607
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py,sha256=M4_hXNbopG9IGnCLazqQLMT5yjMoNdUbyimcmaIFX0U,175
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py,sha256=f38DDVELduNOoJGzHhw8P-XQZ4151B6pee6iDb1WqHM,883
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py,sha256=5LIoAo2CQGyeGIvcsSXOXP1CIJqdk_MlKZPfdHo8Ax4,700
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py,sha256=uHp2bxK1LF7_NXcftYOb1wb4QU41_cGL_VV2-J18lJw,177
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py,sha256=Kk6Fybq-w6EnBB14bdn-MLjbm_eigLlgFn_LVrnplCE,615
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py,sha256=iUhZhIJ2waEQpCxn6iGEaM28VTtAC47gUww_DkQ-VIo,654
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py,sha256=nS5PCf2VaLlgl44uqAcz-PraQ3GW53coeVbuG9c19R0,82
+triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py,sha256=YkB8bll9dtRiQ0gIbLDdKR2afG6OYVrFmhLwPP60oJ8,18527
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py,sha256=aGLSgzTBQGKPcezqsMDmxMj3vNZBV7VTaPwpd4XwIrQ,912
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py,sha256=bhbQ8Pteov85kD_4rH6oaJFmxoO--zgE5BWBwv7bYog,290
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py,sha256=LhrGZ619rDs3Mr2wsR6iysATVYj9pSBrsBJZrtlhGUg,298
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py,sha256=BlPsVwdspuEuQwvyVcPztAQFN6aSo1Z0Y1WK4BBJbhk,292
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py,sha256=0N747l50lEmjAgCy9K8LrNMW436edMUMo1E7tS4MADY,297
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py,sha256=2BTupcyuFEXTyMrGLVmF6Yv0CAZcdedc7fxEfmLGmxo,299
+triggerflow/templates/build_ugt.tcl,sha256=P1y42s9Ju6zDyRskCVlD-vSnblAmTafw7Jpzsq4_HWA,1061
+triggerflow/templates/data_types.h,sha256=m7_jLsRixSVjp8auxHZUUNAcSO777C5TTZl__9ENk90,12640
+triggerflow/templates/makefile,sha256=A-aetsLC51Bop0T_-yPY8Z8Hg29ApN4YPvKx_jjPuHw,970
+triggerflow/templates/makefile_version,sha256=6kFc_u2oiM9l2rH7RK_BLzdZu1ZEK8PQTQKGBLRY0v4,328
+triggerflow/templates/model-gt.cpp,sha256=qZwuTtsvrKB_mOB-HDb2uOD7mDo4-20EjFiQzRjMdPo,2969
+triggerflow/templates/model_template.cpp,sha256=jMNRcO7NgC6I9Wd2BV3Bim-P1qPsAl_oeVQ8KofQGEw,1807
+triggerflow/templates/scales.h,sha256=MFcB5S0DEvfzHuUhyZqILR0O4ktugOG-fLnuCDUUewM,373
+triggerflow-0.2.4.dist-info/METADATA,sha256=wa26vICKVPibMc7wxqLiJiwn-apZoMxBxAFqKDGA-II,5535
+triggerflow-0.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+triggerflow-0.2.4.dist-info/entry_points.txt,sha256=5QSV9YDseB_FqgVh9q10BdL4b1I6t68rGwPLXgVL60g,53
+triggerflow-0.2.4.dist-info/top_level.txt,sha256=cX0jkuM9tfxGp002ZBQ1AYgx-6D_NgBtomgPL0WA9bE,43
+triggerflow-0.2.4.dist-info/RECORD,,

triggerflow-0.1.4.dist-info/METADATA
DELETED
@@ -1,61 +0,0 @@
-Metadata-Version: 2.4
-Name: triggerflow
-Version: 0.1.4
-Summary: Utilities for ML models targeting hardware triggers
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
-Description-Content-Type: text/markdown
-Requires-Dist: mlflow>=2.0
-
-# Machine Learning for Hardware Triggers
-
-`triggerflow` provides a set of utilities for Machine Learning models targeting FPGA deployment.
-The `TriggerModel` class consolidates several Machine Learning frontends and compiler backends to construct a "trigger model". MLflow utilities are for logging, versioning, and loading of trigger models.
-
-## Installation
-
-```bash
-pip install triggerflow
-```
-
-## Usage
-
-```python
-
-from triggerflow.core import TriggerModel
-
-trigger_model = TriggerModel(name="my-trigger-model", ml_backend="Keras", compiler="hls4ml", model, compiler_config or None)
-trigger_model() # call the constructor
-
-# then:
-output_software = trigger_model.software_predict(input_data)
-output_firmware = trigger_model.firmware_predict(input_data)
-output_qonnx = trigger_model.qonnx_predict(input_data)
-
-# save and load trigger models:
-trigger_model.save("trigger_model.tar.xz")
-
-# in a separate session:
-from trigger_model.core import TriggerModel
-trigger_model = TriggerModel.load("trigger_model.tar.xz")
-```
-
-## Logging with MLflow
-
-```python
-# logging with MLFlow:
-import mlflow
-from trigger_model.mlflow_wrapper import log_model
-
-mlflow.set_tracking_uri("https://ngt.cern.ch/models")
-experiment_id = mlflow.create_experiment("example-experiment")
-
-with mlflow.start_run(run_name="trial-v1", experiment_id=experiment_id):
-    log_model(trigger_model, registered_model_name="TriggerModel")
-```
-
-### Note: This package doesn't install dependencies so it won't disrupt specific training environments or custom compilers. For a reference environment, see `environment.yml`.
-
-

triggerflow-0.1.4.dist-info/RECORD
DELETED
@@ -1,11 +0,0 @@
-triggerflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-triggerflow/core.py,sha256=8yxV3xH3SjixyDx3OP0fHEtGjteMUA01MIGrdofsusg,20349
-triggerflow/mlflow_wrapper.py,sha256=mtg2mQTRlYkv9ojO6aRZ5Oln9lbE6AYA220Zw4rdEzM,3888
-triggerflow/templates/makefile,sha256=VL39isTUBewrs8zTSDzdP6LLln7zpGoCZnLadpMu7CA,808
-triggerflow/templates/makefile_version,sha256=Tmu0tyAopJbiBQVMMOa6l2Cz5GkEn20mwgzIi0CfhyM,338
-triggerflow/templates/model_template.cpp,sha256=eGwY5ca_HgjoIvqorOBPSJspP0wngpjJheq3meb48r4,1616
-triggerflow/templates/scales.h,sha256=5bq6lVF36SRQKE2zg9RpBG6K5orpPlnJ8g125nbtFow,365
-triggerflow-0.1.4.dist-info/METADATA,sha256=R5p0NDzGnDhROZLRAdWbY_JBE9EPS1dma1UWaMoQ5Ac,1942
-triggerflow-0.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-triggerflow-0.1.4.dist-info/top_level.txt,sha256=g4M0nqpVPFZcmVmsoLExDtJFLDBK4fzobCIBqo13BEw,12
-triggerflow-0.1.4.dist-info/RECORD,,

triggerflow-0.1.4.dist-info/top_level.txt
DELETED
@@ -1 +0,0 @@
-triggerflow

{triggerflow-0.1.4.dist-info → triggerflow-0.2.4.dist-info}/WHEEL
File without changes