triggerflow 0.2.2__tar.gz → 0.2.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. triggerflow-0.2.4/PKG-INFO +192 -0
  2. triggerflow-0.2.2/src/triggerflow.egg-info/PKG-INFO → triggerflow-0.2.4/README.md +76 -27
  3. {triggerflow-0.2.2 → triggerflow-0.2.4}/pyproject.toml +37 -3
  4. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/core.py +107 -73
  5. triggerflow-0.2.4/src/triggerflow/interfaces/uGT.py +127 -0
  6. triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py +0 -0
  7. triggerflow-0.2.4/src/triggerflow/templates/build_ugt.tcl +46 -0
  8. triggerflow-0.2.4/src/triggerflow/templates/data_types.h +524 -0
  9. triggerflow-0.2.4/src/triggerflow/templates/model-gt.cpp +104 -0
  10. triggerflow-0.2.4/src/triggerflow.egg-info/PKG-INFO +192 -0
  11. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow.egg-info/SOURCES.txt +5 -0
  12. triggerflow-0.2.4/src/triggerflow.egg-info/requires.txt +38 -0
  13. {triggerflow-0.2.2 → triggerflow-0.2.4}/tests/test.py +9 -13
  14. triggerflow-0.2.2/PKG-INFO +0 -97
  15. triggerflow-0.2.2/README.md +0 -77
  16. triggerflow-0.2.2/src/triggerflow.egg-info/requires.txt +0 -11
  17. {triggerflow-0.2.2 → triggerflow-0.2.4}/MANIFEST.in +0 -0
  18. {triggerflow-0.2.2 → triggerflow-0.2.4}/setup.cfg +0 -0
  19. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_dataset/__init__.py +0 -0
  20. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_dataset/core.py +0 -0
  21. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_loader/__init__.py +0 -0
  22. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_loader/cluster_manager.py +0 -0
  23. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_loader/loader.py +0 -0
  24. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/trigger_loader/processor.py +0 -0
  25. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/__init__.py +0 -0
  26. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/cli.py +0 -0
  27. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models → triggerflow-0.2.4/src/triggerflow/interfaces}/__init__.py +0 -0
  28. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/mlflow_wrapper.py +0 -0
  29. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/.gitignore +0 -0
  30. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/README.md +0 -0
  31. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/cookiecutter.json +0 -0
  32. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/prompts.yml +0 -0
  33. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore +0 -0
  34. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore +0 -0
  35. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml +0 -0
  36. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/README.md +0 -0
  37. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md +0 -0
  38. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml +0 -0
  39. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml +0 -0
  40. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml +0 -0
  41. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml +0 -0
  42. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml +0 -0
  43. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml +0 -0
  44. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml +0 -0
  45. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml +0 -0
  46. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml +0 -0
  47. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml +0 -0
  48. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml +0 -0
  49. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml +0 -0
  50. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml +0 -0
  51. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml +0 -0
  52. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml +0 -0
  53. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep +0 -0
  54. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json +0 -0
  55. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json +0 -0
  56. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep +0 -0
  57. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep +0 -0
  58. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep +0 -0
  59. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep +0 -0
  60. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep +0 -0
  61. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep +0 -0
  62. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml +0 -0
  63. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml +0 -0
  64. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml +0 -0
  65. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py +0 -0
  66. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py +0 -0
  67. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py +0 -0
  68. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py +0 -0
  69. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +0 -0
  70. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py +0 -0
  71. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models}/__init__.py +0 -0
  72. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py +0 -0
  73. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py +0 -0
  74. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +0 -0
  75. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py +0 -0
  76. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +0 -0
  77. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +0 -0
  78. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py +0 -0
  79. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py +0 -0
  80. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py +0 -0
  81. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py +0 -0
  82. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py +0 -0
  83. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py +0 -0
  84. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py +0 -0
  85. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +0 -0
  86. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py +0 -0
  87. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py +0 -0
  88. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py +0 -0
  89. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py +0 -0
  90. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py +0 -0
  91. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils}/__init__.py +0 -0
  92. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py +0 -0
  93. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py +0 -0
  94. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests}/__init__.py +0 -0
  95. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines}/__init__.py +0 -0
  96. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile}/__init__.py +0 -0
  97. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py +0 -0
  98. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing}/__init__.py +0 -0
  99. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py +0 -0
  100. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data}/__init__.py +0 -0
  101. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py +0 -0
  102. {triggerflow-0.2.2/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation → triggerflow-0.2.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training}/__init__.py +0 -0
  103. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py +0 -0
  104. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py +0 -0
  105. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py +0 -0
  106. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/templates/makefile +0 -0
  107. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/templates/makefile_version +0 -0
  108. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/templates/model_template.cpp +0 -0
  109. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow/templates/scales.h +0 -0
  110. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow.egg-info/dependency_links.txt +0 -0
  111. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow.egg-info/entry_points.txt +0 -0
  112. {triggerflow-0.2.2 → triggerflow-0.2.4}/src/triggerflow.egg-info/top_level.txt +0 -0
  113. {triggerflow-0.2.2 → triggerflow-0.2.4}/tests/test_loader.py +0 -0
@@ -0,0 +1,192 @@
+ Metadata-Version: 2.4
+ Name: triggerflow
+ Version: 0.2.4
+ Summary: Utilities for ML models targeting hardware triggers
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.11
+ Description-Content-Type: text/markdown
+ Requires-Dist: cookiecutter>=2.3
+ Requires-Dist: PyYAML>=6
+ Requires-Dist: Jinja2>=3
+ Requires-Dist: mlflow>=2.0
+ Requires-Dist: kedro==1.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest-cov~=3.0; extra == "dev"
+ Requires-Dist: pytest-mock<2.0,>=1.7.1; extra == "dev"
+ Requires-Dist: pytest~=7.2; extra == "dev"
+ Requires-Dist: ruff~=0.1.8; extra == "dev"
+ Provides-Extra: extended
+ Requires-Dist: coffea; extra == "extended"
+ Requires-Dist: dask; extra == "extended"
+ Requires-Dist: pyarrow; extra == "extended"
+ Requires-Dist: pandas==2.0.3; extra == "extended"
+ Requires-Dist: uproot4==4.0.0; extra == "extended"
+ Requires-Dist: pyarrow==17.0.0; extra == "extended"
+ Requires-Dist: hist==2.7.3; extra == "extended"
+ Requires-Dist: mlflow==2.21.0; extra == "extended"
+ Requires-Dist: tensorflow==2.13.0; extra == "extended"
+ Requires-Dist: QKeras==0.9.0; extra == "extended"
+ Requires-Dist: keras==2.13.1; extra == "extended"
+ Requires-Dist: numpy; extra == "extended"
+ Requires-Dist: pyparsing; extra == "extended"
+ Requires-Dist: PyYAML; extra == "extended"
+ Requires-Dist: matplotlib; extra == "extended"
+ Requires-Dist: mplhep; extra == "extended"
+ Requires-Dist: h5py; extra == "extended"
+ Requires-Dist: xxhash; extra == "extended"
+ Requires-Dist: shap; extra == "extended"
+ Requires-Dist: awkward-pandas; extra == "extended"
+ Requires-Dist: qonnx==0.4.0; extra == "extended"
+ Requires-Dist: tf_keras; extra == "extended"
+ Requires-Dist: tf2onnx; extra == "extended"
+ Requires-Dist: hls4ml; extra == "extended"
+ Requires-Dist: conifer; extra == "extended"
+
+ # Machine Learning for Hardware Triggers
+
+ `triggerflow` provides a set of utilities for Machine Learning models targeting FPGA deployment.
+ The `TriggerModel` class consolidates several Machine Learning frontends and compiler backends to construct a "trigger model". MLflow utilities are provided for logging, versioning, and loading of trigger models.
+
+ ## Installation
+
+ ```bash
+ pip install triggerflow
+ ```
+
+ ## Usage
+
+ ```python
+
+ from triggerflow.core import TriggerModel
+
+
+ scales = {'offsets': np.array([18, 0, 72, 7, 0, 73, 4, 0, 73, 4, 0, 72, 3, 0, 72, 6, -0, 286, 3, -2, 285, 3, -2, 282, 3, -2, 286, 29, 0, 72, 22, 0, 72, 18, 0, 72, 14, 0, 72, 11, 0, 72, 10, 0, 72, 10, 0, 73, 9, 0], dtype='int'),
+           'shifts': np.array([3, 0, 6, 2, 5, 6, 0, 5, 6, 0, 5, 6, -1, 5, 6, 2, 7, 8, 0, 7, 8, 0, 7, 8, 0, 7, 8, 4, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6], dtype='int')}
+
+
+ trigger_model = TriggerModel(
+     config="triggermodel_config.yaml",
+     native_model=model,  # Native XGBoost/Keras model
+     scales=scales
+ )
+
+ trigger_model()  # Vivado required on $PATH for the firmware build.
+
+ # then:
+ output_software = trigger_model.software_predict(input_data)
+ output_firmware = trigger_model.firmware_predict(input_data)
+ output_qonnx = trigger_model.qonnx_predict(input_data)
+
+ # save and load trigger models:
+ trigger_model.save("triggerflow.tar.xz")
+
+ # in a separate session:
+ from triggerflow.core import TriggerModel
+ triggerflow = TriggerModel.load("triggerflow.tar.xz")
+ ```
+
+ ## The Config file:
+
+ Use this `.yaml` template and change as needed.
+
+ ```yaml
+ compiler:
+   name: "AXO"
+   ml_backend: "keras"
+   compiler: "hls4ml"
+   fpga_part: "xc7vx690t-ffg1927-2"
+   clock_period: 25
+   n_outputs: 1
+   project_name: "AXO_project"
+   namespace: "AXO"
+   io_type: "io_parallel"
+   backend: "Vitis"
+   write_weights_txt: false
+
+ subsystem:
+   name: "uGT"
+   n_inputs: 50
+   offset_type: "ap_fixed<10,10>"
+   shift_type: "ap_fixed<10,10>"
+
+   objects:
+     muons:
+       size: 4
+       features: [pt, eta_extrapolated, phi_extrapolated]
+
+     jets:
+       size: 4
+       features: [et, eta, phi]
+
+     egammas:
+       size: 4
+       features: [et, eta, phi]
+
+     taus:
+       size: 4
+       features: [et, eta, phi]
+
+   global_features:
+     #- et.et
+     #- ht.et
+     - etmiss.et
+     - etmiss.phi
+     #- htmiss.et
+     #- htmiss.phi
+     #- ethfmiss.et
+     #- ethfmiss.phi
+     #- hthfmiss.et
+     #- hthfmiss.phi
+
+   muon_size: 4
+   jet_size: 4
+   egamma_size: 4
+   tau_size: 4
+ ```
+
+ ## Logging with MLflow
+
+ ```python
+ # logging with MLflow:
+ import mlflow
+ from triggerflow.mlflow_wrapper import log_model
+
+ mlflow.set_tracking_uri("https://ngt.cern.ch/models")
+ experiment_id = mlflow.create_experiment("example-experiment")
+
+ with mlflow.start_run(run_name="trial-v1", experiment_id=experiment_id):
+     log_model(triggerflow, registered_model_name="TriggerModel")
+ ```
+
+ ### Note: This package doesn't install dependencies, so it won't disrupt specific training environments or custom compilers. For a reference environment, see `environment.yml`.
+
+
+ # Creating a kedro pipeline
+
+ This repository also comes with a default pipeline for trigger models based on kedro.
+ One can create a new pipeline via:
+
+ NOTE: the pipeline name must not contain "-" or upper-case characters!
+
+ ```bash
+ # Create a conda environment & activate it
+ conda create -n triggerflow python=3.11
+ conda activate triggerflow
+
+ # install triggerflow
+ pip install triggerflow
+
+ # Create a pipeline
+ triggerflow new demo_pipeline
+
+ # NOTE: since we don't install dependencies, one has to create a
+ # conda env based on the environment.yml file of the pipeline.
+ # This file can be changed to the needs of the individual project.
+ cd demo_pipeline
+ conda env update -n triggerflow --file environment.yml
+
+ # Run Kedro
+ kedro run
+ ```
@@ -1,23 +1,3 @@
- Metadata-Version: 2.4
- Name: triggerflow
- Version: 0.2.2
- Summary: Utilities for ML models targeting hardware triggers
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.11
- Description-Content-Type: text/markdown
- Requires-Dist: cookiecutter>=2.3
- Requires-Dist: PyYAML>=6
- Requires-Dist: Jinja2>=3
- Requires-Dist: mlflow>=2.0
- Requires-Dist: kedro==1.0.0
- Provides-Extra: dev
- Requires-Dist: pytest-cov~=3.0; extra == "dev"
- Requires-Dist: pytest-mock<2.0,>=1.7.1; extra == "dev"
- Requires-Dist: pytest~=7.2; extra == "dev"
- Requires-Dist: ruff~=0.1.8; extra == "dev"
-
  # Machine Learning for Hardware Triggers

  `triggerflow` provides a set of utilities for Machine Learning models targeting FPGA deployment.
@@ -35,22 +15,91 @@ pip install triggerflow

  from triggerflow.core import TriggerModel

- triggerflow = TriggerModel(name="my-trigger-model", ml_backend="Keras", compiler="hls4ml", model, compiler_config or None)
- triggerflow() # call the constructor
+
+ scales = {'offsets': np.array([18, 0, 72, 7, 0, 73, 4, 0, 73, 4, 0, 72, 3, 0, 72, 6, -0, 286, 3, -2, 285, 3, -2, 282, 3, -2, 286, 29, 0, 72, 22, 0, 72, 18, 0, 72, 14, 0, 72, 11, 0, 72, 10, 0, 72, 10, 0, 73, 9, 0], dtype='int'),
+           'shifts': np.array([3, 0, 6, 2, 5, 6, 0, 5, 6, 0, 5, 6, -1, 5, 6, 2, 7, 8, 0, 7, 8, 0, 7, 8, 0, 7, 8, 4, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6, 6, 3, 6], dtype='int')}
+
+
+ trigger_model = TriggerModel(
+     config="triggermodel_config.yaml",
+     native_model=model,  # Native XGBoost/Keras model
+     scales=scales
+ )
+
+ trigger_model()  # Vivado required on $PATH for the firmware build.

  # then:
- output_software = triggerflow.software_predict(input_data)
- output_firmware = triggerflow.firmware_predict(input_data)
- output_qonnx = triggerflow.qonnx_predict(input_data)
+ output_software = trigger_model.software_predict(input_data)
+ output_firmware = trigger_model.firmware_predict(input_data)
+ output_qonnx = trigger_model.qonnx_predict(input_data)

  # save and load trigger models:
- triggerflow.save("triggerflow.tar.xz")
+ trigger_model.save("triggerflow.tar.xz")

  # in a separate session:
  from triggerflow.core import TriggerModel
  triggerflow = TriggerModel.load("triggerflow.tar.xz")
  ```

+ ## The Config file:
+
+ Use this `.yaml` template and change as needed.
+
+ ```yaml
+ compiler:
+   name: "AXO"
+   ml_backend: "keras"
+   compiler: "hls4ml"
+   fpga_part: "xc7vx690t-ffg1927-2"
+   clock_period: 25
+   n_outputs: 1
+   project_name: "AXO_project"
+   namespace: "AXO"
+   io_type: "io_parallel"
+   backend: "Vitis"
+   write_weights_txt: false
+
+ subsystem:
+   name: "uGT"
+   n_inputs: 50
+   offset_type: "ap_fixed<10,10>"
+   shift_type: "ap_fixed<10,10>"
+
+   objects:
+     muons:
+       size: 4
+       features: [pt, eta_extrapolated, phi_extrapolated]
+
+     jets:
+       size: 4
+       features: [et, eta, phi]
+
+     egammas:
+       size: 4
+       features: [et, eta, phi]
+
+     taus:
+       size: 4
+       features: [et, eta, phi]
+
+   global_features:
+     #- et.et
+     #- ht.et
+     - etmiss.et
+     - etmiss.phi
+     #- htmiss.et
+     #- htmiss.phi
+     #- ethfmiss.et
+     #- ethfmiss.phi
+     #- hthfmiss.et
+     #- hthfmiss.phi
+
+   muon_size: 4
+   jet_size: 4
+   egamma_size: 4
+   tau_size: 4
+ ```
+
  ## Logging with MLflow

  ```python
@@ -94,4 +143,4 @@ conda env update -n triggerflow --file environment.yml

  # Run Kedro
  kedro run
- ```
+ ```
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "triggerflow"
- version = "0.2.2"
+ version = "0.2.4"
  description = "Utilities for ML models targeting hardware triggers"
  readme = "README.md"
  requires-python = ">=3.11"
@@ -13,7 +13,7 @@ dependencies = [
      "PyYAML>=6",
      "Jinja2>=3",
      "mlflow>=2.0",
-     "kedro==1.0.0",
+     "kedro==1.0.0"
  ]

  classifiers = [
@@ -28,6 +28,40 @@ dev = [
      "pytest~=7.2", "ruff~=0.1.8",
  ]

+ extended = [
+     # Conda-equivalent packages (can often be installed via pip)
+     "coffea",
+     "dask",
+     "pyarrow",
+
+     # Explicitly Versioned Packages
+     "pandas==2.0.3",
+     "uproot4==4.0.0",
+     "pyarrow==17.0.0",
+     "hist==2.7.3",
+     "mlflow==2.21.0",
+     "tensorflow==2.13.0",
+     "QKeras==0.9.0",
+     "keras==2.13.1",
+
+     # Packages with required versions/source links
+     "numpy",
+     "pyparsing",
+     "PyYAML",
+     "matplotlib",
+     "mplhep",
+     "h5py",
+     "xxhash",
+     "shap",
+     "awkward-pandas",
+     "qonnx==0.4.0",
+     "tf_keras",
+     "tf2onnx",
+     "hls4ml",
+     "conifer"
+ ]
+
+
  [tool.setuptools]
  include-package-data = true

@@ -46,4 +80,4 @@ extend-exclude = ["src/triggerflow/starter"]

  # expose CLI entrypoint
  [project.scripts]
- triggerflow = "triggerflow.cli:main"
+ triggerflow = "triggerflow.cli:main"
@@ -1,5 +1,6 @@
  from pathlib import Path
  import json
+ import yaml
  import numpy as np
  import tarfile
  import importlib
@@ -8,6 +9,7 @@ from typing import Optional, Dict, Any, Union
  import shutil, warnings
  import importlib.resources as pkg_resources
  import triggerflow.templates
+ from triggerflow.interfaces.uGT import build_ugt_model


  class ModelConverter(ABC):
@@ -73,37 +75,31 @@ class NoOpConverter(ModelConverter):


  class HLS4MLStrategy(CompilerStrategy):
-     """HLS4ML compilation strategy for Keras models"""
-
-     def compile(self, model, workspace: Path, config: Optional[Dict] = None, **kwargs) -> Any:
+     def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
          import hls4ml
-
+
          firmware_dir = workspace / "firmware"
          firmware_dir.mkdir(exist_ok=True)
-
-         cfg = config or hls4ml.utils.config_from_keras_model(model, granularity="name")

-         hls_kwargs = {
-             "hls_config": cfg,
-             "output_dir": str(firmware_dir),
-             "io_type": "io_stream",
-             "backend": "Vitis"
-         }
-         hls_kwargs.update(kwargs)
+         hls_config = hls4ml.utils.config_from_keras_model(model, granularity="name")
+         hls_kwargs = {}
+
+         for key in ["project_name", "namespace", "io_type", "backend", "write_weights_txt"]:
+             if key in config:
+                 hls_kwargs[key] = config[key]

          firmware_model = hls4ml.converters.convert_from_keras_model(
              model,
+             hls_config=hls_config,
+             output_dir=str(firmware_dir),
              **hls_kwargs
          )

          firmware_model.compile()
-         if shutil.which("vivado") is not None:
-             firmware_model.build()
-         else:
-             warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
          firmware_model.save(workspace / "firmware_model.fml")
          return firmware_model
-
+
+
      def load_compiled_model(self, workspace: Path) -> Any:
          from hls4ml.converters import link_existing_project

@@ -113,46 +109,31 @@ class HLS4MLStrategy(CompilerStrategy):


  class ConiferStrategy(CompilerStrategy):
-     """Conifer compilation strategy for XGBoost models"""
+     """Conifer compilation strategy for XGBoost models, unified config/workspace handling."""

-     def compile(self, model, workspace: Path, config: Optional[Dict] = None, **kwargs) -> Any:
+     def compile(self, model, workspace: Path, config: Optional[Dict] = None) -> Any:
          import conifer
-         import shutil
-         import warnings
          import os

          firmware_dir = workspace / "firmware"
          firmware_dir.mkdir(exist_ok=True)
-         os.environ['JSON_ROOT'] = '/eos/user/m/maglowac/TriggerModel/json'
-         os.environ['XILINX_AP_INCLUDE'] = '/eos/user/m/maglowac/TriggerModel/HLS_arbitrary_Precision_Types/include'

-
-         cfg = conifer.backends.xilinxhls.auto_config()#config or conifer.backends.cpp.auto_config()
+         cfg = conifer.backends.xilinxhls.auto_config()
          cfg['OutputDir'] = str(firmware_dir)
-
-         for key, value in kwargs.items():
-             cfg[key] = value
+         cfg['ProjectName'] = config['project_name']
+         cfg['XilinxPart'] = config['fpga_part']
+         cfg['ClockPeriod'] = config['clock_period']

-         print(cfg)
-         firmware_model = conifer.converters.convert_from_xgboost(
-             model,
-             config=cfg
-         )
+         if config:
+             for key, value in config.items():
+                 cfg[key] = value

-         firmware_model.write()
-         proj_name = cfg.get('ProjectName', 'my_prj')
-         bridge_file = firmware_dir / "bridge.cpp"
-         text = bridge_file.read_text()
-         text = text.replace("my_prj.h", f"{proj_name}.h")
-         bridge_file.write_text(text)
+         firmware_model = conifer.converters.convert_from_xgboost(model, config=cfg)
          firmware_model.compile()
-         if shutil.which("vivado") is not None:
-             firmware_model.build()
-         else:
-             warnings.warn("Vivado not found in PATH. Firmware build failed.", UserWarning)
-
          firmware_model.save(firmware_dir / "firmware_model.fml")
+
          return firmware_model
+

      def load_compiled_model(self, workspace: Path) -> Any:
          from conifer import load_model
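Both rewritten `compile` methods now read their settings from the `compiler:` block of the YAML config instead of from `**kwargs`. As a purely illustrative sketch (no hls4ml or conifer calls are made), the mapping looks roughly like this, using values from the README template:

```python
# Illustrative only: how the rewritten strategies above map the `compiler:` block of the
# YAML config onto tool-specific settings. Values are taken from the README template;
# keys not listed below are ignored by HLS4MLStrategy.
compiler_cfg = {
    "project_name": "AXO_project",
    "namespace": "AXO",
    "io_type": "io_parallel",
    "backend": "Vitis",
    "write_weights_txt": False,
    "fpga_part": "xc7vx690t-ffg1927-2",
    "clock_period": 25,
}

# HLS4MLStrategy: forwarded as keyword arguments to convert_from_keras_model.
hls_kwargs = {
    key: compiler_cfg[key]
    for key in ["project_name", "namespace", "io_type", "backend", "write_weights_txt"]
    if key in compiler_cfg
}

# ConiferStrategy: written into the conifer auto_config dictionary.
conifer_cfg = {
    "ProjectName": compiler_cfg["project_name"],
    "XilinxPart": compiler_cfg["fpga_part"],
    "ClockPeriod": compiler_cfg["clock_period"],
}

print(hls_kwargs)
print(conifer_cfg)
```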
@@ -384,33 +365,45 @@ class ModelSerializer:
              return model, input_name
          return None, None

+
  class TriggerModel:
-     """Main facade class that orchestrates model conversion, compilation, and inference"""
-
-     def __init__(self, name: str, ml_backend: str, n_outputs:int, compiler: str,
-                  native_model: object, compiler_config: dict = None, scales: dict = None, unscaled_type: str = "ap_fixed<16,6>"):
-
-         if ml_backend.lower() not in ("keras", "xgboost"):
-             raise ValueError("Only Keras or XGBoost backends are currently supported.")
-
-         self.name = name
-         self.ml_backend = ml_backend.lower()
-         self.scales = scales
-         self.unscaled_type = unscaled_type
-         self.n_outputs = n_outputs
-         self.compiler = compiler.lower()
+     def __init__(self, config: Union[str, Path, Dict], native_model, scales):
+         if isinstance(config, (str, Path)):
+             with open(config, "r") as f:
+                 config = yaml.safe_load(f)
+         elif not isinstance(config, dict):
+             raise TypeError("config must be a dict or path to a YAML file")
+
          self.native_model = native_model
-         self.compiler_conifg = compiler_config
-
+         self.scales = scales
+
+         self.compiler_cfg = config.get("compiler", {})
+         self.subsystem_cfg = config.get("subsystem", {})
+
+         self.name = self.compiler_cfg.get("name", "model")
+         self.ml_backend = self.compiler_cfg.get("ml_backend", "").lower()
+         self.compiler = self.compiler_cfg.get("compiler", "").lower()
+
+         self.n_outputs = self.compiler_cfg.get("n_outputs")
+         self.unscaled_type = self.subsystem_cfg.get("unscaled_type", "ap_fixed<16,6>")
+
+         if self.ml_backend not in ("keras", "xgboost"):
+             raise ValueError("Unsupported backend")
+
          self.workspace_manager = WorkspaceManager()
-         self.converter = ConverterFactory.create_converter(ml_backend, compiler)
-         self.compiler_strategy = CompilerFactory.create_compiler(ml_backend, compiler)
-
+         self.converter = ConverterFactory.create_converter(self.ml_backend, self.compiler)
+         self.compiler_strategy = CompilerFactory.create_compiler(self.ml_backend, self.compiler)
+
          self.firmware_model = None
          self.model_qonnx = None
          self.input_name = None

-         self.workspace_manager.setup_workspace(name, self.ml_backend, self.compiler)
+
+         self.workspace_manager.setup_workspace(
+             self.name,
+             self.ml_backend,
+             self.compiler
+         )

      @property
      def workspace(self) -> Path:
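A standalone restatement of the new validation logic may help: the backend is taken from the `compiler:` block, lower-cased, and must be `keras` or `xgboost`. The sketch below only mirrors the checks above and is not part of the package.

```python
# Sketch of the 0.2.4 backend check in TriggerModel.__init__ (illustrative only).
def check_backend(compiler_cfg: dict) -> str:
    ml_backend = compiler_cfg.get("ml_backend", "").lower()
    if ml_backend not in ("keras", "xgboost"):
        raise ValueError("Unsupported backend")
    return ml_backend

print(check_backend({"ml_backend": "Keras"}))  # -> "keras"
try:
    check_backend({})                          # missing key -> "" -> ValueError
except ValueError as err:
    print(err)
```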
@@ -427,8 +420,8 @@ class TriggerModel:
          """Get metadata dictionary"""
          return self.workspace_manager.metadata

-     def __call__(self, **compiler_kwargs):
-         """Execute the full model conversion and compilation pipeline"""
+     def __call__(self):
+         """Execute full model conversion and compilation pipeline using YAML config"""
          self.parse_dataset_object()

          # Save native model
@@ -445,22 +438,57 @@ class TriggerModel:
          self.input_name = self.model_qonnx.graph.input[0].name
          self.workspace_manager.add_artifact("qonnx", qonnx_path)
          self.workspace_manager.add_version({"qonnx": str(qonnx_path)})
+

          # Compile model
          self.firmware_model = self.compiler_strategy.compile(
              self.native_model,
              self.workspace_manager.workspace,
-             self.compiler_conifg,
-             **compiler_kwargs
+             self.compiler_cfg,
+             **self.compiler_cfg.get("kwargs", {})
          )

          self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
+         if self.compiler != "conifer" and self.scales is not None:
+             self.build_emulator(
+                 self.scales['shifts'],
+                 self.scales['offsets'],
+                 self.n_outputs,
+                 self.unscaled_type
+             )

-         if self.compiler is not "conifer" and self.scales is not None:
-             self.build_emulator(self.scales['shifts'], self.scales['offsets'], self.n_outputs, self.unscaled_type)

+         if shutil.which("vivado") is not None:
+             build_ugt_model(
+                 templates_dir=self.subsystem_cfg.get("templates_dir", Path("templates")),
+                 firmware_dir=self.workspace_manager.workspace / "firmware",
+                 compiler = self.compiler,
+                 model_name=self.name,
+                 n_inputs=self.subsystem_cfg["n_inputs"],
+                 n_outputs=self.subsystem_cfg.get("n_outputs", self.n_outputs),
+                 nn_offsets=self.scales["offsets"],
+                 nn_shifts=self.scales["shifts"],
+                 muon_size=self.subsystem_cfg.get("muon_size", 0),
+                 jet_size=self.subsystem_cfg.get("jet_size", 0),
+                 egamma_size=self.subsystem_cfg.get("egamma_size", 0),
+                 tau_size=self.subsystem_cfg.get("tau_size", 0),
+                 output_type=self.subsystem_cfg.get("output_type", "result_t"),
+                 offset_type=self.subsystem_cfg.get("offset_type", "ap_fixed<10,10>"),
+                 shift_type=self.subsystem_cfg.get("shift_type", "ap_fixed<10,10>"),
+                 object_features=self.subsystem_cfg.get("object_features"),
+                 global_features=self.subsystem_cfg.get("global_features")
+             )
+         else:
+             warnings.warn(
+                 "Vivado executable not found on the system PATH. "
+                 "Skipping FW build. ",
+                 UserWarning
+             )
+
+
          self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
          self.workspace_manager.save_metadata()
+

      @staticmethod
      def parse_dataset_object():
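The `__call__` pipeline above only attempts the uGT firmware build when a `vivado` executable is found on `PATH`, and otherwise emits a `UserWarning`. A caller can mirror that check up front; in this sketch `trigger_model` is assumed to be a constructed `TriggerModel` as in the README usage block.

```python
# Illustrative sketch: pre-flight check mirroring the shutil.which("vivado") gate above.
# `trigger_model` is assumed to exist (see the README usage block); it is not defined here.
import shutil
import warnings

if shutil.which("vivado") is None:
    warnings.warn(
        "Vivado not found on PATH; the uGT firmware build will be skipped.",
        UserWarning,
    )

trigger_model()  # conversion + compilation still run; only the firmware build is gated
```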
@@ -482,10 +510,16 @@ class TriggerModel:
          predictor = SoftwarePredictor(self.native_model, self.ml_backend)
          return predictor.predict(input_data)

-     def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray:
+     def qonnx_predict(self, input_data: np.ndarray) -> np.ndarray | None:
          """Make predictions using QONNX model"""
+
          if self.model_qonnx is None:
-             raise RuntimeError("QONNX model not available")
+             warnings.warn(
+                 "QONNX model is not available. Prediction skipped.",
+                 UserWarning
+             )
+             return None
+
          predictor = QONNXPredictor(self.model_qonnx, self.input_name)
          return predictor.predict(input_data)

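With the change above, `qonnx_predict` no longer raises a `RuntimeError` when no QONNX model is available; it warns and returns `None`. Callers should therefore check the return value, as in this sketch (again assuming `trigger_model` and `input_data` from the README usage block):

```python
# Illustrative sketch: handling the None return introduced in 0.2.4.
# `trigger_model` and `input_data` are assumed from the README usage block.
output_qonnx = trigger_model.qonnx_predict(input_data)
if output_qonnx is None:
    # No QONNX model was produced; fall back to the software prediction path.
    output_qonnx = trigger_model.software_predict(input_data)
```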