triggerflow 0.3.3__tar.gz → 0.3.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121) hide show
  1. {triggerflow-0.3.3 → triggerflow-0.3.4}/PKG-INFO +46 -32
  2. {triggerflow-0.3.3 → triggerflow-0.3.4}/pyproject.toml +42 -33
  3. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_loader/loader.py +25 -13
  4. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_loader/processor.py +1 -1
  5. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/core.py +18 -21
  6. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/interfaces/uGT.py +80 -32
  7. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples.json +0 -4
  8. triggerflow-0.3.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +49 -0
  9. triggerflow-0.3.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +70 -0
  10. triggerflow-0.3.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +20 -0
  11. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/nodes.py +1 -0
  12. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/nodes.py +1 -0
  13. triggerflow-0.3.4/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +48 -0
  14. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/pipeline.py +1 -1
  15. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/nodes.py +3 -1
  16. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/model-gt.cpp +3 -3
  17. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/model_template.cpp +10 -10
  18. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/PKG-INFO +46 -32
  19. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/SOURCES.txt +0 -1
  20. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/requires.txt +33 -15
  21. {triggerflow-0.3.3 → triggerflow-0.3.4}/tests/test.py +1 -1
  22. triggerflow-0.3.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/samples_dummy.json +0 -26
  23. triggerflow-0.3.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/meta_dataset.py +0 -88
  24. triggerflow-0.3.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/nodes.py +0 -50
  25. triggerflow-0.3.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/pipeline.py +0 -10
  26. triggerflow-0.3.3/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/nodes.py +0 -31
  27. {triggerflow-0.3.3 → triggerflow-0.3.4}/MANIFEST.in +0 -0
  28. {triggerflow-0.3.3 → triggerflow-0.3.4}/README.md +0 -0
  29. {triggerflow-0.3.3 → triggerflow-0.3.4}/setup.cfg +0 -0
  30. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_dataset/__init__.py +0 -0
  31. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_dataset/core.py +0 -0
  32. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_loader/__init__.py +0 -0
  33. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/trigger_loader/cluster_manager.py +0 -0
  34. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/__init__.py +0 -0
  35. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/cli.py +0 -0
  36. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/interfaces/__init__.py +0 -0
  37. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/mlflow_wrapper.py +0 -0
  38. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/.gitignore +0 -0
  39. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/README.md +0 -0
  40. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/cookiecutter.json +0 -0
  41. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/prompts.yml +0 -0
  42. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.dvcignore +0 -0
  43. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitignore +0 -0
  44. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/.gitlab-ci.yml +0 -0
  45. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/README.md +0 -0
  46. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/README.md +0 -0
  47. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/catalog.yml +0 -0
  48. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters.yml +0 -0
  49. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_compile.yml +0 -0
  50. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_data_processing.yml +0 -0
  51. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_load_data.yml +0 -0
  52. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_training.yml +0 -0
  53. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/base/parameters_model_validation.yml +0 -0
  54. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/catalog.yml +0 -0
  55. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters.yml +0 -0
  56. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_compile.yml +0 -0
  57. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_data_processing.yml +0 -0
  58. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_load_data.yml +0 -0
  59. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_training.yml +0 -0
  60. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/local/parameters_model_validation.yml +0 -0
  61. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/conf/logging.yml +0 -0
  62. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/.gitkeep +0 -0
  63. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/condor_config.json +0 -0
  64. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/cuda_config.json +0 -0
  65. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/settings.json +0 -0
  66. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/01_raw/test.root +0 -0
  67. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/02_loaded/.gitkeep +0 -0
  68. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/03_preprocessed/.gitkeep +0 -0
  69. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/04_models/.gitkeep +0 -0
  70. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/05_validation/.gitkeep +0 -0
  71. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/06_compile/.gitkeep +0 -0
  72. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/data/07_reporting/.gitkeep +0 -0
  73. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/dvc.yaml +0 -0
  74. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/environment.yml +0 -0
  75. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/pyproject.toml +0 -0
  76. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__init__.py +0 -0
  77. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/__main__.py +0 -0
  78. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/any_object.py +0 -0
  79. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_dataset.py +0 -0
  80. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/base_loader.py +0 -0
  81. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_dataset.py +0 -0
  82. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/datasets/{{ cookiecutter.python_package }}_loader.py +0 -0
  83. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/__init__.py +0 -0
  84. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/base_model.py +0 -0
  85. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/models/{{ cookiecutter.python_package }}_model.py +0 -0
  86. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipeline_registry.py +0 -0
  87. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/compile/__init__.py +0 -0
  88. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/__init__.py +0 -0
  89. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/data_processing/pipeline.py +0 -0
  90. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/__init__.py +0 -0
  91. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/load_data/pipeline.py +0 -0
  92. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_training/__init__.py +0 -0
  93. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/__init__.py +0 -0
  94. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/pipelines/model_validation/pipeline.py +0 -0
  95. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/settings.py +0 -0
  96. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/__init__.py +0 -0
  97. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/metric.py +0 -0
  98. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/src/{{ cookiecutter.python_package }}/utils/plotting.py +0 -0
  99. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/__init__.py +0 -0
  100. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/__init__.py +0 -0
  101. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/__init__.py +0 -0
  102. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/compile/test_pipeline.py +0 -0
  103. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/__init__.py +0 -0
  104. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/data_processing/test_pipeline.py +0 -0
  105. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/__init__.py +0 -0
  106. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/load_data/test_pipeline.py +0 -0
  107. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/__init__.py +0 -0
  108. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_training/test_pipeline.py +0 -0
  109. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/__init__.py +0 -0
  110. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/pipelines/model_validation/test_pipeline.py +0 -0
  111. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/starter/{{ cookiecutter.repo_name }}/tests/test_run.py +0 -0
  112. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/build_ugt.tcl +0 -0
  113. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/data_types.h +0 -0
  114. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/makefile +0 -0
  115. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/makefile_version +0 -0
  116. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow/templates/scales.h +0 -0
  117. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/dependency_links.txt +0 -0
  118. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/entry_points.txt +0 -0
  119. {triggerflow-0.3.3 → triggerflow-0.3.4}/src/triggerflow.egg-info/top_level.txt +0 -0
  120. {triggerflow-0.3.3 → triggerflow-0.3.4}/tests/test_loader.py +0 -0
  121. {triggerflow-0.3.3 → triggerflow-0.3.4}/tests/test_starter.py +0 -0
@@ -1,48 +1,62 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: triggerflow
3
- Version: 0.3.3
3
+ Version: 0.3.4
4
4
  Summary: Utilities for ML models targeting hardware triggers
5
- Classifier: Programming Language :: Python :: 3
6
- Classifier: License :: OSI Approved :: MIT License
7
- Classifier: Operating System :: OS Independent
8
- Requires-Python: >=3.11
5
+ Requires-Python: >=3.10
9
6
  Description-Content-Type: text/markdown
10
7
  Requires-Dist: cookiecutter>=2.3
11
8
  Requires-Dist: PyYAML>=6
12
9
  Requires-Dist: Jinja2>=3
13
- Requires-Dist: mlflow==2.21.0
14
10
  Requires-Dist: kedro==1.0.0
11
+ Requires-Dist: kedro-datasets
12
+ Requires-Dist: kedro-mlflow==2.0.1
13
+ Requires-Dist: awkward<3,>=2.8
14
+ Requires-Dist: dask==2025.3.0
15
+ Requires-Dist: coffea>=2025.12
16
+ Requires-Dist: distributed==2025.3.0
15
17
  Provides-Extra: dev
16
18
  Requires-Dist: pytest-cov~=3.0; extra == "dev"
17
19
  Requires-Dist: pytest-mock<2.0,>=1.7.1; extra == "dev"
18
20
  Requires-Dist: pytest~=7.2; extra == "dev"
19
21
  Requires-Dist: ruff~=0.1.8; extra == "dev"
20
- Provides-Extra: extended
21
- Requires-Dist: coffea; extra == "extended"
22
- Requires-Dist: dask; extra == "extended"
23
- Requires-Dist: pyarrow; extra == "extended"
24
- Requires-Dist: pandas==2.0.3; extra == "extended"
25
- Requires-Dist: uproot4==4.0.0; extra == "extended"
26
- Requires-Dist: pyarrow==17.0.0; extra == "extended"
27
- Requires-Dist: hist==2.7.3; extra == "extended"
28
- Requires-Dist: mlflow==2.21.0; extra == "extended"
29
- Requires-Dist: tensorflow==2.13.0; extra == "extended"
30
- Requires-Dist: QKeras==0.9.0; extra == "extended"
31
- Requires-Dist: keras==2.13.1; extra == "extended"
32
- Requires-Dist: numpy; extra == "extended"
33
- Requires-Dist: pyparsing; extra == "extended"
34
- Requires-Dist: PyYAML; extra == "extended"
35
- Requires-Dist: matplotlib; extra == "extended"
36
- Requires-Dist: mplhep; extra == "extended"
37
- Requires-Dist: h5py; extra == "extended"
38
- Requires-Dist: xxhash; extra == "extended"
39
- Requires-Dist: shap; extra == "extended"
40
- Requires-Dist: awkward-pandas; extra == "extended"
41
- Requires-Dist: qonnx==0.4.0; extra == "extended"
42
- Requires-Dist: tf_keras; extra == "extended"
43
- Requires-Dist: tf2onnx; extra == "extended"
44
- Requires-Dist: hls4ml; extra == "extended"
45
- Requires-Dist: conifer; extra == "extended"
22
+ Provides-Extra: qkeras
23
+ Requires-Dist: tensorflow==2.13.0; extra == "qkeras"
24
+ Requires-Dist: QKeras==0.9.0; extra == "qkeras"
25
+ Requires-Dist: keras==2.13.1; extra == "qkeras"
26
+ Requires-Dist: qonnx==0.4.0; extra == "qkeras"
27
+ Requires-Dist: tf_keras; extra == "qkeras"
28
+ Requires-Dist: tf2onnx; extra == "qkeras"
29
+ Requires-Dist: hls4ml; extra == "qkeras"
30
+ Requires-Dist: conifer; extra == "qkeras"
31
+ Requires-Dist: coffea; extra == "qkeras"
32
+ Requires-Dist: dask; extra == "qkeras"
33
+ Requires-Dist: pyarrow==17.0.0; extra == "qkeras"
34
+ Requires-Dist: pandas==2.0.3; extra == "qkeras"
35
+ Requires-Dist: uproot==4.0.0; extra == "qkeras"
36
+ Requires-Dist: hist==2.7.3; extra == "qkeras"
37
+ Requires-Dist: awkward-pandas; extra == "qkeras"
38
+ Requires-Dist: matplotlib; extra == "qkeras"
39
+ Requires-Dist: mplhep; extra == "qkeras"
40
+ Requires-Dist: h5py; extra == "qkeras"
41
+ Requires-Dist: xxhash; extra == "qkeras"
42
+ Requires-Dist: shap; extra == "qkeras"
43
+ Requires-Dist: numpy==1.24.3; extra == "qkeras"
44
+ Provides-Extra: keras3
45
+ Requires-Dist: keras==3.11.0; extra == "keras3"
46
+ Requires-Dist: tensorflow==2.16.1; extra == "keras3"
47
+ Requires-Dist: hgq2; extra == "keras3"
48
+ Requires-Dist: hls4ml; extra == "keras3"
49
+ Requires-Dist: conifer; extra == "keras3"
50
+ Requires-Dist: pyarrow==17.0.0; extra == "keras3"
51
+ Requires-Dist: pandas==2.0.3; extra == "keras3"
52
+ Requires-Dist: uproot==4.0.0; extra == "keras3"
53
+ Requires-Dist: hist==2.7.3; extra == "keras3"
54
+ Requires-Dist: awkward-pandas; extra == "keras3"
55
+ Requires-Dist: matplotlib; extra == "keras3"
56
+ Requires-Dist: mplhep; extra == "keras3"
57
+ Requires-Dist: h5py; extra == "keras3"
58
+ Requires-Dist: xxhash; extra == "keras3"
59
+ Requires-Dist: shap; extra == "keras3"
46
60
 
47
61
  # Machine Learning for Hardware Triggers
48
62
 
@@ -4,63 +4,73 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "triggerflow"
7
- version = "0.3.3"
7
+ version = "0.3.4"
8
8
  description = "Utilities for ML models targeting hardware triggers"
9
9
  readme = "README.md"
10
- requires-python = ">=3.11"
10
+ requires-python = ">=3.10"
11
11
  dependencies = [
12
12
  "cookiecutter>=2.3",
13
13
  "PyYAML>=6",
14
14
  "Jinja2>=3",
15
- "mlflow==2.21.0",
16
- "kedro==1.0.0"
17
- ]
18
- classifiers = [
19
- "Programming Language :: Python :: 3",
20
- "License :: OSI Approved :: MIT License",
21
- "Operating System :: OS Independent"
15
+ "kedro==1.0.0",
16
+ "kedro-datasets",
17
+ "kedro-mlflow==2.0.1",
18
+ "awkward>=2.8,<3",
19
+ "dask==2025.3.0",
20
+ "coffea>=2025.12",
21
+ "distributed==2025.3.0"
22
22
  ]
23
23
 
24
24
  [project.optional-dependencies]
25
25
  dev = [
26
26
  "pytest-cov~=3.0",
27
27
  "pytest-mock>=1.7.1, <2.0",
28
- "pytest~=7.2", "ruff~=0.1.8",
28
+ "pytest~=7.2",
29
+ "ruff~=0.1.8",
29
30
  ]
30
31
 
31
- extended = [
32
- # Conda-equivalent packages (can often be installed via pip)
32
+ qkeras = [
33
+ "tensorflow==2.13.0",
34
+ "QKeras==0.9.0",
35
+ "keras==2.13.1",
36
+ "qonnx==0.4.0",
37
+ "tf_keras",
38
+ "tf2onnx",
39
+ "hls4ml",
40
+ "conifer",
41
+ # Physics/Data tools included in this flow
33
42
  "coffea",
34
43
  "dask",
35
- "pyarrow",
36
-
37
- # Explicitly Versioned Packages
38
- "pandas==2.0.3",
39
- "uproot4==4.0.0",
40
44
  "pyarrow==17.0.0",
45
+ "pandas==2.0.3",
46
+ "uproot==4.0.0",
41
47
  "hist==2.7.3",
42
- "mlflow==2.21.0",
43
- "tensorflow==2.13.0",
44
- "QKeras==0.9.0",
45
- "keras==2.13.1",
46
-
47
- # Packages with required versions/source links
48
- "numpy",
49
- "pyparsing",
50
- "PyYAML",
48
+ "awkward-pandas",
51
49
  "matplotlib",
52
50
  "mplhep",
53
51
  "h5py",
54
52
  "xxhash",
55
53
  "shap",
56
- "awkward-pandas",
57
- "qonnx==0.4.0",
58
- "tf_keras",
59
- "tf2onnx",
60
- "hls4ml",
61
- "conifer"
54
+ "numpy==1.24.3"
62
55
  ]
63
56
 
57
+ keras3 = [
58
+ "keras==3.11.0",
59
+ "tensorflow==2.16.1",
60
+ "hgq2",
61
+ "hls4ml",
62
+ "conifer",
63
+ "pyarrow==17.0.0",
64
+ "pandas==2.0.3",
65
+ "uproot==4.0.0",
66
+ "hist==2.7.3",
67
+ "awkward-pandas",
68
+ "matplotlib",
69
+ "mplhep",
70
+ "h5py",
71
+ "xxhash",
72
+ "shap",
73
+ ]
64
74
 
65
75
  [tool.setuptools]
66
76
  include-package-data = true
@@ -78,6 +88,5 @@ select = [ "F", "W", "E", "I", "UP", "PL", "T201",]
78
88
  ignore = [ "E501",]
79
89
  extend-exclude = ["src/triggerflow/starter"]
80
90
 
81
- # expose CLI entrypoint
82
91
  [project.scripts]
83
92
  triggerflow = "triggerflow.cli:main"
@@ -46,16 +46,16 @@ class TriggerLoader:
46
46
  def _load_sample_json(self, sample_json: str) -> dict:
47
47
  """
48
48
  Loads the JSON and resolves file paths using the priority:
49
- 1. Explicit 'files' list or directory path (Local/Explicit)
50
- 2. 'DAS' query (Remote Fallback)
49
+ 1. 'files' list or path (local)
50
+ 2. 'DAS' query (remote fallback)
51
51
 
52
- Returns the canonical coffea fileset format: {dataset_name: [file_path_list]}.
52
+ Returns canonical coffea fileset: {dataset_name: [file_path_list]}.
53
53
  """
54
54
  import glob
55
55
  import os
56
-
57
- # Helper function definition needed here if it's not imported:
58
- # def _fetch_files_from_das(das_query: str) -> list[str]: ... (placeholder or actual implementation)
56
+
57
+ # Placeholder for DAS fetching if needed
58
+ # def _fetch_files_from_das(das_query: str) -> list[str]: ...
59
59
 
60
60
  with open(sample_json) as f:
61
61
  full_data = json.load(f)
@@ -64,23 +64,31 @@ class TriggerLoader:
64
64
  fileset, meta_data = {}, {}
65
65
  for ds_name, ds_info in dataset_metadata.items():
66
66
  files = []
67
+
67
68
  if "files" in ds_info:
68
69
  file_info = ds_info["files"]
70
+
69
71
  if isinstance(file_info, list):
70
- files = file_info
72
+ files = []
73
+ for f in file_info:
74
+ if os.path.isdir(f):
75
+ files.extend(glob.glob(os.path.join(f, "*.root")))
76
+ elif os.path.isfile(f):
77
+ files.append(f)
78
+
71
79
  elif isinstance(file_info, str):
72
80
  if os.path.isdir(file_info):
73
- path_glob = os.path.join(file_info, "*.root")
74
- files = glob.glob(path_glob)
75
- self.logger.info(f"Resolved {len(files)} files from directory {file_info}.")
76
- else:
81
+ files = glob.glob(os.path.join(file_info, "*.root"))
82
+ elif os.path.isfile(file_info):
77
83
  files = [file_info]
84
+
78
85
  if files:
79
86
  self.logger.info(f"Using {len(files)} local/explicit files for {ds_name}.")
80
87
 
88
+ # Fallback to DAS if no local files found
81
89
  if not files and "DAS" in ds_info:
82
90
  try:
83
- files = _fetch_files_from_das(ds_info["DAS"])
91
+ files = _fetch_files_from_das(ds_info["DAS"])
84
92
  self.logger.info(f"Resolved {len(files)} files via DAS for {ds_name}.")
85
93
  except NameError:
86
94
  self.logger.info("DAS fetching skipped: _fetch_files_from_das is not defined.")
@@ -90,10 +98,14 @@ class TriggerLoader:
90
98
  continue
91
99
 
92
100
  fileset[ds_name] = files
93
- meta_data[ds_name] = {"files": files, "is_signal": ds_info["is_signal"]}
101
+ meta_data[ds_name] = {
102
+ "files": files,
103
+ "is_signal": ds_info.get("is_signal", False)
104
+ }
94
105
 
95
106
  return fileset, meta_data
96
107
 
108
+
97
109
  def _write_run_metadata_file(self, path: str, duration_s: float | None = None):
98
110
  meta_path = f"{path}/run_metadata.json"
99
111
  data = {
@@ -164,7 +164,7 @@ class TriggerProcessor(processor.ProcessorABC):
164
164
  "entrystop": event_meta["stop"],
165
165
  "n_events_written": len(table),
166
166
  "columns": table.column_names,
167
- "created_utc": dt.datetime.now(dt.UTC).isoformat(timespec="seconds") + "Z",
167
+ "created_utc": dt.datetime.now(dt.timezone.utc).isoformat(timespec="seconds") + "Z",
168
168
  "compression": self.compression,
169
169
  "processing_time_s": round(elapsed_s, 6),
170
170
  }
@@ -9,7 +9,7 @@ from typing import Optional, Dict, Any, Union
9
9
  import shutil, warnings
10
10
  import importlib.resources as pkg_resources
11
11
  import triggerflow.templates
12
- from triggerflow.interfaces.uGT import build_ugt_model
12
+ from importlib import import_module
13
13
 
14
14
 
15
15
  class ModelConverter(ABC):
@@ -404,6 +404,8 @@ class TriggerModel:
404
404
 
405
405
  self.n_outputs = self.compiler_cfg.get("n_outputs")
406
406
  self.unscaled_type = self.subsystem_cfg.get("unscaled_type", "ap_fixed<16,6>")
407
+ self.namespace = self.compiler_cfg.get("namespace", "triggerflow")
408
+ self.project_name = self.compiler_cfg.get("project_name", "triggerflow")
407
409
 
408
410
  if self.ml_backend not in ("keras", "xgboost"):
409
411
  raise ValueError("Unsupported backend")
@@ -475,27 +477,20 @@ class TriggerModel:
475
477
  self.unscaled_type
476
478
  )
477
479
 
480
+ subsystem_name = self.subsystem_cfg.get("name", "uGT")
481
+ interface_module = import_module(f"triggerflow.interfaces.{subsystem_name}")
482
+ build_firmware = getattr(interface_module, f"build_{subsystem_name.lower()}_model")
483
+
484
+ build_firmware(
485
+ subsystem_cfg=self.subsystem_cfg,
486
+ compiler_cfg=self.compiler_cfg,
487
+ workspace_manager=self.workspace_manager,
488
+ compiler=self.compiler,
489
+ scales=self.scales,
490
+ name=self.name,
491
+ n_outputs=self.n_outputs
492
+ )
478
493
 
479
- build_ugt_model(
480
- templates_dir=self.subsystem_cfg.get("templates_dir", Path("templates")),
481
- firmware_dir=self.workspace_manager.workspace / "firmware",
482
- compiler = self.compiler,
483
- model_name=self.name,
484
- n_inputs=self.subsystem_cfg["n_inputs"],
485
- n_outputs=self.subsystem_cfg.get("n_outputs", self.n_outputs),
486
- nn_offsets=self.scales["offsets"],
487
- nn_shifts=self.scales["shifts"],
488
- muon_size=self.subsystem_cfg.get("muon_size", 0),
489
- jet_size=self.subsystem_cfg.get("jet_size", 0),
490
- egamma_size=self.subsystem_cfg.get("egamma_size", 0),
491
- tau_size=self.subsystem_cfg.get("tau_size", 0),
492
- output_type=self.subsystem_cfg.get("output_type", "result_t"),
493
- offset_type=self.subsystem_cfg.get("offset_type", "ap_fixed<10,10>"),
494
- shift_type=self.subsystem_cfg.get("shift_type", "ap_fixed<10,10>"),
495
- object_features=self.subsystem_cfg.get("object_features"),
496
- global_features=self.subsystem_cfg.get("global_features")
497
- )
498
-
499
494
 
500
495
 
501
496
  self.workspace_manager.add_artifact("firmware", self.workspace_manager.workspace / "firmware")
@@ -563,6 +558,8 @@ class TriggerModel:
563
558
  "AD_SHIFT": ", ".join(map(str, ad_shift)),
564
559
  "AD_OFFSETS": ", ".join(map(str, ad_offsets)),
565
560
  "UNSCALED_TYPE": unscaled_type,
561
+ "NAMESPACE": self.namespace,
562
+ "PROJECT_NAME": self.project_name,
566
563
  }
567
564
  self._render_template(scales_template_path, scales_out_path, context)
568
565
 
@@ -1,5 +1,5 @@
1
1
  from pathlib import Path
2
- import shutil, warnings
2
+ import shutil, warnings, subprocess
3
3
  import pkg_resources
4
4
  from jinja2 import Template
5
5
  import re
@@ -7,7 +7,7 @@ import re
7
7
 
8
8
  def _render_template(template_name: str, output_file: Path, context: dict):
9
9
  template_bytes = pkg_resources.resource_string(
10
- "triggerflow" , template_name
10
+ "triggerflow", template_name
11
11
  )
12
12
  template_text = template_bytes.decode('utf-8')
13
13
 
@@ -17,11 +17,62 @@ def _render_template(template_name: str, output_file: Path, context: dict):
17
17
  with open(output_file, "w") as f:
18
18
  f.write(rendered)
19
19
 
20
+
20
21
  def build_ugt_model(
22
+ subsystem_cfg: dict,
23
+ compiler_cfg: dict,
24
+ workspace_manager,
25
+ compiler,
26
+ scales: dict,
27
+ name: str,
28
+ n_outputs: int
29
+ ):
30
+
31
+ firmware_dir = workspace_manager.workspace / "firmware"
32
+ templates_dir = subsystem_cfg.get("templates_dir", Path("templates"))
33
+
34
+ objects = subsystem_cfg.get("objects", {})
35
+
36
+ object_features = {}
37
+
38
+ for object_name, object_config in objects.items():
39
+ object_features[object_name] = object_config.get("features", [])
40
+
41
+ muon_size = subsystem_cfg.get("muon_size", 0)
42
+ jet_size = subsystem_cfg.get("jet_size", 0)
43
+ egamma_size = subsystem_cfg.get("egamma_size", 0)
44
+ tau_size = subsystem_cfg.get("tau_size", 0)
45
+
46
+ uGT(
47
+ templates_dir=templates_dir,
48
+ firmware_dir=firmware_dir,
49
+ compiler=compiler,
50
+ model_name=name,
51
+ namespace=compiler_cfg.get("namespace", "triggerflow"),
52
+ project_name=compiler_cfg.get("project_name", "triggerflow"),
53
+ n_inputs=subsystem_cfg["n_inputs"],
54
+ n_outputs=subsystem_cfg.get("n_outputs", n_outputs),
55
+ nn_offsets=scales["offsets"],
56
+ nn_shifts=scales["shifts"],
57
+ muon_size=muon_size,
58
+ jet_size=jet_size,
59
+ egamma_size=egamma_size,
60
+ tau_size=tau_size,
61
+ output_type=subsystem_cfg.get("output_type", "result_t"),
62
+ offset_type=subsystem_cfg.get("offset_type", "ap_fixed<10,10>"),
63
+ shift_type=subsystem_cfg.get("shift_type", "ap_fixed<10,10>"),
64
+ object_features=object_features,
65
+ global_features=subsystem_cfg.get("global_features")
66
+ )
67
+
68
+
69
+ def uGT(
21
70
  templates_dir: Path,
22
71
  firmware_dir: Path,
23
72
  compiler: str,
24
73
  model_name: str,
74
+ namespace: str,
75
+ project_name: str,
25
76
  n_inputs: int,
26
77
  n_outputs: int,
27
78
  nn_offsets: list,
@@ -40,7 +91,6 @@ def build_ugt_model(
40
91
  Render uGT top func.
41
92
  """
42
93
 
43
-
44
94
  if object_features is None:
45
95
  object_features = {
46
96
  "muons": ["pt", "eta_extrapolated", "phi_extrapolated"],
@@ -84,29 +134,31 @@ def build_ugt_model(
84
134
 
85
135
 
86
136
  context = {
87
- "MODEL_NAME": model_name,
88
- "N_INPUTS": n_inputs,
89
- "N_OUTPUTS": n_outputs,
90
- "NN_OFFSETS": ", ".join(map(str, nn_offsets)),
91
- "NN_SHIFTS": ", ".join(map(str, nn_shifts)),
92
- "MUON_SIZE": muon_size,
93
- "JET_SIZE": jet_size,
94
- "EGAMMA_SIZE": egamma_size,
95
- "TAU_SIZE": tau_size,
96
- "OUTPUT_TYPE": output_type,
97
- "OUTPUT_LAYER": output_layer,
98
- "OUT": out,
99
- "OFFSET_TYPE": offset_type,
100
- "SHIFT_TYPE": shift_type,
101
- "MUON_FEATURES": object_features["muons"],
102
- "JET_FEATURES": object_features["jets"],
103
- "EGAMMA_FEATURES": object_features["egammas"],
104
- "TAU_FEATURES": object_features["taus"],
105
- "GLOBAL_FEATURES": global_features
137
+ "MODEL_NAME": model_name,
138
+ "NAMESPACE": namespace,
139
+ "PROJECT_NAME": project_name,
140
+ "N_INPUTS": n_inputs,
141
+ "N_OUTPUTS": n_outputs,
142
+ "NN_OFFSETS": ", ".join(map(str, nn_offsets)),
143
+ "NN_SHIFTS": ", ".join(map(str, nn_shifts)),
144
+ "MUON_SIZE": muon_size,
145
+ "JET_SIZE": jet_size,
146
+ "EGAMMA_SIZE": egamma_size,
147
+ "TAU_SIZE": tau_size,
148
+ "OUTPUT_TYPE": output_type,
149
+ "OUTPUT_LAYER": output_layer,
150
+ "OUT": out,
151
+ "OFFSET_TYPE": offset_type,
152
+ "SHIFT_TYPE": shift_type,
153
+ "MUON_FEATURES": object_features["muons"],
154
+ "JET_FEATURES": object_features["jets"],
155
+ "EGAMMA_FEATURES": object_features["egammas"],
156
+ "TAU_FEATURES": object_features["taus"],
157
+ "GLOBAL_FEATURES": global_features
106
158
  }
107
159
 
108
160
  context_tcl = {
109
- "MODEL_NAME": model_name,
161
+ "MODEL_NAME": model_name,
110
162
  }
111
163
 
112
164
  out_path = firmware_dir / "firmware/model-gt.cpp"
@@ -128,12 +180,8 @@ def build_ugt_model(
128
180
  check=True
129
181
  )
130
182
  else:
131
- warnings.warn(
132
- "Vivado executable not found on the system PATH. "
133
- "Skipping FW build. ",
134
- UserWarning
135
- )
136
-
137
-
138
-
139
-
183
+ warnings.warn(
184
+ "Vivado executable not found on the system PATH. "
185
+ "Skipping FW build. ",
186
+ UserWarning
187
+ )
@@ -1,9 +1,7 @@
1
1
  {
2
2
  "samples" : {
3
3
  "signal" : {
4
- "all_file_path":"data/01_raw/test.root",
5
4
  "files":"data/01_raw/test.root",
6
- "path":"data/01_raw/test.root",
7
5
  "file_pattern":["test.root"],
8
6
  "DAS" : "Blabla",
9
7
  "type" : "123",
@@ -13,9 +11,7 @@
13
11
  "is_signal": true
14
12
  },
15
13
  "background" : {
16
- "all_file_path":"data/01_raw/test.root",
17
14
  "files":"data/01_raw/test.root",
18
- "path":"data/01_raw/test.root",
19
15
  "file_pattern":["test.root"],
20
16
  "DAS" : "Blabla",
21
17
  "type" : "123",
@@ -0,0 +1,49 @@
1
+ import logging, json
2
+ from glob import glob
3
+ from kedro.io import AbstractDataset
4
+
5
# Placeholder configuration handed to get_dasgoclient_metadata; the stub
# implementation ignores it — replace when a real DAS query is wired in.
METADATA_CONFIG = {"x": 0}
6
+
7
class MetaDataset(AbstractDataset):
    """Kedro dataset that resolves sample file globs and records DAS metadata.

    ``_load`` reads a JSON samples description from ``filepath``; ``_save``
    expands each sample's ``files`` glob pattern in place and writes the
    collected per-sample DAS metadata back to the same file.
    """

    def __init__(self, filepath: str, sample_key: str):
        # JSON file used for both loading and saving.
        self._filepath = filepath
        # Top-level key under which the samples of interest live.
        self._sample_key = sample_key
        self.logger = logging.getLogger(__name__)

    def get_dasgoclient_metadata(self, das_name: str, config: dict) -> dict:
        # Stub: a real implementation would query dasgoclient for das_name;
        # config is currently unused.
        self.logger.info(f"Fetching DAS metadata for dataset: {das_name}")
        return {"gridpack": "0.0.0"}

    def _load(self) -> dict:
        """Read the JSON samples description and return it as a dict."""
        self.logger.info(f"Processing file: {self._filepath}")
        with open(self._filepath, "r") as handle:
            return json.load(handle)

    def _save(self, samples: dict) -> None:
        """Expand file globs for every sample and persist DAS metadata."""
        metadata = {}

        for sample_name, sample_info in samples.get(self._sample_key, {}).items():
            self.logger.info(f"Processing sample: {sample_name}")

            sample_files = sample_info.get("files", "")
            # A string is treated as a glob pattern; a list is taken as-is.
            if isinstance(sample_files, str):
                resolved_files = glob(sample_files)
            else:
                resolved_files = sample_files
            sample_info["files"] = resolved_files

            self.logger.info(f"Found {len(resolved_files)} files for {sample_name}.")

            metadata[sample_name] = self.get_dasgoclient_metadata(
                sample_info.get("DAS", "Unknown"), METADATA_CONFIG
            )

        # NOTE(review): this overwrites the samples description file with the
        # metadata only — confirm the original content is not needed afterwards.
        with open(self._filepath, "w") as handle:
            json.dump(metadata, handle, indent=4)

    def _describe(self) -> dict:
        """Return the constructor arguments identifying this dataset."""
        return {"filepath": self._filepath, "sample_key": self._sample_key}
+
49
+
@@ -0,0 +1,70 @@
1
+ """
2
+ This is a boilerplate pipeline 'compile'
3
+ generated using Kedro 1.0.0
4
+ """
5
+
6
+ import logging
7
+ import mlflow
8
+ from mlflow.tracking import MlflowClient
9
+ import os
10
+ import numpy as np
11
+ import pandas as pd
12
+ from triggerflow.core import TriggerModel
13
+ from triggerflow.mlflow_wrapper import log_model
14
+ from sklearn.metrics import roc_auc_score
15
+
16
+
17
def compile_model(
    model, X_test: pd.DataFrame, y_test: pd.DataFrame
) -> pd.DataFrame:
    """Compile the model to firmware and validate software/firmware agreement.

    Wraps the trained native model in a :class:`TriggerModel`, compiles and
    saves the archive, runs inference through both the software and firmware
    paths, persists the raw predictions, and logs the ROC AUC of each path.

    Args:
        model: Trained native model to wrap and compile.
        X_test: Test features, one row per event.
        y_test: Binary test labels aligned with ``X_test``.

    Returns:
        ``[auc_software, auc_firmware]`` — ROC AUCs of the two inference paths.
    """
    # get logger for reporting
    logger = logging.getLogger(__name__)

    # NOTE(review): offsets/shifts are sized by X_test.shape[0] (number of
    # events); firmware scales are usually per-feature — confirm this should
    # not be X_test.shape[1].
    scales = {
        'offsets': [int(x) for x in np.ones(X_test.shape[0])],
        'shifts': [int(x) for x in np.ones(X_test.shape[0])]
    }

    trigger_model = TriggerModel(
        config="trigger_model_config.yaml",
        native_model=model,
        scales=scales
    )
    trigger_model()

    trigger_model.save("triggermodel.tar.xz")

    # Firmware inference requires a C-contiguous buffer.
    X = np.ascontiguousarray(X_test.values)

    output_software = trigger_model.software_predict(X)
    output_firmware = trigger_model.firmware_predict(X)
    try:
        # QONNX is optional; its absence is informational, not an error.
        trigger_model.qonnx_predict(X)
    except Exception:
        logger.info('QONNX representation not found')

    outdir = "data/06_compile"
    os.makedirs(outdir, exist_ok=True)  # np.save does not create directories

    np.save(os.path.join(outdir, "output_software.npy"), np.array(output_software))
    np.save(os.path.join(outdir, "output_firmware.npy"), np.array(output_firmware))

    auc_software = roc_auc_score(y_test, output_software)
    auc_firmware = roc_auc_score(y_test, output_firmware)

    logger.info(f"Area under ROC curve Software: {auc_software:.4f}")
    logger.info(f"Area under ROC curve Firmware: {auc_firmware:.4f}")

    return [auc_software, auc_firmware]