emhass 0.12.1.tar.gz → 0.12.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. emhass-0.12.3/.devcontainer/devcontainer.json +49 -0
  2. emhass-0.12.3/.github/ISSUE_TEMPLATE/bug_report.md +35 -0
  3. emhass-0.12.3/.github/dependabot.yml +17 -0
  4. emhass-0.12.3/.github/workflows/codecov.yaml +38 -0
  5. emhass-0.12.3/.github/workflows/codeql.yml +55 -0
  6. emhass-0.12.3/.github/workflows/docker-build-test.yaml +87 -0
  7. emhass-0.12.3/.github/workflows/publish_docker-test.yaml +177 -0
  8. emhass-0.12.3/.github/workflows/publish_docker.yaml +173 -0
  9. emhass-0.12.3/.github/workflows/python-test.yml +60 -0
  10. emhass-0.12.3/.github/workflows/updatePVLibDB.yaml +38 -0
  11. emhass-0.12.3/.github/workflows/upload-package-to-pypi.yaml +57 -0
  12. emhass-0.12.3/.gitignore +162 -0
  13. emhass-0.12.3/.python-version +1 -0
  14. emhass-0.12.3/.readthedocs.yaml +28 -0
  15. {emhass-0.12.1 → emhass-0.12.3}/CHANGELOG.md +19 -0
  16. emhass-0.12.3/Dockerfile +142 -0
  17. {emhass-0.12.1 → emhass-0.12.3}/PKG-INFO +34 -31
  18. {emhass-0.12.1 → emhass-0.12.3}/README.md +1 -0
  19. emhass-0.12.3/deploy_docker.mk +12 -0
  20. emhass-0.12.3/docs/Makefile +20 -0
  21. emhass-0.12.3/docs/_static/css/custom.css +375 -0
  22. emhass-0.12.3/docs/conf.py +62 -0
  23. emhass-0.12.3/docs/config.md +123 -0
  24. emhass-0.12.3/docs/develop.md +340 -0
  25. emhass-0.12.3/docs/differences.md +121 -0
  26. emhass-0.12.3/docs/emhass.md +55 -0
  27. emhass-0.12.3/docs/forecasts.md +345 -0
  28. emhass-0.12.3/docs/images/Community_button.svg +40 -0
  29. emhass-0.12.3/docs/images/Documentation_button.svg +40 -0
  30. emhass-0.12.3/docs/images/EMHASS_Add_on_button.svg +40 -0
  31. emhass-0.12.3/docs/images/EMHASS_button.svg +40 -0
  32. emhass-0.12.3/docs/images/Issues_button.svg +40 -0
  33. emhass-0.12.3/docs/images/deferrable_timewindow_addon_config.png +0 -0
  34. emhass-0.12.3/docs/images/deferrable_timewindow_edge_cases.png +0 -0
  35. emhass-0.12.3/docs/images/deferrable_timewindow_evexample.png +0 -0
  36. emhass-0.12.3/docs/images/emhass_logo.png +0 -0
  37. emhass-0.12.3/docs/images/emhass_logo.svg +563 -0
  38. emhass-0.12.3/docs/images/ems_schema.png +0 -0
  39. emhass-0.12.3/docs/images/ems_schema.svg +5062 -0
  40. emhass-0.12.3/docs/images/forecasted_PV_data.png +0 -0
  41. emhass-0.12.3/docs/images/hp_hc_periods.png +0 -0
  42. emhass-0.12.3/docs/images/hp_hc_periods.svg +1679 -0
  43. emhass-0.12.3/docs/images/inputs_cost_price.png +0 -0
  44. emhass-0.12.3/docs/images/inputs_cost_price.svg +1 -0
  45. emhass-0.12.3/docs/images/inputs_dayahead.png +0 -0
  46. emhass-0.12.3/docs/images/inputs_dayahead.svg +1 -0
  47. emhass-0.12.3/docs/images/inputs_power.png +0 -0
  48. emhass-0.12.3/docs/images/inputs_power.svg +1 -0
  49. emhass-0.12.3/docs/images/inputs_power_load_forecast.svg +1 -0
  50. emhass-0.12.3/docs/images/load_forecast_knn_bare.svg +1 -0
  51. emhass-0.12.3/docs/images/load_forecast_knn_bare_backtest.svg +1 -0
  52. emhass-0.12.3/docs/images/load_forecast_knn_optimized.svg +1 -0
  53. emhass-0.12.3/docs/images/load_forecast_production.svg +1 -0
  54. emhass-0.12.3/docs/images/naive_forecast.png +0 -0
  55. emhass-0.12.3/docs/images/naive_forecast.svg +1778 -0
  56. emhass-0.12.3/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim.png +0 -0
  57. emhass-0.12.3/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim.svg +1 -0
  58. emhass-0.12.3/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.png +0 -0
  59. emhass-0.12.3/docs/images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.svg +1 -0
  60. emhass-0.12.3/docs/images/optim_results_PV_defLoads_dayaheadOptim.png +0 -0
  61. emhass-0.12.3/docs/images/optim_results_PV_defLoads_dayaheadOptim.svg +1 -0
  62. emhass-0.12.3/docs/images/optim_results_PV_defLoads_perfectOptim.png +0 -0
  63. emhass-0.12.3/docs/images/optim_results_PV_defLoads_perfectOptim.svg +1 -0
  64. emhass-0.12.3/docs/images/optim_results_bar_plot.png +0 -0
  65. emhass-0.12.3/docs/images/optim_results_defLoads_dayaheadOptim.png +0 -0
  66. emhass-0.12.3/docs/images/optim_results_defLoads_dayaheadOptim.svg +1 -0
  67. emhass-0.12.3/docs/images/optimization_graphics.png +0 -0
  68. emhass-0.12.3/docs/images/optimization_graphics.svg +4701 -0
  69. emhass-0.12.3/docs/images/thermal_load_diagram.svg +733 -0
  70. emhass-0.12.3/docs/images/workflow.png +0 -0
  71. emhass-0.12.3/docs/images/workflow.svg +6568 -0
  72. emhass-0.12.3/docs/index.md +37 -0
  73. emhass-0.12.3/docs/intro.md +4 -0
  74. emhass-0.12.3/docs/log.txt +48 -0
  75. emhass-0.12.3/docs/lpems.md +259 -0
  76. emhass-0.12.3/docs/make.bat +35 -0
  77. emhass-0.12.3/docs/mlforecaster.md +170 -0
  78. emhass-0.12.3/docs/mlregressor.md +191 -0
  79. emhass-0.12.3/docs/requirements.txt +6 -0
  80. emhass-0.12.3/docs/study_case.md +167 -0
  81. emhass-0.12.3/docs/thermal_model.md +125 -0
  82. emhass-0.12.3/gunicorn.conf.py +11 -0
  83. emhass-0.12.3/options.json +13 -0
  84. {emhass-0.12.1 → emhass-0.12.3}/pyproject.toml +23 -20
  85. emhass-0.12.3/scripts/dayahead_optim.sh +3 -0
  86. emhass-0.12.3/scripts/load_clustering.py +265 -0
  87. emhass-0.12.3/scripts/load_forecast_sklearn.py +376 -0
  88. emhass-0.12.3/scripts/optim_results_analysis.py +214 -0
  89. emhass-0.12.3/scripts/publish_data.sh +3 -0
  90. emhass-0.12.3/scripts/read_csv_plot_data.py +207 -0
  91. emhass-0.12.3/scripts/requirements.txt +3 -0
  92. emhass-0.12.3/scripts/save_pvlib_module_inverter_database.py +138 -0
  93. emhass-0.12.3/scripts/script_debug_forecasts.py +75 -0
  94. emhass-0.12.3/scripts/script_debug_optim.py +184 -0
  95. emhass-0.12.3/scripts/script_simple_thermal_model.py +215 -0
  96. emhass-0.12.3/scripts/script_thermal_model_optim.py +209 -0
  97. emhass-0.12.3/scripts/special_config_analysis.py +356 -0
  98. emhass-0.12.3/scripts/special_options.json +89 -0
  99. emhass-0.12.3/scripts/use_cases_analysis.py +282 -0
  100. emhass-0.12.3/secrets_emhass(example).yaml +13 -0
  101. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/command_line.py +13 -8
  102. emhass-0.12.3/src/emhass/data/cec_inverters.pbz2 +0 -0
  103. emhass-0.12.3/src/emhass/data/cec_modules.pbz2 +0 -0
  104. emhass-0.12.3/src/emhass/data/emhass_inverters.csv +8 -0
  105. emhass-0.12.3/src/emhass/data/emhass_modules.csv +6 -0
  106. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/forecast.py +123 -95
  107. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/machine_learning_forecaster.py +41 -49
  108. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/optimization.py +88 -24
  109. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/retrieve_hass.py +29 -1
  110. emhass-0.12.3/src/emhass/static/img/emhass_icon.png +0 -0
  111. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/utils.py +54 -27
  112. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/web_server.py +71 -29
  113. emhass-0.12.3/tests/__init__.py +0 -0
  114. {emhass-0.12.1 → emhass-0.12.3}/tests/test_command_line_utils.py +1 -2
  115. {emhass-0.12.1 → emhass-0.12.3}/tests/test_forecast.py +63 -2
  116. {emhass-0.12.1 → emhass-0.12.3}/tests/test_machine_learning_forecaster.py +6 -4
  117. {emhass-0.12.1 → emhass-0.12.3}/tests/test_optimization.py +6 -1
  118. {emhass-0.12.1 → emhass-0.12.3}/tests/test_retrieve_hass.py +25 -18
  119. {emhass-0.12.1 → emhass-0.12.3}/tests/test_utils.py +534 -10
  120. emhass-0.12.1/data/data_train_load_clustering.pkl +0 -0
  121. emhass-0.12.1/data/data_train_load_forecast.pkl +0 -0
  122. emhass-0.12.1/data/test_df_final.pkl +0 -0
  123. emhass-0.12.1/setup.cfg +0 -4
  124. emhass-0.12.1/src/emhass/data/cec_inverters.pbz2 +0 -0
  125. emhass-0.12.1/src/emhass/data/cec_modules.pbz2 +0 -0
  126. emhass-0.12.1/src/emhass.egg-info/PKG-INFO +0 -668
  127. emhass-0.12.1/src/emhass.egg-info/SOURCES.txt +0 -62
  128. emhass-0.12.1/src/emhass.egg-info/dependency_links.txt +0 -1
  129. emhass-0.12.1/src/emhass.egg-info/entry_points.txt +0 -2
  130. emhass-0.12.1/src/emhass.egg-info/requires.txt +0 -28
  131. emhass-0.12.1/src/emhass.egg-info/top_level.txt +0 -1
  132. {emhass-0.12.1 → emhass-0.12.3}/CODE_OF_CONDUCT.md +0 -0
  133. {emhass-0.12.1 → emhass-0.12.3}/CONTRIBUTING.md +0 -0
  134. {emhass-0.12.1 → emhass-0.12.3}/LICENSE +0 -0
  135. {emhass-0.12.1 → emhass-0.12.3}/MANIFEST.in +0 -0
  136. {emhass-0.12.1 → emhass-0.12.3}/data/data_load_cost_forecast.csv +0 -0
  137. {emhass-0.12.1 → emhass-0.12.3}/data/data_load_forecast.csv +0 -0
  138. {emhass-0.12.1 → emhass-0.12.3}/data/data_prod_price_forecast.csv +0 -0
  139. {emhass-0.12.1 → emhass-0.12.3}/data/data_weather_forecast.csv +0 -0
  140. {emhass-0.12.1 → emhass-0.12.3}/data/heating_prediction.csv +0 -0
  141. {emhass-0.12.1 → emhass-0.12.3}/data/opt_res_latest.csv +0 -0
  142. {emhass-0.12.1 → emhass-0.12.3}/data/opt_res_perfect_optim_cost.csv +0 -0
  143. {emhass-0.12.1 → emhass-0.12.3}/data/opt_res_perfect_optim_profit.csv +0 -0
  144. {emhass-0.12.1 → emhass-0.12.3}/data/opt_res_perfect_optim_self-consumption.csv +0 -0
  145. {emhass-0.12.1 → emhass-0.12.3}/data/test_response_get_data_get_method.pbz2 +0 -0
  146. {emhass-0.12.1 → emhass-0.12.3}/data/test_response_scrapper_get_method.pbz2 +0 -0
  147. {emhass-0.12.1 → emhass-0.12.3}/data/test_response_solarforecast_get_method.pbz2 +0 -0
  148. {emhass-0.12.1 → emhass-0.12.3}/data/test_response_solcast_get_method.pbz2 +0 -0
  149. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/__init__.py +0 -0
  150. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/data/associations.csv +0 -0
  151. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/data/config_defaults.json +0 -0
  152. {emhass-0.12.1/src/emhass/static → emhass-0.12.3/src/emhass}/img/emhass_icon.png +0 -0
  153. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/machine_learning_regressor.py +0 -0
  154. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/advanced.html +0 -0
  155. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/basic.html +0 -0
  156. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/configuration_list.html +0 -0
  157. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/configuration_script.js +0 -0
  158. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/data/param_definitions.json +0 -0
  159. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/img/emhass_logo_short.svg +0 -0
  160. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/img/feather-sprite.svg +0 -0
  161. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/script.js +0 -0
  162. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/static/style.css +0 -0
  163. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/templates/configuration.html +0 -0
  164. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/templates/index.html +0 -0
  165. {emhass-0.12.1 → emhass-0.12.3}/src/emhass/templates/template.html +0 -0
  166. {emhass-0.12.1 → emhass-0.12.3}/tests/test_machine_learning_regressor.py +0 -0
emhass-0.12.3/.devcontainer/devcontainer.json
@@ -0,0 +1,49 @@
+ // For format details, see https://aka.ms/devcontainer.json. For config options, see the
+ // README at: https://github.com/devcontainers/templates/tree/main/src/python
+ {
+   "name": "EMHASS",
+   "build": {
+     "dockerfile": "../Dockerfile",
+     "context": "../",
+     "args": { "TARGETARCH": "amd64"}
+   },
+   "features": {
+     "ghcr.io/devcontainers/features/common-utils:2": {
+       "installBash": "true",
+       "configureBashAsDefaultShell": "true"
+     }
+     // "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}
+   },
+   //"appPort": ["5000:5000"] //used to access app from external device (User discretion advised)
+
+
+   // Security issue, may be needed for Podman
+   // "runArgs": [
+   //   "--userns=keep-id",
+   //   "--pid=host"
+   // ],
+   // "remoteUser": "root",
+   // "containerUser": "root",
+
+
+   "customizations": {
+     // Configure properties specific to VS Code.
+     "vscode": {
+       // Add the IDs of extensions you want installed when the container is created.
+       "extensions": ["ms-python.debugpy", "ms-python.python","charliermarsh.ruff"],
+       "settings": {
+         "[python]": {
+           "editor.formatOnSave": true,
+           "editor.codeActionsOnSave": {
+             "source.fixAll": "explicit",
+             "source.organizeImports": "explicit"
+           },
+           "editor.defaultFormatter": "charliermarsh.ruff"
+         }
+       }
+     }
+   },
+
+   "postCreateCommand": ["uv", "pip", "install", "--cache-dir", ".cache", "--verbose", ".[test]"]
+
+ }
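The devcontainer above builds the repository Dockerfile with TARGETARCH fixed to amd64 and then installs EMHASS with its test extras through uv. A rough local equivalent outside VS Code (a sketch only; the emhass-dev tag and the interactive bash shell are illustrative, not part of the package):

    # Build the image from the repository root with the same build arg the devcontainer passes
    docker build --build-arg TARGETARCH=amd64 -t emhass-dev .
    # Open a shell in the container; inside it, install the test extras as postCreateCommand does
    docker run --rm -it --entrypoint /bin/bash emhass-dev
    uv pip install --cache-dir .cache --verbose ".[test]"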
emhass-0.12.3/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,35 @@
+ ---
+ name: Bug report
+ about: Create a report to help us improve
+ title: ''
+ labels: ''
+ assignees: ''
+
+ ---
+
+ **Describe the bug**
+ A clear and concise description of what the bug is.
+
+ **To Reproduce**
+ Steps to reproduce the behavior
+
+ **Expected behavior**
+ A clear and concise description of what you expected to happen.
+
+ **Screenshots**
+ If applicable, add screenshots to help explain your problem.
+
+ **Home Assistant installation type**
+ - Home Assistant OS
+ - Home Assistant Supervised
+ - Home Assistant Core
+
+ **Your hardware**
+ - OS: HA OS, Windows, Linux, etc
+ - Architecture: amd64, armhf, armv7, aarch64
+
+ **EMHASS installation type**
+ - Add-on, Docker Standalone, Legacy Python virtual environment
+
+ **Additional context**
+ Add any other context about the problem here.
emhass-0.12.3/.github/dependabot.yml
@@ -0,0 +1,17 @@
+ # To get started with Dependabot version updates, you'll need to specify which
+ # package ecosystems to update and where the package manifests are located.
+ # Please see the documentation for all configuration options:
+ # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+ version: 2
+ updates:
+   - package-ecosystem: "pip"
+     directory: "/"
+     schedule:
+       interval: "weekly"
+
+   # Enable version updates for Docker
+   - package-ecosystem: "docker"
+     directory: "/"
+     schedule:
+       interval: "weekly"
emhass-0.12.3/.github/workflows/codecov.yaml
@@ -0,0 +1,38 @@
+ name: CodeCov
+
+ on:
+   push:
+     branches: [ master ]
+   pull_request:
+     branches: [ master ]
+ jobs:
+   run:
+     runs-on: ${{ matrix.os }}
+     strategy:
+       matrix:
+         os: [ubuntu-latest]
+     env:
+       OS: ${{ matrix.os }}
+       PYTHON: '3.11'
+     steps:
+       - uses: actions/checkout@master
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version-file: ".python-version"
+       - name: Generate Report
+         run: |
+           uv venv
+           uv pip install .[test]
+           coverage run -m --source=emhass unittest
+           coverage report
+           coverage xml
+       - name: Upload coverage reports to Codecov
+         run: |
+           # Replace `linux` below with the appropriate OS
+           # Options are `alpine`, `linux`, `macos`, `windows`
+           curl -Os https://uploader.codecov.io/latest/linux/codecov
+           chmod +x codecov
+           ./codecov -t ${CODECOV_TOKEN}
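The coverage job above maps to a short local sequence (a minimal sketch of the Generate Report step; the Codecov upload is left out because it needs a CODECOV_TOKEN):

    uv venv
    uv pip install .[test]
    coverage run -m --source=emhass unittest
    coverage report
    coverage xml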
emhass-0.12.3/.github/workflows/codeql.yml
@@ -0,0 +1,55 @@
+ # For most projects, this workflow file will not need changing; you simply need
+ # to commit it to your repository.
+ #
+ # You may wish to alter this file to override the set of languages analyzed,
+ # or to provide custom queries or build logic.
+ #
+ # ******** NOTE ********
+ # We have attempted to detect the languages in your repository. Please check
+ # the `language` matrix defined below to confirm you have the correct set of
+ # supported CodeQL languages.
+ #
+ name: "CodeQL"
+
+ on:
+   pull_request:
+     branches: [ "master" ]
+   schedule:
+     - cron: '22 3 * * 2'
+
+ jobs:
+   analyze:
+     name: Analyze
+     runs-on: ubuntu-latest
+     permissions:
+       actions: read
+       contents: read
+       security-events: write
+
+     strategy:
+       fail-fast: false
+       matrix:
+         language: [ 'python' , 'javascript' ]
+         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+         # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+
+       # Initializes the CodeQL tools for scanning.
+       - name: Initialize CodeQL
+         uses: github/codeql-action/init@v2
+         with:
+           languages: ${{ matrix.language }}
+           # If you wish to specify custom queries, you can do so here or in a config file.
+           # By default, queries listed here will override any specified in a config file.
+           # Prefix the list here with "+" to use these queries and those in the config file.
+
+           # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+           # queries: security-extended,security-and-quality
+
+       - name: Perform CodeQL Analysis
+         uses: github/codeql-action/analyze@v2
+         with:
+           category: "/language:${{matrix.language}}"
emhass-0.12.3/.github/workflows/docker-build-test.yaml
@@ -0,0 +1,87 @@
+ # This action test-builds the EMHASS Docker image for each architecture.
+ name: "Test Building Docker Image"
+
+ on:
+   push:
+     branches: [master]
+   pull_request:
+     branches: [master]
+   workflow_dispatch:
+
+ permissions:
+   actions: read
+   security-events: write
+   contents: read
+
+ jobs:
+   build:
+     runs-on: ubuntu-latest
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64, os_version: debian },
+             { target_arch: armv7, os_version: debian },
+             { target_arch: armhf, os_version: raspbian },
+             { target_arch: aarch64, os_version: debian },
+           ]
+     steps:
+       - name: Checkout the repository
+         uses: actions/checkout@v4
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Build and push by digest
+         id: build
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           platforms: ${{ matrix.platform.buildx }}
+           build-args: |
+             TARGETARCH=${{ matrix.platform.target_arch }}
+             os_version=${{ matrix.platform.os_version }}
+           load: true
+       - name: Test Image # Docker run is expected to fail with a FileNotFound error for secrets_emhass.yaml
+         run: docker run --rm -it ${{ steps.build.outputs.imageid }} | grep -q secrets_emhass.yaml && echo 0 || echo 1
+       # Google OSV-Scanner
+       # Extract the Debian and Python package lists stored in the image
+       - name: Export Debian package list
+         run: mkdir OSV && docker run --rm --entrypoint '/bin/cat' ${{ steps.build.outputs.imageid }} /var/lib/dpkg/status >> ./OSV/${{ matrix.platform.target_arch }}.status
+       - name: Export Python package list
+         run: docker run --rm --entrypoint '/bin/cat' ${{ steps.build.outputs.imageid }} uv.lock >> ./OSV/${{ matrix.platform.target_arch }}.lock
+       - name: Upload package list as digest
+         uses: actions/upload-artifact@v4
+         with:
+           name: ${{ matrix.platform.target_arch }}-packages
+           path: ./OSV/*
+           if-no-files-found: error
+           retention-days: 1
+
+   osv-scan:
+     needs:
+       - build
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64 },
+             { target_arch: armv7 },
+             { target_arch: armhf },
+             { target_arch: aarch64 }
+           ]
+     # Check the Docker image's Debian and Python package lists for known vulnerabilities
+     uses: "geoderp/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v0.0.1"
+     with:
+       download-artifact: "${{ matrix.platform.target_arch }}-packages"
+       matrix-property: "${{ matrix.platform.target_arch }}-"
+       fail-on-vuln: false
+       scan-args: |-
+         --lockfile=dpkg-status:./${{ matrix.platform.target_arch }}.status
+         --lockfile=poetry.lock:./${{matrix.platform.target_arch }}.lock
+     permissions:
+       security-events: write
+       contents: read
+       actions: read
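The same package-list export used for the OSV scan can be run against a locally built image (a sketch; <image-id> is a placeholder for the imageid output of the build step, and amd64 stands in for the matrix architecture):

    mkdir OSV
    # Debian packages installed in the image
    docker run --rm --entrypoint '/bin/cat' <image-id> /var/lib/dpkg/status > ./OSV/amd64.status
    # Python packages pinned in the image's uv lockfile
    docker run --rm --entrypoint '/bin/cat' <image-id> uv.lock > ./OSV/amd64.lock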
emhass-0.12.3/.github/workflows/publish_docker-test.yaml
@@ -0,0 +1,177 @@
+ # Template modified from: https://docs.docker.com/build/ci/github-actions/multi-platform/ & https://docs.github.com/en/actions/use-cases-and-examples/publishing-packages/publishing-docker-images
+ # Builds from branch master, for testing before deploying a new release
+ name: "Publish Docker test image"
+
+ on:
+   push:
+     branches: [testing]
+   workflow_dispatch:
+
+ env:
+   REGISTRY: ghcr.io
+   IMAGE_NAME: ${{ github.repository }}
+
+ jobs:
+   build:
+     runs-on: ubuntu-latest
+     permissions:
+       contents: read
+       packages: write
+       attestations: write
+       id-token: write
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64, os_version: debian },
+             { target_arch: armv7, os_version: debian },
+             { target_arch: armhf, os_version: raspbian },
+             { target_arch: aarch64, os_version: debian },
+           ]
+     steps:
+       # Pull git repo and build each architecture image separately (with QEMU and Buildx)
+       - name: lowercase repo
+         run: |
+           echo "IMAGE_NAME_LOWER=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+       - name: Checkout the repository
+         uses: actions/checkout@v4
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Log in to the Container registry
+         uses: docker/login-action@v3
+         with:
+           registry: ${{ env.REGISTRY }}
+           username: ${{ github.actor }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+
+       # Build and save Docker image locally to check security of container packages
+       - name: Build Docker image and cache
+         id: cache
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           platforms: ${{ matrix.platform.buildx }}
+           build-args: |
+             TARGETARCH=${{ matrix.platform.target_arch }}
+             os_version=${{ matrix.platform.os_version }}
+           load: true
+       # Extract a list of the installed Debian packages, export as artifact for vulnerability scanning with OSV
+       - name: Export Debian package list
+         run: mkdir OSV && docker run --rm --entrypoint '/bin/cat' ${{ steps.cache.outputs.imageid }} /var/lib/dpkg/status >> ./OSV/${{ matrix.platform.target_arch }}.status
+       - name: Export Python package list
+         run: docker run --rm --entrypoint '/bin/cat' ${{ steps.cache.outputs.imageid }} uv.lock >> ./OSV/${{ matrix.platform.target_arch }}.lock
+       - name: Upload package list as digest
+         uses: actions/upload-artifact@v4
+         with:
+           name: ${{ matrix.platform.target_arch }}-packages
+           path: ./OSV/*
+           if-no-files-found: error
+           retention-days: 1
+       - name: Extract metadata (tags, labels) for Docker
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}
+
+       # Build Docker image for pushing to registry
+       - name: Build and push Docker image by digest
+         id: build
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           platforms: linux/${{ matrix.platform.target_arch }}
+           build-args: |
+             TARGETARCH=${{ matrix.platform.target_arch }}
+             os_version=${{ matrix.platform.os_version }}
+           labels: ${{ steps.meta.outputs.labels }}
+           outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }},push-by-digest=true,name-canonical=true,push=true
+       # Export the built images as artifact for the next job of merging
+       - name: Export digest
+         run: |
+           mkdir -p /tmp/digests
+           digest="${{ steps.build.outputs.digest }}"
+           touch "/tmp/digests/${digest#sha256:}"
+       - name: Upload digest
+         uses: actions/upload-artifact@v4
+         with:
+           name: digests-${{ matrix.platform.target_arch }}
+           path: /tmp/digests/*
+           if-no-files-found: error
+           retention-days: 1
+
+   # Google OSV-Scanner (check known vulnerabilities for Python & Debian packages)
+   osv-scan:
+     needs:
+       - build
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64 },
+             { target_arch: armv7 },
+             { target_arch: armhf },
+             { target_arch: aarch64 },
+           ]
+     uses: "geoderp/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v0.0.1"
+     with:
+       download-artifact: "${{ matrix.platform.target_arch }}-packages"
+       matrix-property: "${{ matrix.platform.target_arch }}-"
+       fail-on-vuln: false
+       scan-args: |-
+         --lockfile=dpkg-status:./${{ matrix.platform.target_arch }}.status
+         --lockfile=poetry.lock:./${{matrix.platform.target_arch }}.lock
+     permissions:
+       security-events: write
+       contents: read
+       actions: read
+
+   # Merge the per-platform images into a multi-platform image
+   merge:
+     if: always()
+     runs-on: ubuntu-latest
+     permissions:
+       packages: write
+       contents: read
+       attestations: write
+       id-token: write
+     needs:
+       - osv-scan
+       - build
+     steps:
+       - name: lowercase repo
+         run: |
+           echo "IMAGE_NAME_LOWER=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+       - name: Download digests
+         uses: actions/download-artifact@v4
+         with:
+           path: /tmp/digests
+           pattern: digests-*
+           merge-multiple: true
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Extract metadata (tags, labels) for Docker
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}
+           tags: |
+             type=sha
+             type=raw,value=test
+       - name: Log in to the Container registry
+         uses: docker/login-action@v3
+         with:
+           registry: ${{ env.REGISTRY }}
+           username: ${{ github.actor }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+       - name: Create manifest list and push
+         working-directory: /tmp/digests
+         run: |
+           docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+             $(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}@sha256:%s ' *)
+       - name: Inspect image
+         run: |
+           docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}:${{ steps.meta.outputs.version }}
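After the merge job has pushed the manifest list, the multi-platform result can be checked with the same command the Inspect image step runs (a sketch; <owner> is a placeholder for the lower-cased repository owner, and test is the raw tag set in the metadata step above):

    docker buildx imagetools inspect ghcr.io/<owner>/emhass:test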
emhass-0.12.3/.github/workflows/publish_docker.yaml
@@ -0,0 +1,173 @@
+ # Template modified from: https://docs.docker.com/build/ci/github-actions/multi-platform/ & https://docs.github.com/en/actions/use-cases-and-examples/publishing-packages/publishing-docker-images
+ name: "Publish Docker"
+
+ on:
+   release:
+     types: [published]
+   workflow_dispatch:
+
+ env:
+   REGISTRY: ghcr.io
+   IMAGE_NAME: ${{ github.repository }}
+
+ jobs:
+   build:
+     runs-on: ubuntu-latest
+     permissions:
+       contents: read
+       packages: write
+       attestations: write
+       id-token: write
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64, os_version: debian },
+             { target_arch: armv7, os_version: debian },
+             { target_arch: armhf, os_version: raspbian },
+             { target_arch: aarch64, os_version: debian },
+           ]
+     steps:
+       # Pull git repo and build each architecture image separately (with QEMU and Buildx)
+       - name: lowercase repo
+         run: |
+           echo "IMAGE_NAME_LOWER=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+       - name: Checkout the repository
+         uses: actions/checkout@v4
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Log in to the Container registry
+         uses: docker/login-action@v3
+         with:
+           registry: ${{ env.REGISTRY }}
+           username: ${{ github.actor }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+
+       # Build and save Docker image locally to check security of container packages
+       - name: Build Docker image and cache
+         id: cache
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           platforms: ${{ matrix.platform.buildx }}
+           build-args: |
+             TARGETARCH=${{ matrix.platform.target_arch }}
+             os_version=${{ matrix.platform.os_version }}
+           load: true
+       # Extract a list of the installed Debian packages, export as artifact for vulnerability scanning with OSV
+       - name: Export Debian package list
+         run: mkdir OSV && docker run --rm --entrypoint '/bin/cat' ${{ steps.cache.outputs.imageid }} /var/lib/dpkg/status >> ./OSV/${{ matrix.platform.target_arch }}.status
+       - name: Export Python package list
+         run: docker run --rm --entrypoint '/bin/cat' ${{ steps.cache.outputs.imageid }} uv.lock >> ./OSV/${{ matrix.platform.target_arch }}.lock
+       - name: Upload package list as digest
+         uses: actions/upload-artifact@v4
+         with:
+           name: ${{ matrix.platform.target_arch }}-packages
+           path: ./OSV/*
+           if-no-files-found: error
+           retention-days: 1
+       - name: Extract metadata (tags, labels) for Docker
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}
+
+       # Build Docker image for pushing to registry
+       - name: Build and push Docker image by digest
+         id: build
+         uses: docker/build-push-action@v5
+         with:
+           context: .
+           platforms: linux/${{ matrix.platform.target_arch }}
+           build-args: |
+             TARGETARCH=${{ matrix.platform.target_arch }}
+             os_version=${{ matrix.platform.os_version }}
+           labels: ${{ steps.meta.outputs.labels }}
+           outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }},push-by-digest=true,name-canonical=true,push=true
+       # Export the built images as artifact for the next job of merging
+       - name: Export digest
+         run: |
+           mkdir -p /tmp/digests
+           digest="${{ steps.build.outputs.digest }}"
+           touch "/tmp/digests/${digest#sha256:}"
+       - name: Upload digest
+         uses: actions/upload-artifact@v4
+         with:
+           name: digests-${{ matrix.platform.target_arch }}
+           path: /tmp/digests/*
+           if-no-files-found: error
+           retention-days: 1
+
+   # Google OSV-Scanner (check known vulnerabilities for Python & Debian packages)
+   osv-scan:
+     needs:
+       - build
+     strategy:
+       fail-fast: false
+       matrix:
+         platform:
+           [
+             { target_arch: amd64 },
+             { target_arch: armv7 },
+             { target_arch: armhf },
+             { target_arch: aarch64 },
+           ]
+     uses: "geoderp/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v0.0.1"
+     with:
+       download-artifact: "${{ matrix.platform.target_arch }}-packages"
+       matrix-property: "${{ matrix.platform.target_arch }}-"
+       fail-on-vuln: false
+       scan-args: |-
+         --lockfile=dpkg-status:./${{ matrix.platform.target_arch }}.status
+         --lockfile=poetry.lock:./${{matrix.platform.target_arch }}.lock
+     permissions:
+       security-events: write
+       contents: read
+       actions: read
+
+   # Merge the per-platform images into a multi-platform image
+   merge:
+     if: always()
+     runs-on: ubuntu-latest
+     permissions:
+       packages: write
+       contents: read
+       attestations: write
+       id-token: write
+     needs:
+       - osv-scan
+       - build
+     steps:
+       - name: lowercase repo
+         run: |
+           echo "IMAGE_NAME_LOWER=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+       - name: Download digests
+         uses: actions/download-artifact@v4
+         with:
+           path: /tmp/digests
+           pattern: digests-*
+           merge-multiple: true
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+       - name: Extract metadata (tags, labels) for Docker
+         id: meta
+         uses: docker/metadata-action@v5
+         with:
+           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}
+       - name: Log in to the Container registry
+         uses: docker/login-action@v3
+         with:
+           registry: ${{ env.REGISTRY }}
+           username: ${{ github.actor }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+       - name: Create manifest list and push
+         working-directory: /tmp/digests
+         run: |
+           docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+             $(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}@sha256:%s ' *)
+       - name: Inspect image
+         run: |
+           docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_LOWER }}:${{ steps.meta.outputs.version }}
emhass-0.12.3/.github/workflows/python-test.yml
@@ -0,0 +1,60 @@
+ name: Python test
+
+ on:
+   push:
+     branches: [master]
+   pull_request:
+     branches: [master]
+
+ permissions:
+   security-events: write
+   contents: read
+
+ jobs:
+   # Google OSV-Scanner
+   build:
+     runs-on: ${{ matrix.os }}
+     strategy:
+       fail-fast: false
+       matrix:
+         python-version: ["3.10", "3.11"]
+         os: [ubuntu-latest, macos-latest, windows-latest]
+     env:
+       OS: ${{ matrix.os }}
+       PYTHON: ${{ matrix.python-version }}
+
+     steps:
+       - uses: actions/checkout@v4
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version-file: ".python-version"
+       - name: Set up Python venv
+         run: uv venv
+       - name: Special dependencies for macos
+         run: |
+           brew install hdf5
+           uv pip install numpy==1.26.0
+           uv pip install tables==3.9.1
+         if: ${{ matrix.os == 'macos-latest' }}
+       - name: Install EMHASS with test dependencies
+         run: |
+           uv pip install .[test] && uv lock
+       - name: Test with pytest
+         run: |
+           uv run pytest
+   scan-pr:
+     needs:
+       - build
+     uses: "geoderp/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@v0.0.1"
+     with:
+       fail-on-vuln: false
+       scan-args: |-
+         --recursive
+         ./
+     permissions:
+       security-events: write
+       contents: read
+       actions: read
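The test job above also maps to a short local sequence (a sketch of the workflow steps; the macOS-only line is optional and mirrors the Special dependencies step):

    uv venv
    # macOS only: brew install hdf5 && uv pip install numpy==1.26.0 tables==3.9.1
    uv pip install .[test] && uv lock
    uv run pytest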