hafnia 0.1.23__tar.gz → 0.1.25__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/build.yaml +2 -2
  2. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/check_release.yaml +1 -1
  3. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/ci_cd.yaml +1 -9
  4. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/lint.yaml +1 -1
  5. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/publish_docker.yaml +3 -3
  6. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/publish_pypi.yaml +1 -1
  7. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/tests.yaml +2 -2
  8. {hafnia-0.1.23 → hafnia-0.1.25}/.pre-commit-config.yaml +1 -1
  9. {hafnia-0.1.23 → hafnia-0.1.25}/.vscode/launch.json +26 -0
  10. {hafnia-0.1.23 → hafnia-0.1.25}/LICENSE +1 -1
  11. {hafnia-0.1.23 → hafnia-0.1.25}/PKG-INFO +32 -11
  12. {hafnia-0.1.23 → hafnia-0.1.25}/README.md +26 -8
  13. {hafnia-0.1.23 → hafnia-0.1.25}/docs/cli.md +2 -1
  14. {hafnia-0.1.23 → hafnia-0.1.25}/pyproject.toml +6 -3
  15. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/__main__.py +2 -1
  16. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/consts.py +1 -0
  17. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/data_cmds.py +5 -6
  18. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/experiment_cmds.py +0 -26
  19. hafnia-0.1.25/src/cli/recipe_cmds.py +49 -0
  20. hafnia-0.1.25/src/cli/runc_cmds.py +143 -0
  21. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/data/factory.py +10 -29
  22. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/experiment/hafnia_logger.py +6 -2
  23. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/builder.py +5 -5
  24. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/executor.py +6 -6
  25. hafnia-0.1.25/src/hafnia/utils.py +135 -0
  26. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_builder.py +84 -4
  27. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_executor.py +1 -1
  28. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_mdi_logger.py +13 -6
  29. {hafnia-0.1.23 → hafnia-0.1.25}/uv.lock +1254 -1209
  30. hafnia-0.1.23/docs/s2m.md +0 -84
  31. hafnia-0.1.23/examples/script2model/pytorch/Dockerfile +0 -10
  32. hafnia-0.1.23/examples/script2model/pytorch/src/lib/train_utils.py +0 -252
  33. hafnia-0.1.23/examples/script2model/pytorch/src/scripts/train.py +0 -60
  34. hafnia-0.1.23/src/cli/runc_cmds.py +0 -68
  35. hafnia-0.1.23/src/hafnia/utils.py +0 -83
  36. {hafnia-0.1.23 → hafnia-0.1.25}/.devcontainer/devcontainer.json +0 -0
  37. {hafnia-0.1.23 → hafnia-0.1.25}/.devcontainer/hooks/post_create +0 -0
  38. {hafnia-0.1.23 → hafnia-0.1.25}/.github/dependabot.yaml +0 -0
  39. {hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/Dockerfile +0 -0
  40. {hafnia-0.1.23 → hafnia-0.1.25}/.gitignore +0 -0
  41. {hafnia-0.1.23 → hafnia-0.1.25}/.python-version +0 -0
  42. {hafnia-0.1.23 → hafnia-0.1.25}/.vscode/extensions.json +0 -0
  43. {hafnia-0.1.23 → hafnia-0.1.25}/.vscode/settings.json +0 -0
  44. {hafnia-0.1.23 → hafnia-0.1.25}/docs/release.md +0 -0
  45. {hafnia-0.1.23 → hafnia-0.1.25}/examples/dataset_builder.py +0 -0
  46. {hafnia-0.1.23 → hafnia-0.1.25}/examples/example_load_dataset.py +0 -0
  47. {hafnia-0.1.23 → hafnia-0.1.25}/examples/example_logger.py +0 -0
  48. {hafnia-0.1.23 → hafnia-0.1.25}/examples/example_torchvision_dataloader.py +0 -0
  49. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/__init__.py +0 -0
  50. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/config.py +0 -0
  51. {hafnia-0.1.23 → hafnia-0.1.25}/src/cli/profile_cmds.py +0 -0
  52. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/__init__.py +0 -0
  53. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/data/__init__.py +0 -0
  54. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/experiment/__init__.py +0 -0
  55. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/http.py +0 -0
  56. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/log.py +0 -0
  57. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/__init__.py +0 -0
  58. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/api.py +0 -0
  59. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/download.py +0 -0
  60. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/platform/experiment.py +0 -0
  61. {hafnia-0.1.23 → hafnia-0.1.25}/src/hafnia/torch_helpers.py +0 -0
  62. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_check_example_scripts.py +0 -0
  63. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_cli.py +0 -0
  64. {hafnia-0.1.23 → hafnia-0.1.25}/tests/test_samples.py +0 -0
{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/build.yaml
@@ -18,7 +18,7 @@ jobs:
       package-version: ${{ steps.extract-version.outputs.package_version }}
     steps:
       - uses: actions/checkout@v4.2.2
-      - uses: actions/setup-python@v5.4.0
+      - uses: actions/setup-python@v5.6.0
        with:
          python-version-file: ${{ inputs.python-version-file }}

@@ -29,7 +29,7 @@ jobs:
          echo "package_version=$VERSION" >> $GITHUB_OUTPUT

      - name: Install uv
-       uses: astral-sh/setup-uv@v5
+       uses: astral-sh/setup-uv@v6
        with:
          version: 0.6.8

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/check_release.yaml
@@ -20,7 +20,7 @@ jobs:
       make_release: ${{ steps.check_release.outputs.make_release }}
     steps:
       - name: Download package artifact
-       uses: actions/download-artifact@v4.2.1
+       uses: actions/download-artifact@v4.3.0
        with:
          name: python-package
          path: dist/

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/ci_cd.yaml
@@ -44,17 +44,9 @@ jobs:
     with:
       python-version-file: "pyproject.toml"

-  publish-pypi-test:
-    name: Publish Package to TestPyPI
-    needs: build
-    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
-    uses: ./.github/workflows/publish_pypi.yaml
-    with:
-      environment: testpypi
-
   publish-docker-staging:
     name: Publish Docker Image to Staging
-    needs: [build, publish-pypi-test]
+    needs: build
     secrets: inherit
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
     uses: ./.github/workflows/publish_docker.yaml

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/lint.yaml
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4.2.2
-     - uses: actions/setup-python@v5.4.0
+     - uses: actions/setup-python@v5.6.0
       with:
         python-version-file: ${{ inputs.python-version-file }}
      - uses: pre-commit/action@v3.0.1

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/publish_docker.yaml
@@ -25,13 +25,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4.2.2
-     - uses: actions/setup-python@v5.4.0
+     - uses: actions/setup-python@v5.6.0
       id: python
       with:
         python-version-file: ${{ inputs.python-version-file }}

      - name: Download package artifact
-      uses: actions/download-artifact@v4.2.1
+      uses: actions/download-artifact@v4.3.0
       with:
         name: python-package
         path: dist/

@@ -60,7 +60,7 @@ jobs:
       uses: docker/setup-buildx-action@v3.10.0

      - name: Build and push
-      uses: docker/build-push-action@v6.15.0
+      uses: docker/build-push-action@v6.16.0
       env:
         ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
         ECR_REPOSITORY: mdi-runtime

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/publish_pypi.yaml
@@ -17,7 +17,7 @@ jobs:
       contents: read
     steps:
      - name: Download package artifact
-      uses: actions/download-artifact@v4.2.1
+      uses: actions/download-artifact@v4.3.0
       with:
         name: python-package
         path: dist/

{hafnia-0.1.23 → hafnia-0.1.25}/.github/workflows/tests.yaml
@@ -12,11 +12,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4.2.2
-     - uses: actions/setup-python@v5.4.0
+     - uses: actions/setup-python@v5.6.0
       with:
         python-version-file: ${{ inputs.python-version-file }}
      - name: Install uv
-      uses: astral-sh/setup-uv@v5
+      uses: astral-sh/setup-uv@v6
       with:
         version: 0.6.8
      - name: Install the project

{hafnia-0.1.23 → hafnia-0.1.25}/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.4
+    rev: v0.11.8
     hooks:
       - id: ruff
         types_or: [python, pyi]

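The bumped ruff hook can be exercised locally with standard pre-commit usage, e.g.:

```bash
# Re-run all hooks against the full repository after updating the rev
pre-commit run --all-files
```
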
{hafnia-0.1.23 → hafnia-0.1.25}/.vscode/launch.json
@@ -21,6 +21,32 @@
         "ls"
       ],
     },
+    {
+      "name": "cmd: hafnia runc launch-local",
+      "type": "debugpy",
+      "request": "launch",
+      "program": "${workspaceFolder}/src/cli/__main__.py",
+      "args": [
+        "runc",
+        "launch-local",
+        "--dataset",
+        "midwest-vehicle-detection-tiny",
+        "train --config-name yolov4-hafnia.yaml"
+      ],
+    },
+    {
+      "name": "cmd: hafnia runc build-local",
+      "type": "debugpy",
+      "request": "launch",
+      "program": "${workspaceFolder}/src/cli/__main__.py",
+      "args": [
+        "runc",
+        "build-local",
+        "train",
+        "--dataset",
+        "mnist",
+      ],
+    },
     {
       "name": "debug (hafnia data download mnist)",
       "type": "debugpy",

{hafnia-0.1.23 → hafnia-0.1.25}/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2025 Data-insight-Platform
+Copyright (c) 2025 Milestone Systems A/S
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

{hafnia-0.1.23 → hafnia-0.1.25}/PKG-INFO
@@ -1,18 +1,21 @@
 Metadata-Version: 2.4
 Name: hafnia
-Version: 0.1.23
-Summary: Python tools for communication with Hafnia platform.
-Author-email: Ivan Sahumbaiev <ivsa@milestone.dk>
+Version: 0.1.25
+Summary: Python SDK for communication with Hafnia platform.
+Author-email: Milestone Systems <hafniaplatform@milestone.dk>
 License-File: LICENSE
 Requires-Python: >=3.10
 Requires-Dist: boto3>=1.35.91
 Requires-Dist: click>=8.1.8
 Requires-Dist: datasets>=3.2.0
+Requires-Dist: emoji>=2.14.1
 Requires-Dist: flatten-dict>=0.4.2
+Requires-Dist: pathspec>=0.12.1
 Requires-Dist: pillow>=11.1.0
 Requires-Dist: pyarrow>=18.1.0
 Requires-Dist: pydantic>=2.10.4
 Requires-Dist: rich>=13.9.4
+Requires-Dist: seedir>=0.5.0
 Requires-Dist: tqdm>=4.67.1
 Provides-Extra: torch
 Requires-Dist: flatten-dict>=0.4.2; extra == 'torch'
@@ -167,31 +170,49 @@ and datasets available in the data library.
 By combining logging and dataset loading, we can now construct our model training recipe.
 
 To demonstrate this, we have provided a recipe project that serves as a template for creating and structuring training recipes
-[recipe-classification](https://github.com/Data-insight-Platform/recipe-classification)
+[recipe-classification](https://github.com/milestone-hafnia/recipe-classification)
 
 The project also contains additional information on how to structure your training recipe, use the `HafniaLogger`, the `load_dataset` function and different approach for launching
 the training recipe on the Hafnia platform.
 
+
+## Create, Build and Run `recipe.zip` locally
+In order to test recipe compatibility with Hafnia cloud use the following command to build and
+start the job locally.
+
+```bash
+# Create 'recipe.zip' from source folder '.'
+hafnia recipe create .
+
+# Build the docker image locally from a 'recipe.zip' file
+hafnia runc build-local recipe.zip
+
+# Execute the docker image locally with a desired dataset
+hafnia runc launch-local --dataset mnist "python scripts/train.py"
+```
+
 ## Detailed Documentation
 For more information, go to our [documentation page](https://hafnia.readme.io/docs/welcome-to-hafnia)
 or in below markdown pages.
 
 - [CLI](docs/cli.md) - Detailed guide for the Hafnia command-line interface
-- [Script2Model Documentation](docs/s2m.md) - Detailed guide for script2model
 - [Release lifecycle](docs/release.md) - Details about package release lifecycle.
 
 ## Development
 For development, we are using an uv based virtual python environment.
 
 Install uv
-
-    curl -LsSf https://astral.sh/uv/install.sh | sh
-
+```bash
+curl -LsSf https://astral.sh/uv/install.sh | sh
+```
 
 Install python dependencies including developer (`--dev`) and optional dependencies (`--all-extras`).
 
-    uv sync --all-extras --dev
+```bash
+uv sync --all-extras --dev
+```
 
 Run tests:
-
-    uv run pytest tests
+```bash
+uv run pytest tests
+```

{hafnia-0.1.23 → hafnia-0.1.25}/README.md
@@ -145,31 +145,49 @@ and datasets available in the data library.
 By combining logging and dataset loading, we can now construct our model training recipe.
 
 To demonstrate this, we have provided a recipe project that serves as a template for creating and structuring training recipes
-[recipe-classification](https://github.com/Data-insight-Platform/recipe-classification)
+[recipe-classification](https://github.com/milestone-hafnia/recipe-classification)
 
 The project also contains additional information on how to structure your training recipe, use the `HafniaLogger`, the `load_dataset` function and different approach for launching
 the training recipe on the Hafnia platform.
 
+
+## Create, Build and Run `recipe.zip` locally
+In order to test recipe compatibility with Hafnia cloud use the following command to build and
+start the job locally.
+
+```bash
+# Create 'recipe.zip' from source folder '.'
+hafnia recipe create .
+
+# Build the docker image locally from a 'recipe.zip' file
+hafnia runc build-local recipe.zip
+
+# Execute the docker image locally with a desired dataset
+hafnia runc launch-local --dataset mnist "python scripts/train.py"
+```
+
 ## Detailed Documentation
 For more information, go to our [documentation page](https://hafnia.readme.io/docs/welcome-to-hafnia)
 or in below markdown pages.
 
 - [CLI](docs/cli.md) - Detailed guide for the Hafnia command-line interface
-- [Script2Model Documentation](docs/s2m.md) - Detailed guide for script2model
 - [Release lifecycle](docs/release.md) - Details about package release lifecycle.
 
 ## Development
 For development, we are using an uv based virtual python environment.
 
 Install uv
-
-    curl -LsSf https://astral.sh/uv/install.sh | sh
-
+```bash
+curl -LsSf https://astral.sh/uv/install.sh | sh
+```
 
 Install python dependencies including developer (`--dev`) and optional dependencies (`--all-extras`).
 
-    uv sync --all-extras --dev
+```bash
+uv sync --all-extras --dev
+```
 
 Run tests:
-
-    uv run pytest tests
+```bash
+uv run pytest tests
+```

{hafnia-0.1.23 → hafnia-0.1.25}/docs/cli.md
@@ -95,4 +95,5 @@ Available environment variables:
 - `MDI_CONFIG_PATH` - Custom path to the configuration file
 - `MDI_API_KEY_SECRET_NAME` - Name of the AWS Secrets Manager secret containing the API key
 - `AWS_REGION` - AWS region for ECR and Secrets Manager operations
-- `RECIPE_DIR` - Directory containing recipe code (used by the `runc launch` command)
+- `RECIPE_DIR` - Directory containing recipe code (used by the `runc launch` command
+- `HAFNIA_CLOUD` – Allow emulate cloud behaviour
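`HAFNIA_CLOUD` is the variable that the new `runc launch-local` command injects into the training container (see `src/cli/runc_cmds.py` below). A minimal sketch of reproducing that environment without Docker, assuming the recipe entrypoint from the README example:

```bash
# Sketch: emulate the cloud environment the same way 'hafnia runc launch-local' does,
# assuming a recipe entrypoint at scripts/train.py (hypothetical layout)
HAFNIA_CLOUD=true PYTHONPATH=src python scripts/train.py
```
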
{hafnia-0.1.23 → hafnia-0.1.25}/pyproject.toml
@@ -1,19 +1,22 @@
 [project]
 name = "hafnia"
-version = "0.1.23"
-description = "Python tools for communication with Hafnia platform."
+version = "0.1.25"
+description = "Python SDK for communication with Hafnia platform."
 readme = "README.md"
-authors = [{ name = "Ivan Sahumbaiev", email = "ivsa@milestone.dk" }]
+authors = [{ name = "Milestone Systems", email = "hafniaplatform@milestone.dk" }]
 requires-python = ">=3.10"
 dependencies = [
     "boto3>=1.35.91",
     "click>=8.1.8",
     "datasets>=3.2.0",
+    "emoji>=2.14.1",
     "flatten-dict>=0.4.2",
+    "pathspec>=0.12.1",
     "pillow>=11.1.0",
     "pyarrow>=18.1.0",
     "pydantic>=2.10.4",
     "rich>=13.9.4",
+    "seedir>=0.5.0",
     "tqdm>=4.67.1",
 ]
 
{hafnia-0.1.23 → hafnia-0.1.25}/src/cli/__main__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 import click
 
-from cli import consts, data_cmds, experiment_cmds, profile_cmds, runc_cmds
+from cli import consts, data_cmds, experiment_cmds, profile_cmds, recipe_cmds, runc_cmds
 from cli.config import Config, ConfigSchema
 
 
@@ -54,6 +54,7 @@ main.add_command(profile_cmds.profile)
 main.add_command(data_cmds.data)
 main.add_command(runc_cmds.runc)
 main.add_command(experiment_cmds.experiment)
+main.add_command(recipe_cmds.recipe)
 
 if __name__ == "__main__":
     main()

{hafnia-0.1.23 → hafnia-0.1.25}/src/cli/consts.py
@@ -8,6 +8,7 @@ ERROR_CREATE_PROFILE: str = "Failed to create profile. Profile name must be uniq
 ERROR_GET_RESOURCE: str = "Failed to get the data from platform. Verify url or api key."
 
 ERROR_EXPERIMENT_DIR: str = "Source directory does not exist"
+ERROR_RECIPE_FILE_FORMAT: str = "Recipe filename must be a '.zip' file"
 
 PROFILE_SWITCHED_SUCCESS: str = "Switched to profile:"
 PROFILE_REMOVED_SUCCESS: str = "Removed profile:"

{hafnia-0.1.23 → hafnia-0.1.25}/src/cli/data_cmds.py
@@ -1,3 +1,4 @@
+from pathlib import Path
 from typing import Optional
 
 import click

@@ -35,20 +36,18 @@ def data_get(cfg: Config, url: str, destination: click.Path) -> None:
 @click.argument("destination", default=None, required=False)
 @click.option("--force", is_flag=True, default=False, help="Force download")
 @click.pass_obj
-def data_download(cfg: Config, dataset_name: str, destination: Optional[click.Path], force: bool) -> None:
+def data_download(cfg: Config, dataset_name: str, destination: Optional[click.Path], force: bool) -> Path:
     """Download dataset from Hafnia platform"""
 
     from hafnia.data.factory import download_or_get_dataset_path
 
     try:
-        endpoint_dataset = cfg.get_platform_endpoint("datasets")
-        api_key = cfg.api_key
-        download_or_get_dataset_path(
+        path_dataset = download_or_get_dataset_path(
             dataset_name=dataset_name,
-            endpoint=endpoint_dataset,
-            api_key=api_key,
+            cfg=cfg,
             output_dir=destination,
             force_redownload=force,
         )
     except Exception:
         raise click.ClickException(consts.ERROR_GET_RESOURCE)
+    return path_dataset
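The reworked command now resolves credentials and endpoints through the active profile (`cfg`) rather than explicit `endpoint`/`api_key` arguments, so CLI usage is unchanged. A usage sketch, assuming a configured profile:

```bash
# Download a dataset (or reuse the cached copy); --force forces a re-download
hafnia data download mnist ./data --force
```
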
{hafnia-0.1.23 → hafnia-0.1.25}/src/cli/experiment_cmds.py
@@ -13,32 +13,6 @@ def experiment() -> None:
     pass
 
 
-@experiment.command(name="create_recipe")
-@click.option("--source_folder", default=".", type=Path, help="Path to the source folder", show_default=True)
-@click.option(
-    "--recipe_filename",
-    default="recipe.zip",
-    type=Path,
-    help="Recipe filename. Should have a '.zip' suffix",
-    show_default=True,
-)
-def create_recipe(source_folder: str, recipe_filename: str) -> None:
-    """Build recipe from local path as image with prefix - localhost"""
-
-    from hafnia.platform.builder import validate_recipe
-    from hafnia.utils import archive_dir
-
-    path_output_zip = Path(recipe_filename)
-
-    if path_output_zip.suffix != ".zip":
-        raise click.ClickException("Recipe filename must be a '.zip' file")
-
-    path_source = Path(source_folder)
-
-    path_output_zip = archive_dir(path_source, path_output_zip)
-    validate_recipe(path_output_zip)
-
-
 @experiment.command(name="create")
 @click.argument("name")
 @click.argument("source_dir", type=Path)
hafnia-0.1.25/src/cli/recipe_cmds.py
@@ -0,0 +1,49 @@
+from pathlib import Path
+
+import click
+
+import cli.consts as consts
+
+
+@click.group(name="recipe")
+def recipe() -> None:
+    """Hafnia Recipe management commands"""
+    pass
+
+
+@recipe.command(name="create")
+@click.argument("source")
+@click.option(
+    "--output", type=click.Path(writable=True), default="./recipe.zip", show_default=True, help="Output recipe path."
+)
+def create(source: str, output: str) -> None:
+    """Create HRF from local path"""
+
+    from hafnia.platform.builder import validate_recipe
+    from hafnia.utils import archive_dir
+
+    path_output_zip = Path(output)
+    if path_output_zip.suffix != ".zip":
+        raise click.ClickException(consts.ERROR_RECIPE_FILE_FORMAT)
+
+    path_source = Path(source)
+    path_output_zip = archive_dir(path_source, path_output_zip)
+    validate_recipe(path_output_zip)
+
+
+@recipe.command(name="view")
+@click.option("--path", type=str, default="./recipe.zip", show_default=True, help="Path of recipe.zip.")
+@click.option("--depth-limit", type=int, default=3, help="Limit the depth of the tree view.", show_default=True)
+def view(path: str, depth_limit: int) -> None:
+    """View the content of a recipe zip file."""
+    from hafnia.utils import view_recipe_content
+
+    path_recipe = Path(path)
+    if not path_recipe.exists():
+        raise click.ClickException(
+            f"Recipe file '{path_recipe}' does not exist. Please provide a valid path. "
+            f"To create a recipe, use the 'hafnia recipe create' command."
+        )
+
+    tree_str = view_recipe_content(path_recipe, depth_limit=depth_limit)
+    click.echo(tree_str)
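Together, the new group gives a create-then-inspect workflow; both commands and their options are defined in the file above:

```bash
# Package the current folder as a recipe, then inspect its contents as a tree
hafnia recipe create . --output recipe.zip
hafnia recipe view --path recipe.zip --depth-limit 2
```
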
hafnia-0.1.25/src/cli/runc_cmds.py
@@ -0,0 +1,143 @@
+import json
+import subprocess
+import zipfile
+from hashlib import sha256
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Optional
+
+import click
+
+from cli.config import Config
+
+
+@click.group(name="runc")
+def runc():
+    """Experiment management commands"""
+    pass
+
+
+@runc.command(name="launch")
+@click.argument("task", required=True)
+def launch(task: str) -> None:
+    """Launch a job within the image."""
+    from hafnia.platform.executor import handle_launch
+
+    handle_launch(task)
+
+
+@runc.command(name="launch-local")
+@click.argument("exec_cmd", type=str)
+@click.option(
+    "--dataset",
+    type=str,
+    help="Hafnia dataset name e.g. mnist, midwest-vehicle-detection or a path to a local dataset",
+    required=True,
+)
+@click.option(
+    "--image_name",
+    type=Optional[str],
+    default=None,
+    help=(
+        "Docker image name to use for the launch. "
+        "By default, it will use image name from '.state.json' "
+        "file generated by the 'hafnia runc build-local' command"
+    ),
+)
+@click.pass_obj
+def launch_local(cfg: Config, exec_cmd: str, dataset: str, image_name: str) -> None:
+    """Launch a job within the image."""
+    from hafnia.data.factory import download_or_get_dataset_path
+
+    is_local_dataset = "/" in dataset
+    if is_local_dataset:
+        click.echo(f"Using local dataset: {dataset}")
+        path_dataset = Path(dataset)
+        if not path_dataset.exists():
+            raise click.ClickException(f"Dataset path does not exist: {path_dataset}")
+    else:
+        click.echo(f"Using Hafnia dataset: {dataset}")
+        path_dataset = download_or_get_dataset_path(dataset_name=dataset, cfg=cfg, force_redownload=False)
+
+    if image_name is None:
+        # Load image name from state.json
+        path_state_file = Path("state.json")
+        if not path_state_file.exists():
+            raise click.ClickException("State file does not exist. Please build the image first.")
+        state_dict = json.loads(path_state_file.read_text())
+        if "mdi_tag" not in state_dict:
+            raise click.ClickException("mdi_tag not found in state file. Please build the image first.")
+        image_name = state_dict["mdi_tag"]
+
+    docker_cmds = [
+        "docker",
+        "run",
+        "--rm",
+        "-v",
+        f"{path_dataset.absolute()}:/opt/ml/input/data/training",
+        "-e",
+        "HAFNIA_CLOUD=true",
+        "-e",
+        "PYTHONPATH=src",
+        "--runtime",
+        "nvidia",
+        image_name,
+    ] + exec_cmd.split(" ")
+
+    # Use the "hafnia runc launch" cmd when we have moved to the new folder structure and
+    # direct commands.
+    # Replace '+ exec_cmd.split(" ")' with '["hafnia", "runc", "launch"] + exec_cmd.split(" ")'
+
+    click.echo(f"Running command: \n\t{' '.join(docker_cmds)}")
+    subprocess.run(docker_cmds, check=True)
+
+
+@runc.command(name="build")
+@click.argument("recipe_url")
+@click.argument("state_file", default="state.json")
+@click.argument("ecr_repository", default="localhost")
+@click.argument("image_name", default="recipe")
+@click.pass_obj
+def build(cfg: Config, recipe_url: str, state_file: str, ecr_repository: str, image_name: str) -> None:
+    """Build docker image with a given recipe."""
+    from hafnia.platform.builder import build_image, prepare_recipe
+
+    with TemporaryDirectory() as temp_dir:
+        image_info = prepare_recipe(recipe_url, Path(temp_dir), cfg.api_key)
+        image_info["name"] = image_name
+        build_image(image_info, ecr_repository, state_file)
+
+
+@runc.command(name="build-local")
+@click.argument("recipe")
+@click.argument("state_file", default="state.json")
+@click.argument("image_name", default="recipe")
+def build_local(recipe: str, state_file: str, image_name: str) -> None:
+    """Build recipe from local path as image with prefix - localhost"""
+
+    from hafnia.platform.builder import build_image, validate_recipe
+    from hafnia.utils import archive_dir
+
+    recipe_zip = Path(recipe)
+    recipe_created = False
+    if not recipe_zip.suffix == ".zip" and recipe_zip.is_dir():
+        recipe_zip = archive_dir(recipe_zip)
+        recipe_created = True
+
+    validate_recipe(recipe_zip)
+    click.echo("Recipe successfully validated")
+    with TemporaryDirectory() as temp_dir:
+        temp_dir_path = Path(temp_dir)
+        with zipfile.ZipFile(recipe_zip, "r") as zip_ref:
+            zip_ref.extractall(temp_dir_path)
+
+        image_info = {
+            "name": image_name,
+            "dockerfile": (temp_dir_path / "Dockerfile").as_posix(),
+            "docker_context": temp_dir_path.as_posix(),
+            "hash": sha256(recipe_zip.read_bytes()).hexdigest()[:8],
+        }
+        click.echo("Start building image")
+        build_image(image_info, "localhost", state_file=state_file)
+        if recipe_created:
+            recipe_zip.unlink()
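A typical local round trip with the new commands, based on the defaults above: `build-local` records the built image tag (`mdi_tag`) in `state.json`, which `launch-local` reads back when `--image_name` is omitted. Note that `launch-local` mounts the dataset at `/opt/ml/input/data/training` and uses the NVIDIA container runtime:

```bash
# Build an image from a recipe zip (a source directory is also accepted and zipped on the fly)
hafnia runc build-local recipe.zip
# Run a training command inside the built image against a Hafnia dataset
hafnia runc launch-local --dataset mnist "python scripts/train.py"
```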