hafnia 0.1.17__tar.gz → 0.1.19__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/ci_cd.yaml +2 -2
  2. {hafnia-0.1.17 → hafnia-0.1.19}/PKG-INFO +8 -9
  3. {hafnia-0.1.17 → hafnia-0.1.19}/README.md +7 -8
  4. {hafnia-0.1.17 → hafnia-0.1.19}/pyproject.toml +1 -1
  5. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/experiment.py +1 -0
  6. {hafnia-0.1.17 → hafnia-0.1.19}/uv.lock +1 -1
  7. {hafnia-0.1.17 → hafnia-0.1.19}/.devcontainer/devcontainer.json +0 -0
  8. {hafnia-0.1.17 → hafnia-0.1.19}/.devcontainer/hooks/post_create +0 -0
  9. {hafnia-0.1.17 → hafnia-0.1.19}/.github/dependabot.yaml +0 -0
  10. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/Dockerfile +0 -0
  11. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/build.yaml +0 -0
  12. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/lint.yaml +0 -0
  13. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/publish_docker.yaml +0 -0
  14. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/publish_pypi.yaml +0 -0
  15. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/publish_release.yaml +0 -0
  16. {hafnia-0.1.17 → hafnia-0.1.19}/.github/workflows/tests.yaml +0 -0
  17. {hafnia-0.1.17 → hafnia-0.1.19}/.gitignore +0 -0
  18. {hafnia-0.1.17 → hafnia-0.1.19}/.pre-commit-config.yaml +0 -0
  19. {hafnia-0.1.17 → hafnia-0.1.19}/.python-version +0 -0
  20. {hafnia-0.1.17 → hafnia-0.1.19}/.vscode/extensions.json +0 -0
  21. {hafnia-0.1.17 → hafnia-0.1.19}/.vscode/launch.json +0 -0
  22. {hafnia-0.1.17 → hafnia-0.1.19}/.vscode/settings.json +0 -0
  23. {hafnia-0.1.17 → hafnia-0.1.19}/LICENSE +0 -0
  24. {hafnia-0.1.17 → hafnia-0.1.19}/docs/cli.md +0 -0
  25. {hafnia-0.1.17 → hafnia-0.1.19}/docs/release.md +0 -0
  26. {hafnia-0.1.17 → hafnia-0.1.19}/docs/s2m.md +0 -0
  27. {hafnia-0.1.17 → hafnia-0.1.19}/examples/dataset_builder.py +0 -0
  28. {hafnia-0.1.17 → hafnia-0.1.19}/examples/example_load_dataset.py +0 -0
  29. {hafnia-0.1.17 → hafnia-0.1.19}/examples/example_logger.py +0 -0
  30. {hafnia-0.1.17 → hafnia-0.1.19}/examples/example_torchvision_dataloader.py +0 -0
  31. {hafnia-0.1.17 → hafnia-0.1.19}/examples/script2model/pytorch/Dockerfile +0 -0
  32. {hafnia-0.1.17 → hafnia-0.1.19}/examples/script2model/pytorch/src/lib/train_utils.py +0 -0
  33. {hafnia-0.1.17 → hafnia-0.1.19}/examples/script2model/pytorch/src/scripts/train.py +0 -0
  34. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/__init__.py +0 -0
  35. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/__main__.py +0 -0
  36. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/config.py +0 -0
  37. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/consts.py +0 -0
  38. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/data_cmds.py +0 -0
  39. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/experiment_cmds.py +0 -0
  40. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/profile_cmds.py +0 -0
  41. {hafnia-0.1.17 → hafnia-0.1.19}/src/cli/runc_cmds.py +0 -0
  42. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/__init__.py +0 -0
  43. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/data/__init__.py +0 -0
  44. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/data/factory.py +0 -0
  45. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/experiment/__init__.py +0 -0
  46. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/experiment/hafnia_logger.py +0 -0
  47. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/http.py +0 -0
  48. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/log.py +0 -0
  49. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/__init__.py +0 -0
  50. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/api.py +0 -0
  51. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/builder.py +0 -0
  52. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/download.py +0 -0
  53. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/platform/executor.py +0 -0
  54. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/torch_helpers.py +0 -0
  55. {hafnia-0.1.17 → hafnia-0.1.19}/src/hafnia/utils.py +0 -0
  56. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_builder.py +0 -0
  57. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_check_example_scripts.py +0 -0
  58. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_cli.py +0 -0
  59. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_executor.py +0 -0
  60. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_mdi_logger.py +0 -0
  61. {hafnia-0.1.17 → hafnia-0.1.19}/tests/test_samples.py +0 -0
@@ -70,7 +70,7 @@ jobs:
70
70
 
71
71
  publish-docker-production:
72
72
  name: Publish Docker Image to Production
73
- needs: publish-pypi
73
+ needs: [build, publish-pypi]
74
74
  secrets: inherit
75
75
  if: github.event_name == 'push' && github.ref == 'refs/heads/main'
76
76
  uses: ./.github/workflows/publish_docker.yaml
@@ -82,7 +82,7 @@ jobs:
82
82
 
83
83
  publish-docker-staging:
84
84
  name: Publish Docker Image to Staging
85
- needs: publish-pypi-test
85
+ needs: [build, publish-pypi-test]
86
86
  secrets: inherit
87
87
  if: github.event_name == 'push' && github.ref == 'refs/heads/main'
88
88
  uses: ./.github/workflows/publish_docker.yaml
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: hafnia
3
- Version: 0.1.17
3
+ Version: 0.1.19
4
4
  Summary: Python tools for communication with Hafnia platform.
5
5
  Author-email: Ivan Sahumbaiev <ivsa@milestone.dk>
6
6
  License-File: LICENSE
@@ -27,8 +27,8 @@ the [Hafnia Platform](https://hafnia.milestonesys.com/).
27
27
 
28
28
  The package includes the following interfaces:
29
29
 
30
- - `cli`: A Command Line Interface (CLI) to 1) configure/connect to Hafnia and 2) create and
31
- launch [Training-aaS](https://hafnia.readme.io/docs/training-as-a-service) recipe scripts.
30
+ - `cli`: A Command Line Interface (CLI) to 1) configure/connect to Hafnia's [Training-aaS](https://hafnia.readme.io/docs/training-as-a-service) and 2) create and
31
+ launch recipe scripts.
32
32
  - `hafnia`: A python package with helper functions to load and interact with sample datasets and an experiment
33
33
  tracker (`HafniaLogger`).
34
34
 
@@ -48,12 +48,11 @@ To support local development of a training recipe, we have introduced a **sample
48
48
  for each dataset available in the Hafnia [data library](https://hafnia.milestonesys.com/training-aas/datasets). The sample dataset is a small
49
49
  and anonymized subset of the full dataset and available for download.
50
50
 
51
- With the sample dataset, you can seamlessly switch between local and Hafnia training.
51
+ With the sample dataset, you can seamlessly switch between local development and Training-aaS.
52
52
  Locally, you can create, validate and debug your training recipe. The recipe is then
53
- launched with Hafnia Training-aaS, where the recipe runs on the full dataset and can be scaled to run on
53
+ launched with Training-aaS, where the recipe runs on the full dataset and can be scaled to run on
54
54
  multiple GPUs and instances if needed.
55
55
 
56
-
57
56
  ## Getting started: Configuration
58
57
  To get started with Hafnia:
59
58
 
@@ -75,7 +74,7 @@ Copy the key and save it for later use.
75
74
  Hafnia API Key: # Pass your HAFNIA API key
76
75
  Hafnia Platform URL [https://api.mdi.milestonesys.com]: # Press [Enter]
77
76
  ```
78
- 1. Download `mnist` from terminal to verify configuration is working.
77
+ 1. Download `mnist` from terminal to verify that your configuration is working.
79
78
 
80
79
  ```bash
81
80
  hafnia data download mnist --force
@@ -88,7 +87,7 @@ and explore the dataset sample with a python script:
88
87
  ```python
89
88
  from hafnia.data import load_dataset
90
89
 
91
- dataset_splits = load_dataset("midwest-vehicle-detection")
90
+ dataset_splits = load_dataset("mnist")
92
91
  print(dataset_splits)
93
92
  print(dataset_splits["train"])
94
93
  ```
@@ -98,7 +97,7 @@ and contains train, validation and test splits.
98
97
  An important feature of `load_dataset` is that it will return the full dataset
99
98
  when loaded on the Hafnia platform.
100
99
  This enables seamlessly switching between running/validating a training script
101
- locally (on the sample dataset) and running full model trainings in the cloud
100
+ locally (on the sample dataset) and running full model trainings with Training-aaS (on the full dataset)
102
101
  without changing code or configurations for the training script.
103
102
 
104
103
  Available datasets with corresponding sample datasets can be found in [data library](https://hafnia.milestonesys.com/training-aas/datasets) including metadata and description for each dataset.
@@ -5,8 +5,8 @@ the [Hafnia Platform](https://hafnia.milestonesys.com/).
5
5
 
6
6
  The package includes the following interfaces:
7
7
 
8
- - `cli`: A Command Line Interface (CLI) to 1) configure/connect to Hafnia and 2) create and
9
- launch [Training-aaS](https://hafnia.readme.io/docs/training-as-a-service) recipe scripts.
8
+ - `cli`: A Command Line Interface (CLI) to 1) configure/connect to Hafnia's [Training-aaS](https://hafnia.readme.io/docs/training-as-a-service) and 2) create and
9
+ launch recipe scripts.
10
10
  - `hafnia`: A python package with helper functions to load and interact with sample datasets and an experiment
11
11
  tracker (`HafniaLogger`).
12
12
 
@@ -26,12 +26,11 @@ To support local development of a training recipe, we have introduced a **sample
26
26
  for each dataset available in the Hafnia [data library](https://hafnia.milestonesys.com/training-aas/datasets). The sample dataset is a small
27
27
  and anonymized subset of the full dataset and available for download.
28
28
 
29
- With the sample dataset, you can seamlessly switch between local and Hafnia training.
29
+ With the sample dataset, you can seamlessly switch between local development and Training-aaS.
30
30
  Locally, you can create, validate and debug your training recipe. The recipe is then
31
- launched with Hafnia Training-aaS, where the recipe runs on the full dataset and can be scaled to run on
31
+ launched with Training-aaS, where the recipe runs on the full dataset and can be scaled to run on
32
32
  multiple GPUs and instances if needed.
33
33
 
34
-
35
34
  ## Getting started: Configuration
36
35
  To get started with Hafnia:
37
36
 
@@ -53,7 +52,7 @@ Copy the key and save it for later use.
53
52
  Hafnia API Key: # Pass your HAFNIA API key
54
53
  Hafnia Platform URL [https://api.mdi.milestonesys.com]: # Press [Enter]
55
54
  ```
56
- 1. Download `mnist` from terminal to verify configuration is working.
55
+ 1. Download `mnist` from terminal to verify that your configuration is working.
57
56
 
58
57
  ```bash
59
58
  hafnia data download mnist --force
@@ -66,7 +65,7 @@ and explore the dataset sample with a python script:
66
65
  ```python
67
66
  from hafnia.data import load_dataset
68
67
 
69
- dataset_splits = load_dataset("midwest-vehicle-detection")
68
+ dataset_splits = load_dataset("mnist")
70
69
  print(dataset_splits)
71
70
  print(dataset_splits["train"])
72
71
  ```
@@ -76,7 +75,7 @@ and contains train, validation and test splits.
76
75
  An important feature of `load_dataset` is that it will return the full dataset
77
76
  when loaded on the Hafnia platform.
78
77
  This enables seamlessly switching between running/validating a training script
79
- locally (on the sample dataset) and running full model trainings in the cloud
78
+ locally (on the sample dataset) and running full model trainings with Training-aaS (on the full dataset)
80
79
  without changing code or configurations for the training script.
81
80
 
82
81
  Available datasets with corresponding sample datasets can be found in [data library](https://hafnia.milestonesys.com/training-aas/datasets) including metadata and description for each dataset.
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "hafnia"
3
- version = "0.1.17"
3
+ version = "0.1.19"
4
4
  description = "Python tools for communication with Hafnia platform."
5
5
  readme = "README.md"
6
6
  authors = [{ name = "Ivan Sahumbaiev", email = "ivsa@milestone.dk" }]
@@ -17,6 +17,7 @@ def get_dataset_id(dataset_name: str, endpoint: str, api_key: str) -> Optional[s
17
17
 
18
18
  def create_recipe(source_dir: Path, endpoint: str, api_key: str, organization_id: str) -> Optional[str]:
19
19
  headers = {"X-APIKEY": api_key, "accept": "application/json"}
20
+ source_dir = source_dir.resolve()  # Ensure the path is absolute so that '.' paths are given an appropriate name.
20
21
  path_recipe = get_recipe_path(recipe_name=source_dir.name)
21
22
  zip_path = archive_dir(source_dir, output_path=path_recipe)
22
23
 
@@ -549,7 +549,7 @@ http = [
549
549
 
550
550
  [[package]]
551
551
  name = "hafnia"
552
- version = "0.1.17"
552
+ version = "0.1.18"
553
553
  source = { editable = "." }
554
554
  dependencies = [
555
555
  { name = "boto3" },
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes