hafnia 0.5.1__py3-none-any.whl → 0.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hafnia/platform/dataset_recipe.py CHANGED
@@ -7,17 +7,20 @@ from flatten_dict import flatten
 from hafnia import http
 from hafnia.log import user_logger
 from hafnia.utils import pretty_print_list_as_table, timed
+from hafnia_cli.config import Config


 @timed("Get or create dataset recipe")
 def get_or_create_dataset_recipe(
     recipe: dict,
-    endpoint: str,
-    api_key: str,
     name: Optional[str] = None,
     overwrite: bool = False,
+    cfg: Optional[Config] = None,
 ) -> Optional[Dict]:
-    headers = {"Authorization": api_key}
+    cfg = cfg or Config()
+
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     data = {"template": {"body": recipe}, "overwrite": overwrite}
     if name is not None:
         data["name"] = name  # type: ignore[assignment]
@@ -26,18 +29,22 @@ def get_or_create_dataset_recipe(
     return response


-def get_or_create_dataset_recipe_by_dataset_name(dataset_name: str, endpoint: str, api_key: str) -> Dict:
-    return get_or_create_dataset_recipe(recipe=dataset_name, endpoint=endpoint, api_key=api_key)
+def get_or_create_dataset_recipe_by_dataset_name(dataset_name: str, cfg: Optional[Config] = None) -> Dict:
+    return get_or_create_dataset_recipe(recipe=dataset_name, cfg=cfg)


-def get_dataset_recipes(endpoint: str, api_key: str) -> List[Dict]:
-    headers = {"Authorization": api_key}
+def get_dataset_recipes(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     dataset_recipes: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
     return dataset_recipes


-def get_dataset_recipe_by_id(dataset_recipe_id: str, endpoint: str, api_key: str) -> Dict:
-    headers = {"Authorization": api_key}
+def get_dataset_recipe_by_id(dataset_recipe_id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}/{dataset_recipe_id}"
     dataset_recipe_info: Dict = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
     if not dataset_recipe_info:
@@ -46,25 +53,30 @@ def get_dataset_recipe_by_id(dataset_recipe_id: str, endpoint: str, api_key: str


 def get_or_create_dataset_recipe_from_path(
-    path_recipe_json: Path, endpoint: str, api_key: str, name: Optional[str] = None
+    path_recipe_json: Path, name: Optional[str] = None, cfg: Optional[Config] = None
 ) -> Dict:
     path_recipe_json = Path(path_recipe_json)
     if not path_recipe_json.exists():
         raise FileNotFoundError(f"Dataset recipe file '{path_recipe_json}' does not exist.")
     json_dict = json.loads(path_recipe_json.read_text())
-    return get_or_create_dataset_recipe(json_dict, endpoint=endpoint, api_key=api_key, name=name)
+    return get_or_create_dataset_recipe(json_dict, name=name, cfg=cfg)


-def delete_dataset_recipe_by_id(id: str, endpoint: str, api_key: str) -> Dict:
-    headers = {"Authorization": api_key}
+def delete_dataset_recipe_by_id(id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}/{id}"
     response = http.delete(endpoint=full_url, headers=headers)
     return response


 @timed("Get dataset recipe")
-def get_dataset_recipe_by_name(name: str, endpoint: str, api_key: str) -> Optional[Dict]:
-    headers = {"Authorization": api_key}
+def get_dataset_recipe_by_name(name: str, cfg: Optional[Config] = None) -> Optional[Dict]:
+    cfg = cfg or Config()
+
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}?name__iexact={name}"
     dataset_recipes: List[Dict] = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
     if len(dataset_recipes) == 0:
@@ -77,11 +89,11 @@ def get_dataset_recipe_by_name(name: str, endpoint: str, api_key: str) -> Option
     return dataset_recipe


-def delete_dataset_recipe_by_name(name: str, endpoint: str, api_key: str) -> Optional[Dict]:
-    recipe_response = get_dataset_recipe_by_name(name, endpoint=endpoint, api_key=api_key)
+def delete_dataset_recipe_by_name(name: str, cfg: Optional[Config] = None) -> Optional[Dict]:
+    recipe_response = get_dataset_recipe_by_name(name, cfg=cfg)

     if recipe_response:
-        return delete_dataset_recipe_by_id(recipe_response["id"], endpoint=endpoint, api_key=api_key)
+        return delete_dataset_recipe_by_id(recipe_response["id"], cfg=cfg)
     return recipe_response

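For orientation, a minimal usage sketch of the new `Config`-based call pattern shown above, assuming a configured Hafnia profile; the recipe name is a placeholder:

```python
from hafnia.platform.dataset_recipe import get_dataset_recipe_by_name, get_dataset_recipes
from hafnia_cli.config import Config

cfg = Config()  # resolves the active profile (platform endpoints + API key)

# cfg is optional: omit it and each helper builds its own Config() internally.
recipes = get_dataset_recipes(cfg=cfg)
recipe = get_dataset_recipe_by_name("my-recipe", cfg=cfg)  # "my-recipe" is a placeholder name
```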
 
hafnia/platform/datasets.py CHANGED
@@ -83,8 +83,11 @@ def get_datasets(cfg: Optional[Config] = None) -> List[Dict[str, str]]:


 @timed("Fetching dataset info.")
-def get_dataset_id(dataset_name: str, endpoint: str, api_key: str) -> str:
-    headers = {"Authorization": api_key}
+def get_dataset_id(dataset_name: str, cfg: Optional[Config] = None) -> str:
+    """Get dataset ID by name from the Hafnia platform."""
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("datasets")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}?name__iexact={dataset_name}"
     dataset_responses: List[Dict] = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
     if not dataset_responses:
@@ -186,9 +189,10 @@ def delete_dataset_completely_by_name(


 @timed("Import dataset details to platform")
-def upload_dataset_details(cfg: Config, data: dict, dataset_name: str) -> dict:
+def upload_dataset_details(data: dict, dataset_name: str, cfg: Optional[Config] = None) -> dict:
+    cfg = cfg or Config()
     dataset_endpoint = cfg.get_platform_endpoint("datasets")
-    dataset_id = get_dataset_id(dataset_name, dataset_endpoint, cfg.api_key)
+    dataset_id = get_dataset_id(dataset_name, cfg=cfg)

     import_endpoint = f"{dataset_endpoint}/{dataset_id}/import"
     headers = {"Authorization": cfg.api_key}
hafnia/platform/experiment.py CHANGED
@@ -1,7 +1,8 @@
-from typing import Dict, List
+from typing import Dict, List, Optional

 from hafnia import http
 from hafnia.utils import pretty_print_list_as_table, timed
+from hafnia_cli.config import Config


 @timed("Creating experiment.")
@@ -11,10 +12,11 @@ def create_experiment(
     trainer_id: str,
     exec_cmd: str,
     environment_id: str,
-    endpoint: str,
-    api_key: str,
+    cfg: Optional[Config] = None,
 ) -> Dict:
-    headers = {"Authorization": api_key}
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("experiments")
+    headers = {"Authorization": cfg.api_key}
     response = http.post(
         endpoint,
         headers=headers,
@@ -30,8 +32,10 @@ def create_experiment(


 @timed("Fetching environment info.")
-def get_environments(endpoint: str, api_key: str) -> List[Dict]:
-    headers = {"Authorization": api_key}
+def get_environments(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("experiment_environments")
+    headers = {"Authorization": cfg.api_key}
     envs: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
     return envs

@@ -54,8 +58,8 @@ def pretty_print_training_environments(envs: List[Dict]) -> None:
     )


-def get_exp_environment_id(name: str, endpoint: str, api_key: str) -> str:
-    envs = get_environments(endpoint=endpoint, api_key=api_key)
+def get_exp_environment_id(name: str, cfg: Optional[Config] = None) -> str:
+    envs = get_environments(cfg=cfg)

     for env in envs:
         if env["name"] == name:
hafnia/platform/trainer_package.py CHANGED
@@ -1,39 +1,85 @@
+import json
 from pathlib import Path
 from typing import Dict, List, Optional

 from hafnia import http
 from hafnia.log import user_logger
-from hafnia.utils import archive_dir, get_trainer_package_path, pretty_print_list_as_table, timed
+from hafnia.utils import (
+    archive_dir,
+    get_trainer_package_path,
+    pretty_print_list_as_table,
+    timed,
+)
+from hafnia_cli.config import Config


 @timed("Uploading trainer package.")
-def create_trainer_package(source_dir: Path, endpoint: str, api_key: str) -> str:
-    source_dir = source_dir.resolve()  # Ensure the path is absolute to handle '.' paths are given an appropriate name.
+def create_trainer_package(
+    source_dir: Path,
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+    cmd: Optional[str] = None,
+    cfg: Optional[Config] = None,
+) -> Dict:
+    # Ensure the path is absolute to handle '.' paths are given an appropriate name.
+    source_dir = Path(source_dir).resolve()
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
+
     path_trainer = get_trainer_package_path(trainer_name=source_dir.name)
-    zip_path = archive_dir(source_dir, output_path=path_trainer)
+    name = name or path_trainer.stem
+    zip_path, package_files = archive_dir(source_dir, output_path=path_trainer)
     user_logger.info(f"Trainer package created and stored in '{path_trainer}'")

-    headers = {"Authorization": api_key, "accept": "application/json"}
+    cmd_builder_schemas = auto_discover_cmd_builder_schemas(package_files)
+    cmd = cmd or "python scripts/train.py"
+    description = description or f"Trainer package for '{name}'. Created with Hafnia SDK Cli."
+    headers = {"Authorization": cfg.api_key, "accept": "application/json"}
     data = {
-        "name": path_trainer.name,
-        "description": "Trainer package created by Hafnia CLI",
+        "name": name,
+        "description": description,
+        "default_command": cmd,
         "file": (zip_path.name, Path(zip_path).read_bytes()),
     }
+    if len(cmd_builder_schemas) == 0:
+        data["command_builder_schemas"] = json.dumps(cmd_builder_schemas)
+    user_logger.info(f"Uploading trainer package '{name}' to platform...")
     response = http.post(endpoint, headers=headers, data=data, multipart=True)
-    return response["id"]
+    user_logger.info(f"Trainer package uploaded successfully with id '{response['id']}'")
+    return response
+
+
+def auto_discover_cmd_builder_schemas(package_files: List[Path]) -> List[Dict]:
+    """
+    Auto-discover command builder schema files in the trainer package files.
+    Looks for files ending with '.schema.json' and loads their content as JSON.
+    """
+    cmd_builder_schema_files = [file for file in package_files if file.name.endswith(".schema.json")]
+    cmd_builder_schemas = []
+    for cmd_builder_schema_file in cmd_builder_schema_files:
+        cmd_builder_schema = json.loads(cmd_builder_schema_file.read_text())
+        cmd_entrypoint = cmd_builder_schema.get("cmd", None)
+        user_logger.info(f"Found command builder schema file for entry point '{cmd_entrypoint}'")
+        cmd_builder_schemas.append(cmd_builder_schema)
+    return cmd_builder_schemas


 @timed("Get trainer package.")
-def get_trainer_package_by_id(id: str, endpoint: str, api_key: str) -> Dict:
+def get_trainer_package_by_id(id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
     full_url = f"{endpoint}/{id}"
-    headers = {"Authorization": api_key}
+    headers = {"Authorization": cfg.api_key}
     response: Dict = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
+
     return response


 @timed("Get trainer packages")
-def get_trainer_packages(endpoint: str, api_key: str) -> List[Dict]:
-    headers = {"Authorization": api_key}
+def get_trainer_packages(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
+    headers = {"Authorization": cfg.api_key}
     trainers: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
     return trainers
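A sketch of the new trainer-package upload based on the signature above: `create_trainer_package` now returns the full response dict rather than just the id, and any `*.schema.json` files found in the archived package are loaded as command-builder schemas. The directory and name below are placeholders:

```python
from pathlib import Path

from hafnia.platform.trainer_package import create_trainer_package

response = create_trainer_package(
    source_dir=Path("./my-trainer"),   # placeholder directory containing the trainer code
    name="my-trainer",                 # optional; derived from the package path when omitted
    cmd="python scripts/train.py",     # optional; this is also the built-in default
)
trainer_id = response["id"]
```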
 
hafnia/utils.py CHANGED
@@ -6,7 +6,7 @@ from collections.abc import Sized
 from datetime import datetime
 from functools import wraps
 from pathlib import Path
-from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple
 from zipfile import ZipFile

 import more_itertools
@@ -113,20 +113,22 @@ def archive_dir(
     recipe_path: Path,
     output_path: Optional[Path] = None,
     path_ignore_file: Optional[Path] = None,
-) -> Path:
+) -> Tuple[Path, List[Path]]:
     recipe_zip_path = output_path or recipe_path / "trainer.zip"
     assert recipe_zip_path.suffix == ".zip", "Output path must be a zip file"
     recipe_zip_path.parent.mkdir(parents=True, exist_ok=True)

     user_logger.info(f" Creating zip archive of '{recipe_path}'")
-    include_files = filter_trainer_package_files(recipe_path, path_ignore_file)
+    include_files_generator = filter_trainer_package_files(recipe_path, path_ignore_file)
+    included_files = []
     with ZipFile(recipe_zip_path, "w", compression=zipfile.ZIP_STORED, allowZip64=True) as zip_ref:
-        for str_filepath in include_files:
+        for str_filepath in include_files_generator:
             full_path = recipe_path / str_filepath
             zip_ref.write(full_path, str_filepath)
+            included_files.append(full_path)
     show_trainer_package_content(recipe_zip_path)

-    return recipe_zip_path
+    return recipe_zip_path, included_files


 def size_human_readable(size_bytes: int, suffix="B") -> str:
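The changed return type matters for callers: `archive_dir` now returns the zip path together with the list of archived files, which is what the trainer-package upload uses to discover `*.schema.json` files. A small sketch (the directory is a placeholder):

```python
from pathlib import Path

from hafnia.utils import archive_dir

zip_path, included_files = archive_dir(Path("./my-trainer"))
schema_files = [f for f in included_files if f.name.endswith(".schema.json")]
```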
hafnia-0.5.1.dist-info/METADATA → hafnia-0.5.3.dist-info/METADATA
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: hafnia
-Version: 0.5.1
+Version: 0.5.3
 Summary: Python SDK for communication with Hafnia platform.
 Author-email: Milestone Systems <hafniaplatform@milestone.dk>
 License-File: LICENSE
 Requires-Python: >=3.10
 Requires-Dist: boto3>=1.35.91
 Requires-Dist: click>=8.1.8
+Requires-Dist: docstring-parser>=0.17.0
 Requires-Dist: emoji>=2.14.1
 Requires-Dist: flatten-dict>=0.4.2
 Requires-Dist: keyring>=25.6.0
@@ -63,9 +64,16 @@ multiple GPUs and instances if needed.
 ## Getting started: Configuration
 To get started with Hafnia:

-1. Install `hafnia` with your favorite python package manager. With pip do this:
+1. Install `hafnia` with your favorite python package manager:
+
+```bash
+# With uv package manager
+uv add hafnia
+
+# With pip
+pip install hafnia
+```

-`pip install hafnia`
 1. Sign in to the [Hafnia Platform](https://hafnia.milestonesys.com/).
 1. Create an API KEY for Training aaS. For more instructions, follow this
 [guide](https://hafnia.readme.io/docs/create-an-api-key).
@@ -93,11 +101,9 @@ With Hafnia configured on your local machine, it is now possible to download
 and explore the dataset sample with a python script:

 ```python
-from hafnia.data import get_dataset_path
 from hafnia.dataset.hafnia_dataset import HafniaDataset

-# To download the sample dataset use:
-path_dataset = get_dataset_path("midwest-vehicle-detection")
+dataset = HafniaDataset.from_name("midwest-vehicle-detection")
 ```

 This will download the dataset sample `midwest-vehicle-detection` to the local `.data/datasets/` folder
@@ -123,11 +129,10 @@ midwest-vehicle-detection
 3 directories, 217 files
 ```

-You can interact with data as you want, but we also provide `HafniaDataset`
-for loading/saving, managing and interacting with the dataset.
+We provide the `HafniaDataset` format for loading/saving, managing and interacting with the dataset.

 We recommend the example script [examples/example_hafnia_dataset.py](examples/example_hafnia_dataset.py)
-for a short introduction on the `HafniaDataset`.
+for a quick introduction on the `HafniaDataset`.

 Below is a short introduction to the `HafniaDataset` class.

@@ -135,7 +140,7 @@ Below is a short introduction to the `HafniaDataset` class.
 from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample

 # Load dataset from path
-dataset = HafniaDataset.read_from_path(path_dataset)
+dataset = HafniaDataset.from_path(path_dataset)

 # Or get dataset directly by name
 dataset = HafniaDataset.from_name("midwest-vehicle-detection")
hafnia-0.5.1.dist-info/RECORD → hafnia-0.5.3.dist-info/RECORD
@@ -1,17 +1,17 @@
 hafnia/__init__.py,sha256=0qpjWfVbcfKzLSnfUW6RdclSGkesMQRFS-n_aTJJoSE,179
 hafnia/http.py,sha256=PkEuanlUKeERABXttaGAJT6hOZ1_B2CwJodbUV4uZdg,3710
 hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
-hafnia/torch_helpers.py,sha256=Qj8pV5P8tGw6F3W2Rj9Kel7O8hWuUqiKfTdXd3h5UOo,14080
-hafnia/utils.py,sha256=l_awkrb3OttxqSMkPiYcpuP3c_kejkSmiqndSahc1s0,8703
-hafnia/data/__init__.py,sha256=o9QjiGbEcNa6r-qDmwwmxPXf-1UitNl5-WxFNcujqsg,111
-hafnia/data/factory.py,sha256=kHkvOtBUbwaShZBGf1kZzocDJBn_1dHHLrQxnUpJmfY,778
-hafnia/dataset/dataset_details_uploader.py,sha256=H_zz67bBwbgo4StUwBNmH89WlqydIc-tEQbrRnZDwgg,24161
+hafnia/utils.py,sha256=YqlrRGmaVuYilT0YaZNNlZDGsBLfUXnjGy1vxsIUFHc,8834
+hafnia/dataset/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
+hafnia/dataset/dataset_details_uploader.py,sha256=TfwOVkuLqmWQXAYZ8FomvJC9IAnomG-4VZTPxf6sgWc,24161
 hafnia/dataset/dataset_helpers.py,sha256=N8W_ioDlxP2VvNJXzqXLDbcEqgPKz0WyPNOBakHoBUc,6443
 hafnia/dataset/dataset_names.py,sha256=42_UKrDwcKEW48oTbtBaeyi5qVFVaMAj8vRvDv-mcEI,3616
-hafnia/dataset/hafnia_dataset.py,sha256=OOenIMPm8K23AgxHvmc_y05KCzxIwaZa-gv3uNC50NU,38519
-hafnia/dataset/hafnia_dataset_types.py,sha256=eCLawdjIFoul67AAtQ4xaKjbVSNAFA-mvbJYofiu2Sg,26848
+hafnia/dataset/hafnia_dataset.py,sha256=7zxCgqWwpVpZEGdnwmEqEYYFNekx_vd05zEj7bkitLE,37154
+hafnia/dataset/hafnia_dataset_types.py,sha256=38sCW_ISlWuG0kdnp_MZdL4OVFSEt2ULVfoTpgDW3lk,26841
+hafnia/dataset/image_visualizations.py,sha256=rB7c-KK-qq0BsSdkaFxCAHOOCTXTUQx0VMEhib7ig0k,7509
 hafnia/dataset/license_types.py,sha256=b1Jt5e8N89sujIs4T9y39sJEkzpAwCoLDTHDTpkiEOI,2166
-hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=Ln49jcpOQ4qzumv-SkWSBCqNgSP1dGQloKSLs7psP90,20991
+hafnia/dataset/torch_helpers.py,sha256=Qj8pV5P8tGw6F3W2Rj9Kel7O8hWuUqiKfTdXd3h5UOo,14080
+hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=YHnSG4lDYLjRsnrybOrNNGASoMhOaLo3PaxiifIwHQ4,18484
 hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=j3Oiytt3LI2rCaJid7Y44oT9MXvlZVqvZanngMebIWg,3088
 hafnia/dataset/dataset_recipe/recipe_types.py,sha256=AcrG6gpRt3Igl-CCJ60uyh-WkfI1NCnQ55M8yClSI9Q,5328
 hafnia/dataset/format_conversions/format_coco.py,sha256=7GjeF016ZBaKxu-VYiqXxuPw8HuuODV1cxc2TbDDZBw,19628
@@ -20,43 +20,42 @@ hafnia/dataset/format_conversions/format_image_classification_folder.py,sha256=A
 hafnia/dataset/format_conversions/format_yolo.py,sha256=zvCHo2L_0mPJScMbDtwvZUts9UX2ERKhhYbY31Q6tQA,9912
 hafnia/dataset/format_conversions/torchvision_datasets.py,sha256=sC8DgAt10PEaCHFk_Lm-dIzr_0EF-2g24kG9EINYk7c,12096
 hafnia/dataset/operations/dataset_s3_storage.py,sha256=xPC77Og47xTpI0JBFAR1pgb5u7l18byAA6p7IlpnpGE,8971
-hafnia/dataset/operations/dataset_stats.py,sha256=Ltf-V4_o_IB4UXw9WG9bsVoqeX90yGsjivK0CDggriw,11930
+hafnia/dataset/operations/dataset_stats.py,sha256=uzQJWOoAM7YDLLeUhPVBpE1vFM38AOriDlPxKs2hj5M,11986
 hafnia/dataset/operations/dataset_transformations.py,sha256=qUNno0rAT1A452uzlR-k1WbatyY9VuMp1QJjkMg9GzE,19495
 hafnia/dataset/operations/table_transformations.py,sha256=mdjUE1lSQ7QyONjQapSHDg1MkYuKaflcoVUq1Y6Lkqc,13606
 hafnia/dataset/primitives/__init__.py,sha256=xFLJ3R7gpbuQnNJuFhuu836L3nicwoaY5aHkqk7Bbr8,927
 hafnia/dataset/primitives/bbox.py,sha256=QJJBebltOd9J3idisp3QdX0gCgz6P5xlIlGbth19fG0,6669
 hafnia/dataset/primitives/bitmask.py,sha256=Q7RiNYvMDlcFPkXAWXDJkCIERjnUTCrHu6VeEPX1jEA,7212
-hafnia/dataset/primitives/classification.py,sha256=rYcf9MS-pYE4O1YrkYutFjICnDhoOJu-t0xW62wh_TA,2669
+hafnia/dataset/primitives/classification.py,sha256=YAMwO_gSOfDiXLUrEq-ObzvChK478rwGTP-RBhWt1LE,2662
 hafnia/dataset/primitives/point.py,sha256=VzCNLTQOPA6wyJVVKddZHGhltkep6V_B7pg5pk7rd9Y,879
 hafnia/dataset/primitives/polygon.py,sha256=jZPNVwEs4A3IMJQzI_dlcDDfgju7hdoVc677tMAdEbQ,6271
 hafnia/dataset/primitives/primitive.py,sha256=Wvby0sCGgYj8ec39PLcHsmip5VKL96ZSCz2cGIBjPqM,1289
-hafnia/dataset/primitives/segmentation.py,sha256=hnXIUklkuDMxYkUaff1bgRTcXI_2b32RIbobxR3ejzk,2017
+hafnia/dataset/primitives/segmentation.py,sha256=ACexXYavoFsqviCRA76MDZUvEoBZLO_OTDCl2Px_rV4,2010
 hafnia/dataset/primitives/utils.py,sha256=3gT1as-xXEj8CamoIuBb9gQwUN9Ae9qnqtqF_uEe0zo,1993
 hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
+hafnia/experiment/command_builder.py,sha256=F1szeVKD68_W2oHbp-pjkjAo0bbjw9TymQwLjF7QVhE,27587
 hafnia/experiment/hafnia_logger.py,sha256=BHIOLAds_3JxT0cev_ikUH0XQVIxBJTkcBSx2Q_SIk0,10894
 hafnia/platform/__init__.py,sha256=L_Q7CNpsJ0HMNPy_rLlLK5RhmuCU7IF4BchxKv6amYc,782
 hafnia/platform/builder.py,sha256=kUEuj5-qtL1uk5v2tUvOCREn5yV-G4Fr6F31haIAb5E,5808
-hafnia/platform/dataset_recipe.py,sha256=ybfSSHVPG0eFUbzg_1McezPSOtMoDZEg7l6rFYndtb4,3857
-hafnia/platform/datasets.py,sha256=nXHg3I14p3tJeDX2woPH9NMiOxn_54zlIOPJXvXFI_w,9448
+hafnia/platform/dataset_recipe.py,sha256=UNvsDEbByT_WPuslILLGFsqXb87g65K5xz-Q2ZzvcKs,4242
+hafnia/platform/datasets.py,sha256=z7bQz1SIR-vVVjRJD1FwEPw2X5QPyE_J1Ea1M6XPXwc,9612
 hafnia/platform/download.py,sha256=e73Pm0afwRPTHxBvRy0gUZSFfDuePHPnfasyhaZ-KGQ,5019
-hafnia/platform/experiment.py,sha256=SrEH0nuwwBXf1Iu4diB1BEPqL-TxW3aQkZWBbM1-tY0,1846
+hafnia/platform/experiment.py,sha256=qNg9CKBLIYnOb-bMaEDecv-PptP4_ubQJunXGwdSiaQ,2049
 hafnia/platform/s5cmd_utils.py,sha256=hHsGPJ1S9_hFIVfCO-efvTF4qbLYreK1nl3VC5caU1w,9491
-hafnia/platform/trainer_package.py,sha256=w6JC7o-279ujcwtNTbUaQ9AnPcYRPPbD8EACa6XyUHA,2206
-hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
-hafnia/visualizations/image_visualizations.py,sha256=rB7c-KK-qq0BsSdkaFxCAHOOCTXTUQx0VMEhib7ig0k,7509
+hafnia/platform/trainer_package.py,sha256=MTmiPm02uy5TIUDRjgCSETL-Q_esxha0NWtHO1h53dw,3949
 hafnia_cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hafnia_cli/__main__.py,sha256=CqD_7RfbfwB6MED3WZ8WxclrFxWcRMtZ5A1Snnst3GM,1761
 hafnia_cli/config.py,sha256=X0dJBYH-7mNAFkHgeZvDbawlQqoyCwoY4C-QhlyYCA0,7695
 hafnia_cli/consts.py,sha256=uCpYX44NCu_Zvte0QwChunxOo-qqhcaJRSYDAIsoJ8A,972
 hafnia_cli/dataset_cmds.py,sha256=JfSj7Cei1T2oYUXP1bpz63uQopgL3R_dMMYnPGGcuU8,2072
-hafnia_cli/dataset_recipe_cmds.py,sha256=OYSmpKL0Wxo1ZSxIGfH6w7pEWoI7CjUTmfIELJSZjGQ,2894
-hafnia_cli/experiment_cmds.py,sha256=_KxsMhbjlkIno1PIMXJ0Omw_PSJi8qi9hmtCUqwcj1M,7970
+hafnia_cli/dataset_recipe_cmds.py,sha256=cprz0RMxuPK8hLLeu3V5MojtKZlSSNb3THBo2pZzdiM,2589
+hafnia_cli/experiment_cmds.py,sha256=ZKSy2W-ke33MhXF5BKWdkQO0o31tqIb4Ld-l_cfc5Lw,7453
 hafnia_cli/keychain.py,sha256=bNyjjULVQu7kV338wUC65UvbCwmSGOmEjKWPLIQjT0k,2555
-hafnia_cli/profile_cmds.py,sha256=yTyOsPsUssLCzFIxURkxbKrFEhYIVDlUC0G2s5Uks-U,3476
+hafnia_cli/profile_cmds.py,sha256=QVTK_hLskuiod9Nmgqld61-McWQEyCMWAgvDlMtB7oE,3709
 hafnia_cli/runc_cmds.py,sha256=7P5TjF6KA9K4OKPG1qC_0gteXfLJbXlA858WWrosoGQ,5098
-hafnia_cli/trainer_package_cmds.py,sha256=hUBc6gCMV28fcAA0xQdXKL1z-a3aL9lMWcVqjvHO1Uo,2326
-hafnia-0.5.1.dist-info/METADATA,sha256=QkN31qwV-oFfSrp_s2pXEc5GFRhY0sn5dBVo8r-qOAs,19272
-hafnia-0.5.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-hafnia-0.5.1.dist-info/entry_points.txt,sha256=j2jsj1pqajLAiSOnF7sq66A3d1SVeHPKVTVyIFzipSA,52
-hafnia-0.5.1.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
-hafnia-0.5.1.dist-info/RECORD,,
+hafnia_cli/trainer_package_cmds.py,sha256=RMSiinwzVlK-kDFPuwZ98EoSpw61aaXa6IyRj3UEVlw,3307
+hafnia-0.5.3.dist-info/METADATA,sha256=Evy5X8ZLPuT0ozROYPU9mH26xyTfF_kR8JpnboO_k_k,19258
+hafnia-0.5.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+hafnia-0.5.3.dist-info/entry_points.txt,sha256=j2jsj1pqajLAiSOnF7sq66A3d1SVeHPKVTVyIFzipSA,52
+hafnia-0.5.3.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+hafnia-0.5.3.dist-info/RECORD,,
hafnia_cli/dataset_recipe_cmds.py CHANGED
@@ -28,8 +28,7 @@ def cmd_get_or_create_dataset_recipe(cfg: Config, path_json_recipe: Path, name:
     """Create Hafnia dataset recipe from dataset recipe JSON file"""
     from hafnia.platform.dataset_recipe import get_or_create_dataset_recipe_from_path

-    endpoint = cfg.get_platform_endpoint("dataset_recipes")
-    recipe = get_or_create_dataset_recipe_from_path(path_json_recipe, endpoint=endpoint, api_key=cfg.api_key, name=name)
+    recipe = get_or_create_dataset_recipe_from_path(path_json_recipe, name=name, cfg=cfg)

     if recipe is None:
         raise click.ClickException("Failed to create dataset recipe.")
@@ -44,8 +43,7 @@ def cmd_list_dataset_recipes(cfg: Config, limit: Optional[int]) -> None:
     """List available dataset recipes"""
     from hafnia.platform.dataset_recipe import get_dataset_recipes, pretty_print_dataset_recipes

-    endpoint = cfg.get_platform_endpoint("dataset_recipes")
-    recipes = get_dataset_recipes(endpoint=endpoint, api_key=cfg.api_key)
+    recipes = get_dataset_recipes(cfg=cfg)
     # Sort recipes to have the most recent first
     recipes = sorted(recipes, key=lambda x: x["created_at"], reverse=True)
     if limit is not None:
@@ -61,12 +59,10 @@ def cmd_delete_dataset_recipe(cfg: Config, id: Optional[str], name: Optional[str
     """Delete a dataset recipe by ID or name"""
     from hafnia.platform.dataset_recipe import delete_dataset_recipe_by_id, delete_dataset_recipe_by_name

-    endpoint = cfg.get_platform_endpoint("dataset_recipes")
-
     if id is not None:
-        return delete_dataset_recipe_by_id(id=id, endpoint=endpoint, api_key=cfg.api_key)
+        return delete_dataset_recipe_by_id(id=id, cfg=cfg)
     if name is not None:
-        dataset_recipe = delete_dataset_recipe_by_name(name=name, endpoint=endpoint, api_key=cfg.api_key)
+        dataset_recipe = delete_dataset_recipe_by_name(name=name, cfg=cfg)
         if dataset_recipe is None:
            raise click.ClickException(f"Dataset recipe with name '{name}' was not found.")

hafnia_cli/experiment_cmds.py CHANGED
@@ -27,7 +27,7 @@ def cmd_view_environments(cfg: Config):
     """
     from hafnia.platform import get_environments, pretty_print_training_environments

-    envs = get_environments(cfg.get_platform_endpoint("experiment_environments"), cfg.api_key)
+    envs = get_environments(cfg=cfg)

     pretty_print_training_environments(envs)

@@ -132,7 +132,7 @@ def cmd_create_experiment(
     """
     from hafnia.platform import create_experiment, get_exp_environment_id

-    dataset_recipe_response = get_dataset_recipe_by_dataset_identifies(
+    dataset_recipe_response = get_dataset_recipe_by_identifiers(
         cfg=cfg,
         dataset_name=dataset,
         dataset_recipe_name=dataset_recipe,
@@ -140,13 +140,8 @@ def cmd_create_experiment(
     )
     dataset_recipe_id = dataset_recipe_response["id"]

-    trainer_id = get_trainer_package_by_identifies(
-        cfg=cfg,
-        trainer_path=trainer_path,
-        trainer_id=trainer_id,
-    )
-
-    env_id = get_exp_environment_id(environment, cfg.get_platform_endpoint("experiment_environments"), cfg.api_key)
+    trainer_id = get_trainer_package_by_identifiers(cfg=cfg, trainer_path=trainer_path, trainer_id=trainer_id)
+    env_id = get_exp_environment_id(environment, cfg=cfg)

     experiment = create_experiment(
         experiment_name=name,
@@ -154,8 +149,7 @@ def cmd_create_experiment(
         trainer_id=trainer_id,
         exec_cmd=cmd,
         environment_id=env_id,
-        endpoint=cfg.get_platform_endpoint("experiments"),
-        api_key=cfg.api_key,
+        cfg=cfg,
     )

     experiment_properties = {
@@ -172,7 +166,7 @@ def cmd_create_experiment(
         print(f" {key}: {value}")


-def get_dataset_recipe_by_dataset_identifies(
+def get_dataset_recipe_by_identifiers(
     cfg: Config,
     dataset_name: Optional[str],
     dataset_recipe_name: Optional[str],
@@ -186,18 +180,17 @@ def get_dataset_recipe_by_dataset_identifies(
         "Multiple dataset identifiers have been provided. Define only one dataset identifier."
     )

-    dataset_recipe_endpoint = cfg.get_platform_endpoint("dataset_recipes")
     if dataset_name:
-        return get_or_create_dataset_recipe_by_dataset_name(dataset_name, dataset_recipe_endpoint, cfg.api_key)
+        return get_or_create_dataset_recipe_by_dataset_name(dataset_name, cfg=cfg)

     if dataset_recipe_name:
-        recipe = get_dataset_recipe_by_name(dataset_recipe_name, dataset_recipe_endpoint, cfg.api_key)
+        recipe = get_dataset_recipe_by_name(dataset_recipe_name, cfg=cfg)
         if recipe is None:
             raise click.ClickException(f"Dataset recipe '{dataset_recipe_name}' was not found in the dataset library.")
         return recipe

     if dataset_recipe_id:
-        return get_dataset_recipe_by_id(dataset_recipe_id, dataset_recipe_endpoint, cfg.api_key)
+        return get_dataset_recipe_by_id(dataset_recipe_id, cfg=cfg)

     raise click.MissingParameter(
         "At least one dataset identifier must be provided. Set one of the following:\n"
@@ -207,7 +200,7 @@ def get_dataset_recipe_by_dataset_identifies(
     )


-def get_trainer_package_by_identifies(
+def get_trainer_package_by_identifiers(
     cfg: Config,
     trainer_path: Optional[Path],
     trainer_id: Optional[str],
@@ -223,17 +216,14 @@ def get_trainer_package_by_identifies(
         trainer_path = Path(trainer_path)
         if not trainer_path.exists():
             raise click.ClickException(f"Trainer package path '{trainer_path}' does not exist.")
-        trainer_id = create_trainer_package(
-            trainer_path,
-            cfg.get_platform_endpoint("trainers"),
-            cfg.api_key,
+        response = create_trainer_package(
+            source_dir=trainer_path,
+            cfg=cfg,
         )
-        return trainer_id
+        return response["id"]

     if trainer_id:
-        trainer_response = get_trainer_package_by_id(
-            id=trainer_id, endpoint=cfg.get_platform_endpoint("trainers"), api_key=cfg.api_key
-        )
+        trainer_response = get_trainer_package_by_id(id=trainer_id, cfg=cfg)
         return trainer_response["id"]

     raise click.MissingParameter(
hafnia_cli/profile_cmds.py CHANGED
@@ -6,10 +6,15 @@ import hafnia_cli.consts as consts
 from hafnia_cli.config import Config, ConfigSchema


-@click.group()
-def profile():
+@click.group(invoke_without_command=True)
+@click.pass_context
+def profile(ctx):
     """Manage profile."""
-    pass
+    if ctx.invoked_subcommand is None:
+        # No subcommand provided, show active profile and help
+        cfg = ctx.obj
+        profile_show(cfg)
+        click.echo("\n" + ctx.get_help())


 @profile.command("ls")