hafnia 0.1.24__py3-none-any.whl → 0.1.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cli/__main__.py CHANGED
@@ -1,7 +1,7 @@
  #!/usr/bin/env python
  import click

- from cli import consts, data_cmds, experiment_cmds, profile_cmds, runc_cmds
+ from cli import consts, data_cmds, experiment_cmds, profile_cmds, recipe_cmds, runc_cmds
  from cli.config import Config, ConfigSchema


@@ -54,6 +54,7 @@ main.add_command(profile_cmds.profile)
  main.add_command(data_cmds.data)
  main.add_command(runc_cmds.runc)
  main.add_command(experiment_cmds.experiment)
+ main.add_command(recipe_cmds.recipe)

  if __name__ == "__main__":
      main()
cli/consts.py CHANGED
@@ -8,6 +8,7 @@ ERROR_CREATE_PROFILE: str = "Failed to create profile. Profile name must be uniq
  ERROR_GET_RESOURCE: str = "Failed to get the data from platform. Verify url or api key."

  ERROR_EXPERIMENT_DIR: str = "Source directory does not exist"
+ ERROR_RECIPE_FILE_FORMAT: str = "Recipe filename must be a '.zip' file"

  PROFILE_SWITCHED_SUCCESS: str = "Switched to profile:"
  PROFILE_REMOVED_SUCCESS: str = "Removed profile:"
cli/data_cmds.py CHANGED
@@ -1,3 +1,4 @@
+ from pathlib import Path
  from typing import Optional

  import click
@@ -35,20 +36,18 @@ def data_get(cfg: Config, url: str, destination: click.Path) -> None:
  @click.argument("destination", default=None, required=False)
  @click.option("--force", is_flag=True, default=False, help="Force download")
  @click.pass_obj
- def data_download(cfg: Config, dataset_name: str, destination: Optional[click.Path], force: bool) -> None:
+ def data_download(cfg: Config, dataset_name: str, destination: Optional[click.Path], force: bool) -> Path:
      """Download dataset from Hafnia platform"""

      from hafnia.data.factory import download_or_get_dataset_path

      try:
-         endpoint_dataset = cfg.get_platform_endpoint("datasets")
-         api_key = cfg.api_key
-         download_or_get_dataset_path(
+         path_dataset = download_or_get_dataset_path(
              dataset_name=dataset_name,
-             endpoint=endpoint_dataset,
-             api_key=api_key,
+             cfg=cfg,
              output_dir=destination,
              force_redownload=force,
          )
      except Exception:
          raise click.ClickException(consts.ERROR_GET_RESOURCE)
+     return path_dataset
cli/experiment_cmds.py CHANGED
@@ -13,32 +13,6 @@ def experiment() -> None:
      pass


- @experiment.command(name="create_recipe")
- @click.option("--source_folder", default=".", type=Path, help="Path to the source folder", show_default=True)
- @click.option(
-     "--recipe_filename",
-     default="recipe.zip",
-     type=Path,
-     help="Recipe filename. Should have a '.zip' suffix",
-     show_default=True,
- )
- def create_recipe(source_folder: str, recipe_filename: str) -> None:
-     """Build recipe from local path as image with prefix - localhost"""
-
-     from hafnia.platform.builder import validate_recipe
-     from hafnia.utils import archive_dir
-
-     path_output_zip = Path(recipe_filename)
-
-     if path_output_zip.suffix != ".zip":
-         raise click.ClickException("Recipe filename must be a '.zip' file")
-
-     path_source = Path(source_folder)
-
-     path_output_zip = archive_dir(path_source, path_output_zip)
-     validate_recipe(path_output_zip)
-
-
  @experiment.command(name="create")
  @click.argument("name")
  @click.argument("source_dir", type=Path)
cli/recipe_cmds.py ADDED
@@ -0,0 +1,49 @@
+ from pathlib import Path
+
+ import click
+
+ import cli.consts as consts
+
+
+ @click.group(name="recipe")
+ def recipe() -> None:
+     """Hafnia Recipe management commands"""
+     pass
+
+
+ @recipe.command(name="create")
+ @click.argument("source")
+ @click.option(
+     "--output", type=click.Path(writable=True), default="./recipe.zip", show_default=True, help="Output recipe path."
+ )
+ def create(source: str, output: str) -> None:
+     """Create HRF from local path"""
+
+     from hafnia.platform.builder import validate_recipe
+     from hafnia.utils import archive_dir
+
+     path_output_zip = Path(output)
+     if path_output_zip.suffix != ".zip":
+         raise click.ClickException(consts.ERROR_RECIPE_FILE_FORMAT)
+
+     path_source = Path(source)
+     path_output_zip = archive_dir(path_source, path_output_zip)
+     validate_recipe(path_output_zip)
+
+
+ @recipe.command(name="view")
+ @click.option("--path", type=str, default="./recipe.zip", show_default=True, help="Path of recipe.zip.")
+ @click.option("--depth-limit", type=int, default=3, help="Limit the depth of the tree view.", show_default=True)
+ def view(path: str, depth_limit: int) -> None:
+     """View the content of a recipe zip file."""
+     from hafnia.utils import view_recipe_content
+
+     path_recipe = Path(path)
+     if not path_recipe.exists():
+         raise click.ClickException(
+             f"Recipe file '{path_recipe}' does not exist. Please provide a valid path. "
+             f"To create a recipe, use the 'hafnia recipe create' command."
+         )
+
+     tree_str = view_recipe_content(path_recipe, depth_limit=depth_limit)
+     click.echo(tree_str)
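Both new commands operate purely on local files, so they can be exercised without platform access. A minimal sketch using click's `CliRunner` (assuming the `cli` package is importable and the current folder contains the `src`, `scripts`, and `Dockerfile` entries that `validate_recipe` requires):

```python
from click.testing import CliRunner

from cli.recipe_cmds import recipe

runner = CliRunner()

# Archive the current folder into a recipe zip (paths are illustrative).
result = runner.invoke(recipe, ["create", ".", "--output", "recipe.zip"])
print(result.output)

# Render the archive as a directory tree, two levels deep.
result = runner.invoke(recipe, ["view", "--path", "recipe.zip", "--depth-limit", "2"])
print(result.output)
```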
cli/runc_cmds.py CHANGED
@@ -1,6 +1,10 @@
+ import json
+ import subprocess
+ import zipfile
  from hashlib import sha256
  from pathlib import Path
  from tempfile import TemporaryDirectory
+ from typing import Optional

  import click

@@ -22,6 +26,72 @@ def launch(task: str) -> None:
      handle_launch(task)


+ @runc.command(name="launch-local")
+ @click.argument("exec_cmd", type=str)
+ @click.option(
+     "--dataset",
+     type=str,
+     help="Hafnia dataset name e.g. mnist, midwest-vehicle-detection or a path to a local dataset",
+     required=True,
+ )
+ @click.option(
+     "--image_name",
+     type=str,
+     default=None,
+     help=(
+         "Docker image name to use for the launch. "
+         "By default, it will use the image name from the 'state.json' "
+         "file generated by the 'hafnia runc build-local' command"
+     ),
+ )
+ @click.pass_obj
+ def launch_local(cfg: Config, exec_cmd: str, dataset: str, image_name: Optional[str]) -> None:
+     """Launch a job within the image."""
+     from hafnia.data.factory import download_or_get_dataset_path
+
+     is_local_dataset = "/" in dataset
+     if is_local_dataset:
+         click.echo(f"Using local dataset: {dataset}")
+         path_dataset = Path(dataset)
+         if not path_dataset.exists():
+             raise click.ClickException(f"Dataset path does not exist: {path_dataset}")
+     else:
+         click.echo(f"Using Hafnia dataset: {dataset}")
+         path_dataset = download_or_get_dataset_path(dataset_name=dataset, cfg=cfg, force_redownload=False)
+
+     if image_name is None:
+         # Load image name from state.json
+         path_state_file = Path("state.json")
+         if not path_state_file.exists():
+             raise click.ClickException("State file does not exist. Please build the image first.")
+         state_dict = json.loads(path_state_file.read_text())
+         if "mdi_tag" not in state_dict:
+             raise click.ClickException("mdi_tag not found in state file. Please build the image first.")
+         image_name = state_dict["mdi_tag"]
+
+     docker_cmds = [
+         "docker",
+         "run",
+         "--rm",
+         "-v",
+         f"{path_dataset.absolute()}:/opt/ml/input/data/training",
+         "-e",
+         "HAFNIA_CLOUD=true",
+         "-e",
+         "PYTHONPATH=src",
+         "--runtime",
+         "nvidia",
+         image_name,
+     ] + exec_cmd.split(" ")
+
+     # Use the "hafnia runc launch" cmd when we have moved to the new folder structure and
+     # direct commands.
+     # Replace '+ exec_cmd.split(" ")' with '["hafnia", "runc", "launch"] + exec_cmd.split(" ")'
+
+     click.echo(f"Running command: \n\t{' '.join(docker_cmds)}")
+     subprocess.run(docker_cmds, check=True)
+
+
  @runc.command(name="build")
  @click.argument("recipe_url")
  @click.argument("state_file", default="state.json")
@@ -56,13 +126,18 @@ def build_local(recipe: str, state_file: str, image_name: str) -> None:

      validate_recipe(recipe_zip)
      click.echo("Recipe successfully validated")
-     image_info = {
-         "name": image_name,
-         "dockerfile": f"{recipe_zip.parent}/Dockerfile",
-         "docker_context": f"{recipe_zip.parent}",
-         "hash": sha256(recipe_zip.read_bytes()).hexdigest()[:8],
-     }
-     click.echo("Start building image")
-     build_image(image_info, "localhost", state_file=state_file)
-     if recipe_created:
-         recipe_zip.unlink()
+     with TemporaryDirectory() as temp_dir:
+         temp_dir_path = Path(temp_dir)
+         with zipfile.ZipFile(recipe_zip, "r") as zip_ref:
+             zip_ref.extractall(temp_dir_path)
+
+         image_info = {
+             "name": image_name,
+             "dockerfile": (temp_dir_path / "Dockerfile").as_posix(),
+             "docker_context": temp_dir_path.as_posix(),
+             "hash": sha256(recipe_zip.read_bytes()).hexdigest()[:8],
+         }
+         click.echo("Start building image")
+         build_image(image_info, "localhost", state_file=state_file)
+     if recipe_created:
+         recipe_zip.unlink()
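For reference, `launch_local` shells out to `docker run`, mounting the resolved dataset at the SageMaker-style training path and injecting the `HAFNIA_CLOUD` flag. A minimal sketch of the argv it assembles (image name and dataset path are illustrative placeholders):

```python
from pathlib import Path


def docker_argv(path_dataset: Path, image_name: str, exec_cmd: str) -> list:
    # Mirrors the docker command built in launch_local above: mount the
    # dataset, flag the run as a cloud job, and request the NVIDIA runtime.
    return [
        "docker", "run", "--rm",
        "-v", f"{path_dataset.absolute()}:/opt/ml/input/data/training",
        "-e", "HAFNIA_CLOUD=true",
        "-e", "PYTHONPATH=src",
        "--runtime", "nvidia",
        image_name,
    ] + exec_cmd.split(" ")


print(" ".join(docker_argv(Path(".data/datasets/mnist"), "localhost/recipe:ab12cd34", "python scripts/train.py")))
```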
hafnia/data/factory.py CHANGED
@@ -21,12 +21,16 @@ def load_local(dataset_path: Path) -> Union[Dataset, DatasetDict]:

  def download_or_get_dataset_path(
      dataset_name: str,
-     endpoint: str,
-     api_key: str,
+     cfg: Optional[Config] = None,
      output_dir: Optional[str] = None,
      force_redownload: bool = False,
  ) -> Path:
      """Download or get the path of the dataset."""
+
+     cfg = cfg or Config()
+     endpoint_dataset = cfg.get_platform_endpoint("datasets")
+     api_key = cfg.api_key
+
      output_dir = output_dir or str(utils.PATH_DATASET)
      dataset_path_base = Path(output_dir).absolute() / dataset_name
      dataset_path_base.mkdir(exist_ok=True, parents=True)
@@ -36,8 +40,8 @@ def download_or_get_dataset_path(
          logger.info("Dataset found locally. Set 'force=True' or add `--force` flag with cli to re-download")
          return dataset_path_sample

-     dataset_id = get_dataset_id(dataset_name, endpoint, api_key)
-     dataset_access_info_url = f"{endpoint}/{dataset_id}/temporary-credentials"
+     dataset_id = get_dataset_id(dataset_name, endpoint_dataset, api_key)
+     dataset_access_info_url = f"{endpoint_dataset}/{dataset_id}/temporary-credentials"

      if force_redownload and dataset_path_sample.exists():
          # Remove old files to avoid old files conflicting with new files
@@ -48,23 +52,6 @@ def download_or_get_dataset_path(
          raise RuntimeError("Failed to download dataset")


- def load_from_platform(
-     dataset_name: str,
-     endpoint: str,
-     api_key: str,
-     output_dir: Optional[str] = None,
-     force_redownload: bool = False,
- ) -> Union[Dataset, DatasetDict]:
-     path_dataset = download_or_get_dataset_path(
-         dataset_name=dataset_name,
-         endpoint=endpoint,
-         api_key=api_key,
-         output_dir=output_dir,
-         force_redownload=force_redownload,
-     )
-     return load_local(path_dataset)
-
-
  def load_dataset(dataset_name: str, force_redownload: bool = False) -> Union[Dataset, DatasetDict]:
      """Load a dataset either from a local path or from the Hafnia platform."""

@@ -72,15 +59,9 @@ def load_dataset(dataset_name: str, force_redownload: bool = False) -> Union[Dat
          path_dataset = Path(os.getenv("MDI_DATASET_DIR", "/opt/ml/input/data/training"))
          return load_local(path_dataset)

-     cfg = Config()
-     endpoint_dataset = cfg.get_platform_endpoint("datasets")
-     api_key = cfg.api_key
-     dataset = load_from_platform(
+     path_dataset = download_or_get_dataset_path(
          dataset_name=dataset_name,
-         endpoint=endpoint_dataset,
-         api_key=api_key,
-         output_dir=None,
          force_redownload=force_redownload,
      )
-
+     dataset = load_local(path_dataset)
      return dataset
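After this refactor, callers no longer thread `endpoint` and `api_key` through every call; a `Config` is constructed internally when none is given. A short usage sketch (assuming a configured profile; `mnist` is an example dataset name):

```python
from hafnia.data.factory import download_or_get_dataset_path, load_dataset

# Resolve the local path of a dataset, downloading it on first use.
# A default Config() is created internally when cfg is omitted.
path_dataset = download_or_get_dataset_path(dataset_name="mnist")

# Or load it directly as a datasets.Dataset / DatasetDict; this path
# replaces the removed load_from_platform helper.
dataset = load_dataset("mnist")
```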
hafnia/experiment/hafnia_logger.py CHANGED
@@ -159,8 +159,12 @@ class HafniaLogger:
      def log_hparams(self, params: Dict, fname: str = "hparams.json"):
          file_path = self._path_artifacts() / fname
          try:
-             with open(file_path, "w") as f:
-                 json.dump(params, f, indent=2)
+             if file_path.exists():  # New params are appended to existing params
+                 existing_params = json.loads(file_path.read_text())
+             else:
+                 existing_params = {}
+             existing_params.update(params)
+             file_path.write_text(json.dumps(existing_params, indent=2))
              logger.info(f"Saved parameters to {file_path}")
          except Exception as e:
              logger.error(f"Failed to save parameters to {file_path}: {e}")
hafnia/platform/builder.py CHANGED
@@ -25,7 +25,7 @@ def validate_recipe(zip_path: Path, required_paths: Optional[set] = None) -> Non
      Raises:
          FileNotFoundError: If any required file or directory is missing.
      """
-     required_paths = {"src/lib/", "src/scripts/", "Dockerfile"} if required_paths is None else required_paths
+     required_paths = {"src", "scripts", "Dockerfile"} if required_paths is None else required_paths
      with ZipFile(zip_path, "r") as archive:
          archive_contents = {Path(file).as_posix() for file in archive.namelist()}
          missing_paths = {
@@ -35,10 +35,10 @@ def validate_recipe(zip_path: Path, required_paths: Optional[set] = None) -> Non
      if missing_paths:
          raise FileNotFoundError(f"The following required paths are missing in the zip archive: {missing_paths}")

-     script_files = [f for f in archive_contents if f.startswith("src/scripts/") and f.endswith(".py")]
+     script_files = [f for f in archive_contents if f.startswith("scripts/") and f.endswith(".py")]

      if not script_files:
-         raise ValueError("No Python script files found in the 'src/scripts/' directory.")
+         raise ValueError("No Python script files found in the 'scripts' directory.")


  def clean_up(files: List[Path], dirs: List[Path], prefix: str = "__") -> None:
@@ -82,11 +82,11 @@ def get_recipe_content(recipe_url: str, output_dir: Path, state_file: str, api_k

      tag = sha256(recipe_path.read_bytes()).hexdigest()[:8]

-     scripts_dir = output_dir / "src/scripts"
+     scripts_dir = output_dir / "scripts"
      valid_commands = [str(f.name)[:-3] for f in scripts_dir.iterdir() if f.is_file() and f.suffix.lower() == ".py"]

      if not valid_commands:
-         raise ValueError("No valid Python script commands found in the 'src/scripts' directory.")
+         raise ValueError("No valid Python script commands found in the 'scripts' directory.")

      state = {
          "user_data": (output_dir / "src").as_posix(),
hafnia/platform/executor.py CHANGED
@@ -21,22 +21,22 @@ def handle_mount(source: str) -> None:
      Mounts the Hafnia environment by adding source directories to PYTHONPATH.

      Args:
-         source (str): Path to the root directory containing 'lib' and 'scripts' subdirectories
+         source (str): Path to the root directory containing 'src' and 'scripts' subdirectories

      Raises:
          FileNotFoundError: If the required directory structure is not found
      """
      source_path = Path(source)
-     lib_dir = source_path / "lib"
+     src_dir = source_path / "src"
      scripts_dir = source_path / "scripts"

-     if not lib_dir.exists() and not scripts_dir.exists():
-         logger.error(f"Filestructure is not supported. Expected 'lib' and 'scripts' directories in {source_path}.")
+     if not src_dir.exists() and not scripts_dir.exists():
+         logger.error(f"Filestructure is not supported. Expected 'src' and 'scripts' directories in {source_path}.")
          exit(1)

-     sys.path.extend([lib_dir.as_posix(), scripts_dir.as_posix()])
+     sys.path.extend([src_dir.as_posix(), scripts_dir.as_posix()])
      python_path = os.getenv("PYTHONPATH", "")
-     os.environ["PYTHONPATH"] = f"{python_path}:{lib_dir.as_posix()}:{scripts_dir.as_posix()}"
+     os.environ["PYTHONPATH"] = f"{python_path}:{src_dir.as_posix()}:{scripts_dir.as_posix()}"
      logger.info(f"Mounted codebase from {source_path}")
hafnia/utils.py CHANGED
@@ -1,18 +1,37 @@
  import functools
  import os
  import sys
+ import tempfile
+ import zipfile
  from datetime import datetime
  from pathlib import Path
  from typing import Any, Callable, Optional
  from zipfile import ZipFile

  import click
+ import pathspec
+ import seedir

  from hafnia.log import logger

  PATH_DATA = Path("./.data")
  PATH_DATASET = PATH_DATA / "datasets"
  PATH_RECIPES = PATH_DATA / "recipes"
+ FILENAME_HAFNIAIGNORE = ".hafniaignore"
+ DEFAULT_IGNORE_SPECIFICATION = [
+     "*.jpg",
+     "*.png",
+     "*.py[cod]",
+     "*_cache/",
+     ".data",
+     ".git",
+     ".venv",
+     ".vscode",
+     "__pycache__",
+     "recipe.zip",
+     "tests",
+     "wandb",
+ ]


  def now_as_str() -> str:
@@ -26,32 +45,40 @@ def get_recipe_path(recipe_name: str) -> Path:
      return path_recipe


- def archive_dir(recipe_path: Path, output_path: Optional[Path] = None) -> Path:
+ def archive_dir(
+     recipe_path: Path,
+     output_path: Optional[Path] = None,
+     path_ignore_file: Optional[Path] = None,
+ ) -> Path:
      recipe_zip_path = output_path or recipe_path / "recipe.zip"
      assert recipe_zip_path.suffix == ".zip", "Output path must be a zip file"
      recipe_zip_path.parent.mkdir(parents=True, exist_ok=True)

-     click.echo(f"Creating zip archive {recipe_path}")
+     path_ignore_file = path_ignore_file or recipe_path / FILENAME_HAFNIAIGNORE
+     if not path_ignore_file.exists():
+         ignore_specification_lines = DEFAULT_IGNORE_SPECIFICATION
+         click.echo(
+             f"No '{FILENAME_HAFNIAIGNORE}' file was found. Files are excluded using the default ignore patterns.\n"
+             f"\tDefault ignore patterns: {DEFAULT_IGNORE_SPECIFICATION}\n"
+             f"Add a '{FILENAME_HAFNIAIGNORE}' file to the root folder to define custom ignore patterns."
+         )
+     else:
+         ignore_specification_lines = Path(path_ignore_file).read_text().splitlines()
+     ignore_specification = pathspec.GitIgnoreSpec.from_lines(ignore_specification_lines)
+
+     include_files = sorted(ignore_specification.match_tree(recipe_path, negate=True))
+     click.echo(f"Creating zip archive of '{recipe_path}'")
      with ZipFile(recipe_zip_path, "w") as zip_ref:
-         for item in recipe_path.rglob("*"):
-             should_skip = (
-                 item == recipe_zip_path
-                 or item.name.endswith(".zip")
-                 or any(part.startswith(".") for part in item.parts)
-                 or any(part == "__pycache__" for part in item.parts)
-             )
-
-             if should_skip:
-                 if item != recipe_zip_path:
-                     click.echo(f"[-] {item.relative_to(recipe_path)}")
+         for str_filepath in include_files:
+             path_file = recipe_path / str_filepath
+             if not path_file.is_file():
                  continue

-             if not item.is_file():
-                 continue
+             relative_path = path_file.relative_to(recipe_path)
+             zip_ref.write(path_file, relative_path)

-             relative_path = item.relative_to(recipe_path)
-             click.echo(f"[+] {relative_path}")
-             zip_ref.write(item, relative_path)
+     recipe_dir_tree = view_recipe_content(recipe_zip_path)
+     click.echo(recipe_dir_tree)
      return recipe_zip_path
@@ -77,6 +104,31 @@ def safe(func: Callable) -> Callable:
      return wrapper


+ def size_human_readable(size_bytes: int, suffix="B") -> str:
+     # From: https://stackoverflow.com/a/1094933
+     size_value = float(size_bytes)
+     for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
+         if abs(size_value) < 1024.0:
+             return f"{size_value:3.1f}{unit}{suffix}"
+         size_value /= 1024.0
+     return f"{size_value:.1f}Yi{suffix}"
+
+
+ def view_recipe_content(recipe_path: Path, style: str = "emoji", depth_limit: int = 3) -> str:
+     zf = zipfile.ZipFile(recipe_path)
+     with tempfile.TemporaryDirectory() as tempdir:
+         path_extract_folder = Path(tempdir) / "recipe"
+         zf.extractall(path_extract_folder)
+         dir_str = seedir.seedir(
+             path_extract_folder, sort=True, first="folders", style=style, depthlimit=depth_limit, printout=False
+         )
+
+     size_str = size_human_readable(os.path.getsize(recipe_path))
+
+     dir_str = dir_str + f"\n\nRecipe size: {size_str}. Max size 800MiB\n"
+     return dir_str
+
+
  def is_remote_job() -> bool:
      """Check if the current job is running in HAFNIA cloud environment."""
      is_remote = os.getenv("HAFNIA_CLOUD", "false").lower() == "true"
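`size_human_readable` repeatedly divides by 1024 and stops at the first binary prefix where the value drops below one unit. A quick check of the formatting, reproducing the helper so the snippet runs standalone:

```python
def size_human_readable(size_bytes: int, suffix: str = "B") -> str:
    size_value = float(size_bytes)
    for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
        if abs(size_value) < 1024.0:
            return f"{size_value:3.1f}{unit}{suffix}"
        size_value /= 1024.0
    return f"{size_value:.1f}Yi{suffix}"


print(size_human_readable(512))          # 512.0B
print(size_human_readable(2_048))        # 2.0KiB
print(size_human_readable(838_860_800))  # 800.0MiB, the documented recipe size limit
```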
hafnia-0.1.24.dist-info/METADATA → hafnia-0.1.25.dist-info/METADATA CHANGED
@@ -1,18 +1,21 @@
  Metadata-Version: 2.4
  Name: hafnia
- Version: 0.1.24
- Summary: Python tools for communication with Hafnia platform.
- Author-email: Ivan Sahumbaiev <ivsa@milestone.dk>
+ Version: 0.1.25
+ Summary: Python SDK for communication with Hafnia platform.
+ Author-email: Milestone Systems <hafniaplatform@milestone.dk>
  License-File: LICENSE
  Requires-Python: >=3.10
  Requires-Dist: boto3>=1.35.91
  Requires-Dist: click>=8.1.8
  Requires-Dist: datasets>=3.2.0
+ Requires-Dist: emoji>=2.14.1
  Requires-Dist: flatten-dict>=0.4.2
+ Requires-Dist: pathspec>=0.12.1
  Requires-Dist: pillow>=11.1.0
  Requires-Dist: pyarrow>=18.1.0
  Requires-Dist: pydantic>=2.10.4
  Requires-Dist: rich>=13.9.4
+ Requires-Dist: seedir>=0.5.0
  Requires-Dist: tqdm>=4.67.1
  Provides-Extra: torch
  Requires-Dist: flatten-dict>=0.4.2; extra == 'torch'
@@ -167,31 +170,49 @@ and datasets available in the data library.
  By combining logging and dataset loading, we can now construct our model training recipe.

  To demonstrate this, we have provided a recipe project that serves as a template for creating and structuring training recipes:
- [recipe-classification](https://github.com/Data-insight-Platform/recipe-classification)
+ [recipe-classification](https://github.com/milestone-hafnia/recipe-classification)

  The project also contains additional information on how to structure your training recipe, use the `HafniaLogger` and the `load_dataset` function, and different approaches for launching
  the training recipe on the Hafnia platform.

+
+ ## Create, Build and Run `recipe.zip` locally
+ To test recipe compatibility with the Hafnia cloud, use the following commands to build and
+ start the job locally.
+
+ ```bash
+ # Create 'recipe.zip' from source folder '.'
+ hafnia recipe create .
+
+ # Build the docker image locally from a 'recipe.zip' file
+ hafnia runc build-local recipe.zip
+
+ # Execute the docker image locally with a desired dataset
+ hafnia runc launch-local --dataset mnist "python scripts/train.py"
+ ```
+
  ## Detailed Documentation
  For more information, go to our [documentation page](https://hafnia.readme.io/docs/welcome-to-hafnia)
  or the markdown pages below.

  - [CLI](docs/cli.md) - Detailed guide for the Hafnia command-line interface
- - [Script2Model Documentation](docs/s2m.md) - Detailed guide for script2model
  - [Release lifecycle](docs/release.md) - Details about package release lifecycle.

  ## Development
  For development, we use a uv-based virtual Python environment.

  Install uv
-
-     curl -LsSf https://astral.sh/uv/install.sh | sh
-
+ ```bash
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ ```

  Install Python dependencies including developer (`--dev`) and optional dependencies (`--all-extras`).

-     uv sync --all-extras --dev
+ ```bash
+ uv sync --all-extras --dev
+ ```

  Run tests:
-
-     uv run pytest tests
+ ```bash
+ uv run pytest tests
+ ```
hafnia-0.1.25.dist-info/RECORD ADDED
@@ -0,0 +1,29 @@
+ cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ cli/__main__.py,sha256=8JgZHtFpWAOUlEFvV0YWviLwesWSA-PTYH_v9COl2xw,1786
+ cli/config.py,sha256=Js_dCn39l7hLhA3ovHorOyVqj-LCLzUg_figSy4jNjs,5279
+ cli/consts.py,sha256=ybpWMhjkrqqevL7eVmYtdn_13a5-bV_5lCpA6_Wzcz0,964
+ cli/data_cmds.py,sha256=BQiythAPwAwudgdUa68v50a345uw5flrcDiBHLGp9lo,1460
+ cli/experiment_cmds.py,sha256=nJCnI0kzmFJ1_vmxIzOYWk_2eiiw1Ub0j02jXi2vW_s,2239
+ cli/profile_cmds.py,sha256=Rg-5wLHSWlZhNPUZBO7LdyJS-Y-SgI6qKLoAac2gSdk,2534
+ cli/recipe_cmds.py,sha256=TnUAoO643NeSio8akVUEJHs6Ttuu2JuprxyTPqzzb4k,1592
+ cli/runc_cmds.py,sha256=6qvVfjxQ_1nkm7lrrIzYETdnBzfiXrmdnWo4jpbbdPk,4830
+ hafnia/__init__.py,sha256=Zphq-cQoX95Z11zm4lkrU-YiAJxddR7IBfwDkxeHoDE,108
+ hafnia/http.py,sha256=rID6Krn9wRGXwsJYvpffsFlt5cwxFgkcihYppqtdT-8,2974
+ hafnia/log.py,sha256=ii--Q6IThsWOluRp_Br9WGhwBtKChU80BXk5pK_NU5A,819
+ hafnia/torch_helpers.py,sha256=P_Jl4IwqUebKVCOXNe6iTorJZA3S-3d92HV274UHIko,7456
+ hafnia/utils.py,sha256=jLq2S8n7W4HS7TsXnDgxTze463Mcatd_wC6pd54a7Os,4221
+ hafnia/data/__init__.py,sha256=Pntmo_1fst8OhyrHB60jQ8mhJJ4hL38tdjLvt0YXEJo,73
+ hafnia/data/factory.py,sha256=scsXrAHlBEP16AJH8RyQ1fyzhei5GxIwsmMgwEru3Pc,2536
+ hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
+ hafnia/experiment/hafnia_logger.py,sha256=8baV6SUtCVIijypU-FfgAOIyWIf_eeJ5a62oFzQesmc,6794
+ hafnia/platform/__init__.py,sha256=Oz1abs40hEKspLg6mVIokdtsp1tZJF9Pndv8uSMOgtQ,522
+ hafnia/platform/api.py,sha256=aJvlQGjzqm-D3WYb2xTEYX60YoJoWN_kyYdlkvqt_MI,382
+ hafnia/platform/builder.py,sha256=6xLy64a4cytMZEfqiA0kPzxiATEBbHXmDbf7igTMAiM,6595
+ hafnia/platform/download.py,sha256=AWnlSYj9FD7GvZ_-9Sw5jrcxi3RyBSSUVph8U9T9ZbQ,4711
+ hafnia/platform/executor.py,sha256=8E6cGmEMr5xYb3OReBuWj8ZnVXc0Es0UkfPamsmjH4g,3759
+ hafnia/platform/experiment.py,sha256=951ppXdrp075pW2xGFOM0oiGYGE1I53tP9azQjjIUe8,2305
+ hafnia-0.1.25.dist-info/METADATA,sha256=Q5dBhUXq-6lgaIVwR2ndWPsF7GFu4m8-G7dIjcW0iug,8660
+ hafnia-0.1.25.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ hafnia-0.1.25.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
+ hafnia-0.1.25.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+ hafnia-0.1.25.dist-info/RECORD,,
hafnia-0.1.24.dist-info/licenses/LICENSE → hafnia-0.1.25.dist-info/licenses/LICENSE CHANGED
@@ -1,6 +1,6 @@
  MIT License

- Copyright (c) 2025 Data-insight-Platform
+ Copyright (c) 2025 Milestone Systems A/S

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
hafnia-0.1.24.dist-info/RECORD REMOVED
@@ -1,28 +0,0 @@
- cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- cli/__main__.py,sha256=h1tOAK15us_dkoMd6Yd4SesVPisojTxOXiYxpLZnatw,1736
- cli/config.py,sha256=Js_dCn39l7hLhA3ovHorOyVqj-LCLzUg_figSy4jNjs,5279
- cli/consts.py,sha256=nd9fPTypqCJYJoOp9QifNgj7-c91J-T0dTv83c33g50,892
- cli/data_cmds.py,sha256=FYZPaeTs6WQnTf9b4a56w3SXQ1VAOMF5s8eUDHeNtd4,1538
- cli/experiment_cmds.py,sha256=d9M8008Z0Y49KIITy73qd5ViPehgMymDVAgsvZhwye0,3099
- cli/profile_cmds.py,sha256=Rg-5wLHSWlZhNPUZBO7LdyJS-Y-SgI6qKLoAac2gSdk,2534
- cli/runc_cmds.py,sha256=fNgPNURXmO4nfLyuorcpqWEeNNuTVsRBHxcIA9FCPts,2197
- hafnia/__init__.py,sha256=Zphq-cQoX95Z11zm4lkrU-YiAJxddR7IBfwDkxeHoDE,108
- hafnia/http.py,sha256=rID6Krn9wRGXwsJYvpffsFlt5cwxFgkcihYppqtdT-8,2974
- hafnia/log.py,sha256=ii--Q6IThsWOluRp_Br9WGhwBtKChU80BXk5pK_NU5A,819
- hafnia/torch_helpers.py,sha256=P_Jl4IwqUebKVCOXNe6iTorJZA3S-3d92HV274UHIko,7456
- hafnia/utils.py,sha256=WWWXZPolzncQmSb4onArT1lJyISV0D22eEejrHWePoc,2425
- hafnia/data/__init__.py,sha256=Pntmo_1fst8OhyrHB60jQ8mhJJ4hL38tdjLvt0YXEJo,73
- hafnia/data/factory.py,sha256=61oGQsm1naG_6Nd_UY2teOki8Oiu2F-gT-nGocmqAcg,2992
- hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
- hafnia/experiment/hafnia_logger.py,sha256=bkSfioEaBXthUEGBjMbQaSefU30b_Gu809VcmWy_wyg,6566
- hafnia/platform/__init__.py,sha256=Oz1abs40hEKspLg6mVIokdtsp1tZJF9Pndv8uSMOgtQ,522
- hafnia/platform/api.py,sha256=aJvlQGjzqm-D3WYb2xTEYX60YoJoWN_kyYdlkvqt_MI,382
- hafnia/platform/builder.py,sha256=VqcbOPxC7HqGAqFMb6ewThBZYLEV5RBgQrVuMd2dbLY,6622
- hafnia/platform/download.py,sha256=AWnlSYj9FD7GvZ_-9Sw5jrcxi3RyBSSUVph8U9T9ZbQ,4711
- hafnia/platform/executor.py,sha256=HA8IF2ZTZ6ZiRWNjVdIdWKiUa5i8Yoz06mIUBDwzVDk,3759
- hafnia/platform/experiment.py,sha256=951ppXdrp075pW2xGFOM0oiGYGE1I53tP9azQjjIUe8,2305
- hafnia-0.1.24.dist-info/METADATA,sha256=85f5fVCG57auPMtzbF6T0o05smjq20XvZP-6jqNqEYg,8124
- hafnia-0.1.24.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- hafnia-0.1.24.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
- hafnia-0.1.24.dist-info/licenses/LICENSE,sha256=DqQ3NOAy7Efwppv0IAVXEm3Za2SI_1OuDfG20ab8eQw,1078
- hafnia-0.1.24.dist-info/RECORD,,