hafnia 0.4.3__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hafnia/platform/s5cmd_utils.py ADDED
@@ -0,0 +1,147 @@
+ import os
+ import shutil
+ import subprocess
+ import sys
+ import tempfile
+ import uuid
+ from pathlib import Path
+ from typing import Dict, List, Optional
+
+ from hafnia.log import sys_logger, user_logger
+ from hafnia.utils import progress_bar
+
+
+ def find_s5cmd() -> Optional[str]:
+     """Locate the s5cmd executable across different installation methods.
+
+     Searches for s5cmd in:
+     1. System PATH (via shutil.which)
+     2. Python bin directory (Unix-like systems)
+     3. Python executable directory (direct installs)
+
+     Returns:
+         str: Absolute path to s5cmd executable if found, None otherwise.
+     """
+     result = shutil.which("s5cmd")
+     if result:
+         return result
+     python_dir = Path(sys.executable).parent
+     locations = (python_dir / "Scripts" / "s5cmd.exe", python_dir / "bin" / "s5cmd", python_dir / "s5cmd")
+     for loc in locations:
+         if loc.exists():
+             return str(loc)
+     return None
+
+
+ def execute_command(args: List[str], append_envs: Optional[Dict[str, str]] = None) -> subprocess.CompletedProcess:
+     s5cmd_bin = find_s5cmd()
+     cmds = [s5cmd_bin] + args
+     envs = os.environ.copy()
+     if append_envs:
+         envs.update(append_envs)
+
+     result = subprocess.run(
+         cmds,  # type: ignore[arg-type]
+         stdout=subprocess.PIPE,
+         stderr=subprocess.PIPE,
+         universal_newlines=True,
+         env=envs,
+     )
+     return result
+
+
+ def execute_commands(
+     commands: List[str],
+     append_envs: Optional[Dict[str, str]] = None,
+     description: str = "Executing s5cmd commands",
+ ) -> List[str]:
+     append_envs = append_envs or {}
+
+     with tempfile.TemporaryDirectory() as temp_dir:
+         tmp_file_path = Path(temp_dir, f"{uuid.uuid4().hex}.txt")
+         tmp_file_path.write_text("\n".join(commands))
+
+         s5cmd_bin = find_s5cmd()
+         if s5cmd_bin is None:
+             raise ValueError("Can not find s5cmd executable.")
+         run_cmds = [s5cmd_bin, "run", str(tmp_file_path)]
+         sys_logger.debug(run_cmds)
+         envs = os.environ.copy()
+         envs.update(append_envs)
+
+         process = subprocess.Popen(
+             run_cmds,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.STDOUT,
+             universal_newlines=True,
+             env=envs,
+         )
+
+         error_lines = []
+         lines = []
+         for line in progress_bar(process.stdout, total=len(commands), description=description):  # type: ignore[arg-type]
+             if "ERROR" in line or "error" in line:
+                 error_lines.append(line.strip())
+             lines.append(line.strip())
+
+         if len(error_lines) > 0:
+             show_n_lines = min(5, len(error_lines))
+             str_error_lines = "\n".join(error_lines[:show_n_lines])
+             user_logger.error(
+                 f"Detected {len(error_lines)} errors occurred while executing a total of {len(commands)} "
+                 f" commands with s5cmd. The first {show_n_lines} is printed below:\n{str_error_lines}"
+             )
+             raise RuntimeError("Errors occurred during s5cmd execution.")
+         return lines
+
+
+ def delete_bucket_content(
+     bucket_prefix: str,
+     remove_bucket: bool = True,
+     append_envs: Optional[Dict[str, str]] = None,
+ ) -> None:
+     # Remove all files in the bucket
+     returns = execute_command(["rm", f"{bucket_prefix}/*"], append_envs=append_envs)
+
+     if returns.returncode != 0:
+         bucket_is_already_deleted = "no object found" in returns.stderr.strip()
+         if bucket_is_already_deleted:
+             user_logger.info(f"No action was taken. S3 bucket '{bucket_prefix}' is already empty.")
+         else:
+             user_logger.error("Error during s5cmd rm command:")
+             user_logger.error(returns.stdout)
+             raise RuntimeError(f"Failed to delete all files in S3 bucket '{bucket_prefix}'.")
+
+     if remove_bucket:
+         # Remove the bucket itself
+         returns = execute_command(["rb", bucket_prefix], append_envs=append_envs)
+         if returns.returncode != 0:
+             user_logger.error("Error during s5cmd rb command:")
+             user_logger.error(returns.stdout)
+             raise RuntimeError(f"Failed to delete S3 bucket '{bucket_prefix}'.")
+         user_logger.info(f"S3 bucket '{bucket_prefix}' has been deleted.")
+
+
+ def list_bucket(bucket_prefix: str, append_envs: Optional[Dict[str, str]] = None) -> List[str]:
+     output = execute_command(["ls", f"{bucket_prefix}/*"], append_envs=append_envs)
+     has_missing_folder = "no object found" in output.stderr.strip()
+     if output.returncode != 0 and not has_missing_folder:
+         user_logger.error("Error during s5cmd ls command:")
+         user_logger.error(output.stderr)
+         raise RuntimeError(f"Failed to list dataset in S3 bucket '{bucket_prefix}'.")
+
+     files_in_s3 = [f"{bucket_prefix}/{line.split(' ')[-1]}" for line in output.stdout.splitlines()]
+     return files_in_s3
+
+
+ def fast_copy_files(
+     src_paths: List[str],
+     dst_paths: List[str],
+     append_envs: Optional[Dict[str, str]] = None,
+     description: str = "Copying files",
+ ) -> List[str]:
+     if len(src_paths) != len(dst_paths):
+         raise ValueError("Source and destination paths must have the same length.")
+     cmds = [f"cp {src} {dst}" for src, dst in zip(src_paths, dst_paths)]
+     lines = execute_commands(cmds, append_envs=append_envs, description=description)
+     return lines
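
The new helpers wrap the s5cmd CLI for batched S3 operations (listing, bulk copying, deleting). Below is a minimal usage sketch, not part of the package diff: it assumes s5cmd is installed on PATH or in the active Python environment, that AWS credentials are available in the environment, and that the module is importable as `hafnia.platform.s5cmd_utils` (as the RECORD entry suggests). The bucket prefix and destination folder are placeholders.

```python
# Hypothetical usage of the new s5cmd helpers; bucket prefix and paths are placeholders.
from pathlib import Path

from hafnia.platform import s5cmd_utils

bucket_prefix = "s3://my-example-bucket/datasets/mnist"

# List objects under the prefix (returns full "s3://..." paths).
files = s5cmd_utils.list_bucket(bucket_prefix)
print(f"Found {len(files)} objects")

# Copy several files in a single batched 's5cmd run' invocation with a progress bar.
Path("downloads").mkdir(exist_ok=True)
src_paths = files[:10]
dst_paths = [f"downloads/{Path(src).name}" for src in src_paths]
s5cmd_utils.fast_copy_files(src_paths, dst_paths, description="Downloading sample files")

# Destructive: empty the prefix (set remove_bucket=True to also delete the bucket).
# s5cmd_utils.delete_bucket_content(bucket_prefix, remove_bucket=False)
```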
hafnia/utils.py CHANGED
@@ -65,6 +65,10 @@ def timed(label: str):
      return decorator


+ def get_path_dataset_gallery_images(dataset_name: str) -> Path:
+     return PATH_DATASETS / dataset_name / "gallery_images"
+
+
  def get_path_hafnia_cache() -> Path:
      return Path.home() / "hafnia"
 
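A short sketch of the new path helper, not part of the diff; it assumes `PATH_DATASETS` is the dataset root already defined in `hafnia.utils`, and the dataset name is a placeholder.

```python
# Hypothetical usage; "mnist" is a placeholder dataset name.
from hafnia.utils import get_path_dataset_gallery_images

path_gallery = get_path_dataset_gallery_images("mnist")
print(path_gallery)  # resolves to <PATH_DATASETS>/mnist/gallery_images
```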
hafnia-0.4.3.dist-info/METADATA → hafnia-0.5.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: hafnia
- Version: 0.4.3
+ Version: 0.5.0
  Summary: Python SDK for communication with Hafnia platform.
  Author-email: Milestone Systems <hafniaplatform@milestone.dk>
  License-File: LICENSE
@@ -10,7 +10,7 @@ Requires-Dist: click>=8.1.8
  Requires-Dist: emoji>=2.14.1
  Requires-Dist: flatten-dict>=0.4.2
  Requires-Dist: keyring>=25.6.0
- Requires-Dist: mcp==1.16.0
+ Requires-Dist: mcp>=1.16.0
  Requires-Dist: mlflow>=3.4.0
  Requires-Dist: more-itertools>=10.7.0
  Requires-Dist: opencv-python-headless>=4.11.0.86
@@ -209,7 +209,7 @@ DatasetInfo(
  ```

  You can iterate and access samples in the dataset using the `HafniaDataset` object.
- Each sample contain image and annotations information.
+ Each sample contain image and annotations information.

  ```python
  from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample
hafnia-0.4.3.dist-info/RECORD → hafnia-0.5.0.dist-info/RECORD RENAMED
@@ -1,15 +1,15 @@
  hafnia/__init__.py,sha256=0qpjWfVbcfKzLSnfUW6RdclSGkesMQRFS-n_aTJJoSE,179
- hafnia/http.py,sha256=bjXbV_3uKbBdudqMdYtnsMttUAsNRMsetYZ4F2xXlEs,3635
+ hafnia/http.py,sha256=PkEuanlUKeERABXttaGAJT6hOZ1_B2CwJodbUV4uZdg,3710
  hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
  hafnia/torch_helpers.py,sha256=Qj8pV5P8tGw6F3W2Rj9Kel7O8hWuUqiKfTdXd3h5UOo,14080
- hafnia/utils.py,sha256=AC4MgF-xkaVrVwKppQ8yZMlTgR3O9Q9ZGeG9sMqd1as,8578
+ hafnia/utils.py,sha256=l_awkrb3OttxqSMkPiYcpuP3c_kejkSmiqndSahc1s0,8703
  hafnia/data/__init__.py,sha256=o9QjiGbEcNa6r-qDmwwmxPXf-1UitNl5-WxFNcujqsg,111
  hafnia/data/factory.py,sha256=kHkvOtBUbwaShZBGf1kZzocDJBn_1dHHLrQxnUpJmfY,778
- hafnia/dataset/dataset_details_uploader.py,sha256=I4hh4-hb8Agpv-o6qekx43dxZ_Lfngbqo6RX1D2DNbU,24854
- hafnia/dataset/dataset_helpers.py,sha256=0GbS6PfaiYBulDKRCbd0miN5LHaUIp-XzGt_wZay8xs,5044
- hafnia/dataset/dataset_names.py,sha256=5yt5fcotl-uc9aFM4Fp-Z-oNN0N1p9OY3tJ1ujSr1FQ,6601
- hafnia/dataset/hafnia_dataset.py,sha256=8RTMolUGgPen-F8IA0kDmxCGl4J5t-eendFZNfSFnsQ,29640
- hafnia/dataset/hafnia_dataset_types.py,sha256=aDRdDCBzHlkuY4t1qnCRLP8x_yGF2fCtpIQYmtDIJfA,21193
+ hafnia/dataset/dataset_details_uploader.py,sha256=H_zz67bBwbgo4StUwBNmH89WlqydIc-tEQbrRnZDwgg,24161
+ hafnia/dataset/dataset_helpers.py,sha256=SwQnFy0T0dGVRuBeT7tG_CHsb9WtW8v8mTg9TnQH2k4,4093
+ hafnia/dataset/dataset_names.py,sha256=qYbrsqeKiTsu53w7uWDXdw0Y8cHDEHydoalakChKDcY,7413
+ hafnia/dataset/hafnia_dataset.py,sha256=YzQjFAY3m5MJvKyHELWdS1mn9UY4xmGmnSRAS8nQRbY,34037
+ hafnia/dataset/hafnia_dataset_types.py,sha256=B0yW7wI6Bf0ILw0HS3LeUu_w0NKJyTZhHvKXT_98q_A,21415
  hafnia/dataset/license_types.py,sha256=b1Jt5e8N89sujIs4T9y39sJEkzpAwCoLDTHDTpkiEOI,2166
  hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=CkU61k_qaPSu_4Yo-NArr7jHWP417LCmzDKnmzZuDeo,19080
  hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=j3Oiytt3LI2rCaJid7Y44oT9MXvlZVqvZanngMebIWg,3088
@@ -18,10 +18,11 @@ hafnia/dataset/format_conversions/format_coco.py,sha256=7GjeF016ZBaKxu-VYiqXxuPw
  hafnia/dataset/format_conversions/format_helpers.py,sha256=-lNgn_mrpVM_Xwb3jHH0BlBEPCMKjLqTeYf4PbssbuQ,1144
  hafnia/dataset/format_conversions/format_image_classification_folder.py,sha256=ALVsQfSvBfAkpmUDijznqqd2JBh0Qtnvzq7igxMjMb8,7170
  hafnia/dataset/format_conversions/format_yolo.py,sha256=zvCHo2L_0mPJScMbDtwvZUts9UX2ERKhhYbY31Q6tQA,9912
- hafnia/dataset/format_conversions/torchvision_datasets.py,sha256=EmdTKNxrPT-qE8u7ZzNi9TxdwEBdXRphVliKTWgN0B4,12007
+ hafnia/dataset/format_conversions/torchvision_datasets.py,sha256=1H4AMaOYRe6xAU-Pjv7oB7djpis6W6qmzYdHg_hxwKg,12096
+ hafnia/dataset/operations/dataset_s3_storage.py,sha256=kIQBK3yc8AqD1kDjWJi8m-ujOvZ5xvgsT93RHUpuByA,8864
  hafnia/dataset/operations/dataset_stats.py,sha256=Ltf-V4_o_IB4UXw9WG9bsVoqeX90yGsjivK0CDggriw,11930
  hafnia/dataset/operations/dataset_transformations.py,sha256=qUNno0rAT1A452uzlR-k1WbatyY9VuMp1QJjkMg9GzE,19495
- hafnia/dataset/operations/table_transformations.py,sha256=l90CLGNZZVhedcEH5r4T3f7OyCprj-2vNKeYXsmWku0,14370
+ hafnia/dataset/operations/table_transformations.py,sha256=odcs6e7L-WmkpzkZcF7aRLXIzaXElQX6raFX8FfyZEk,14397
  hafnia/dataset/primitives/__init__.py,sha256=xFLJ3R7gpbuQnNJuFhuu836L3nicwoaY5aHkqk7Bbr8,927
  hafnia/dataset/primitives/bbox.py,sha256=QJJBebltOd9J3idisp3QdX0gCgz6P5xlIlGbth19fG0,6669
  hafnia/dataset/primitives/bitmask.py,sha256=Q7RiNYvMDlcFPkXAWXDJkCIERjnUTCrHu6VeEPX1jEA,7212
@@ -36,9 +37,10 @@ hafnia/experiment/hafnia_logger.py,sha256=BHIOLAds_3JxT0cev_ikUH0XQVIxBJTkcBSx2Q
  hafnia/platform/__init__.py,sha256=L_Q7CNpsJ0HMNPy_rLlLK5RhmuCU7IF4BchxKv6amYc,782
  hafnia/platform/builder.py,sha256=kUEuj5-qtL1uk5v2tUvOCREn5yV-G4Fr6F31haIAb5E,5808
  hafnia/platform/dataset_recipe.py,sha256=ybfSSHVPG0eFUbzg_1McezPSOtMoDZEg7l6rFYndtb4,3857
- hafnia/platform/datasets.py,sha256=RX4XqJ4GRYw4CvrF8-87KWPVBJbg97w8JAUqkWR5F_g,9022
+ hafnia/platform/datasets.py,sha256=gfFHltZ70BJ3XeqxGvDMn06B1hmfhS-9bO149qtJruA,13079
  hafnia/platform/download.py,sha256=A6McEvRw5KmEAn3kx3nNnjxQm1Z8ZSAhn_NFgrM66_8,5020
  hafnia/platform/experiment.py,sha256=SrEH0nuwwBXf1Iu4diB1BEPqL-TxW3aQkZWBbM1-tY0,1846
+ hafnia/platform/s5cmd_utils.py,sha256=clI_niKjya0E5d8Jmgvfj86jiuxMD9uK83znWKN6tYY,5335
  hafnia/platform/trainer_package.py,sha256=w6JC7o-279ujcwtNTbUaQ9AnPcYRPPbD8EACa6XyUHA,2206
  hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
  hafnia/visualizations/image_visualizations.py,sha256=rB7c-KK-qq0BsSdkaFxCAHOOCTXTUQx0VMEhib7ig0k,7509
@@ -46,15 +48,15 @@ hafnia_cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hafnia_cli/__main__.py,sha256=CqD_7RfbfwB6MED3WZ8WxclrFxWcRMtZ5A1Snnst3GM,1761
  hafnia_cli/config.py,sha256=X0dJBYH-7mNAFkHgeZvDbawlQqoyCwoY4C-QhlyYCA0,7695
  hafnia_cli/consts.py,sha256=uCpYX44NCu_Zvte0QwChunxOo-qqhcaJRSYDAIsoJ8A,972
- hafnia_cli/dataset_cmds.py,sha256=fzw8LVn1mR319qorxK7HJ6Bs9lM9GdpMBkA6tAqssIg,1432
+ hafnia_cli/dataset_cmds.py,sha256=gxUgTUMzZYEvwUdEDHTuTGFge7pqQPkXPKg20v4GGNU,1948
  hafnia_cli/dataset_recipe_cmds.py,sha256=OYSmpKL0Wxo1ZSxIGfH6w7pEWoI7CjUTmfIELJSZjGQ,2894
  hafnia_cli/experiment_cmds.py,sha256=_KxsMhbjlkIno1PIMXJ0Omw_PSJi8qi9hmtCUqwcj1M,7970
  hafnia_cli/keychain.py,sha256=bNyjjULVQu7kV338wUC65UvbCwmSGOmEjKWPLIQjT0k,2555
- hafnia_cli/profile_cmds.py,sha256=1SJGMQM9tBwvS3prSehw6CgHW9cmrO2EZ7CAsH4f7qU,3477
+ hafnia_cli/profile_cmds.py,sha256=yTyOsPsUssLCzFIxURkxbKrFEhYIVDlUC0G2s5Uks-U,3476
  hafnia_cli/runc_cmds.py,sha256=38SGajsizpAydawCWL6gwza9NtLugtHfCdnldllwWJI,5016
  hafnia_cli/trainer_package_cmds.py,sha256=hUBc6gCMV28fcAA0xQdXKL1z-a3aL9lMWcVqjvHO1Uo,2326
- hafnia-0.4.3.dist-info/METADATA,sha256=VO_uuOnjeKRtV9yuyUKtILlZV6Oi8TqO1BwhcIXk5xg,19273
- hafnia-0.4.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- hafnia-0.4.3.dist-info/entry_points.txt,sha256=j2jsj1pqajLAiSOnF7sq66A3d1SVeHPKVTVyIFzipSA,52
- hafnia-0.4.3.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
- hafnia-0.4.3.dist-info/RECORD,,
+ hafnia-0.5.0.dist-info/METADATA,sha256=VRy-2ThTt7kczs4uPFU_qeKgiuye1JZ2Pv8VoN1H1e4,19272
+ hafnia-0.5.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ hafnia-0.5.0.dist-info/entry_points.txt,sha256=j2jsj1pqajLAiSOnF7sq66A3d1SVeHPKVTVyIFzipSA,52
+ hafnia-0.5.0.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+ hafnia-0.5.0.dist-info/RECORD,,
hafnia-0.4.3.dist-info/WHEEL → hafnia-0.5.0.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: hatchling 1.27.0
+ Generator: hatchling 1.28.0
  Root-Is-Purelib: true
  Tag: py3-none-any
hafnia_cli/dataset_cmds.py CHANGED
@@ -50,3 +50,21 @@ def cmd_dataset_download(cfg: Config, dataset_name: str, destination: Optional[c
      except Exception:
          raise click.ClickException(consts.ERROR_GET_RESOURCE)
      return path_dataset
+
+
+ @dataset.command("delete")
+ @click.argument("dataset_name")
+ @click.option(
+     "--interactive/--no-interactive",
+     default=True,
+     help="Whether to ask for confirmation before deleting the dataset.",
+ )
+ @click.pass_obj
+ def cmd_dataset_delete(cfg: Config, dataset_name: str, interactive: bool) -> None:
+     """Delete dataset from Hafnia platform"""
+     from hafnia.platform import datasets
+
+     datasets.delete_dataset_completely_by_name(
+         dataset_name=dataset_name,
+         interactive=interactive,
+     )
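
The new `delete` subcommand in the dataset CLI group forwards to `datasets.delete_dataset_completely_by_name`. A sketch of the equivalent programmatic call is shown below; the dataset name is a placeholder and an authenticated Hafnia profile is assumed. Passing `interactive=False` mirrors the `--no-interactive` flag and skips the confirmation prompt.

```python
# Hypothetical programmatic equivalent of the CLI's "dataset delete --no-interactive";
# "my-test-dataset" is a placeholder and valid platform credentials are assumed.
from hafnia.platform import datasets

datasets.delete_dataset_completely_by_name(
    dataset_name="my-test-dataset",
    interactive=False,  # skip the confirmation prompt
)
```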
hafnia_cli/profile_cmds.py CHANGED
@@ -94,7 +94,6 @@ def cmd_profile_active(cfg: Config) -> None:
  def profile_show(cfg: Config) -> None:
      masked_key = f"{cfg.api_key[:11]}...{cfg.api_key[-4:]}" if len(cfg.api_key) > 20 else "****"
      console = Console()
-
      table = Table(title=f"{consts.PROFILE_TABLE_HEADER} {cfg.active_profile}", show_header=False)
      table.add_column("Property", style="cyan")
      table.add_column("Value")