hafnia-0.2.1-py3-none-any.whl → hafnia-0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hafnia/dataset/hafnia_dataset.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import os
 import shutil
 from dataclasses import dataclass
 from pathlib import Path
@@ -182,9 +181,8 @@ class HafniaDataset:
         table = read_table_from_path(path_folder)
 
         # Convert from relative paths to absolute paths
-        table = table.with_columns(
-            pl.concat_str([pl.lit(str(path_folder.absolute()) + os.sep), pl.col("file_name")]).alias("file_name")
-        )
+        dataset_root = path_folder.absolute().as_posix() + "/"
+        table = table.with_columns((dataset_root + pl.col("file_name")).alias("file_name"))
         if check_for_images:
             check_image_paths(table)
         return HafniaDataset(samples=table, info=dataset_info)
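
Note on the hunk above: a plain Python string concatenates directly with a polars expression (via Expr.__radd__), so the pl.lit/pl.concat_str construction collapses to a plain "+", and as_posix() keeps the separator a forward slash on every platform instead of os.sep. A minimal sketch of the equivalence, using made-up data rather than anything from the package:

    from pathlib import Path

    import polars as pl

    table = pl.DataFrame({"file_name": ["images/0.png", "images/1.png"]})
    dataset_root = Path("data/tiny-dataset").absolute().as_posix() + "/"

    # Old form: build the prefix explicitly with pl.lit and pl.concat_str.
    old = table.with_columns(
        pl.concat_str([pl.lit(dataset_root), pl.col("file_name")]).alias("file_name")
    )
    # New form: broadcast string concatenation produces the same column.
    new = table.with_columns((dataset_root + pl.col("file_name")).alias("file_name"))
    assert old.equals(new)
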
hafnia/platform/datasets.py CHANGED
@@ -2,6 +2,7 @@ import os
 import shutil
 import subprocess
 import tempfile
+import uuid
 from pathlib import Path
 from typing import Any, Dict, List, Optional
 
@@ -80,7 +81,7 @@ def download_dataset_from_access_endpoint(
 ) -> None:
     resource_credentials = get_resource_credentials(endpoint, api_key)
 
-    local_dataset_paths = [str(path_dataset / filename) for filename in DATASET_FILENAMES_REQUIRED]
+    local_dataset_paths = [(path_dataset / filename).as_posix() for filename in DATASET_FILENAMES_REQUIRED]
     s3_uri = resource_credentials.s3_uri()
     s3_dataset_files = [f"{s3_uri}/{filename}" for filename in DATASET_FILENAMES_REQUIRED]
 
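
The str() → as_posix() swap is the same portability fix: on Windows, str(Path(...)) renders backslashes, which would not match the forward-slash style of the S3 URIs paired with these local paths. An illustration, using PureWindowsPath so it behaves identically on any OS:

    # Illustration only: how str() and as_posix() differ for Windows paths.
    from pathlib import PureWindowsPath

    path_dataset = PureWindowsPath("C:/datasets/tiny-dataset")
    filename = "annotations.jsonl"

    print(str(path_dataset / filename))          # C:\datasets\tiny-dataset\annotations.jsonl
    print((path_dataset / filename).as_posix())  # C:/datasets/tiny-dataset/annotations.jsonl
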
@@ -94,7 +95,6 @@ def download_dataset_from_access_endpoint(
 
     if not download_files:
         return
-
     dataset = HafniaDataset.from_path(path_dataset, check_for_images=False)
     fast_copy_files_s3(
         src_paths=dataset.samples[ColumnName.REMOTE_PATH].to_list(),
@@ -124,8 +124,10 @@ def execute_s5cmd_commands(
     description: str = "Executing s5cmd commands",
 ) -> List[str]:
     append_envs = append_envs or {}
-    with tempfile.NamedTemporaryFile(suffix=".txt") as tmp_file:
-        tmp_file_path = Path(tmp_file.name)
+    # On Windows, a NamedTemporaryFile cannot be reopened or deleted while it is
+    # still open, which is why a temporary directory is created instead.
+    with tempfile.TemporaryDirectory() as temp_dir:
+        tmp_file_path = Path(temp_dir, f"{uuid.uuid4().hex}.txt")
         tmp_file_path.write_text("\n".join(commands))
         run_cmds = [
             "s5cmd",
{hafnia-0.2.1.dist-info → hafnia-0.2.2.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: hafnia
-Version: 0.2.1
+Version: 0.2.2
 Summary: Python SDK for communication with Hafnia platform.
 Author-email: Milestone Systems <hafniaplatform@milestone.dk>
 License-File: LICENSE
{hafnia-0.2.1.dist-info → hafnia-0.2.2.dist-info}/RECORD RENAMED
@@ -8,7 +8,6 @@ cli/profile_cmds.py,sha256=-HQcFgYI6Rqaefi0Nj-91KhiqPKUj7zOaiJWbHx_bac,3196
 cli/recipe_cmds.py,sha256=qnMfF-te47HXNkgyA0hm9X3etDQsqMnrVEGDCrzVjZU,1462
 cli/runc_cmds.py,sha256=QqhQe2sd7tK1Bl2aGfIWRyJjpP6F7Tducg7HULrHsZ4,4958
 hafnia/__init__.py,sha256=Zphq-cQoX95Z11zm4lkrU-YiAJxddR7IBfwDkxeHoDE,108
-hafnia/helper_testing.py,sha256=GnaNhXdY81arjCT9M2RUAmvn2-aIzRqlCtbWwGbOIaY,3901
 hafnia/http.py,sha256=HoPB03IL6e-nglTrw1NGT6sDx1T8VNas5HjTT1QZHnU,3035
 hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
 hafnia/torch_helpers.py,sha256=ho65B0WIu_SjbaKPRL4wabDNrnVumWH8QSXVH4r7NAY,11605
@@ -18,7 +17,7 @@ hafnia/data/factory.py,sha256=OY6l6c9UKk6OUDhG4Akb2VgcSaTRLHlbSndAe1HuW2U,813
 hafnia/dataset/dataset_helpers.py,sha256=WVCpbUfNbHy7MZJqJ3OyJF8k1hSObo3kScxpXT17Sj8,3510
 hafnia/dataset/dataset_names.py,sha256=mp7A_TOqgoqHUEBCPC4ReKNJ93cxwQB451owoCqD6yM,2120
 hafnia/dataset/dataset_upload_helper.py,sha256=D1BGaeEar4McpUvXj4Yy8nk1tr12IEVhP_Ma47OoWmU,21150
-hafnia/dataset/hafnia_dataset.py,sha256=4SJUq7pAqLkcFzgnOUUx8ERraE_sABctOAsONBJExME,27664
+hafnia/dataset/hafnia_dataset.py,sha256=fVAGfkab1g1JQOA85r2y53cD4xX5LiKN8DEThOj2m28,27649
 hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=DbPLlmshF6DC98Cwko04XtBaXgSg966LZKR6JXD_9Sg,13632
 hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=wh1y2XyX0PwOwfuzJ3_17KKng2Rk0zLlgdfSHfS1SyM,1305
 hafnia/dataset/dataset_recipe/recipe_types.py,sha256=6LxfanhX9ihof1gGSonoC-56zSWsI8k2aS4Uw_QgXoM,5176
@@ -38,13 +37,13 @@ hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M
 hafnia/experiment/hafnia_logger.py,sha256=dnV3VPzJK7DSeUh0g4Hk9w1g-eSXcVqJD9If0h2d2GE,6885
 hafnia/platform/__init__.py,sha256=zJsR6Hy_0iUcC9xL-lBnqR0mLfF4EUr_VXa_XQA7SlA,455
 hafnia/platform/builder.py,sha256=_g8ykQWETz5Y4Np9QU1a6wIzbbJwXCkbiOCA6JcF5Rc,5742
-hafnia/platform/datasets.py,sha256=J252hrejrBWUdS6hY4lRc9_SbYy7CMD92068lLHjPC8,6953
+hafnia/platform/datasets.py,sha256=aMh4kNPi0cQ5xN3ffu1dJAyH2SxzDJK70P19e-CLKa8,7101
 hafnia/platform/download.py,sha256=oJzdxSIDTuw1an7maC6I7A5nZvDaZPhUkuAmyRwN9Kc,6843
 hafnia/platform/experiment.py,sha256=-nAfTmn1c8sE6pHDCTNZvWDTopkXndarJAPIGvsnk60,2389
 hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
 hafnia/visualizations/image_visualizations.py,sha256=RuFFj2fJCm9dxl2Lq0MumJHF81ZnX-IsDsTxm8ZFV9A,7313
-hafnia-0.2.1.dist-info/METADATA,sha256=A1_OEYNslARBFGoYBPm7_-3YivfUeA8adwUUbsM3UsY,19040
-hafnia-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-hafnia-0.2.1.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
-hafnia-0.2.1.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
-hafnia-0.2.1.dist-info/RECORD,,
+hafnia-0.2.2.dist-info/METADATA,sha256=_4Ghr-Z_WXv1hsUkU3ODff-TxfacJYf22n__-YJ_xW4,19040
+hafnia-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+hafnia-0.2.2.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
+hafnia-0.2.2.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+hafnia-0.2.2.dist-info/RECORD,,
hafnia/helper_testing.py DELETED
@@ -1,108 +0,0 @@
-from inspect import getmembers, isfunction, signature
-from pathlib import Path
-from types import FunctionType
-from typing import Any, Callable, Dict, Union, get_origin
-
-from hafnia import utils
-from hafnia.dataset.dataset_names import FILENAME_ANNOTATIONS_JSONL, DatasetVariant
-from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample
-
-MICRO_DATASETS = {
-    "tiny-dataset": utils.PATH_DATASETS / "tiny-dataset",
-    "coco-2017": utils.PATH_DATASETS / "coco-2017",
-}
-
-
-def get_path_workspace() -> Path:
-    return Path(__file__).parents[2]
-
-
-def get_path_expected_images() -> Path:
-    return get_path_workspace() / "tests" / "data" / "expected_images"
-
-
-def get_path_test_data() -> Path:
-    return get_path_workspace() / "tests" / "data"
-
-
-def get_path_micro_hafnia_dataset_no_check() -> Path:
-    return get_path_test_data() / "micro_test_datasets"
-
-
-def get_path_micro_hafnia_dataset(dataset_name: str, force_update=False) -> Path:
-    import pytest
-
-    if dataset_name not in MICRO_DATASETS:
-        raise ValueError(f"Dataset name '{dataset_name}' is not recognized. Available options: {list(MICRO_DATASETS)}")
-    path_dataset = MICRO_DATASETS[dataset_name]
-
-    path_test_dataset = get_path_micro_hafnia_dataset_no_check() / dataset_name
-    path_test_dataset_annotations = path_test_dataset / FILENAME_ANNOTATIONS_JSONL
-    if path_test_dataset_annotations.exists() and not force_update:
-        return path_test_dataset
-
-    hafnia_dataset = HafniaDataset.from_path(path_dataset / DatasetVariant.SAMPLE.value)
-    hafnia_dataset = hafnia_dataset.select_samples(n_samples=3, seed=42)
-    hafnia_dataset.write(path_test_dataset)
-
-    if force_update:
-        pytest.fail(
-            "Sample image and metadata have been updated using 'force_update=True'. Set 'force_update=False' and rerun the test."
-        )
-    pytest.fail("Missing test sample image. Please rerun the test.")
-    return path_test_dataset
-
-
-def get_sample_micro_hafnia_dataset(dataset_name: str, force_update=False) -> Sample:
-    micro_dataset = get_micro_hafnia_dataset(dataset_name=dataset_name, force_update=force_update)
-    sample_dict = micro_dataset[0]
-    sample = Sample(**sample_dict)
-    return sample
-
-
-def get_micro_hafnia_dataset(dataset_name: str, force_update: bool = False) -> HafniaDataset:
-    path_dataset = get_path_micro_hafnia_dataset(dataset_name=dataset_name, force_update=force_update)
-    hafnia_dataset = HafniaDataset.from_path(path_dataset)
-    return hafnia_dataset
-
-
-def is_hafnia_configured() -> bool:
-    """
-    Check if Hafnia is configured by verifying if the API key is set.
-    """
-    from cli.config import Config
-
-    return Config().is_configured()
-
-
-def is_typing_type(annotation: Any) -> bool:
-    return get_origin(annotation) is not None
-
-
-def annotation_as_string(annotation: Union[type, str]) -> str:
-    """Convert type annotation to string."""
-    if isinstance(annotation, str):
-        return annotation.replace("'", "")
-    if is_typing_type(annotation):  # Is using typing types like List, Dict, etc.
-        return str(annotation).replace("typing.", "")
-    if hasattr(annotation, "__name__"):
-        return annotation.__name__
-    return str(annotation)
-
-
-def get_hafnia_functions_from_module(python_module) -> Dict[str, FunctionType]:
-    def dataset_is_first_arg(func: Callable) -> bool:
-        """
-        Check if the function has 'HafniaDataset' as the first parameter.
-        """
-        func_signature = signature(func)
-        params = func_signature.parameters
-        if len(params) == 0:
-            return False
-        first_argument_type = list(params.values())[0]
-
-        annotation_as_str = annotation_as_string(first_argument_type.annotation)
-        return annotation_as_str == "HafniaDataset"
-
-    functions = {func[0]: func[1] for func in getmembers(python_module, isfunction) if dataset_is_first_arg(func[1])}
-    return functions