hafnia 0.3.0__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. cli/__main__.py +3 -1
  2. cli/config.py +43 -3
  3. cli/keychain.py +88 -0
  4. cli/profile_cmds.py +5 -2
  5. hafnia/__init__.py +1 -1
  6. hafnia/dataset/dataset_helpers.py +9 -2
  7. hafnia/dataset/dataset_names.py +130 -16
  8. hafnia/dataset/dataset_recipe/dataset_recipe.py +49 -37
  9. hafnia/dataset/dataset_recipe/recipe_transforms.py +18 -2
  10. hafnia/dataset/dataset_upload_helper.py +83 -22
  11. hafnia/dataset/format_conversions/format_image_classification_folder.py +110 -0
  12. hafnia/dataset/format_conversions/format_yolo.py +164 -0
  13. hafnia/dataset/format_conversions/torchvision_datasets.py +287 -0
  14. hafnia/dataset/hafnia_dataset.py +396 -96
  15. hafnia/dataset/operations/dataset_stats.py +84 -73
  16. hafnia/dataset/operations/dataset_transformations.py +116 -47
  17. hafnia/dataset/operations/table_transformations.py +135 -17
  18. hafnia/dataset/primitives/bbox.py +25 -14
  19. hafnia/dataset/primitives/bitmask.py +22 -15
  20. hafnia/dataset/primitives/classification.py +16 -8
  21. hafnia/dataset/primitives/point.py +7 -3
  22. hafnia/dataset/primitives/polygon.py +15 -10
  23. hafnia/dataset/primitives/primitive.py +1 -1
  24. hafnia/dataset/primitives/segmentation.py +12 -9
  25. hafnia/experiment/hafnia_logger.py +0 -9
  26. hafnia/platform/dataset_recipe.py +7 -2
  27. hafnia/platform/datasets.py +5 -9
  28. hafnia/platform/download.py +24 -90
  29. hafnia/torch_helpers.py +12 -12
  30. hafnia/utils.py +17 -0
  31. hafnia/visualizations/image_visualizations.py +3 -1
  32. {hafnia-0.3.0.dist-info → hafnia-0.4.1.dist-info}/METADATA +11 -9
  33. hafnia-0.4.1.dist-info/RECORD +57 -0
  34. hafnia-0.3.0.dist-info/RECORD +0 -53
  35. {hafnia-0.3.0.dist-info → hafnia-0.4.1.dist-info}/WHEEL +0 -0
  36. {hafnia-0.3.0.dist-info → hafnia-0.4.1.dist-info}/entry_points.txt +0 -0
  37. {hafnia-0.3.0.dist-info → hafnia-0.4.1.dist-info}/licenses/LICENSE +0 -0
hafnia/platform/download.py CHANGED
@@ -1,85 +1,14 @@
 from pathlib import Path
-from typing import Dict
+from typing import Dict, Optional
 
 import boto3
 from botocore.exceptions import ClientError
-from pydantic import BaseModel, field_validator
-from tqdm import tqdm
+from rich.progress import Progress
 
+from hafnia.dataset.dataset_names import ResourceCredentials
 from hafnia.http import fetch
 from hafnia.log import sys_logger, user_logger
 
-ARN_PREFIX = "arn:aws:s3:::"
-
-
-class ResourceCredentials(BaseModel):
-    access_key: str
-    secret_key: str
-    session_token: str
-    s3_arn: str
-    region: str
-
-    @staticmethod
-    def fix_naming(payload: Dict[str, str]) -> "ResourceCredentials":
-        """
-        The endpoint returns a payload with a key called 's3_path', but it
-        is actually an ARN path (starts with arn:aws:s3::). This method renames it to 's3_arn' for consistency.
-        """
-        if "s3_path" in payload and payload["s3_path"].startswith(ARN_PREFIX):
-            payload["s3_arn"] = payload.pop("s3_path")
-
-        if "region" not in payload:
-            payload["region"] = "eu-west-1"
-        return ResourceCredentials(**payload)
-
-    @field_validator("s3_arn")
-    @classmethod
-    def validate_s3_arn(cls, value: str) -> str:
-        """Validate s3_arn to ensure it starts with 'arn:aws:s3:::'"""
-        if not value.startswith("arn:aws:s3:::"):
-            raise ValueError(f"Invalid S3 ARN: {value}. It should start with 'arn:aws:s3:::'")
-        return value
-
-    def s3_path(self) -> str:
-        """
-        Extracts the S3 path from the ARN.
-        Example: arn:aws:s3:::my-bucket/my-prefix -> my-bucket/my-prefix
-        """
-        return self.s3_arn[len(ARN_PREFIX) :]
-
-    def s3_uri(self) -> str:
-        """
-        Converts the S3 ARN to a URI format.
-        Example: arn:aws:s3:::my-bucket/my-prefix -> s3://my-bucket/my-prefix
-        """
-        return f"s3://{self.s3_path()}"
-
-    def bucket_name(self) -> str:
-        """
-        Extracts the bucket name from the S3 ARN.
-        Example: arn:aws:s3:::my-bucket/my-prefix -> my-bucket
-        """
-        return self.s3_path().split("/")[0]
-
-    def object_key(self) -> str:
-        """
-        Extracts the object key from the S3 ARN.
-        Example: arn:aws:s3:::my-bucket/my-prefix -> my-prefix
-        """
-        return "/".join(self.s3_path().split("/")[1:])
-
-    def aws_credentials(self) -> Dict[str, str]:
-        """
-        Returns the AWS credentials as a dictionary.
-        """
-        environment_vars = {
-            "AWS_ACCESS_KEY_ID": self.access_key,
-            "AWS_SECRET_ACCESS_KEY": self.secret_key,
-            "AWS_SESSION_TOKEN": self.session_token,
-            "AWS_REGION": self.region,
-        }
-        return environment_vars
-
 
 def get_resource_credentials(endpoint: str, api_key: str) -> ResourceCredentials:
     """
@@ -125,13 +54,15 @@ def download_single_object(s3_client, bucket: str, object_key: str, output_dir:
     return local_path
 
 
-def download_resource(resource_url: str, destination: str, api_key: str) -> Dict:
+def download_resource(resource_url: str, destination: str, api_key: str, prefix: Optional[str] = None) -> Dict:
     """
     Downloads either a single file from S3 or all objects under a prefix.
 
     Args:
         resource_url (str): The URL or identifier used to fetch S3 credentials.
         destination (str): Path to local directory where files will be stored.
+        api_key (str): API key for authentication when fetching credentials.
+        prefix (Optional[str]): If provided, only download objects under this prefix.
 
     Returns:
         Dict[str, Any]: A dictionary containing download info, e.g.:
@@ -147,7 +78,7 @@ def download_resource(resource_url: str, destination: str, api_key: str) -> Dict
     res_credentials = get_resource_credentials(resource_url, api_key)
 
     bucket_name = res_credentials.bucket_name()
-    key = res_credentials.object_key()
+    prefix = prefix or res_credentials.object_key()
 
     output_path = Path(destination)
     output_path.mkdir(parents=True, exist_ok=True)
@@ -159,29 +90,32 @@ def download_resource(resource_url: str, destination: str, api_key: str) -> Dict
     )
     downloaded_files = []
     try:
-        s3_client.head_object(Bucket=bucket_name, Key=key)
-        local_file = download_single_object(s3_client, bucket_name, key, output_path)
+        s3_client.head_object(Bucket=bucket_name, Key=prefix)
+        local_file = download_single_object(s3_client, bucket_name, prefix, output_path)
         downloaded_files.append(str(local_file))
         user_logger.info(f"Downloaded single file: {local_file}")
 
     except ClientError as e:
         error_code = e.response.get("Error", {}).get("Code")
         if error_code == "404":
-            sys_logger.debug(f"Object '{key}' not found; trying as a prefix.")
-            response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=key)
+            sys_logger.debug(f"Object '{prefix}' not found; trying as a prefix.")
+            response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=prefix)
             contents = response.get("Contents", [])
 
             if not contents:
-                raise ValueError(f"No objects found for prefix '{key}' in bucket '{bucket_name}'")
-            pbar = tqdm(contents)
-            for obj in pbar:
-                sub_key = obj["Key"]
-                size_mb = obj.get("Size", 0) / 1024 / 1024
-                pbar.set_description(f"{sub_key} ({size_mb:.2f} MB)")
-                local_file = download_single_object(s3_client, bucket_name, sub_key, output_path)
-                downloaded_files.append(local_file.as_posix())
-
-            user_logger.info(f"Downloaded folder/prefix '{key}' with {len(downloaded_files)} object(s).")
+                raise ValueError(f"No objects found for prefix '{prefix}' in bucket '{bucket_name}'")
+
+            with Progress() as progress:
+                task = progress.add_task("Downloading files", total=len(contents))
+                for obj in contents:
+                    sub_key = obj["Key"]
+                    size_mb = obj.get("Size", 0) / 1024 / 1024
+                    progress.update(task, description=f"Downloading {sub_key} ({size_mb:.2f} MB)")
+                    local_file = download_single_object(s3_client, bucket_name, sub_key, output_path)
+                    downloaded_files.append(local_file.as_posix())
+                    progress.advance(task)
+
+            user_logger.info(f"Downloaded folder/prefix '{prefix}' with {len(downloaded_files)} object(s).")
        else:
            user_logger.error(f"Error checking object or prefix: {e}")
            raise RuntimeError(f"Failed to check or download S3 resource: {e}") from e
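
The new optional `prefix` argument threads through `download_resource` so callers can narrow a download to a subset of objects; when omitted, it falls back to the credentials' object key as before. A minimal sketch of the updated call (the endpoint URL, API key, and prefix below are placeholders, not values from the package):

```python
from hafnia.platform.download import download_resource

# Default behaviour: the prefix falls back to the credentials' object key.
info = download_resource(
    resource_url="https://api.example.com/resource/123",  # placeholder
    destination="./data",
    api_key="MY_API_KEY",  # placeholder
)

# New in 0.4.x: restrict the download to objects under an explicit prefix.
info = download_resource(
    resource_url="https://api.example.com/resource/123",
    destination="./data",
    api_key="MY_API_KEY",
    prefix="annotations/",  # illustrative prefix
)
```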
hafnia/torch_helpers.py CHANGED
@@ -9,7 +9,7 @@ from torchvision import tv_tensors
 from torchvision import utils as tv_utils
 from torchvision.transforms import v2
 
-from hafnia.dataset.dataset_names import FieldName
+from hafnia.dataset.dataset_names import PrimitiveField
 from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample
 from hafnia.dataset.primitives import (
     PRIMITIVE_COLUMN_NAMES,
@@ -68,8 +68,8 @@ class TorchvisionDataset(torch.utils.data.Dataset):
         for task_name, classifications in class_tasks.items():
             assert len(classifications) == 1, "Expected exactly one classification task per sample"
             target_flat[f"{Classification.column_name()}.{task_name}"] = {
-                FieldName.CLASS_IDX: classifications[0].class_idx,
-                FieldName.CLASS_NAME: classifications[0].class_name,
+                PrimitiveField.CLASS_IDX: classifications[0].class_idx,
+                PrimitiveField.CLASS_NAME: classifications[0].class_name,
             }
 
         bbox_tasks: Dict[str, List[Bbox]] = get_primitives_per_task_name_for_primitive(sample, Bbox)
@@ -77,8 +77,8 @@ class TorchvisionDataset(torch.utils.data.Dataset):
             bboxes_list = [bbox.to_coco(image_height=h, image_width=w) for bbox in bboxes]
             bboxes_tensor = torch.as_tensor(bboxes_list).reshape(-1, 4)
             target_flat[f"{Bbox.column_name()}.{task_name}"] = {
-                FieldName.CLASS_IDX: [bbox.class_idx for bbox in bboxes],
-                FieldName.CLASS_NAME: [bbox.class_name for bbox in bboxes],
+                PrimitiveField.CLASS_IDX: [bbox.class_idx for bbox in bboxes],
+                PrimitiveField.CLASS_NAME: [bbox.class_name for bbox in bboxes],
                 "bbox": tv_tensors.BoundingBoxes(bboxes_tensor, format="XYWH", canvas_size=(h, w)),
             }
 
@@ -86,8 +86,8 @@ class TorchvisionDataset(torch.utils.data.Dataset):
         for task_name, bitmasks in bitmask_tasks.items():
             bitmasks_np = np.array([bitmask.to_mask(img_height=h, img_width=w) for bitmask in bitmasks])
             target_flat[f"{Bitmask.column_name()}.{task_name}"] = {
-                FieldName.CLASS_IDX: [bitmask.class_idx for bitmask in bitmasks],
-                FieldName.CLASS_NAME: [bitmask.class_name for bitmask in bitmasks],
+                PrimitiveField.CLASS_IDX: [bitmask.class_idx for bitmask in bitmasks],
+                PrimitiveField.CLASS_NAME: [bitmask.class_name for bitmask in bitmasks],
                 "mask": tv_tensors.Mask(bitmasks_np),
             }
 
@@ -161,7 +161,7 @@ def draw_image_and_targets(
     if Bitmask.column_name() in targets:
         primitive_annotations = targets[Bitmask.column_name()]
         for task_name, task_annotations in primitive_annotations.items():
-            colors = [class_color_by_name(class_name) for class_name in task_annotations[FieldName.CLASS_NAME]]
+            colors = [class_color_by_name(class_name) for class_name in task_annotations[PrimitiveField.CLASS_NAME]]
             visualize_image = tv_utils.draw_segmentation_masks(
                 image=visualize_image,
                 masks=task_annotations["mask"],
@@ -172,11 +172,11 @@ def draw_image_and_targets(
         primitive_annotations = targets[Bbox.column_name()]
         for task_name, task_annotations in primitive_annotations.items():
             bboxes = torchvision.ops.box_convert(task_annotations["bbox"], in_fmt="xywh", out_fmt="xyxy")
-            colors = [class_color_by_name(class_name) for class_name in task_annotations[FieldName.CLASS_NAME]]
+            colors = [class_color_by_name(class_name) for class_name in task_annotations[PrimitiveField.CLASS_NAME]]
             visualize_image = tv_utils.draw_bounding_boxes(
                 image=visualize_image,
                 boxes=bboxes,
-                labels=task_annotations[FieldName.CLASS_NAME],
+                labels=task_annotations[PrimitiveField.CLASS_NAME],
                 width=2,
                 colors=colors,
             )
@@ -187,9 +187,9 @@ def draw_image_and_targets(
         text_labels = []
         for task_name, task_annotations in primitive_annotations.items():
             if task_name == Classification.default_task_name():
-                text_label = task_annotations[FieldName.CLASS_NAME]
+                text_label = task_annotations[PrimitiveField.CLASS_NAME]
             else:
-                text_label = f"{task_name}: {task_annotations[FieldName.CLASS_NAME]}"
+                text_label = f"{task_name}: {task_annotations[PrimitiveField.CLASS_NAME]}"
             text_labels.append(text_label)
         visualize_image = draw_image_classification(visualize_image, text_labels)
     return visualize_image
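
The `FieldName` → `PrimitiveField` rename also affects downstream code that indexes the target dictionaries built above. A minimal sketch of the updated key access, using a hand-built stand-in for one task entry of a `TorchvisionDataset` target (the class values are illustrative):

```python
from hafnia.dataset.dataset_names import PrimitiveField

# Illustrative stand-in for one task entry of a TorchvisionDataset target.
task_annotations = {
    PrimitiveField.CLASS_IDX: [0, 2],
    PrimitiveField.CLASS_NAME: ["car", "truck"],
}

# 0.3.0 spelled these keys FieldName.CLASS_IDX / FieldName.CLASS_NAME.
labels = task_annotations[PrimitiveField.CLASS_NAME]
indices = task_annotations[PrimitiveField.CLASS_IDX]
```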
hafnia/utils.py CHANGED
@@ -63,6 +63,18 @@ def timed(label: str):
     return decorator
 
 
+def get_path_hafnia_cache() -> Path:
+    return Path.home() / "hafnia"
+
+
+def get_path_torchvision_downloads() -> Path:
+    return get_path_hafnia_cache() / "torchvision_downloads"
+
+
+def get_path_hafnia_conversions() -> Path:
+    return get_path_hafnia_cache() / "hafnia_conversions"
+
+
 def now_as_str() -> str:
     """Get the current date and time as a string."""
     return datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
@@ -205,3 +217,8 @@ def remove_duplicates_preserve_order(seq: Iterable) -> List:
     Remove duplicates from a list while preserving the order of elements.
     """
     return list(more_itertools.unique_everseen(seq))
+
+
+def is_image_file(file_path: Path) -> bool:
+    image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".tiff", ".tif", ".gif")
+    return file_path.suffix.lower() in image_extensions
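
The added helpers root a shared cache under `~/hafnia`, and `is_image_file` filters by file suffix. A short usage sketch combining them (the directory scan itself is illustrative):

```python
from hafnia.utils import get_path_torchvision_downloads, is_image_file

# Resolves to ~/hafnia/torchvision_downloads via get_path_hafnia_cache().
downloads_dir = get_path_torchvision_downloads()
downloads_dir.mkdir(parents=True, exist_ok=True)

# Keep only files whose suffix matches the supported image extensions.
images = [p for p in downloads_dir.rglob("*") if p.is_file() and is_image_file(p)]
```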
hafnia/visualizations/image_visualizations.py CHANGED
@@ -193,7 +193,9 @@ def save_dataset_sample_set_visualizations(
         image = draw_annotations(image, annotations, draw_settings=draw_settings)
 
         pil_image = Image.fromarray(image)
-        path_image = path_output_folder / Path(sample.file_name).name
+        if sample.file_path is None:
+            raise ValueError("Sample has no file_path defined.")
+        path_image = path_output_folder / Path(sample.file_path).name
         pil_image.save(path_image)
         paths.append(path_image)
 
{hafnia-0.3.0.dist-info → hafnia-0.4.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: hafnia
-Version: 0.3.0
+Version: 0.4.1
 Summary: Python SDK for communication with Hafnia platform.
 Author-email: Milestone Systems <hafniaplatform@milestone.dk>
 License-File: LICENSE
@@ -9,7 +9,9 @@ Requires-Dist: boto3>=1.35.91
 Requires-Dist: click>=8.1.8
 Requires-Dist: emoji>=2.14.1
 Requires-Dist: flatten-dict>=0.4.2
-Requires-Dist: mlflow>=3.2.0
+Requires-Dist: keyring>=25.6.0
+Requires-Dist: mcp==1.16.0
+Requires-Dist: mlflow>=3.4.0
 Requires-Dist: more-itertools>=10.7.0
 Requires-Dist: opencv-python-headless>=4.11.0.86
 Requires-Dist: pathspec>=0.12.1
@@ -22,7 +24,6 @@ Requires-Dist: rich>=13.9.4
 Requires-Dist: s5cmd>=0.2.0
 Requires-Dist: sagemaker-mlflow>=0.1.0
 Requires-Dist: seedir>=0.5.0
-Requires-Dist: tqdm>=4.67.1
 Requires-Dist: xxhash>=3.5.0
 Description-Content-Type: text/markdown
 
@@ -80,6 +81,7 @@ Copy the key and save it for later use.
 Hafnia API Key: # Pass your HAFNIA API key
 Hafnia Platform URL [https://api.mdi.milestonesys.com]: # Press [Enter]
 ```
+
 1. Download `mnist` from terminal to verify that your configuration is working.
 
 ```bash
@@ -91,7 +93,7 @@ With Hafnia configured on your local machine, it is now possible to download
 and explore the dataset sample with a python script:
 
 ```python
-from hafnia.data import load_dataset, get_dataset_path
+from hafnia.data import get_dataset_path
 from hafnia.dataset.hafnia_dataset import HafniaDataset
 
 # To download the sample dataset use:
@@ -156,7 +158,7 @@ and `dataset.samples` with annotations as a polars DataFrame
 print(dataset.samples.head(2))
 shape: (2, 14)
 ┌──────────────┬─────────────────────────────────┬────────┬───────┬───┬─────────────────────────────────┬──────────┬──────────┬─────────────────────────────────┐
-│ sample_index ┆ file_name ┆ height ┆ width ┆ … ┆ objects ┆ bitmasks ┆ polygons ┆ meta │
+│ sample_index ┆ file_name ┆ height ┆ width ┆ … ┆ bboxes ┆ bitmasks ┆ polygons ┆ meta │
 │ --- ┆ --- ┆ --- ┆ --- ┆ ┆ --- ┆ --- ┆ --- ┆ --- │
 │ u32 ┆ str ┆ i64 ┆ i64 ┆ ┆ list[struct[11]] ┆ null ┆ null ┆ struct[5] │
 ╞══════════════╪═════════════════════════════════╪════════╪═══════╪═══╪═════════════════════════════════╪══════════╪══════════╪═════════════════════════════════╡
@@ -216,7 +218,7 @@ sample_dict = dataset[0]
 
 for sample_dict in dataset:
     sample = Sample(**sample_dict)
-    print(sample.sample_id, sample.objects)
+    print(sample.sample_id, sample.bboxes)
     break
 ```
 Note that it is possible to create a `Sample` object from the sample dictionary.
@@ -360,7 +362,7 @@ logger.log_scalar("validation/loss", value=0.1, step=100)
 logger.log_metric("validation/accuracy", value=0.95, step=100)
 ```
 
-Similar to `load_dataset`, the tracker behaves differently when running locally or in the cloud.
+The tracker behaves differently when running locally or in the cloud.
 Locally, experiment data is stored in a local folder `.data/experiments/{DATE_TIME}`.
 
 In the cloud, the experiment data will be available in the Hafnia platform under
@@ -384,7 +386,7 @@ and datasets available in the data library.
 
 ```python
 # Load Hugging Face dataset
-dataset_splits = load_dataset("midwest-vehicle-detection")
+dataset_splits = HafniaDataset.from_name("midwest-vehicle-detection")
 
 # Define transforms
 train_transforms = v2.Compose(
@@ -419,7 +421,7 @@ pil_image.save("visualized_labels.png")
 
 # Create DataLoaders - using TorchVisionCollateFn
 collate_fn = torch_helpers.TorchVisionCollateFn(
-    skip_stacking=["objects.bbox", "objects.class_idx"]
+    skip_stacking=["bboxes.bbox", "bboxes.class_idx"]
 )
 train_loader = DataLoader(train_dataset, batch_size=8, shuffle=True, collate_fn=collate_fn)
 ```
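
The README edits above track two API migrations: `load_dataset` is replaced by `HafniaDataset.from_name`, and the `objects` column/field is renamed to `bboxes`. A short migration sketch mirroring the README snippets:

```python
from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample

# 0.3.0: dataset_splits = load_dataset("midwest-vehicle-detection")
dataset = HafniaDataset.from_name("midwest-vehicle-detection")

for sample_dict in dataset:
    sample = Sample(**sample_dict)
    # 0.3.0: sample.objects
    print(sample.sample_id, sample.bboxes)
    break
```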
hafnia-0.4.1.dist-info/RECORD ADDED
@@ -0,0 +1,57 @@
+cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cli/__main__.py,sha256=1IOe1Ewd3QMeuzUMfoWv663_jQpd7_pTn_R1sDdEmbI,1747
+cli/config.py,sha256=xCLdgqBqFN3EInzJ1AO5Nj1e35jOjcc_jaukaodrrMc,7681
+cli/consts.py,sha256=uCpYX44NCu_Zvte0QwChunxOo-qqhcaJRSYDAIsoJ8A,972
+cli/dataset_cmds.py,sha256=KOPYdAJ1SyzTMlEi_J-70vSGIJ5acHPGIPOCKVIdlQ4,1418
+cli/dataset_recipe_cmds.py,sha256=O1Pd-VvFFsmZ-nE1Mh6sCC9x45ztCJEpPCZK93qz_HQ,2887
+cli/experiment_cmds.py,sha256=vc7J9JJog4EvRdgkpoMvr0kceb0QF_Rk7mn6H2KNvFE,7963
+cli/keychain.py,sha256=bNyjjULVQu7kV338wUC65UvbCwmSGOmEjKWPLIQjT0k,2555
+cli/profile_cmds.py,sha256=3OwpomV6Wb21ZG43xrwhvoB5S4IN1IqmVCxs-MItAho,3463
+cli/runc_cmds.py,sha256=qV-LOwbFlbegH8XSELOo4h_2ajFAzdB5LtuVKKoRq8Y,5009
+cli/trainer_package_cmds.py,sha256=nL7yTtR41BKzo1DWZdBdpRXGPZZIbmBe0byHAi6_n2c,2312
+hafnia/__init__.py,sha256=0qpjWfVbcfKzLSnfUW6RdclSGkesMQRFS-n_aTJJoSE,179
+hafnia/http.py,sha256=bjXbV_3uKbBdudqMdYtnsMttUAsNRMsetYZ4F2xXlEs,3635
+hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
+hafnia/torch_helpers.py,sha256=8jbXm6yk8joXFyuwxopaIAjvjMbEZbFT5qSswQSbpdg,11665
+hafnia/utils.py,sha256=ieNNL8IK3IqDsRf7NJnF_NU3eMLi8d3tSQzOgW5sVOw,7319
+hafnia/data/__init__.py,sha256=o9QjiGbEcNa6r-qDmwwmxPXf-1UitNl5-WxFNcujqsg,111
+hafnia/data/factory.py,sha256=kHkvOtBUbwaShZBGf1kZzocDJBn_1dHHLrQxnUpJmfY,778
+hafnia/dataset/dataset_helpers.py,sha256=0GbS6PfaiYBulDKRCbd0miN5LHaUIp-XzGt_wZay8xs,5044
+hafnia/dataset/dataset_names.py,sha256=jxK1XWLu8jzYRkIw-xw0x2Vf1D3HPdphm7yGHtYx-Os,5883
+hafnia/dataset/dataset_upload_helper.py,sha256=vTO2-gvo7cYsK0967N8zGoFKDDXJcrfRim_532wUiqc,30307
+hafnia/dataset/hafnia_dataset.py,sha256=O4ZUA6Y7EidlYn6R-3kILYGmqh3yTTiaCTZkPwolaL0,49560
+hafnia/dataset/license_types.py,sha256=xpanTfui1pGG76mGQ9r6EywcUe1scI_zullEpmCO4GI,2190
+hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=mwNL7VMhbEFHC2p6L_OO7os7bVVb05YFKZxvzQ_BySk,19059
+hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=j3Oiytt3LI2rCaJid7Y44oT9MXvlZVqvZanngMebIWg,3088
+hafnia/dataset/dataset_recipe/recipe_types.py,sha256=AcrG6gpRt3Igl-CCJ60uyh-WkfI1NCnQ55M8yClSI9Q,5328
+hafnia/dataset/format_conversions/format_image_classification_folder.py,sha256=V4W0gmMrZTEG81rmlNIa2oYUtQuci4LU3AiUHJg63iw,4370
+hafnia/dataset/format_conversions/format_yolo.py,sha256=cuGrgk5gk0dtVT2p0yLdCIZDrJUlrvbT3Ine2LmS9VA,6507
+hafnia/dataset/format_conversions/torchvision_datasets.py,sha256=-mt0xg9toArKhb2sPeDTf8nLsC4odXWCtw1avhJmZG8,11973
+hafnia/dataset/operations/dataset_stats.py,sha256=wSiwcl8nHFpg_9kCcQuuZW63IHbH5MGYBDv1wU4ffN0,11915
+hafnia/dataset/operations/dataset_transformations.py,sha256=pgO81Twr2B6BXrq6_HTVB27s9F9BC_HnmesX4lQ6LGY,19538
+hafnia/dataset/operations/table_transformations.py,sha256=jNQcc55l6vyGcEHJN__MPDjWtPgBOy76BwI-8W3bvyk,14229
+hafnia/dataset/primitives/__init__.py,sha256=xFLJ3R7gpbuQnNJuFhuu836L3nicwoaY5aHkqk7Bbr8,927
+hafnia/dataset/primitives/bbox.py,sha256=sju2hSp3heTfcsuY0iuViWvrv2jO-6uaBVGjFfDJILg,6393
+hafnia/dataset/primitives/bitmask.py,sha256=mgm6RJM4vzT4czEl3PgRoa-ZgN4q6wxyXq45v9k0W1g,7965
+hafnia/dataset/primitives/classification.py,sha256=cU0hoHr3ISPdXphCx5L_9th39LVJ3LMfq9FQI7Ca1HQ,2632
+hafnia/dataset/primitives/point.py,sha256=VzCNLTQOPA6wyJVVKddZHGhltkep6V_B7pg5pk7rd9Y,879
+hafnia/dataset/primitives/polygon.py,sha256=w05-O-UL1bNStZnhlfLBwgbcSlYCS7mAL4rEcA3ura4,4503
+hafnia/dataset/primitives/primitive.py,sha256=IcaOYa3vB2uKPclK2dQsULuXOYQEurSjXIlxlSCzpuE,1252
+hafnia/dataset/primitives/segmentation.py,sha256=qUOISz-4uX5cIRrkqun0ix_E0HbUZYWU44Rt_FyVpuY,1980
+hafnia/dataset/primitives/utils.py,sha256=3gT1as-xXEj8CamoIuBb9gQwUN9Ae9qnqtqF_uEe0zo,1993
+hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
+hafnia/experiment/hafnia_logger.py,sha256=Qia8orPu7PZk12geB97alJaq6AjxYedDmKri_tmyldo,10408
+hafnia/platform/__init__.py,sha256=L_Q7CNpsJ0HMNPy_rLlLK5RhmuCU7IF4BchxKv6amYc,782
+hafnia/platform/builder.py,sha256=kUEuj5-qtL1uk5v2tUvOCREn5yV-G4Fr6F31haIAb5E,5808
+hafnia/platform/dataset_recipe.py,sha256=ybfSSHVPG0eFUbzg_1McezPSOtMoDZEg7l6rFYndtb4,3857
+hafnia/platform/datasets.py,sha256=APtG0evcO7gxFbk8N7sWJPxttYLQTJ4ZkZHLuY-rRN0,9000
+hafnia/platform/download.py,sha256=A6McEvRw5KmEAn3kx3nNnjxQm1Z8ZSAhn_NFgrM66_8,5020
+hafnia/platform/experiment.py,sha256=SrEH0nuwwBXf1Iu4diB1BEPqL-TxW3aQkZWBbM1-tY0,1846
+hafnia/platform/trainer_package.py,sha256=w6JC7o-279ujcwtNTbUaQ9AnPcYRPPbD8EACa6XyUHA,2206
+hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
+hafnia/visualizations/image_visualizations.py,sha256=0xsDQqeNZ-9Oat4u5NZwXLXoh4yZ5DisczkZNHTRwkk,7457
+hafnia-0.4.1.dist-info/METADATA,sha256=ynd2TZrdVHzkDMmOZ3xF2ozYoovFwKQMCK60ydKbdYQ,19233
+hafnia-0.4.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+hafnia-0.4.1.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
+hafnia-0.4.1.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+hafnia-0.4.1.dist-info/RECORD,,
hafnia-0.3.0.dist-info/RECORD DELETED
@@ -1,53 +0,0 @@
-cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cli/__main__.py,sha256=04iqem4UAw6zSVujBt8ER0HQbEhno6rgmARmECUwRPA,1634
-cli/config.py,sha256=m9jrka0Rnh8_mnxnU4S_JgoYA7l1pvp2o2a7KgkYInY,6115
-cli/consts.py,sha256=uCpYX44NCu_Zvte0QwChunxOo-qqhcaJRSYDAIsoJ8A,972
-cli/dataset_cmds.py,sha256=KOPYdAJ1SyzTMlEi_J-70vSGIJ5acHPGIPOCKVIdlQ4,1418
-cli/dataset_recipe_cmds.py,sha256=O1Pd-VvFFsmZ-nE1Mh6sCC9x45ztCJEpPCZK93qz_HQ,2887
-cli/experiment_cmds.py,sha256=vc7J9JJog4EvRdgkpoMvr0kceb0QF_Rk7mn6H2KNvFE,7963
-cli/profile_cmds.py,sha256=eRo4FtPvXPG5LK2fINVFMsBd_HpkNygY468essBb57o,3285
-cli/runc_cmds.py,sha256=qV-LOwbFlbegH8XSELOo4h_2ajFAzdB5LtuVKKoRq8Y,5009
-cli/trainer_package_cmds.py,sha256=nL7yTtR41BKzo1DWZdBdpRXGPZZIbmBe0byHAi6_n2c,2312
-hafnia/__init__.py,sha256=xXUwwQ18P1YqmsZkvlkavaDqq8DbrrHf38pv5_JyV_M,179
-hafnia/http.py,sha256=bjXbV_3uKbBdudqMdYtnsMttUAsNRMsetYZ4F2xXlEs,3635
-hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
-hafnia/torch_helpers.py,sha256=ho65B0WIu_SjbaKPRL4wabDNrnVumWH8QSXVH4r7NAY,11605
-hafnia/utils.py,sha256=x2dPsiO0EPP6YnpQX4HBtbl29UN9hV4zHvOnDa9xYTg,6850
-hafnia/data/__init__.py,sha256=o9QjiGbEcNa6r-qDmwwmxPXf-1UitNl5-WxFNcujqsg,111
-hafnia/data/factory.py,sha256=kHkvOtBUbwaShZBGf1kZzocDJBn_1dHHLrQxnUpJmfY,778
-hafnia/dataset/dataset_helpers.py,sha256=HHm4KG_-upIEmxHWeSJO4m8RmrCUxKgseRNs4WD6kUQ,4781
-hafnia/dataset/dataset_names.py,sha256=wdLoH48ph1PjVpUYPEDnAfQYVDCvYGQCyqFmR0-ixDU,2286
-hafnia/dataset/dataset_upload_helper.py,sha256=QdJl92aKm3czpkgXt3G_AgwBjyOV9R4iKn4bjjlPyXA,28007
-hafnia/dataset/hafnia_dataset.py,sha256=XiCHv-ZSzjA4CImpyMevJ2qIJlFcKBLzwNB_HMuQGHo,36841
-hafnia/dataset/license_types.py,sha256=xpanTfui1pGG76mGQ9r6EywcUe1scI_zullEpmCO4GI,2190
-hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=A54N5qEPcM0Yswg26qaOvnr-uj3xUq-KbOInJayzbEI,19269
-hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=6tY4jG1Bzx15-BXp5RavjtVRWcms3o9svzfUO9-m9Ps,2154
-hafnia/dataset/dataset_recipe/recipe_types.py,sha256=AcrG6gpRt3Igl-CCJ60uyh-WkfI1NCnQ55M8yClSI9Q,5328
-hafnia/dataset/operations/dataset_stats.py,sha256=V2lCmTfIbJ-LeAyu1fZf0_1jSa4MMfP0psuXi77gOYk,11630
-hafnia/dataset/operations/dataset_transformations.py,sha256=JVxfw4fV51eGB7ekYfLB5FKQql6l1whTqRY_BwfX0Us,16593
-hafnia/dataset/operations/table_transformations.py,sha256=6LFQfFRAb1B25cS3QeliRzj26EgVyub5kE-6Sab5Ymo,9250
-hafnia/dataset/primitives/__init__.py,sha256=xFLJ3R7gpbuQnNJuFhuu836L3nicwoaY5aHkqk7Bbr8,927
-hafnia/dataset/primitives/bbox.py,sha256=HXYYy5BLNZwh-bO7aiAWg3z0OurUev8ISa-vYey8b8A,6055
-hafnia/dataset/primitives/bitmask.py,sha256=mq_wchMqGupJDc-a-mJh9uBO_mjHcXpLH49g591doAM,7619
-hafnia/dataset/primitives/classification.py,sha256=ri4lTtS5gBQX13vF07YbeN11rKl1CJaKeTIzCmoT9Iw,2363
-hafnia/dataset/primitives/point.py,sha256=JCRwb-E4sDafodkg6wqyuAS1Yj-yaJbwiD8aB69_Ros,635
-hafnia/dataset/primitives/polygon.py,sha256=vhPrYHv6TqQZMTAyv9r3NV8Hu6YRSSD0srB0wOCIwQ4,4289
-hafnia/dataset/primitives/primitive.py,sha256=7jxcyFADVGf95pjeQHEOqAnR9eucLpxA2h8Blz3ppXI,1253
-hafnia/dataset/primitives/segmentation.py,sha256=jUMjOmYr9j4An3YSCw5CJC1W8ihXAbus3CXaTOpc7Xw,1905
-hafnia/dataset/primitives/utils.py,sha256=3gT1as-xXEj8CamoIuBb9gQwUN9Ae9qnqtqF_uEe0zo,1993
-hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
-hafnia/experiment/hafnia_logger.py,sha256=k8UIK3mXNyxpmfFndTD_u8pFA6TKbjmR-9EJ6JjvY-U,10729
-hafnia/platform/__init__.py,sha256=L_Q7CNpsJ0HMNPy_rLlLK5RhmuCU7IF4BchxKv6amYc,782
-hafnia/platform/builder.py,sha256=kUEuj5-qtL1uk5v2tUvOCREn5yV-G4Fr6F31haIAb5E,5808
-hafnia/platform/dataset_recipe.py,sha256=-scelPECr1eLn5tB_jFm5bJdygw_guktrWfoVquSu2A,3790
-hafnia/platform/datasets.py,sha256=orWdZUwrNk7BTJFJP_8TLhf0CB5PYopZug4u36w08FQ,9018
-hafnia/platform/download.py,sha256=Tzycmb6I6LmwACDHmJmR1zsrOX6OLgYWRbfkCXKEAdQ,6903
-hafnia/platform/experiment.py,sha256=SrEH0nuwwBXf1Iu4diB1BEPqL-TxW3aQkZWBbM1-tY0,1846
-hafnia/platform/trainer_package.py,sha256=w6JC7o-279ujcwtNTbUaQ9AnPcYRPPbD8EACa6XyUHA,2206
-hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
-hafnia/visualizations/image_visualizations.py,sha256=6mPnRAc0dMPZCUCTqnHjgtAhQdVL_QrtyToXUuJjwxQ,7355
-hafnia-0.3.0.dist-info/METADATA,sha256=nP2mgz_AtfOAq2OX5NgLq6lJG3fTLOog8AbKLGWYSyM,19235
-hafnia-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-hafnia-0.3.0.dist-info/entry_points.txt,sha256=FCJVIQ8GP2VE9I3eeGVF5eLxVDNW_01pOJCpG_CGnMM,45
-hafnia-0.3.0.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
-hafnia-0.3.0.dist-info/RECORD,,