hafnia 0.5.1__tar.gz → 0.5.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (182)
  1. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/build.yaml +3 -3
  2. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/ci_cd.yaml +1 -1
  3. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/lint.yaml +2 -2
  4. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/publish_docker.yaml +3 -3
  5. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/tests.yaml +2 -2
  6. {hafnia-0.5.1 → hafnia-0.5.3}/.trivyignore +4 -1
  7. {hafnia-0.5.1 → hafnia-0.5.3}/.vscode/launch.json +17 -2
  8. {hafnia-0.5.1 → hafnia-0.5.3}/PKG-INFO +15 -10
  9. {hafnia-0.5.1 → hafnia-0.5.3}/README.md +13 -9
  10. {hafnia-0.5.1 → hafnia-0.5.3}/examples/example_dataset_recipe.py +1 -1
  11. {hafnia-0.5.1 → hafnia-0.5.3}/examples/example_torchvision_dataloader.py +1 -1
  12. {hafnia-0.5.1 → hafnia-0.5.3}/pyproject.toml +4 -2
  13. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_recipe/dataset_recipe.py +56 -101
  14. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/hafnia_dataset.py +9 -49
  15. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/hafnia_dataset_types.py +1 -1
  16. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/operations/dataset_stats.py +2 -1
  17. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/classification.py +1 -1
  18. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/segmentation.py +1 -1
  19. hafnia-0.5.3/src/hafnia/experiment/command_builder.py +686 -0
  20. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/dataset_recipe.py +30 -18
  21. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/datasets.py +8 -4
  22. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/experiment.py +12 -8
  23. hafnia-0.5.3/src/hafnia/platform/trainer_package.py +103 -0
  24. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/utils.py +7 -5
  25. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/dataset_recipe_cmds.py +4 -8
  26. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/experiment_cmds.py +15 -25
  27. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/profile_cmds.py +8 -3
  28. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/trainer_package_cmds.py +52 -4
  29. {hafnia-0.5.1 → hafnia-0.5.3}/tests/conftest.py +1 -1
  30. {hafnia-0.5.1 → hafnia-0.5.3}/tests/helper_testing.py +1 -2
  31. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_cli_integration.py +19 -3
  32. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_dataset_recipes_with_platform.py +4 -5
  33. hafnia-0.5.3/tests/integration/test_dataset_versioning.py +47 -0
  34. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_samples.py +2 -45
  35. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/dataset_recipe/test_dataset_recipes.py +65 -81
  36. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_colors.py +1 -1
  37. hafnia-0.5.3/tests/unit/test_command_builder.py +467 -0
  38. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/test_utils.py +3 -3
  39. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/test_visualizations.py +1 -1
  40. {hafnia-0.5.1 → hafnia-0.5.3}/uv.lock +708 -502
  41. hafnia-0.5.1/src/hafnia/data/__init__.py +0 -3
  42. hafnia-0.5.1/src/hafnia/data/factory.py +0 -22
  43. hafnia-0.5.1/src/hafnia/platform/trainer_package.py +0 -57
  44. hafnia-0.5.1/tests/unit/dataset/dataset_recipe/test_dataset_recipe_helpers.py +0 -129
  45. {hafnia-0.5.1 → hafnia-0.5.3}/.devcontainer/devcontainer.json +0 -0
  46. {hafnia-0.5.1 → hafnia-0.5.3}/.devcontainer/hooks/post_create +0 -0
  47. {hafnia-0.5.1 → hafnia-0.5.3}/.github/dependabot.yaml +0 -0
  48. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/Dockerfile +0 -0
  49. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/check_release.yaml +0 -0
  50. {hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/publish_pypi.yaml +0 -0
  51. {hafnia-0.5.1 → hafnia-0.5.3}/.gitignore +0 -0
  52. {hafnia-0.5.1 → hafnia-0.5.3}/.pre-commit-config.yaml +0 -0
  53. {hafnia-0.5.1 → hafnia-0.5.3}/.python-version +0 -0
  54. {hafnia-0.5.1 → hafnia-0.5.3}/.vscode/extensions.json +0 -0
  55. {hafnia-0.5.1 → hafnia-0.5.3}/.vscode/settings.json +0 -0
  56. {hafnia-0.5.1 → hafnia-0.5.3}/LICENSE +0 -0
  57. {hafnia-0.5.1 → hafnia-0.5.3}/docs/cli.md +0 -0
  58. {hafnia-0.5.1 → hafnia-0.5.3}/docs/release.md +0 -0
  59. {hafnia-0.5.1 → hafnia-0.5.3}/examples/example_hafnia_dataset.py +0 -0
  60. {hafnia-0.5.1 → hafnia-0.5.3}/examples/example_logger.py +0 -0
  61. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/__init__.py +0 -0
  62. {hafnia-0.5.1/src/hafnia/visualizations → hafnia-0.5.3/src/hafnia/dataset}/colors.py +0 -0
  63. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_details_uploader.py +1 -1
  64. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_helpers.py +0 -0
  65. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_names.py +0 -0
  66. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_recipe/recipe_transforms.py +0 -0
  67. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_recipe/recipe_types.py +0 -0
  68. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/format_conversions/format_coco.py +0 -0
  69. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/format_conversions/format_helpers.py +0 -0
  70. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/format_conversions/format_image_classification_folder.py +0 -0
  71. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/format_conversions/format_yolo.py +0 -0
  72. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/format_conversions/torchvision_datasets.py +0 -0
  73. {hafnia-0.5.1/src/hafnia/visualizations → hafnia-0.5.3/src/hafnia/dataset}/image_visualizations.py +0 -0
  74. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/license_types.py +0 -0
  75. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/operations/dataset_s3_storage.py +0 -0
  76. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/operations/dataset_transformations.py +0 -0
  77. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/operations/table_transformations.py +0 -0
  78. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/__init__.py +0 -0
  79. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/bbox.py +0 -0
  80. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/bitmask.py +0 -0
  81. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/point.py +0 -0
  82. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/polygon.py +0 -0
  83. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/primitive.py +0 -0
  84. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/utils.py +0 -0
  85. {hafnia-0.5.1/src/hafnia → hafnia-0.5.3/src/hafnia/dataset}/torch_helpers.py +0 -0
  86. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/experiment/__init__.py +0 -0
  87. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/experiment/hafnia_logger.py +0 -0
  88. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/http.py +0 -0
  89. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/log.py +0 -0
  90. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/__init__.py +0 -0
  91. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/builder.py +0 -0
  92. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/download.py +0 -0
  93. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/platform/s5cmd_utils.py +0 -0
  94. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/__init__.py +0 -0
  95. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/__main__.py +0 -0
  96. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/config.py +0 -0
  97. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/consts.py +0 -0
  98. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/dataset_cmds.py +0 -0
  99. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/keychain.py +0 -0
  100. {hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia_cli/runc_cmds.py +0 -0
  101. {hafnia-0.5.1 → hafnia-0.5.3}/tests/__init__.py +0 -0
  102. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/train/000000000632.jpg +0 -0
  103. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/train/000000000724.jpg +0 -0
  104. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/train/_annotations.coco.json +0 -0
  105. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/valid/000000000139.jpg +0 -0
  106. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/valid/000000000285.jpg +0 -0
  107. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_coco_roboflow/valid/_annotations.coco.json +0 -0
  108. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/obj.names +0 -0
  109. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/train/data/000000000139.jpg +0 -0
  110. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/train/data/000000000139.txt +0 -0
  111. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/train/data/000000000285.jpg +0 -0
  112. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/train/data/000000000285.txt +0 -0
  113. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/train/images.txt +0 -0
  114. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/validation/data/000000000632.jpg +0 -0
  115. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/validation/data/000000000632.txt +0 -0
  116. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_formats/format_yolo/validation/images.txt +0 -0
  117. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/dataset_image_metadata_schema.yaml +0 -0
  118. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_dataset_transformations/test_video_storage_format_read_image.png +0 -0
  119. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_convert_segmentation_to_rle_list[polygon].png +0 -0
  120. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_convert_segmentation_to_rle_list[rle_as_ints].png +0 -0
  121. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_convert_segmentation_to_rle_list[rle_compressed_bytes].png +0 -0
  122. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_convert_segmentation_to_rle_list[rle_compressed_str].png +0 -0
  123. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_from_coco_format_visualized.png +0 -0
  124. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_coco/test_to_coco_format_visualized.png +0 -0
  125. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_yolo/test_format_yolo_import_export_tiny_dataset.png +0 -0
  126. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_format_yolo/test_import_yolo_format_visualized.png +0 -0
  127. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[caltech-101].png +0 -0
  128. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[caltech-256].png +0 -0
  129. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[cifar100].png +0 -0
  130. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[cifar10].png +0 -0
  131. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[coco-2017].png +0 -0
  132. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[midwest-vehicle-detection].png +0 -0
  133. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[mnist].png +0 -0
  134. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_check_dataset[tiny-dataset].png +0 -0
  135. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[caltech-101].png +0 -0
  136. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[caltech-256].png +0 -0
  137. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[cifar100].png +0 -0
  138. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[cifar10].png +0 -0
  139. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[coco-2017].png +0 -0
  140. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[midwest-vehicle-detection].png +0 -0
  141. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[mnist].png +0 -0
  142. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_samples/test_dataset_draw_image_and_target[tiny-dataset].png +0 -0
  143. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_blur_anonymization[micro-coco-2017].png +0 -0
  144. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_blur_anonymization[micro-tiny-dataset].png +0 -0
  145. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_draw_annotations[micro-coco-2017].png +0 -0
  146. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_draw_annotations[micro-tiny-dataset].png +0 -0
  147. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_mask_region[micro-coco-2017].png +0 -0
  148. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_mask_region[micro-tiny-dataset].png +0 -0
  149. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/expected_images/test_visualizations/test_polygon_to_bitmask_conversion.png +0 -0
  150. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/annotations.jsonl +0 -0
  151. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/annotations.parquet +0 -0
  152. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/data/253/253925d334c002ce6662d8133535dd4c.jpg +0 -0
  153. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/data/b1a/b1a09f4d922f8f6904bab0c1caf172ab.jpg +0 -0
  154. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/data/f67/f675c8a1e862b5e00203ab888ac7fff4.jpg +0 -0
  155. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-coco-2017/dataset_info.json +0 -0
  156. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/annotations.jsonl +0 -0
  157. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/annotations.parquet +0 -0
  158. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/data/25c/25c3a206e7b60ab50245ee3d52d97f11.png +0 -0
  159. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/data/962/962fd865fdd45f169d5ca8c8f284d68d.png +0 -0
  160. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/data/ec6/ec60f2f4fb854b59c97e16b45c713de0.png +0 -0
  161. {hafnia-0.5.1 → hafnia-0.5.3}/tests/data/micro_test_datasets/micro-tiny-dataset/dataset_info.json +0 -0
  162. {hafnia-0.5.1 → hafnia-0.5.3}/tests/helper_testing_datasets.py +0 -0
  163. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_bring_your_own_data.py +0 -0
  164. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_check_example_scripts.py +0 -0
  165. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_dataset_merges.py +0 -0
  166. {hafnia-0.5.1 → hafnia-0.5.3}/tests/integration/test_torchvision_datasets.py +0 -0
  167. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/dataset_recipe/test_recipe_transformations.py +0 -0
  168. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/format_conversions/test_format_coco.py +0 -0
  169. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/format_conversions/test_format_image_classification_folder.py +0 -0
  170. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/format_conversions/test_format_yolo.py +0 -0
  171. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/operations/test_dataset_stats.py +0 -0
  172. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/operations/test_dataset_transformations.py +0 -0
  173. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/operations/test_table_transformations.py +0 -0
  174. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_dataset_details_uploader.py +0 -0
  175. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_dataset_helpers.py +0 -0
  176. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_dataset_names.py +0 -0
  177. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_hafnia_dataset.py +0 -0
  178. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_hafnia_dataset_types.py +0 -0
  179. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/dataset/test_shape_primitives.py +0 -0
  180. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/test_builder.py +0 -0
  181. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/test_cli.py +0 -0
  182. {hafnia-0.5.1 → hafnia-0.5.3}/tests/unit/test_hafnia_logger.py +0 -0

{hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/build.yaml

@@ -21,8 +21,8 @@ jobs:
  outputs:
  package-version: ${{ steps.extract-version.outputs.package_version }}
  steps:
- - uses: actions/checkout@v6.0.1
- - uses: actions/setup-python@v6.1.0
+ - uses: actions/checkout@v6.0.2
+ - uses: actions/setup-python@v6.2.0
  with:
  python-version-file: ${{ inputs.python-version-file }}

@@ -38,7 +38,7 @@ jobs:
  version: 0.6.8

  - name: Cache dependencies
- uses: actions/cache@v4
+ uses: actions/cache@v5
  with:
  path: ~/.cache/uv
  key: ${{ runner.os }}-uv-${{ hashFiles('**/pyproject.toml') }}

{hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/ci_cd.yaml

@@ -19,7 +19,7 @@ jobs:
  runs-on: ubuntu-latest
  needs: lint
  steps:
- - uses: actions/checkout@v6.0.1
+ - uses: actions/checkout@v6.0.2
  - name: Run Trivy vulnerability scanner
  uses: aquasecurity/trivy-action@0.33.1
  with:

{hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/lint.yaml

@@ -10,8 +10,8 @@ jobs:
  lint:
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v6.0.1
- - uses: actions/setup-python@v6.1.0
+ - uses: actions/checkout@v6.0.2
+ - uses: actions/setup-python@v6.2.0
  with:
  python-version-file: ${{ inputs.python-version-file }}
  - uses: pre-commit/action@v3.0.1

{hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/publish_docker.yaml

@@ -24,8 +24,8 @@ jobs:
  build:
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v6.0.1
- - uses: actions/setup-python@v6.1.0
+ - uses: actions/checkout@v6.0.2
+ - uses: actions/setup-python@v6.2.0
  id: python
  with:
  python-version-file: ${{ inputs.python-version-file }}

@@ -57,7 +57,7 @@ jobs:
  uses: aws-actions/amazon-ecr-login@v2.0.1

  - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.11.1
+ uses: docker/setup-buildx-action@v3.12.0

  - name: Build and push
  uses: docker/build-push-action@v6.18.0

{hafnia-0.5.1 → hafnia-0.5.3}/.github/workflows/tests.yaml

@@ -15,8 +15,8 @@ jobs:
  matrix:
  os: [ubuntu-latest, windows-latest]
  steps:
- - uses: actions/checkout@v6.0.1
- - uses: actions/setup-python@v6.1.0
+ - uses: actions/checkout@v6.0.2
+ - uses: actions/setup-python@v6.2.0
  with:
  python-version-file: ${{ inputs.python-version-file }}
  - name: Install uv

{hafnia-0.5.1 → hafnia-0.5.3}/.trivyignore

@@ -1,3 +1,6 @@
  # Ignore 'CVE-2024-37059' issue https://avd.aquasec.com/nvd/2024/cve-2024-37059/
  # The vulnerability does not apply to our platform as models are not loaded on our platform.
- CVE-2024-37059
+ CVE-2024-37059
+
+ # Temporarily ignore 'CVE-2026-0994' issue https://avd.aquasec.com/nvd/2026/cve-2026-0994/
+ CVE-2026-0994

{hafnia-0.5.1 → hafnia-0.5.3}/.vscode/launch.json

@@ -77,9 +77,24 @@
  "--dataset",
  "mnist",
  ]
- },
+ },
+ {
+ "name": "cmd: 'hafnia trainer create [X]'",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/src/hafnia_cli/__main__.py",
+ "args": [
+ "trainer",
+ "create",
+ "${workspaceFolder}/../trainer-classification",
+ "--name",
+ "Classification Trainer Package Launched in debug mode from vc-code",
+ "--description",
+ "A trainer package for image classification tasks launched in debug mode."
+ ]
+ },
  {
- "name": "cmd: 'hafnia train-recipe [X]'",
+ "name": "cmd: 'hafnia trainer [X]'",
  "type": "debugpy",
  "request": "launch",
  "program": "${workspaceFolder}/src/hafnia_cli/__main__.py",

{hafnia-0.5.1 → hafnia-0.5.3}/PKG-INFO

@@ -1,12 +1,13 @@
  Metadata-Version: 2.4
  Name: hafnia
- Version: 0.5.1
+ Version: 0.5.3
  Summary: Python SDK for communication with Hafnia platform.
  Author-email: Milestone Systems <hafniaplatform@milestone.dk>
  License-File: LICENSE
  Requires-Python: >=3.10
  Requires-Dist: boto3>=1.35.91
  Requires-Dist: click>=8.1.8
+ Requires-Dist: docstring-parser>=0.17.0
  Requires-Dist: emoji>=2.14.1
  Requires-Dist: flatten-dict>=0.4.2
  Requires-Dist: keyring>=25.6.0

@@ -63,9 +64,16 @@ multiple GPUs and instances if needed.
  ## Getting started: Configuration
  To get started with Hafnia:

- 1. Install `hafnia` with your favorite python package manager. With pip do this:
+ 1. Install `hafnia` with your favorite python package manager:
+
+ ```bash
+ # With uv package manager
+ uv add hafnia
+
+ # With pip
+ pip install hafnia
+ ```

- `pip install hafnia`
  1. Sign in to the [Hafnia Platform](https://hafnia.milestonesys.com/).
  1. Create an API KEY for Training aaS. For more instructions, follow this
  [guide](https://hafnia.readme.io/docs/create-an-api-key).

@@ -93,11 +101,9 @@ With Hafnia configured on your local machine, it is now possible to download
  and explore the dataset sample with a python script:

  ```python
- from hafnia.data import get_dataset_path
  from hafnia.dataset.hafnia_dataset import HafniaDataset

- # To download the sample dataset use:
- path_dataset = get_dataset_path("midwest-vehicle-detection")
+ dataset = HafniaDataset.from_name("midwest-vehicle-detection")
  ```

  This will download the dataset sample `midwest-vehicle-detection` to the local `.data/datasets/` folder

@@ -123,11 +129,10 @@ midwest-vehicle-detection
  3 directories, 217 files
  ```

- You can interact with data as you want, but we also provide `HafniaDataset`
- for loading/saving, managing and interacting with the dataset.
+ We provide the `HafniaDataset` format for loading/saving, managing and interacting with the dataset.

  We recommend the example script [examples/example_hafnia_dataset.py](examples/example_hafnia_dataset.py)
- for a short introduction on the `HafniaDataset`.
+ for a quick introduction on the `HafniaDataset`.

  Below is a short introduction to the `HafniaDataset` class.

@@ -135,7 +140,7 @@ Below is a short introduction to the `HafniaDataset` class.
  from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample

  # Load dataset from path
- dataset = HafniaDataset.read_from_path(path_dataset)
+ dataset = HafniaDataset.from_path(path_dataset)

  # Or get dataset directly by name
  dataset = HafniaDataset.from_name("midwest-vehicle-detection")

{hafnia-0.5.1 → hafnia-0.5.3}/README.md

@@ -34,9 +34,16 @@ multiple GPUs and instances if needed.
  ## Getting started: Configuration
  To get started with Hafnia:

- 1. Install `hafnia` with your favorite python package manager. With pip do this:
+ 1. Install `hafnia` with your favorite python package manager:
+
+ ```bash
+ # With uv package manager
+ uv add hafnia
+
+ # With pip
+ pip install hafnia
+ ```

- `pip install hafnia`
  1. Sign in to the [Hafnia Platform](https://hafnia.milestonesys.com/).
  1. Create an API KEY for Training aaS. For more instructions, follow this
  [guide](https://hafnia.readme.io/docs/create-an-api-key).

@@ -64,11 +71,9 @@ With Hafnia configured on your local machine, it is now possible to download
  and explore the dataset sample with a python script:

  ```python
- from hafnia.data import get_dataset_path
  from hafnia.dataset.hafnia_dataset import HafniaDataset

- # To download the sample dataset use:
- path_dataset = get_dataset_path("midwest-vehicle-detection")
+ dataset = HafniaDataset.from_name("midwest-vehicle-detection")
  ```

  This will download the dataset sample `midwest-vehicle-detection` to the local `.data/datasets/` folder

@@ -94,11 +99,10 @@ midwest-vehicle-detection
  3 directories, 217 files
  ```

- You can interact with data as you want, but we also provide `HafniaDataset`
- for loading/saving, managing and interacting with the dataset.
+ We provide the `HafniaDataset` format for loading/saving, managing and interacting with the dataset.

  We recommend the example script [examples/example_hafnia_dataset.py](examples/example_hafnia_dataset.py)
- for a short introduction on the `HafniaDataset`.
+ for a quick introduction on the `HafniaDataset`.

  Below is a short introduction to the `HafniaDataset` class.

@@ -106,7 +110,7 @@ Below is a short introduction to the `HafniaDataset` class.
  from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample

  # Load dataset from path
- dataset = HafniaDataset.read_from_path(path_dataset)
+ dataset = HafniaDataset.from_path(path_dataset)

  # Or get dataset directly by name
  dataset = HafniaDataset.from_name("midwest-vehicle-detection")

{hafnia-0.5.1 → hafnia-0.5.3}/examples/example_dataset_recipe.py

@@ -36,7 +36,7 @@ rprint(dataset_recipe)

  # Example: Saving and loading a dataset recipe from file.
  path_recipe = Path(".data/dataset_recipes/example_recipe.json")
- json_str: str = dataset_recipe.as_json_file(path_recipe)
+ dataset_recipe.as_json_file(path_recipe)
  dataset_recipe_again: DatasetRecipe = DatasetRecipe.from_json_file(path_recipe)

  # Verify that the loaded recipe is identical to the original recipe.

{hafnia-0.5.1 → hafnia-0.5.3}/examples/example_torchvision_dataloader.py

@@ -6,7 +6,7 @@ import torchvision.transforms.functional
  from torch.utils.data import DataLoader
  from torchvision.transforms import v2

- from hafnia import torch_helpers
+ from hafnia.dataset import torch_helpers
  from hafnia.dataset.hafnia_dataset import HafniaDataset

  if __name__ == "__main__":

{hafnia-0.5.1 → hafnia-0.5.3}/pyproject.toml

@@ -1,6 +1,6 @@
  [project]
  name = "hafnia"
- version = "0.5.1"
+ version = "0.5.3"
  description = "Python SDK for communication with Hafnia platform."
  readme = "README.md"
  authors = [

@@ -29,16 +29,18 @@ dependencies = [
  "mlflow>=3.4.0",
  "sagemaker-mlflow>=0.1.0",
  "mcp>=1.23.0",
+ "docstring-parser>=0.17.0", # Used to parse docstrings for CommandBuilder
  ]

  [dependency-groups]
  dev = [
  "ipykernel>=6.29.5",
- "lark>=1.2.2",
  "torch>=2.6.0",
  "torchvision>=0.21.0",
  "flatten-dict>=0.4.2",
  "pytest-cov>=7.0.0",
+ "cyclopts>=4.5.0", # Only to test "CommandBuilder" module
+ "typer>=0.21.1", # Only to test "CommandBuilder" module
  ]

  test = ["pytest>=8.3.4", "pre-commit>=4.2.0", "ruff>=0.9.1"]

{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/dataset_recipe/dataset_recipe.py

@@ -2,6 +2,7 @@ from __future__ import annotations

  import json
  import os
+ import shutil
  from pathlib import Path
  from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union

@@ -12,6 +13,7 @@ from pydantic import (
  )
  from hafnia import utils
  from hafnia.dataset.dataset_helpers import dataset_name_and_version_from_string
+ from hafnia.dataset.dataset_names import FILENAME_RECIPE_JSON
  from hafnia.dataset.dataset_recipe import recipe_transforms
  from hafnia.dataset.dataset_recipe.recipe_types import (
  RecipeCreation,

@@ -22,6 +24,7 @@ from hafnia.dataset.hafnia_dataset import (
  HafniaDataset,
  available_dataset_versions_from_name,
  )
+ from hafnia.dataset.hafnia_dataset_types import DatasetMetadataFilePaths
  from hafnia.dataset.primitives.primitive import Primitive
  from hafnia.log import user_logger

@@ -117,6 +120,23 @@ class DatasetRecipe(Serializable):
  json_str = path_json.read_text(encoding="utf-8")
  return DatasetRecipe.from_json_str(json_str)

+ @staticmethod
+ def from_recipe_field(recipe_field: Union[str, Dict[str, Any]]) -> "DatasetRecipe":
+ """
+
+ Deserialize from a recipe field which can be either a string or a dictionary.
+
+ string: A dataset name and version string in the format 'name:version'.
+ dict: A dictionary representation of the DatasetRecipe.
+
+ """
+ if isinstance(recipe_field, str):
+ return DatasetRecipe.from_name_and_version_string(recipe_field)
+ elif isinstance(recipe_field, dict):
+ return DatasetRecipe.from_dict(recipe_field)
+
+ raise TypeError(f"Expected str or dict for recipe_field, got {type(recipe_field).__name__}.")
+
  @staticmethod
  def from_dict(data: Dict[str, Any]) -> "DatasetRecipe":
  """Deserialize from a dictionary."""

@@ -130,14 +150,9 @@ class DatasetRecipe(Serializable):
  from hafnia_cli.config import Config

  cfg = Config()
- endpoint_dataset = cfg.get_platform_endpoint("dataset_recipes")
- recipe_dict = get_dataset_recipe_by_id(recipe_id, endpoint=endpoint_dataset, api_key=cfg.api_key)
+ recipe_dict = get_dataset_recipe_by_id(recipe_id, cfg=cfg)
  recipe_dict = recipe_dict["template"]["body"]
- if isinstance(recipe_dict, str):
- return DatasetRecipe.from_implicit_form(recipe_dict)
-
- recipe = DatasetRecipe.from_dict(recipe_dict)
- return recipe
+ return DatasetRecipe.from_recipe_field(recipe_dict)

  @staticmethod
  def from_recipe_name(name: str) -> "DatasetRecipe":

@@ -146,8 +161,7 @@ class DatasetRecipe(Serializable):
  from hafnia_cli.config import Config

  cfg = Config()
- endpoint_dataset = cfg.get_platform_endpoint("dataset_recipes")
- recipe = get_dataset_recipe_by_name(name=name, endpoint=endpoint_dataset, api_key=cfg.api_key)
+ recipe = get_dataset_recipe_by_name(name=name, cfg=cfg)
  if not recipe:
  raise ValueError(f"Dataset recipe '{name}' not found.")
  recipe_id = recipe["id"]

@@ -168,82 +182,6 @@ class DatasetRecipe(Serializable):

  return DatasetRecipe.from_name(name=dataset_name, version=version)

- @staticmethod
- def from_implicit_form(recipe: Any) -> DatasetRecipe:
- """
- Recursively convert from implicit recipe to explicit form.
- Handles mixed implicit/explicit recipes.
-
- Conversion rules:
- - str: Will get a dataset by name -> DatasetRecipeFromName
- - Path: Will get a dataset from path -> DatasetRecipeFromPath
- - tuple: Will merge datasets specified in the tuple -> RecipeMerger
- - list: Will define a list of transformations -> RecipeTransforms
-
- Example: DataRecipe from dataset name:
- ```python
- recipe_implicit = "mnist"
- recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
- >>> recipe_explicit
- DatasetRecipeFromName(dataset_name='mnist', force_redownload=False)
- ```
-
- Example: DataRecipe from tuple (merging multiple recipes):
- ```python
- recipe_implicit = ("dataset1", "dataset2")
- recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
- >>> recipe_explicit
- RecipeMerger(
- recipes=[
- DatasetRecipeFromName(dataset_name='dataset1', force_redownload=False),
- DatasetRecipeFromName(dataset_name='dataset2', force_redownload=False)
- ]
- )
-
- Example: DataRecipe from list (recipe and transformations):
- ```python
- recipe_implicit = ["mnist", SelectSamples(n_samples=20), Shuffle(seed=123)]
- recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
- >>> recipe_explicit
- Transforms(
- recipe=DatasetRecipeFromName(dataset_name='mnist', force_redownload=False),
- transforms=[SelectSamples(n_samples=20), Shuffle(seed=123)]
- )
- ```
-
- """
- if isinstance(recipe, DatasetRecipe): # type: ignore
- # It is possible to do an early return if recipe is a 'DataRecipe'-type even for nested and
- # potentially mixed recipes. If you (really) think about it, this might surprise you,
- # as this will bypass the conversion logic for nested recipes.
- # However, this is not a problem as 'DataRecipe' classes are also pydantic models,
- # so if a user introduces a 'DataRecipe'-class in the recipe (in potentially
- # some nested and mixed implicit/explicit form) it will (due to pydantic validation) force
- # the user to specify all nested recipes to be converted to explicit form.
- return recipe
-
- if isinstance(recipe, str): # str-type is convert to DatasetFromName
- return DatasetRecipe.from_name_and_version_string(string=recipe, resolve_missing_version=True)
-
- if isinstance(recipe, Path): # Path-type is convert to DatasetFromPath
- return DatasetRecipe.from_path(path_folder=recipe)
-
- if isinstance(recipe, tuple): # tuple-type is convert to DatasetMerger
- recipes = [DatasetRecipe.from_implicit_form(item) for item in recipe]
- return DatasetRecipe.from_merger(recipes=recipes)
-
- if isinstance(recipe, list): # list-type is convert to Transforms
- if len(recipe) == 0:
- raise ValueError("List of recipes cannot be empty")
-
- dataset_recipe = recipe[0] # First element is the dataset recipe
- loader = DatasetRecipe.from_implicit_form(dataset_recipe)
-
- transforms = recipe[1:] # Remaining items are transformations
- return DatasetRecipe(creation=loader.creation, operations=transforms)
-
- raise ValueError(f"Unsupported recipe type: {type(recipe)}")
-
  ### Upload, store and recipe conversions ###
  def as_python_code(self, keep_default_fields: bool = False, as_kwargs: bool = True) -> str:
  str_operations = [self.creation.as_python_code(keep_default_fields=keep_default_fields, as_kwargs=as_kwargs)]

@@ -285,17 +223,10 @@ class DatasetRecipe(Serializable):
  from hafnia.platform.dataset_recipe import get_or_create_dataset_recipe
  from hafnia_cli.config import Config

- recipe = self.as_dict()
  cfg = Config()
- endpoint_dataset = cfg.get_platform_endpoint("dataset_recipes")
- recipe_dict = get_or_create_dataset_recipe(
- recipe=recipe,
- endpoint=endpoint_dataset,
- api_key=cfg.api_key,
- name=recipe_name,
- overwrite=overwrite,
- )

+ recipe = self.as_dict()
+ recipe_dict = get_or_create_dataset_recipe(recipe=recipe, name=recipe_name, overwrite=overwrite, cfg=cfg)
  return recipe_dict

  ### Dataset Recipe Transformations ###

@@ -428,13 +359,6 @@ def unique_name_from_recipe(recipe: DatasetRecipe) -> str:
  return unique_name


- def get_dataset_path_from_recipe(recipe: DatasetRecipe, path_datasets: Optional[Union[Path, str]] = None) -> Path:
- path_datasets = path_datasets or utils.PATH_DATASETS
- path_datasets = Path(path_datasets)
- unique_dataset_name = unique_name_from_recipe(recipe)
- return path_datasets / unique_dataset_name
-
-
  class FromPath(RecipeCreation):
  path_folder: Path
  check_for_images: bool = True

@@ -525,3 +449,34 @@ class FromMerger(RecipeCreation):
  for recipe in self.recipes:
  names.extend(recipe.creation.get_dataset_names())
  return names
+
+
+ def get_dataset_path_from_recipe(recipe: DatasetRecipe, path_datasets: Optional[Union[Path, str]] = None) -> Path:
+ path_datasets = path_datasets or utils.PATH_DATASETS
+ path_datasets = Path(path_datasets)
+ unique_dataset_name = unique_name_from_recipe(recipe)
+ return path_datasets / unique_dataset_name
+
+
+ def get_or_create_dataset_path_from_recipe(
+ dataset_recipe: DatasetRecipe,
+ force_redownload: bool = False,
+ path_datasets: Optional[Union[Path, str]] = None,
+ ) -> Path:
+ path_dataset = get_dataset_path_from_recipe(dataset_recipe, path_datasets=path_datasets)
+
+ if force_redownload:
+ shutil.rmtree(path_dataset, ignore_errors=True)
+
+ dataset_metadata_files = DatasetMetadataFilePaths.from_path(path_dataset)
+ if dataset_metadata_files.exists(raise_error=False):
+ return path_dataset
+
+ path_dataset.mkdir(parents=True, exist_ok=True)
+ path_recipe_json = path_dataset / FILENAME_RECIPE_JSON
+ path_recipe_json.write_text(dataset_recipe.model_dump_json(indent=4))
+
+ dataset: HafniaDataset = dataset_recipe.build()
+ dataset.write(path_dataset)
+
+ return path_dataset
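
The two helpers added above are moved here from `hafnia_dataset.py` (removed further down in this diff) and now take an explicit `DatasetRecipe` instead of the old implicit str/Path/tuple/list forms. A hedged sketch of the build-or-reuse flow they implement; the `DatasetRecipe.from_name("mnist")` call is illustrative only:

```python
from hafnia.dataset.dataset_recipe.dataset_recipe import (
    DatasetRecipe,
    get_or_create_dataset_path_from_recipe,
)
from hafnia.dataset.hafnia_dataset import HafniaDataset

recipe = DatasetRecipe.from_name("mnist")

# First call: recipe.json is written next to the data and the dataset is built
# and cached under the local datasets folder; later calls return the cached path.
path_dataset = get_or_create_dataset_path_from_recipe(recipe)
dataset = HafniaDataset.from_path(path_dataset)
```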

{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/hafnia_dataset.py

@@ -1,7 +1,6 @@
  from __future__ import annotations

  import copy
- import shutil
  from dataclasses import dataclass
  from pathlib import Path
  from random import Random

@@ -14,7 +13,6 @@ from hafnia import utils
  from hafnia.dataset import dataset_helpers
  from hafnia.dataset.dataset_helpers import is_valid_version_string, version_from_string
  from hafnia.dataset.dataset_names import (
- FILENAME_RECIPE_JSON,
  TAG_IS_SAMPLE,
  PrimitiveField,
  SampleField,

@@ -146,17 +144,6 @@ class HafniaDataset:
  table = table_transformations.add_dataset_name_if_missing(table, dataset_name=info.dataset_name)
  return HafniaDataset(info=info, samples=table)

- @staticmethod
- def from_recipe(dataset_recipe: Any) -> "HafniaDataset":
- """
- Load a dataset from a recipe. The recipe can be a string (name of the dataset), a dictionary, or a DataRecipe object.
- """
- from hafnia.dataset.dataset_recipe.dataset_recipe import DatasetRecipe
-
- recipe_explicit = DatasetRecipe.from_implicit_form(dataset_recipe)
-
- return recipe_explicit.build() # Build dataset from the recipe
-
  @staticmethod
  def from_merge(dataset0: "HafniaDataset", dataset1: "HafniaDataset") -> "HafniaDataset":
  return HafniaDataset.merge(dataset0, dataset1)

@@ -172,6 +159,8 @@ class HafniaDataset:
  If the dataset is already cached, it will be loaded from the cache.
  """

+ from hafnia.dataset.dataset_recipe.dataset_recipe import get_or_create_dataset_path_from_recipe
+
  path_dataset = get_or_create_dataset_path_from_recipe(
  dataset_recipe,
  path_datasets=path_datasets,

@@ -245,7 +234,7 @@ class HafniaDataset:

  Example: Defining split ratios and applying the transformation

- >>> dataset = HafniaDataset.read_from_path(Path("path/to/dataset"))
+ >>> dataset = HafniaDataset.from_path(Path("path/to/dataset"))
  >>> split_ratios = {SplitName.TRAIN: 0.8, SplitName.VAL: 0.1, SplitName.TEST: 0.1}
  >>> dataset_with_splits = splits_by_ratios(dataset, split_ratios, seed=42)
  Or use the function as a

@@ -270,7 +259,7 @@ class HafniaDataset:
  splits based on the provided ratios.

  Example: Defining split ratios and applying the transformation
- >>> dataset = HafniaDataset.read_from_path(Path("path/to/dataset"))
+ >>> dataset = HafniaDataset.from_path(Path("path/to/dataset"))
  >>> split_name = SplitName.TEST
  >>> split_ratios = {SplitName.TEST: 0.8, SplitName.VAL: 0.2}
  >>> dataset_with_splits = split_into_multiple_splits(dataset, split_name, split_ratios)

@@ -543,7 +532,7 @@ class HafniaDataset:
  primitive: Type[Primitive],
  task_name: Optional[str] = None,
  keep_sample_data: bool = False,
- ) -> pl.DataFrame:
+ ) -> Optional[pl.DataFrame]:
  return table_transformations.create_primitive_table(
  samples_table=self.samples,
  PrimitiveType=primitive,

@@ -741,36 +730,6 @@ def check_hafnia_dataset_from_path(path_dataset: Path) -> None:
  dataset.check_dataset()


- def get_or_create_dataset_path_from_recipe(
- dataset_recipe: Any,
- force_redownload: bool = False,
- path_datasets: Optional[Union[Path, str]] = None,
- ) -> Path:
- from hafnia.dataset.dataset_recipe.dataset_recipe import (
- DatasetRecipe,
- get_dataset_path_from_recipe,
- )
-
- recipe: DatasetRecipe = DatasetRecipe.from_implicit_form(dataset_recipe)
- path_dataset = get_dataset_path_from_recipe(recipe, path_datasets=path_datasets)
-
- if force_redownload:
- shutil.rmtree(path_dataset, ignore_errors=True)
-
- dataset_metadata_files = DatasetMetadataFilePaths.from_path(path_dataset)
- if dataset_metadata_files.exists(raise_error=False):
- return path_dataset
-
- path_dataset.mkdir(parents=True, exist_ok=True)
- path_recipe_json = path_dataset / FILENAME_RECIPE_JSON
- path_recipe_json.write_text(recipe.model_dump_json(indent=4))
-
- dataset: HafniaDataset = recipe.build()
- dataset.write(path_dataset)
-
- return path_dataset
-
-
  def available_dataset_versions_from_name(dataset_name: str) -> Dict[Version, "DatasetMetadataFilePaths"]:
  credentials: ResourceCredentials = get_read_credentials_by_name(dataset_name=dataset_name)
  return available_dataset_versions(credentials=credentials)

@@ -795,12 +754,13 @@ def select_version_from_available_versions(

  if version is None:
  str_versions = [str(v) for v in available_versions]
- raise ValueError(f"Version must be specified. Available versions: {str_versions}")
- elif version == "latest":
+ raise ValueError(f"Version must be specified. Available versions: {str_versions}. ")
+
+ if version == "latest":
  version_casted = max(available_versions)
  user_logger.info(f"'latest' version '{version_casted}' has been selected")
  else:
- version_casted = version_from_string(version)
+ version_casted = version_from_string(version, raise_error=True)

  if version_casted not in available_versions:
  raise ValueError(f"Selected version '{version}' not found in available versions: {available_versions}")

{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/hafnia_dataset_types.py

@@ -470,7 +470,7 @@ class Sample(BaseModel):
  return image

  def draw_annotations(self, image: Optional[np.ndarray] = None) -> np.ndarray:
- from hafnia.visualizations import image_visualizations
+ from hafnia.dataset import image_visualizations

  if image is None:
  image = self.read_image()

{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/operations/dataset_stats.py

@@ -104,7 +104,8 @@ def calculate_primitive_counts(dataset: HafniaDataset) -> Dict[str, int]:
  name = task.primitive.__name__
  if task.name != task.primitive.default_task_name():
  name = f"{name}.{task.name}"
- annotation_counts[name] = len(objects)
+ n_objects = 0 if objects is None else len(objects)
+ annotation_counts[name] = n_objects
  return annotation_counts


{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/classification.py

@@ -39,7 +39,7 @@ class Classification(Primitive):
  def draw(self, image: np.ndarray, inplace: bool = False, draw_label: bool = True) -> np.ndarray:
  if draw_label is False:
  return image
- from hafnia.visualizations import image_visualizations
+ from hafnia.dataset import image_visualizations

  class_name = self.get_class_name()
  if self.task_name == self.default_task_name():

{hafnia-0.5.1 → hafnia-0.5.3}/src/hafnia/dataset/primitives/segmentation.py

@@ -4,9 +4,9 @@ import cv2
  import numpy as np
  from pydantic import Field

+ from hafnia.dataset.colors import get_n_colors
  from hafnia.dataset.primitives.primitive import Primitive
  from hafnia.dataset.primitives.utils import get_class_name
- from hafnia.visualizations.colors import get_n_colors


  class Segmentation(Primitive):