hafnia 0.5.2__py3-none-any.whl → 0.5.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hafnia/dataset/dataset_details_uploader.py +1 -1
- hafnia/dataset/dataset_recipe/dataset_recipe.py +56 -101
- hafnia/dataset/hafnia_dataset.py +9 -49
- hafnia/dataset/hafnia_dataset_types.py +1 -1
- hafnia/dataset/operations/dataset_stats.py +2 -1
- hafnia/dataset/primitives/classification.py +1 -1
- hafnia/dataset/primitives/segmentation.py +1 -1
- hafnia/experiment/command_builder.py +1 -1
- hafnia/platform/dataset_recipe.py +30 -18
- hafnia/platform/datasets.py +8 -4
- hafnia/platform/experiment.py +12 -8
- hafnia/platform/trainer_package.py +58 -12
- hafnia/utils.py +7 -5
- {hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/METADATA +14 -10
- {hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/RECORD +25 -27
- hafnia_cli/dataset_recipe_cmds.py +4 -8
- hafnia_cli/experiment_cmds.py +15 -25
- hafnia_cli/profile_cmds.py +8 -3
- hafnia_cli/trainer_package_cmds.py +52 -4
- hafnia/data/__init__.py +0 -3
- hafnia/data/factory.py +0 -22
- /hafnia/{visualizations → dataset}/colors.py +0 -0
- /hafnia/{visualizations → dataset}/image_visualizations.py +0 -0
- /hafnia/{torch_helpers.py → dataset/torch_helpers.py} +0 -0
- {hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/WHEEL +0 -0
- {hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/entry_points.txt +0 -0
- {hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/licenses/LICENSE +0 -0
hafnia/dataset/dataset_details_uploader.py
CHANGED

@@ -294,9 +294,9 @@ def upload_dataset_details_to_platform(
     if update_platform:
         dataset_details_exclude_none = dataset_details.model_dump(exclude_none=True, mode="json")
         upload_dataset_details(
-            cfg=cfg,
             data=dataset_details_exclude_none,
             dataset_name=dataset_details.name,
+            cfg=cfg,
         )
 
         dataset_details_dict = dataset_details.model_dump(exclude_none=False, mode="json")
hafnia/dataset/dataset_recipe/dataset_recipe.py
CHANGED

@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import json
 import os
+import shutil
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
 
@@ -12,6 +13,7 @@ from pydantic import (
 
 from hafnia import utils
 from hafnia.dataset.dataset_helpers import dataset_name_and_version_from_string
+from hafnia.dataset.dataset_names import FILENAME_RECIPE_JSON
 from hafnia.dataset.dataset_recipe import recipe_transforms
 from hafnia.dataset.dataset_recipe.recipe_types import (
     RecipeCreation,
@@ -22,6 +24,7 @@ from hafnia.dataset.hafnia_dataset import (
     HafniaDataset,
     available_dataset_versions_from_name,
 )
+from hafnia.dataset.hafnia_dataset_types import DatasetMetadataFilePaths
 from hafnia.dataset.primitives.primitive import Primitive
 from hafnia.log import user_logger
 
@@ -117,6 +120,23 @@ class DatasetRecipe(Serializable):
         json_str = path_json.read_text(encoding="utf-8")
         return DatasetRecipe.from_json_str(json_str)
 
+    @staticmethod
+    def from_recipe_field(recipe_field: Union[str, Dict[str, Any]]) -> "DatasetRecipe":
+        """
+        Deserialize from a recipe field which can be either a string or a dictionary.
+
+        string: A dataset name and version string in the format 'name:version'.
+        dict: A dictionary representation of the DatasetRecipe.
+        """
+        if isinstance(recipe_field, str):
+            return DatasetRecipe.from_name_and_version_string(recipe_field)
+        elif isinstance(recipe_field, dict):
+            return DatasetRecipe.from_dict(recipe_field)
+
+        raise TypeError(f"Expected str or dict for recipe_field, got {type(recipe_field).__name__}.")
+
     @staticmethod
     def from_dict(data: Dict[str, Any]) -> "DatasetRecipe":
         """Deserialize from a dictionary."""
@@ -130,14 +150,9 @@ class DatasetRecipe(Serializable):
         from hafnia_cli.config import Config
 
         cfg = Config()
-
-        recipe_dict = get_dataset_recipe_by_id(recipe_id, endpoint=endpoint_dataset, api_key=cfg.api_key)
+        recipe_dict = get_dataset_recipe_by_id(recipe_id, cfg=cfg)
         recipe_dict = recipe_dict["template"]["body"]
-
-        return DatasetRecipe.from_implicit_form(recipe_dict)
-
-        recipe = DatasetRecipe.from_dict(recipe_dict)
-        return recipe
+        return DatasetRecipe.from_recipe_field(recipe_dict)
 
     @staticmethod
     def from_recipe_name(name: str) -> "DatasetRecipe":
@@ -146,8 +161,7 @@ class DatasetRecipe(Serializable):
         from hafnia_cli.config import Config
 
         cfg = Config()
-
-        recipe = get_dataset_recipe_by_name(name=name, endpoint=endpoint_dataset, api_key=cfg.api_key)
+        recipe = get_dataset_recipe_by_name(name=name, cfg=cfg)
         if not recipe:
             raise ValueError(f"Dataset recipe '{name}' not found.")
         recipe_id = recipe["id"]
@@ -168,82 +182,6 @@ class DatasetRecipe(Serializable):
 
         return DatasetRecipe.from_name(name=dataset_name, version=version)
 
-    @staticmethod
-    def from_implicit_form(recipe: Any) -> DatasetRecipe:
-        """
-        Recursively convert from implicit recipe to explicit form.
-        Handles mixed implicit/explicit recipes.
-
-        Conversion rules:
-        - str: Will get a dataset by name -> DatasetRecipeFromName
-        - Path: Will get a dataset from path -> DatasetRecipeFromPath
-        - tuple: Will merge datasets specified in the tuple -> RecipeMerger
-        - list: Will define a list of transformations -> RecipeTransforms
-
-        Example: DataRecipe from dataset name:
-        ```python
-        recipe_implicit = "mnist"
-        recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
-        >>> recipe_explicit
-        DatasetRecipeFromName(dataset_name='mnist', force_redownload=False)
-        ```
-
-        Example: DataRecipe from tuple (merging multiple recipes):
-        ```python
-        recipe_implicit = ("dataset1", "dataset2")
-        recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
-        >>> recipe_explicit
-        RecipeMerger(
-            recipes=[
-                DatasetRecipeFromName(dataset_name='dataset1', force_redownload=False),
-                DatasetRecipeFromName(dataset_name='dataset2', force_redownload=False)
-            ]
-        )
-
-        Example: DataRecipe from list (recipe and transformations):
-        ```python
-        recipe_implicit = ["mnist", SelectSamples(n_samples=20), Shuffle(seed=123)]
-        recipe_explicit = DatasetRecipe.from_implicit_form(recipe_implicit)
-        >>> recipe_explicit
-        Transforms(
-            recipe=DatasetRecipeFromName(dataset_name='mnist', force_redownload=False),
-            transforms=[SelectSamples(n_samples=20), Shuffle(seed=123)]
-        )
-        ```
-
-        """
-        if isinstance(recipe, DatasetRecipe):  # type: ignore
-            # It is possible to do an early return if recipe is a 'DataRecipe'-type even for nested and
-            # potentially mixed recipes. If you (really) think about it, this might surprise you,
-            # as this will bypass the conversion logic for nested recipes.
-            # However, this is not a problem as 'DataRecipe' classes are also pydantic models,
-            # so if a user introduces a 'DataRecipe'-class in the recipe (in potentially
-            # some nested and mixed implicit/explicit form) it will (due to pydantic validation) force
-            # the user to specify all nested recipes to be converted to explicit form.
-            return recipe
-
-        if isinstance(recipe, str):  # str-type is convert to DatasetFromName
-            return DatasetRecipe.from_name_and_version_string(string=recipe, resolve_missing_version=True)
-
-        if isinstance(recipe, Path):  # Path-type is convert to DatasetFromPath
-            return DatasetRecipe.from_path(path_folder=recipe)
-
-        if isinstance(recipe, tuple):  # tuple-type is convert to DatasetMerger
-            recipes = [DatasetRecipe.from_implicit_form(item) for item in recipe]
-            return DatasetRecipe.from_merger(recipes=recipes)
-
-        if isinstance(recipe, list):  # list-type is convert to Transforms
-            if len(recipe) == 0:
-                raise ValueError("List of recipes cannot be empty")
-
-            dataset_recipe = recipe[0]  # First element is the dataset recipe
-            loader = DatasetRecipe.from_implicit_form(dataset_recipe)
-
-            transforms = recipe[1:]  # Remaining items are transformations
-            return DatasetRecipe(creation=loader.creation, operations=transforms)
-
-        raise ValueError(f"Unsupported recipe type: {type(recipe)}")
-
     ### Upload, store and recipe conversions ###
     def as_python_code(self, keep_default_fields: bool = False, as_kwargs: bool = True) -> str:
         str_operations = [self.creation.as_python_code(keep_default_fields=keep_default_fields, as_kwargs=as_kwargs)]
@@ -285,17 +223,10 @@ class DatasetRecipe(Serializable):
         from hafnia.platform.dataset_recipe import get_or_create_dataset_recipe
         from hafnia_cli.config import Config
 
-        recipe = self.as_dict()
         cfg = Config()
-        endpoint_dataset = cfg.get_platform_endpoint("dataset_recipes")
-        recipe_dict = get_or_create_dataset_recipe(
-            recipe=recipe,
-            endpoint=endpoint_dataset,
-            api_key=cfg.api_key,
-            name=recipe_name,
-            overwrite=overwrite,
-        )
 
+        recipe = self.as_dict()
+        recipe_dict = get_or_create_dataset_recipe(recipe=recipe, name=recipe_name, overwrite=overwrite, cfg=cfg)
         return recipe_dict
 
     ### Dataset Recipe Transformations ###
@@ -428,13 +359,6 @@ def unique_name_from_recipe(recipe: DatasetRecipe) -> str:
     return unique_name
 
 
-def get_dataset_path_from_recipe(recipe: DatasetRecipe, path_datasets: Optional[Union[Path, str]] = None) -> Path:
-    path_datasets = path_datasets or utils.PATH_DATASETS
-    path_datasets = Path(path_datasets)
-    unique_dataset_name = unique_name_from_recipe(recipe)
-    return path_datasets / unique_dataset_name
-
-
 class FromPath(RecipeCreation):
     path_folder: Path
     check_for_images: bool = True
@@ -525,3 +449,34 @@ class FromMerger(RecipeCreation):
         for recipe in self.recipes:
             names.extend(recipe.creation.get_dataset_names())
         return names
+
+
+def get_dataset_path_from_recipe(recipe: DatasetRecipe, path_datasets: Optional[Union[Path, str]] = None) -> Path:
+    path_datasets = path_datasets or utils.PATH_DATASETS
+    path_datasets = Path(path_datasets)
+    unique_dataset_name = unique_name_from_recipe(recipe)
+    return path_datasets / unique_dataset_name
+
+
+def get_or_create_dataset_path_from_recipe(
+    dataset_recipe: DatasetRecipe,
+    force_redownload: bool = False,
+    path_datasets: Optional[Union[Path, str]] = None,
+) -> Path:
+    path_dataset = get_dataset_path_from_recipe(dataset_recipe, path_datasets=path_datasets)
+
+    if force_redownload:
+        shutil.rmtree(path_dataset, ignore_errors=True)
+
+    dataset_metadata_files = DatasetMetadataFilePaths.from_path(path_dataset)
+    if dataset_metadata_files.exists(raise_error=False):
+        return path_dataset
+
+    path_dataset.mkdir(parents=True, exist_ok=True)
+    path_recipe_json = path_dataset / FILENAME_RECIPE_JSON
+    path_recipe_json.write_text(dataset_recipe.model_dump_json(indent=4))
+
+    dataset: HafniaDataset = dataset_recipe.build()
+    dataset.write(path_dataset)
+
+    return path_dataset
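The caching helpers now live next to the recipe classes and take an explicit `DatasetRecipe` instead of an implicit value. A minimal sketch of the new flow, assuming a recipe built from a name and version string (the `"mnist:1.0.0"` value is a placeholder):

```python
from hafnia.dataset.dataset_recipe.dataset_recipe import (
    DatasetRecipe,
    get_or_create_dataset_path_from_recipe,
)

# Build an explicit recipe from a "name:version" string (placeholder values).
recipe = DatasetRecipe.from_name_and_version_string("mnist:1.0.0")

# On a cache miss this writes the recipe JSON, builds the dataset and writes it
# to the cache folder; on a cache hit it simply returns the existing folder.
path_dataset = get_or_create_dataset_path_from_recipe(recipe, force_redownload=False)
print(path_dataset)
```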
hafnia/dataset/hafnia_dataset.py
CHANGED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import copy
-import shutil
 from dataclasses import dataclass
 from pathlib import Path
 from random import Random
@@ -14,7 +13,6 @@ from hafnia import utils
 from hafnia.dataset import dataset_helpers
 from hafnia.dataset.dataset_helpers import is_valid_version_string, version_from_string
 from hafnia.dataset.dataset_names import (
-    FILENAME_RECIPE_JSON,
     TAG_IS_SAMPLE,
     PrimitiveField,
     SampleField,
@@ -146,17 +144,6 @@ class HafniaDataset:
         table = table_transformations.add_dataset_name_if_missing(table, dataset_name=info.dataset_name)
         return HafniaDataset(info=info, samples=table)
 
-    @staticmethod
-    def from_recipe(dataset_recipe: Any) -> "HafniaDataset":
-        """
-        Load a dataset from a recipe. The recipe can be a string (name of the dataset), a dictionary, or a DataRecipe object.
-        """
-        from hafnia.dataset.dataset_recipe.dataset_recipe import DatasetRecipe
-
-        recipe_explicit = DatasetRecipe.from_implicit_form(dataset_recipe)
-
-        return recipe_explicit.build()  # Build dataset from the recipe
-
     @staticmethod
     def from_merge(dataset0: "HafniaDataset", dataset1: "HafniaDataset") -> "HafniaDataset":
         return HafniaDataset.merge(dataset0, dataset1)
@@ -172,6 +159,8 @@ class HafniaDataset:
         If the dataset is already cached, it will be loaded from the cache.
         """
 
+        from hafnia.dataset.dataset_recipe.dataset_recipe import get_or_create_dataset_path_from_recipe
+
         path_dataset = get_or_create_dataset_path_from_recipe(
             dataset_recipe,
             path_datasets=path_datasets,
@@ -245,7 +234,7 @@ class HafniaDataset:
 
         Example: Defining split ratios and applying the transformation
 
-        >>> dataset = HafniaDataset.
+        >>> dataset = HafniaDataset.from_path(Path("path/to/dataset"))
         >>> split_ratios = {SplitName.TRAIN: 0.8, SplitName.VAL: 0.1, SplitName.TEST: 0.1}
         >>> dataset_with_splits = splits_by_ratios(dataset, split_ratios, seed=42)
         Or use the function as a
@@ -270,7 +259,7 @@ class HafniaDataset:
         splits based on the provided ratios.
 
         Example: Defining split ratios and applying the transformation
-        >>> dataset = HafniaDataset.
+        >>> dataset = HafniaDataset.from_path(Path("path/to/dataset"))
         >>> split_name = SplitName.TEST
         >>> split_ratios = {SplitName.TEST: 0.8, SplitName.VAL: 0.2}
         >>> dataset_with_splits = split_into_multiple_splits(dataset, split_name, split_ratios)
@@ -543,7 +532,7 @@ class HafniaDataset:
         primitive: Type[Primitive],
         task_name: Optional[str] = None,
         keep_sample_data: bool = False,
-    ) -> pl.DataFrame:
+    ) -> Optional[pl.DataFrame]:
         return table_transformations.create_primitive_table(
             samples_table=self.samples,
             PrimitiveType=primitive,
@@ -741,36 +730,6 @@ def check_hafnia_dataset_from_path(path_dataset: Path) -> None:
     dataset.check_dataset()
 
 
-def get_or_create_dataset_path_from_recipe(
-    dataset_recipe: Any,
-    force_redownload: bool = False,
-    path_datasets: Optional[Union[Path, str]] = None,
-) -> Path:
-    from hafnia.dataset.dataset_recipe.dataset_recipe import (
-        DatasetRecipe,
-        get_dataset_path_from_recipe,
-    )
-
-    recipe: DatasetRecipe = DatasetRecipe.from_implicit_form(dataset_recipe)
-    path_dataset = get_dataset_path_from_recipe(recipe, path_datasets=path_datasets)
-
-    if force_redownload:
-        shutil.rmtree(path_dataset, ignore_errors=True)
-
-    dataset_metadata_files = DatasetMetadataFilePaths.from_path(path_dataset)
-    if dataset_metadata_files.exists(raise_error=False):
-        return path_dataset
-
-    path_dataset.mkdir(parents=True, exist_ok=True)
-    path_recipe_json = path_dataset / FILENAME_RECIPE_JSON
-    path_recipe_json.write_text(recipe.model_dump_json(indent=4))
-
-    dataset: HafniaDataset = recipe.build()
-    dataset.write(path_dataset)
-
-    return path_dataset
-
-
 def available_dataset_versions_from_name(dataset_name: str) -> Dict[Version, "DatasetMetadataFilePaths"]:
     credentials: ResourceCredentials = get_read_credentials_by_name(dataset_name=dataset_name)
     return available_dataset_versions(credentials=credentials)
@@ -795,12 +754,13 @@ def select_version_from_available_versions(
 
     if version is None:
         str_versions = [str(v) for v in available_versions]
-        raise ValueError(f"Version must be specified. Available versions: {str_versions}")
-
+        raise ValueError(f"Version must be specified. Available versions: {str_versions}. ")
+
     if version == "latest":
         version_casted = max(available_versions)
         user_logger.info(f"'latest' version '{version_casted}' has been selected")
     else:
-        version_casted = version_from_string(version)
+        version_casted = version_from_string(version, raise_error=True)
 
     if version_casted not in available_versions:
         raise ValueError(f"Selected version '{version}' not found in available versions: {available_versions}")
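With `HafniaDataset.from_recipe` removed, loading via a recipe goes through an explicit `DatasetRecipe` that is then built. A hedged migration sketch, with placeholder dataset names:

```python
from hafnia.dataset.dataset_recipe.dataset_recipe import DatasetRecipe
from hafnia.dataset.hafnia_dataset import HafniaDataset

# 0.5.2 style (removed in 0.5.4): dataset = HafniaDataset.from_recipe("mnist")
recipe = DatasetRecipe.from_name_and_version_string("mnist:1.0.0")  # placeholder name:version
dataset = recipe.build()

# Loading by name with caching still goes through from_name().
dataset = HafniaDataset.from_name("midwest-vehicle-detection")
```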
hafnia/dataset/hafnia_dataset_types.py
CHANGED

@@ -470,7 +470,7 @@ class Sample(BaseModel):
         return image
 
     def draw_annotations(self, image: Optional[np.ndarray] = None) -> np.ndarray:
-        from hafnia.
+        from hafnia.dataset import image_visualizations
 
         if image is None:
             image = self.read_image()
hafnia/dataset/operations/dataset_stats.py
CHANGED

@@ -104,7 +104,8 @@ def calculate_primitive_counts(dataset: HafniaDataset) -> Dict[str, int]:
         name = task.primitive.__name__
         if task.name != task.primitive.default_task_name():
             name = f"{name}.{task.name}"
-
+        n_objects = 0 if objects is None else len(objects)
+        annotation_counts[name] = n_objects
     return annotation_counts
 
 
hafnia/dataset/primitives/classification.py
CHANGED

@@ -39,7 +39,7 @@ class Classification(Primitive):
     def draw(self, image: np.ndarray, inplace: bool = False, draw_label: bool = True) -> np.ndarray:
         if draw_label is False:
             return image
-        from hafnia.
+        from hafnia.dataset import image_visualizations
 
         class_name = self.get_class_name()
         if self.task_name == self.default_task_name():
hafnia/dataset/primitives/segmentation.py
CHANGED

@@ -4,9 +4,9 @@ import cv2
 import numpy as np
 from pydantic import Field
 
+from hafnia.dataset.colors import get_n_colors
 from hafnia.dataset.primitives.primitive import Primitive
 from hafnia.dataset.primitives.utils import get_class_name
-from hafnia.visualizations.colors import get_n_colors
 
 
 class Segmentation(Primitive):
hafnia/experiment/command_builder.py
CHANGED

@@ -103,7 +103,7 @@ def auto_save_command_builder_schema(
         bool_handling=bool_handling,
     )
 
-    path_schema = path_schema or path_of_function(cli_function).with_suffix(".json")
+    path_schema = path_schema or path_of_function(cli_function).with_suffix(".schema.json")
     launch_schema.to_json_file(path_schema)
     return path_schema
 
hafnia/platform/dataset_recipe.py
CHANGED

@@ -7,17 +7,20 @@ from flatten_dict import flatten
 from hafnia import http
 from hafnia.log import user_logger
 from hafnia.utils import pretty_print_list_as_table, timed
+from hafnia_cli.config import Config
 
 
 @timed("Get or create dataset recipe")
 def get_or_create_dataset_recipe(
     recipe: dict,
-    endpoint: str,
-    api_key: str,
     name: Optional[str] = None,
     overwrite: bool = False,
+    cfg: Optional[Config] = None,
 ) -> Optional[Dict]:
-
+    cfg = cfg or Config()
+
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     data = {"template": {"body": recipe}, "overwrite": overwrite}
     if name is not None:
         data["name"] = name  # type: ignore[assignment]
@@ -26,18 +29,22 @@ def get_or_create_dataset_recipe(
     return response
 
 
-def get_or_create_dataset_recipe_by_dataset_name(dataset_name: str,
-    return get_or_create_dataset_recipe(recipe=dataset_name,
+def get_or_create_dataset_recipe_by_dataset_name(dataset_name: str, cfg: Optional[Config] = None) -> Dict:
+    return get_or_create_dataset_recipe(recipe=dataset_name, cfg=cfg)
 
 
-def get_dataset_recipes(
-
+def get_dataset_recipes(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     dataset_recipes: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
     return dataset_recipes
 
 
-def get_dataset_recipe_by_id(dataset_recipe_id: str,
-
+def get_dataset_recipe_by_id(dataset_recipe_id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}/{dataset_recipe_id}"
     dataset_recipe_info: Dict = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
     if not dataset_recipe_info:
@@ -46,25 +53,30 @@ def get_dataset_recipe_by_id(dataset_recipe_id: str, endpoint: str, api_key: str
 
 
 def get_or_create_dataset_recipe_from_path(
-    path_recipe_json: Path,
+    path_recipe_json: Path, name: Optional[str] = None, cfg: Optional[Config] = None
 ) -> Dict:
     path_recipe_json = Path(path_recipe_json)
     if not path_recipe_json.exists():
         raise FileNotFoundError(f"Dataset recipe file '{path_recipe_json}' does not exist.")
     json_dict = json.loads(path_recipe_json.read_text())
-    return get_or_create_dataset_recipe(json_dict,
+    return get_or_create_dataset_recipe(json_dict, name=name, cfg=cfg)
 
 
-def delete_dataset_recipe_by_id(id: str,
-
+def delete_dataset_recipe_by_id(id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}/{id}"
     response = http.delete(endpoint=full_url, headers=headers)
     return response
 
 
 @timed("Get dataset recipe")
-def get_dataset_recipe_by_name(name: str,
-
+def get_dataset_recipe_by_name(name: str, cfg: Optional[Config] = None) -> Optional[Dict]:
+    cfg = cfg or Config()
+
+    endpoint = cfg.get_platform_endpoint("dataset_recipes")
+    headers = {"Authorization": cfg.api_key}
     full_url = f"{endpoint}?name__iexact={name}"
     dataset_recipes: List[Dict] = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
     if len(dataset_recipes) == 0:
@@ -77,11 +89,11 @@ def get_dataset_recipe_by_name(name: str, endpoint: str, api_key: str) -> Option
     return dataset_recipe
 
 
-def delete_dataset_recipe_by_name(name: str,
-    recipe_response = get_dataset_recipe_by_name(name,
+def delete_dataset_recipe_by_name(name: str, cfg: Optional[Config] = None) -> Optional[Dict]:
+    recipe_response = get_dataset_recipe_by_name(name, cfg=cfg)
 
     if recipe_response:
-        return delete_dataset_recipe_by_id(recipe_response["id"],
+        return delete_dataset_recipe_by_id(recipe_response["id"], cfg=cfg)
     return recipe_response
 
 
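These platform helpers now resolve the endpoint and API key from a `Config` object rather than taking them as separate arguments. A minimal before/after sketch (the recipe name is a placeholder):

```python
from hafnia.platform.dataset_recipe import get_dataset_recipe_by_name
from hafnia_cli.config import Config

# 0.5.2: get_dataset_recipe_by_name(name="my-recipe", endpoint=endpoint, api_key=api_key)
# 0.5.4: pass a Config, or omit it and a default Config() is created internally.
cfg = Config()
recipe = get_dataset_recipe_by_name(name="my-recipe", cfg=cfg)
if recipe is None:
    print("Recipe 'my-recipe' not found")
```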
|
hafnia/platform/datasets.py
CHANGED
|
@@ -83,8 +83,11 @@ def get_datasets(cfg: Optional[Config] = None) -> List[Dict[str, str]]:
|
|
|
83
83
|
|
|
84
84
|
|
|
85
85
|
@timed("Fetching dataset info.")
|
|
86
|
-
def get_dataset_id(dataset_name: str,
|
|
87
|
-
|
|
86
|
+
def get_dataset_id(dataset_name: str, cfg: Optional[Config] = None) -> str:
|
|
87
|
+
"""Get dataset ID by name from the Hafnia platform."""
|
|
88
|
+
cfg = cfg or Config()
|
|
89
|
+
endpoint = cfg.get_platform_endpoint("datasets")
|
|
90
|
+
headers = {"Authorization": cfg.api_key}
|
|
88
91
|
full_url = f"{endpoint}?name__iexact={dataset_name}"
|
|
89
92
|
dataset_responses: List[Dict] = http.fetch(full_url, headers=headers) # type: ignore[assignment]
|
|
90
93
|
if not dataset_responses:
|
|
@@ -186,9 +189,10 @@ def delete_dataset_completely_by_name(
|
|
|
186
189
|
|
|
187
190
|
|
|
188
191
|
@timed("Import dataset details to platform")
|
|
189
|
-
def upload_dataset_details(
|
|
192
|
+
def upload_dataset_details(data: dict, dataset_name: str, cfg: Optional[Config] = None) -> dict:
|
|
193
|
+
cfg = cfg or Config()
|
|
190
194
|
dataset_endpoint = cfg.get_platform_endpoint("datasets")
|
|
191
|
-
dataset_id = get_dataset_id(dataset_name,
|
|
195
|
+
dataset_id = get_dataset_id(dataset_name, cfg=cfg)
|
|
192
196
|
|
|
193
197
|
import_endpoint = f"{dataset_endpoint}/{dataset_id}/import"
|
|
194
198
|
headers = {"Authorization": cfg.api_key}
|
hafnia/platform/experiment.py
CHANGED
@@ -1,7 +1,8 @@
-from typing import Dict, List
+from typing import Dict, List, Optional
 
 from hafnia import http
 from hafnia.utils import pretty_print_list_as_table, timed
+from hafnia_cli.config import Config
 
 
 @timed("Creating experiment.")
@@ -11,10 +12,11 @@ def create_experiment(
     trainer_id: str,
     exec_cmd: str,
     environment_id: str,
-
-    api_key: str,
+    cfg: Optional[Config] = None,
 ) -> Dict:
-
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("experiments")
+    headers = {"Authorization": cfg.api_key}
     response = http.post(
         endpoint,
         headers=headers,
@@ -30,8 +32,10 @@ def create_experiment(
 
 
 @timed("Fetching environment info.")
-def get_environments(
-
+def get_environments(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("experiment_environments")
+    headers = {"Authorization": cfg.api_key}
     envs: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
     return envs
 
@@ -54,8 +58,8 @@ def pretty_print_training_environments(envs: List[Dict]) -> None:
     )
 
 
-def get_exp_environment_id(name: str,
-    envs = get_environments(
+def get_exp_environment_id(name: str, cfg: Optional[Config] = None) -> str:
+    envs = get_environments(cfg=cfg)
 
     for env in envs:
         if env["name"] == name:
hafnia/platform/trainer_package.py
CHANGED

@@ -1,39 +1,85 @@
+import json
 from pathlib import Path
 from typing import Dict, List, Optional
 
 from hafnia import http
 from hafnia.log import user_logger
-from hafnia.utils import
+from hafnia.utils import (
+    archive_dir,
+    get_trainer_package_path,
+    pretty_print_list_as_table,
+    timed,
+)
+from hafnia_cli.config import Config
 
 
 @timed("Uploading trainer package.")
-def create_trainer_package(
-    source_dir
+def create_trainer_package(
+    source_dir: Path,
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+    cmd: Optional[str] = None,
+    cfg: Optional[Config] = None,
+) -> Dict:
+    # Ensure the path is absolute to handle '.' paths are given an appropriate name.
+    source_dir = Path(source_dir).resolve()
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
+
     path_trainer = get_trainer_package_path(trainer_name=source_dir.name)
-
+    name = name or path_trainer.stem
+    zip_path, package_files = archive_dir(source_dir, output_path=path_trainer)
     user_logger.info(f"Trainer package created and stored in '{path_trainer}'")
 
-
+    cmd_builder_schemas = auto_discover_cmd_builder_schemas(package_files)
+    cmd = cmd or "python scripts/train.py"
+    description = description or f"Trainer package for '{name}'. Created with Hafnia SDK Cli."
+    headers = {"Authorization": cfg.api_key, "accept": "application/json"}
     data = {
-        "name":
-        "description":
+        "name": name,
+        "description": description,
+        "default_command": cmd,
         "file": (zip_path.name, Path(zip_path).read_bytes()),
     }
+    if len(cmd_builder_schemas) == 0:
+        data["command_builder_schemas"] = json.dumps(cmd_builder_schemas)
+    user_logger.info(f"Uploading trainer package '{name}' to platform...")
     response = http.post(endpoint, headers=headers, data=data, multipart=True)
-
+    user_logger.info(f"Trainer package uploaded successfully with id '{response['id']}'")
+    return response
+
+
+def auto_discover_cmd_builder_schemas(package_files: List[Path]) -> List[Dict]:
+    """
+    Auto-discover command builder schema files in the trainer package files.
+    Looks for files ending with '.schema.json' and loads their content as JSON.
+    """
+    cmd_builder_schema_files = [file for file in package_files if file.name.endswith(".schema.json")]
+    cmd_builder_schemas = []
+    for cmd_builder_schema_file in cmd_builder_schema_files:
+        cmd_builder_schema = json.loads(cmd_builder_schema_file.read_text())
+        cmd_entrypoint = cmd_builder_schema.get("cmd", None)
+        user_logger.info(f"Found command builder schema file for entry point '{cmd_entrypoint}'")
+        cmd_builder_schemas.append(cmd_builder_schema)
+    return cmd_builder_schemas
 
 
 @timed("Get trainer package.")
-def get_trainer_package_by_id(id: str,
+def get_trainer_package_by_id(id: str, cfg: Optional[Config] = None) -> Dict:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
     full_url = f"{endpoint}/{id}"
-    headers = {"Authorization": api_key}
+    headers = {"Authorization": cfg.api_key}
     response: Dict = http.fetch(full_url, headers=headers)  # type: ignore[assignment]
+
     return response
 
 
 @timed("Get trainer packages")
-def get_trainer_packages(
-
+def get_trainer_packages(cfg: Optional[Config] = None) -> List[Dict]:
+    cfg = cfg or Config()
+    endpoint = cfg.get_platform_endpoint("trainers")
+    headers = {"Authorization": cfg.api_key}
    trainers: List[Dict] = http.fetch(endpoint, headers=headers)  # type: ignore[assignment]
    return trainers
 
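The upload path now derives the name, description and default command when they are not given, and auto-discovers `*.schema.json` command-builder schemas among the archived files. A hedged usage sketch (the directory path and name are placeholders):

```python
from pathlib import Path

from hafnia.platform.trainer_package import create_trainer_package

# Only the source directory is required; name defaults to the package stem,
# the command to "python scripts/train.py", and cfg to a default Config().
response = create_trainer_package(
    source_dir=Path("./my-trainer"),  # placeholder directory
    name="my-trainer",
    description="Example trainer package",
    cmd="python scripts/train.py",
)
print(response["id"])
```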
hafnia/utils.py
CHANGED
@@ -6,7 +6,7 @@ from collections.abc import Sized
 from datetime import datetime
 from functools import wraps
 from pathlib import Path
-from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple
 from zipfile import ZipFile
 
 import more_itertools
@@ -113,20 +113,22 @@ def archive_dir(
     recipe_path: Path,
     output_path: Optional[Path] = None,
     path_ignore_file: Optional[Path] = None,
-) -> Path:
+) -> Tuple[Path, List[Path]]:
     recipe_zip_path = output_path or recipe_path / "trainer.zip"
     assert recipe_zip_path.suffix == ".zip", "Output path must be a zip file"
     recipe_zip_path.parent.mkdir(parents=True, exist_ok=True)
 
     user_logger.info(f" Creating zip archive of '{recipe_path}'")
-
+    include_files_generator = filter_trainer_package_files(recipe_path, path_ignore_file)
+    included_files = []
     with ZipFile(recipe_zip_path, "w", compression=zipfile.ZIP_STORED, allowZip64=True) as zip_ref:
-        for str_filepath in
+        for str_filepath in include_files_generator:
             full_path = recipe_path / str_filepath
             zip_ref.write(full_path, str_filepath)
+            included_files.append(full_path)
     show_trainer_package_content(recipe_zip_path)
 
-    return recipe_zip_path
+    return recipe_zip_path, included_files
 
 
 def size_human_readable(size_bytes: int, suffix="B") -> str:
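Because `archive_dir` now returns both the zip path and the list of archived files, existing callers must unpack a tuple. A small sketch, assuming a trainer directory at `./my-trainer`:

```python
from pathlib import Path

from hafnia.utils import archive_dir

# 0.5.2: zip_path = archive_dir(Path("./my-trainer"))
# 0.5.4: the included files are returned as well (used for schema auto-discovery).
zip_path, included_files = archive_dir(Path("./my-trainer"))
print(zip_path, len(included_files))
```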
{hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: hafnia
-Version: 0.5.2
+Version: 0.5.4
 Summary: Python SDK for communication with Hafnia platform.
 Author-email: Milestone Systems <hafniaplatform@milestone.dk>
 License-File: LICENSE
@@ -64,9 +64,16 @@ multiple GPUs and instances if needed.
 ## Getting started: Configuration
 To get started with Hafnia:
 
-1. Install `hafnia` with your favorite python package manager
+1. Install `hafnia` with your favorite python package manager:
+
+   ```bash
+   # With uv package manager
+   uv add hafnia
+
+   # With pip
+   pip install hafnia
+   ```
 
-    `pip install hafnia`
 1. Sign in to the [Hafnia Platform](https://hafnia.milestonesys.com/).
 1. Create an API KEY for Training aaS. For more instructions, follow this
    [guide](https://hafnia.readme.io/docs/create-an-api-key).
@@ -94,11 +101,9 @@ With Hafnia configured on your local machine, it is now possible to download
 and explore the dataset sample with a python script:
 
 ```python
-from hafnia.data import get_dataset_path
 from hafnia.dataset.hafnia_dataset import HafniaDataset
 
-
-path_dataset = get_dataset_path("midwest-vehicle-detection")
+dataset = HafniaDataset.from_name("midwest-vehicle-detection")
 ```
 
 This will download the dataset sample `midwest-vehicle-detection` to the local `.data/datasets/` folder
@@ -124,11 +129,10 @@ midwest-vehicle-detection
 3 directories, 217 files
 ```
 
-
-for loading/saving, managing and interacting with the dataset.
+We provide the `HafniaDataset` format for loading/saving, managing and interacting with the dataset.
 
 We recommend the example script [examples/example_hafnia_dataset.py](examples/example_hafnia_dataset.py)
-for a
+for a quick introduction on the `HafniaDataset`.
 
 Below is a short introduction to the `HafniaDataset` class.
 
@@ -136,7 +140,7 @@ Below is a short introduction to the `HafniaDataset` class.
 from hafnia.dataset.hafnia_dataset import HafniaDataset, Sample
 
 # Load dataset from path
-dataset = HafniaDataset.
+dataset = HafniaDataset.from_path(path_dataset)
 
 # Or get dataset directly by name
 dataset = HafniaDataset.from_name("midwest-vehicle-detection")
{hafnia-0.5.2.dist-info → hafnia-0.5.4.dist-info}/RECORD
CHANGED

@@ -1,17 +1,17 @@
 hafnia/__init__.py,sha256=0qpjWfVbcfKzLSnfUW6RdclSGkesMQRFS-n_aTJJoSE,179
 hafnia/http.py,sha256=PkEuanlUKeERABXttaGAJT6hOZ1_B2CwJodbUV4uZdg,3710
 hafnia/log.py,sha256=sWF8tz78yBtwZ9ddzm19L1MBSBJ3L4G704IGeT1_OEU,784
-hafnia/
-hafnia/
-hafnia/
-hafnia/data/factory.py,sha256=kHkvOtBUbwaShZBGf1kZzocDJBn_1dHHLrQxnUpJmfY,778
-hafnia/dataset/dataset_details_uploader.py,sha256=H_zz67bBwbgo4StUwBNmH89WlqydIc-tEQbrRnZDwgg,24161
+hafnia/utils.py,sha256=YqlrRGmaVuYilT0YaZNNlZDGsBLfUXnjGy1vxsIUFHc,8834
+hafnia/dataset/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
+hafnia/dataset/dataset_details_uploader.py,sha256=TfwOVkuLqmWQXAYZ8FomvJC9IAnomG-4VZTPxf6sgWc,24161
 hafnia/dataset/dataset_helpers.py,sha256=N8W_ioDlxP2VvNJXzqXLDbcEqgPKz0WyPNOBakHoBUc,6443
 hafnia/dataset/dataset_names.py,sha256=42_UKrDwcKEW48oTbtBaeyi5qVFVaMAj8vRvDv-mcEI,3616
-hafnia/dataset/hafnia_dataset.py,sha256=
-hafnia/dataset/hafnia_dataset_types.py,sha256=
+hafnia/dataset/hafnia_dataset.py,sha256=7zxCgqWwpVpZEGdnwmEqEYYFNekx_vd05zEj7bkitLE,37154
+hafnia/dataset/hafnia_dataset_types.py,sha256=38sCW_ISlWuG0kdnp_MZdL4OVFSEt2ULVfoTpgDW3lk,26841
+hafnia/dataset/image_visualizations.py,sha256=rB7c-KK-qq0BsSdkaFxCAHOOCTXTUQx0VMEhib7ig0k,7509
 hafnia/dataset/license_types.py,sha256=b1Jt5e8N89sujIs4T9y39sJEkzpAwCoLDTHDTpkiEOI,2166
-hafnia/dataset/
+hafnia/dataset/torch_helpers.py,sha256=Qj8pV5P8tGw6F3W2Rj9Kel7O8hWuUqiKfTdXd3h5UOo,14080
+hafnia/dataset/dataset_recipe/dataset_recipe.py,sha256=YHnSG4lDYLjRsnrybOrNNGASoMhOaLo3PaxiifIwHQ4,18484
 hafnia/dataset/dataset_recipe/recipe_transforms.py,sha256=j3Oiytt3LI2rCaJid7Y44oT9MXvlZVqvZanngMebIWg,3088
 hafnia/dataset/dataset_recipe/recipe_types.py,sha256=AcrG6gpRt3Igl-CCJ60uyh-WkfI1NCnQ55M8yClSI9Q,5328
 hafnia/dataset/format_conversions/format_coco.py,sha256=7GjeF016ZBaKxu-VYiqXxuPw8HuuODV1cxc2TbDDZBw,19628
@@ -20,44 +20,42 @@ hafnia/dataset/format_conversions/format_image_classification_folder.py,sha256=A
 hafnia/dataset/format_conversions/format_yolo.py,sha256=zvCHo2L_0mPJScMbDtwvZUts9UX2ERKhhYbY31Q6tQA,9912
 hafnia/dataset/format_conversions/torchvision_datasets.py,sha256=sC8DgAt10PEaCHFk_Lm-dIzr_0EF-2g24kG9EINYk7c,12096
 hafnia/dataset/operations/dataset_s3_storage.py,sha256=xPC77Og47xTpI0JBFAR1pgb5u7l18byAA6p7IlpnpGE,8971
-hafnia/dataset/operations/dataset_stats.py,sha256=
+hafnia/dataset/operations/dataset_stats.py,sha256=uzQJWOoAM7YDLLeUhPVBpE1vFM38AOriDlPxKs2hj5M,11986
 hafnia/dataset/operations/dataset_transformations.py,sha256=qUNno0rAT1A452uzlR-k1WbatyY9VuMp1QJjkMg9GzE,19495
 hafnia/dataset/operations/table_transformations.py,sha256=mdjUE1lSQ7QyONjQapSHDg1MkYuKaflcoVUq1Y6Lkqc,13606
 hafnia/dataset/primitives/__init__.py,sha256=xFLJ3R7gpbuQnNJuFhuu836L3nicwoaY5aHkqk7Bbr8,927
 hafnia/dataset/primitives/bbox.py,sha256=QJJBebltOd9J3idisp3QdX0gCgz6P5xlIlGbth19fG0,6669
 hafnia/dataset/primitives/bitmask.py,sha256=Q7RiNYvMDlcFPkXAWXDJkCIERjnUTCrHu6VeEPX1jEA,7212
-hafnia/dataset/primitives/classification.py,sha256=
+hafnia/dataset/primitives/classification.py,sha256=YAMwO_gSOfDiXLUrEq-ObzvChK478rwGTP-RBhWt1LE,2662
 hafnia/dataset/primitives/point.py,sha256=VzCNLTQOPA6wyJVVKddZHGhltkep6V_B7pg5pk7rd9Y,879
 hafnia/dataset/primitives/polygon.py,sha256=jZPNVwEs4A3IMJQzI_dlcDDfgju7hdoVc677tMAdEbQ,6271
 hafnia/dataset/primitives/primitive.py,sha256=Wvby0sCGgYj8ec39PLcHsmip5VKL96ZSCz2cGIBjPqM,1289
-hafnia/dataset/primitives/segmentation.py,sha256=
+hafnia/dataset/primitives/segmentation.py,sha256=ACexXYavoFsqviCRA76MDZUvEoBZLO_OTDCl2Px_rV4,2010
 hafnia/dataset/primitives/utils.py,sha256=3gT1as-xXEj8CamoIuBb9gQwUN9Ae9qnqtqF_uEe0zo,1993
 hafnia/experiment/__init__.py,sha256=OEFE6HqhO5zcTCLZcPcPVjIg7wMFFnvZ1uOtAVhRz7M,85
-hafnia/experiment/command_builder.py,sha256=
+hafnia/experiment/command_builder.py,sha256=F1szeVKD68_W2oHbp-pjkjAo0bbjw9TymQwLjF7QVhE,27587
 hafnia/experiment/hafnia_logger.py,sha256=BHIOLAds_3JxT0cev_ikUH0XQVIxBJTkcBSx2Q_SIk0,10894
 hafnia/platform/__init__.py,sha256=L_Q7CNpsJ0HMNPy_rLlLK5RhmuCU7IF4BchxKv6amYc,782
 hafnia/platform/builder.py,sha256=kUEuj5-qtL1uk5v2tUvOCREn5yV-G4Fr6F31haIAb5E,5808
-hafnia/platform/dataset_recipe.py,sha256=
-hafnia/platform/datasets.py,sha256=
+hafnia/platform/dataset_recipe.py,sha256=UNvsDEbByT_WPuslILLGFsqXb87g65K5xz-Q2ZzvcKs,4242
+hafnia/platform/datasets.py,sha256=z7bQz1SIR-vVVjRJD1FwEPw2X5QPyE_J1Ea1M6XPXwc,9612
 hafnia/platform/download.py,sha256=e73Pm0afwRPTHxBvRy0gUZSFfDuePHPnfasyhaZ-KGQ,5019
-hafnia/platform/experiment.py,sha256=
+hafnia/platform/experiment.py,sha256=qNg9CKBLIYnOb-bMaEDecv-PptP4_ubQJunXGwdSiaQ,2049
 hafnia/platform/s5cmd_utils.py,sha256=hHsGPJ1S9_hFIVfCO-efvTF4qbLYreK1nl3VC5caU1w,9491
-hafnia/platform/trainer_package.py,sha256=
-hafnia/visualizations/colors.py,sha256=003eAJVnBal4abaYIIpsrT7erIOIjTUHHYVJ1Tj1CDc,5226
-hafnia/visualizations/image_visualizations.py,sha256=rB7c-KK-qq0BsSdkaFxCAHOOCTXTUQx0VMEhib7ig0k,7509
+hafnia/platform/trainer_package.py,sha256=MTmiPm02uy5TIUDRjgCSETL-Q_esxha0NWtHO1h53dw,3949
 hafnia_cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hafnia_cli/__main__.py,sha256=CqD_7RfbfwB6MED3WZ8WxclrFxWcRMtZ5A1Snnst3GM,1761
 hafnia_cli/config.py,sha256=X0dJBYH-7mNAFkHgeZvDbawlQqoyCwoY4C-QhlyYCA0,7695
 hafnia_cli/consts.py,sha256=uCpYX44NCu_Zvte0QwChunxOo-qqhcaJRSYDAIsoJ8A,972
 hafnia_cli/dataset_cmds.py,sha256=JfSj7Cei1T2oYUXP1bpz63uQopgL3R_dMMYnPGGcuU8,2072
-hafnia_cli/dataset_recipe_cmds.py,sha256=
-hafnia_cli/experiment_cmds.py,sha256=
+hafnia_cli/dataset_recipe_cmds.py,sha256=cprz0RMxuPK8hLLeu3V5MojtKZlSSNb3THBo2pZzdiM,2589
+hafnia_cli/experiment_cmds.py,sha256=ZKSy2W-ke33MhXF5BKWdkQO0o31tqIb4Ld-l_cfc5Lw,7453
 hafnia_cli/keychain.py,sha256=bNyjjULVQu7kV338wUC65UvbCwmSGOmEjKWPLIQjT0k,2555
-hafnia_cli/profile_cmds.py,sha256=
+hafnia_cli/profile_cmds.py,sha256=QVTK_hLskuiod9Nmgqld61-McWQEyCMWAgvDlMtB7oE,3709
 hafnia_cli/runc_cmds.py,sha256=7P5TjF6KA9K4OKPG1qC_0gteXfLJbXlA858WWrosoGQ,5098
-hafnia_cli/trainer_package_cmds.py,sha256=
-hafnia-0.5.
-hafnia-0.5.
-hafnia-0.5.
-hafnia-0.5.
-hafnia-0.5.
+hafnia_cli/trainer_package_cmds.py,sha256=RMSiinwzVlK-kDFPuwZ98EoSpw61aaXa6IyRj3UEVlw,3307
+hafnia-0.5.4.dist-info/METADATA,sha256=MIdQYVCwEQFQr6GLFtJHpNCwnaAhppQeeB1QrL3PlIE,19258
+hafnia-0.5.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+hafnia-0.5.4.dist-info/entry_points.txt,sha256=j2jsj1pqajLAiSOnF7sq66A3d1SVeHPKVTVyIFzipSA,52
+hafnia-0.5.4.dist-info/licenses/LICENSE,sha256=wLZw1B7_mod_CO1H8LXqQgfqlWD6QceJR8--LJYRZGE,1078
+hafnia-0.5.4.dist-info/RECORD,,
hafnia_cli/dataset_recipe_cmds.py
CHANGED

@@ -28,8 +28,7 @@ def cmd_get_or_create_dataset_recipe(cfg: Config, path_json_recipe: Path, name:
     """Create Hafnia dataset recipe from dataset recipe JSON file"""
     from hafnia.platform.dataset_recipe import get_or_create_dataset_recipe_from_path
 
-
-    recipe = get_or_create_dataset_recipe_from_path(path_json_recipe, endpoint=endpoint, api_key=cfg.api_key, name=name)
+    recipe = get_or_create_dataset_recipe_from_path(path_json_recipe, name=name, cfg=cfg)
 
     if recipe is None:
         raise click.ClickException("Failed to create dataset recipe.")
@@ -44,8 +43,7 @@ def cmd_list_dataset_recipes(cfg: Config, limit: Optional[int]) -> None:
     """List available dataset recipes"""
     from hafnia.platform.dataset_recipe import get_dataset_recipes, pretty_print_dataset_recipes
 
-
-    recipes = get_dataset_recipes(endpoint=endpoint, api_key=cfg.api_key)
+    recipes = get_dataset_recipes(cfg=cfg)
     # Sort recipes to have the most recent first
     recipes = sorted(recipes, key=lambda x: x["created_at"], reverse=True)
     if limit is not None:
@@ -61,12 +59,10 @@ def cmd_delete_dataset_recipe(cfg: Config, id: Optional[str], name: Optional[str
     """Delete a dataset recipe by ID or name"""
     from hafnia.platform.dataset_recipe import delete_dataset_recipe_by_id, delete_dataset_recipe_by_name
 
-    endpoint = cfg.get_platform_endpoint("dataset_recipes")
-
     if id is not None:
-        return delete_dataset_recipe_by_id(id=id,
+        return delete_dataset_recipe_by_id(id=id, cfg=cfg)
     if name is not None:
-        dataset_recipe = delete_dataset_recipe_by_name(name=name,
+        dataset_recipe = delete_dataset_recipe_by_name(name=name, cfg=cfg)
         if dataset_recipe is None:
             raise click.ClickException(f"Dataset recipe with name '{name}' was not found.")
 
hafnia_cli/experiment_cmds.py
CHANGED
@@ -27,7 +27,7 @@ def cmd_view_environments(cfg: Config):
     """
     from hafnia.platform import get_environments, pretty_print_training_environments
 
-    envs = get_environments(cfg
+    envs = get_environments(cfg=cfg)
 
     pretty_print_training_environments(envs)
 
@@ -132,7 +132,7 @@ def cmd_create_experiment(
     """
     from hafnia.platform import create_experiment, get_exp_environment_id
 
-    dataset_recipe_response =
+    dataset_recipe_response = get_dataset_recipe_by_identifiers(
         cfg=cfg,
         dataset_name=dataset,
         dataset_recipe_name=dataset_recipe,
@@ -140,13 +140,8 @@ def cmd_create_experiment(
     )
     dataset_recipe_id = dataset_recipe_response["id"]
 
-    trainer_id =
-
-        trainer_path=trainer_path,
-        trainer_id=trainer_id,
-    )
-
-    env_id = get_exp_environment_id(environment, cfg.get_platform_endpoint("experiment_environments"), cfg.api_key)
+    trainer_id = get_trainer_package_by_identifiers(cfg=cfg, trainer_path=trainer_path, trainer_id=trainer_id)
+    env_id = get_exp_environment_id(environment, cfg=cfg)
 
     experiment = create_experiment(
         experiment_name=name,
@@ -154,8 +149,7 @@ def cmd_create_experiment(
         trainer_id=trainer_id,
         exec_cmd=cmd,
         environment_id=env_id,
-
-        api_key=cfg.api_key,
+        cfg=cfg,
     )
 
     experiment_properties = {
@@ -172,7 +166,7 @@ def cmd_create_experiment(
         print(f" {key}: {value}")
 
 
-def
+def get_dataset_recipe_by_identifiers(
     cfg: Config,
     dataset_name: Optional[str],
     dataset_recipe_name: Optional[str],
@@ -186,18 +180,17 @@ def get_dataset_recipe_by_dataset_identifies(
             "Multiple dataset identifiers have been provided. Define only one dataset identifier."
         )
 
-    dataset_recipe_endpoint = cfg.get_platform_endpoint("dataset_recipes")
     if dataset_name:
-        return get_or_create_dataset_recipe_by_dataset_name(dataset_name,
+        return get_or_create_dataset_recipe_by_dataset_name(dataset_name, cfg=cfg)
 
     if dataset_recipe_name:
-        recipe = get_dataset_recipe_by_name(dataset_recipe_name,
+        recipe = get_dataset_recipe_by_name(dataset_recipe_name, cfg=cfg)
         if recipe is None:
             raise click.ClickException(f"Dataset recipe '{dataset_recipe_name}' was not found in the dataset library.")
         return recipe
 
     if dataset_recipe_id:
-        return get_dataset_recipe_by_id(dataset_recipe_id,
+        return get_dataset_recipe_by_id(dataset_recipe_id, cfg=cfg)
 
     raise click.MissingParameter(
         "At least one dataset identifier must be provided. Set one of the following:\n"
@@ -207,7 +200,7 @@ def get_dataset_recipe_by_dataset_identifies(
     )
 
 
-def
+def get_trainer_package_by_identifiers(
     cfg: Config,
     trainer_path: Optional[Path],
     trainer_id: Optional[str],
@@ -223,17 +216,14 @@ def get_trainer_package_by_identifies(
         trainer_path = Path(trainer_path)
         if not trainer_path.exists():
             raise click.ClickException(f"Trainer package path '{trainer_path}' does not exist.")
-
-            trainer_path,
-            cfg
-            cfg.api_key,
+        response = create_trainer_package(
+            source_dir=trainer_path,
+            cfg=cfg,
         )
-        return
+        return response["id"]
 
     if trainer_id:
-        trainer_response = get_trainer_package_by_id(
-            id=trainer_id, endpoint=cfg.get_platform_endpoint("trainers"), api_key=cfg.api_key
-        )
+        trainer_response = get_trainer_package_by_id(id=trainer_id, cfg=cfg)
         return trainer_response["id"]
 
     raise click.MissingParameter(
hafnia_cli/profile_cmds.py
CHANGED
@@ -6,10 +6,15 @@ import hafnia_cli.consts as consts
 from hafnia_cli.config import Config, ConfigSchema
 
 
-@click.group()
-
+@click.group(invoke_without_command=True)
+@click.pass_context
+def profile(ctx):
     """Manage profile."""
-
+    if ctx.invoked_subcommand is None:
+        # No subcommand provided, show active profile and help
+        cfg = ctx.obj
+        profile_show(cfg)
+        click.echo("\n" + ctx.get_help())
 
 
 @profile.command("ls")
hafnia_cli/trainer_package_cmds.py
CHANGED

@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Optional
+from typing import Dict, Optional
 
 import click
 
@@ -21,12 +21,60 @@ def cmd_list_trainer_packages(cfg: Config, limit: Optional[int]) -> None:
 
     from hafnia.platform.trainer_package import get_trainer_packages, pretty_print_trainer_packages
 
-
-    trainers = get_trainer_packages(endpoint, cfg.api_key)
+    trainers = get_trainer_packages(cfg=cfg)
 
     pretty_print_trainer_packages(trainers, limit=limit)
 
 
+@trainer_package.command(name="create")
+@click.pass_obj
+@click.argument(
+    "path",
+    type=Path,
+)
+@click.option(
+    "-n",
+    "--name",
+    type=str,
+    default=None,
+    help="Name of the trainer package.",
+)
+@click.option(
+    "-d",
+    "--description",
+    type=str,
+    default=None,
+    help="Description of the trainer package.",
+)
+@click.option(
+    "--cmd",
+    type=Optional[str],
+    default=None,
+    show_default=True,
+    help="Default command to run the trainer package.",
+)
+def cmd_create_trainer_package(
+    cfg: Config,
+    path: Path,
+    cmd: Optional[str] = None,
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+) -> Dict:
+    """Create a trainer package on the platform"""
+    from hafnia.platform.trainer_package import create_trainer_package
+
+    path_trainer = Path(path).resolve()
+    trainer_response = create_trainer_package(
+        source_dir=path_trainer,
+        name=name,
+        description=description,
+        cmd=cmd,
+        cfg=cfg,
+    )
+
+    return trainer_response
+
+
 @trainer_package.command(name="create-zip")
 @click.argument("source")
 @click.option(
@@ -46,7 +94,7 @@ def cmd_create_trainer_package_zip(source: str, output: str) -> None:
         raise click.ClickException(consts.ERROR_TRAINER_PACKAGE_FILE_FORMAT)
 
     path_source = Path(source)
-    path_output_zip = archive_dir(path_source, path_output_zip)
+    path_output_zip, _ = archive_dir(path_source, path_output_zip)
 
 
 @trainer_package.command(name="view-zip")
hafnia/data/__init__.py
DELETED
hafnia/data/factory.py
DELETED
@@ -1,22 +0,0 @@
-from pathlib import Path
-from typing import Any
-
-from hafnia import utils
-from hafnia.dataset.hafnia_dataset import HafniaDataset, get_or_create_dataset_path_from_recipe
-
-
-def load_dataset(recipe: Any, force_redownload: bool = False) -> HafniaDataset:
-    """Load a dataset either from a local path or from the Hafnia platform."""
-
-    path_dataset = get_dataset_path(recipe, force_redownload=force_redownload)
-    dataset = HafniaDataset.from_path(path_dataset)
-    return dataset
-
-
-def get_dataset_path(recipe: Any, force_redownload: bool = False) -> Path:
-    if utils.is_hafnia_cloud_job():
-        return utils.get_dataset_path_in_hafnia_cloud()
-
-    path_dataset = get_or_create_dataset_path_from_recipe(recipe, force_redownload=force_redownload)
-
-    return path_dataset
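With `hafnia.data` removed, the old `load_dataset`/`get_dataset_path` entry points map onto `HafniaDataset` itself, mirroring the README change above. A hedged migration sketch:

```python
from hafnia.dataset.hafnia_dataset import HafniaDataset

# 0.5.2:
#   from hafnia.data import load_dataset
#   dataset = load_dataset("midwest-vehicle-detection")
# 0.5.4: load (and cache) the dataset directly by name.
dataset = HafniaDataset.from_name("midwest-vehicle-detection")
```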