cognite-toolkit 0.7.30__py3-none-any.whl → 0.7.39__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- cognite_toolkit/_cdf.py +5 -6
- cognite_toolkit/_cdf_tk/apps/__init__.py +2 -0
- cognite_toolkit/_cdf_tk/apps/_core_app.py +7 -1
- cognite_toolkit/_cdf_tk/apps/_download_app.py +2 -2
- cognite_toolkit/_cdf_tk/apps/_dump_app.py +1 -1
- cognite_toolkit/_cdf_tk/apps/_import_app.py +41 -0
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py +177 -2
- cognite_toolkit/_cdf_tk/builders/_raw.py +1 -1
- cognite_toolkit/_cdf_tk/client/_toolkit_client.py +9 -9
- cognite_toolkit/_cdf_tk/client/api/infield.py +38 -33
- cognite_toolkit/_cdf_tk/client/api/{canvas.py → legacy/canvas.py} +15 -7
- cognite_toolkit/_cdf_tk/client/api/{charts.py → legacy/charts.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_data_modeling.py → legacy/extended_data_modeling.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_files.py → legacy/extended_files.py} +2 -2
- cognite_toolkit/_cdf_tk/client/api/{extended_functions.py → legacy/extended_functions.py} +9 -9
- cognite_toolkit/_cdf_tk/client/api/{extended_raw.py → legacy/extended_raw.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_timeseries.py → legacy/extended_timeseries.py} +5 -2
- cognite_toolkit/_cdf_tk/client/api/{location_filters.py → legacy/location_filters.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/legacy/robotics/__init__.py +8 -0
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/capabilities.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/data_postprocessing.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/frames.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/locations.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/maps.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/robots.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/{search_config.py → legacy/search_config.py} +5 -1
- cognite_toolkit/_cdf_tk/client/api/migration.py +2 -3
- cognite_toolkit/_cdf_tk/client/api/project.py +9 -8
- cognite_toolkit/_cdf_tk/client/api/search.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/streams.py +21 -17
- cognite_toolkit/_cdf_tk/client/api/three_d.py +343 -9
- cognite_toolkit/_cdf_tk/client/data_classes/api_classes.py +13 -0
- cognite_toolkit/_cdf_tk/client/data_classes/base.py +12 -32
- cognite_toolkit/_cdf_tk/client/data_classes/instance_api.py +18 -13
- cognite_toolkit/_cdf_tk/client/data_classes/legacy/__init__.py +0 -0
- cognite_toolkit/_cdf_tk/client/data_classes/{canvas.py → legacy/canvas.py} +1 -1
- cognite_toolkit/_cdf_tk/client/data_classes/three_d.py +65 -0
- cognite_toolkit/_cdf_tk/client/testing.py +24 -16
- cognite_toolkit/_cdf_tk/commands/__init__.py +1 -0
- cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +8 -2
- cognite_toolkit/_cdf_tk/commands/_migrate/creators.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +35 -4
- cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +149 -14
- cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py +1 -0
- cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +19 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +220 -3
- cognite_toolkit/_cdf_tk/commands/_profile.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_purge.py +9 -11
- cognite_toolkit/_cdf_tk/commands/build_cmd.py +1 -1
- cognite_toolkit/_cdf_tk/commands/build_v2/__init__.py +0 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py +241 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_input.py +85 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_issues.py +27 -0
- cognite_toolkit/_cdf_tk/commands/dump_resource.py +4 -4
- cognite_toolkit/_cdf_tk/commands/run.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_data_cruds.py +2 -2
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/auth.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/classic.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py +22 -20
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py +14 -2
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/industrial_tool.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/location.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/raw.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/robotics.py +1 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/transformation.py +49 -14
- cognite_toolkit/_cdf_tk/data_classes/_module_toml.py +1 -0
- cognite_toolkit/_cdf_tk/resource_classes/search_config.py +1 -1
- cognite_toolkit/_cdf_tk/resource_classes/workflow_version.py +164 -5
- cognite_toolkit/_cdf_tk/storageio/_applications.py +2 -2
- cognite_toolkit/_cdf_tk/storageio/_file_content.py +1 -2
- cognite_toolkit/_cdf_tk/storageio/_instances.py +1 -1
- cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py +10 -1
- cognite_toolkit/_cdf_tk/storageio/selectors/_three_d.py +34 -0
- cognite_toolkit/_cdf_tk/utils/cdf.py +1 -1
- cognite_toolkit/_cdf_tk/utils/http_client/__init__.py +28 -0
- cognite_toolkit/_cdf_tk/utils/http_client/_client.py +3 -2
- cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py +6 -0
- cognite_toolkit/_cdf_tk/utils/http_client/_data_classes2.py +67 -7
- cognite_toolkit/_cdf_tk/utils/http_client/_tracker.py +5 -2
- cognite_toolkit/_cdf_tk/utils/interactive_select.py +51 -4
- cognite_toolkit/_cdf_tk/validation.py +4 -0
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.30.dist-info → cognite_toolkit-0.7.39.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.30.dist-info → cognite_toolkit-0.7.39.dist-info}/RECORD +119 -113
- {cognite_toolkit-0.7.30.dist-info → cognite_toolkit-0.7.39.dist-info}/WHEEL +1 -1
- cognite_toolkit/_cdf_tk/client/api/robotics/__init__.py +0 -3
- cognite_toolkit/_cdf_tk/prototypes/import_app.py +0 -41
- /cognite_toolkit/_cdf_tk/{prototypes/commands → client/api/legacy}/__init__.py +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{dml.py → legacy/dml.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{fixed_transformations.py → legacy/fixed_transformations.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/api.py +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/utlis.py +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{apm_config_v1.py → legacy/apm_config_v1.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{charts.py → legacy/charts.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extendable_cognite_file.py → legacy/extendable_cognite_file.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_filemetadata.py → legacy/extended_filemetadata.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_filemetdata.py → legacy/extended_filemetdata.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_timeseries.py → legacy/extended_timeseries.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{functions.py → legacy/functions.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{graphql_data_models.py → legacy/graphql_data_models.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{instances.py → legacy/instances.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{location_filters.py → legacy/location_filters.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{migration.py → legacy/migration.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{pending_instances_ids.py → legacy/pending_instances_ids.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{project.py → legacy/project.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{raw.py → legacy/raw.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{robotics.py → legacy/robotics.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{search_config.py → legacy/search_config.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{sequences.py → legacy/sequences.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{streamlit_.py → legacy/streamlit_.py} +0 -0
- /cognite_toolkit/_cdf_tk/{prototypes/commands/import_.py → commands/_import_cmd.py} +0 -0
- {cognite_toolkit-0.7.30.dist-info → cognite_toolkit-0.7.39.dist-info}/entry_points.txt +0 -0
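
Most of the renames above move client API modules and data classes into a new `legacy` sub-package. Code that imports these modules directly from the private `_cdf_tk` namespace needs the updated path; a minimal sketch of the pattern, taken from the `_profile.py` change shown below (these are private modules, not a stable public API):

```python
# Import path before 0.7.39 (module sat directly under data_classes):
# from cognite_toolkit._cdf_tk.client.data_classes.raw import RawProfileResults, RawTable

# Import path from 0.7.39 (same classes, now under the legacy sub-package):
from cognite_toolkit._cdf_tk.client.data_classes.legacy.raw import RawProfileResults, RawTable
```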
cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py

```diff
@@ -1,23 +1,43 @@
 from collections.abc import Iterable, Iterator, Mapping, Sequence
-from typing import ClassVar, cast
+from typing import ClassVar, Literal, cast

 from cognite.client.data_classes import Annotation
 from cognite.client.data_classes.data_modeling import EdgeId, InstanceApply, NodeId

 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import PendingInstanceId
+from cognite_toolkit._cdf_tk.client.data_classes.legacy.pending_instances_ids import PendingInstanceId
+from cognite_toolkit._cdf_tk.client.data_classes.three_d import (
+    AssetMappingDMRequest,
+    AssetMappingResponse,
+    ThreeDModelResponse,
+)
+from cognite_toolkit._cdf_tk.commands._migrate.data_classes import ThreeDMigrationRequest
 from cognite_toolkit._cdf_tk.constants import MISSING_EXTERNAL_ID, MISSING_INSTANCE_SPACE
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError, ToolkitValueError
 from cognite_toolkit._cdf_tk.storageio import (
     AnnotationIO,
     HierarchyIO,
     InstanceIO,
+    T_Selector,
     UploadableStorageIO,
 )
 from cognite_toolkit._cdf_tk.storageio._base import Page, UploadItem
+from cognite_toolkit._cdf_tk.storageio.selectors import (
+    ThreeDModelFilteredSelector,
+    ThreeDModelIdSelector,
+    ThreeDSelector,
+)
 from cognite_toolkit._cdf_tk.tk_warnings import MediumSeverityWarning
 from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
-from cognite_toolkit._cdf_tk.utils.http_client import
+from cognite_toolkit._cdf_tk.utils.http_client import (
+    FailedResponse,
+    HTTPClient,
+    HTTPMessage,
+    ItemsRequest,
+    SimpleBodyRequest,
+    SuccessResponseItems,
+    ToolkitAPIError,
+)
 from cognite_toolkit._cdf_tk.utils.useful_types import (
     AssetCentricKindExtended,
     AssetCentricType,
@@ -348,3 +368,200 @@ class AnnotationMigrationIO(
         selector: AssetCentricMigrationSelector | None = None,
     ) -> list[dict[str, JsonVal]]:
         raise NotImplementedError("Serializing Annotation Migrations to JSON is not supported.")
+
+
+class ThreeDMigrationIO(UploadableStorageIO[ThreeDSelector, ThreeDModelResponse, ThreeDMigrationRequest]):
+    """IO class for downloading and migrating 3D models.
+
+    Args:
+        client: The ToolkitClient to use for CDF interactions.
+        data_model_type: The type of 3D data model to download. Either "classic" or "DM".
+
+    """
+
+    KIND = "3DMigration"
+    SUPPORTED_DOWNLOAD_FORMATS = frozenset({".ndjson"})
+    SUPPORTED_COMPRESSIONS = frozenset({".gz"})
+    SUPPORTED_READ_FORMATS = frozenset({".ndjson"})
+    DOWNLOAD_LIMIT = 1000
+    CHUNK_SIZE = 1
+    UPLOAD_ENDPOINT = "/3d/migrate/models"
+    REVISION_ENDPOINT = "/3d/migrate/revisions"
+
+    def __init__(self, client: ToolkitClient, data_model_type: Literal["classic", "data modeling"] = "classic") -> None:
+        super().__init__(client)
+        self.data_model_type = data_model_type
+
+    def as_id(self, item: ThreeDModelResponse) -> str:
+        return f"{item.name}_{item.id!s}"
+
+    def _is_selected(self, item: ThreeDModelResponse, included_models: set[int] | None) -> bool:
+        return self._is_correct_type(item) and (included_models is None or item.id in included_models)
+
+    def _is_correct_type(self, item: ThreeDModelResponse) -> bool:
+        if self.data_model_type == "classic":
+            return item.space is None
+        else:
+            return item.space is not None
+
+    def stream_data(self, selector: ThreeDSelector, limit: int | None = None) -> Iterable[Page[ThreeDModelResponse]]:
+        published: bool | None = None
+        if isinstance(selector, ThreeDModelFilteredSelector):
+            published = selector.published
+        included_models: set[int] | None = None
+        if isinstance(selector, ThreeDModelIdSelector):
+            included_models = set(selector.ids)
+        cursor: str | None = None
+        total = 0
+        while True:
+            request_limit = min(self.DOWNLOAD_LIMIT, limit - total) if limit is not None else self.DOWNLOAD_LIMIT
+            response = self.client.tool.three_d.models.iterate(
+                published=published, include_revision_info=True, limit=request_limit, cursor=cursor
+            )
+            items = [item for item in response.items if self._is_selected(item, included_models)]
+            total += len(items)
+            if items:
+                yield Page(worker_id="main", items=items, next_cursor=response.next_cursor)
+            if response.next_cursor is None:
+                break
+            cursor = response.next_cursor
+
+    def count(self, selector: ThreeDSelector) -> int | None:
+        # There is no efficient way to count 3D models in CDF.
+        return None
+
+    def data_to_json_chunk(
+        self, data_chunk: Sequence[ThreeDModelResponse], selector: ThreeDSelector | None = None
+    ) -> list[dict[str, JsonVal]]:
+        raise NotImplementedError("Deserializing Annotation Migrations from JSON is not supported.")
+
+    def json_to_resource(self, item_json: dict[str, JsonVal]) -> ThreeDMigrationRequest:
+        raise NotImplementedError("Deserializing ThreeD Migrations from JSON is not supported.")
+
+    def upload_items(
+        self,
+        data_chunk: Sequence[UploadItem[ThreeDMigrationRequest]],
+        http_client: HTTPClient,
+        selector: ThreeDSelector | None = None,
+    ) -> Sequence[HTTPMessage]:
+        """Migrate 3D models by uploading them to the migrate/models endpoint."""
+        if len(data_chunk) > self.CHUNK_SIZE:
+            raise RuntimeError(f"Uploading more than {self.CHUNK_SIZE} 3D models at a time is not supported.")
+
+        results: list[HTTPMessage] = []
+        responses = http_client.request_with_retries(
+            message=ItemsRequest(
+                endpoint_url=self.client.config.create_api_url(self.UPLOAD_ENDPOINT),
+                method="POST",
+                items=list(data_chunk),
+            )
+        )
+        if (
+            failed_response := next((res for res in responses if isinstance(res, FailedResponse)), None)
+        ) and failed_response.status_code == 400:
+            raise ToolkitAPIError("3D model migration failed. You need to enable the 3D migration alpha feature flag.")
+
+        results.extend(responses)
+        success_ids = {id for res in responses if isinstance(res, SuccessResponseItems) for id in res.ids}
+        for data in data_chunk:
+            if data.source_id not in success_ids:
+                continue
+            revision = http_client.request_with_retries(
+                message=SimpleBodyRequest(
+                    endpoint_url=self.client.config.create_api_url(self.REVISION_ENDPOINT),
+                    method="POST",
+                    body_content={"items": [data.item.revision.dump(camel_case=True)]},
+                )
+            )
+            results.extend(revision.as_item_responses(data.source_id))
+        return results
+
+
+class ThreeDAssetMappingMigrationIO(UploadableStorageIO[ThreeDSelector, AssetMappingResponse, AssetMappingDMRequest]):
+    KIND = "3DMigrationAssetMapping"
+    SUPPORTED_DOWNLOAD_FORMATS = frozenset({".ndjson"})
+    SUPPORTED_COMPRESSIONS = frozenset({".gz"})
+    SUPPORTED_READ_FORMATS = frozenset({".ndjson"})
+    DOWNLOAD_LIMIT = 1000
+    CHUNK_SIZE = 100
+    UPLOAD_ENDPOINT = "/3d/models/{modelId}/revisions/{revisionId}/mappings"
+
+    def __init__(self, client: ToolkitClient, object_3D_space: str, cad_node_space: str) -> None:
+        super().__init__(client)
+        self.object_3D_space = object_3D_space
+        self.cad_node_space = cad_node_space
+        # We can only migrate asset mappings for 3D models that are already migrated to data modeling.
+        self._3D_io = ThreeDMigrationIO(client, data_model_type="data modeling")
+
+    def as_id(self, item: AssetMappingResponse) -> str:
+        return f"AssetMapping_{item.model_id!s}_{item.revision_id!s}_{item.asset_id!s}"
+
+    def stream_data(self, selector: ThreeDSelector, limit: int | None = None) -> Iterable[Page[AssetMappingResponse]]:
+        total = 0
+        for three_d_page in self._3D_io.stream_data(selector, None):
+            for model in three_d_page.items:
+                if model.last_revision_info is None or model.last_revision_info.revision_id is None:
+                    # No revisions, so no asset mappings to
+                    continue
+                cursor: str | None = None
+                while True:
+                    request_limit = (
+                        min(self.DOWNLOAD_LIMIT, limit - total) if limit is not None else self.DOWNLOAD_LIMIT
+                    )
+                    if limit is not None and total >= limit:
+                        return
+                    response = self.client.tool.three_d.asset_mappings.iterate(
+                        model_id=model.id,
+                        revision_id=model.last_revision_info.revision_id,
+                        cursor=cursor,
+                        limit=request_limit,
+                    )
+                    items = response.items
+                    total += len(items)
+                    if items:
+                        yield Page(worker_id="main", items=items, next_cursor=response.next_cursor)
+                    if response.next_cursor is None:
+                        break
+                    cursor = response.next_cursor
+
+    def count(self, selector: ThreeDSelector) -> int | None:
+        # There is no efficient way to count 3D asset mappings in CDF.
+        return None
+
+    def upload_items(
+        self,
+        data_chunk: Sequence[UploadItem[AssetMappingDMRequest]],
+        http_client: HTTPClient,
+        selector: T_Selector | None = None,
+    ) -> Sequence[HTTPMessage]:
+        """Migrate 3D asset mappings by uploading them to the migrate/asset-mappings endpoint."""
+        if not data_chunk:
+            return []
+        # Assume all items in the chunk belong to the same model and revision, they should
+        # if the .stream_data method is used for downloading.
+        first = data_chunk[0]
+        model_id = first.item.model_id
+        revision_id = first.item.revision_id
+        endpoint = self.UPLOAD_ENDPOINT.format(modelId=model_id, revisionId=revision_id)
+        responses = http_client.request_with_retries(
+            ItemsRequest(
+                endpoint_url=self.client.config.create_api_url(endpoint),
+                method="POST",
+                items=list(data_chunk),
+                extra_body_fields={
+                    "dmsContextualizationConfig": {
+                        "object3DSpace": self.object_3D_space,
+                        "cadNodeSpace": self.cad_node_space,
+                    }
+                },
+            )
+        )
+        return responses
+
+    def json_to_resource(self, item_json: dict[str, JsonVal]) -> AssetMappingDMRequest:
+        raise NotImplementedError("Deserializing 3D Asset Mappings from JSON is not supported.")
+
+    def data_to_json_chunk(
+        self, data_chunk: Sequence[AssetMappingResponse], selector: ThreeDSelector | None = None
+    ) -> list[dict[str, JsonVal]]:
+        raise NotImplementedError("Serializing 3D Asset Mappings to JSON is not supported.")
```
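A minimal usage sketch of the new `ThreeDMigrationIO` added above. The module path follows the heading above, and the `ids=` keyword on the selector is an assumption based only on the `.ids` attribute visible in this diff:

```python
from collections.abc import Iterator

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.commands._migrate.migration_io import ThreeDMigrationIO
from cognite_toolkit._cdf_tk.storageio.selectors import ThreeDModelIdSelector


def iter_classic_model_ids(client: ToolkitClient, model_ids: list[int]) -> Iterator[str]:
    """Page through classic (non data-modeling) 3D models selected by id."""
    io = ThreeDMigrationIO(client, data_model_type="classic")
    selector = ThreeDModelIdSelector(ids=model_ids)  # keyword assumed; the diff only shows `.ids`
    for page in io.stream_data(selector):
        for model in page.items:
            yield io.as_id(model)  # formatted as "<name>_<id>"
```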
cognite_toolkit/_cdf_tk/commands/_profile.py

```diff
@@ -21,7 +21,7 @@ from rich.spinner import Spinner
 from rich.table import Table

 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.raw import RawProfileResults, RawTable
+from cognite_toolkit._cdf_tk.client.data_classes.legacy.raw import RawProfileResults, RawTable
 from cognite_toolkit._cdf_tk.constants import MAX_ROW_ITERATION_RUN_QUERY
 from cognite_toolkit._cdf_tk.exceptions import ToolkitMissingDependencyError, ToolkitThrottledError, ToolkitValueError
 from cognite_toolkit._cdf_tk.utils.aggregators import (
```
cognite_toolkit/_cdf_tk/commands/_purge.py

```diff
@@ -16,6 +16,7 @@ from rich.console import Console
 from rich.panel import Panel

 from cognite_toolkit._cdf_tk.client import ToolkitClient
+from cognite_toolkit._cdf_tk.client.data_classes.instance_api import TypedInstanceIdentifier
 from cognite_toolkit._cdf_tk.cruds import (
     AssetCRUD,
     ContainerCRUD,
@@ -59,10 +60,10 @@ from cognite_toolkit._cdf_tk.utils.aggregators import (
     TimeSeriesAggregator,
 )
 from cognite_toolkit._cdf_tk.utils.http_client import (
-    FailedRequestItems,
-    FailedResponseItems,
     HTTPClient,
     ItemsRequest,
+    ItemsRequest2,
+    ItemsSuccessResponse2,
     SuccessResponseItems,
 )
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
@@ -714,21 +715,18 @@ class PurgeCommand(ToolkitCommand):
             results.deleted += len(items)
             return

-        responses = delete_client.
-
-            delete_client.config.create_api_url("/models/instances/delete"),
+        responses = delete_client.request_items_retries(
+            ItemsRequest2(
+                endpoint_url=delete_client.config.create_api_url("/models/instances/delete"),
                 method="POST",
-
-                items=[DeleteItem(item=item, as_id_fun=InstanceId.load) for item in items],  # type: ignore[arg-type]
+                items=[TypedInstanceIdentifier._load(item) for item in items],
             )
         )
         for response in responses:
-            if isinstance(response,
+            if isinstance(response, ItemsSuccessResponse2):
                 results.deleted += len(response.ids)
-            elif isinstance(response, FailedResponseItems | FailedRequestItems):
-                results.failed += len(response.ids)
             else:
-                results.failed += len(
+                results.failed += len(response.ids)

     @staticmethod
     def _unlink_timeseries(
```
cognite_toolkit/_cdf_tk/commands/build_cmd.py

```diff
@@ -14,7 +14,7 @@ from rich.progress import track
 from cognite_toolkit._cdf_tk.builders import Builder, create_builder
 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.raw import RawDatabase
+from cognite_toolkit._cdf_tk.client.data_classes.legacy.raw import RawDatabase
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.constants import (
     _RUNNING_IN_BROWSER,
```
cognite_toolkit/_cdf_tk/commands/build_v2/__init__.py

File without changes
cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py (new file, 241 lines)

```python
from pathlib import Path
from typing import Any, Literal, TypedDict

from rich import print
from rich.panel import Panel

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
from cognite_toolkit._cdf_tk.commands.build_cmd import BuildCommand as OldBuildCommand
from cognite_toolkit._cdf_tk.commands.build_v2.build_input import BuildInput
from cognite_toolkit._cdf_tk.commands.build_v2.build_issues import BuildIssue, BuildIssueList
from cognite_toolkit._cdf_tk.data_classes import (
    BuildConfigYAML,
    BuildVariables,
    BuiltModuleList,
    ModuleDirectories,
)
from cognite_toolkit._cdf_tk.exceptions import ToolkitError
from cognite_toolkit._cdf_tk.hints import verify_module_directory
from cognite_toolkit._cdf_tk.tk_warnings import ToolkitWarning, WarningList
from cognite_toolkit._cdf_tk.utils.file import safe_rmtree
from cognite_toolkit._cdf_tk.validation import validate_module_selection, validate_modules_variables
from cognite_toolkit._version import __version__


class BuildWarnings(TypedDict):
    warning: ToolkitWarning
    location: list[Path]


class BuildCommand(ToolkitCommand):
    def __init__(self, print_warning: bool = True, skip_tracking: bool = False, silent: bool = False) -> None:
        super().__init__(print_warning, skip_tracking, silent)
        self.issues = BuildIssueList()

    def execute(
        self,
        verbose: bool,
        organization_dir: Path,
        build_dir: Path,
        selected: list[str | Path] | None,
        build_env_name: str | None,
        no_clean: bool,
        client: ToolkitClient | None = None,
        on_error: Literal["continue", "raise"] = "continue",
    ) -> BuiltModuleList:
        """
        Build the resources into deployable artifacts in the build directory.
        """

        self.verbose = verbose
        self.on_error = on_error

        # Tracking the project and cluster for the build.
        if client:
            self._additional_tracking_info.project = client.config.project
            self._additional_tracking_info.cluster = client.config.cdf_cluster

        # Setting the parameters for the build.
        input = BuildInput.load(organization_dir, build_dir, build_env_name, client, selected)

        # Print the build input.
        if self.verbose:
            self._print_build_input(input)

        # Capture warnings from module structure integrity
        if module_selection_issues := self._validate_modules(input):
            self.issues.extend(module_selection_issues)

        # Logistics: clean and create build directory
        if prepare_issues := self._prepare_target_directory(input, not no_clean):
            self.issues.extend(prepare_issues)

        # Compile the configuration and variables,
        # check syntax on module and resource level
        # for any "compilation errors and warnings"
        built_modules, build_integrity_issues = self._build_configuration(input)
        if build_integrity_issues:
            self.issues.extend(build_integrity_issues)

        # This is where we would add any recommendations for the user to improve the build.
        if build_quality_issues := self._verify_build_quality(built_modules):
            self.issues.extend(build_quality_issues)

        # Finally, print warnings grouped by category/code and location.
        self._print_or_log_warnings_by_category(self.issues)

        return built_modules

    def _print_build_input(self, input: BuildInput) -> None:
        print(
            Panel(
                f"Building {input.organization_dir!s}:\n - Toolkit Version '{__version__!s}'\n"
                f" - Environment name {input.build_env_name!r}, validation-type {input.config.environment.validation_type!r}.\n"
                f" - Config '{input.config.filepath!s}'",
                expand=False,
            )
        )

    def _prepare_target_directory(self, input: BuildInput, clean: bool = False) -> BuildIssueList:
        """
        Directory logistics
        """
        issues = BuildIssueList()
        if input.build_dir.exists() and any(input.build_dir.iterdir()):
            if not clean:
                raise ToolkitError("Build directory is not empty. Run without --no-clean to remove existing files.")

            if self.verbose:
                issues.append(BuildIssue(description=f"Build directory {input.build_dir!s} is not empty. Clearing."))
            safe_rmtree(input.build_dir)
        input.build_dir.mkdir(parents=True, exist_ok=True)
        return issues

    def _validate_modules(self, input: BuildInput) -> BuildIssueList:
        issues = BuildIssueList()
        # Verify that the modules exists, are not duplicates,
        # and at least one is selected
        verify_module_directory(input.organization_dir, input.build_env_name)

        # Validate module selection
        user_selected_modules = input.config.environment.get_selected_modules({})
        module_warnings = validate_module_selection(
            modules=input.modules,
            config=input.config,
            packages={},
            selected_modules=user_selected_modules,
            organization_dir=input.organization_dir,
        )
        if module_warnings:
            issues.extend(BuildIssueList.from_warning_list(module_warnings))

        # Validate variables. Note: this looks for non-replaced template
        # variables <.*?> and can be improved in the future.
        # Keeping for reference.
        variables_warnings = validate_modules_variables(input.variables, input.config.filepath)
        if variables_warnings:
            issues.extend(BuildIssueList.from_warning_list(variables_warnings))

        # Track LOC of managed configuration
        # Note: _track is not implemented yet, so we skip it for now
        # self._track(input)

        return issues

    def _build_configuration(self, input: BuildInput) -> tuple[BuiltModuleList, BuildIssueList]:
        issues = BuildIssueList()
        # Use input.modules.selected directly (it's already a ModuleDirectories)
        if not input.modules.selected:
            return BuiltModuleList(), issues

        # first collect variables into practical lookup
        # TODO: parallelism is not implemented yet. I'm sure there are optimizations to be had here, but we'll focus on process parallelism since we believe loading yaml and file i/O are the biggest bottlenecks.

        old_build_command = OldBuildCommand(print_warning=False, skip_tracking=False)
        built_modules = old_build_command.build_config(
            build_dir=input.build_dir,
            organization_dir=input.organization_dir,
            config=input.config,
            packages={},
            clean=False,
            verbose=self.verbose,
            client=input.client,
            progress_bar=False,
            on_error=self.on_error,
        )
        # Copy tracking info from old command to self
        self._additional_tracking_info.package_ids.update(old_build_command._additional_tracking_info.package_ids)
        self._additional_tracking_info.module_ids.update(old_build_command._additional_tracking_info.module_ids)

        # Collect warnings from the old build command and convert to issues
        # Always convert warnings to issues, even if the list appears empty
        # (WarningList might have custom __bool__ behavior)
        if old_build_command.warning_list:
            converted_issues = BuildIssueList.from_warning_list(old_build_command.warning_list)
            issues.extend(converted_issues)
        return built_modules, issues

    def _verify_build_quality(self, built_modules: BuiltModuleList) -> BuildIssueList:
        issues = BuildIssueList()
        return issues

    def _write(self, input: BuildInput) -> None:
        # Write the build to the build directory.
        # Track lines of code built.
        raise NotImplementedError()

    def _track(self, input: BuildInput) -> None:
        raise NotImplementedError()

    def _print_or_log_warnings_by_category(self, issues: BuildIssueList) -> None:
        pass

    # Delegate to old BuildCommand for backward compatibility with tests
    def build_modules(
        self,
        modules: ModuleDirectories,
        build_dir: Path,
        variables: BuildVariables,
        verbose: bool = False,
        progress_bar: bool = False,
        on_error: Literal["continue", "raise"] = "continue",
    ) -> BuiltModuleList:
        """Delegate to old BuildCommand for backward compatibility."""
        old_cmd = OldBuildCommand()

        built_modules = old_cmd.build_modules(modules, build_dir, variables, verbose, progress_bar, on_error)
        self._additional_tracking_info.package_ids.update(old_cmd._additional_tracking_info.package_ids)
        self._additional_tracking_info.module_ids.update(old_cmd._additional_tracking_info.module_ids)
        self.issues.extend(BuildIssueList.from_warning_list(old_cmd.warning_list or WarningList[ToolkitWarning]()))
        return built_modules

    def build_config(
        self,
        build_dir: Path,
        organization_dir: Path,
        config: BuildConfigYAML,
        packages: dict[str, list[str]],
        clean: bool = False,
        verbose: bool = False,
        client: ToolkitClient | None = None,
        progress_bar: bool = False,
        on_error: Literal["continue", "raise"] = "continue",
    ) -> BuiltModuleList:
        """Delegate to old BuildCommand for backward compatibility."""
        old_cmd = OldBuildCommand()
        return old_cmd.build_config(
            build_dir, organization_dir, config, packages, clean, verbose, client, progress_bar, on_error
        )

    def _replace_variables(
        self,
        resource_files: list[Path],
        variables: BuildVariables,
        resource_name: str,
        module_dir: Path,
        verbose: bool = False,
    ) -> list[Any]:
        """Delegate to old BuildCommand for backward compatibility."""
        old_cmd = OldBuildCommand()
        return old_cmd._replace_variables(resource_files, variables, resource_name, module_dir, verbose)
```
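A minimal invocation sketch of the new `build_v2` `BuildCommand`, based only on the signature above; the module path follows the file list, and whether `BuiltModuleList` and `BuildIssueList` support `len()` is an assumption:

```python
from pathlib import Path

from cognite_toolkit._cdf_tk.commands.build_v2.build_cmd import BuildCommand

cmd = BuildCommand(print_warning=True, skip_tracking=True)
built = cmd.execute(
    verbose=True,
    organization_dir=Path("my_org"),  # hypothetical organization directory
    build_dir=Path("build"),
    selected=None,                    # None -> selection comes from the config file
    build_env_name=None,              # None -> DEFAULT_ENV
    no_clean=False,                   # clean the build directory before building
    client=None,                      # optional ToolkitClient, used for tracking and lookups
    on_error="raise",
)
print(f"Built {len(built)} modules with {len(cmd.issues)} issues")
```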
cognite_toolkit/_cdf_tk/commands/build_v2/build_input.py (new file, 85 lines)

```python
import sys
from functools import cached_property
from pathlib import Path

if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self

from pydantic import BaseModel, ConfigDict

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.constants import DEFAULT_ENV
from cognite_toolkit._cdf_tk.data_classes import (
    BuildConfigYAML,
    BuildVariables,
    ModuleDirectories,
)
from cognite_toolkit._cdf_tk.tk_warnings import ToolkitWarning, WarningList
from cognite_toolkit._cdf_tk.utils.modules import parse_user_selected_modules


class BuildInput(BaseModel):
    """Input to the build process."""

    # need this until we turn BuildConfigYaml and ToolkitClient into Pydantic models
    model_config = ConfigDict(frozen=True, arbitrary_types_allowed=True)

    organization_dir: Path
    build_dir: Path
    build_env_name: str
    config: BuildConfigYAML
    client: ToolkitClient | None = None
    selected: list[str | Path] | None = None
    warnings: WarningList[ToolkitWarning] | None = None

    @classmethod
    def load(
        cls,
        organization_dir: Path,
        build_dir: Path,
        build_env_name: str | None,
        client: ToolkitClient | None,
        selected: list[str | Path] | None = None,
    ) -> Self:
        resolved_org_dir = Path.cwd() if organization_dir in {Path("."), Path("./")} else organization_dir
        resolved_env = build_env_name or DEFAULT_ENV
        config, warnings = cls._load_config(resolved_org_dir, resolved_env, selected)
        return cls(
            organization_dir=resolved_org_dir,
            build_dir=build_dir,
            build_env_name=resolved_env,
            config=config,
            client=client,
            selected=selected,
            warnings=warnings,
        )

    @classmethod
    def _load_config(
        cls, organization_dir: Path, build_env_name: str, selected: list[str | Path] | None
    ) -> tuple[BuildConfigYAML, WarningList[ToolkitWarning]]:
        warnings: WarningList[ToolkitWarning] = WarningList[ToolkitWarning]()
        if (organization_dir / BuildConfigYAML.get_filename(build_env_name or DEFAULT_ENV)).exists():
            config = BuildConfigYAML.load_from_directory(organization_dir, build_env_name or DEFAULT_ENV)
        else:
            # Loads the default environment
            config = BuildConfigYAML.load_default(organization_dir)
        if selected:
            config.environment.selected = parse_user_selected_modules(selected, organization_dir)
        config.set_environment_variables()
        if environment_warning := config.validate_environment():
            warnings.append(environment_warning)
        return config, warnings

    @cached_property
    def modules(self) -> ModuleDirectories:
        user_selected_modules = self.config.environment.get_selected_modules({})
        return ModuleDirectories.load(self.organization_dir, user_selected_modules)

    @cached_property
    def variables(self) -> BuildVariables:
        return BuildVariables.load_raw(
            self.config.variables, self.modules.available_paths, self.modules.selected.available_paths
        )
```
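For completeness, a small sketch of loading the `BuildInput` model directly; `modules` and `variables` are computed lazily via `cached_property`, so repeated access does not re-scan the organization directory. The paths and module name here are hypothetical:

```python
from pathlib import Path

from cognite_toolkit._cdf_tk.commands.build_v2.build_input import BuildInput

build_input = BuildInput.load(
    organization_dir=Path("my_org"),  # "." resolves to the current working directory
    build_dir=Path("build"),
    build_env_name=None,              # falls back to DEFAULT_ENV
    client=None,
    selected=["my_module"],           # overrides environment.selected in the config
)
modules = build_input.modules         # cached: ModuleDirectories for the selection
variables = build_input.variables     # cached: BuildVariables resolved for those modules
```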
cognite_toolkit/_cdf_tk/commands/build_v2/build_issues.py (new file, 27 lines)

```python
import sys

if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self

from collections import UserList

from pydantic import BaseModel

from cognite_toolkit._cdf_tk.tk_warnings import ToolkitWarning, WarningList


class BuildIssue(BaseModel):
    """Issue with the build. Can have a recommendation for the user to improve the build."""

    description: str


class BuildIssueList(UserList[BuildIssue]):
    """List of build issues."""

    @classmethod
    def from_warning_list(cls, warning_list: WarningList[ToolkitWarning]) -> Self:
        """Create a BuildIssueList from a WarningList."""
        return cls([BuildIssue(description=warning.get_message()) for warning in warning_list])
```