cognite-toolkit 0.7.47__py3-none-any.whl → 0.7.48__py3-none-any.whl
This diff shows the changes between two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py +6 -6
- cognite_toolkit/_cdf_tk/client/_toolkit_client.py +6 -4
- cognite_toolkit/_cdf_tk/client/api/instances.py +139 -0
- cognite_toolkit/_cdf_tk/client/api/location_filters.py +177 -0
- cognite_toolkit/_cdf_tk/client/api/raw.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/robotics.py +19 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_capabilities.py +127 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_data_postprocessing.py +138 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_frames.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_locations.py +127 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_maps.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_robots.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/search_config.py +101 -0
- cognite_toolkit/_cdf_tk/client/api/streams.py +63 -55
- cognite_toolkit/_cdf_tk/client/api/three_d.py +293 -277
- cognite_toolkit/_cdf_tk/client/cdf_client/api.py +34 -5
- cognite_toolkit/_cdf_tk/client/http_client/_client.py +5 -2
- cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +4 -3
- cognite_toolkit/_cdf_tk/client/request_classes/filters.py +45 -1
- cognite_toolkit/_cdf_tk/client/resource_classes/apm_config.py +128 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/cognite_file.py +53 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/__init__.py +4 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_instance.py +22 -11
- cognite_toolkit/_cdf_tk/client/resource_classes/identifiers.py +7 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/location_filter.py +9 -2
- cognite_toolkit/_cdf_tk/client/resource_classes/resource_view_mapping.py +38 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_map.py +6 -1
- cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_robot.py +10 -5
- cognite_toolkit/_cdf_tk/client/resource_classes/streams.py +1 -20
- cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py +30 -9
- cognite_toolkit/_cdf_tk/client/testing.py +2 -2
- cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +5 -5
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +11 -7
- cognite_toolkit/_cdf_tk/commands/build_v2/_module_parser.py +138 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/_modules_parser.py +163 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py +83 -96
- cognite_toolkit/_cdf_tk/commands/build_v2/{build_input.py → build_parameters.py} +8 -22
- cognite_toolkit/_cdf_tk/commands/build_v2/data_classes/_modules.py +27 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/data_classes/_resource.py +22 -0
- cognite_toolkit/_cdf_tk/cruds/__init__.py +11 -5
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/streams.py +14 -30
- cognite_toolkit/_cdf_tk/data_classes/__init__.py +3 -0
- cognite_toolkit/_cdf_tk/data_classes/_issues.py +36 -0
- cognite_toolkit/_cdf_tk/data_classes/_module_directories.py +2 -1
- cognite_toolkit/_cdf_tk/storageio/_base.py +2 -0
- cognite_toolkit/_cdf_tk/storageio/logger.py +163 -0
- cognite_toolkit/_cdf_tk/utils/__init__.py +8 -1
- cognite_toolkit/_cdf_tk/utils/interactive_select.py +3 -1
- cognite_toolkit/_cdf_tk/utils/modules.py +7 -0
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.48.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.48.dist-info}/RECORD +58 -40
- cognite_toolkit/_cdf_tk/commands/build_v2/build_issues.py +0 -27
- /cognite_toolkit/_cdf_tk/client/resource_classes/{search_config_resource.py → search_config.py} +0 -0
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.48.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.48.dist-info}/entry_points.txt +0 -0
cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py
@@ -1,10 +1,10 @@
 import sys
-from typing import Literal
+from typing import ClassVar, Literal

 from pydantic import Field

-from .base import BaseModelObject, Identifier, RequestResource, ResponseResource
-from .identifiers import
+from .base import BaseModelObject, Identifier, RequestResource, RequestUpdateable, ResponseResource
+from .identifiers import InternalId
 from .instance_api import NodeReference

 if sys.version_info >= (3, 11):
@@ -23,12 +23,18 @@ class RevisionStatus(BaseModelObject):

 class ThreeDModelRequest(RequestResource):
     name: str
+    # This field is part of the path request and not the body schema.
+    # but is needed for identifier conversion.
+    id: int | None = Field(None, exclude=True)

-    def as_id(self) ->
-
+    def as_id(self) -> InternalId:
+        if self.id is None:
+            raise ValueError("Cannot convert to InternalId when id is None.")
+        return InternalId(id=self.id)


-class ThreeDModelClassicRequest(ThreeDModelRequest):
+class ThreeDModelClassicRequest(ThreeDModelRequest, RequestUpdateable):
+    container_fields: ClassVar[frozenset[str]] = frozenset({"metadata"})
     data_set_id: int | None = None
     metadata: dict[str, str] | None = None

@@ -91,17 +97,32 @@ class AssetMappingClassicRequest(RequestResource, Identifier):
         return f"{self.model_id}_{self.revision_id}_{self.node_id}_{asset_part}"


-class
+class AssetMappingClassicResponse(ResponseResource[AssetMappingClassicRequest]):
     node_id: int
     asset_id: int | None = None
     asset_instance_id: NodeReference | None = None
     tree_index: int | None = None
     subtree_size: int | None = None
     # These fields are part of the path request and response, but they are included here for convenience.
-    model_id: int = Field(exclude=True)
-    revision_id: int = Field(exclude=True)
+    model_id: int = Field(-1, exclude=True)
+    revision_id: int = Field(-1, exclude=True)

     def as_request_resource(self) -> AssetMappingClassicRequest:
         return AssetMappingClassicRequest.model_validate(
             {**self.dump(), "modelId": self.model_id, "revisionId": self.revision_id}
         )
+
+
+class AssetMappingDMResponse(ResponseResource[AssetMappingDMRequest]):
+    node_id: int
+    asset_instance_id: NodeReference
+    tree_index: int | None = None
+    subtree_size: int | None = None
+    # These fields are part of the path request and response, but they are included here for convenience.
+    model_id: int = Field(-1, exclude=True)
+    revision_id: int = Field(-1, exclude=True)
+
+    def as_request_resource(self) -> AssetMappingDMRequest:
+        return AssetMappingDMRequest.model_validate(
+            {**self.dump(), "modelId": self.model_id, "revisionId": self.revision_id}
+        )
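Note: a minimal sketch of the reworked identifier handling above, assuming ThreeDModelClassicRequest can be constructed directly; the model name and id below are illustrative, not taken from the package.

    from cognite_toolkit._cdf_tk.client.resource_classes.three_d import ThreeDModelClassicRequest

    # `id` is excluded from the request body but is now needed for identifier conversion.
    model = ThreeDModelClassicRequest(name="pump-station", id=1234)  # hypothetical values
    internal_id = model.as_id()  # InternalId(id=1234)
    ThreeDModelClassicRequest(name="pump-station").as_id()  # raises ValueError because id is None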
cognite_toolkit/_cdf_tk/client/testing.py
@@ -65,7 +65,7 @@ from .api.search import SearchAPI
 from .api.security_categories import SecurityCategoriesAPI
 from .api.sequences import SequencesAPI
 from .api.streams import StreamsAPI
-from .api.three_d import ThreeDAPI,
+from .api.three_d import ThreeDAPI, ThreeDClassicModelsAPI
 from .api.timeseries import TimeSeriesAPI
 from .api.token import TokenAPI
 from .api.transformations import TransformationsAPI
@@ -151,7 +151,7 @@ class ToolkitClientMock(CogniteClientMock):

         self.tool = MagicMock(spec=ToolAPI)
         self.tool.three_d = MagicMock(spec=ThreeDAPI)
-        self.tool.three_d.
+        self.tool.three_d.models_classic = MagicMock(spec_set=ThreeDClassicModelsAPI)
         self.tool.assets = MagicMock(spec_set=AssetsAPI)
         self.tool.timeseries = MagicMock(spec_set=TimeSeriesAPI)
         self.tool.filemetadata = MagicMock(spec_set=FileMetadataAPI)
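Note: a sketch of how a test might stub the new mock attribute; how ToolkitClientMock is normally instantiated (directly or via a fixture) is not shown in this diff, so the construction below is an assumption.

    from unittest.mock import MagicMock

    from cognite_toolkit._cdf_tk.client.testing import ToolkitClientMock

    client = ToolkitClientMock()
    # The classic 3D models API is now mocked under `models_classic`.
    client.tool.three_d.models_classic.paginate.return_value = MagicMock(items=[])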
cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py
@@ -30,8 +30,8 @@ from cognite_toolkit._cdf_tk.client.resource_classes.legacy.canvas import (
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.charts import Chart, ChartWrite
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import ResourceViewMappingApply
 from cognite_toolkit._cdf_tk.client.resource_classes.three_d import (
+    AssetMappingClassicResponse,
     AssetMappingDMRequest,
-    AssetMappingResponse,
     RevisionStatus,
     ThreeDModelResponse,
 )
@@ -477,12 +477,12 @@ class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrati
         return None


-class ThreeDAssetMapper(DataMapper[ThreeDSelector,
+class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingClassicResponse, AssetMappingDMRequest]):
     def __init__(self, client: ToolkitClient) -> None:
         self.client = client

     def map(
-        self, source: Sequence[
+        self, source: Sequence[AssetMappingClassicResponse]
     ) -> Sequence[tuple[AssetMappingDMRequest | None, MigrationIssue]]:
         output: list[tuple[AssetMappingDMRequest | None, MigrationIssue]] = []
         self._populate_cache(source)
@@ -491,7 +491,7 @@ class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingResponse, AssetMa
             output.append((mapped_item, issue))
         return output

-    def _populate_cache(self, source: Sequence[
+    def _populate_cache(self, source: Sequence[AssetMappingClassicResponse]) -> None:
         asset_ids: set[int] = set()
         for mapping in source:
             if mapping.asset_id is not None:
@@ -499,7 +499,7 @@ class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingResponse, AssetMa
         self.client.migration.lookup.assets(list(asset_ids))

     def _map_single_item(
-        self, item:
+        self, item: AssetMappingClassicResponse
     ) -> tuple[AssetMappingDMRequest | None, ThreeDModelMigrationIssue]:
         issue = ThreeDModelMigrationIssue(model_name=f"AssetMapping_{item.model_id}", model_id=item.model_id)
         asset_instance_id = item.asset_instance_id
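Note: an illustrative sketch of the updated mapper contract, based only on the signatures visible above; `client` is a ToolkitClient and `classic_mappings` stands in for mappings fetched from the classic 3D asset-mappings API.

    mapper = ThreeDAssetMapper(client)
    for dm_request, issue in mapper.map(classic_mappings):  # Sequence[AssetMappingClassicResponse]
        if dm_request is None:
            print(issue)  # the classic mapping could not be converted to a data-modeling request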
cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py
@@ -16,8 +16,8 @@ from cognite_toolkit._cdf_tk.client.http_client import (
 )
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.pending_instances_ids import PendingInstanceId
 from cognite_toolkit._cdf_tk.client.resource_classes.three_d import (
+    AssetMappingClassicResponse,
     AssetMappingDMRequest,
-    AssetMappingResponse,
     ThreeDModelResponse,
 )
 from cognite_toolkit._cdf_tk.commands._migrate.data_classes import ThreeDMigrationRequest
@@ -415,7 +415,7 @@ class ThreeDMigrationIO(UploadableStorageIO[ThreeDSelector, ThreeDModelResponse,
         total = 0
         while True:
             request_limit = min(self.DOWNLOAD_LIMIT, limit - total) if limit is not None else self.DOWNLOAD_LIMIT
-            response = self.client.tool.three_d.
+            response = self.client.tool.three_d.models_classic.paginate(
                 published=published, include_revision_info=True, limit=request_limit, cursor=cursor
             )
             items = [item for item in response.items if self._is_selected(item, included_models)]
@@ -477,7 +477,9 @@
         return results


-class ThreeDAssetMappingMigrationIO(
+class ThreeDAssetMappingMigrationIO(
+    UploadableStorageIO[ThreeDSelector, AssetMappingClassicResponse, AssetMappingDMRequest]
+):
     KIND = "3DMigrationAssetMapping"
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".ndjson"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
@@ -493,10 +495,12 @@ class ThreeDAssetMappingMigrationIO(UploadableStorageIO[ThreeDSelector, AssetMap
         # We can only migrate asset mappings for 3D models that are already migrated to data modeling.
         self._3D_io = ThreeDMigrationIO(client, data_model_type="data modeling")

-    def as_id(self, item:
+    def as_id(self, item: AssetMappingClassicResponse) -> str:
         return f"AssetMapping_{item.model_id!s}_{item.revision_id!s}_{item.asset_id!s}"

-    def stream_data(
+    def stream_data(
+        self, selector: ThreeDSelector, limit: int | None = None
+    ) -> Iterable[Page[AssetMappingClassicResponse]]:
         total = 0
         for three_d_page in self._3D_io.stream_data(selector, None):
             for model in three_d_page.items:
@@ -510,7 +514,7 @@ class ThreeDAssetMappingMigrationIO(UploadableStorageIO[ThreeDSelector, AssetMap
                 )
                 if limit is not None and total >= limit:
                     return
-                response = self.client.tool.three_d.
+                response = self.client.tool.three_d.asset_mappings_classic.paginate(
                     model_id=model.id,
                     revision_id=model.last_revision_info.revision_id,
                     cursor=cursor,
@@ -562,6 +566,6 @@ class ThreeDAssetMappingMigrationIO(UploadableStorageIO[ThreeDSelector, AssetMap
         raise NotImplementedError("Deserializing 3D Asset Mappings from JSON is not supported.")

     def data_to_json_chunk(
-        self, data_chunk: Sequence[
+        self, data_chunk: Sequence[AssetMappingClassicResponse], selector: ThreeDSelector | None = None
     ) -> list[dict[str, JsonVal]]:
         raise NotImplementedError("Serializing 3D Asset Mappings to JSON is not supported.")
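Note: a sketch of the streaming path touched above; constructor arguments of ThreeDAssetMappingMigrationIO beyond `client` are not shown in this diff, so only the stream_data/as_id contract is illustrated.

    io = ThreeDAssetMappingMigrationIO(client)  # assumed construction
    for page in io.stream_data(selector, limit=1000):  # yields Page[AssetMappingClassicResponse]
        for mapping in page.items:
            print(io.as_id(mapping))  # "AssetMapping_<model_id>_<revision_id>_<asset_id>"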
cognite_toolkit/_cdf_tk/commands/build_v2/_module_parser.py (new file)
@@ -0,0 +1,138 @@
+from pathlib import Path
+
+from cognite_toolkit._cdf_tk.constants import EXCL_FILES, MODULES
+from cognite_toolkit._cdf_tk.cruds import CRUDS_BY_FOLDER_NAME, CRUDS_BY_FOLDER_NAME_INCLUDE_ALPHA
+from cognite_toolkit._cdf_tk.data_classes import IssueList
+from cognite_toolkit._cdf_tk.data_classes._issues import ModuleLoadingIssue
+from cognite_toolkit._cdf_tk.exceptions import ToolkitError
+from cognite_toolkit._cdf_tk.utils import humanize_collection, module_path_display_name
+
+
+class ModulesParser:
+    def __init__(self, organization_dir: Path, selected: list[str | Path] | None = None):
+        self.organization_dir = organization_dir
+        self.selected = selected
+
+    def parse(self) -> tuple[list[Path], IssueList]:
+        modules_root = self.organization_dir / MODULES
+        if not modules_root.exists():
+            raise ToolkitError(f"Module root directory '{modules_root}' not found")
+
+        module_paths: list[Path] = []
+        excluded_module_paths: list[Path] = []
+        issues: IssueList = IssueList()
+        for resource_file in self.organization_dir.glob("**/*.y*ml"):
+            if resource_file.name in EXCL_FILES:
+                continue
+
+            # Get the module folder for the resource file.
+            module_path = self._get_module_path_from_resource_file_path(resource_file)
+            if not module_path:
+                continue
+
+            # If the module has already been processed, skip it.
+            if module_path in module_paths or module_path in excluded_module_paths:
+                continue
+
+            # Skip the modules that do not match the selection
+            if not self._matches_selection(module_path, modules_root, self.selected):
+                excluded_module_paths.append(module_path)
+                continue
+
+            module_paths.append(module_path)
+
+        deepest_module_paths = self._find_modules_with_submodules(module_paths)
+        parent_module_paths = set(module_paths) - set(deepest_module_paths)
+        if parent_module_paths:
+            module_paths = deepest_module_paths
+            issues.extend(
+                ModuleLoadingIssue(
+                    message=f"Module {module_path_display_name(self.organization_dir, parent_module_path)!r} is skipped because it has submodules"
+                )
+                for parent_module_path in parent_module_paths
+            )
+
+        valid_module_paths: list[Path] = []
+        for module_path in module_paths:
+            valid_module_path, issue = self._check_resource_folder_content(module_path)
+            if issue:
+                issues.append(issue)
+            if valid_module_path:
+                valid_module_paths.append(valid_module_path)
+
+        return valid_module_paths, issues
+
+    def _get_module_path_from_resource_file_path(self, resource_file: Path) -> Path | None:
+        # recognize the module by containing a resource associated by a CRUD.
+        # Special case: if the resource folder is a subfolder of a CRUD, return the parent of the subfolder.
+        resource_folder = resource_file.parent
+        crud = next(iter(CRUDS_BY_FOLDER_NAME_INCLUDE_ALPHA.get(resource_folder.name, [])), None)
+        if crud:
+            # iterate over the parents of the resource folder until we find the module folder.
+            # This is to handle the special case of a subfolder of a CRUD, or yamls in for example function subfolders.
+            for p in resource_file.parents:
+                if p.name == crud.folder_name:
+                    return p.parent
+                if p.name == MODULES:
+                    return p
+        return None
+
+    def _check_resource_folder_content(self, module_path: Path) -> tuple[None | Path, ModuleLoadingIssue | None]:
+        resource_folder_names = {d.name for d in module_path.iterdir() if d.is_dir()}
+        unrecognized_resource_folder_names = resource_folder_names - CRUDS_BY_FOLDER_NAME.keys()
+
+        issue = (
+            ModuleLoadingIssue(
+                message=f"Module {module_path_display_name(self.organization_dir, module_path)!r} contains unrecognized resource folder(s): {humanize_collection(unrecognized_resource_folder_names)}"
+            )
+            if unrecognized_resource_folder_names
+            else None
+        )
+
+        has_valid_resource_folders = bool(resource_folder_names & CRUDS_BY_FOLDER_NAME.keys())
+        return (module_path if has_valid_resource_folders else None, issue)
+
+    def _matches_selection(self, module_path: Path, modules_root: Path, selected: list[str | Path] | None) -> bool:
+        if not selected:
+            return True
+
+        rel = module_path.relative_to(modules_root)
+        rel_parts = [p.lower() for p in rel.parts]
+        if not rel_parts:
+            # module_path is the modules_root itself
+            return False
+        name_lower = rel_parts[-1]
+        modules_lower = MODULES.lower()
+
+        for sel in selected:
+            sel_path = Path(sel) if isinstance(sel, str) else sel
+
+            sel_parts = [p.lower() for p in sel_path.parts]
+            if not sel_parts:
+                continue
+
+            if sel_parts[0] == modules_lower:
+                sel_parts = sel_parts[1:]
+
+            if not sel_parts:
+                return True
+
+            if len(sel_parts) == 1 and name_lower == sel_parts[0]:
+                return True
+
+            if rel_parts[: len(sel_parts)] == sel_parts:
+                return True
+
+        return False
+
+    def _find_modules_with_submodules(self, module_paths: list[Path]) -> list[Path]:
+        """Remove parent modules when they have submodules. Keep only the deepest modules."""
+        return [
+            module_path
+            for module_path in module_paths
+            if not any(
+                module_path in other_module_path.parents
+                for other_module_path in module_paths
+                if other_module_path != module_path
+            )
+        ]
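Note: a short sketch of the module discovery helper added in _module_parser.py; the organization directory and selection below are example values.

    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands.build_v2._module_parser import ModulesParser

    module_paths, issues = ModulesParser(Path("my_org"), selected=["modules/my_module"]).parse()
    for issue in issues:
        print(issue.message)  # e.g. unrecognized resource folders or skipped parent modules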
cognite_toolkit/_cdf_tk/commands/build_v2/_modules_parser.py (new file)
@@ -0,0 +1,163 @@
+from pathlib import Path
+
+from cognite_toolkit._cdf_tk.constants import EXCL_FILES, MODULES
+from cognite_toolkit._cdf_tk.cruds import CRUDS_BY_FOLDER_NAME, CRUDS_BY_FOLDER_NAME_INCLUDE_ALPHA
+from cognite_toolkit._cdf_tk.data_classes import IssueList
+from cognite_toolkit._cdf_tk.data_classes._issues import ModuleLoadingIssue
+from cognite_toolkit._cdf_tk.exceptions import ToolkitError
+from cognite_toolkit._cdf_tk.utils import module_path_display_name
+
+
+class ModulesParser:
+    def __init__(self, organization_dir: Path, selected: list[str | Path] | None = None):
+        self.organization_dir = organization_dir
+        self.selected = selected
+        self.issues = IssueList()
+
+    def parse(self) -> list[Path]:
+        modules_root = self.organization_dir / MODULES
+        if not modules_root.exists():
+            raise ToolkitError(f"Module root directory '{modules_root.as_posix()}' not found")
+
+        module_paths: list[Path] = []
+        for resource_file in self.organization_dir.glob("**/*.y*ml"):
+            if resource_file.name in EXCL_FILES:
+                continue
+
+            # Get the module folder for the resource file.
+            module_path = self._get_module_path_from_resource_file_path(resource_file)
+            if not module_path:
+                continue
+
+            # If the module has already been processed, skip it.
+            if module_path in module_paths:
+                continue
+
+            module_paths.append(module_path)
+
+        deepest_module_paths = self._find_modules_with_submodules(module_paths)
+        parent_module_paths = set(module_paths) - set(deepest_module_paths)
+        if parent_module_paths:
+            module_paths = deepest_module_paths
+            self.issues.extend(
+                ModuleLoadingIssue(
+                    message=f"Module {module_path_display_name(self.organization_dir, parent_module_path)!r} is skipped because it has submodules",
+                )
+                for parent_module_path in parent_module_paths
+            )
+
+        all_module_paths = module_paths
+
+        if self.selected:
+            normalized_selected = self._normalize_selection(self.selected)
+            selected_module_paths = [
+                module_path
+                for module_path in all_module_paths
+                if self._matches_selection(module_path, modules_root, normalized_selected)
+            ]
+            for selected_module in self.selected:
+                normalized_selected_item = self._normalize_selection([selected_module])
+                has_match = any(
+                    self._matches_selection(found_module, modules_root, normalized_selected_item)
+                    for found_module in all_module_paths
+                )
+                if not has_match:
+                    self.issues.append(
+                        ModuleLoadingIssue(
+                            message=f"Module '{selected_module}' not found",
+                        )
+                    )
+
+            module_paths = selected_module_paths
+
+        valid_module_paths: list[Path] = []
+        for module_path in module_paths:
+            valid_module_path, issue = self._check_resource_folder_content(module_path)
+            if issue:
+                self.issues.append(issue)
+            if valid_module_path:
+                valid_module_paths.append(valid_module_path)
+
+        return valid_module_paths
+
+    def _get_module_path_from_resource_file_path(self, resource_file: Path) -> Path | None:
+        # recognize the module by traversing the parents of the resource file until we find a CRUD folder
+
+        for parent in resource_file.parents:
+            if parent.name in CRUDS_BY_FOLDER_NAME_INCLUDE_ALPHA:
+                # special case: if the crud is FunctionCRUD, the resource file has to be a direct descendent.
+                if parent.name == "functions" and resource_file.parent.name != "functions":
+                    return None
+                return parent.parent
+            if parent.name == MODULES:
+                return parent
+        return None
+
+    def _check_resource_folder_content(self, module_path: Path) -> tuple[None | Path, ModuleLoadingIssue | None]:
+        resource_folder_names = {d.name for d in module_path.iterdir() if d.is_dir()}
+        unrecognized_resource_folder_names = resource_folder_names - CRUDS_BY_FOLDER_NAME.keys()
+
+        issue = (
+            ModuleLoadingIssue(
+                message=f"Module {module_path_display_name(self.organization_dir, module_path)!r} contains unrecognized resource folder(s): {', '.join(unrecognized_resource_folder_names)}"
+            )
+            if unrecognized_resource_folder_names
+            else None
+        )
+
+        has_valid_resource_folders = bool(resource_folder_names & CRUDS_BY_FOLDER_NAME.keys())
+        return (module_path if has_valid_resource_folders else None, issue)
+
+    def _matches_selection(
+        self, module_path: Path, modules_root: Path, normalized_selected: list[tuple[str, ...]] | None
+    ) -> bool:
+        if not normalized_selected:
+            return True
+
+        rel = module_path.relative_to(modules_root)
+        rel_parts = tuple(p.lower() for p in rel.parts)
+        if not rel_parts:
+            # module_path is the modules_root itself
+            return False
+        name_lower = rel_parts[-1]
+
+        for sel_parts in normalized_selected:
+            if not sel_parts:
+                return True
+
+            if len(sel_parts) == 1 and name_lower == sel_parts[0]:
+                return True
+
+            if rel_parts[: len(sel_parts)] == sel_parts:
+                return True
+
+        return False
+
+    def _normalize_selection(self, selected: list[str | Path]) -> list[tuple[str, ...]]:
+        normalized: list[tuple[str, ...]] = []
+        modules_lower = MODULES.lower()
+        for sel in selected:
+            if isinstance(sel, Path):
+                sel_parts = sel.parts
+            else:
+                sel_parts = tuple(part for part in str(sel).replace("\\", "/").split("/") if part)
+
+            sel_parts_lower = tuple(part.lower() for part in sel_parts)
+            if sel_parts_lower and sel_parts_lower[0] == modules_lower:
+                sel_parts_lower = sel_parts_lower[1:]
+
+            normalized.append(sel_parts_lower)
+
+        return normalized
+
+    def _find_modules_with_submodules(self, module_paths: list[Path]) -> list[Path]:
+        """Remove parent modules when they have submodules. Keep only the deepest modules."""
+        return [
+            module_path
+            for module_path in module_paths
+            if not any(
+                module_path in other_module_path.parents
+                for other_module_path in module_paths
+                if other_module_path != module_path
+            )
+        ]
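Note: the variant added in _modules_parser.py collects issues on the parser instance and returns only the valid module paths; a sketch with example values:

    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands.build_v2._modules_parser import ModulesParser

    parser = ModulesParser(Path("my_org"), selected=["my_module"])
    module_paths = parser.parse()  # list[Path] of modules containing recognized resource folders
    for issue in parser.issues:
        print(issue.message)  # includes "Module '<name>' not found" for selections with no match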