cognite-toolkit 0.7.47__py3-none-any.whl → 0.7.49__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py +6 -6
- cognite_toolkit/_cdf_tk/client/_toolkit_client.py +6 -4
- cognite_toolkit/_cdf_tk/client/api/instances.py +139 -0
- cognite_toolkit/_cdf_tk/client/api/location_filters.py +177 -0
- cognite_toolkit/_cdf_tk/client/api/raw.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/robotics.py +19 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_capabilities.py +127 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_data_postprocessing.py +138 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_frames.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_locations.py +127 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_maps.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/robotics_robots.py +122 -0
- cognite_toolkit/_cdf_tk/client/api/search_config.py +101 -0
- cognite_toolkit/_cdf_tk/client/api/streams.py +63 -55
- cognite_toolkit/_cdf_tk/client/api/three_d.py +293 -277
- cognite_toolkit/_cdf_tk/client/cdf_client/api.py +34 -5
- cognite_toolkit/_cdf_tk/client/http_client/_client.py +5 -2
- cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +4 -3
- cognite_toolkit/_cdf_tk/client/request_classes/filters.py +45 -1
- cognite_toolkit/_cdf_tk/client/resource_classes/apm_config.py +128 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/cognite_file.py +53 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/__init__.py +4 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_instance.py +22 -11
- cognite_toolkit/_cdf_tk/client/resource_classes/identifiers.py +7 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/location_filter.py +9 -2
- cognite_toolkit/_cdf_tk/client/resource_classes/resource_view_mapping.py +38 -0
- cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_map.py +6 -1
- cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_robot.py +10 -5
- cognite_toolkit/_cdf_tk/client/resource_classes/streams.py +1 -20
- cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py +30 -9
- cognite_toolkit/_cdf_tk/client/testing.py +2 -2
- cognite_toolkit/_cdf_tk/commands/_migrate/command.py +103 -108
- cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +6 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +119 -41
- cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +21 -38
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +14 -12
- cognite_toolkit/_cdf_tk/commands/build_v2/_module_parser.py +138 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/_modules_parser.py +163 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py +83 -96
- cognite_toolkit/_cdf_tk/commands/build_v2/{build_input.py → build_parameters.py} +8 -22
- cognite_toolkit/_cdf_tk/commands/build_v2/data_classes/_modules.py +27 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/data_classes/_resource.py +22 -0
- cognite_toolkit/_cdf_tk/cruds/__init__.py +11 -5
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/streams.py +14 -30
- cognite_toolkit/_cdf_tk/data_classes/__init__.py +3 -0
- cognite_toolkit/_cdf_tk/data_classes/_issues.py +36 -0
- cognite_toolkit/_cdf_tk/data_classes/_module_directories.py +2 -1
- cognite_toolkit/_cdf_tk/storageio/_base.py +2 -0
- cognite_toolkit/_cdf_tk/storageio/logger.py +162 -0
- cognite_toolkit/_cdf_tk/utils/__init__.py +8 -1
- cognite_toolkit/_cdf_tk/utils/interactive_select.py +3 -1
- cognite_toolkit/_cdf_tk/utils/modules.py +7 -0
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.49.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.49.dist-info}/RECORD +61 -43
- cognite_toolkit/_cdf_tk/commands/build_v2/build_issues.py +0 -27
- /cognite_toolkit/_cdf_tk/client/resource_classes/{search_config_resource.py → search_config.py} +0 -0
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.49.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.7.47.dist-info → cognite_toolkit-0.7.49.dist-info}/entry_points.txt +0 -0
cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py

@@ -1,10 +1,10 @@
 import sys
-from typing import Literal
+from typing import ClassVar, Literal
 
 from pydantic import Field
 
-from .base import BaseModelObject, Identifier, RequestResource, ResponseResource
-from .identifiers import
+from .base import BaseModelObject, Identifier, RequestResource, RequestUpdateable, ResponseResource
+from .identifiers import InternalId
 from .instance_api import NodeReference
 
 if sys.version_info >= (3, 11):
@@ -23,12 +23,18 @@ class RevisionStatus(BaseModelObject):
 
 class ThreeDModelRequest(RequestResource):
     name: str
+    # This field is part of the path request and not the body schema.
+    # but is needed for identifier conversion.
+    id: int | None = Field(None, exclude=True)
 
-    def as_id(self) ->
-
+    def as_id(self) -> InternalId:
+        if self.id is None:
+            raise ValueError("Cannot convert to InternalId when id is None.")
+        return InternalId(id=self.id)
 
 
-class ThreeDModelClassicRequest(ThreeDModelRequest):
+class ThreeDModelClassicRequest(ThreeDModelRequest, RequestUpdateable):
+    container_fields: ClassVar[frozenset[str]] = frozenset({"metadata"})
     data_set_id: int | None = None
     metadata: dict[str, str] | None = None
 
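The new optional `id` field rides along with the model but is excluded from the serialized request body, which is what makes the `as_id()` conversion above possible. A minimal standalone sketch of that contract, with `InternalId` stubbed out for illustration:

```python
from pydantic import BaseModel, Field

class InternalId(BaseModel):  # stand-in for the toolkit's InternalId
    id: int

class ThreeDModelRequest(BaseModel):
    name: str
    # Path-level field, excluded from the request body on serialization.
    id: int | None = Field(None, exclude=True)

    def as_id(self) -> InternalId:
        if self.id is None:
            raise ValueError("Cannot convert to InternalId when id is None.")
        return InternalId(id=self.id)

request = ThreeDModelRequest(name="pump-station", id=123)
assert request.as_id().id == 123
assert "id" not in request.model_dump()  # excluded from the serialized body
```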
@@ -91,17 +97,32 @@ class AssetMappingClassicRequest(RequestResource, Identifier):
         return f"{self.model_id}_{self.revision_id}_{self.node_id}_{asset_part}"
 
 
-class
+class AssetMappingClassicResponse(ResponseResource[AssetMappingClassicRequest]):
     node_id: int
     asset_id: int | None = None
     asset_instance_id: NodeReference | None = None
     tree_index: int | None = None
     subtree_size: int | None = None
     # These fields are part of the path request and response, but they are included here for convenience.
-    model_id: int = Field(exclude=True)
-    revision_id: int = Field(exclude=True)
+    model_id: int = Field(-1, exclude=True)
+    revision_id: int = Field(-1, exclude=True)
 
     def as_request_resource(self) -> AssetMappingClassicRequest:
         return AssetMappingClassicRequest.model_validate(
             {**self.dump(), "modelId": self.model_id, "revisionId": self.revision_id}
         )
+
+
+class AssetMappingDMResponse(ResponseResource[AssetMappingDMRequest]):
+    node_id: int
+    asset_instance_id: NodeReference
+    tree_index: int | None = None
+    subtree_size: int | None = None
+    # These fields are part of the path request and response, but they are included here for convenience.
+    model_id: int = Field(-1, exclude=True)
+    revision_id: int = Field(-1, exclude=True)
+
+    def as_request_resource(self) -> AssetMappingDMRequest:
+        return AssetMappingDMRequest.model_validate(
+            {**self.dump(), "modelId": self.model_id, "revisionId": self.revision_id}
+        )
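Both response classes exclude the path-level ids from the dump and re-inject them when rebuilding a request, which is why `as_request_resource` merges `modelId`/`revisionId` back in. A hypothetical, simplified version of that round trip (the alias/config machinery here is an assumption, not the toolkit's actual base classes):

```python
from pydantic import BaseModel, ConfigDict, Field
from pydantic.alias_generators import to_camel

class MappingRequest(BaseModel):  # simplified stand-in for AssetMappingClassicRequest
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)
    node_id: int
    model_id: int = Field(exclude=True)
    revision_id: int = Field(exclude=True)

class MappingResponse(BaseModel):  # simplified stand-in for AssetMappingClassicResponse
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)
    node_id: int
    model_id: int = Field(-1, exclude=True)   # -1 default: value comes from the URL path
    revision_id: int = Field(-1, exclude=True)

    def as_request_resource(self) -> MappingRequest:
        # model_id/revision_id are excluded from the dump, so merge them back in.
        dumped = self.model_dump(by_alias=True)
        return MappingRequest.model_validate(
            {**dumped, "modelId": self.model_id, "revisionId": self.revision_id}
        )

resp = MappingResponse(node_id=1, model_id=42, revision_id=7)
assert resp.as_request_resource().revision_id == 7
```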
cognite_toolkit/_cdf_tk/client/testing.py

@@ -65,7 +65,7 @@ from .api.search import SearchAPI
 from .api.security_categories import SecurityCategoriesAPI
 from .api.sequences import SequencesAPI
 from .api.streams import StreamsAPI
-from .api.three_d import ThreeDAPI,
+from .api.three_d import ThreeDAPI, ThreeDClassicModelsAPI
 from .api.timeseries import TimeSeriesAPI
 from .api.token import TokenAPI
 from .api.transformations import TransformationsAPI
@@ -151,7 +151,7 @@ class ToolkitClientMock(CogniteClientMock):
 
         self.tool = MagicMock(spec=ToolAPI)
         self.tool.three_d = MagicMock(spec=ThreeDAPI)
-        self.tool.three_d.
+        self.tool.three_d.models_classic = MagicMock(spec_set=ThreeDClassicModelsAPI)
         self.tool.assets = MagicMock(spec_set=AssetsAPI)
         self.tool.timeseries = MagicMock(spec_set=TimeSeriesAPI)
         self.tool.filemetadata = MagicMock(spec_set=FileMetadataAPI)
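`spec_set` makes the mock reject any attribute the real API class does not define, so tests fail fast on typos against the new `models_classic` surface. A small sketch of that behavior, with a stand-in class in place of the real `ThreeDClassicModelsAPI`:

```python
from unittest.mock import MagicMock

class ThreeDClassicModelsAPI:  # stand-in for the real API class
    def retrieve(self, id: int) -> None: ...

models_classic = MagicMock(spec_set=ThreeDClassicModelsAPI)
models_classic.retrieve(id=42)  # fine: the real class defines retrieve()
try:
    models_classic.does_not_exist()
except AttributeError:
    print("spec_set rejects attributes the real API lacks")
```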
cognite_toolkit/_cdf_tk/commands/_migrate/command.py

@@ -1,13 +1,19 @@
-from collections.abc import Callable,
-from
+from collections.abc import Callable, Sequence
+from dataclasses import dataclass
 from pathlib import Path
+from typing import get_args
 
 from rich import print
 from rich.console import Console
 from rich.table import Table
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.http_client import
+from cognite_toolkit._cdf_tk.client.http_client import (
+    FailedRequestItems,
+    FailedResponseItems,
+    HTTPClient,
+    SuccessResponseItems,
+)
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.commands._migrate.creators import MigrationCreator
 from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import DataMapper
@@ -22,25 +28,30 @@ from cognite_toolkit._cdf_tk.exceptions import (
 )
 from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.storageio import T_Selector, UploadableStorageIO, UploadItem
+from cognite_toolkit._cdf_tk.storageio.logger import FileDataLogger, OperationStatus
 from cognite_toolkit._cdf_tk.utils import humanize_collection, safe_write, sanitize_filename
 from cognite_toolkit._cdf_tk.utils.file import yaml_safe_dump
-from cognite_toolkit._cdf_tk.utils.fileio import
+from cognite_toolkit._cdf_tk.utils.fileio import NDJsonWriter, Uncompressed
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
-from cognite_toolkit._cdf_tk.utils.progress_tracker import AVAILABLE_STATUS, ProgressTracker, Status
 
 from .data_model import INSTANCE_SOURCE_VIEW_ID, MODEL_ID, RESOURCE_VIEW_MAPPING_VIEW_ID
+from .issues import WriteIssue
 
 
-
-
-
-
-
+@dataclass
+class OperationIssue:
+    message: str
+    count: int
+
 
-
-
-
+@dataclass
+class MigrationStatusResult:
+    status: OperationStatus
+    issues: list[OperationIssue]
+    count: int
 
+
+class MigrationCommand(ToolkitCommand):
     def migrate(
         self,
         selected: T_Selector,
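The two new dataclasses give the migrate command a typed summary shape, and `get_args` (imported above) lets it iterate every `OperationStatus` value. A minimal sketch; the concrete Literal values are inferred from the `finalize_item` calls later in this diff and may not be exhaustive:

```python
from dataclasses import dataclass
from typing import Literal, get_args

# Assumed shape of OperationStatus, based on the statuses used in this diff.
OperationStatus = Literal["pending", "success", "failure"]

@dataclass
class OperationIssue:
    message: str
    count: int

@dataclass
class MigrationStatusResult:
    status: OperationStatus
    issues: list[OperationIssue]
    count: int

# get_args() yields every Literal member, so no status is silently skipped.
for status in get_args(OperationStatus):
    print(MigrationStatusResult(status=status, issues=[], count=0))
```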
@@ -49,7 +60,7 @@ class MigrationCommand(ToolkitCommand):
         log_dir: Path,
         dry_run: bool = False,
         verbose: bool = False,
-    ) ->
+    ) -> list[MigrationStatusResult]:
         if log_dir.exists() and any(log_dir.iterdir()):
             raise ToolkitFileExistsError(
                 f"Log directory {log_dir} already exists. Please remove it or choose another directory."
@@ -65,15 +76,18 @@ class MigrationCommand(ToolkitCommand):
         self.validate_available_capacity(data.client, total_items)
 
         console = Console()
-        tracker = ProgressTracker[str](self.Steps.list())
         with (
             NDJsonWriter(log_dir, kind=f"{selected.kind}MigrationIssues", compression=Uncompressed) as log_file,
             HTTPClient(config=data.client.config) as write_client,
         ):
+            logger = FileDataLogger(log_file)
+            data.logger = logger
+            mapper.logger = logger
+
             executor = ProducerWorkerExecutor[Sequence[T_ResourceResponse], Sequence[UploadItem[T_ResourceRequest]]](
-                download_iterable=
-                process=self._convert(mapper, data
-                write=self._upload(selected, write_client, data,
+                download_iterable=(page.items for page in data.stream_data(selected)),
+                process=self._convert(mapper, data),
+                write=self._upload(selected, write_client, data, dry_run),
                 iteration_count=iteration_count,
                 max_queue_size=10,
                 download_description=f"Downloading {selected.display_name}",
|
|
|
86
100
|
executor.run()
|
|
87
101
|
total = executor.total_items
|
|
88
102
|
|
|
89
|
-
self.
|
|
90
|
-
|
|
103
|
+
results = self._create_status_summary(logger)
|
|
104
|
+
|
|
105
|
+
self._print_rich_tables(results, console)
|
|
106
|
+
self._print_txt(results, log_dir, f"{selected.kind}Items", console)
|
|
91
107
|
executor.raise_on_error()
|
|
92
108
|
action = "Would migrate" if dry_run else "Migrating"
|
|
93
109
|
console.print(f"{action} {total:,} {selected.display_name} to instances.")
|
|
94
|
-
return tracker
|
|
95
110
|
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
111
|
+
return results
|
|
112
|
+
|
|
113
|
+
# Todo: Move to the logger module
|
|
114
|
+
@classmethod
|
|
115
|
+
def _create_status_summary(cls, logger: FileDataLogger) -> list[MigrationStatusResult]:
|
|
116
|
+
results: list[MigrationStatusResult] = []
|
|
117
|
+
status_counts = logger.tracker.get_status_counts()
|
|
118
|
+
for status in get_args(OperationStatus):
|
|
119
|
+
issue_counts = logger.tracker.get_issue_counts(status)
|
|
120
|
+
issues = [OperationIssue(message=issue, count=count) for issue, count in issue_counts.items()]
|
|
121
|
+
result = MigrationStatusResult(
|
|
122
|
+
status=status,
|
|
123
|
+
issues=issues,
|
|
124
|
+
count=status_counts.get(status, 0),
|
|
101
125
|
)
|
|
126
|
+
results.append(result)
|
|
127
|
+
return results
|
|
102
128
|
|
|
129
|
+
def _print_rich_tables(self, results: list[MigrationStatusResult], console: Console) -> None:
|
|
103
130
|
table = Table(title="Migration Summary", show_lines=True)
|
|
104
|
-
table.add_column("Status", style="
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
for
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
continue
|
|
111
|
-
row = [status]
|
|
112
|
-
for step in self.Steps:
|
|
113
|
-
row.append(str(results.get((step.value, status), 0)))
|
|
114
|
-
table.add_row(*row)
|
|
115
|
-
|
|
131
|
+
table.add_column("Status", style="bold")
|
|
132
|
+
table.add_column("Count", justify="right", style="bold")
|
|
133
|
+
table.add_column("Issues", style="bold")
|
|
134
|
+
for result in results:
|
|
135
|
+
issues_str = "\n".join(f"{issue.message}: {issue.count}" for issue in result.issues) or ""
|
|
136
|
+
table.add_row(result.status, str(result.count), issues_str)
|
|
116
137
|
console.print(table)
|
|
117
138
|
|
|
118
|
-
def
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
SchemaColumn(name="ID", type="string"),
|
|
135
|
-
*(SchemaColumn(name=step, type="string") for step in cls.Steps.list()),
|
|
136
|
-
]
|
|
137
|
-
|
|
138
|
-
def _download_iterable(
|
|
139
|
-
self,
|
|
140
|
-
selected: T_Selector,
|
|
141
|
-
data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
|
|
142
|
-
tracker: ProgressTracker[str],
|
|
143
|
-
) -> Iterable[Sequence[T_ResourceResponse]]:
|
|
144
|
-
for page in data.stream_data(selected):
|
|
145
|
-
for item in page.items:
|
|
146
|
-
tracker.set_progress(data.as_id(item), self.Steps.DOWNLOAD, "success")
|
|
147
|
-
yield page.items
|
|
139
|
+
def _print_txt(self, results: list[MigrationStatusResult], log_dir: Path, kind: str, console: Console) -> None:
|
|
140
|
+
summary_file = log_dir / f"{kind}_migration_summary.txt"
|
|
141
|
+
with summary_file.open("w", encoding="utf-8") as f:
|
|
142
|
+
f.write("Migration Summary\n")
|
|
143
|
+
f.write("=================\n\n")
|
|
144
|
+
for result in results:
|
|
145
|
+
f.write(f"Status: {result.status}\n")
|
|
146
|
+
f.write(f"Count: {result.count}\n")
|
|
147
|
+
f.write("Issues:\n")
|
|
148
|
+
if result.issues:
|
|
149
|
+
for issue in result.issues:
|
|
150
|
+
f.write(f" - {issue.message}: {issue.count}\n")
|
|
151
|
+
else:
|
|
152
|
+
f.write(" None\n")
|
|
153
|
+
f.write("\n")
|
|
154
|
+
console.print(f"Summary written to {log_dir}")
|
|
148
155
|
|
|
156
|
+
@staticmethod
|
|
149
157
|
def _convert(
|
|
150
|
-
self,
|
|
151
158
|
mapper: DataMapper[T_Selector, T_ResourceResponse, T_ResourceRequest],
|
|
152
159
|
data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
|
|
153
|
-
tracker: ProgressTracker[str],
|
|
154
|
-
log_file: NDJsonWriter,
|
|
155
160
|
) -> Callable[[Sequence[T_ResourceResponse]], Sequence[UploadItem[T_ResourceRequest]]]:
|
|
156
161
|
def track_mapping(source: Sequence[T_ResourceResponse]) -> list[UploadItem[T_ResourceRequest]]:
|
|
157
162
|
mapped = mapper.map(source)
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
result: Status = "failed" if target is None else "success"
|
|
164
|
-
tracker.set_progress(id_, step=self.Steps.CONVERT, status=result)
|
|
165
|
-
|
|
166
|
-
if issue.has_issues:
|
|
167
|
-
# MyPy fails to understand that dict[str, JsonVal] is a Chunk
|
|
168
|
-
issues.append(issue.dump()) # type: ignore[arg-type]
|
|
169
|
-
if target is not None:
|
|
170
|
-
targets.append(UploadItem(source_id=id_, item=target))
|
|
171
|
-
if issues:
|
|
172
|
-
log_file.write_chunks(issues)
|
|
173
|
-
return targets
|
|
163
|
+
return [
|
|
164
|
+
UploadItem(source_id=data.as_id(item), item=target)
|
|
165
|
+
for target, item in zip(mapped, source)
|
|
166
|
+
if target is not None
|
|
167
|
+
]
|
|
174
168
|
|
|
175
169
|
return track_mapping
|
|
176
170
|
|
|
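The rewritten `_convert` replaces per-item progress tracking with a single comprehension that pairs each mapped target with its source and drops failed conversions. The filtering pattern in isolation, with plain stand-in types:

```python
from dataclasses import dataclass

@dataclass
class UploadItem:  # stand-in for the toolkit's UploadItem
    source_id: str
    item: dict

source = ["asset-1", "asset-2", "asset-3"]
# mapper.map() returns one entry per source item; None marks a failed conversion.
mapped = [{"externalId": "asset-1"}, None, {"externalId": "asset-3"}]

targets = [
    UploadItem(source_id=src, item=target)
    for target, src in zip(mapped, source)
    if target is not None
]
assert [t.source_id for t in targets] == ["asset-1", "asset-3"]
```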
@@ -179,36 +173,37 @@ class MigrationCommand(ToolkitCommand):
         selected: T_Selector,
         write_client: HTTPClient,
         target: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-        tracker: ProgressTracker[str],
-        log_file: NDJsonWriter,
         dry_run: bool,
     ) -> Callable[[Sequence[UploadItem[T_ResourceRequest]]], None]:
         def upload_items(data_item: Sequence[UploadItem[T_ResourceRequest]]) -> None:
             if not data_item:
                 return None
-            responses: Sequence[HTTPMessage]
             if dry_run:
-
-
-
-
-
-
-
-
-            issues: list[Chunk] = []
+                target.logger.tracker.finalize_item([item.source_id for item in data_item], "pending")
+                return None
+
+            responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)
+
+            # Todo: Move logging into the UploadableStorageIO class
+            issues: list[WriteIssue] = []
             for item in responses:
                 if isinstance(item, SuccessResponseItems):
-
-
-
-
-
-
-
-
+                    target.logger.tracker.finalize_item(item.ids, "success")
+                    continue
+                if isinstance(item, FailedResponseItems):
+                    error = item.error
+                    for id_ in item.ids:
+                        issue = WriteIssue(id=str(id_), status_code=error.code, message=error.message)
+                        issues.append(issue)
+                elif isinstance(item, FailedRequestItems):
+                    for id_ in item.ids:
+                        issue = WriteIssue(id=str(id_), status_code=0, message=item.error)
+                        issues.append(issue)
+
+                if isinstance(item, FailedResponseItems | FailedRequestItems):
+                    target.logger.tracker.finalize_item(item.ids, "failure")
             if issues:
-
+                target.logger.log(issues)
             return None
 
         return upload_items
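`upload_items` now classifies each response chunk by type: successes finalize immediately, while both failure variants collect `WriteIssue`s before being finalized as failures. A loose sketch of that dispatch with stand-in classes (the real item types live in `cognite_toolkit._cdf_tk.client.http_client`):

```python
from dataclasses import dataclass

@dataclass
class SuccessResponseItems:
    ids: list[str]

@dataclass
class FailedResponseItems:
    ids: list[str]
    message: str

responses = [SuccessResponseItems(["a"]), FailedResponseItems(["b"], "409 Conflict")]
failed: list[str] = []
for item in responses:
    if isinstance(item, SuccessResponseItems):
        continue  # success: nothing to log
    # isinstance(item, X | Y) union syntax, as used above, needs Python 3.10+.
    if isinstance(item, FailedResponseItems):
        failed.extend(f"{id_}: {item.message}" for id_ in item.ids)
assert failed == ["b: 409 Conflict"]
```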
cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py

@@ -17,6 +17,7 @@ from cognite.client.utils._identifier import InstanceId
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.resource_classes.asset import AssetResponse
+from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.event import EventResponse
 from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataResponse
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import (
@@ -198,7 +199,11 @@ def asset_centric_to_dm(
     data_set_id = dumped.pop("dataSetId", None)
     external_id = dumped.pop("externalId", None)
 
-    issue = ConversionIssue(
+    issue = ConversionIssue(
+        id=str(AssetCentricId(resource_type, id_=id_)),
+        asset_centric_id=AssetCentricId(resource_type, id_=id_),
+        instance_id=NodeReference(space=instance_id.space, external_id=instance_id.external_id),
+    )
 
     properties = create_properties(
         dumped,