cognite-toolkit 0.7.48__py3-none-any.whl → 0.7.49__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,19 @@
-from collections.abc import Callable, Iterable, Sequence
-from enum import Enum
+from collections.abc import Callable, Sequence
+from dataclasses import dataclass
 from pathlib import Path
+from typing import get_args

 from rich import print
 from rich.console import Console
 from rich.table import Table

 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, HTTPMessage, ItemMessage, SuccessResponseItems
+from cognite_toolkit._cdf_tk.client.http_client import (
+    FailedRequestItems,
+    FailedResponseItems,
+    HTTPClient,
+    SuccessResponseItems,
+)
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.commands._migrate.creators import MigrationCreator
 from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import DataMapper
@@ -22,25 +28,30 @@ from cognite_toolkit._cdf_tk.exceptions import (
 )
 from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.storageio import T_Selector, UploadableStorageIO, UploadItem
+from cognite_toolkit._cdf_tk.storageio.logger import FileDataLogger, OperationStatus
 from cognite_toolkit._cdf_tk.utils import humanize_collection, safe_write, sanitize_filename
 from cognite_toolkit._cdf_tk.utils.file import yaml_safe_dump
-from cognite_toolkit._cdf_tk.utils.fileio import Chunk, CSVWriter, NDJsonWriter, SchemaColumn, Uncompressed
+from cognite_toolkit._cdf_tk.utils.fileio import NDJsonWriter, Uncompressed
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
-from cognite_toolkit._cdf_tk.utils.progress_tracker import AVAILABLE_STATUS, ProgressTracker, Status

 from .data_model import INSTANCE_SOURCE_VIEW_ID, MODEL_ID, RESOURCE_VIEW_MAPPING_VIEW_ID
+from .issues import WriteIssue


-class MigrationCommand(ToolkitCommand):
-    class Steps(str, Enum):
-        DOWNLOAD = "download"
-        CONVERT = "convert"
-        UPLOAD = "upload"
+@dataclass
+class OperationIssue:
+    message: str
+    count: int
+

-        @classmethod
-        def list(cls) -> list[str]:
-            return [step.value for step in cls.__members__.values()]
+@dataclass
+class MigrationStatusResult:
+    status: OperationStatus
+    issues: list[OperationIssue]
+    count: int

+
+class MigrationCommand(ToolkitCommand):
     def migrate(
         self,
         selected: T_Selector,
@@ -49,7 +60,7 @@ class MigrationCommand(ToolkitCommand):
         log_dir: Path,
         dry_run: bool = False,
         verbose: bool = False,
-    ) -> ProgressTracker[str]:
+    ) -> list[MigrationStatusResult]:
         if log_dir.exists() and any(log_dir.iterdir()):
             raise ToolkitFileExistsError(
                 f"Log directory {log_dir} already exists. Please remove it or choose another directory."
@@ -65,15 +76,18 @@ class MigrationCommand(ToolkitCommand):
         self.validate_available_capacity(data.client, total_items)

         console = Console()
-        tracker = ProgressTracker[str](self.Steps.list())
         with (
             NDJsonWriter(log_dir, kind=f"{selected.kind}MigrationIssues", compression=Uncompressed) as log_file,
             HTTPClient(config=data.client.config) as write_client,
         ):
+            logger = FileDataLogger(log_file)
+            data.logger = logger
+            mapper.logger = logger
+
             executor = ProducerWorkerExecutor[Sequence[T_ResourceResponse], Sequence[UploadItem[T_ResourceRequest]]](
-                download_iterable=self._download_iterable(selected, data, tracker),
-                process=self._convert(mapper, data, tracker, log_file),
-                write=self._upload(selected, write_client, data, tracker, log_file, dry_run),
+                download_iterable=(page.items for page in data.stream_data(selected)),
+                process=self._convert(mapper, data),
+                write=self._upload(selected, write_client, data, dry_run),
                 iteration_count=iteration_count,
                 max_queue_size=10,
                 download_description=f"Downloading {selected.display_name}",
@@ -86,91 +100,71 @@ class MigrationCommand(ToolkitCommand):
             executor.run()
             total = executor.total_items

-            self._print_table(tracker.aggregate(), console)
-            self._print_csv(tracker, log_dir, f"{selected.kind}Items", console)
+            results = self._create_status_summary(logger)
+
+            self._print_rich_tables(results, console)
+            self._print_txt(results, log_dir, f"{selected.kind}Items", console)
             executor.raise_on_error()
             action = "Would migrate" if dry_run else "Migrating"
             console.print(f"{action} {total:,} {selected.display_name} to instances.")
-            return tracker

-    def _print_table(self, results: dict[tuple[str, Status], int], console: Console) -> None:
-        for step in self.Steps:
-            # We treat pending as failed for summary purposes
-            results[(step.value, "failed")] = results.get((step.value, "failed"), 0) + results.get(
-                (step.value, "pending"), 0
+            return results
+
+    # Todo: Move to the logger module
+    @classmethod
+    def _create_status_summary(cls, logger: FileDataLogger) -> list[MigrationStatusResult]:
+        results: list[MigrationStatusResult] = []
+        status_counts = logger.tracker.get_status_counts()
+        for status in get_args(OperationStatus):
+            issue_counts = logger.tracker.get_issue_counts(status)
+            issues = [OperationIssue(message=issue, count=count) for issue, count in issue_counts.items()]
+            result = MigrationStatusResult(
+                status=status,
+                issues=issues,
+                count=status_counts.get(status, 0),
            )
+            results.append(result)
+        return results

+    def _print_rich_tables(self, results: list[MigrationStatusResult], console: Console) -> None:
         table = Table(title="Migration Summary", show_lines=True)
-        table.add_column("Status", style="cyan", no_wrap=True)
-        for step in self.Steps:
-            table.add_column(step.value.capitalize(), style="magenta")
-        for status in AVAILABLE_STATUS:
-            if status == "pending":
-                # Skip pending as we treat it as failed
-                continue
-            row = [status]
-            for step in self.Steps:
-                row.append(str(results.get((step.value, status), 0)))
-            table.add_row(*row)
-
+        table.add_column("Status", style="bold")
+        table.add_column("Count", justify="right", style="bold")
+        table.add_column("Issues", style="bold")
+        for result in results:
+            issues_str = "\n".join(f"{issue.message}: {issue.count}" for issue in result.issues) or ""
+            table.add_row(result.status, str(result.count), issues_str)
         console.print(table)

-    def _print_csv(self, tracker: ProgressTracker[str], log_dir: Path, kind: str, console: Console) -> None:
-        with CSVWriter(log_dir, kind=kind, compression=Uncompressed, columns=self._csv_columns()) as csv_file:
-            batch: list[Chunk] = []
-            steps = self.Steps.list()
-            for item_id, progress in tracker.result().items():
-                batch.append({"ID": str(item_id), **{step: progress[step] for step in steps}})
-                if len(batch) >= 1000:
-                    csv_file.write_chunks(batch)
-                    batch = []
-            if batch:
-                csv_file.write_chunks(batch)
-        console.print(f"Migration items written to {log_dir}")
-
-    @classmethod
-    def _csv_columns(cls) -> list[SchemaColumn]:
-        return [
-            SchemaColumn(name="ID", type="string"),
-            *(SchemaColumn(name=step, type="string") for step in cls.Steps.list()),
-        ]
-
-    def _download_iterable(
-        self,
-        selected: T_Selector,
-        data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-        tracker: ProgressTracker[str],
-    ) -> Iterable[Sequence[T_ResourceResponse]]:
-        for page in data.stream_data(selected):
-            for item in page.items:
-                tracker.set_progress(data.as_id(item), self.Steps.DOWNLOAD, "success")
-            yield page.items
+    def _print_txt(self, results: list[MigrationStatusResult], log_dir: Path, kind: str, console: Console) -> None:
+        summary_file = log_dir / f"{kind}_migration_summary.txt"
+        with summary_file.open("w", encoding="utf-8") as f:
+            f.write("Migration Summary\n")
+            f.write("=================\n\n")
+            for result in results:
+                f.write(f"Status: {result.status}\n")
+                f.write(f"Count: {result.count}\n")
+                f.write("Issues:\n")
+                if result.issues:
+                    for issue in result.issues:
+                        f.write(f" - {issue.message}: {issue.count}\n")
+                else:
+                    f.write(" None\n")
+                f.write("\n")
+        console.print(f"Summary written to {log_dir}")

+    @staticmethod
     def _convert(
-        self,
         mapper: DataMapper[T_Selector, T_ResourceResponse, T_ResourceRequest],
         data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-        tracker: ProgressTracker[str],
-        log_file: NDJsonWriter,
     ) -> Callable[[Sequence[T_ResourceResponse]], Sequence[UploadItem[T_ResourceRequest]]]:
         def track_mapping(source: Sequence[T_ResourceResponse]) -> list[UploadItem[T_ResourceRequest]]:
             mapped = mapper.map(source)
-            issues: list[Chunk] = []
-            targets: list[UploadItem[T_ResourceRequest]] = []
-
-            for (target, issue), item in zip(mapped, source):
-                id_ = data.as_id(item)
-                result: Status = "failed" if target is None else "success"
-                tracker.set_progress(id_, step=self.Steps.CONVERT, status=result)
-
-                if issue.has_issues:
-                    # MyPy fails to understand that dict[str, JsonVal] is a Chunk
-                    issues.append(issue.dump())  # type: ignore[arg-type]
-                if target is not None:
-                    targets.append(UploadItem(source_id=id_, item=target))
-            if issues:
-                log_file.write_chunks(issues)
-            return targets
+            return [
+                UploadItem(source_id=data.as_id(item), item=target)
+                for target, item in zip(mapped, source)
+                if target is not None
+            ]

         return track_mapping

@@ -179,36 +173,37 @@ class MigrationCommand(ToolkitCommand):
         selected: T_Selector,
         write_client: HTTPClient,
         target: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-        tracker: ProgressTracker[str],
-        log_file: NDJsonWriter,
         dry_run: bool,
     ) -> Callable[[Sequence[UploadItem[T_ResourceRequest]]], None]:
         def upload_items(data_item: Sequence[UploadItem[T_ResourceRequest]]) -> None:
             if not data_item:
                 return None
-            responses: Sequence[HTTPMessage]
             if dry_run:
-                responses = [
-                    SuccessResponseItems(
-                        status_code=200, body="", content=b"", ids=[item.source_id for item in data_item]
-                    )
-                ]
-            else:
-                responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)
-
-            issues: list[Chunk] = []
+                target.logger.tracker.finalize_item([item.source_id for item in data_item], "pending")
+                return None
+
+            responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)
+
+            # Todo: Move logging into the UploadableStorageIO class
+            issues: list[WriteIssue] = []
             for item in responses:
                 if isinstance(item, SuccessResponseItems):
-                    for success_id in item.ids:
-                        tracker.set_progress(success_id, step=self.Steps.UPLOAD, status="success")
-                elif isinstance(item, ItemMessage):
-                    for failed_id in item.ids:
-                        tracker.set_progress(failed_id, step=self.Steps.UPLOAD, status="failed")
-
-                if not isinstance(item, SuccessResponseItems):
-                    issues.append(item.dump())  # type: ignore[arg-type]
+                    target.logger.tracker.finalize_item(item.ids, "success")
+                    continue
+                if isinstance(item, FailedResponseItems):
+                    error = item.error
+                    for id_ in item.ids:
+                        issue = WriteIssue(id=str(id_), status_code=error.code, message=error.message)
+                        issues.append(issue)
+                elif isinstance(item, FailedRequestItems):
+                    for id_ in item.ids:
+                        issue = WriteIssue(id=str(id_), status_code=0, message=item.error)
+                        issues.append(issue)
+
+                if isinstance(item, FailedResponseItems | FailedRequestItems):
+                    target.logger.tracker.finalize_item(item.ids, "failure")
             if issues:
-                log_file.write_chunks(issues)
+                target.logger.log(issues)
             return None

         return upload_items
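
The hunks above replace the per-step ProgressTracker summary with status-based aggregation built from the logger's tracker. Below is a minimal, self-contained sketch of that aggregation; the dictionaries passed in are hypothetical stand-ins for logger.tracker.get_status_counts() and get_issue_counts(status), which are not shown in this diff.

    # Sketch only: mirrors the _create_status_summary pattern with plain dicts
    # standing in for the tracker that FileDataLogger would provide.
    from dataclasses import dataclass
    from typing import Literal, TypeAlias, get_args

    OperationStatus: TypeAlias = Literal["success", "failure", "unchanged", "pending"]


    @dataclass
    class OperationIssue:
        message: str
        count: int


    @dataclass
    class MigrationStatusResult:
        status: OperationStatus
        issues: list[OperationIssue]
        count: int


    def create_status_summary(
        status_counts: dict[OperationStatus, int],
        issue_counts_by_status: dict[OperationStatus, dict[str, int]],
    ) -> list[MigrationStatusResult]:
        # One result row per possible status, even when its count is zero.
        return [
            MigrationStatusResult(
                status=status,
                issues=[
                    OperationIssue(message=message, count=count)
                    for message, count in issue_counts_by_status.get(status, {}).items()
                ],
                count=status_counts.get(status, 0),
            )
            for status in get_args(OperationStatus)
        ]


    summary = create_status_summary(
        {"success": 10, "failure": 2},
        {"failure": {"Missing instance space": 2}},
    )

Each MigrationStatusResult then maps directly onto one row of the rich table and one block of the plain-text summary file produced by _print_rich_tables and _print_txt.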
@@ -17,6 +17,7 @@ from cognite.client.utils._identifier import InstanceId

 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.resource_classes.asset import AssetResponse
+from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.event import EventResponse
 from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataResponse
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import (
@@ -198,7 +199,11 @@ def asset_centric_to_dm(
     data_set_id = dumped.pop("dataSetId", None)
     external_id = dumped.pop("externalId", None)

-    issue = ConversionIssue(asset_centric_id=AssetCentricId(resource_type, id_=id_), instance_id=instance_id)
+    issue = ConversionIssue(
+        id=str(AssetCentricId(resource_type, id_=id_)),
+        asset_centric_id=AssetCentricId(resource_type, id_=id_),
+        instance_id=NodeReference(space=instance_id.space, external_id=instance_id.external_id),
+    )

     properties = create_properties(
         dumped,
@@ -46,13 +46,13 @@ from cognite_toolkit._cdf_tk.commands._migrate.issues import (
     CanvasMigrationIssue,
     ChartMigrationIssue,
     ConversionIssue,
-    MigrationIssue,
     ThreeDModelMigrationIssue,
 )
 from cognite_toolkit._cdf_tk.constants import MISSING_INSTANCE_SPACE
 from cognite_toolkit._cdf_tk.exceptions import ToolkitMigrationError, ToolkitValueError
 from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.storageio._base import T_Selector
+from cognite_toolkit._cdf_tk.storageio.logger import DataLogger, NoOpLogger
 from cognite_toolkit._cdf_tk.storageio.selectors import CanvasSelector, ChartSelector, ThreeDSelector
 from cognite_toolkit._cdf_tk.utils import humanize_collection
 from cognite_toolkit._cdf_tk.utils.useful_types2 import T_AssetCentricResourceExtended
@@ -62,6 +62,10 @@ from .selectors import AssetCentricMigrationSelector


 class DataMapper(Generic[T_Selector, T_ResourceResponse, T_ResourceRequest], ABC):
+    def __init__(self, client: ToolkitClient) -> None:
+        self.client = client
+        self.logger: DataLogger = NoOpLogger()
+
     def prepare(self, source_selector: T_Selector) -> None:
         """Prepare the data mapper with the given source selector.

@@ -73,14 +77,14 @@ class DataMapper(Generic[T_Selector, T_ResourceResponse, T_ResourceRequest], ABC
         pass

     @abstractmethod
-    def map(self, source: Sequence[T_ResourceResponse]) -> Sequence[tuple[T_ResourceRequest | None, MigrationIssue]]:
+    def map(self, source: Sequence[T_ResourceResponse]) -> Sequence[T_ResourceRequest | None]:
         """Map a chunk of source data to the target format.

         Args:
             source: The source data chunk to be mapped.

         Returns:
-            A tuple containing the mapped data and a list of any issues encountered during mapping.
+            A sequence of mapped target data.

         """
         raise NotImplementedError("Subclasses must implement this method.")
@@ -90,7 +94,7 @@ class AssetCentricMapper(
     DataMapper[AssetCentricMigrationSelector, AssetCentricMapping[T_AssetCentricResourceExtended], InstanceApply]
 ):
     def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
+        super().__init__(client)
         self._ingestion_view_by_id: dict[ViewId, View] = {}
         self._view_mapping_by_id: dict[str, ResourceViewMappingApply] = {}
         self._direct_relation_cache = DirectRelationCache(client)
@@ -118,14 +122,37 @@ class AssetCentricMapper(

     def map(
         self, source: Sequence[AssetCentricMapping[T_AssetCentricResourceExtended]]
-    ) -> Sequence[tuple[InstanceApply | None, ConversionIssue]]:
+    ) -> Sequence[InstanceApply | None]:
         """Map a chunk of asset-centric data to InstanceApplyList format."""
         # We update the direct relation cache in bulk for all resources in the chunk.
         self._direct_relation_cache.update(item.resource for item in source)
-        output: list[tuple[InstanceApply | None, ConversionIssue]] = []
+        output: list[InstanceApply | None] = []
+        issues: list[ConversionIssue] = []
         for item in source:
             instance, conversion_issue = self._map_single_item(item)
-            output.append((instance, conversion_issue))
+            identifier = str(item.mapping.as_asset_centric_id())
+
+            if conversion_issue.missing_instance_space:
+                self.logger.tracker.add_issue(identifier, "Missing instance space")
+            if conversion_issue.failed_conversions:
+                self.logger.tracker.add_issue(identifier, "Failed conversions")
+            if conversion_issue.invalid_instance_property_types:
+                self.logger.tracker.add_issue(identifier, "Invalid instance property types")
+            if conversion_issue.missing_asset_centric_properties:
+                self.logger.tracker.add_issue(identifier, "Missing asset-centric properties")
+            if conversion_issue.missing_instance_properties:
+                self.logger.tracker.add_issue(identifier, "Missing instance properties")
+            if conversion_issue.ignored_asset_centric_properties:
+                self.logger.tracker.add_issue(identifier, "Ignored asset-centric properties")
+
+            if conversion_issue.has_issues:
+                issues.append(conversion_issue)
+
+            if instance is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(instance)
+        if issues:
+            self.logger.log(issues)
         return output

     def _map_single_item(
@@ -154,15 +181,29 @@ class AssetCentricMapper(


 class ChartMapper(DataMapper[ChartSelector, Chart, ChartWrite]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(self, source: Sequence[Chart]) -> Sequence[tuple[ChartWrite | None, MigrationIssue]]:
+    def map(self, source: Sequence[Chart]) -> Sequence[ChartWrite | None]:
         self._populate_cache(source)
-        output: list[tuple[ChartWrite | None, MigrationIssue]] = []
+        output: list[ChartWrite | None] = []
+        issues: list[ChartMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.external_id
+
+            if issue.missing_timeseries_ids:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries IDs")
+            if issue.missing_timeseries_external_ids:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries external IDs")
+            if issue.missing_timeseries_identifier:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries identifier")
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output

     def _populate_cache(self, source: Sequence[Chart]) -> None:
@@ -184,7 +225,7 @@ class ChartMapper(DataMapper[ChartSelector, Chart, ChartWrite]):
             self.client.migration.lookup.time_series(external_id=list(timeseries_external_ids))

     def _map_single_item(self, item: Chart) -> tuple[ChartWrite | None, ChartMigrationIssue]:
-        issue = ChartMigrationIssue(chart_external_id=item.external_id)
+        issue = ChartMigrationIssue(chart_external_id=item.external_id, id=item.external_id)
         time_series_collection = item.data.time_series_collection or []
         timeseries_core_collection = self._create_timeseries_core_collection(time_series_collection, issue)
         if issue.has_issues:
@@ -280,16 +321,30 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas
     DEFAULT_TIMESERIES_VIEW = ViewId("cdf_cdm", "CogniteTimeSeries", "v1")

     def __init__(self, client: ToolkitClient, dry_run: bool, skip_on_missing_ref: bool = False) -> None:
-        self.client = client
+        super().__init__(client)
         self.dry_run = dry_run
         self.skip_on_missing_ref = skip_on_missing_ref

-    def map(self, source: Sequence[IndustrialCanvas]) -> Sequence[tuple[IndustrialCanvasApply | None, MigrationIssue]]:
+    def map(self, source: Sequence[IndustrialCanvas]) -> Sequence[IndustrialCanvasApply | None]:
         self._populate_cache(source)
-        output: list[tuple[IndustrialCanvasApply | None, MigrationIssue]] = []
+        output: list[IndustrialCanvasApply | None] = []
+        issues: list[CanvasMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.as_id()
+
+            if issue.missing_reference_ids:
+                self.logger.tracker.add_issue(identifier, "Missing reference IDs")
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output

     @property
@@ -340,7 +395,9 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas

     def _map_single_item(self, canvas: IndustrialCanvas) -> tuple[IndustrialCanvasApply | None, CanvasMigrationIssue]:
         update = canvas.as_write()
-        issue = CanvasMigrationIssue(canvas_external_id=canvas.canvas.external_id, canvas_name=canvas.canvas.name)
+        issue = CanvasMigrationIssue(
+            canvas_external_id=canvas.canvas.external_id, canvas_name=canvas.canvas.name, id=canvas.canvas.name
+        )

         remaining_container_references: list[ContainerReferenceApply] = []
         new_fdm_references: list[FdmInstanceContainerReferenceApply] = []
@@ -399,17 +456,26 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas


 class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrationRequest]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(
-        self, source: Sequence[ThreeDModelResponse]
-    ) -> Sequence[tuple[ThreeDMigrationRequest | None, MigrationIssue]]:
+    def map(self, source: Sequence[ThreeDModelResponse]) -> Sequence[ThreeDMigrationRequest | None]:
         self._populate_cache(source)
-        output: list[tuple[ThreeDMigrationRequest | None, MigrationIssue]] = []
+        output: list[ThreeDMigrationRequest | None] = []
+        issues: list[ThreeDModelMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.name
+
+            if issue.error_message:
+                for error in issue.error_message:
+                    self.logger.tracker.add_issue(identifier, error)
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output

     def _populate_cache(self, source: Sequence[ThreeDModelResponse]) -> None:
@@ -422,7 +488,7 @@ class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrati
     def _map_single_item(
         self, item: ThreeDModelResponse
     ) -> tuple[ThreeDMigrationRequest | None, ThreeDModelMigrationIssue]:
-        issue = ThreeDModelMigrationIssue(model_name=item.name, model_id=item.id)
+        issue = ThreeDModelMigrationIssue(model_name=item.name, model_id=item.id, id=item.name)
         instance_space: str | None = None
         last_revision_id: int | None = None
         model_type: Literal["CAD", "PointCloud", "Image360"] | None = None
@@ -478,17 +544,27 @@ class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrati


 class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingClassicResponse, AssetMappingDMRequest]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(
-        self, source: Sequence[AssetMappingClassicResponse]
-    ) -> Sequence[tuple[AssetMappingDMRequest | None, MigrationIssue]]:
-        output: list[tuple[AssetMappingDMRequest | None, MigrationIssue]] = []
+    def map(self, source: Sequence[AssetMappingClassicResponse]) -> Sequence[AssetMappingDMRequest | None]:
+        output: list[AssetMappingDMRequest | None] = []
+        issues: list[ThreeDModelMigrationIssue] = []
         self._populate_cache(source)
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = f"AssetMapping_{item.model_id!s}_{item.revision_id!s}_{item.asset_id!s}"
+
+            if issue.error_message:
+                for error in issue.error_message:
+                    self.logger.tracker.add_issue(identifier, error)
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output

     def _populate_cache(self, source: Sequence[AssetMappingClassicResponse]) -> None:
@@ -501,7 +577,9 @@ class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingClassicResponse,
     def _map_single_item(
         self, item: AssetMappingClassicResponse
     ) -> tuple[AssetMappingDMRequest | None, ThreeDModelMigrationIssue]:
-        issue = ThreeDModelMigrationIssue(model_name=f"AssetMapping_{item.model_id}", model_id=item.model_id)
+        issue = ThreeDModelMigrationIssue(
+            model_name=f"AssetMapping_{item.model_id}", model_id=item.model_id, id=f"AssetMapping_{item.model_id}"
+        )
         asset_instance_id = item.asset_instance_id
         if item.asset_id and asset_instance_id is None:
             asset_node_id = self.client.migration.lookup.assets(item.asset_id)
@@ -1,34 +1,27 @@
-import json
-from typing import Any, ClassVar
+from typing import Any, Literal

-from cognite.client.data_classes.data_modeling import NodeId
-from cognite.client.utils._identifier import InstanceId
-from cognite.client.utils._text import to_camel_case
 from pydantic import BaseModel, Field, field_serializer
+from pydantic.alias_generators import to_camel

+from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import AssetCentricId
-from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
+from cognite_toolkit._cdf_tk.storageio.logger import LogEntry


-class MigrationObject(BaseModel, alias_generator=to_camel_case, extra="ignore", populate_by_name=True): ...
-
-
-class MigrationIssue(MigrationObject):
+class MigrationIssue(LogEntry):
     """Represents an issue encountered during migration."""

-    type: ClassVar[str]
-
-    def dump(self) -> dict[str, JsonVal]:
-        # Dump json to ensure it is serializable
-        dumped = json.loads(self.model_dump_json(by_alias=True))
-        dumped["type"] = self.type
-        return dumped
+    type: str

     @property
     def has_issues(self) -> bool:
         """Check if there are any issues recorded in this MigrationIssue."""
         return True

+    def dump(self) -> dict[str, Any]:
+        """Serialize the MigrationIssue to a dictionary."""
+        return self.model_dump(by_alias=True)
+

 class ThreeDModelMigrationIssue(MigrationIssue):
     """Represents a 3D model migration issue encountered during migration.
@@ -37,7 +30,7 @@ class ThreeDModelMigrationIssue(MigrationIssue):
         model_external_id (str): The external ID of the 3D model that could not be migrated.
     """

-    type: ClassVar[str] = "threeDModelMigration"
+    type: Literal["threeDModelMigration"] = "threeDModelMigration"
     model_name: str
     model_id: int
     error_message: list[str] = Field(default_factory=list)
@@ -55,7 +48,7 @@ class ChartMigrationIssue(MigrationIssue):
         chart_external_id (str): The external ID of the chart that could not be migrated.
     """

-    type: ClassVar[str] = "chartMigration"
+    type: Literal["chartMigration"] = "chartMigration"
     chart_external_id: str
     missing_timeseries_ids: list[int] = Field(default_factory=list)
     missing_timeseries_external_ids: list[str] = Field(default_factory=list)
@@ -70,7 +63,7 @@ class ChartMigrationIssue(MigrationIssue):


 class CanvasMigrationIssue(MigrationIssue):
-    type: ClassVar[str] = "canvasMigration"
+    type: Literal["canvasMigration"] = "canvasMigration"
     canvas_external_id: str
     canvas_name: str
     missing_reference_ids: list[AssetCentricId] = Field(default_factory=list)
@@ -84,7 +77,7 @@ class CanvasMigrationIssue(MigrationIssue):
 class ReadIssue(MigrationIssue):
     """Represents a read issue encountered during migration."""

-    type: ClassVar[str] = "read"
+    ...


 class ReadFileIssue(ReadIssue):
@@ -95,7 +88,7 @@ class ReadFileIssue(ReadIssue):
         error (str | None): An optional error message providing additional details about the read issue.
     """

-    type: ClassVar[str] = "fileRead"
+    type: Literal["fileRead"] = "fileRead"

     row_no: int
     error: str | None = None
@@ -109,7 +102,7 @@ class ReadAPIIssue(ReadIssue):
         error (str | None): An optional error message providing additional details about the read issue.
     """

-    type: ClassVar[str] = "apiRead"
+    type: Literal["apiRead"] = "apiRead"
     asset_centric_id: AssetCentricId
     error: str | None = None

@@ -121,7 +114,7 @@ class ReadAPIIssue(ReadIssue):
         }


-class FailedConversion(MigrationObject):
+class FailedConversion(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
     """Represents a property that failed to convert during migration.

     Attributes:
@@ -135,7 +128,7 @@ class FailedConversion(MigrationObject):
     error: str


-class InvalidPropertyDataType(MigrationObject):
+class InvalidPropertyDataType(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
     """Represents a property with an invalid type during migration.

     Attributes:
@@ -160,9 +153,9 @@ class ConversionIssue(MigrationIssue):
         failed_conversions (list[FailedConversion]): List of properties that failed to convert with reasons.
     """

-    type: ClassVar[str] = "conversion"
+    type: Literal["conversion"] = "conversion"
     asset_centric_id: AssetCentricId
-    instance_id: InstanceId
+    instance_id: NodeReference
     missing_asset_centric_properties: list[str] = Field(default_factory=list)
     missing_instance_properties: list[str] = Field(default_factory=list)
     invalid_instance_property_types: list[InvalidPropertyDataType] = Field(default_factory=list)
@@ -181,10 +174,6 @@ class ConversionIssue(MigrationIssue):
             or self.missing_instance_space
         )

-    @field_serializer("instance_id")
-    def serialize_instance_id(self, instance_id: NodeId) -> dict[str, str]:
-        return instance_id.dump(include_instance_type=True)
-
     @field_serializer("asset_centric_id")
     def serialize_asset_centric_id(self, asset_centric_id: AssetCentricId) -> dict[str, Any]:
         return {
@@ -197,16 +186,10 @@ class WriteIssue(MigrationIssue):
     """Represents a write issue encountered during migration.

     Attributes:
-        instance_id (InstanceId): The InstanceId of the data model instance that could not be written.
         status_code (int): The HTTP status code returned during the write operation.
         message (str | None): An optional message providing additional details about the write issue.
     """

-    type: ClassVar[str] = "write"
-    instance_id: InstanceId
+    type: Literal["write"] = "write"
     status_code: int
     message: str | None = None
-
-    @field_serializer("instance_id")
-    def serialize_instance_id(self, instance_id: NodeId) -> dict[str, str]:
-        return instance_id.dump(include_instance_type=True)
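
The hunks above move the issue models onto the shared LogEntry base and turn the type discriminator into a regular Literal field, so model_dump(by_alias=True) serializes it directly. Below is a minimal sketch of that pattern, assuming LogEntry is roughly a pydantic BaseModel with a camelCase alias generator and an id field (see the logger.py hunk further down); it is illustrative, not the toolkit's actual class hierarchy.

    # Sketch only: a stand-in LogEntry plus one issue subclass to show how the
    # Literal-typed discriminator ends up in the dumped payload.
    from typing import Any, Literal

    from pydantic import BaseModel
    from pydantic.alias_generators import to_camel


    class LogEntry(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
        id: str


    class MigrationIssue(LogEntry):
        type: str

        def dump(self) -> dict[str, Any]:
            # `type` is now an ordinary field, so it is included automatically;
            # the old ClassVar plus custom JSON round-trip is no longer needed.
            return self.model_dump(by_alias=True)


    class WriteIssue(MigrationIssue):
        type: Literal["write"] = "write"
        status_code: int
        message: str | None = None


    issue = WriteIssue(id="asset_123", status_code=409, message="Node already exists")
    print(issue.dump())
    # {'id': 'asset_123', 'type': 'write', 'statusCode': 409, 'message': 'Node already exists'}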
@@ -78,7 +78,7 @@ class AssetCentricMigrationIO(
         self.skip_linking = skip_linking

     def as_id(self, item: AssetCentricMapping) -> str:
-        return f"{item.mapping.resource_type}_{item.mapping.id}"
+        return str(item.mapping.as_asset_centric_id())

     def stream_data(self, selector: AssetCentricMigrationSelector, limit: int | None = None) -> Iterator[Page]:
         if isinstance(selector, MigrationCSVFileSelector):
@@ -181,7 +181,7 @@ class AssetCentricMigrationIO(

         pending_instance_id_endpoint = self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND[selector.kind]
         results: list[HTTPMessage] = []
-        to_upload = self.link_asset_centric(data_chunk, http_client, results, pending_instance_id_endpoint)
+        to_upload = self.link_asset_centric(data_chunk, http_client, pending_instance_id_endpoint)
         if to_upload:
             results.extend(list(super().upload_items(to_upload, http_client, None)))
         return results
@@ -191,7 +191,6 @@ class AssetCentricMigrationIO(
         cls,
         data_chunk: Sequence[UploadItem[InstanceApply]],
         http_client: HTTPClient,
-        results: list[HTTPMessage],
         pending_instance_id_endpoint: str,
     ) -> Sequence[UploadItem[InstanceApply]]:
         """Links asset-centric resources to their (uncreated) instances using the pending-instance-ids endpoint."""
@@ -212,7 +211,6 @@ class AssetCentricMigrationIO(
             for res in batch_results:
                 if isinstance(res, SuccessResponseItems):
                     successful_linked.update(res.ids)
-            results.extend(batch_results)
         to_upload = [item for item in data_chunk if item.source_id in successful_linked]
         return to_upload

@@ -393,7 +391,7 @@ class ThreeDMigrationIO(UploadableStorageIO[ThreeDSelector, ThreeDModelResponse,
         self.data_model_type = data_model_type

     def as_id(self, item: ThreeDModelResponse) -> str:
-        return f"{item.name}_{item.id!s}"
+        return item.name

     def _is_selected(self, item: ThreeDModelResponse, included_models: set[int] | None) -> bool:
         return self._is_correct_type(item) and (included_models is None or item.id in included_models)
@@ -14,7 +14,6 @@ class LogEntry(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_
     """Represents a log entry for tracking storage I/O operations."""

     id: str
-    message: str


 OperationStatus: TypeAlias = Literal["success", "failure", "unchanged", "pending"]
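
The mappers and the upload path above now report progress through self.logger.tracker (add_issue, finalize_item) and self.logger.log(...) instead of returning issues alongside the mapped items. The sketch below shows one way such a tracker/logger surface could be satisfied in memory; the real DataLogger, NoOpLogger and FileDataLogger in storageio/logger.py are only partially visible in this diff, so the names and behaviour here are assumptions for illustration.

    # Sketch only: an in-memory tracker and no-op logger matching the call
    # sites used by the mappers and the status summary shown earlier.
    from collections import Counter, defaultdict
    from typing import Literal, TypeAlias

    OperationStatus: TypeAlias = Literal["success", "failure", "unchanged", "pending"]


    class InMemoryTracker:
        def __init__(self) -> None:
            self._status_by_id: dict[str, OperationStatus] = {}
            self._issues_by_id: dict[str, list[str]] = defaultdict(list)

        def add_issue(self, item_id: str, message: str) -> None:
            self._issues_by_id[item_id].append(message)

        def finalize_item(self, item_id: str | list[str], status: OperationStatus) -> None:
            # The diff shows both single-id and list-of-id calls, so accept both.
            ids = item_id if isinstance(item_id, list) else [item_id]
            for id_ in ids:
                self._status_by_id[id_] = status

        def get_status_counts(self) -> dict[OperationStatus, int]:
            return dict(Counter(self._status_by_id.values()))

        def get_issue_counts(self, status: OperationStatus) -> dict[str, int]:
            counts: Counter[str] = Counter()
            for id_, item_status in self._status_by_id.items():
                if item_status == status:
                    counts.update(self._issues_by_id.get(id_, []))
            return dict(counts)


    class NoOpLogger:
        """Default logger for mappers: tracks in memory, writes nothing."""

        def __init__(self) -> None:
            self.tracker = InMemoryTracker()

        def log(self, entries: list) -> None:  # entries would be LogEntry models
            pass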
@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.7.48
+      image: cognite/toolkit:0.7.49
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
     container:
-      image: cognite/toolkit:0.7.48
+      image: cognite/toolkit:0.7.49
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.7.48"
+version = "0.7.49"


 [plugins]
@@ -1 +1 @@
-__version__ = "0.7.48"
+__version__ = "0.7.49"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.7.48
+Version: 0.7.49
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Author: Cognite AS
 Author-email: Cognite AS <support@cognite.com>
@@ -222,15 +222,15 @@ cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=TK6U_rm6VZT_V941kTyHMou
 cognite_toolkit/_cdf_tk/commands/_download.py,sha256=dVddH9t7oGx1kdQ3CCYYQb96Uxxy-xC8Opph98lo46U,6869
 cognite_toolkit/_cdf_tk/commands/_import_cmd.py,sha256=RkJ7RZI6zxe0_1xrPB-iJhCVchurmIAChilx0_XMR6k,11141
 cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=8ki04tJGH1dHdF2NtVF4HyhaC0XDDS7onrH_nvd9KtE,153
-cognite_toolkit/_cdf_tk/commands/_migrate/command.py,sha256=z5Gt8vJM6klzKpiVXsylTZzD79zQl4wgqF2Vg2s-gfU,14215
-cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py,sha256=rTwfriVG7W8wssysymoXVbfFyifCxgZqH82oK0sbWuQ,17658
+cognite_toolkit/_cdf_tk/commands/_migrate/command.py,sha256=S5ZB7POjJ2ue3D54kW8TZLFuRJAilqwlLlELu3HGEMM,13579
+cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py,sha256=-sZktmsDIqkhNg-kg95Ty6eTm7FiyFRi1ZQLOkZ5x2w,17889
 cognite_toolkit/_cdf_tk/commands/_migrate/creators.py,sha256=Gp3CKruTxeSDS3HiWiLsQ4mN2SZ4BQB5xQ50o68GTs8,9616
 cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=3RRi9qj4rTcuJCxIPXb4s47Mm03udRH3OC9b3rnYIAc,11686
-cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py,sha256=mwpUHc-M5CS3xhXs0SyGY16UL7gxx7JwcuiFlpq9ipI,24537
+cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py,sha256=qRCXI68Fq4eU4UcUwbiQNt2FCgBT_nWKzzUiEtWzfzI,27707
 cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=0lRyDRJ8zo00OngwWagQgHQIaemK4eufW9kbWwZ9Yo0,7901
 cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py,sha256=nH_wC0RzcI721sXwH_pThOxPv1nq4-oBd1rCDSGbr9Y,5555
-cognite_toolkit/_cdf_tk/commands/_migrate/issues.py,sha256=sRle8p_3Nc9WqyTXjAIEyvDag0kKFzmvCqudWqJjdZU,7551
-cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py,sha256=RrTFQErm8O1gtwrsBa0OcUAMez3xC82gcoJOnLm9sXY,26915
+cognite_toolkit/_cdf_tk/commands/_migrate/issues.py,sha256=SFkfLBCLI4nZBIWSRnyu6G4xcdS-csYJwzmzc4Jmzho,6988
+cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py,sha256=ClRvEzJDOft_oQJGlKpYt8wBRjrfJ_jtpVmRe7YO8a8,26801
 cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=RfqaNoso5CyBwc-p6ckwcYqBfZXKhdJgdGIyd0TATaI,2635
 cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py,sha256=N1H_-rBpPUD6pbrlcofn1uEK1bA694EUXEe1zIXeqyo,2489
 cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=c42Na8r1Xcd1zSIJtU8eZMQTFBTzNmxtTIBLKBePzyA,43131
@@ -377,7 +377,7 @@ cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=sgO5TAD87gHn8gxTwRM3yn__
 cognite_toolkit/_cdf_tk/storageio/_file_content.py,sha256=R1f2A9ZkQ-8O0b0kjhku1PNLJnTOlLMTb0tnGSYW-nU,19138
 cognite_toolkit/_cdf_tk/storageio/_instances.py,sha256=2mb9nTC63cn3r1B9YqA0wAmHcbJhMDiET78T7XY5Yco,10821
 cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=VqzAF79ON3hjrVRTBsSiSGK9B_Xmm-PWhFxvA0YKgPE,5162
-cognite_toolkit/_cdf_tk/storageio/logger.py,sha256=MeeIRplxE8SrP-rWPgoM12_VcchlfG54_5Cwa5-hyZE,5574
+cognite_toolkit/_cdf_tk/storageio/logger.py,sha256=RDQvauxSjtlVX-V5XGkFCtqiWpv4KdjJrEcPFVNqPF0,5557
 cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=5qqgeCrsmMLVsrRuYKxCKq1bNnvJkWeAsATpTKN1eC4,4394
 cognite_toolkit/_cdf_tk/storageio/selectors/_asset_centric.py,sha256=7Iv_ccVX6Vzt3ZLFZ0Er3hN92iEsFTm9wgF-yermOWE,1467
 cognite_toolkit/_cdf_tk/storageio/selectors/_base.py,sha256=hjFkbmNGsK3QIW-jnJV_8YNmvVROERxzG82qIZhU7SM,3065
@@ -430,14 +430,14 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=lfmpp-r0PD6SNxA7W4vyXbZJbYS1BdNhMl_LHK7uUA4,667
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=yQ00CPvH5uHg093kLp5mT9YwHKL0secE07jyg59asWU,2430
-cognite_toolkit/_resources/cdf.toml,sha256=lBU3mKRWKTW1NF2Q_8PU08lGP8tyFrBlk9P8OezhilU,475
-cognite_toolkit/_version.py,sha256=bTiPIxy2WmA-9Z8WjK-wrhrEltIBC0FGCskW2gVynPQ,23
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=fmuInpVjQiw7cSiCZf8auBTyCojatOWboPcCJsucvNM,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=-lTJS4LYINJ9Y2Tqr6D_A97CdBnhiAD6HEgWwKmJm6A,2430
+cognite_toolkit/_resources/cdf.toml,sha256=nwpyy2sEPEnElhxnXuWfknQVEf-3o__qVyh2ntpgZ5E,475
+cognite_toolkit/_version.py,sha256=ozK73q6JEJBpdtK_85pf9Qpr10X0T3Pso8xvHij8gJs,23
 cognite_toolkit/config.dev.yaml,sha256=M33FiIKdS3XKif-9vXniQ444GTZ-bLXV8aFH86u9iUQ,332
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.7.48.dist-info/WHEEL,sha256=XV0cjMrO7zXhVAIyyc8aFf1VjZ33Fen4IiJk5zFlC3g,80
-cognite_toolkit-0.7.48.dist-info/entry_points.txt,sha256=EtZ17K2mUjh-AY0QNU1CPIB_aDSSOdmtNI_4Fj967mA,84
-cognite_toolkit-0.7.48.dist-info/METADATA,sha256=jVdGQBFmveYK_NgMkDICE25_GSBGGVhkpIwcdaA33hM,5026
-cognite_toolkit-0.7.48.dist-info/RECORD,,
+cognite_toolkit-0.7.49.dist-info/WHEEL,sha256=XV0cjMrO7zXhVAIyyc8aFf1VjZ33Fen4IiJk5zFlC3g,80
+cognite_toolkit-0.7.49.dist-info/entry_points.txt,sha256=EtZ17K2mUjh-AY0QNU1CPIB_aDSSOdmtNI_4Fj967mA,84
+cognite_toolkit-0.7.49.dist-info/METADATA,sha256=hqUn0j9TfjCIp3DiBXADDE70AqkJ6MNokFeM0WcA5LU,5026
+cognite_toolkit-0.7.49.dist-info/RECORD,,