cognite-toolkit 0.7.48__py3-none-any.whl → 0.7.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. cognite_toolkit/_cdf_tk/builders/_function.py +81 -9
  2. cognite_toolkit/_cdf_tk/client/{resource_classes/base.py → _resource_base.py} +24 -9
  3. cognite_toolkit/_cdf_tk/client/cdf_client/api.py +23 -24
  4. cognite_toolkit/_cdf_tk/client/http_client/__init__.py +5 -3
  5. cognite_toolkit/_cdf_tk/client/http_client/_client.py +4 -2
  6. cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +1 -106
  7. cognite_toolkit/_cdf_tk/client/http_client/_item_classes.py +118 -0
  8. cognite_toolkit/_cdf_tk/client/resource_classes/agent.py +1 -1
  9. cognite_toolkit/_cdf_tk/client/resource_classes/annotation.py +2 -2
  10. cognite_toolkit/_cdf_tk/client/resource_classes/apm_config.py +1 -1
  11. cognite_toolkit/_cdf_tk/client/resource_classes/asset.py +2 -2
  12. cognite_toolkit/_cdf_tk/client/resource_classes/charts_data.py +1 -1
  13. cognite_toolkit/_cdf_tk/client/resource_classes/cognite_file.py +1 -1
  14. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_constraints.py +1 -1
  15. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_container.py +1 -1
  16. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_data_model.py +1 -1
  17. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_data_types.py +1 -1
  18. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_indexes.py +1 -1
  19. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_instance.py +1 -1
  20. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_space.py +1 -1
  21. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_view.py +1 -1
  22. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_view_property.py +1 -1
  23. cognite_toolkit/_cdf_tk/client/resource_classes/dataset.py +3 -3
  24. cognite_toolkit/_cdf_tk/client/resource_classes/event.py +3 -3
  25. cognite_toolkit/_cdf_tk/client/resource_classes/extraction_pipeline.py +3 -3
  26. cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py +2 -2
  27. cognite_toolkit/_cdf_tk/client/resource_classes/function.py +1 -1
  28. cognite_toolkit/_cdf_tk/client/resource_classes/function_schedule.py +1 -1
  29. cognite_toolkit/_cdf_tk/client/resource_classes/graphql_data_model.py +1 -1
  30. cognite_toolkit/_cdf_tk/client/resource_classes/group/acls.py +1 -1
  31. cognite_toolkit/_cdf_tk/client/resource_classes/group/capability.py +1 -1
  32. cognite_toolkit/_cdf_tk/client/resource_classes/group/group.py +1 -1
  33. cognite_toolkit/_cdf_tk/client/resource_classes/group/scopes.py +1 -1
  34. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_destination.py +3 -3
  35. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_job.py +3 -3
  36. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_mapping.py +3 -3
  37. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_auth.py +1 -1
  38. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_base.py +3 -3
  39. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_certificate.py +1 -1
  40. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_eventhub.py +1 -1
  41. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_kafka.py +1 -1
  42. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_mqtt.py +1 -1
  43. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_rest.py +1 -1
  44. cognite_toolkit/_cdf_tk/client/resource_classes/identifiers.py +1 -1
  45. cognite_toolkit/_cdf_tk/client/resource_classes/infield.py +1 -1
  46. cognite_toolkit/_cdf_tk/client/resource_classes/instance_api.py +1 -1
  47. cognite_toolkit/_cdf_tk/client/resource_classes/label.py +1 -1
  48. cognite_toolkit/_cdf_tk/client/resource_classes/location_filter.py +1 -1
  49. cognite_toolkit/_cdf_tk/client/resource_classes/raw.py +1 -1
  50. cognite_toolkit/_cdf_tk/client/resource_classes/relationship.py +3 -3
  51. cognite_toolkit/_cdf_tk/client/resource_classes/resource_view_mapping.py +1 -1
  52. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_capability.py +3 -3
  53. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_common.py +1 -1
  54. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_data_post_processing.py +3 -3
  55. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_frame.py +3 -3
  56. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_location.py +2 -2
  57. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_map.py +3 -3
  58. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_robot.py +3 -3
  59. cognite_toolkit/_cdf_tk/client/resource_classes/search_config.py +1 -1
  60. cognite_toolkit/_cdf_tk/client/resource_classes/securitycategory.py +1 -1
  61. cognite_toolkit/_cdf_tk/client/resource_classes/sequence.py +3 -3
  62. cognite_toolkit/_cdf_tk/client/resource_classes/sequence_rows.py +1 -1
  63. cognite_toolkit/_cdf_tk/client/resource_classes/simulator_model.py +2 -2
  64. cognite_toolkit/_cdf_tk/client/resource_classes/streamlit_.py +3 -3
  65. cognite_toolkit/_cdf_tk/client/resource_classes/streams.py +1 -1
  66. cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py +9 -2
  67. cognite_toolkit/_cdf_tk/client/resource_classes/timeseries.py +3 -3
  68. cognite_toolkit/_cdf_tk/client/resource_classes/transformation.py +3 -3
  69. cognite_toolkit/_cdf_tk/client/resource_classes/workflow.py +1 -1
  70. cognite_toolkit/_cdf_tk/client/resource_classes/workflow_trigger.py +1 -1
  71. cognite_toolkit/_cdf_tk/client/resource_classes/workflow_version.py +1 -1
  72. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +103 -108
  73. cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +6 -1
  74. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +1 -1
  75. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +115 -37
  76. cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +21 -38
  77. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +3 -5
  78. cognite_toolkit/_cdf_tk/commands/build_cmd.py +11 -1
  79. cognite_toolkit/_cdf_tk/data_classes/_tracking_info.py +4 -0
  80. cognite_toolkit/_cdf_tk/feature_flags.py +4 -0
  81. cognite_toolkit/_cdf_tk/storageio/logger.py +0 -1
  82. cognite_toolkit/_cdf_tk/tk_warnings/__init__.py +2 -0
  83. cognite_toolkit/_cdf_tk/tk_warnings/fileread.py +20 -0
  84. cognite_toolkit/_cdf_tk/utils/__init__.py +3 -0
  85. cognite_toolkit/_cdf_tk/utils/pip_validator.py +96 -0
  86. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  87. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  88. cognite_toolkit/_resources/cdf.toml +1 -1
  89. cognite_toolkit/_version.py +1 -1
  90. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/METADATA +1 -1
  91. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/RECORD +93 -91
  92. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/WHEEL +0 -0
  93. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/entry_points.txt +0 -0
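Two changes recur across most of the resource_classes files listed above: the shared base classes move from cognite_toolkit._cdf_tk.client.resource_classes.base to cognite_toolkit._cdf_tk.client._resource_base, and RequestUpdateable is renamed to UpdatableRequestResource. A minimal sketch of how an import that touches these symbols changes, based only on the hunks below (note that _cdf_tk is an internal package, so these paths are not a stable public API):

    # Before (0.7.48)
    # from cognite_toolkit._cdf_tk.client.resource_classes.base import RequestUpdateable

    # After (0.7.50)
    from cognite_toolkit._cdf_tk.client._resource_base import UpdatableRequestResource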
@@ -1,9 +1,9 @@
  from typing import Any, ClassVar, Literal

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
-     RequestUpdateable,
      ResponseResource,
+     UpdatableRequestResource,
  )

  from cognite_toolkit._cdf_tk.client.resource_classes.identifiers import DataSetId

@@ -36,7 +36,7 @@ class Robot(BaseModelObject):
          return DataSetId(data_set_id=self.data_set_id)


- class RobotRequest(Robot, RequestUpdateable):
+ class RobotRequest(Robot, UpdatableRequestResource):
      """Request resource for creating or updating a Robot."""

      container_fields: ClassVar[frozenset[str]] = frozenset({"metadata"})
@@ -1,4 +1,4 @@
- from cognite_toolkit._cdf_tk.client.resource_classes.base import BaseModelObject, RequestResource, ResponseResource
+ from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource, ResponseResource

  from .identifiers import Identifier

@@ -1,4 +1,4 @@
- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
      RequestResource,
      ResponseResource,
@@ -1,9 +1,9 @@
  from typing import ClassVar, Literal

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
-     RequestUpdateable,
      ResponseResource,
+     UpdatableRequestResource,
  )

  from .identifiers import ExternalId
@@ -32,7 +32,7 @@ class Sequence(BaseModelObject):
          return ExternalId(external_id=self.external_id)


- class SequenceRequest(Sequence, RequestUpdateable):
+ class SequenceRequest(Sequence, UpdatableRequestResource):
      container_fields: ClassVar[frozenset[str]] = frozenset({"metadata", "columns"})


@@ -1,6 +1,6 @@
  from typing import Literal

- from cognite_toolkit._cdf_tk.client.resource_classes.base import BaseModelObject, RequestResource, ResponseResource
+ from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource, ResponseResource

  from .identifiers import ExternalId

@@ -2,12 +2,12 @@ from typing import Any, Literal

  from pydantic import Field

- from cognite_toolkit._cdf_tk.client.resource_classes.base import RequestUpdateable, ResponseResource
+ from cognite_toolkit._cdf_tk.client._resource_base import ResponseResource, UpdatableRequestResource

  from .identifiers import ExternalId


- class SimulatorModelRequest(RequestUpdateable):
+ class SimulatorModelRequest(UpdatableRequestResource):
      # The 'id' field is not part of the request when creating a new resource,
      # but is needed when updating an existing resource.
      id: int | None = Field(default=None, exclude=True)
@@ -1,9 +1,9 @@
  from typing import Any, Literal

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
-     RequestUpdateable,
      ResponseResource,
+     UpdatableRequestResource,
  )

  from .filemetadata import FileMetadataRequest
@@ -25,7 +25,7 @@ class StreamlitFile(BaseModelObject):
      cognite_toolkit_app_hash: str | None = None


- class StreamlitRequest(StreamlitFile, RequestUpdateable):
+ class StreamlitRequest(StreamlitFile, UpdatableRequestResource):
      """Request resource for creating/updating Streamlit apps."""

      def as_id(self) -> ExternalId:
@@ -1,8 +1,8 @@
  from typing import Literal

+ from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource, ResponseResource
  from cognite_toolkit._cdf_tk.constants import StreamTemplateName

- from .base import BaseModelObject, RequestResource, ResponseResource
  from .identifiers import ExternalId


@@ -3,7 +3,14 @@ from typing import ClassVar, Literal

  from pydantic import Field

- from .base import BaseModelObject, Identifier, RequestResource, RequestUpdateable, ResponseResource
+ from cognite_toolkit._cdf_tk.client._resource_base import (
+     BaseModelObject,
+     Identifier,
+     RequestResource,
+     ResponseResource,
+     UpdatableRequestResource,
+ )
+
  from .identifiers import InternalId
  from .instance_api import NodeReference

@@ -33,7 +40,7 @@ class ThreeDModelRequest(RequestResource):
          return InternalId(id=self.id)


- class ThreeDModelClassicRequest(ThreeDModelRequest, RequestUpdateable):
+ class ThreeDModelClassicRequest(ThreeDModelRequest, UpdatableRequestResource):
      container_fields: ClassVar[frozenset[str]] = frozenset({"metadata"})
      data_set_id: int | None = None
      metadata: dict[str, str] | None = None
@@ -1,9 +1,9 @@
  from typing import Any, ClassVar, Literal

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
-     RequestUpdateable,
      ResponseResource,
+     UpdatableRequestResource,
  )

  from .identifiers import ExternalId, InternalOrExternalId
@@ -29,7 +29,7 @@ class TimeSeries(BaseModelObject):
          return ExternalId(external_id=self.external_id)


- class TimeSeriesRequest(TimeSeries, RequestUpdateable):
+ class TimeSeriesRequest(TimeSeries, UpdatableRequestResource):
      container_fields: ClassVar[frozenset[str]] = frozenset({"metadata", "security_categories"})
      non_nullable_fields: ClassVar[frozenset[str]] = frozenset({"is_step"})

@@ -2,10 +2,10 @@ from typing import Annotated, ClassVar, Literal

  from pydantic import Field, JsonValue

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
-     RequestUpdateable,
      ResponseResource,
+     UpdatableRequestResource,
  )

  from .identifiers import ExternalId
@@ -106,7 +106,7 @@ class Transformation(BaseModelObject):
          return ExternalId(external_id=self.external_id)


- class TransformationRequest(Transformation, RequestUpdateable):
+ class TransformationRequest(Transformation, UpdatableRequestResource):
      container_fields: ClassVar[frozenset[str]] = frozenset({"tags"})
      non_nullable_fields: ClassVar[frozenset[str]] = frozenset({"is_public", "query", "destination"})
      query: str | None = None
@@ -1,4 +1,4 @@
- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
      RequestResource,
      ResponseResource,
@@ -2,7 +2,7 @@ from typing import Annotated, Literal

  from pydantic import Field, JsonValue

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
      RequestResource,
      ResponseResource,
@@ -3,7 +3,7 @@ from typing import Annotated, Any, Literal
  from pydantic import Field, JsonValue, field_validator
  from pydantic_core.core_schema import ValidationInfo

- from cognite_toolkit._cdf_tk.client.resource_classes.base import (
+ from cognite_toolkit._cdf_tk.client._resource_base import (
      BaseModelObject,
      RequestResource,
      ResponseResource,
@@ -1,13 +1,19 @@
- from collections.abc import Callable, Iterable, Sequence
- from enum import Enum
+ from collections.abc import Callable, Sequence
+ from dataclasses import dataclass
  from pathlib import Path
+ from typing import get_args

  from rich import print
  from rich.console import Console
  from rich.table import Table

  from cognite_toolkit._cdf_tk.client import ToolkitClient
- from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, HTTPMessage, ItemMessage, SuccessResponseItems
+ from cognite_toolkit._cdf_tk.client.http_client import (
+     FailedRequestItems,
+     FailedResponseItems,
+     HTTPClient,
+     SuccessResponseItems,
+ )
  from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
  from cognite_toolkit._cdf_tk.commands._migrate.creators import MigrationCreator
  from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import DataMapper
@@ -22,25 +28,30 @@ from cognite_toolkit._cdf_tk.exceptions import (
  )
  from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
  from cognite_toolkit._cdf_tk.storageio import T_Selector, UploadableStorageIO, UploadItem
+ from cognite_toolkit._cdf_tk.storageio.logger import FileDataLogger, OperationStatus
  from cognite_toolkit._cdf_tk.utils import humanize_collection, safe_write, sanitize_filename
  from cognite_toolkit._cdf_tk.utils.file import yaml_safe_dump
- from cognite_toolkit._cdf_tk.utils.fileio import Chunk, CSVWriter, NDJsonWriter, SchemaColumn, Uncompressed
+ from cognite_toolkit._cdf_tk.utils.fileio import NDJsonWriter, Uncompressed
  from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
- from cognite_toolkit._cdf_tk.utils.progress_tracker import AVAILABLE_STATUS, ProgressTracker, Status

  from .data_model import INSTANCE_SOURCE_VIEW_ID, MODEL_ID, RESOURCE_VIEW_MAPPING_VIEW_ID
+ from .issues import WriteIssue


- class MigrationCommand(ToolkitCommand):
-     class Steps(str, Enum):
-         DOWNLOAD = "download"
-         CONVERT = "convert"
-         UPLOAD = "upload"
+ @dataclass
+ class OperationIssue:
+     message: str
+     count: int
+

-         @classmethod
-         def list(cls) -> list[str]:
-             return [step.value for step in cls.__members__.values()]
+ @dataclass
+ class MigrationStatusResult:
+     status: OperationStatus
+     issues: list[OperationIssue]
+     count: int

+
+ class MigrationCommand(ToolkitCommand):
      def migrate(
          self,
          selected: T_Selector,
@@ -49,7 +60,7 @@ class MigrationCommand(ToolkitCommand):
          log_dir: Path,
          dry_run: bool = False,
          verbose: bool = False,
-     ) -> ProgressTracker[str]:
+     ) -> list[MigrationStatusResult]:
          if log_dir.exists() and any(log_dir.iterdir()):
              raise ToolkitFileExistsError(
                  f"Log directory {log_dir} already exists. Please remove it or choose another directory."
@@ -65,15 +76,18 @@ class MigrationCommand(ToolkitCommand):
          self.validate_available_capacity(data.client, total_items)

          console = Console()
-         tracker = ProgressTracker[str](self.Steps.list())
          with (
              NDJsonWriter(log_dir, kind=f"{selected.kind}MigrationIssues", compression=Uncompressed) as log_file,
              HTTPClient(config=data.client.config) as write_client,
          ):
+             logger = FileDataLogger(log_file)
+             data.logger = logger
+             mapper.logger = logger
+
              executor = ProducerWorkerExecutor[Sequence[T_ResourceResponse], Sequence[UploadItem[T_ResourceRequest]]](
-                 download_iterable=self._download_iterable(selected, data, tracker),
-                 process=self._convert(mapper, data, tracker, log_file),
-                 write=self._upload(selected, write_client, data, tracker, log_file, dry_run),
+                 download_iterable=(page.items for page in data.stream_data(selected)),
+                 process=self._convert(mapper, data),
+                 write=self._upload(selected, write_client, data, dry_run),
                  iteration_count=iteration_count,
                  max_queue_size=10,
                  download_description=f"Downloading {selected.display_name}",
@@ -86,91 +100,71 @@ class MigrationCommand(ToolkitCommand):
              executor.run()
              total = executor.total_items

-             self._print_table(tracker.aggregate(), console)
-             self._print_csv(tracker, log_dir, f"{selected.kind}Items", console)
+             results = self._create_status_summary(logger)
+
+             self._print_rich_tables(results, console)
+             self._print_txt(results, log_dir, f"{selected.kind}Items", console)
              executor.raise_on_error()
              action = "Would migrate" if dry_run else "Migrating"
              console.print(f"{action} {total:,} {selected.display_name} to instances.")
-             return tracker

-     def _print_table(self, results: dict[tuple[str, Status], int], console: Console) -> None:
-         for step in self.Steps:
-             # We treat pending as failed for summary purposes
-             results[(step.value, "failed")] = results.get((step.value, "failed"), 0) + results.get(
-                 (step.value, "pending"), 0
+         return results
+
+     # Todo: Move to the logger module
+     @classmethod
+     def _create_status_summary(cls, logger: FileDataLogger) -> list[MigrationStatusResult]:
+         results: list[MigrationStatusResult] = []
+         status_counts = logger.tracker.get_status_counts()
+         for status in get_args(OperationStatus):
+             issue_counts = logger.tracker.get_issue_counts(status)
+             issues = [OperationIssue(message=issue, count=count) for issue, count in issue_counts.items()]
+             result = MigrationStatusResult(
+                 status=status,
+                 issues=issues,
+                 count=status_counts.get(status, 0),
              )
+             results.append(result)
+         return results

+     def _print_rich_tables(self, results: list[MigrationStatusResult], console: Console) -> None:
          table = Table(title="Migration Summary", show_lines=True)
-         table.add_column("Status", style="cyan", no_wrap=True)
-         for step in self.Steps:
-             table.add_column(step.value.capitalize(), style="magenta")
-         for status in AVAILABLE_STATUS:
-             if status == "pending":
-                 # Skip pending as we treat it as failed
-                 continue
-             row = [status]
-             for step in self.Steps:
-                 row.append(str(results.get((step.value, status), 0)))
-             table.add_row(*row)
-
+         table.add_column("Status", style="bold")
+         table.add_column("Count", justify="right", style="bold")
+         table.add_column("Issues", style="bold")
+         for result in results:
+             issues_str = "\n".join(f"{issue.message}: {issue.count}" for issue in result.issues) or ""
+             table.add_row(result.status, str(result.count), issues_str)
          console.print(table)

-     def _print_csv(self, tracker: ProgressTracker[str], log_dir: Path, kind: str, console: Console) -> None:
-         with CSVWriter(log_dir, kind=kind, compression=Uncompressed, columns=self._csv_columns()) as csv_file:
-             batch: list[Chunk] = []
-             steps = self.Steps.list()
-             for item_id, progress in tracker.result().items():
-                 batch.append({"ID": str(item_id), **{step: progress[step] for step in steps}})
-                 if len(batch) >= 1000:
-                     csv_file.write_chunks(batch)
-                     batch = []
-             if batch:
-                 csv_file.write_chunks(batch)
-         console.print(f"Migration items written to {log_dir}")
-
-     @classmethod
-     def _csv_columns(cls) -> list[SchemaColumn]:
-         return [
-             SchemaColumn(name="ID", type="string"),
-             *(SchemaColumn(name=step, type="string") for step in cls.Steps.list()),
-         ]
-
-     def _download_iterable(
-         self,
-         selected: T_Selector,
-         data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-         tracker: ProgressTracker[str],
-     ) -> Iterable[Sequence[T_ResourceResponse]]:
-         for page in data.stream_data(selected):
-             for item in page.items:
-                 tracker.set_progress(data.as_id(item), self.Steps.DOWNLOAD, "success")
-             yield page.items
+     def _print_txt(self, results: list[MigrationStatusResult], log_dir: Path, kind: str, console: Console) -> None:
+         summary_file = log_dir / f"{kind}_migration_summary.txt"
+         with summary_file.open("w", encoding="utf-8") as f:
+             f.write("Migration Summary\n")
+             f.write("=================\n\n")
+             for result in results:
+                 f.write(f"Status: {result.status}\n")
+                 f.write(f"Count: {result.count}\n")
+                 f.write("Issues:\n")
+                 if result.issues:
+                     for issue in result.issues:
+                         f.write(f" - {issue.message}: {issue.count}\n")
+                 else:
+                     f.write(" None\n")
+                 f.write("\n")
+         console.print(f"Summary written to {log_dir}")

+     @staticmethod
      def _convert(
-         self,
          mapper: DataMapper[T_Selector, T_ResourceResponse, T_ResourceRequest],
          data: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-         tracker: ProgressTracker[str],
-         log_file: NDJsonWriter,
      ) -> Callable[[Sequence[T_ResourceResponse]], Sequence[UploadItem[T_ResourceRequest]]]:
          def track_mapping(source: Sequence[T_ResourceResponse]) -> list[UploadItem[T_ResourceRequest]]:
              mapped = mapper.map(source)
-             issues: list[Chunk] = []
-             targets: list[UploadItem[T_ResourceRequest]] = []
-
-             for (target, issue), item in zip(mapped, source):
-                 id_ = data.as_id(item)
-                 result: Status = "failed" if target is None else "success"
-                 tracker.set_progress(id_, step=self.Steps.CONVERT, status=result)
-
-                 if issue.has_issues:
-                     # MyPy fails to understand that dict[str, JsonVal] is a Chunk
-                     issues.append(issue.dump())  # type: ignore[arg-type]
-                 if target is not None:
-                     targets.append(UploadItem(source_id=id_, item=target))
-             if issues:
-                 log_file.write_chunks(issues)
-             return targets
+             return [
+                 UploadItem(source_id=data.as_id(item), item=target)
+                 for target, item in zip(mapped, source)
+                 if target is not None
+             ]

          return track_mapping

@@ -179,36 +173,37 @@ class MigrationCommand(ToolkitCommand):
          selected: T_Selector,
          write_client: HTTPClient,
          target: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
-         tracker: ProgressTracker[str],
-         log_file: NDJsonWriter,
          dry_run: bool,
      ) -> Callable[[Sequence[UploadItem[T_ResourceRequest]]], None]:
          def upload_items(data_item: Sequence[UploadItem[T_ResourceRequest]]) -> None:
              if not data_item:
                  return None
-             responses: Sequence[HTTPMessage]
              if dry_run:
-                 responses = [
-                     SuccessResponseItems(
-                         status_code=200, body="", content=b"", ids=[item.source_id for item in data_item]
-                     )
-                 ]
-             else:
-                 responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)
-
-             issues: list[Chunk] = []
+                 target.logger.tracker.finalize_item([item.source_id for item in data_item], "pending")
+                 return None
+
+             responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)
+
+             # Todo: Move logging into the UploadableStorageIO class
+             issues: list[WriteIssue] = []
              for item in responses:
                  if isinstance(item, SuccessResponseItems):
-                     for success_id in item.ids:
-                         tracker.set_progress(success_id, step=self.Steps.UPLOAD, status="success")
-                 elif isinstance(item, ItemMessage):
-                     for failed_id in item.ids:
-                         tracker.set_progress(failed_id, step=self.Steps.UPLOAD, status="failed")
-
-                 if not isinstance(item, SuccessResponseItems):
-                     issues.append(item.dump())  # type: ignore[arg-type]
+                     target.logger.tracker.finalize_item(item.ids, "success")
+                     continue
+                 if isinstance(item, FailedResponseItems):
+                     error = item.error
+                     for id_ in item.ids:
+                         issue = WriteIssue(id=str(id_), status_code=error.code, message=error.message)
+                         issues.append(issue)
+                 elif isinstance(item, FailedRequestItems):
+                     for id_ in item.ids:
+                         issue = WriteIssue(id=str(id_), status_code=0, message=item.error)
+                         issues.append(issue)
+
+                 if isinstance(item, FailedResponseItems | FailedRequestItems):
+                     target.logger.tracker.finalize_item(item.ids, "failure")
              if issues:
-                 log_file.write_chunks(issues)
+                 target.logger.log(issues)
              return None

          return upload_items
@@ -17,6 +17,7 @@ from cognite.client.utils._identifier import InstanceId

  from cognite_toolkit._cdf_tk.client import ToolkitClient
  from cognite_toolkit._cdf_tk.client.resource_classes.asset import AssetResponse
+ from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
  from cognite_toolkit._cdf_tk.client.resource_classes.event import EventResponse
  from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataResponse
  from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import (
@@ -198,7 +199,11 @@ def asset_centric_to_dm(
      data_set_id = dumped.pop("dataSetId", None)
      external_id = dumped.pop("externalId", None)

-     issue = ConversionIssue(asset_centric_id=AssetCentricId(resource_type, id_=id_), instance_id=instance_id)
+     issue = ConversionIssue(
+         id=str(AssetCentricId(resource_type, id_=id_)),
+         asset_centric_id=AssetCentricId(resource_type, id_=id_),
+         instance_id=NodeReference(space=instance_id.space, external_id=instance_id.external_id),
+     )

      properties = create_properties(
          dumped,
@@ -11,7 +11,7 @@ from cognite.client.utils._identifier import InstanceId
  from cognite.client.utils._text import to_camel_case
  from pydantic import BaseModel, BeforeValidator, Field, field_validator, model_validator

- from cognite_toolkit._cdf_tk.client.resource_classes.base import BaseModelObject, RequestResource
+ from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource
  from cognite_toolkit._cdf_tk.client.resource_classes.identifiers import InternalId
  from cognite_toolkit._cdf_tk.client.resource_classes.instance_api import InstanceIdentifier
  from cognite_toolkit._cdf_tk.client.resource_classes.legacy.instances import InstanceApplyList