cognite-toolkit 0.7.48__py3-none-any.whl → 0.7.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. cognite_toolkit/_cdf_tk/builders/_function.py +81 -9
  2. cognite_toolkit/_cdf_tk/client/{resource_classes/base.py → _resource_base.py} +24 -9
  3. cognite_toolkit/_cdf_tk/client/cdf_client/api.py +23 -24
  4. cognite_toolkit/_cdf_tk/client/http_client/__init__.py +5 -3
  5. cognite_toolkit/_cdf_tk/client/http_client/_client.py +4 -2
  6. cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +1 -106
  7. cognite_toolkit/_cdf_tk/client/http_client/_item_classes.py +118 -0
  8. cognite_toolkit/_cdf_tk/client/resource_classes/agent.py +1 -1
  9. cognite_toolkit/_cdf_tk/client/resource_classes/annotation.py +2 -2
  10. cognite_toolkit/_cdf_tk/client/resource_classes/apm_config.py +1 -1
  11. cognite_toolkit/_cdf_tk/client/resource_classes/asset.py +2 -2
  12. cognite_toolkit/_cdf_tk/client/resource_classes/charts_data.py +1 -1
  13. cognite_toolkit/_cdf_tk/client/resource_classes/cognite_file.py +1 -1
  14. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_constraints.py +1 -1
  15. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_container.py +1 -1
  16. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_data_model.py +1 -1
  17. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_data_types.py +1 -1
  18. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_indexes.py +1 -1
  19. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_instance.py +1 -1
  20. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_space.py +1 -1
  21. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_view.py +1 -1
  22. cognite_toolkit/_cdf_tk/client/resource_classes/data_modeling/_view_property.py +1 -1
  23. cognite_toolkit/_cdf_tk/client/resource_classes/dataset.py +3 -3
  24. cognite_toolkit/_cdf_tk/client/resource_classes/event.py +3 -3
  25. cognite_toolkit/_cdf_tk/client/resource_classes/extraction_pipeline.py +3 -3
  26. cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py +2 -2
  27. cognite_toolkit/_cdf_tk/client/resource_classes/function.py +1 -1
  28. cognite_toolkit/_cdf_tk/client/resource_classes/function_schedule.py +1 -1
  29. cognite_toolkit/_cdf_tk/client/resource_classes/graphql_data_model.py +1 -1
  30. cognite_toolkit/_cdf_tk/client/resource_classes/group/acls.py +1 -1
  31. cognite_toolkit/_cdf_tk/client/resource_classes/group/capability.py +1 -1
  32. cognite_toolkit/_cdf_tk/client/resource_classes/group/group.py +1 -1
  33. cognite_toolkit/_cdf_tk/client/resource_classes/group/scopes.py +1 -1
  34. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_destination.py +3 -3
  35. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_job.py +3 -3
  36. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_mapping.py +3 -3
  37. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_auth.py +1 -1
  38. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_base.py +3 -3
  39. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_certificate.py +1 -1
  40. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_eventhub.py +1 -1
  41. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_kafka.py +1 -1
  42. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_mqtt.py +1 -1
  43. cognite_toolkit/_cdf_tk/client/resource_classes/hosted_extractor_source/_rest.py +1 -1
  44. cognite_toolkit/_cdf_tk/client/resource_classes/identifiers.py +1 -1
  45. cognite_toolkit/_cdf_tk/client/resource_classes/infield.py +1 -1
  46. cognite_toolkit/_cdf_tk/client/resource_classes/instance_api.py +1 -1
  47. cognite_toolkit/_cdf_tk/client/resource_classes/label.py +1 -1
  48. cognite_toolkit/_cdf_tk/client/resource_classes/location_filter.py +1 -1
  49. cognite_toolkit/_cdf_tk/client/resource_classes/raw.py +1 -1
  50. cognite_toolkit/_cdf_tk/client/resource_classes/relationship.py +3 -3
  51. cognite_toolkit/_cdf_tk/client/resource_classes/resource_view_mapping.py +1 -1
  52. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_capability.py +3 -3
  53. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_common.py +1 -1
  54. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_data_post_processing.py +3 -3
  55. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_frame.py +3 -3
  56. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_location.py +2 -2
  57. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_map.py +3 -3
  58. cognite_toolkit/_cdf_tk/client/resource_classes/robotics/_robot.py +3 -3
  59. cognite_toolkit/_cdf_tk/client/resource_classes/search_config.py +1 -1
  60. cognite_toolkit/_cdf_tk/client/resource_classes/securitycategory.py +1 -1
  61. cognite_toolkit/_cdf_tk/client/resource_classes/sequence.py +3 -3
  62. cognite_toolkit/_cdf_tk/client/resource_classes/sequence_rows.py +1 -1
  63. cognite_toolkit/_cdf_tk/client/resource_classes/simulator_model.py +2 -2
  64. cognite_toolkit/_cdf_tk/client/resource_classes/streamlit_.py +3 -3
  65. cognite_toolkit/_cdf_tk/client/resource_classes/streams.py +1 -1
  66. cognite_toolkit/_cdf_tk/client/resource_classes/three_d.py +9 -2
  67. cognite_toolkit/_cdf_tk/client/resource_classes/timeseries.py +3 -3
  68. cognite_toolkit/_cdf_tk/client/resource_classes/transformation.py +3 -3
  69. cognite_toolkit/_cdf_tk/client/resource_classes/workflow.py +1 -1
  70. cognite_toolkit/_cdf_tk/client/resource_classes/workflow_trigger.py +1 -1
  71. cognite_toolkit/_cdf_tk/client/resource_classes/workflow_version.py +1 -1
  72. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +103 -108
  73. cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +6 -1
  74. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +1 -1
  75. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +115 -37
  76. cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +21 -38
  77. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +3 -5
  78. cognite_toolkit/_cdf_tk/commands/build_cmd.py +11 -1
  79. cognite_toolkit/_cdf_tk/data_classes/_tracking_info.py +4 -0
  80. cognite_toolkit/_cdf_tk/feature_flags.py +4 -0
  81. cognite_toolkit/_cdf_tk/storageio/logger.py +0 -1
  82. cognite_toolkit/_cdf_tk/tk_warnings/__init__.py +2 -0
  83. cognite_toolkit/_cdf_tk/tk_warnings/fileread.py +20 -0
  84. cognite_toolkit/_cdf_tk/utils/__init__.py +3 -0
  85. cognite_toolkit/_cdf_tk/utils/pip_validator.py +96 -0
  86. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  87. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  88. cognite_toolkit/_resources/cdf.toml +1 -1
  89. cognite_toolkit/_version.py +1 -1
  90. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/METADATA +1 -1
  91. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/RECORD +93 -91
  92. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/WHEEL +0 -0
  93. {cognite_toolkit-0.7.48.dist-info → cognite_toolkit-0.7.50.dist-info}/entry_points.txt +0 -0
cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py

@@ -46,13 +46,13 @@ from cognite_toolkit._cdf_tk.commands._migrate.issues import (
     CanvasMigrationIssue,
     ChartMigrationIssue,
     ConversionIssue,
-    MigrationIssue,
     ThreeDModelMigrationIssue,
 )
 from cognite_toolkit._cdf_tk.constants import MISSING_INSTANCE_SPACE
 from cognite_toolkit._cdf_tk.exceptions import ToolkitMigrationError, ToolkitValueError
 from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.storageio._base import T_Selector
+from cognite_toolkit._cdf_tk.storageio.logger import DataLogger, NoOpLogger
 from cognite_toolkit._cdf_tk.storageio.selectors import CanvasSelector, ChartSelector, ThreeDSelector
 from cognite_toolkit._cdf_tk.utils import humanize_collection
 from cognite_toolkit._cdf_tk.utils.useful_types2 import T_AssetCentricResourceExtended
@@ -62,6 +62,10 @@ from .selectors import AssetCentricMigrationSelector
 
 
 class DataMapper(Generic[T_Selector, T_ResourceResponse, T_ResourceRequest], ABC):
+    def __init__(self, client: ToolkitClient) -> None:
+        self.client = client
+        self.logger: DataLogger = NoOpLogger()
+
     def prepare(self, source_selector: T_Selector) -> None:
         """Prepare the data mapper with the given source selector.
 
@@ -73,14 +77,14 @@ class DataMapper(Generic[T_Selector, T_ResourceResponse, T_ResourceRequest], ABC
         pass
 
     @abstractmethod
-    def map(self, source: Sequence[T_ResourceResponse]) -> Sequence[tuple[T_ResourceRequest | None, MigrationIssue]]:
+    def map(self, source: Sequence[T_ResourceResponse]) -> Sequence[T_ResourceRequest | None]:
         """Map a chunk of source data to the target format.
 
         Args:
             source: The source data chunk to be mapped.
 
         Returns:
-            A tuple containing the mapped data and a list of any issues encountered during mapping.
+            A sequence of mapped target data.
 
         """
         raise NotImplementedError("Subclasses must implement this method.")
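Taken together with the base-class __init__ above, these hunks change the mapper contract: map now returns only the mapped requests, and problems are reported through the inherited self.logger instead of being returned as (request, issue) tuples. A minimal sketch of a conforming subclass, where MySelector, MyResponse, MyRequest, and _convert are hypothetical stand-ins rather than names from the package:

class ExampleMapper(DataMapper[MySelector, MyResponse, MyRequest]):
    def map(self, source: Sequence[MyResponse]) -> Sequence[MyRequest | None]:
        output: list[MyRequest | None] = []
        issues = []
        for item in source:
            request, issue = self._convert(item)  # hypothetical per-item helper
            if issue.has_issues:
                # Per-item problems go to the tracker, not the return value.
                self.logger.tracker.add_issue(item.external_id, "Conversion failed")
                issues.append(issue)
            if request is None:
                self.logger.tracker.finalize_item(item.external_id, "failure")
            output.append(request)
        if issues:
            self.logger.log(issues)
        return output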
@@ -90,7 +94,7 @@ class AssetCentricMapper(
     DataMapper[AssetCentricMigrationSelector, AssetCentricMapping[T_AssetCentricResourceExtended], InstanceApply]
 ):
     def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
+        super().__init__(client)
         self._ingestion_view_by_id: dict[ViewId, View] = {}
         self._view_mapping_by_id: dict[str, ResourceViewMappingApply] = {}
         self._direct_relation_cache = DirectRelationCache(client)
@@ -118,14 +122,37 @@ class AssetCentricMapper(
 
     def map(
         self, source: Sequence[AssetCentricMapping[T_AssetCentricResourceExtended]]
-    ) -> Sequence[tuple[InstanceApply | None, ConversionIssue]]:
+    ) -> Sequence[InstanceApply | None]:
         """Map a chunk of asset-centric data to InstanceApplyList format."""
         # We update the direct relation cache in bulk for all resources in the chunk.
         self._direct_relation_cache.update(item.resource for item in source)
-        output: list[tuple[InstanceApply | None, ConversionIssue]] = []
+        output: list[InstanceApply | None] = []
+        issues: list[ConversionIssue] = []
         for item in source:
             instance, conversion_issue = self._map_single_item(item)
-            output.append((instance, conversion_issue))
+            identifier = str(item.mapping.as_asset_centric_id())
+
+            if conversion_issue.missing_instance_space:
+                self.logger.tracker.add_issue(identifier, "Missing instance space")
+            if conversion_issue.failed_conversions:
+                self.logger.tracker.add_issue(identifier, "Failed conversions")
+            if conversion_issue.invalid_instance_property_types:
+                self.logger.tracker.add_issue(identifier, "Invalid instance property types")
+            if conversion_issue.missing_asset_centric_properties:
+                self.logger.tracker.add_issue(identifier, "Missing asset-centric properties")
+            if conversion_issue.missing_instance_properties:
+                self.logger.tracker.add_issue(identifier, "Missing instance properties")
+            if conversion_issue.ignored_asset_centric_properties:
+                self.logger.tracker.add_issue(identifier, "Ignored asset-centric properties")
+
+            if conversion_issue.has_issues:
+                issues.append(conversion_issue)
+
+            if instance is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(instance)
+        if issues:
+            self.logger.log(issues)
         return output
 
     def _map_single_item(
@@ -154,15 +181,29 @@ class AssetCentricMapper(
 
 
 class ChartMapper(DataMapper[ChartSelector, Chart, ChartWrite]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(self, source: Sequence[Chart]) -> Sequence[tuple[ChartWrite | None, MigrationIssue]]:
+    def map(self, source: Sequence[Chart]) -> Sequence[ChartWrite | None]:
         self._populate_cache(source)
-        output: list[tuple[ChartWrite | None, MigrationIssue]] = []
+        output: list[ChartWrite | None] = []
+        issues: list[ChartMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.external_id
+
+            if issue.missing_timeseries_ids:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries IDs")
+            if issue.missing_timeseries_external_ids:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries external IDs")
+            if issue.missing_timeseries_identifier:
+                self.logger.tracker.add_issue(identifier, "Missing timeseries identifier")
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output
 
     def _populate_cache(self, source: Sequence[Chart]) -> None:
@@ -184,7 +225,7 @@ class ChartMapper(DataMapper[ChartSelector, Chart, ChartWrite]):
         self.client.migration.lookup.time_series(external_id=list(timeseries_external_ids))
 
     def _map_single_item(self, item: Chart) -> tuple[ChartWrite | None, ChartMigrationIssue]:
-        issue = ChartMigrationIssue(chart_external_id=item.external_id)
+        issue = ChartMigrationIssue(chart_external_id=item.external_id, id=item.external_id)
         time_series_collection = item.data.time_series_collection or []
         timeseries_core_collection = self._create_timeseries_core_collection(time_series_collection, issue)
         if issue.has_issues:
@@ -280,16 +321,30 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas
     DEFAULT_TIMESERIES_VIEW = ViewId("cdf_cdm", "CogniteTimeSeries", "v1")
 
     def __init__(self, client: ToolkitClient, dry_run: bool, skip_on_missing_ref: bool = False) -> None:
-        self.client = client
+        super().__init__(client)
         self.dry_run = dry_run
         self.skip_on_missing_ref = skip_on_missing_ref
 
-    def map(self, source: Sequence[IndustrialCanvas]) -> Sequence[tuple[IndustrialCanvasApply | None, MigrationIssue]]:
+    def map(self, source: Sequence[IndustrialCanvas]) -> Sequence[IndustrialCanvasApply | None]:
         self._populate_cache(source)
-        output: list[tuple[IndustrialCanvasApply | None, MigrationIssue]] = []
+        output: list[IndustrialCanvasApply | None] = []
+        issues: list[CanvasMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.as_id()
+
+            if issue.missing_reference_ids:
+                self.logger.tracker.add_issue(identifier, "Missing reference IDs")
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output
 
     @property
@@ -340,7 +395,9 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas
 
     def _map_single_item(self, canvas: IndustrialCanvas) -> tuple[IndustrialCanvasApply | None, CanvasMigrationIssue]:
         update = canvas.as_write()
-        issue = CanvasMigrationIssue(canvas_external_id=canvas.canvas.external_id, canvas_name=canvas.canvas.name)
+        issue = CanvasMigrationIssue(
+            canvas_external_id=canvas.canvas.external_id, canvas_name=canvas.canvas.name, id=canvas.canvas.name
+        )
 
         remaining_container_references: list[ContainerReferenceApply] = []
         new_fdm_references: list[FdmInstanceContainerReferenceApply] = []
@@ -399,17 +456,26 @@ class CanvasMapper(DataMapper[CanvasSelector, IndustrialCanvas, IndustrialCanvas
 
 
 class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrationRequest]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(
-        self, source: Sequence[ThreeDModelResponse]
-    ) -> Sequence[tuple[ThreeDMigrationRequest | None, MigrationIssue]]:
+    def map(self, source: Sequence[ThreeDModelResponse]) -> Sequence[ThreeDMigrationRequest | None]:
         self._populate_cache(source)
-        output: list[tuple[ThreeDMigrationRequest | None, MigrationIssue]] = []
+        output: list[ThreeDMigrationRequest | None] = []
+        issues: list[ThreeDModelMigrationIssue] = []
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = item.name
+
+            if issue.error_message:
+                for error in issue.error_message:
+                    self.logger.tracker.add_issue(identifier, error)
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output
 
     def _populate_cache(self, source: Sequence[ThreeDModelResponse]) -> None:
@@ -422,7 +488,7 @@ class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrati
     def _map_single_item(
        self, item: ThreeDModelResponse
    ) -> tuple[ThreeDMigrationRequest | None, ThreeDModelMigrationIssue]:
-        issue = ThreeDModelMigrationIssue(model_name=item.name, model_id=item.id)
+        issue = ThreeDModelMigrationIssue(model_name=item.name, model_id=item.id, id=item.name)
         instance_space: str | None = None
         last_revision_id: int | None = None
         model_type: Literal["CAD", "PointCloud", "Image360"] | None = None
@@ -478,17 +544,27 @@ class ThreeDMapper(DataMapper[ThreeDSelector, ThreeDModelResponse, ThreeDMigrati
 
 
 class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingClassicResponse, AssetMappingDMRequest]):
-    def __init__(self, client: ToolkitClient) -> None:
-        self.client = client
-
-    def map(
-        self, source: Sequence[AssetMappingClassicResponse]
-    ) -> Sequence[tuple[AssetMappingDMRequest | None, MigrationIssue]]:
-        output: list[tuple[AssetMappingDMRequest | None, MigrationIssue]] = []
+    def map(self, source: Sequence[AssetMappingClassicResponse]) -> Sequence[AssetMappingDMRequest | None]:
+        output: list[AssetMappingDMRequest | None] = []
+        issues: list[ThreeDModelMigrationIssue] = []
         self._populate_cache(source)
         for item in source:
             mapped_item, issue = self._map_single_item(item)
-            output.append((mapped_item, issue))
+            identifier = f"AssetMapping_{item.model_id!s}_{item.revision_id!s}_{item.asset_id!s}"
+
+            if issue.error_message:
+                for error in issue.error_message:
+                    self.logger.tracker.add_issue(identifier, error)
+
+            if issue.has_issues:
+                issues.append(issue)
+
+            if mapped_item is None:
+                self.logger.tracker.finalize_item(identifier, "failure")
+
+            output.append(mapped_item)
+        if issues:
+            self.logger.log(issues)
         return output
 
     def _populate_cache(self, source: Sequence[AssetMappingClassicResponse]) -> None:
@@ -501,7 +577,9 @@ class ThreeDAssetMapper(DataMapper[ThreeDSelector, AssetMappingClassicResponse,
     def _map_single_item(
        self, item: AssetMappingClassicResponse
    ) -> tuple[AssetMappingDMRequest | None, ThreeDModelMigrationIssue]:
-        issue = ThreeDModelMigrationIssue(model_name=f"AssetMapping_{item.model_id}", model_id=item.model_id)
+        issue = ThreeDModelMigrationIssue(
+            model_name=f"AssetMapping_{item.model_id}", model_id=item.model_id, id=f"AssetMapping_{item.model_id}"
+        )
         asset_instance_id = item.asset_instance_id
         if item.asset_id and asset_instance_id is None:
             asset_node_id = self.client.migration.lookup.assets(item.asset_id)
cognite_toolkit/_cdf_tk/commands/_migrate/issues.py

@@ -1,34 +1,27 @@
-import json
-from typing import Any, ClassVar
+from typing import Any, Literal
 
-from cognite.client.data_classes.data_modeling import NodeId
-from cognite.client.utils._identifier import InstanceId
-from cognite.client.utils._text import to_camel_case
 from pydantic import BaseModel, Field, field_serializer
+from pydantic.alias_generators import to_camel
 
+from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.migration import AssetCentricId
-from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
+from cognite_toolkit._cdf_tk.storageio.logger import LogEntry
 
 
-class MigrationObject(BaseModel, alias_generator=to_camel_case, extra="ignore", populate_by_name=True): ...
-
-
-class MigrationIssue(MigrationObject):
+class MigrationIssue(LogEntry):
     """Represents an issue encountered during migration."""
 
-    type: ClassVar[str]
-
-    def dump(self) -> dict[str, JsonVal]:
-        # Dump json to ensure it is serializable
-        dumped = json.loads(self.model_dump_json(by_alias=True))
-        dumped["type"] = self.type
-        return dumped
+    type: str
 
     @property
     def has_issues(self) -> bool:
         """Check if there are any issues recorded in this MigrationIssue."""
         return True
 
+    def dump(self) -> dict[str, Any]:
+        """Serialize the MigrationIssue to a dictionary."""
+        return self.model_dump(by_alias=True)
+
 
 class ThreeDModelMigrationIssue(MigrationIssue):
     """Represents a 3D model migration issue encountered during migration.
@@ -37,7 +30,7 @@ class ThreeDModelMigrationIssue(MigrationIssue):
         model_external_id (str): The external ID of the 3D model that could not be migrated.
     """
 
-    type: ClassVar[str] = "threeDModelMigration"
+    type: Literal["threeDModelMigration"] = "threeDModelMigration"
     model_name: str
     model_id: int
     error_message: list[str] = Field(default_factory=list)
@@ -55,7 +48,7 @@ class ChartMigrationIssue(MigrationIssue):
         chart_external_id (str): The external ID of the chart that could not be migrated.
     """
 
-    type: ClassVar[str] = "chartMigration"
+    type: Literal["chartMigration"] = "chartMigration"
     chart_external_id: str
     missing_timeseries_ids: list[int] = Field(default_factory=list)
     missing_timeseries_external_ids: list[str] = Field(default_factory=list)
@@ -70,7 +63,7 @@ class ChartMigrationIssue(MigrationIssue):
 
 
 class CanvasMigrationIssue(MigrationIssue):
-    type: ClassVar[str] = "canvasMigration"
+    type: Literal["canvasMigration"] = "canvasMigration"
     canvas_external_id: str
     canvas_name: str
     missing_reference_ids: list[AssetCentricId] = Field(default_factory=list)
@@ -84,7 +77,7 @@ class CanvasMigrationIssue(MigrationIssue):
 class ReadIssue(MigrationIssue):
     """Represents a read issue encountered during migration."""
 
-    type: ClassVar[str] = "read"
+    ...
 
 
 class ReadFileIssue(ReadIssue):
@@ -95,7 +88,7 @@ class ReadFileIssue(ReadIssue):
         error (str | None): An optional error message providing additional details about the read issue.
     """
 
-    type: ClassVar[str] = "fileRead"
+    type: Literal["fileRead"] = "fileRead"
 
     row_no: int
     error: str | None = None
@@ -109,7 +102,7 @@ class ReadAPIIssue(ReadIssue):
         error (str | None): An optional error message providing additional details about the read issue.
     """
 
-    type: ClassVar[str] = "apiRead"
+    type: Literal["apiRead"] = "apiRead"
     asset_centric_id: AssetCentricId
     error: str | None = None
 
@@ -121,7 +114,7 @@ class ReadAPIIssue(ReadIssue):
         }
 
 
-class FailedConversion(MigrationObject):
+class FailedConversion(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
     """Represents a property that failed to convert during migration.
 
     Attributes:
@@ -135,7 +128,7 @@ class FailedConversion(MigrationObject):
     error: str
 
 
-class InvalidPropertyDataType(MigrationObject):
+class InvalidPropertyDataType(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
     """Represents a property with an invalid type during migration.
 
     Attributes:
@@ -160,9 +153,9 @@ class ConversionIssue(MigrationIssue):
         failed_conversions (list[FailedConversion]): List of properties that failed to convert with reasons.
     """
 
-    type: ClassVar[str] = "conversion"
+    type: Literal["conversion"] = "conversion"
     asset_centric_id: AssetCentricId
-    instance_id: InstanceId
+    instance_id: NodeReference
     missing_asset_centric_properties: list[str] = Field(default_factory=list)
     missing_instance_properties: list[str] = Field(default_factory=list)
     invalid_instance_property_types: list[InvalidPropertyDataType] = Field(default_factory=list)
@@ -181,10 +174,6 @@ class ConversionIssue(MigrationIssue):
             or self.missing_instance_space
         )
 
-    @field_serializer("instance_id")
-    def serialize_instance_id(self, instance_id: NodeId) -> dict[str, str]:
-        return instance_id.dump(include_instance_type=True)
-
     @field_serializer("asset_centric_id")
     def serialize_asset_centric_id(self, asset_centric_id: AssetCentricId) -> dict[str, Any]:
         return {
@@ -197,16 +186,10 @@ class WriteIssue(MigrationIssue):
     """Represents a write issue encountered during migration.
 
     Attributes:
-        instance_id (InstanceId): The InstanceId of the data model instance that could not be written.
         status_code (int): The HTTP status code returned during the write operation.
         message (str | None): An optional message providing additional details about the write issue.
     """
 
-    type: ClassVar[str] = "write"
-    instance_id: InstanceId
+    type: Literal["write"] = "write"
     status_code: int
     message: str | None = None
-
-    @field_serializer("instance_id")
-    def serialize_instance_id(self, instance_id: NodeId) -> dict[str, str]:
-        return instance_id.dump(include_instance_type=True)
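The net effect of the issues.py hunks is that type moves from a ClassVar[str] to a Literal pydantic field, and MigrationIssue now inherits id from LogEntry, so a plain model_dump(by_alias=True) already includes the discriminator and the old JSON round-trip in dump() is gone. A rough sketch of the resulting behavior, with illustrative field values:

issue = ChartMigrationIssue(
    id="chart-1",  # inherited from LogEntry
    chart_external_id="chart-1",
    missing_timeseries_ids=[42],
)
print(issue.dump())
# Roughly: {"id": "chart-1", "type": "chartMigration",
#           "chartExternalId": "chart-1", "missingTimeseriesIds": [42], ...}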
cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py

@@ -78,7 +78,7 @@ class AssetCentricMigrationIO(
         self.skip_linking = skip_linking
 
     def as_id(self, item: AssetCentricMapping) -> str:
-        return f"{item.mapping.resource_type}_{item.mapping.id}"
+        return str(item.mapping.as_asset_centric_id())
 
     def stream_data(self, selector: AssetCentricMigrationSelector, limit: int | None = None) -> Iterator[Page]:
         if isinstance(selector, MigrationCSVFileSelector):
@@ -181,7 +181,7 @@ class AssetCentricMigrationIO(
 
         pending_instance_id_endpoint = self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND[selector.kind]
         results: list[HTTPMessage] = []
-        to_upload = self.link_asset_centric(data_chunk, http_client, results, pending_instance_id_endpoint)
+        to_upload = self.link_asset_centric(data_chunk, http_client, pending_instance_id_endpoint)
         if to_upload:
             results.extend(list(super().upload_items(to_upload, http_client, None)))
         return results
@@ -191,7 +191,6 @@ class AssetCentricMigrationIO(
         cls,
         data_chunk: Sequence[UploadItem[InstanceApply]],
         http_client: HTTPClient,
-        results: list[HTTPMessage],
         pending_instance_id_endpoint: str,
     ) -> Sequence[UploadItem[InstanceApply]]:
         """Links asset-centric resources to their (uncreated) instances using the pending-instance-ids endpoint."""
@@ -212,7 +211,6 @@ class AssetCentricMigrationIO(
         for res in batch_results:
             if isinstance(res, SuccessResponseItems):
                 successful_linked.update(res.ids)
-        results.extend(batch_results)
         to_upload = [item for item in data_chunk if item.source_id in successful_linked]
         return to_upload
 
@@ -393,7 +391,7 @@ class ThreeDMigrationIO(UploadableStorageIO[ThreeDSelector, ThreeDModelResponse,
         self.data_model_type = data_model_type
 
     def as_id(self, item: ThreeDModelResponse) -> str:
-        return f"{item.name}_{item.id!s}"
+        return item.name
 
     def _is_selected(self, item: ThreeDModelResponse, included_models: set[int] | None) -> bool:
         return self._is_correct_type(item) and (included_models is None or item.id in included_models)
cognite_toolkit/_cdf_tk/commands/build_cmd.py

@@ -11,7 +11,7 @@ from rich import print
 from rich.panel import Panel
 from rich.progress import track
 
-from cognite_toolkit._cdf_tk.builders import Builder, create_builder
+from cognite_toolkit._cdf_tk.builders import Builder, FunctionBuilder, create_builder
 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.raw import RawDatabase
@@ -33,6 +33,7 @@ from cognite_toolkit._cdf_tk.cruds import (
     DataSetsCRUD,
     ExtractionPipelineConfigCRUD,
     FileCRUD,
+    FunctionCRUD,
     LocationFilterCRUD,
     NodeCRUD,
     RawDatabaseCRUD,
@@ -415,6 +416,15 @@ class BuildCommand(ToolkitCommand):
 
             build_resources_by_folder[resource_name].extend(built_resources)
 
+            # Collect validation metrics from FunctionBuilder
+            if resource_name == FunctionCRUD.folder_name and isinstance(builder, FunctionBuilder):
+                self._additional_tracking_info.function_validation_count += builder.validation_count
+                self._additional_tracking_info.function_validation_failures += builder.validation_failures
+                self._additional_tracking_info.function_validation_credential_errors += (
+                    builder.validation_credential_errors
+                )
+                self._additional_tracking_info.function_validation_time_ms += builder.validation_time_ms
+
         return build_resources_by_folder
 
     def _get_builder(self, build_dir: Path, resource_name: str) -> Builder:
cognite_toolkit/_cdf_tk/data_classes/_tracking_info.py

@@ -32,6 +32,10 @@ class CommandTrackingInfo(BaseModel):
     downloaded_library_ids: set[str] = Field(default_factory=set, alias="downloadedLibraryIds")
     downloaded_package_ids: set[str] = Field(default_factory=set, alias="downloadedPackageIds")
     downloaded_module_ids: set[str] = Field(default_factory=set, alias="downloadedModuleIds")
+    function_validation_count: int = Field(default=0, alias="functionValidationCount")
+    function_validation_failures: int = Field(default=0, alias="functionValidationFailures")
+    function_validation_credential_errors: int = Field(default=0, alias="functionValidationCredentialErrors")
+    function_validation_time_ms: int = Field(default=0, alias="functionValidationTimeMs")
 
     def to_dict(self) -> dict[str, Any]:
         """Convert the tracking info to a dictionary for Mixpanel.
cognite_toolkit/_cdf_tk/feature_flags.py

@@ -65,6 +65,10 @@ class Flags(Enum):
         visible=True,
         description="Enables the support for simulator model resources",
     )
+    FUNCTION_REQUIREMENTS_VALIDATION = FlagMetadata(
+        visible=True,
+        description="Enables validation of function requirements.txt during build using pip dry-run",
+    )
 
     def is_enabled(self) -> bool:
         return FeatureFlag.is_enabled(self)
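The new flag follows the existing Flags pattern, so callers gate the check via is_enabled(). A hypothetical gating sketch; the validator call itself is elided because the signature of validate_requirements_with_pip is not part of this diff:

from cognite_toolkit._cdf_tk.feature_flags import Flags

if Flags.FUNCTION_REQUIREMENTS_VALIDATION.is_enabled():
    ...  # run the pip dry-run validation of the function's requirements.txt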
cognite_toolkit/_cdf_tk/storageio/logger.py

@@ -14,7 +14,6 @@ class LogEntry(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_
     """Represents a log entry for tracking storage I/O operations."""
 
     id: str
-    message: str
 
 
 OperationStatus: TypeAlias = Literal["success", "failure", "unchanged", "pending"]
cognite_toolkit/_cdf_tk/tk_warnings/__init__.py

@@ -12,6 +12,7 @@ from .fileread import (
     EnvironmentVariableMissingWarning,
     FileExistsWarning,
     FileReadWarning,
+    FunctionRequirementsValidationWarning,
     MissingFileWarning,
     MissingReferencedWarning,
     MissingRequiredParameterWarning,
@@ -51,6 +52,7 @@ __all__ = [
     "EnvironmentVariableMissingWarning",
     "FileExistsWarning",
     "FileReadWarning",
+    "FunctionRequirementsValidationWarning",
     "GeneralWarning",
     "HTTPWarning",
     "HighSeverityWarning",
cognite_toolkit/_cdf_tk/tk_warnings/fileread.py

@@ -270,6 +270,26 @@ class StreamlitRequirementsWarning(FileReadWarning):
         return f"Missing dependencies in requirements.txt: {', '.join(self.dependencies)}"
 
 
+@dataclass(frozen=True)
+class FunctionRequirementsValidationWarning(FileReadWarning):
+    severity: ClassVar[SeverityLevel] = SeverityLevel.HIGH
+    function_external_id: str
+    error_details: str
+    is_credential_error: bool
+
+    def get_message(self) -> str:
+        message = (
+            f"Function [bold]{self.function_external_id}[/bold] requirements.txt validation failed. "
+            f"Packages could not be resolved: {self.error_details}"
+        )
+        if self.is_credential_error:
+            message += (
+                f"\n{HINT_LEAD_TEXT}This appears to be a credential/authentication issue. "
+                "Check if the Personal Access Token (PAT) or credentials in indexUrl are valid and not expired."
+            )
+        return message
+
+
 @dataclass(frozen=True)
 class ResourceFormatWarning(FileReadWarning):
     severity: ClassVar[SeverityLevel] = SeverityLevel.HIGH
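As a usage sketch, the new warning is constructed from the three fields above; the filepath argument is an assumption here, since FileReadWarning's own fields are outside this diff:

from pathlib import Path

from cognite_toolkit._cdf_tk.tk_warnings import FunctionRequirementsValidationWarning

warning = FunctionRequirementsValidationWarning(
    filepath=Path("modules/my_module/functions/my_fn/requirements.txt"),  # assumed base-class field
    function_external_id="my_fn",
    error_details="my-internal-pkg==1.2.3 could not be resolved",
    is_credential_error=True,
)
print(warning.get_message())  # appends the PAT/indexUrl hint because is_credential_error is True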
cognite_toolkit/_cdf_tk/utils/__init__.py

@@ -26,10 +26,12 @@ from .modules import (
     module_path_display_name,
     resource_folder_from_path,
 )
+from .pip_validator import PipValidationResult, validate_requirements_with_pip
 from .sentry_utils import sentry_exception_filter
 
 __all__ = [
     "GraphQLParser",
+    "PipValidationResult",
     "YAMLComment",
     "YAMLWithComments",
     "calculate_directory_hash",
@@ -57,4 +59,5 @@ __all__ = [
     "stringify_value_by_key_in_yaml",
     "tmp_build_directory",
     "to_diff",
+    "validate_requirements_with_pip",
 ]
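The new pip_validator module itself (+96 lines) is not shown in this diff, so the exact signature of validate_requirements_with_pip is unknown here. The technique named in the flag description, a pip dry-run, can be sketched standalone like this (assumes pip >= 22.2, which introduced --dry-run):

import subprocess
import sys


def check_requirements(requirements_txt: str) -> tuple[bool, str]:
    """Resolve a requirements file without installing anything."""
    proc = subprocess.run(
        [sys.executable, "-m", "pip", "install", "--dry-run", "-r", requirements_txt],
        capture_output=True,
        text=True,
    )
    # Return code 0 means every pin resolved; stderr carries resolver errors.
    return proc.returncode == 0, proc.stderr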