cognite-toolkit 0.6.79__py3-none-any.whl → 0.6.81__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cognite-toolkit might be problematic.

Files changed (36)
  1. cognite_toolkit/_builtin_modules/cdf.toml +1 -1
  2. cognite_toolkit/_cdf.py +0 -4
  3. cognite_toolkit/_cdf_tk/apps/__init__.py +0 -2
  4. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +62 -14
  5. cognite_toolkit/_cdf_tk/apps/_modules_app.py +27 -0
  6. cognite_toolkit/_cdf_tk/apps/_purge.py +4 -3
  7. cognite_toolkit/_cdf_tk/commands/__init__.py +0 -6
  8. cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py +0 -4
  9. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +6 -8
  10. cognite_toolkit/_cdf_tk/commands/_migrate/creators.py +26 -2
  11. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +42 -8
  12. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +4 -6
  13. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +203 -0
  14. cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py +66 -0
  15. cognite_toolkit/_cdf_tk/commands/_purge.py +1 -8
  16. cognite_toolkit/_cdf_tk/commands/modules.py +59 -14
  17. cognite_toolkit/_cdf_tk/constants.py +3 -0
  18. cognite_toolkit/_cdf_tk/feature_flags.py +0 -4
  19. cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +6 -6
  20. cognite_toolkit/_cdf_tk/storageio/_base.py +2 -5
  21. cognite_toolkit/_cdf_tk/utils/useful_types.py +3 -1
  22. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  23. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  24. cognite_toolkit/_resources/cdf.toml +13 -0
  25. cognite_toolkit/_version.py +1 -1
  26. {cognite_toolkit-0.6.79.dist-info → cognite_toolkit-0.6.81.dist-info}/METADATA +1 -1
  27. {cognite_toolkit-0.6.79.dist-info → cognite_toolkit-0.6.81.dist-info}/RECORD +30 -33
  28. cognite_toolkit/_cdf_tk/apps/_populate_app.py +0 -80
  29. cognite_toolkit/_cdf_tk/commands/_migrate/adapter.py +0 -368
  30. cognite_toolkit/_cdf_tk/commands/_migrate/assets.py +0 -0
  31. cognite_toolkit/_cdf_tk/commands/_migrate/files.py +0 -165
  32. cognite_toolkit/_cdf_tk/commands/_migrate/timeseries.py +0 -165
  33. cognite_toolkit/_cdf_tk/commands/_populate.py +0 -306
  34. {cognite_toolkit-0.6.79.dist-info → cognite_toolkit-0.6.81.dist-info}/WHEEL +0 -0
  35. {cognite_toolkit-0.6.79.dist-info → cognite_toolkit-0.6.81.dist-info}/entry_points.txt +0 -0
  36. {cognite_toolkit-0.6.79.dist-info → cognite_toolkit-0.6.81.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_builtin_modules/cdf.toml CHANGED
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
  [modules]
  # This is the version of the modules. It should not be changed manually.
  # It will be updated by the 'cdf modules upgrade' command.
- version = "0.6.79"
+ version = "0.6.81"


  [plugins]

cognite_toolkit/_cdf.py CHANGED
@@ -29,7 +29,6 @@ from cognite_toolkit._cdf_tk.apps import (
  LandingApp,
  MigrateApp,
  ModulesApp,
- PopulateApp,
  ProfileApp,
  PurgeApp,
  RepoApp,
@@ -100,9 +99,6 @@ if Plugins.dump.value.is_enabled():
  if Plugins.purge.value.is_enabled() and not Flags.v07.is_enabled():
  _app.add_typer(PurgeApp(**default_typer_kws), name="purge")

- if Flags.POPULATE.is_enabled():
- _app.add_typer(PopulateApp(**default_typer_kws), name="populate")
-
  if Flags.PROFILE.is_enabled():
  _app.add_typer(ProfileApp(**default_typer_kws), name="profile")


cognite_toolkit/_cdf_tk/apps/__init__.py CHANGED
@@ -6,7 +6,6 @@ from ._dump_app import DumpApp
  from ._landing_app import LandingApp
  from ._migrate_app import MigrateApp
  from ._modules_app import ModulesApp
- from ._populate_app import PopulateApp
  from ._profile_app import ProfileApp
  from ._purge import PurgeApp
  from ._repo_app import RepoApp
@@ -22,7 +21,6 @@ __all__ = [
  "LandingApp",
  "MigrateApp",
  "ModulesApp",
- "PopulateApp",
  "ProfileApp",
  "PurgeApp",
  "RepoApp",

cognite_toolkit/_cdf_tk/apps/_migrate_app.py CHANGED
@@ -12,17 +12,20 @@ from cognite_toolkit._cdf_tk.commands import (
  MigrationPrepareCommand,
  )
  from cognite_toolkit._cdf_tk.commands._migrate import MigrationCommand
- from cognite_toolkit._cdf_tk.commands._migrate.adapter import (
- AssetCentricMigrationIOAdapter,
- FileMetaIOAdapter,
+ from cognite_toolkit._cdf_tk.commands._migrate.creators import (
+ InfieldV2ConfigCreator,
+ InstanceSpaceCreator,
+ SourceSystemCreator,
+ )
+ from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import AssetCentricMapper
+ from cognite_toolkit._cdf_tk.commands._migrate.migration_io import (
+ AssetCentricMigrationIO,
+ )
+ from cognite_toolkit._cdf_tk.commands._migrate.selectors import (
+ AssetCentricMigrationSelector,
  MigrateDataSetSelector,
  MigrationCSVFileSelector,
- MigrationSelector,
- TimeSeriesIOAdapter,
  )
- from cognite_toolkit._cdf_tk.commands._migrate.creators import InstanceSpaceCreator, SourceSystemCreator
- from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import AssetCentricMapper
- from cognite_toolkit._cdf_tk.storageio import AssetIO, EventIO
  from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
  from cognite_toolkit._cdf_tk.utils.cli_args import parse_view_str
  from cognite_toolkit._cdf_tk.utils.interactive_select import (
@@ -47,6 +50,8 @@ class MigrateApp(typer.Typer):
  self.command("timeseries")(self.timeseries)
  self.command("files")(self.files)
  self.command("canvas")(self.canvas)
+ # Uncomment when infield v2 config migration is ready
+ # self.command("infield-configs")(self.infield_configs)

  def main(self, ctx: typer.Context) -> None:
  """Migrate resources from Asset-Centric to data modeling in CDF."""
@@ -317,7 +322,7 @@
  cmd.run(
  lambda: cmd.migrate(
  selected=selected,
- data=AssetCentricMigrationIOAdapter(client, AssetIO(client)),
+ data=AssetCentricMigrationIO(client),
  mapper=AssetCentricMapper(client),
  log_dir=log_dir,
  dry_run=dry_run,
@@ -337,11 +342,11 @@
  kind: AssetCentricKind,
  resource_type: str,
  container_id: ContainerId,
- ) -> tuple[MigrationSelector, bool, bool]:
+ ) -> tuple[AssetCentricMigrationSelector, bool, bool]:
  if data_set_id is not None and mapping_file is not None:
  raise typer.BadParameter("Cannot specify both data_set_id and mapping_file")
  elif mapping_file is not None:
- selected: MigrationSelector = MigrationCSVFileSelector(datafile=mapping_file, kind=kind)
+ selected: AssetCentricMigrationSelector = MigrationCSVFileSelector(datafile=mapping_file, kind=kind)
  elif data_set_id is not None:
  parsed_view = parse_view_str(consumption_view) if consumption_view is not None else None
  selected = MigrateDataSetSelector(
@@ -466,7 +471,7 @@
  cmd.run(
  lambda: cmd.migrate(
  selected=selected,
- data=AssetCentricMigrationIOAdapter(client, EventIO(client)),
+ data=AssetCentricMigrationIO(client),
  mapper=AssetCentricMapper(client),
  log_dir=log_dir,
  dry_run=dry_run,
@@ -573,7 +578,7 @@
  cmd.run(
  lambda: cmd.migrate(
  selected=selected,
- data=TimeSeriesIOAdapter(client, skip_linking=skip_linking),
+ data=AssetCentricMigrationIO(client, skip_linking=skip_linking),
  mapper=AssetCentricMapper(client),
  log_dir=log_dir,
  dry_run=dry_run,
@@ -681,7 +686,7 @@
  cmd.run(
  lambda: cmd.migrate(
  selected=selected,
- data=FileMetaIOAdapter(client, skip_linking=skip_linking),
+ data=AssetCentricMigrationIO(client, skip_linking=skip_linking),
  mapper=AssetCentricMapper(client),
  log_dir=log_dir,
  dry_run=dry_run,
@@ -734,3 +739,46 @@
  verbose=verbose,
  )
  )
+
+ @staticmethod
+ def infield_configs(
+ ctx: typer.Context,
+ output_dir: Annotated[
+ Path,
+ typer.Option(
+ "--output-dir",
+ "-o",
+ help="Path to the directory where the Infield V2 configuration definitions will be dumped. It is recommended "
+ "to govern these configurations in a git repository.",
+ ),
+ ] = Path("tmp"),
+ dry_run: Annotated[
+ bool,
+ typer.Option(
+ "--dry-run",
+ "-d",
+ help="If set, the migration will not be executed, but only a report of what would be done is printed.",
+ ),
+ ] = False,
+ verbose: Annotated[
+ bool,
+ typer.Option(
+ "--verbose",
+ "-v",
+ help="Turn on to get more verbose output when running the command",
+ ),
+ ] = False,
+ ) -> None:
+ """Creates Infield V2 configurations from existing APM Configurations in CDF."""
+ client = EnvironmentVariables.create_from_environment().get_client()
+
+ cmd = MigrationCommand()
+ cmd.run(
+ lambda: cmd.create(
+ client,
+ creator=InfieldV2ConfigCreator(client),
+ output_dir=output_dir,
+ dry_run=dry_run,
+ verbose=verbose,
+ )
+ )

cognite_toolkit/_cdf_tk/apps/_modules_app.py CHANGED
@@ -6,6 +6,7 @@ from rich import print

  from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
  from cognite_toolkit._cdf_tk.commands import ModulesCommand, PullCommand
+ from cognite_toolkit._cdf_tk.feature_flags import Flags
  from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
  from cognite_toolkit._version import __version__

@@ -51,6 +52,24 @@
  help="Clean target directory if it exists",
  ),
  ] = False,
+ library_url: Annotated[
+ str | None,
+ typer.Option(
+ "--library-url",
+ "-u",
+ help="URL of the library to add to the project.",
+ hidden=not Flags.EXTERNAL_LIBRARIES.is_enabled(),
+ ),
+ ] = None,
+ library_checksum: Annotated[
+ str | None,
+ typer.Option(
+ "--library-checksum",
+ "-c",
+ help="Checksum of the library to add to the project.",
+ hidden=not Flags.EXTERNAL_LIBRARIES.is_enabled(),
+ ),
+ ] = None,
  verbose: Annotated[
  bool,
  typer.Option(
@@ -62,12 +81,20 @@
  ) -> None:
  """Initialize or upgrade a new CDF project with templates interactively."""

+ if library_url and not library_checksum:
+ raise typer.BadParameter(
+ "--library-checksum must be provided when --library-url is specified.",
+ param_hint="--library-checksum",
+ )
+
  with ModulesCommand() as cmd:
  cmd.run(
  lambda: cmd.init(
  organization_dir=organization_dir,
  select_all=all,
  clean=clean,
+ library_url=library_url,
+ library_checksum=library_checksum,
  )
  )

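Note on the options added above: --library-url and --library-checksum are hidden unless the EXTERNAL_LIBRARIES feature flag is enabled, and a URL without a checksum is rejected. A minimal sketch of the resulting ModulesCommand.init call, mirroring the lambda in this diff (the directory, URL, and checksum values are hypothetical):

    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands import ModulesCommand

    # Sketch only: mirrors the cmd.init(...) call added in 0.6.81; all values are placeholders.
    with ModulesCommand() as cmd:
        cmd.run(
            lambda: cmd.init(
                organization_dir=Path("my_org"),  # hypothetical organization directory
                select_all=False,
                clean=False,
                library_url="https://example.com/library.zip",  # hypothetical library URL
                library_checksum="sha256:<checksum>",  # required whenever library_url is given
            )
        )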

cognite_toolkit/_cdf_tk/apps/_purge.py CHANGED
@@ -16,6 +16,7 @@ from cognite_toolkit._cdf_tk.storageio.selectors import (
  SelectedView,
  )
  from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
+ from cognite_toolkit._cdf_tk.utils.cli_args import parse_view_str
  from cognite_toolkit._cdf_tk.utils.interactive_select import DataModelingSelect


@@ -191,10 +192,10 @@
  @staticmethod
  def purge_instances(
  view: Annotated[
- list[str] | None,
+ str | None,
  typer.Argument(
  help="Purge instances with properties in the specified view. Expected format is "
- "'space externalId version'. For example 'cdf_cdm CogniteTimeSeries v1' will purge all nodes"
+ "'space:externalId/version'. For example 'cdf_cdm:CogniteTimeSeries/v1' will purge all nodes"
  "that have properties in the CogniteTimeSeries view. If not provided and no "
  "instance list is provided, interactive mode will be used.",
  ),
@@ -291,7 +292,7 @@
  elif instance_list is not None:
  selector = InstanceFileSelector(datafile=instance_list)
  elif view is not None:
- view_id = cmd.get_selected_view_id(view) # Will raise if not exactly one view
+ view_id = parse_view_str(view)
  selector = InstanceViewSelector(
  view=SelectedView(
  space=view_id.space, external_id=view_id.external_id, version=cast(str, view_id.version)
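Note on the view argument change above: purge instances now takes a single 'space:externalId/version' string and parses it with parse_view_str instead of a three-part list. A minimal sketch of the assumed usage, reusing the example from the help text:

    from cognite_toolkit._cdf_tk.utils.cli_args import parse_view_str

    # Sketch only: parses the single-string form documented in the help text above;
    # the resulting view id feeds space/external_id/version into InstanceViewSelector.
    view_id = parse_view_str("cdf_cdm:CogniteTimeSeries/v1")
    print(view_id.space, view_id.external_id, view_id.version)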

cognite_toolkit/_cdf_tk/commands/__init__.py CHANGED
@@ -1,11 +1,8 @@
  from ._download import DownloadCommand
  from ._migrate import (
- MigrateFilesCommand,
- MigrateTimeseriesCommand,
  MigrationCanvasCommand,
  MigrationPrepareCommand,
  )
- from ._populate import PopulateCommand
  from ._profile import ProfileAssetCentricCommand, ProfileAssetCommand, ProfileRawCommand, ProfileTransformationCommand
  from ._purge import PurgeCommand
  from ._upload import UploadCommand
@@ -34,12 +31,9 @@ __all__ = [
  "DumpResourceCommand",
  "FeatureFlagCommand",
  "InitCommand",
- "MigrateFilesCommand",
- "MigrateTimeseriesCommand",
  "MigrationCanvasCommand",
  "MigrationPrepareCommand",
  "ModulesCommand",
- "PopulateCommand",
  "ProfileAssetCentricCommand",
  "ProfileAssetCommand",
  "ProfileRawCommand",

cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py CHANGED
@@ -1,12 +1,8 @@
  from .canvas import MigrationCanvasCommand
  from .command import MigrationCommand
- from .files import MigrateFilesCommand
  from .prepare import MigrationPrepareCommand
- from .timeseries import MigrateTimeseriesCommand

  __all__ = [
- "MigrateFilesCommand",
- "MigrateTimeseriesCommand",
  "MigrationCanvasCommand",
  "MigrationCommand",
  "MigrationPrepareCommand",

cognite_toolkit/_cdf_tk/commands/_migrate/command.py CHANGED
@@ -1,9 +1,8 @@
  from collections.abc import Callable, Iterable, Sequence
  from enum import Enum
  from pathlib import Path
- from typing import TypeVar

- from cognite.client.data_classes._base import CogniteResource
+ from cognite.client.data_classes._base import T_CogniteResource
  from rich import print
  from rich.console import Console
  from rich.table import Table
@@ -28,12 +27,10 @@ from cognite_toolkit._cdf_tk.utils.fileio import Chunk, CSVWriter, NDJsonWriter,
  from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, ItemMessage, SuccessResponseItems
  from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
  from cognite_toolkit._cdf_tk.utils.progress_tracker import AVAILABLE_STATUS, ProgressTracker, Status
+ from cognite_toolkit._cdf_tk.utils.useful_types import T_WriteCogniteResource

  from .data_model import INSTANCE_SOURCE_VIEW_ID, MODEL_ID, RESOURCE_VIEW_MAPPING_VIEW_ID

- T_CogniteResource = TypeVar("T_CogniteResource", bound=CogniteResource)
- T_WriteCogniteResource = TypeVar("T_WriteCogniteResource", bound=CogniteResource)
-

  class MigrationCommand(ToolkitCommand):
  class Steps(str, Enum):
@@ -79,7 +76,7 @@
  ](
  download_iterable=self._download_iterable(selected, data, tracker),
  process=self._convert(mapper, data, tracker, log_file),
- write=self._upload(write_client, data, tracker, log_file, dry_run),
+ write=self._upload(selected, write_client, data, tracker, log_file, dry_run),
  iteration_count=iteration_count,
  max_queue_size=10,
  download_description=f"Downloading {selected.display_name}",
@@ -179,6 +176,7 @@
 
  def _upload(
  self,
+ selected: T_Selector,
  write_client: HTTPClient,
  target: UploadableStorageIO[T_Selector, T_CogniteResource, T_WriteCogniteResource],
  tracker: ProgressTracker[str],
@@ -192,7 +190,7 @@
  if dry_run:
  responses = [SuccessResponseItems(200, "", [item.source_id for item in data_item])]
  else:
- responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=None)
+ responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)

  issues: list[Chunk] = []
  for item in responses:
@@ -281,7 +279,7 @@
  result = deploy_cmd.dry_run_deploy(resources, crud, False, False)
  else:
  result = deploy_cmd.actual_deploy(resources, crud)
- if result.calculated_total > 0:
+ if result.calculated_total > 0 and creator.HAS_LINEAGE:
  store_count = creator.store_lineage(resource_list)
  self.console(f"Stored lineage for {store_count:,} {creator.DISPLAY_NAME}.")


cognite_toolkit/_cdf_tk/commands/_migrate/creators.py CHANGED
@@ -20,6 +20,7 @@ from cognite.client.data_classes.documents import SourceFileProperty
  from cognite.client.data_classes.events import EventProperty

  from cognite_toolkit._cdf_tk.client import ToolkitClient
+ from cognite_toolkit._cdf_tk.client.data_classes.apm_config_v1 import APMConfig, APMConfigList
  from cognite_toolkit._cdf_tk.cruds import NodeCRUD, ResourceCRUD, SpaceCRUD
  from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError
  from cognite_toolkit._cdf_tk.utils import humanize_collection
@@ -34,10 +35,11 @@ class ResourceConfig:


  class MigrationCreator(ABC, Generic[T_CogniteResourceList]):
- """Base class for migration resources configurations that are created from asset-centric resources."""
+ """Base class for migration resources configurations that are created resources."""

  CRUD: type[ResourceCRUD]
  DISPLAY_NAME: str
+ HAS_LINEAGE: bool = True

  def __init__(self, client: ToolkitClient) -> None:
  self.client = client
@@ -50,7 +52,6 @@
  def resource_configs(self, resources: T_CogniteResourceList) -> list[ResourceConfig]:
  raise NotImplementedError("Subclasses should implement this method")

- @abstractmethod
  def store_lineage(self, resources: T_CogniteResourceList) -> int:
  """Store lineage information for the created resources.

@@ -65,6 +66,7 @@ class InstanceSpaceCreator(MigrationCreator[SpaceApplyList]):

  CRUD = SpaceCRUD
  DISPLAY_NAME = "Instance Space"
+ HAS_LINEAGE = True

  def __init__(
  self, client: ToolkitClient, datasets: DataSetList | None = None, data_set_external_ids: list[str] | None = None
@@ -205,3 +207,25 @@ class SourceSystemCreator(MigrationCreator[NodeApplyList]):
  def store_lineage(self, resources: NodeApplyList) -> int:
  # We already store lineage when creating the resources.
  return len(resources)
+
+
+ class InfieldV2ConfigCreator(MigrationCreator[NodeApplyList]):
+ CRUD = NodeCRUD
+ DISPLAY_NAME = "Infield V2 Configuration"
+ HAS_LINEAGE = False
+
+ def create_resources(self) -> NodeApplyList:
+ apm_config_nodes = self.client.data_modeling.instances.list(instance_type="node", sources=APMConfig.view_id)
+ apm_config = APMConfigList.from_nodes(apm_config_nodes)
+
+ new_config_nodes = NodeApplyList([])
+ for config in apm_config:
+ new_config = self._create_infield_v2_config(config)
+ new_config_nodes.append(new_config)
+ return new_config_nodes
+
+ def resource_configs(self, resources: NodeApplyList) -> list[ResourceConfig]:
+ return [ResourceConfig(filestem=node.external_id, data=node.dump()) for node in resources]
+
+ def _create_infield_v2_config(self, config: APMConfig) -> NodeApply:
+ raise NotImplementedError("To be implemented")

cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py CHANGED
@@ -1,10 +1,17 @@
+ from dataclasses import dataclass
  from pathlib import Path
- from typing import Any, Literal
+ from typing import Any, Generic, Literal

- from cognite.client.data_classes.data_modeling import NodeId, ViewId
+ from cognite.client.data_classes._base import (
+ T_WritableCogniteResource,
+ WriteableCogniteResource,
+ WriteableCogniteResourceList,
+ )
+ from cognite.client.data_classes.data_modeling import InstanceApply, NodeId, ViewId
  from cognite.client.utils._text import to_camel_case
  from pydantic import BaseModel, field_validator, model_validator

+ from cognite_toolkit._cdf_tk.client.data_classes.instances import InstanceApplyList
  from cognite_toolkit._cdf_tk.client.data_classes.migration import AssetCentricId
  from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import PendingInstanceId
  from cognite_toolkit._cdf_tk.commands._migrate.default_mappings import create_default_mappings
@@ -12,7 +19,7 @@ from cognite_toolkit._cdf_tk.exceptions import (
  ToolkitValueError,
  )
  from cognite_toolkit._cdf_tk.storageio._data_classes import ModelList
- from cognite_toolkit._cdf_tk.utils.useful_types import AssetCentricType
+ from cognite_toolkit._cdf_tk.utils.useful_types import AssetCentricKind, AssetCentricType, JsonVal


  class MigrationMapping(BaseModel, alias_generator=to_camel_case, extra="ignore", populate_by_name=True):
@@ -122,14 +129,14 @@ class MigrationMappingList(ModelList[MigrationMapping]):
  return {mapping.id: mapping for mapping in self}

  @classmethod
- def read_csv_file(cls, filepath: Path, resource_type: str | None = None) -> "MigrationMappingList":
+ def read_csv_file(cls, filepath: Path, resource_type: AssetCentricKind | None = None) -> "MigrationMappingList":
  if cls is not MigrationMappingList or resource_type is None:
  return super().read_csv_file(filepath)
  cls_by_resource_type: dict[str, type[MigrationMappingList]] = {
- "asset": AssetMigrationMappingList,
- "timeseries": TimeSeriesMigrationMappingList,
- "file": FileMigrationMappingList,
- "event": EventMigrationMappingList,
+ "Assets": AssetMigrationMappingList,
+ "TimeSeries": TimeSeriesMigrationMappingList,
+ "FileMetadata": FileMigrationMappingList,
+ "Events": EventMigrationMappingList,
  }
  if resource_type not in cls_by_resource_type:
  raise ToolkitValueError(
@@ -176,3 +183,30 @@ class TimeSeriesMigrationMappingList(MigrationMappingList):
  @classmethod
  def _get_base_model_cls(cls) -> type[TimeSeriesMapping]:
  return TimeSeriesMapping
+
+
+ @dataclass
+ class AssetCentricMapping(Generic[T_WritableCogniteResource], WriteableCogniteResource[InstanceApply]):
+ mapping: MigrationMapping
+ resource: T_WritableCogniteResource
+
+ def as_write(self) -> InstanceApply:
+ raise NotImplementedError()
+
+ def dump(self, camel_case: bool = True) -> dict[str, JsonVal]:
+ mapping = self.mapping.model_dump(exclude_unset=True, by_alias=camel_case)
+ # Ensure that resource type is always included, even if unset.
+ mapping["resourceType" if camel_case else "resource_type"] = self.mapping.resource_type
+ return {
+ "mapping": mapping,
+ "resource": self.resource.dump(camel_case=camel_case),
+ }
+
+
+ class AssetCentricMappingList(
+ WriteableCogniteResourceList[InstanceApply, AssetCentricMapping[T_WritableCogniteResource]]
+ ):
+ _RESOURCE: type = AssetCentricMapping
+
+ def as_write(self) -> InstanceApplyList:
+ return InstanceApplyList([item.as_write() for item in self])
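Note on the read_csv_file change above: the resource_type argument now uses the AssetCentricKind spellings ("Assets", "TimeSeries", "FileMetadata", "Events") rather than the old lowercase singular names. A minimal usage sketch (the CSV path is hypothetical):

    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands._migrate.data_classes import MigrationMappingList

    # Sketch only: "Assets" selects AssetMigrationMappingList; the old key "asset" is no longer accepted.
    mappings = MigrationMappingList.read_csv_file(Path("mappings.csv"), resource_type="Assets")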

cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py CHANGED
@@ -8,12 +8,10 @@ from cognite.client.data_classes.data_modeling import DirectRelationReference, I

  from cognite_toolkit._cdf_tk.client import ToolkitClient
  from cognite_toolkit._cdf_tk.client.data_classes.migration import ResourceViewMapping
- from cognite_toolkit._cdf_tk.commands._migrate.adapter import (
- AssetCentricMapping,
- MigrationSelector,
- )
  from cognite_toolkit._cdf_tk.commands._migrate.conversion import asset_centric_to_dm
+ from cognite_toolkit._cdf_tk.commands._migrate.data_classes import AssetCentricMapping
  from cognite_toolkit._cdf_tk.commands._migrate.issues import ConversionIssue, MigrationIssue
+ from cognite_toolkit._cdf_tk.commands._migrate.selectors import AssetCentricMigrationSelector
  from cognite_toolkit._cdf_tk.constants import MISSING_INSTANCE_SPACE
  from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
  from cognite_toolkit._cdf_tk.storageio._base import T_Selector, T_WriteCogniteResource
@@ -45,7 +43,7 @@ class DataMapper(Generic[T_Selector, T_CogniteResource, T_WriteCogniteResource],
  raise NotImplementedError("Subclasses must implement this method.")


- class AssetCentricMapper(DataMapper[MigrationSelector, AssetCentricMapping, InstanceApply]):
+ class AssetCentricMapper(DataMapper[AssetCentricMigrationSelector, AssetCentricMapping, InstanceApply]):
  def __init__(self, client: ToolkitClient) -> None:
  self.client = client
  self._ingestion_view_by_id: dict[ViewId, View] = {}
@@ -55,7 +53,7 @@ class AssetCentricMapper(DataMapper[MigrationSelector, AssetCentricMapping, Inst
  self._asset_mapping_by_id: dict[int, DirectRelationReference] = {}
  self._source_system_mapping_by_id: dict[str, DirectRelationReference] = {}

- def prepare(self, source_selector: MigrationSelector) -> None:
+ def prepare(self, source_selector: AssetCentricMigrationSelector) -> None:
  ingestion_view_ids = source_selector.get_ingestion_mappings()
  ingestion_views = self.client.migration.resource_view_mapping.retrieve(ingestion_view_ids)
  self._view_mapping_by_id = {view.external_id: view for view in ingestion_views}