cognite-toolkit 0.5.63__py3-none-any.whl → 0.5.65__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. cognite_toolkit/_builtin_modules/cdf.toml +1 -1
  2. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +45 -1
  3. cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py +70 -2
  4. cognite_toolkit/_cdf_tk/commands/__init__.py +2 -1
  5. cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py +2 -1
  6. cognite_toolkit/_cdf_tk/commands/_migrate/assets.py +201 -0
  7. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +15 -0
  8. cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py +2 -0
  9. cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py +5 -13
  10. cognite_toolkit/_cdf_tk/commands/deploy.py +36 -43
  11. cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py +10 -14
  12. cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py +50 -10
  13. cognite_toolkit/_cdf_tk/loaders/_worker.py +23 -22
  14. cognite_toolkit/_cdf_tk/resource_classes/workflow.py +15 -0
  15. cognite_toolkit/_cdf_tk/utils/text.py +42 -0
  16. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  17. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  18. cognite_toolkit/_version.py +1 -1
  19. {cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/METADATA +1 -1
  20. {cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/RECORD +23 -20
  21. {cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/WHEEL +0 -0
  22. {cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/entry_points.txt +0 -0
  23. {cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_builtin_modules/cdf.toml
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.5.63"
+version = "0.5.65"


 [plugins]
cognite_toolkit/_cdf_tk/apps/_migrate_app.py
@@ -3,7 +3,7 @@ from typing import Annotated, Any

 import typer

-from cognite_toolkit._cdf_tk.commands import MigrateTimeseriesCommand, MigrationPrepareCommand
+from cognite_toolkit._cdf_tk.commands import MigrateAssetsCommand, MigrateTimeseriesCommand, MigrationPrepareCommand
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables


@@ -12,6 +12,8 @@ class MigrateApp(typer.Typer):
         super().__init__(*args, **kwargs)
         self.callback(invoke_without_command=True)(self.main)
         self.command("prepare")(self.prepare)
+        # Uncomment when command is ready.
+        # self.command("assets")(self.assets)
         self.command("timeseries")(self.timeseries)

     def main(self, ctx: typer.Context) -> None:
@@ -57,6 +59,48 @@ class MigrateApp(typer.Typer):
             )
         )

+    @staticmethod
+    def assets(
+        ctx: typer.Context,
+        mapping_file: Annotated[
+            Path,
+            typer.Option(
+                "--mapping-file",
+                "-m",
+                help="Path to the mapping file that contains the mapping from Assets to CogniteAssets. "
+                "This file is expected to have the following columns: [id/externalId, dataSetId, space, externalId]. "
+                "The dataSetId is optional and can be skipped. If it is set, it is used to check access to the data set.",
+            ),
+        ],
+        dry_run: Annotated[
+            bool,
+            typer.Option(
+                "--dry-run",
+                "-d",
+                help="If set, the migration will not be executed; only a report of what would be done is printed.",
+            ),
+        ] = False,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command.",
+            ),
+        ] = False,
+    ) -> None:
+        """Migrate Assets to CogniteAssets."""
+        client = EnvironmentVariables.create_from_environment().get_client()
+        cmd = MigrateAssetsCommand()
+        cmd.run(
+            lambda: cmd.migrate_assets(
+                client,
+                mapping_file=mapping_file,
+                dry_run=dry_run,
+                verbose=verbose,
+            )
+        )
+
     @staticmethod
     def timeseries(
         ctx: typer.Context,
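Note: once the commented-out registration above is enabled, the invocation would presumably look like the following (hypothetical until the `assets` command ships; the flags come from the options defined above):

    cdf migrate assets --mapping-file mapping.csv --dry-run --verbose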
cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py
@@ -1,6 +1,7 @@
 from collections.abc import Sequence
-from typing import Any, overload
+from typing import Any, cast, overload

+from cognite.client import ClientConfig, CogniteClient
 from cognite.client._api.time_series import SortSpec, TimeSeriesAPI
 from cognite.client._constants import DEFAULT_LIMIT_READ
 from cognite.client.data_classes.data_modeling import NodeId
@@ -19,6 +20,11 @@ from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import Pe
 class ExtendedTimeSeriesAPI(TimeSeriesAPI):
     """Extended TimeSeriesAPI to include pending ID methods."""

+    def __init__(self, config: ClientConfig, api_version: str | None, cognite_client: CogniteClient) -> None:
+        super().__init__(config, api_version, cognite_client)
+        self._PENDING_IDS_LIMIT = 1000
+        self._UNLINK_LIMIT = 1000
+
     @overload
     def set_pending_ids(
         self, instance_id: NodeId | tuple[str, str], id: int | None = None, external_id: str | None = None
@@ -74,7 +80,7 @@ class ExtendedTimeSeriesAPI(TimeSeriesAPI):
                },
                "api_subversion": "alpha",
            }
-            for id_chunk in split_into_chunks(list(identifiers), 1000)
+            for id_chunk in split_into_chunks(list(identifiers), self._PENDING_IDS_LIMIT)
        ]
        tasks_summary = execute_tasks(
            self._post,
@@ -90,6 +96,68 @@ class ExtendedTimeSeriesAPI(TimeSeriesAPI):

         return ExtendedTimeSeriesList._load(retrieved_items, cognite_client=self._cognite_client)

+    @overload
+    def unlink_instance_ids(
+        self,
+        id: int | None = None,
+        external_id: str | None = None,
+    ) -> ExtendedTimeSeries | None: ...
+
+    @overload
+    def unlink_instance_ids(
+        self,
+        id: Sequence[int] | None = None,
+        external_id: SequenceNotStr[str] | None = None,
+    ) -> ExtendedTimeSeriesList: ...
+
+    def unlink_instance_ids(
+        self,
+        id: int | Sequence[int] | None = None,
+        external_id: str | SequenceNotStr[str] | None = None,
+    ) -> ExtendedTimeSeries | ExtendedTimeSeriesList | None:
+        """Unlink pending instance IDs from time series.
+
+        Args:
+            id (int | Sequence[int] | None): The ID(s) of the time series.
+            external_id (str | SequenceNotStr[str] | None): The external ID(s) of the time series.
+
+        """
+        if id is None and external_id is None:
+            return None
+        if isinstance(id, int) and isinstance(external_id, str):
+            raise ValueError("Cannot specify both id and external_id as single values. Use one or the other.")
+        is_single = isinstance(id, int) or isinstance(external_id, str)
+        identifiers = IdentifierSequence.load(id, external_id)
+
+        tasks = [
+            {
+                "url_path": f"{self._RESOURCE_PATH}/unlink-instance-ids",
+                "json": {"items": id_chunk},
+                "api_subversion": "alpha",
+            }
+            for id_chunk in split_into_chunks(identifiers.as_dicts(), self._UNLINK_LIMIT)
+        ]
+        tasks_summary = execute_tasks(
+            self._post,
+            tasks,
+            max_workers=self._config.max_workers,
+            fail_fast=True,
+        )
+        tasks_summary.raise_compound_exception_if_failed_tasks(
+            task_unwrap_fn=unpack_items_in_payload,
+        )
+
+        retrieved_items = tasks_summary.joined_results(lambda res: res.json()["items"])
+
+        result = ExtendedTimeSeriesList._load(retrieved_items, cognite_client=self._cognite_client)
+        if is_single:
+            if len(result) == 0:
+                return None
+            if len(result) > 1:
+                raise ValueError("Expected a single time series, but multiple were returned.")
+            return cast(ExtendedTimeSeries, result[0])
+        return cast(ExtendedTimeSeriesList, result)
+
     def retrieve(
         self, id: int | None = None, external_id: str | None = None, instance_id: NodeId | None = None
     ) -> ExtendedTimeSeries | None:
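A usage sketch for the new unlink method (identifiers are hypothetical; assumes a configured client whose time_series API is this ExtendedTimeSeriesAPI):

    # Single time series: returns ExtendedTimeSeries, or None if nothing matched.
    ts = client.time_series.unlink_instance_ids(id=123)

    # Batch: requests are chunked by _UNLINK_LIMIT (1000) and run in parallel;
    # returns an ExtendedTimeSeriesList.
    ts_list = client.time_series.unlink_instance_ids(external_id=["ts-a", "ts-b"])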
cognite_toolkit/_cdf_tk/commands/__init__.py
@@ -1,4 +1,4 @@
-from ._migrate import MigrateTimeseriesCommand, MigrationPrepareCommand
+from ._migrate import MigrateAssetsCommand, MigrateTimeseriesCommand, MigrationPrepareCommand
 from ._populate import PopulateCommand
 from ._profile import ProfileAssetCentricCommand, ProfileTransformationCommand
 from ._purge import PurgeCommand
@@ -26,6 +26,7 @@ __all__ = [
     "DumpResourceCommand",
     "FeatureFlagCommand",
     "InitCommand",
+    "MigrateAssetsCommand",
     "MigrateTimeseriesCommand",
     "MigrationPrepareCommand",
     "ModulesCommand",
cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py
@@ -1,4 +1,5 @@
+from .assets import MigrateAssetsCommand
 from .prepare import MigrationPrepareCommand
 from .timeseries import MigrateTimeseriesCommand

-__all__ = ["MigrateTimeseriesCommand", "MigrationPrepareCommand"]
+__all__ = ["MigrateAssetsCommand", "MigrateTimeseriesCommand", "MigrationPrepareCommand"]
cognite_toolkit/_cdf_tk/commands/_migrate/assets.py
@@ -0,0 +1,201 @@
+from collections.abc import Callable, Iterable
+from pathlib import Path
+
+from cognite.client.data_classes import Asset, Label, LabelDefinition
+from cognite.client.data_classes.capabilities import (
+    Capability,
+    DataModelInstancesAcl,
+    DataModelsAcl,
+    DataSetScope,
+    SpaceIDScope,
+    TimeSeriesAcl,
+)
+from cognite.client.data_classes.data_modeling import NodeApply, NodeOrEdgeData, ViewId
+from cognite.client.exceptions import CogniteAPIError, CogniteException
+from rich import print
+
+from cognite_toolkit._cdf_tk.client import ToolkitClient
+from cognite_toolkit._cdf_tk.client._constants import DATA_MODELING_MAX_WRITE_WORKERS
+from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
+from cognite_toolkit._cdf_tk.constants import DMS_INSTANCE_LIMIT_MARGIN
+from cognite_toolkit._cdf_tk.exceptions import (
+    AuthenticationError,
+    ResourceCreationError,
+    ResourceRetrievalError,
+    ToolkitMigrationError,
+    ToolkitValueError,
+)
+from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
+from cognite_toolkit._cdf_tk.utils import humanize_collection
+from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
+from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
+
+from .data_classes import MigrationMapping, MigrationMappingList
+from .data_model import MAPPING_VIEW_ID
+
+
+class MigrateAssetsCommand(ToolkitCommand):
+    cdf_cdm = "cdf_cdm"
+    asset_id = ViewId(cdf_cdm, "CogniteAsset", "v1")
+
+    # This is the number of assets that can be written in parallel.
+    chunk_size = 1000 * DATA_MODELING_MAX_WRITE_WORKERS
+
+    def migrate_assets(
+        self,
+        client: ToolkitClient,
+        mapping_file: Path,
+        dry_run: bool = False,
+        verbose: bool = False,
+    ) -> None:
+        """Migrate resources from Asset-Centric to data modeling in CDF."""
+        mappings = MigrationMappingList.read_mapping_file(mapping_file)
+        self._validate_access(client, mappings)
+        self._validate_migration_mappings_exists(client)
+        self._validate_available_capacity(client, mappings)
+        iteration_count = len(mappings) // self.chunk_size + 1
+        executor = ProducerWorkerExecutor[list[tuple[Asset, MigrationMapping]], list[NodeApply]](
+            download_iterable=self._download_assets(client, mappings),
+            process=self._as_cognite_assets,
+            write=self._upload_assets(client, dry_run=dry_run, verbose=verbose),
+            iteration_count=iteration_count,
+            max_queue_size=10,
+            download_description="Downloading assets",
+            process_description="Converting assets to CogniteAssets",
+            write_description="Uploading CogniteAssets",
+        )
+        executor.run()
+        if executor.error_occurred:
+            raise ResourceCreationError(executor.error_message)
+
+        prefix = "Would have" if dry_run else "Successfully"
+        self.console(f"{prefix} migrated {executor.total_items:,} assets to CogniteAssets.")
+
+    def _validate_access(self, client: ToolkitClient, mappings: MigrationMappingList) -> None:
+        required_capabilities: list[Capability] = [
+            DataModelsAcl(
+                actions=[DataModelsAcl.Action.Read], scope=SpaceIDScope([self.cdf_cdm, MAPPING_VIEW_ID.space])
+            ),
+            DataModelInstancesAcl(
+                actions=[
+                    DataModelInstancesAcl.Action.Read,
+                    DataModelInstancesAcl.Action.Write,
+                    DataModelInstancesAcl.Action.Write_Properties,
+                ],
+                scope=SpaceIDScope(list(mappings.spaces())),
+            ),
+        ]
+        if data_set_ids := mappings.get_data_set_ids():
+            required_capabilities.append(
+                TimeSeriesAcl(
+                    actions=[TimeSeriesAcl.Action.Read, TimeSeriesAcl.Action.Write],
+                    scope=DataSetScope(list(data_set_ids)),
+                )
+            )
+        if missing := client.iam.verify_capabilities(required_capabilities):
+            raise AuthenticationError(f"Missing required capabilities: {humanize_collection(missing)}.")
+
+    def _validate_migration_mappings_exists(self, client: ToolkitClient) -> None:
+        view = client.data_modeling.views.retrieve(MAPPING_VIEW_ID)
+        if not view:
+            raise ToolkitMigrationError(
+                f"The migration mapping view {MAPPING_VIEW_ID} does not exist. "
+                f"Please run the `cdf migrate prepare` command to deploy the migration data model."
+            )
+
+    def _validate_available_capacity(self, client: ToolkitClient, mappings: MigrationMappingList) -> None:
+        """Validate that the project has enough capacity to accommodate the migration."""
+        try:
+            stats = client.data_modeling.statistics.project()
+        except CogniteAPIError:
+            # This endpoint is not yet in alpha, it may change or not be available.
+            self.warn(HighSeverityWarning("Cannot check the instances capacity; proceeding with migration anyway."))
+            return
+        available_capacity = stats.instances.instances_limit - stats.instances.instances
+        available_capacity_after = available_capacity - len(mappings)
+
+        if available_capacity_after < DMS_INSTANCE_LIMIT_MARGIN:
+            raise ToolkitValueError(
+                "Cannot proceed with migration, not enough instance capacity available. Total capacity after migration "
+                f"would be {available_capacity_after:,} instances, which is less than the required margin of "
+                f"{DMS_INSTANCE_LIMIT_MARGIN:,} instances. Please increase the instance capacity in your CDF project"
+                f" or delete some existing instances before proceeding with the migration of {len(mappings):,} assets."
+            )
+        total_instances = stats.instances.instances + len(mappings)
+        self.console(
+            f"Project has enough capacity for migration. Total instances after migration: {total_instances:,}."
+        )
+
+    def _download_assets(
+        self, client: ToolkitClient, mappings: MigrationMappingList
+    ) -> Iterable[list[tuple[Asset, MigrationMapping]]]:
+        for chunk in chunker_sequence(mappings, self.chunk_size):
+            try:
+                asset_list = client.assets.retrieve_multiple(
+                    chunk.get_ids(), chunk.get_external_ids(), ignore_unknown_ids=True
+                )
+            except CogniteException as e:
+                raise ResourceRetrievalError(f"Failed to retrieve {len(chunk):,} assets: {e!s}") from e
+            mapping_by_id = chunk.as_mapping_by_id()
+            chunk_list: list[tuple[Asset, MigrationMapping]] = []
+            for asset in asset_list:
+                if asset.id in mapping_by_id:
+                    chunk_list.append((asset, mapping_by_id[asset.id]))
+                elif asset.external_id in mapping_by_id:
+                    chunk_list.append((asset, mapping_by_id[asset.external_id]))
+            yield chunk_list
+
+    def _as_cognite_assets(self, assets: list[tuple[Asset, MigrationMapping]]) -> list[NodeApply]:
+        """Convert Asset objects to CogniteAssetApply objects."""
+        return [self.as_cognite_asset(asset, mapping) for asset, mapping in assets]
+
+    @classmethod
+    def _upload_assets(cls, client: ToolkitClient, dry_run: bool, verbose: bool) -> Callable[[list[NodeApply]], None]:
+        def upload_assets(assets: list[NodeApply]) -> None:
+            if dry_run:
+                if verbose:
+                    print(f"Would have created {len(assets):,} CogniteAssets.")
+                return
+            try:
+                created = client.data_modeling.instances.apply_fast(assets)
+            except CogniteException as e:
+                raise ResourceCreationError(f"Failed to upsert CogniteAssets {len(assets):,}: {e!s}") from e
+            if verbose:
+                print(f"Created {len(created):,} CogniteAssets.")
+
+        return upload_assets
+
+    @classmethod
+    def as_cognite_asset(cls, asset: Asset, mapping: MigrationMapping) -> NodeApply:
+        tags: list[str] = []
+        for label in asset.labels or []:
+            if isinstance(label, str):
+                tags.append(label)
+            elif isinstance(label, dict) and "externalId" in label:
+                tags.append(label["externalId"])
+            elif isinstance(label, Label | LabelDefinition) and label.external_id:
+                tags.append(label.external_id)
+
+        return NodeApply(
+            space=mapping.instance_id.space,
+            external_id=mapping.instance_id.external_id,
+            sources=[
+                NodeOrEdgeData(
+                    source=cls.asset_id,
+                    properties={
+                        "name": asset.name,
+                        "description": asset.description,
+                        "tags": tags or None,
+                    },
+                ),
+                NodeOrEdgeData(
+                    source=MAPPING_VIEW_ID,
+                    properties={
+                        "resourceType": "asset",
+                        "id": asset.id,
+                        "dataSetId": asset.data_set_id,
+                        "classicExternalId": asset.external_id,
+                    },
+                ),
+            ],
+        )
cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py
@@ -1,5 +1,6 @@
 import csv
 import sys
+from abc import abstractmethod
 from collections.abc import Collection, Iterator, Sequence
 from dataclasses import dataclass
 from pathlib import Path
@@ -24,18 +25,28 @@ class MigrationMapping:
     resource_type: str
     instance_id: NodeId

+    @abstractmethod
+    def get_id(self) -> int | str:
+        raise NotImplementedError()
+

 @dataclass
 class IdMigrationMapping(MigrationMapping):
     id: int
     data_set_id: int | None = None

+    def get_id(self) -> int:
+        return self.id
+

 @dataclass
 class ExternalIdMigrationMapping(MigrationMapping):
     external_id: str
     data_set_id: int | None = None

+    def get_id(self) -> str:
+        return self.external_id
+

 class MigrationMappingList(list, Sequence[MigrationMapping]):
     # Implemented to get correct type hints
@@ -90,6 +101,10 @@ class MigrationMappingList(list, Sequence[MigrationMapping]):
             if isinstance(mapping, IdMigrationMapping | ExternalIdMigrationMapping) and mapping.data_set_id is not None
         }

+    def as_mapping_by_id(self) -> dict[int | str, MigrationMapping]:
+        """Return a mapping of IDs to MigrationMapping objects."""
+        return {mapping.get_id(): mapping for mapping in self}
+
    @classmethod
    def read_mapping_file(cls, mapping_file: Path) -> Self:
        if not mapping_file.exists():
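For reference, a sketch of consuming a mapping file with the new helper (illustrative values; the column layout follows the [id/externalId, dataSetId, space, externalId] description in the migrate app, and the exact header row is an assumption):

    from pathlib import Path

    # mapping.csv (illustrative):
    #   id,dataSetId,space,externalId
    #   42,777,my_space,pump-42
    #   43,,my_space,pump-43
    mappings = MigrationMappingList.read_mapping_file(Path("mapping.csv"))
    by_id = mappings.as_mapping_by_id()
    # {42: IdMigrationMapping(...), 43: IdMigrationMapping(...)}; external-ID rows key by string.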
cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py
@@ -66,6 +66,8 @@ MAPPING_VIEW = dm.ViewApply(
     },
 )

+MAPPING_VIEW_ID = MAPPING_VIEW.as_id()
+
 COGNITE_MIGRATION_MODEL = dm.DataModelApply(
     space=SPACE.space,
     external_id="CogniteMigration",
cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py
@@ -25,7 +25,7 @@ class MigrationPrepareCommand(ToolkitCommand):
         verb = "Would deploy" if dry_run else "Deploying"
         print(f"{verb} {MODEL_ID!r}")
         results = DeployResults([], "deploy", dry_run=dry_run)
-        for loader_cls, resources in [
+        for loader_cls, resource_list in [
             (SpaceLoader, [SPACE]),
             (ContainerLoader, [MAPPING_CONTAINER]),
             (ViewLoader, [MAPPING_VIEW]),
@@ -35,23 +35,15 @@ class MigrationPrepareCommand(ToolkitCommand):
             loader = loader_cls.create_loader(client)  # type: ignore[attr-defined]
             worker = ResourceWorker(loader)
             # MyPy does not understand that `loader` has a `get_id` method.
-            local_by_id = {loader.get_id(item): (item.dump(), item) for item in resources}  # type: ignore[attr-defined]
+            local_by_id = {loader.get_id(item): (item.dump(), item) for item in resource_list}  # type: ignore[attr-defined]
             worker.validate_access(local_by_id, is_dry_run=dry_run)
             cdf_resources = loader.retrieve(list(local_by_id.keys()))
-            to_create, to_update, to_delete, unchanged = worker.categorize_resources(
-                local_by_id, cdf_resources, False, verbose
-            )
+            resources = worker.categorize_resources(local_by_id, cdf_resources, False, verbose)

             if dry_run:
-                result = deploy_cmd.dry_run_deploy(to_create, to_update, to_delete, unchanged, loader, False, False)
+                result = deploy_cmd.dry_run_deploy(resources, loader, False, False)
             else:
-                result = deploy_cmd.actual_deploy(
-                    to_create,
-                    to_update,
-                    to_delete,
-                    unchanged,
-                    loader,
-                )
+                result = deploy_cmd.actual_deploy(resources, loader)
             if result:
                 results[result.name] = result
         if results.has_counts:
cognite_toolkit/_cdf_tk/commands/deploy.py
@@ -45,6 +45,7 @@ from cognite_toolkit._cdf_tk.loaders import (
     ResourceWorker,
 )
 from cognite_toolkit._cdf_tk.loaders._base_loaders import T_WritableCogniteResourceList
+from cognite_toolkit._cdf_tk.loaders._worker import CategorizedResources
 from cognite_toolkit._cdf_tk.tk_warnings import EnvironmentVariableMissingWarning
 from cognite_toolkit._cdf_tk.tk_warnings.base import WarningList, catch_warnings
 from cognite_toolkit._cdf_tk.tk_warnings.other import (
@@ -261,7 +262,7 @@ class DeployCommand(ToolkitCommand):
             return None

         with catch_warnings(EnvironmentVariableMissingWarning) as env_var_warnings:
-            to_create, to_update, to_delete, unchanged = worker.prepare_resources(
+            resources = worker.prepare_resources(
                 files,
                 environment_variables=env_vars.dump(include_os=True),
                 is_dry_run=dry_run,
@@ -274,7 +275,7 @@ class DeployCommand(ToolkitCommand):

         # We are not counting to_delete as these are captured by to_create.
         # (to_delete is used for resources that do not support update and instead need to be deleted and recreated)
-        nr_of_items = len(to_create) + len(to_update) + len(unchanged)
+        nr_of_items = len(resources.to_create) + len(resources.to_update) + len(resources.unchanged)
         if nr_of_items == 0:
             return ResourceDeployResult(name=loader.display_name)
@@ -288,19 +289,16 @@ class DeployCommand(ToolkitCommand):

         if dry_run:
             result = self.dry_run_deploy(
-                to_create,
-                to_update,
-                to_delete,
-                unchanged,
+                resources,
                 loader,
                 has_done_drop,
                 has_dropped_data,
             )
         else:
-            result = self.actual_deploy(to_create, to_update, to_delete, unchanged, loader, env_var_warnings)
+            result = self.actual_deploy(resources, loader, env_var_warnings)

         if verbose:
-            self._verbose_print(to_create, to_update, unchanged, loader, dry_run)
+            self._verbose_print(resources, loader, dry_run)

         if isinstance(loader, ResourceContainerLoader):
             return ResourceContainerDeployResult.from_resource_deploy_result(
@@ -311,10 +309,7 @@ class DeployCommand(ToolkitCommand):

     def actual_deploy(
         self,
-        to_create: T_CogniteResourceList,
-        to_update: T_CogniteResourceList,
-        to_delete: list[T_ID],
-        unchanged: T_CogniteResourceList,
+        resources: CategorizedResources[T_ID, T_CogniteResourceList],
         loader: ResourceLoader[
             T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
         ],
@@ -326,16 +321,16 @@ class DeployCommand(ToolkitCommand):
             if isinstance(warning, EnvironmentVariableMissingWarning)
             for identifier in warning.identifiers or []
         }
-        nr_of_unchanged = len(unchanged)
+        nr_of_unchanged = len(resources.unchanged)
         nr_of_deleted, nr_of_created, nr_of_changed = 0, 0, 0
-        if to_delete:
-            deleted = loader.delete(to_delete)
+        if resources.to_delete:
+            deleted = loader.delete(resources.to_delete)
             nr_of_deleted += deleted
-        if to_create:
-            created = self._create_resources(to_create, loader, environment_variable_warning_by_id)
+        if resources.to_create:
+            created = self._create_resources(resources.to_create, loader, environment_variable_warning_by_id)
             nr_of_created += created
-        if to_update:
-            updated = self._update_resources(to_update, loader, environment_variable_warning_by_id)
+        if resources.to_update:
+            updated = self._update_resources(resources.to_update, loader, environment_variable_warning_by_id)
             nr_of_changed += updated
         return ResourceDeployResult(
             name=loader.display_name,
@@ -348,10 +343,7 @@ class DeployCommand(ToolkitCommand):

     @staticmethod
     def dry_run_deploy(
-        to_create: T_CogniteResourceList,
-        to_update: T_CogniteResourceList,
-        to_delete: list[T_ID],
-        unchanged: T_CogniteResourceList,
+        resources: CategorizedResources[T_ID, T_CogniteResourceList],
         loader: ResourceLoader[
             T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
         ],
@@ -364,39 +356,40 @@ class DeployCommand(ToolkitCommand):
             and (not isinstance(loader, ResourceContainerLoader) or has_dropped_data)
         ):
             # Means the resources will be deleted and not left unchanged or changed
-            for item in unchanged:
+            for item in resources.unchanged:
                 # We cannot use extend as LoadableNodes cannot be extended.
-                to_create.append(item)
-            for item in to_update:
-                to_create.append(item)
-            unchanged.clear()
-            to_update.clear()
+                resources.to_create.append(item)
+            for item in resources.to_update:
+                resources.to_create.append(item)
+            resources.unchanged.clear()
+            resources.to_update.clear()
         return ResourceDeployResult(
             name=loader.display_name,
-            created=len(to_create),
-            deleted=len(to_delete),
-            changed=len(to_update),
-            unchanged=len(unchanged),
-            total=len(to_create) + len(to_delete) + len(to_update) + len(unchanged),
+            created=len(resources.to_create),
+            deleted=len(resources.to_delete),
+            changed=len(resources.to_update),
+            unchanged=len(resources.unchanged),
+            total=len(resources.to_create)
+            + len(resources.to_delete)
+            + len(resources.to_update)
+            + len(resources.unchanged),
         )

     @staticmethod
     def _verbose_print(
-        to_create: T_CogniteResourceList,
-        to_update: T_CogniteResourceList,
-        unchanged: T_CogniteResourceList,
+        resources: CategorizedResources[T_ID, T_CogniteResourceList],
         loader: ResourceLoader,
         dry_run: bool,
     ) -> None:
         print_outs = []
         prefix = "Would have " if dry_run else ""
-        if to_create:
-            print_outs.append(f"{prefix}Created {_print_ids_or_length(loader.get_ids(to_create), limit=20)}")
-        if to_update:
-            print_outs.append(f"{prefix}Updated {_print_ids_or_length(loader.get_ids(to_update), limit=20)}")
-        if unchanged:
+        if resources.to_create:
+            print_outs.append(f"{prefix}Created {_print_ids_or_length(loader.get_ids(resources.to_create), limit=20)}")
+        if resources.to_update:
+            print_outs.append(f"{prefix}Updated {_print_ids_or_length(loader.get_ids(resources.to_update), limit=20)}")
+        if resources.unchanged:
             print_outs.append(
-                f"{'Untouched' if dry_run else 'Unchanged'} {_print_ids_or_length(loader.get_ids(unchanged), limit=5)}"
+                f"{'Untouched' if dry_run else 'Unchanged'} {_print_ids_or_length(loader.get_ids(resources.unchanged), limit=5)}"
             )
         prefix_message = f" {loader.display_name}: "
         if len(print_outs) == 1:
cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py
@@ -46,6 +46,7 @@ from cognite_toolkit._cdf_tk.utils import (
     calculate_secure_hash,
 )
 from cognite_toolkit._cdf_tk.utils.cdf import read_auth, try_find_error
+from cognite_toolkit._cdf_tk.utils.text import suffix_description

 from .auth_loaders import GroupAllScopedLoader
 from .data_organization_loaders import DataSetsLoader
@@ -464,20 +465,15 @@ class FunctionScheduleLoader(
             )
             self.authentication_by_id[identifier] = credentials
             auth_hash = calculate_secure_hash(credentials.dump(camel_case=True), shorten=True)
-            extra_str = f" {self._hash_key}: {auth_hash}"
-            if "description" not in resource:
-                resource["description"] = extra_str[1:]
-            elif resource["description"].endswith(extra_str[1:]):
-                # The hash is already in the description
-                ...
-            elif len(resource["description"]) + len(extra_str) < self._description_character_limit:
-                resource["description"] += f"{extra_str}"
-            else:
-                LowSeverityWarning(f"Description is too long for schedule {identifier!r}. Truncating...").print_warning(
-                    console=self.console
-                )
-                truncation = self._description_character_limit - len(extra_str) - 3
-                resource["description"] = f"{resource['description'][:truncation]}...{extra_str}"
+            extra_str = f"{self._hash_key}: {auth_hash}"
+            resource["description"] = suffix_description(
+                extra_str,
+                resource.get("description"),
+                self._description_character_limit,
+                self.get_id(resource),
+                self.display_name,
+                self.console,
+            )
         return resources

     def load_resource(self, resource: dict[str, Any], is_dry_run: bool = False) -> FunctionScheduleWrite:
cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py
@@ -1,5 +1,7 @@
+import json
 from collections.abc import Hashable, Iterable, Sequence
 from functools import lru_cache
+from pathlib import Path
 from typing import Any, cast, final

 from cognite.client.data_classes import (
@@ -28,7 +30,9 @@ from cognite_toolkit._cdf_tk.exceptions import (
 )
 from cognite_toolkit._cdf_tk.loaders._base_loaders import ResourceContainerLoader, ResourceLoader
 from cognite_toolkit._cdf_tk.resource_classes import TimeSeriesYAML
+from cognite_toolkit._cdf_tk.utils import calculate_hash
 from cognite_toolkit._cdf_tk.utils.diff_list import diff_list_hashable, diff_list_identifiable, dm_identifier
+from cognite_toolkit._cdf_tk.utils.text import suffix_description

 from .auth_loaders import GroupAllScopedLoader, SecurityCategoryLoader
 from .classic_loaders import AssetLoader
@@ -216,6 +220,9 @@ class DatapointSubscriptionLoader(
         }
     )

+    _hash_key = "cdf-hash"
+    _description_character_limit = 1000
+
     @property
     def display_name(self) -> str:
         return "timeseries subscriptions"
@@ -325,25 +332,58 @@ class DatapointSubscriptionLoader(
     ) -> Iterable[DatapointSubscription]:
         return iter(self.client.time_series.subscriptions)

+    def load_resource_file(
+        self, filepath: Path, environment_variables: dict[str, str | None] | None = None
+    ) -> list[dict[str, Any]]:
+        resources = super().load_resource_file(filepath, environment_variables)
+        for resource in resources:
+            if "timeSeriesIds" not in resource and "instanceIds" not in resource:
+                continue
+            # If timeSeriesIds or instanceIds is set, we add a hash of them to the description
+            # so that we can detect if the subscription has changed.
+            content: dict[str, object] = {}
+            if "timeSeriesIds" in resource:
+                content["timeSeriesIds"] = resource["timeSeriesIds"]
+            if "instanceIds" in resource:
+                content["instanceIds"] = resource["instanceIds"]
+            timeseries_hash = calculate_hash(json.dumps(content), shorten=True)
+            extra_str = f"{self._hash_key}: {timeseries_hash}"
+            resource["description"] = suffix_description(
+                extra_str,
+                resource.get("description"),
+                self._description_character_limit,
+                self.get_id(resource),
+                self.display_name,
+                self.console,
+            )
+
+        return resources
+
     def load_resource(self, resource: dict[str, Any], is_dry_run: bool = False) -> DataPointSubscriptionWrite:
         if ds_external_id := resource.pop("dataSetExternalId", None):
             resource["dataSetId"] = self.client.lookup.data_sets.id(ds_external_id, is_dry_run)
         return DataPointSubscriptionWrite._load(resource)

     def dump_resource(self, resource: DatapointSubscription, local: dict[str, Any] | None = None) -> dict[str, Any]:
-        dumped = resource.as_write().dump()
+        if resource.filter is not None:
+            dumped = resource.as_write().dump()
+        else:
+            # If filter is not set, the subscription uses explicit timeSeriesIds, which are not returned in the
+            # response. Calling .as_write() in this case raises ValueError because either filter or
+            # timeSeriesIds must be set.
+            dumped = resource.dump()
+            for server_prop in ("createdTime", "lastUpdatedTime", "timeSeriesCount"):
+                dumped.pop(server_prop, None)
         local = local or {}
         if data_set_id := dumped.pop("dataSetId", None):
             dumped["dataSetExternalId"] = self.client.lookup.data_sets.external_id(data_set_id)
-        if "timeSeriesIds" not in dumped:
-            return dumped
-        # Sorting the timeSeriesIds in the same order as the local file.
-        ts_order_by_id = {ts_id: no for no, ts_id in enumerate(local.get("timeSeriesIds", []))}
-        end_of_list = len(ts_order_by_id)
-        dumped["timeSeriesIds"] = sorted(
-            dumped["timeSeriesIds"], key=lambda ts_id: ts_order_by_id.get(ts_id, end_of_list)
-        )
+        # timeSeriesIds and instanceIds are not returned in the response, so we need to add them
+        # to the dumped resource if they are set in the local resource. If there is a discrepancy between
+        # the local and dumped resource, the hash added to the description will change.
+        if "timeSeriesIds" in local:
+            dumped["timeSeriesIds"] = local["timeSeriesIds"]
+        if "instanceIds" in local:
+            dumped["instanceIds"] = local["instanceIds"]
         return dumped

     def diff_list(
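The content hash that drives this change detection can be reproduced in isolation; a sketch using hashlib in place of the internal calculate_hash helper, whose exact digest format is an assumption here:

    import hashlib
    import json

    content = {"timeSeriesIds": ["ts-a", "ts-b"]}
    # Assumed to behave roughly like calculate_hash(json.dumps(content), shorten=True):
    digest = hashlib.sha256(json.dumps(content).encode()).hexdigest()[:8]
    description = f"My subscription cdf-hash: {digest}"
    # If the local timeSeriesIds change, the digest and hence the description change,
    # so the worker sees cdf_dict != local_dict and schedules an update.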
cognite_toolkit/_cdf_tk/loaders/_worker.py
@@ -4,6 +4,7 @@ import re
 import warnings
 from collections.abc import Hashable
 from copy import deepcopy
+from dataclasses import dataclass
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Generic, cast

@@ -30,6 +31,14 @@ if TYPE_CHECKING:
     from cognite_toolkit._cdf_tk.data_classes._module_directories import ReadModule


+@dataclass
+class CategorizedResources(Generic[T_ID, T_CogniteResourceList]):
+    to_create: T_CogniteResourceList
+    to_update: T_CogniteResourceList
+    to_delete: list[T_ID]
+    unchanged: T_CogniteResourceList
+
+
 class ResourceWorker(
     Generic[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList]
 ):
@@ -75,7 +84,7 @@ class ResourceWorker(
         is_dry_run: bool = False,
         force_update: bool = False,
         verbose: bool = False,
-    ) -> tuple[T_CogniteResourceList, T_CogniteResourceList, list[T_ID], T_CogniteResourceList]:
+    ) -> CategorizedResources:
         """Prepare resources for deployment by loading them from files, validating access, and categorizing them into create, update, delete, and unchanged lists.

         Args:
@@ -86,12 +95,7 @@ class ResourceWorker(
             verbose: Whether to print detailed information about the resources being processed.

         Returns:
-            A tuple containing:
-            - to_create: List of resources to create.
-            - to_update: List of resources to update.
-            - to_delete: List of resource IDs to delete.
-            - unchanged: List of resources that are unchanged.
-
+            CategorizedResources: A categorized list of resources to create, update, delete, and unchanged.
         """
         local_by_id = self.load_resources(filepaths, environment_variables, is_dry_run)

@@ -160,31 +164,28 @@ class ResourceWorker(
         cdf_resources: T_WritableCogniteResourceList,
         force_update: bool,
         verbose: bool,
-    ) -> tuple[T_CogniteResourceList, T_CogniteResourceList, list[T_ID], T_CogniteResourceList]:
-        to_create: T_CogniteResourceList
-        to_update: T_CogniteResourceList
-        to_delete: list[T_ID] = []
-        unchanged: T_CogniteResourceList
-        to_create, to_update, unchanged = (
-            self.loader.list_write_cls([]),
-            self.loader.list_write_cls([]),
-            self.loader.list_write_cls([]),
+    ) -> CategorizedResources:
+        resources: CategorizedResources[T_ID, T_CogniteResourceList] = CategorizedResources(
+            to_create=self.loader.list_write_cls([]),
+            to_update=self.loader.list_write_cls([]),
+            to_delete=[],
+            unchanged=self.loader.list_write_cls([]),
         )
         cdf_resource_by_id = {self.loader.get_id(resource): resource for resource in cdf_resources}
         for identifier, (local_dict, local_resource) in local_by_id.items():
             cdf_resource = cdf_resource_by_id.get(identifier)
             if cdf_resource is None:
-                to_create.append(local_resource)
+                resources.to_create.append(local_resource)
                 continue
             cdf_dict = self.loader.dump_resource(cdf_resource, local_dict)
             if not force_update and cdf_dict == local_dict:
-                unchanged.append(local_resource)
+                resources.unchanged.append(local_resource)
                 continue
             if self.loader.support_update:
-                to_update.append(local_resource)
+                resources.to_update.append(local_resource)
             else:
-                to_delete.append(identifier)
-                to_create.append(local_resource)
+                resources.to_delete.append(identifier)
+                resources.to_create.append(local_resource)
             if verbose:
                 diff_str = "\n".join(to_diff(cdf_dict, local_dict))
                 for sensitive in self.loader.sensitive_strings(local_resource):
@@ -196,4 +197,4 @@ class ResourceWorker(
                     expand=False,
                 )
             )
-        return to_create, to_update, to_delete, unchanged
+        return resources
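The net effect of this refactor is that callers consume one object instead of a positional four-tuple; a minimal sketch of the calling convention before and after:

    # Before (0.5.63): positional unpacking, easy to get the order wrong.
    # to_create, to_update, to_delete, unchanged = worker.prepare_resources(files)

    # After (0.5.65): named fields on a single dataclass.
    resources = worker.prepare_resources(files)
    print(len(resources.to_create), len(resources.to_update),
          len(resources.to_delete), len(resources.unchanged))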
cognite_toolkit/_cdf_tk/resource_classes/workflow.py
@@ -0,0 +1,15 @@
+from cognite.client.data_classes import WorkflowUpsert
+from pydantic import Field
+
+from .base import ToolkitResource
+
+
+class WorkflowYAML(ToolkitResource):
+    _cdf_resource = WorkflowUpsert
+    external_id: str = Field(
+        max_length=255,
+        description="Identifier for a workflow. Must be unique for the project."
+        " No trailing or leading whitespace and no null characters allowed.",
+    )
+    description: str | None = Field(None, max_length=500)
+    data_set_external_id: str | None = None
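A sketch of validating a workflow resource with the new class (the pydantic Field usage suggests ToolkitResource is a pydantic model, which is assumed here; values are illustrative):

    from cognite_toolkit._cdf_tk.resource_classes.workflow import WorkflowYAML

    wf = WorkflowYAML(
        external_id="my-workflow",                  # <= 255 chars, unique per project
        description="Nightly contextualization",   # optional, <= 500 chars
        data_set_external_id="ds-pipelines",
    )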
cognite_toolkit/_cdf_tk/utils/text.py
@@ -0,0 +1,42 @@
+from collections.abc import Hashable
+
+from rich.console import Console
+
+from cognite_toolkit._cdf_tk.tk_warnings import LowSeverityWarning
+
+
+def suffix_description(
+    suffix: str,
+    description: str | None,
+    description_character_limit: int,
+    identifier: Hashable,
+    resource_type: str,
+    console: Console | None = None,
+) -> str:
+    """Appends a suffix to a description if it is not already present.
+
+    If the description is too long after appending the suffix, it will be truncated to fit within the character limit.
+
+    Args:
+        suffix: The suffix to append to the description.
+        description: The original description to which the suffix will be appended.
+        description_character_limit: The maximum number of characters allowed in the description after appending the suffix.
+        identifier: Hashable identifier for the resource, used in warnings.
+        resource_type: Type of the resource, used in warnings.
+        console: Console object for printing warnings.
+
+    Returns:
+        str: The modified description with the suffix appended, or truncated if necessary.
+    """
+    if description is None or description == "":
+        return suffix
+    elif description.endswith(suffix):
+        # The suffix is already in the description
+        return description
+    elif len(description) + len(suffix) + 1 < description_character_limit:
+        return f"{description} {suffix}"
+    else:
+        LowSeverityWarning(f"Description is too long for {resource_type} {identifier!r}. Truncating...").print_warning(
+            console=console
+        )
+        truncation = description_character_limit - len(suffix) - 3
+        return f"{description[:truncation]}...{suffix}"
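The helper's behavior at the boundary, as a worked example (values chosen for illustration):

    desc = suffix_description("cdf-hash: abc123", "Scheduled hourly", 1000, "sched-1", "function schedules")
    assert desc == "Scheduled hourly cdf-hash: abc123"

    # With a 30-character limit the description is truncated to make room:
    # truncation = 30 - len("cdf-hash: abc123") - 3 = 11 characters kept,
    # giving "Scheduled h...cdf-hash: abc123" (exactly 30 characters).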
cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml
@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.5.63
+      image: cognite/toolkit:0.5.65
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml
@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
     container:
-      image: cognite/toolkit:0.5.63
+      image: cognite/toolkit:0.5.65
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_version.py
@@ -1 +1 @@
-__version__ = "0.5.63"
+__version__ = "0.5.65"
{cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.5.63
+Version: 0.5.65
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
{cognite_toolkit-0.5.63.dist-info → cognite_toolkit-0.5.65.dist-info}/RECORD
@@ -1,10 +1,10 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=WWMslI-y2VbIYDMH19wnINebGwlOvAeYr-qkPRC1f68,5834
-cognite_toolkit/_version.py,sha256=NZyB_3d5nSrSdmUTR1qURqDAEzDzElqs5VAFoKfX_F8,23
+cognite_toolkit/_version.py,sha256=uPR7tuL4mJMppx_mxnT2t-XCK6LTKMl7XrNVV0IoCLM,23
 cognite_toolkit/config.dev.yaml,sha256=CIDmi1OGNOJ-70h2BNCozZRmhvU5BfpZoh6Q04b8iMs,109
 cognite_toolkit/_builtin_modules/README.md,sha256=roU3G05E6ogP5yhw4hdIvVDKV831zCh2pzt9BVddtBg,307
 cognite_toolkit/_builtin_modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognite_toolkit/_builtin_modules/cdf.toml,sha256=kjIlSaM6fJ5C09gRiWydBtFY7C2IuoHBUMue1zv7y_I,273
+cognite_toolkit/_builtin_modules/cdf.toml,sha256=LGJ7fwBn4yuwF9I5hjQeBFhQ-B6cPwxxW_sUsYKBPJw,273
 cognite_toolkit/_builtin_modules/packages.toml,sha256=RdY44Sxvh6sUtAkgp1dHID1mtqkOTzP_rbZL2Q27fYw,1147
 cognite_toolkit/_builtin_modules/bootcamp/README.md,sha256=iTVqoy3PLpC-xPi5pbuMIAEHILBSfWTGLexwa1AltpY,211
 cognite_toolkit/_builtin_modules/bootcamp/default.config.yaml,sha256=MqYTcRiz03bow4LT8E3jumnd_BsqC5SvjgYOVVkHGE0,93
@@ -499,7 +499,7 @@ cognite_toolkit/_cdf_tk/apps/_auth_app.py,sha256=ER7uYb3ViwsHMXiQEZpyhwU6TIjKaB9
 cognite_toolkit/_cdf_tk/apps/_core_app.py,sha256=-4ABeNtC0cxw7XvCRouPzTvlmqsS0NRR-jLgMGadW2I,13712
 cognite_toolkit/_cdf_tk/apps/_dump_app.py,sha256=UXmB8oFwVLOmxJBlxxLIBMLPCLwdgyaFfuG6Ex-GZh4,25608
 cognite_toolkit/_cdf_tk/apps/_landing_app.py,sha256=v4t2ryxzFre7y9IkEPIDwmyJDO8VDIIv6hIcft5TjpQ,422
-cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=GRsOlqYAWB0rsZsdTJTGfjPm1OkbUq7xBrM4pzQRKoY,3708
+cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=ifzl63MBjxV3gdf3h7dztGLf8VbuXHeRnWG-p4gBCGE,5299
 cognite_toolkit/_cdf_tk/apps/_modules_app.py,sha256=tjCP-QbuPYd7iw6dkxnhrrWf514Lr25_oVgSJyJcaL8,6642
 cognite_toolkit/_cdf_tk/apps/_populate_app.py,sha256=PGUqK_USOqdPCDvUJI-4ne9TN6EssC33pUbEeCmiLPg,2805
 cognite_toolkit/_cdf_tk/apps/_profile_app.py,sha256=TaKTOgkd538QyIWBRdAILJ-TotBxYreZgWBqK4yrebQ,2562
@@ -524,7 +524,7 @@ cognite_toolkit/_cdf_tk/client/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 cognite_toolkit/_cdf_tk/client/api/dml.py,sha256=8b1lo86JdvfEsz9mP2rx0Mp9fyWsU6mbXHqLBtvSidU,3546
 cognite_toolkit/_cdf_tk/client/api/extended_data_modeling.py,sha256=V9a-Ep_xlxd9KQN1D15GOLmg6KrSADciPWlmgAJuaBY,10481
 cognite_toolkit/_cdf_tk/client/api/extended_raw.py,sha256=9DVbM2aWmIyzbaW-lh10_pzVYJUEQFnIKnxvt413Bjk,2118
-cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py,sha256=8GsMDq3F9tb_bR9c6yViRErjf0AsUJZbrZx9JTQZvXY,15150
+cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py,sha256=wi4EzOa6Pup_YKcQ3b3SMvKminJ1kee9-dgYOLwqSQc,17804
 cognite_toolkit/_cdf_tk/client/api/fixed_transformations.py,sha256=MEC25h_J_fm3wHa-rVJi2V_b5D1Fn3wNapCmIGU2faU,5625
 cognite_toolkit/_cdf_tk/client/api/location_filters.py,sha256=kUe58xzt2iYdztleep8ocsdE17lOWgqbPjELJnfhoHU,3188
 cognite_toolkit/_cdf_tk/client/api/lookup.py,sha256=E4YEkk8TzpWQ2v_NacISgXyA4xoelmayqSwaq4vykdk,12250
@@ -562,7 +562,7 @@ cognite_toolkit/_cdf_tk/client/data_classes/streamlit_.py,sha256=OGoMQ_K88F9vSZu
 cognite_toolkit/_cdf_tk/client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/client/utils/_concurrency.py,sha256=z6gqFv-kw80DsEpbaR7sI0-_WvZdOdAsR4VoFvTqvyU,1309
 cognite_toolkit/_cdf_tk/client/utils/_http_client.py,sha256=oXNKrIaizG4WiSAhL_kSCHAuL4aaaEhCU4pOJGxh6Xs,483
-cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=6rUv97s6CB5Fje6eg2X3cd9Za9rYJY51xUcPk_RoJT8,1261
+cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=2oQWO2QuSBsXtaLjjKrbmpH909R6gUlVfL55GuyyHwY,1311
 cognite_toolkit/_cdf_tk/commands/_base.py,sha256=3Zc3ffR8mjZ1eV7WrC-Y1sYmyMzdbbJDDmsiKEMEJwo,2480
 cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=3bR_C8p02IW6apexwAAoXuneBM4RcUGdX6Hw_Rtx7Kg,24775
 cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=6nezoDrw3AkF8hANHjUILgTj_nbdzgT0siweaKI35Fk,1047
@@ -575,7 +575,7 @@ cognite_toolkit/_cdf_tk/commands/auth.py,sha256=T6hb90PnlRiTkhihEUvLCbNFyt1_4ML3
 cognite_toolkit/_cdf_tk/commands/build_cmd.py,sha256=Za0hYNlSE8yMuAdczKYtTgvcqE6DIbCGve1E2OfXekI,30474
 cognite_toolkit/_cdf_tk/commands/clean.py,sha256=qKHrhkjzerC-oQgkZ_61n7vZw11fxLJsJHILyd9Z5UQ,14418
 cognite_toolkit/_cdf_tk/commands/collect.py,sha256=zBMKhhvjOpuASMnwP0eeHRI02tANcvFEZgv0CQO1ECc,627
-cognite_toolkit/_cdf_tk/commands/deploy.py,sha256=g_mceI5Hq55uz9p5WC7yPVNPR6MBhIMTcBt9G_cWkJQ,19208
+cognite_toolkit/_cdf_tk/commands/deploy.py,sha256=hXviaYPJdx4vxkNb-KbgHydBznfc9LuHxUyW-3M21qI,19218
 cognite_toolkit/_cdf_tk/commands/dump_data.py,sha256=U_e-fEAEphpkJMlDTHQvQ1F0k3qEMvd0m7zc20XvcQY,21668
 cognite_toolkit/_cdf_tk/commands/dump_resource.py,sha256=Dt8jlkmtpRtzPDMEjKdpOJPFr92k7Mw-BWkRsE9CJ8s,20515
 cognite_toolkit/_cdf_tk/commands/featureflag.py,sha256=VPz7FrjVQFqjkz8BYTP2Np3k7BTLFMq_eooNSqmb2ms,1034
@@ -584,10 +584,11 @@ cognite_toolkit/_cdf_tk/commands/modules.py,sha256=lYImbi7eX07j2lbE_8xJ5uix9xa2l
 cognite_toolkit/_cdf_tk/commands/pull.py,sha256=t7KQCxpoFDNBWTYPohK7chrRzPyAOGVmfaY7iBLnTqM,39286
 cognite_toolkit/_cdf_tk/commands/repo.py,sha256=vQfLMTzSnI4w6eYCQuMnZ_xXVAVjyLnST4Tmu2zgNfE,3874
 cognite_toolkit/_cdf_tk/commands/run.py,sha256=88AkfCdS4gXHA4I5ZhdU3HWWA5reOTGbfaauM-Yvp8o,37407
-cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=jV7zzYhBxQWGpAWCu-dJ9QxUHp3DBwz2KEQihGb2UuI,161
-cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=uONZwIkdD7pcllzu4VT0gHar5x5qpdMayAaQFI4kozQ,6338
-cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=y7Fc_bZvavcNTS9LxwiiMnYIDkGOmMXpbm8hTnAjkYw,2593
-cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=65oSmngUIDMGzKmwXDLft1ITPiA8COEuIcTnTafo5cg,2511
+cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=E36hrJ71Wfm9y8k3qLXTo-cy2SCxtSy9bH32MixMtm8,226
+cognite_toolkit/_cdf_tk/commands/_migrate/assets.py,sha256=o9kbOLn0WaptXBpLWmlndBY5Wu5AeQsnUvV_lGBzcqY,9296
+cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=tj0IwrW8Tk0U_1ZCte5pbC84Lt95DLe6aVWdhLaI7EA,6776
+cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=pmpHqcv5bOW_lYhG-JfqfesTiMWGSH8dXzEVPbSkDXk,2633
+cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=vtJjmNEVDiYTNajRry1oCqG_yBlHqq00M_NuInvQGZ0,2271
 cognite_toolkit/_cdf_tk/commands/_migrate/timeseries.py,sha256=noPHSkKKaziI3X0KZLpZY_xjC5yejRX2d9TJgTtyZCo,9659
 cognite_toolkit/_cdf_tk/data_classes/__init__.py,sha256=Z7ODYLcqrRpo0Cmfx79DDhsA6eEK4hvNST_Qko1vRv0,1645
 cognite_toolkit/_cdf_tk/data_classes/_base.py,sha256=qyXObVP1SX5Lzqy8cYBZssV9NL3v0Q0-y-pLNF1Ok1I,2652
@@ -605,7 +606,7 @@ cognite_toolkit/_cdf_tk/data_classes/_yaml_comments.py,sha256=zfuDu9aAsb1ExeZBAJ
 cognite_toolkit/_cdf_tk/loaders/__init__.py,sha256=9giALvw48KIry7WWdCUxA1AvlVFCAR0bOJ5tKAhy-Lk,6241
 cognite_toolkit/_cdf_tk/loaders/_base_loaders.py,sha256=sF9D7ImyHmjbLBGVM66D2xSmOj8XnG3LmDqlQQZRarQ,20502
 cognite_toolkit/_cdf_tk/loaders/_data_loaders.py,sha256=GHFylB-LwpYdOHI_hwWPL68TMO3D99iFOzT-oAPZFLc,9190
-cognite_toolkit/_cdf_tk/loaders/_worker.py,sha256=xenPKmnx2xbwIzm-KVwSjMTxWoni1_NO4xz7mfDs-EY,9472
+cognite_toolkit/_cdf_tk/loaders/_worker.py,sha256=E1y_xFzsb3mSZiSJJggvRLB6tT-0xBoKlqpa7UaX5us,9385
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/__init__.py,sha256=d8ucrEuVA8W9zVon8X6KvRejdDABWNCc6Qco-9BR9AQ,2964
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/agent_loaders.py,sha256=2hsaMB9lyNPAWUGKQ3EMA46ceUmbNefdoBx3huWF4fo,3128
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py,sha256=1wfR9yV_htDBmG6DATm5eN0m0r9wuHg4H0L8X9bAp20,26332
@@ -615,7 +616,7 @@ cognite_toolkit/_cdf_tk/loaders/_resource_loaders/datamodel_loaders.py,sha256=xK
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/extraction_pipeline_loaders.py,sha256=zqNPiIX1xvYV8alpxPKMqyy4QlH6oqDYNtrITC7ZKdo,18188
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/fieldops_loaders.py,sha256=pzjOjnxaJRXId-b0GZ31a6Mab8BdNnKXx3zUiBaxB0E,12203
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/file_loader.py,sha256=49uHmkYA5dzL7fyXYRIeZCkYnB5laclO-6Pis_qhVes,17633
-cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py,sha256=tO5i_xf1MBt_eQ6VUFJi6VZvusq16Xr33zqirp-Jers,26356
+cognite_toolkit/_cdf_tk/loaders/_resource_loaders/function_loaders.py,sha256=5_p3G0Kuz08saJs_4T6hD26BwHsVvvFG20IhAPZlZbw,25937
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/group_scoped_loader.py,sha256=Rerw0Y6tY6Nle3vmyl4nhX5lRsVkUVcnp39qZ3R_tDs,1830
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/hosted_extractors.py,sha256=lZuq_dj7_r0O3BcM6xnAenAXAtcJ3ke_LO1IguRKCiE,17527
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/industrial_tool_loaders.py,sha256=j1FsynrFk_iPIAsHuqjXmtbMM7ZM6RVlpQkxCH9ATO0,8493
@@ -624,7 +625,7 @@ cognite_toolkit/_cdf_tk/loaders/_resource_loaders/raw_loaders.py,sha256=RM-zxDX6
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/relationship_loader.py,sha256=KRWGHk11vRWhFiQUF0vChv95ElGY-E_VGQNCHkuY5Y4,7219
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/robotics_loaders.py,sha256=tgPdVA4B8gNfHeHlEMzDgBaWs2jj-FSZAm1O1r5Uzrc,17224
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/three_d_model_loaders.py,sha256=IcnZ9wyW6Dpl83nlOdAM44UyrMfgum2hLdqi1u2AEuw,8288
-cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py,sha256=Xy6Wu4JU4kDVCbiVV3Y0GXBcix6Y-snLDwtj6g33vow,15281
+cognite_toolkit/_cdf_tk/loaders/_resource_loaders/timeseries_loaders.py,sha256=PU2gRi-9EQKooZ8BLpKnQOtg3lNQ0lRBW0C26BU8r_0,17288
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/transformation_loaders.py,sha256=D0iD7jWPetiUM35JKQ1M_By086k4n5co9RaA8-VfF_A,35304
 cognite_toolkit/_cdf_tk/loaders/_resource_loaders/workflow_loaders.py,sha256=KmqDOIg6ttVqL_7widJgW5rNjs4eipd28AcLDs_bwDc,28427
 cognite_toolkit/_cdf_tk/prototypes/import_app.py,sha256=7dy852cBlHI2RQF1MidSmxl0jPBxekGWXnd2VtI7QFI,1899
@@ -671,6 +672,7 @@ cognite_toolkit/_cdf_tk/resource_classes/timeseries.py,sha256=wVlXR6tsmC-xmCnF4d
 cognite_toolkit/_cdf_tk/resource_classes/transformation_destination.py,sha256=AjfGS0JTGGG7aqUiezxtaiv_dBp-QMvHLwulXUIW8sA,6525
 cognite_toolkit/_cdf_tk/resource_classes/transformation_schedule.py,sha256=eTU1pEtR9z2SGeZU4AcXAGNWGfqf1v3L-wPDEVCSD9s,406
 cognite_toolkit/_cdf_tk/resource_classes/transformations.py,sha256=dt1coxSflgb7-NGHomYt8jFHPZni-xHgbCzU8i5B7-Q,3555
+cognite_toolkit/_cdf_tk/resource_classes/workflow.py,sha256=fMNfW93D8tdVwO7YgEYYiYvpktSMx4i0viIFg0gD2VY,512
 cognite_toolkit/_cdf_tk/tk_warnings/__init__.py,sha256=DmvCZhG59NZ7yepDm4s-gy92ysQBJ2t-LnlsLatNwOw,2262
 cognite_toolkit/_cdf_tk/tk_warnings/base.py,sha256=RMtJ_0iE1xfbTKkvWLkAo5AgscjkNDpT0lmDZ3I8aeI,4435
 cognite_toolkit/_cdf_tk/tk_warnings/fileread.py,sha256=9xE8i8F_HhrRaQi6IGcE240j4TRZQzrOd7gMByjsXEk,8924
@@ -693,17 +695,18 @@ cognite_toolkit/_cdf_tk/utils/sentry_utils.py,sha256=YWQdsePeFpT214-T-tZ8kEsUyC8
 cognite_toolkit/_cdf_tk/utils/sql_parser.py,sha256=RhUPWjVjwb9RBv1fixmG7bKvAb4JT_CC0O7Aqnx5Pgg,6196
 cognite_toolkit/_cdf_tk/utils/table_writers.py,sha256=wEBVlfCFv5bLLy836UiXQubwSxo8kUlSFZeQxnHrTX4,17932
 cognite_toolkit/_cdf_tk/utils/tarjan.py,sha256=mr3gMzlrkDadn1v7u7-Uzao81KKiM3xfXlZ185HL__A,1359
+cognite_toolkit/_cdf_tk/utils/text.py,sha256=gBl3o60dXRlEBsg8izdnOmuLo86jr35pQFZcxnKdNSY,1715
 cognite_toolkit/_repo_files/.env.tmpl,sha256=UmgKZVvIp-OzD8oOcYuwb_6c7vSJsqkLhuFaiVgK7RI,972
 cognite_toolkit/_repo_files/.gitignore,sha256=3exydcQPCJTldGFJoZy1RPHc1horbAprAoaShU8sYnM,5262
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=KVBxW8urCRDtVlJ6HN-kYmw0NCpW6c4lD-nlxz9tZsQ,692
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=Cp4KYraeWPjP8SnnEIbJoJnjmrRUwc982DPjOOzy2iM,722
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=kPSbNvw3tI5xsGHKjUDkCMebY6HuTTMwJk7Q3yR7EB4,667
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=_5mG5DphhCXHAScbPTnIaTkJ9tYTSKtyXVlO6aeQyQo,2430
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=r9DQ1_--OcUVFusWhTykM5NKmEn17ijBDatY5XNAoQ8,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=Go3m_277I0Kv2kuBFjPhYQAf0defj2ORfStEchgy7aA,2430
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=63nWYI_MHU5EuPwEX_inEAQxxiD5P6k8IAmlgl4CxpE,8082
-cognite_toolkit-0.5.63.dist-info/METADATA,sha256=o1oTTOVyWL8Lzlm-3ho6SMvBuN57paaH0UJJmfdRSeQ,4410
-cognite_toolkit-0.5.63.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-cognite_toolkit-0.5.63.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
-cognite_toolkit-0.5.63.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
-cognite_toolkit-0.5.63.dist-info/RECORD,,
+cognite_toolkit-0.5.65.dist-info/METADATA,sha256=J_4JmnX17kzzGuODbN5Ght5chcP3ZMZPtwKquCTxfUo,4410
+cognite_toolkit-0.5.65.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognite_toolkit-0.5.65.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.5.65.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.5.65.dist-info/RECORD,,