cognite-toolkit 0.5.64__py3-none-any.whl → 0.5.65__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
4
4
  [modules]
5
5
  # This is the version of the modules. It should not be changed manually.
6
6
  # It will be updated by the 'cdf modules upgrade' command.
7
- version = "0.5.64"
7
+ version = "0.5.65"
8
8
 
9
9
 
10
10
  [plugins]
@@ -3,7 +3,7 @@ from typing import Annotated, Any
3
3
 
4
4
  import typer
5
5
 
6
- from cognite_toolkit._cdf_tk.commands import MigrateTimeseriesCommand, MigrationPrepareCommand
6
+ from cognite_toolkit._cdf_tk.commands import MigrateAssetsCommand, MigrateTimeseriesCommand, MigrationPrepareCommand
7
7
  from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
8
8
 
9
9
 
@@ -12,6 +12,8 @@ class MigrateApp(typer.Typer):
12
12
  super().__init__(*args, **kwargs)
13
13
  self.callback(invoke_without_command=True)(self.main)
14
14
  self.command("prepare")(self.prepare)
15
+ # Uncomment when command is ready.
16
+ # self.command("assets")(self.assets)
15
17
  self.command("timeseries")(self.timeseries)
16
18
 
17
19
  def main(self, ctx: typer.Context) -> None:
@@ -57,6 +59,48 @@ class MigrateApp(typer.Typer):
57
59
  )
58
60
  )
59
61
 
62
@staticmethod
def assets(
    ctx: typer.Context,
    mapping_file: Annotated[
        Path,
        typer.Option(
            "--mapping-file",
            "-m",
            # Fix: the original concatenation was missing a space after
            # "[...externalId]." which rendered as "externalId].The dataSetId".
            help="Path to the mapping file that contains the mapping from Assets to CogniteAssets. "
            "This file is expected to have the following columns: [id/externalId, dataSetId, space, externalId]. "
            "The dataSetId is optional, and can be skipped. If it is set, it is used to check the access to the dataset.",
        ),
    ],
    dry_run: Annotated[
        bool,
        typer.Option(
            "--dry-run",
            "-d",
            help="If set, the migration will not be executed, but only a report of what would be done is printed.",
        ),
    ] = False,
    verbose: Annotated[
        bool,
        typer.Option(
            "--verbose",
            "-v",
            help="Turn on to get more verbose output when running the command",
        ),
    ] = False,
) -> None:
    """Migrate Assets to CogniteAssets.

    Builds a client from environment variables and delegates the actual
    migration to MigrateAssetsCommand.migrate_assets via cmd.run (which
    provides the command's standard error handling/telemetry wrapper).
    """
    client = EnvironmentVariables.create_from_environment().get_client()
    cmd = MigrateAssetsCommand()
    cmd.run(
        lambda: cmd.migrate_assets(
            client,
            mapping_file=mapping_file,
            dry_run=dry_run,
            verbose=verbose,
        )
    )
103
+
60
104
  @staticmethod
61
105
  def timeseries(
62
106
  ctx: typer.Context,
@@ -1,6 +1,7 @@
1
1
  from collections.abc import Sequence
2
- from typing import Any, overload
2
+ from typing import Any, cast, overload
3
3
 
4
+ from cognite.client import ClientConfig, CogniteClient
4
5
  from cognite.client._api.time_series import SortSpec, TimeSeriesAPI
5
6
  from cognite.client._constants import DEFAULT_LIMIT_READ
6
7
  from cognite.client.data_classes.data_modeling import NodeId
@@ -19,6 +20,11 @@ from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import Pe
19
20
  class ExtendedTimeSeriesAPI(TimeSeriesAPI):
20
21
  """Extended TimeSeriesAPI to include pending ID methods."""
21
22
 
23
def __init__(self, config: ClientConfig, api_version: str | None, cognite_client: CogniteClient) -> None:
    """Initialize the extended time series API.

    Beyond the base TimeSeriesAPI setup, this defines the request chunk
    limits used by the alpha pending-instance-id endpoints.
    """
    super().__init__(config, api_version, cognite_client)
    # Maximum number of items per request for the unlink endpoint.
    self._UNLINK_LIMIT = 1000
    # Maximum number of items per request for the set-pending-ids endpoint.
    self._PENDING_IDS_LIMIT = 1000
22
28
  @overload
23
29
  def set_pending_ids(
24
30
  self, instance_id: NodeId | tuple[str, str], id: int | None = None, external_id: str | None = None
@@ -74,7 +80,7 @@ class ExtendedTimeSeriesAPI(TimeSeriesAPI):
74
80
  },
75
81
  "api_subversion": "alpha",
76
82
  }
77
- for id_chunk in split_into_chunks(list(identifiers), 1000)
83
+ for id_chunk in split_into_chunks(list(identifiers), self._PENDING_IDS_LIMIT)
78
84
  ]
79
85
  tasks_summary = execute_tasks(
80
86
  self._post,
@@ -90,6 +96,68 @@ class ExtendedTimeSeriesAPI(TimeSeriesAPI):
90
96
 
91
97
  return ExtendedTimeSeriesList._load(retrieved_items, cognite_client=self._cognite_client)
92
98
 
99
@overload
def unlink_instance_ids(
    self,
    id: int | None = None,
    external_id: str | None = None,
) -> ExtendedTimeSeries | None: ...

@overload
def unlink_instance_ids(
    self,
    id: Sequence[int] | None = None,
    external_id: SequenceNotStr[str] | None = None,
) -> ExtendedTimeSeriesList: ...

def unlink_instance_ids(
    self,
    id: int | Sequence[int] | None = None,
    external_id: str | SequenceNotStr[str] | None = None,
) -> ExtendedTimeSeries | ExtendedTimeSeriesList | None:
    """Unlink pending instance IDs from time series.

    Args:
        id (int | Sequence[int] | None): The ID(s) of the time series.
        external_id (str | SequenceNotStr[str] | None): The external ID(s) of the time series.

    """
    # Nothing requested: nothing to do.
    if id is None and external_id is None:
        return None
    single_id = isinstance(id, int)
    single_external_id = isinstance(external_id, str)
    if single_id and single_external_id:
        raise ValueError("Cannot specify both id and external_id as single values. Use one or the other.")
    return_single = single_id or single_external_id
    ident_seq = IdentifierSequence.load(id, external_id)

    # One POST per chunk of identifiers, against the alpha unlink endpoint.
    request_tasks: list[dict] = []
    for id_chunk in split_into_chunks(ident_seq.as_dicts(), self._UNLINK_LIMIT):
        request_tasks.append(
            {
                "url_path": f"{self._RESOURCE_PATH}/unlink-instance-ids",
                "json": {"items": id_chunk},
                "api_subversion": "alpha",
            }
        )
    summary = execute_tasks(
        self._post,
        request_tasks,
        max_workers=self._config.max_workers,
        fail_fast=True,
    )
    summary.raise_compound_exception_if_failed_tasks(
        task_unwrap_fn=unpack_items_in_payload,
    )

    items = summary.joined_results(lambda res: res.json()["items"])
    loaded = ExtendedTimeSeriesList._load(items, cognite_client=self._cognite_client)
    if not return_single:
        return cast(ExtendedTimeSeriesList, loaded)
    # Single-item call: collapse the list to one element (or None).
    if len(loaded) == 0:
        return None
    if len(loaded) > 1:
        raise ValueError("Expected a single time series, but multiple were returned.")
    return cast(ExtendedTimeSeries, loaded[0])
160
+
93
161
  def retrieve(
94
162
  self, id: int | None = None, external_id: str | None = None, instance_id: NodeId | None = None
95
163
  ) -> ExtendedTimeSeries | None:
@@ -1,4 +1,4 @@
1
- from ._migrate import MigrateTimeseriesCommand, MigrationPrepareCommand
1
+ from ._migrate import MigrateAssetsCommand, MigrateTimeseriesCommand, MigrationPrepareCommand
2
2
  from ._populate import PopulateCommand
3
3
  from ._profile import ProfileAssetCentricCommand, ProfileTransformationCommand
4
4
  from ._purge import PurgeCommand
@@ -26,6 +26,7 @@ __all__ = [
26
26
  "DumpResourceCommand",
27
27
  "FeatureFlagCommand",
28
28
  "InitCommand",
29
+ "MigrateAssetsCommand",
29
30
  "MigrateTimeseriesCommand",
30
31
  "MigrationPrepareCommand",
31
32
  "ModulesCommand",
@@ -1,4 +1,5 @@
1
+ from .assets import MigrateAssetsCommand
1
2
  from .prepare import MigrationPrepareCommand
2
3
  from .timeseries import MigrateTimeseriesCommand
3
4
 
4
- __all__ = ["MigrateTimeseriesCommand", "MigrationPrepareCommand"]
5
+ __all__ = ["MigrateAssetsCommand", "MigrateTimeseriesCommand", "MigrationPrepareCommand"]
@@ -0,0 +1,201 @@
1
+ from collections.abc import Callable, Iterable
2
+ from pathlib import Path
3
+
4
+ from cognite.client.data_classes import Asset, Label, LabelDefinition
5
from cognite.client.data_classes.capabilities import (
    AssetsAcl,
    Capability,
    DataModelInstancesAcl,
    DataModelsAcl,
    DataSetScope,
    SpaceIDScope,
    TimeSeriesAcl,
)
13
+ from cognite.client.data_classes.data_modeling import NodeApply, NodeOrEdgeData, ViewId
14
+ from cognite.client.exceptions import CogniteAPIError, CogniteException
15
+ from rich import print
16
+
17
+ from cognite_toolkit._cdf_tk.client import ToolkitClient
18
+ from cognite_toolkit._cdf_tk.client._constants import DATA_MODELING_MAX_WRITE_WORKERS
19
+ from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
20
+ from cognite_toolkit._cdf_tk.constants import DMS_INSTANCE_LIMIT_MARGIN
21
+ from cognite_toolkit._cdf_tk.exceptions import (
22
+ AuthenticationError,
23
+ ResourceCreationError,
24
+ ResourceRetrievalError,
25
+ ToolkitMigrationError,
26
+ ToolkitValueError,
27
+ )
28
+ from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
29
+ from cognite_toolkit._cdf_tk.utils import humanize_collection
30
+ from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
31
+ from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
32
+
33
+ from .data_classes import MigrationMapping, MigrationMappingList
34
+ from .data_model import MAPPING_VIEW_ID
35
+
36
+
37
+ class MigrateAssetsCommand(ToolkitCommand):
38
+ cdf_cdm = "cdf_cdm"
39
+ asset_id = ViewId(cdf_cdm, "CogniteAsset", "v1")
40
+
41
+ # This is the number of timeseries that can be written in parallel.
42
+ chunk_size = 1000 * DATA_MODELING_MAX_WRITE_WORKERS
43
+
44
+ def migrate_assets(
45
+ self,
46
+ client: ToolkitClient,
47
+ mapping_file: Path,
48
+ dry_run: bool = False,
49
+ verbose: bool = False,
50
+ ) -> None:
51
+ """Migrate resources from Asset-Centric to data modeling in CDF."""
52
+ mappings = MigrationMappingList.read_mapping_file(mapping_file)
53
+ self._validate_access(client, mappings)
54
+ self._validate_migration_mappings_exists(client)
55
+ self._validate_available_capacity(client, mappings)
56
+ iteration_count = len(mappings) // self.chunk_size + 1
57
+ executor = ProducerWorkerExecutor[list[tuple[Asset, MigrationMapping]], list[NodeApply]](
58
+ download_iterable=self._download_assets(client, mappings),
59
+ process=self._as_cognite_assets,
60
+ write=self._upload_assets(client, dry_run=dry_run, verbose=verbose),
61
+ iteration_count=iteration_count,
62
+ max_queue_size=10,
63
+ download_description="Downloading assets",
64
+ process_description="Converting assets to CogniteAssets",
65
+ write_description="Uploading CogniteAssets",
66
+ )
67
+ executor.run()
68
+ if executor.error_occurred:
69
+ raise ResourceCreationError(executor.error_message)
70
+
71
+ prefix = "Would have" if dry_run else "Successfully"
72
+ self.console(f"{prefix} migrated {executor.total_items:,} assets to CogniteAssets.")
73
+
74
+ def _validate_access(self, client: ToolkitClient, mappings: MigrationMappingList) -> None:
75
+ required_capabilities: list[Capability] = [
76
+ DataModelsAcl(
77
+ actions=[DataModelsAcl.Action.Read], scope=SpaceIDScope([self.cdf_cdm, MAPPING_VIEW_ID.space])
78
+ ),
79
+ DataModelInstancesAcl(
80
+ actions=[
81
+ DataModelInstancesAcl.Action.Read,
82
+ DataModelInstancesAcl.Action.Write,
83
+ DataModelInstancesAcl.Action.Write_Properties,
84
+ ],
85
+ scope=SpaceIDScope(list(mappings.spaces())),
86
+ ),
87
+ ]
88
+ if data_set_ids := mappings.get_data_set_ids():
89
+ required_capabilities.append(
90
+ TimeSeriesAcl(
91
+ actions=[TimeSeriesAcl.Action.Read, TimeSeriesAcl.Action.Write],
92
+ scope=DataSetScope(list(data_set_ids)),
93
+ )
94
+ )
95
+ if missing := client.iam.verify_capabilities(required_capabilities):
96
+ raise AuthenticationError(f"Missing required capabilities: {humanize_collection(missing)}.")
97
+
98
+ def _validate_migration_mappings_exists(self, client: ToolkitClient) -> None:
99
+ view = client.data_modeling.views.retrieve(MAPPING_VIEW_ID)
100
+ if not view:
101
+ raise ToolkitMigrationError(
102
+ f"The migration mapping view {MAPPING_VIEW_ID} does not exist. "
103
+ f"Please run the `cdf migrate prepare` command to deploy the migration data model."
104
+ )
105
+
106
+ def _validate_available_capacity(self, client: ToolkitClient, mappings: MigrationMappingList) -> None:
107
+ """Validate that the project has enough capacity to accommodate the migration."""
108
+ try:
109
+ stats = client.data_modeling.statistics.project()
110
+ except CogniteAPIError:
111
+ # This endpoint is not yet in alpha, it may change or not be available.
112
+ self.warn(HighSeverityWarning("Cannot check the instances capacity proceeding with migration anyway."))
113
+ return
114
+ available_capacity = stats.instances.instances_limit - stats.instances.instances
115
+ available_capacity_after = available_capacity - len(mappings)
116
+
117
+ if available_capacity_after < DMS_INSTANCE_LIMIT_MARGIN:
118
+ raise ToolkitValueError(
119
+ "Cannot proceed with migration, not enough instance capacity available. Total capacity after migration"
120
+ f"would be {available_capacity_after:,} instances, which is less than the required margin of"
121
+ f"{DMS_INSTANCE_LIMIT_MARGIN:,} instances. Please increase the instance capacity in your CDF project"
122
+ f" or delete some existing instances before proceeding with the migration of {len(mappings):,} assets."
123
+ )
124
+ total_instances = stats.instances.instances + len(mappings)
125
+ self.console(
126
+ f"Project has enough capacity for migration. Total instances after migration: {total_instances:,}."
127
+ )
128
+
129
+ def _download_assets(
130
+ self, client: ToolkitClient, mappings: MigrationMappingList
131
+ ) -> Iterable[list[tuple[Asset, MigrationMapping]]]:
132
+ for chunk in chunker_sequence(mappings, self.chunk_size):
133
+ try:
134
+ asset_list = client.assets.retrieve_multiple(
135
+ chunk.get_ids(), chunk.get_external_ids(), ignore_unknown_ids=True
136
+ )
137
+ except CogniteException as e:
138
+ raise ResourceRetrievalError(f"Failed to retrieve {len(chunk):,} assets: {e!s}") from e
139
+ mapping_by_id = chunk.as_mapping_by_id()
140
+ chunk_list: list[tuple[Asset, MigrationMapping]] = []
141
+ for asset in asset_list:
142
+ if asset.id in mapping_by_id:
143
+ chunk_list.append((asset, mapping_by_id[asset.id]))
144
+ elif asset.external_id in mapping_by_id:
145
+ chunk_list.append((asset, mapping_by_id[asset.external_id]))
146
+ yield chunk_list
147
+
148
+ def _as_cognite_assets(self, assets: list[tuple[Asset, MigrationMapping]]) -> list[NodeApply]:
149
+ """Convert Asset objects to CogniteAssetApply objects."""
150
+ return [self.as_cognite_asset(asset, mapping) for asset, mapping in assets]
151
+
152
+ @classmethod
153
+ def _upload_assets(cls, client: ToolkitClient, dry_run: bool, verbose: bool) -> Callable[[list[NodeApply]], None]:
154
+ def upload_assets(assets: list[NodeApply]) -> None:
155
+ if dry_run:
156
+ if verbose:
157
+ print(f"Would have created {len(assets):,} CogniteAssets.")
158
+ return
159
+ try:
160
+ created = client.data_modeling.instances.apply_fast(assets)
161
+ except CogniteException as e:
162
+ raise ResourceCreationError(f"Failed to upsert CogniteAssets {len(assets):,}: {e!s}") from e
163
+ if verbose:
164
+ print(f"Created {len(created):,} CogniteAssets.")
165
+
166
+ return upload_assets
167
+
168
+ @classmethod
169
+ def as_cognite_asset(cls, asset: Asset, mapping: MigrationMapping) -> NodeApply:
170
+ tags: list[str] = []
171
+ for label in asset.labels or []:
172
+ if isinstance(label, str):
173
+ tags.append(label)
174
+ elif isinstance(label, dict) and "externalId" in label:
175
+ tags.append(label["externalId"])
176
+ elif isinstance(label, Label | LabelDefinition) and label.external_id:
177
+ tags.append(label.external_id)
178
+
179
+ return NodeApply(
180
+ space=mapping.instance_id.space,
181
+ external_id=mapping.instance_id.external_id,
182
+ sources=[
183
+ NodeOrEdgeData(
184
+ source=cls.asset_id,
185
+ properties={
186
+ "name": asset.name,
187
+ "description": asset.description,
188
+ "tags": tags or None,
189
+ },
190
+ ),
191
+ NodeOrEdgeData(
192
+ source=MAPPING_VIEW_ID,
193
+ properties={
194
+ "resourceType": "asset",
195
+ "id": asset.id,
196
+ "dataSetId": asset.data_set_id,
197
+ "classicExternalId": asset.external_id,
198
+ },
199
+ ),
200
+ ],
201
+ )
@@ -1,5 +1,6 @@
1
1
  import csv
2
2
  import sys
3
+ from abc import abstractmethod
3
4
  from collections.abc import Collection, Iterator, Sequence
4
5
  from dataclasses import dataclass
5
6
  from pathlib import Path
@@ -24,18 +25,28 @@ class MigrationMapping:
24
25
  resource_type: str
25
26
  instance_id: NodeId
26
27
 
28
+ @abstractmethod
29
+ def get_id(self) -> int | str:
30
+ raise NotImplementedError()
31
+
27
32
 
28
33
  @dataclass
29
34
  class IdMigrationMapping(MigrationMapping):
30
35
  id: int
31
36
  data_set_id: int | None = None
32
37
 
38
+ def get_id(self) -> int:
39
+ return self.id
40
+
33
41
 
34
42
  @dataclass
35
43
  class ExternalIdMigrationMapping(MigrationMapping):
36
44
  external_id: str
37
45
  data_set_id: int | None = None
38
46
 
47
+ def get_id(self) -> str:
48
+ return self.external_id
49
+
39
50
 
40
51
  class MigrationMappingList(list, Sequence[MigrationMapping]):
41
52
  # Implemented to get correct type hints
@@ -90,6 +101,10 @@ class MigrationMappingList(list, Sequence[MigrationMapping]):
90
101
  if isinstance(mapping, IdMigrationMapping | ExternalIdMigrationMapping) and mapping.data_set_id is not None
91
102
  }
92
103
 
104
+ def as_mapping_by_id(self) -> dict[int | str, MigrationMapping]:
105
+ """Return a mapping of IDs to MigrationMapping objects."""
106
+ return {mapping.get_id(): mapping for mapping in self}
107
+
93
108
  @classmethod
94
109
  def read_mapping_file(cls, mapping_file: Path) -> Self:
95
110
  if not mapping_file.exists():
@@ -66,6 +66,8 @@ MAPPING_VIEW = dm.ViewApply(
66
66
  },
67
67
  )
68
68
 
69
+ MAPPING_VIEW_ID = MAPPING_VIEW.as_id()
70
+
69
71
  COGNITE_MIGRATION_MODEL = dm.DataModelApply(
70
72
  space=SPACE.space,
71
73
  external_id="CogniteMigration",
@@ -25,7 +25,7 @@ class MigrationPrepareCommand(ToolkitCommand):
25
25
  verb = "Would deploy" if dry_run else "Deploying"
26
26
  print(f"{verb} {MODEL_ID!r}")
27
27
  results = DeployResults([], "deploy", dry_run=dry_run)
28
- for loader_cls, resources in [
28
+ for loader_cls, resource_list in [
29
29
  (SpaceLoader, [SPACE]),
30
30
  (ContainerLoader, [MAPPING_CONTAINER]),
31
31
  (ViewLoader, [MAPPING_VIEW]),
@@ -35,23 +35,15 @@ class MigrationPrepareCommand(ToolkitCommand):
35
35
  loader = loader_cls.create_loader(client) # type: ignore[attr-defined]
36
36
  worker = ResourceWorker(loader)
37
37
  # MyPy does not understand that `loader` has a `get_id` method.
38
- local_by_id = {loader.get_id(item): (item.dump(), item) for item in resources} # type: ignore[attr-defined]
38
+ local_by_id = {loader.get_id(item): (item.dump(), item) for item in resource_list} # type: ignore[attr-defined]
39
39
  worker.validate_access(local_by_id, is_dry_run=dry_run)
40
40
  cdf_resources = loader.retrieve(list(local_by_id.keys()))
41
- to_create, to_update, to_delete, unchanged = worker.categorize_resources(
42
- local_by_id, cdf_resources, False, verbose
43
- )
41
+ resources = worker.categorize_resources(local_by_id, cdf_resources, False, verbose)
44
42
 
45
43
  if dry_run:
46
- result = deploy_cmd.dry_run_deploy(to_create, to_update, to_delete, unchanged, loader, False, False)
44
+ result = deploy_cmd.dry_run_deploy(resources, loader, False, False)
47
45
  else:
48
- result = deploy_cmd.actual_deploy(
49
- to_create,
50
- to_update,
51
- to_delete,
52
- unchanged,
53
- loader,
54
- )
46
+ result = deploy_cmd.actual_deploy(resources, loader)
55
47
  if result:
56
48
  results[result.name] = result
57
49
  if results.has_counts:
@@ -45,6 +45,7 @@ from cognite_toolkit._cdf_tk.loaders import (
45
45
  ResourceWorker,
46
46
  )
47
47
  from cognite_toolkit._cdf_tk.loaders._base_loaders import T_WritableCogniteResourceList
48
+ from cognite_toolkit._cdf_tk.loaders._worker import CategorizedResources
48
49
  from cognite_toolkit._cdf_tk.tk_warnings import EnvironmentVariableMissingWarning
49
50
  from cognite_toolkit._cdf_tk.tk_warnings.base import WarningList, catch_warnings
50
51
  from cognite_toolkit._cdf_tk.tk_warnings.other import (
@@ -261,7 +262,7 @@ class DeployCommand(ToolkitCommand):
261
262
  return None
262
263
 
263
264
  with catch_warnings(EnvironmentVariableMissingWarning) as env_var_warnings:
264
- to_create, to_update, to_delete, unchanged = worker.prepare_resources(
265
+ resources = worker.prepare_resources(
265
266
  files,
266
267
  environment_variables=env_vars.dump(include_os=True),
267
268
  is_dry_run=dry_run,
@@ -274,7 +275,7 @@ class DeployCommand(ToolkitCommand):
274
275
 
275
276
  # We are not counting to_delete as these are captured by to_create.
276
277
  # (to_delete is used for resources that does not support update and instead needs to be deleted and recreated)
277
- nr_of_items = len(to_create) + len(to_update) + len(unchanged)
278
+ nr_of_items = len(resources.to_create) + len(resources.to_update) + len(resources.unchanged)
278
279
  if nr_of_items == 0:
279
280
  return ResourceDeployResult(name=loader.display_name)
280
281
 
@@ -288,19 +289,16 @@ class DeployCommand(ToolkitCommand):
288
289
 
289
290
  if dry_run:
290
291
  result = self.dry_run_deploy(
291
- to_create,
292
- to_update,
293
- to_delete,
294
- unchanged,
292
+ resources,
295
293
  loader,
296
294
  has_done_drop,
297
295
  has_dropped_data,
298
296
  )
299
297
  else:
300
- result = self.actual_deploy(to_create, to_update, to_delete, unchanged, loader, env_var_warnings)
298
+ result = self.actual_deploy(resources, loader, env_var_warnings)
301
299
 
302
300
  if verbose:
303
- self._verbose_print(to_create, to_update, unchanged, loader, dry_run)
301
+ self._verbose_print(resources, loader, dry_run)
304
302
 
305
303
  if isinstance(loader, ResourceContainerLoader):
306
304
  return ResourceContainerDeployResult.from_resource_deploy_result(
@@ -311,10 +309,7 @@ class DeployCommand(ToolkitCommand):
311
309
 
312
310
  def actual_deploy(
313
311
  self,
314
- to_create: T_CogniteResourceList,
315
- to_update: T_CogniteResourceList,
316
- to_delete: list[T_ID],
317
- unchanged: T_CogniteResourceList,
312
+ resources: CategorizedResources[T_ID, T_CogniteResourceList],
318
313
  loader: ResourceLoader[
319
314
  T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
320
315
  ],
@@ -326,16 +321,16 @@ class DeployCommand(ToolkitCommand):
326
321
  if isinstance(warning, EnvironmentVariableMissingWarning)
327
322
  for identifier in warning.identifiers or []
328
323
  }
329
- nr_of_unchanged = len(unchanged)
324
+ nr_of_unchanged = len(resources.unchanged)
330
325
  nr_of_deleted, nr_of_created, nr_of_changed = 0, 0, 0
331
- if to_delete:
332
- deleted = loader.delete(to_delete)
326
+ if resources.to_delete:
327
+ deleted = loader.delete(resources.to_delete)
333
328
  nr_of_deleted += deleted
334
- if to_create:
335
- created = self._create_resources(to_create, loader, environment_variable_warning_by_id)
329
+ if resources.to_create:
330
+ created = self._create_resources(resources.to_create, loader, environment_variable_warning_by_id)
336
331
  nr_of_created += created
337
- if to_update:
338
- updated = self._update_resources(to_update, loader, environment_variable_warning_by_id)
332
+ if resources.to_update:
333
+ updated = self._update_resources(resources.to_update, loader, environment_variable_warning_by_id)
339
334
  nr_of_changed += updated
340
335
  return ResourceDeployResult(
341
336
  name=loader.display_name,
@@ -348,10 +343,7 @@ class DeployCommand(ToolkitCommand):
348
343
 
349
344
  @staticmethod
350
345
  def dry_run_deploy(
351
- to_create: T_CogniteResourceList,
352
- to_update: T_CogniteResourceList,
353
- to_delete: list[T_ID],
354
- unchanged: T_CogniteResourceList,
346
+ resources: CategorizedResources[T_ID, T_CogniteResourceList],
355
347
  loader: ResourceLoader[
356
348
  T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
357
349
  ],
@@ -364,39 +356,40 @@ class DeployCommand(ToolkitCommand):
364
356
  and (not isinstance(loader, ResourceContainerLoader) or has_dropped_data)
365
357
  ):
366
358
  # Means the resources will be deleted and not left unchanged or changed
367
- for item in unchanged:
359
+ for item in resources.unchanged:
368
360
  # We cannot use extents as LoadableNodes cannot be extended.
369
- to_create.append(item)
370
- for item in to_update:
371
- to_create.append(item)
372
- unchanged.clear()
373
- to_update.clear()
361
+ resources.to_create.append(item)
362
+ for item in resources.to_update:
363
+ resources.to_create.append(item)
364
+ resources.unchanged.clear()
365
+ resources.to_update.clear()
374
366
  return ResourceDeployResult(
375
367
  name=loader.display_name,
376
- created=len(to_create),
377
- deleted=len(to_delete),
378
- changed=len(to_update),
379
- unchanged=len(unchanged),
380
- total=len(to_create) + len(to_delete) + len(to_update) + len(unchanged),
368
+ created=len(resources.to_create),
369
+ deleted=len(resources.to_delete),
370
+ changed=len(resources.to_update),
371
+ unchanged=len(resources.unchanged),
372
+ total=len(resources.to_create)
373
+ + len(resources.to_delete)
374
+ + len(resources.to_update)
375
+ + len(resources.unchanged),
381
376
  )
382
377
 
383
378
  @staticmethod
384
379
  def _verbose_print(
385
- to_create: T_CogniteResourceList,
386
- to_update: T_CogniteResourceList,
387
- unchanged: T_CogniteResourceList,
380
+ resources: CategorizedResources[T_ID, T_CogniteResourceList],
388
381
  loader: ResourceLoader,
389
382
  dry_run: bool,
390
383
  ) -> None:
391
384
  print_outs = []
392
385
  prefix = "Would have " if dry_run else ""
393
- if to_create:
394
- print_outs.append(f"{prefix}Created {_print_ids_or_length(loader.get_ids(to_create), limit=20)}")
395
- if to_update:
396
- print_outs.append(f"{prefix}Updated {_print_ids_or_length(loader.get_ids(to_update), limit=20)}")
397
- if unchanged:
386
+ if resources.to_create:
387
+ print_outs.append(f"{prefix}Created {_print_ids_or_length(loader.get_ids(resources.to_create), limit=20)}")
388
+ if resources.to_update:
389
+ print_outs.append(f"{prefix}Updated {_print_ids_or_length(loader.get_ids(resources.to_update), limit=20)}")
390
+ if resources.unchanged:
398
391
  print_outs.append(
399
- f"{'Untouched' if dry_run else 'Unchanged'} {_print_ids_or_length(loader.get_ids(unchanged), limit=5)}"
392
+ f"{'Untouched' if dry_run else 'Unchanged'} {_print_ids_or_length(loader.get_ids(resources.unchanged), limit=5)}"
400
393
  )
401
394
  prefix_message = f" {loader.display_name}: "
402
395
  if len(print_outs) == 1:
@@ -4,6 +4,7 @@ import re
4
4
  import warnings
5
5
  from collections.abc import Hashable
6
6
  from copy import deepcopy
7
+ from dataclasses import dataclass
7
8
  from pathlib import Path
8
9
  from typing import TYPE_CHECKING, Any, Generic, cast
9
10
 
@@ -30,6 +31,14 @@ if TYPE_CHECKING:
30
31
  from cognite_toolkit._cdf_tk.data_classes._module_directories import ReadModule
31
32
 
32
33
 
34
@dataclass
class CategorizedResources(Generic[T_ID, T_CogniteResourceList]):
    """Resources grouped by the deploy action they require.

    Field order is part of the generated __init__ signature — do not reorder.
    """

    # New resources that do not yet exist in CDF.
    to_create: T_CogniteResourceList
    # Existing resources whose local definition differs from CDF.
    to_update: T_CogniteResourceList
    # IDs of resources that must be deleted and recreated (no update support).
    to_delete: list[T_ID]
    # Resources identical to what is already deployed.
    unchanged: T_CogniteResourceList
+
41
+
33
42
  class ResourceWorker(
34
43
  Generic[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList]
35
44
  ):
@@ -75,7 +84,7 @@ class ResourceWorker(
75
84
  is_dry_run: bool = False,
76
85
  force_update: bool = False,
77
86
  verbose: bool = False,
78
- ) -> tuple[T_CogniteResourceList, T_CogniteResourceList, list[T_ID], T_CogniteResourceList]:
87
+ ) -> CategorizedResources:
79
88
  """Prepare resources for deployment by loading them from files, validating access, and categorizing them into create, update, delete, and unchanged lists.
80
89
 
81
90
  Args:
@@ -86,12 +95,7 @@ class ResourceWorker(
86
95
  verbose: Whether to print detailed information about the resources being processed.
87
96
 
88
97
  Returns:
89
- A tuple containing:
90
- - to_create: List of resources to create.
91
- - to_update: List of resources to update.
92
- - to_delete: List of resource IDs to delete.
93
- - unchanged: List of resources that are unchanged.
94
-
98
+ CategorizedResources: A categorized list of resources to create, update, delete, and unchanged.
95
99
  """
96
100
  local_by_id = self.load_resources(filepaths, environment_variables, is_dry_run)
97
101
 
@@ -160,31 +164,28 @@ class ResourceWorker(
160
164
  cdf_resources: T_WritableCogniteResourceList,
161
165
  force_update: bool,
162
166
  verbose: bool,
163
- ) -> tuple[T_CogniteResourceList, T_CogniteResourceList, list[T_ID], T_CogniteResourceList]:
164
- to_create: T_CogniteResourceList
165
- to_update: T_CogniteResourceList
166
- to_delete: list[T_ID] = []
167
- unchanged: T_CogniteResourceList
168
- to_create, to_update, unchanged = (
169
- self.loader.list_write_cls([]),
170
- self.loader.list_write_cls([]),
171
- self.loader.list_write_cls([]),
167
+ ) -> CategorizedResources:
168
+ resources: CategorizedResources[T_ID, T_CogniteResourceList] = CategorizedResources(
169
+ to_create=self.loader.list_write_cls([]),
170
+ to_update=self.loader.list_write_cls([]),
171
+ to_delete=[],
172
+ unchanged=self.loader.list_write_cls([]),
172
173
  )
173
174
  cdf_resource_by_id = {self.loader.get_id(resource): resource for resource in cdf_resources}
174
175
  for identifier, (local_dict, local_resource) in local_by_id.items():
175
176
  cdf_resource = cdf_resource_by_id.get(identifier)
176
177
  if cdf_resource is None:
177
- to_create.append(local_resource)
178
+ resources.to_create.append(local_resource)
178
179
  continue
179
180
  cdf_dict = self.loader.dump_resource(cdf_resource, local_dict)
180
181
  if not force_update and cdf_dict == local_dict:
181
- unchanged.append(local_resource)
182
+ resources.unchanged.append(local_resource)
182
183
  continue
183
184
  if self.loader.support_update:
184
- to_update.append(local_resource)
185
+ resources.to_update.append(local_resource)
185
186
  else:
186
- to_delete.append(identifier)
187
- to_create.append(local_resource)
187
+ resources.to_delete.append(identifier)
188
+ resources.to_create.append(local_resource)
188
189
  if verbose:
189
190
  diff_str = "\n".join(to_diff(cdf_dict, local_dict))
190
191
  for sensitive in self.loader.sensitive_strings(local_resource):
@@ -196,4 +197,4 @@ class ResourceWorker(
196
197
  expand=False,
197
198
  )
198
199
  )
199
- return to_create, to_update, to_delete, unchanged
200
+ return resources
@@ -0,0 +1,15 @@
1
+ from cognite.client.data_classes import WorkflowUpsert
2
+ from pydantic import Field
3
+
4
+ from .base import ToolkitResource
5
+
6
+
7
class WorkflowYAML(ToolkitResource):
    # NOTE: deliberately no class docstring — pydantic folds it into the
    # generated JSON schema, which would change the resource's schema output.

    # The SDK resource class this YAML model maps onto.
    _cdf_resource = WorkflowUpsert

    # Unique workflow identifier within the project.
    external_id: str = Field(
        max_length=255,
        description=(
            "Identifier for a workflow. Must be unique for the project."
            " No trailing or leading whitespace and no null characters allowed."
        ),
    )
    # Optional free-text description, capped by the API at 500 characters.
    description: str | None = Field(None, max_length=500)
    # External ID of the data set the workflow belongs to, if any.
    data_set_external_id: str | None = None
@@ -12,7 +12,7 @@ jobs:
12
12
  environment: dev
13
13
  name: Deploy
14
14
  container:
15
- image: cognite/toolkit:0.5.64
15
+ image: cognite/toolkit:0.5.65
16
16
  env:
17
17
  CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
18
18
  CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -10,7 +10,7 @@ jobs:
10
10
  environment: dev
11
11
  name: Deploy Dry Run
12
12
  container:
13
- image: cognite/toolkit:0.5.64
13
+ image: cognite/toolkit:0.5.65
14
14
  env:
15
15
  CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
16
16
  CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -1 +1 @@
1
- __version__ = "0.5.64"
1
+ __version__ = "0.5.65"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cognite_toolkit
3
- Version: 0.5.64
3
+ Version: 0.5.65
4
4
  Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
5
5
  Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
6
6
  Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
@@ -1,10 +1,10 @@
1
1
  cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  cognite_toolkit/_cdf.py,sha256=WWMslI-y2VbIYDMH19wnINebGwlOvAeYr-qkPRC1f68,5834
3
- cognite_toolkit/_version.py,sha256=YmQRiPOhelComYapMsMGUkwpYKgzkePjx8U_IXrWTZ4,23
3
+ cognite_toolkit/_version.py,sha256=uPR7tuL4mJMppx_mxnT2t-XCK6LTKMl7XrNVV0IoCLM,23
4
4
  cognite_toolkit/config.dev.yaml,sha256=CIDmi1OGNOJ-70h2BNCozZRmhvU5BfpZoh6Q04b8iMs,109
5
5
  cognite_toolkit/_builtin_modules/README.md,sha256=roU3G05E6ogP5yhw4hdIvVDKV831zCh2pzt9BVddtBg,307
6
6
  cognite_toolkit/_builtin_modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- cognite_toolkit/_builtin_modules/cdf.toml,sha256=cNxARlcrmEJ3CEAaRbEy-M9jxKukb_uZ6slnSAO2hI4,273
7
+ cognite_toolkit/_builtin_modules/cdf.toml,sha256=LGJ7fwBn4yuwF9I5hjQeBFhQ-B6cPwxxW_sUsYKBPJw,273
8
8
  cognite_toolkit/_builtin_modules/packages.toml,sha256=RdY44Sxvh6sUtAkgp1dHID1mtqkOTzP_rbZL2Q27fYw,1147
9
9
  cognite_toolkit/_builtin_modules/bootcamp/README.md,sha256=iTVqoy3PLpC-xPi5pbuMIAEHILBSfWTGLexwa1AltpY,211
10
10
  cognite_toolkit/_builtin_modules/bootcamp/default.config.yaml,sha256=MqYTcRiz03bow4LT8E3jumnd_BsqC5SvjgYOVVkHGE0,93
@@ -499,7 +499,7 @@ cognite_toolkit/_cdf_tk/apps/_auth_app.py,sha256=ER7uYb3ViwsHMXiQEZpyhwU6TIjKaB9
499
499
  cognite_toolkit/_cdf_tk/apps/_core_app.py,sha256=-4ABeNtC0cxw7XvCRouPzTvlmqsS0NRR-jLgMGadW2I,13712
500
500
  cognite_toolkit/_cdf_tk/apps/_dump_app.py,sha256=UXmB8oFwVLOmxJBlxxLIBMLPCLwdgyaFfuG6Ex-GZh4,25608
501
501
  cognite_toolkit/_cdf_tk/apps/_landing_app.py,sha256=v4t2ryxzFre7y9IkEPIDwmyJDO8VDIIv6hIcft5TjpQ,422
502
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=GRsOlqYAWB0rsZsdTJTGfjPm1OkbUq7xBrM4pzQRKoY,3708
502
+ cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=ifzl63MBjxV3gdf3h7dztGLf8VbuXHeRnWG-p4gBCGE,5299
503
503
  cognite_toolkit/_cdf_tk/apps/_modules_app.py,sha256=tjCP-QbuPYd7iw6dkxnhrrWf514Lr25_oVgSJyJcaL8,6642
504
504
  cognite_toolkit/_cdf_tk/apps/_populate_app.py,sha256=PGUqK_USOqdPCDvUJI-4ne9TN6EssC33pUbEeCmiLPg,2805
505
505
  cognite_toolkit/_cdf_tk/apps/_profile_app.py,sha256=TaKTOgkd538QyIWBRdAILJ-TotBxYreZgWBqK4yrebQ,2562
@@ -524,7 +524,7 @@ cognite_toolkit/_cdf_tk/client/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
524
524
  cognite_toolkit/_cdf_tk/client/api/dml.py,sha256=8b1lo86JdvfEsz9mP2rx0Mp9fyWsU6mbXHqLBtvSidU,3546
525
525
  cognite_toolkit/_cdf_tk/client/api/extended_data_modeling.py,sha256=V9a-Ep_xlxd9KQN1D15GOLmg6KrSADciPWlmgAJuaBY,10481
526
526
  cognite_toolkit/_cdf_tk/client/api/extended_raw.py,sha256=9DVbM2aWmIyzbaW-lh10_pzVYJUEQFnIKnxvt413Bjk,2118
527
- cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py,sha256=8GsMDq3F9tb_bR9c6yViRErjf0AsUJZbrZx9JTQZvXY,15150
527
+ cognite_toolkit/_cdf_tk/client/api/extended_timeseries.py,sha256=wi4EzOa6Pup_YKcQ3b3SMvKminJ1kee9-dgYOLwqSQc,17804
528
528
  cognite_toolkit/_cdf_tk/client/api/fixed_transformations.py,sha256=MEC25h_J_fm3wHa-rVJi2V_b5D1Fn3wNapCmIGU2faU,5625
529
529
  cognite_toolkit/_cdf_tk/client/api/location_filters.py,sha256=kUe58xzt2iYdztleep8ocsdE17lOWgqbPjELJnfhoHU,3188
530
530
  cognite_toolkit/_cdf_tk/client/api/lookup.py,sha256=E4YEkk8TzpWQ2v_NacISgXyA4xoelmayqSwaq4vykdk,12250
@@ -562,7 +562,7 @@ cognite_toolkit/_cdf_tk/client/data_classes/streamlit_.py,sha256=OGoMQ_K88F9vSZu
562
562
  cognite_toolkit/_cdf_tk/client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
563
563
  cognite_toolkit/_cdf_tk/client/utils/_concurrency.py,sha256=z6gqFv-kw80DsEpbaR7sI0-_WvZdOdAsR4VoFvTqvyU,1309
564
564
  cognite_toolkit/_cdf_tk/client/utils/_http_client.py,sha256=oXNKrIaizG4WiSAhL_kSCHAuL4aaaEhCU4pOJGxh6Xs,483
565
- cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=6rUv97s6CB5Fje6eg2X3cd9Za9rYJY51xUcPk_RoJT8,1261
565
+ cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=2oQWO2QuSBsXtaLjjKrbmpH909R6gUlVfL55GuyyHwY,1311
566
566
  cognite_toolkit/_cdf_tk/commands/_base.py,sha256=3Zc3ffR8mjZ1eV7WrC-Y1sYmyMzdbbJDDmsiKEMEJwo,2480
567
567
  cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=3bR_C8p02IW6apexwAAoXuneBM4RcUGdX6Hw_Rtx7Kg,24775
568
568
  cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=6nezoDrw3AkF8hANHjUILgTj_nbdzgT0siweaKI35Fk,1047
@@ -575,7 +575,7 @@ cognite_toolkit/_cdf_tk/commands/auth.py,sha256=T6hb90PnlRiTkhihEUvLCbNFyt1_4ML3
575
575
  cognite_toolkit/_cdf_tk/commands/build_cmd.py,sha256=Za0hYNlSE8yMuAdczKYtTgvcqE6DIbCGve1E2OfXekI,30474
576
576
  cognite_toolkit/_cdf_tk/commands/clean.py,sha256=qKHrhkjzerC-oQgkZ_61n7vZw11fxLJsJHILyd9Z5UQ,14418
577
577
  cognite_toolkit/_cdf_tk/commands/collect.py,sha256=zBMKhhvjOpuASMnwP0eeHRI02tANcvFEZgv0CQO1ECc,627
578
- cognite_toolkit/_cdf_tk/commands/deploy.py,sha256=g_mceI5Hq55uz9p5WC7yPVNPR6MBhIMTcBt9G_cWkJQ,19208
578
+ cognite_toolkit/_cdf_tk/commands/deploy.py,sha256=hXviaYPJdx4vxkNb-KbgHydBznfc9LuHxUyW-3M21qI,19218
579
579
  cognite_toolkit/_cdf_tk/commands/dump_data.py,sha256=U_e-fEAEphpkJMlDTHQvQ1F0k3qEMvd0m7zc20XvcQY,21668
580
580
  cognite_toolkit/_cdf_tk/commands/dump_resource.py,sha256=Dt8jlkmtpRtzPDMEjKdpOJPFr92k7Mw-BWkRsE9CJ8s,20515
581
581
  cognite_toolkit/_cdf_tk/commands/featureflag.py,sha256=VPz7FrjVQFqjkz8BYTP2Np3k7BTLFMq_eooNSqmb2ms,1034
@@ -584,10 +584,11 @@ cognite_toolkit/_cdf_tk/commands/modules.py,sha256=lYImbi7eX07j2lbE_8xJ5uix9xa2l
584
584
  cognite_toolkit/_cdf_tk/commands/pull.py,sha256=t7KQCxpoFDNBWTYPohK7chrRzPyAOGVmfaY7iBLnTqM,39286
585
585
  cognite_toolkit/_cdf_tk/commands/repo.py,sha256=vQfLMTzSnI4w6eYCQuMnZ_xXVAVjyLnST4Tmu2zgNfE,3874
586
586
  cognite_toolkit/_cdf_tk/commands/run.py,sha256=88AkfCdS4gXHA4I5ZhdU3HWWA5reOTGbfaauM-Yvp8o,37407
587
- cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=jV7zzYhBxQWGpAWCu-dJ9QxUHp3DBwz2KEQihGb2UuI,161
588
- cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=uONZwIkdD7pcllzu4VT0gHar5x5qpdMayAaQFI4kozQ,6338
589
- cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=y7Fc_bZvavcNTS9LxwiiMnYIDkGOmMXpbm8hTnAjkYw,2593
590
- cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=65oSmngUIDMGzKmwXDLft1ITPiA8COEuIcTnTafo5cg,2511
587
+ cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=E36hrJ71Wfm9y8k3qLXTo-cy2SCxtSy9bH32MixMtm8,226
588
+ cognite_toolkit/_cdf_tk/commands/_migrate/assets.py,sha256=o9kbOLn0WaptXBpLWmlndBY5Wu5AeQsnUvV_lGBzcqY,9296
589
+ cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=tj0IwrW8Tk0U_1ZCte5pbC84Lt95DLe6aVWdhLaI7EA,6776
590
+ cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=pmpHqcv5bOW_lYhG-JfqfesTiMWGSH8dXzEVPbSkDXk,2633
591
+ cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=vtJjmNEVDiYTNajRry1oCqG_yBlHqq00M_NuInvQGZ0,2271
591
592
  cognite_toolkit/_cdf_tk/commands/_migrate/timeseries.py,sha256=noPHSkKKaziI3X0KZLpZY_xjC5yejRX2d9TJgTtyZCo,9659
592
593
  cognite_toolkit/_cdf_tk/data_classes/__init__.py,sha256=Z7ODYLcqrRpo0Cmfx79DDhsA6eEK4hvNST_Qko1vRv0,1645
593
594
  cognite_toolkit/_cdf_tk/data_classes/_base.py,sha256=qyXObVP1SX5Lzqy8cYBZssV9NL3v0Q0-y-pLNF1Ok1I,2652
@@ -605,7 +606,7 @@ cognite_toolkit/_cdf_tk/data_classes/_yaml_comments.py,sha256=zfuDu9aAsb1ExeZBAJ
605
606
  cognite_toolkit/_cdf_tk/loaders/__init__.py,sha256=9giALvw48KIry7WWdCUxA1AvlVFCAR0bOJ5tKAhy-Lk,6241
606
607
  cognite_toolkit/_cdf_tk/loaders/_base_loaders.py,sha256=sF9D7ImyHmjbLBGVM66D2xSmOj8XnG3LmDqlQQZRarQ,20502
607
608
  cognite_toolkit/_cdf_tk/loaders/_data_loaders.py,sha256=GHFylB-LwpYdOHI_hwWPL68TMO3D99iFOzT-oAPZFLc,9190
608
- cognite_toolkit/_cdf_tk/loaders/_worker.py,sha256=xenPKmnx2xbwIzm-KVwSjMTxWoni1_NO4xz7mfDs-EY,9472
609
+ cognite_toolkit/_cdf_tk/loaders/_worker.py,sha256=E1y_xFzsb3mSZiSJJggvRLB6tT-0xBoKlqpa7UaX5us,9385
609
610
  cognite_toolkit/_cdf_tk/loaders/_resource_loaders/__init__.py,sha256=d8ucrEuVA8W9zVon8X6KvRejdDABWNCc6Qco-9BR9AQ,2964
610
611
  cognite_toolkit/_cdf_tk/loaders/_resource_loaders/agent_loaders.py,sha256=2hsaMB9lyNPAWUGKQ3EMA46ceUmbNefdoBx3huWF4fo,3128
611
612
  cognite_toolkit/_cdf_tk/loaders/_resource_loaders/auth_loaders.py,sha256=1wfR9yV_htDBmG6DATm5eN0m0r9wuHg4H0L8X9bAp20,26332
@@ -671,6 +672,7 @@ cognite_toolkit/_cdf_tk/resource_classes/timeseries.py,sha256=wVlXR6tsmC-xmCnF4d
671
672
  cognite_toolkit/_cdf_tk/resource_classes/transformation_destination.py,sha256=AjfGS0JTGGG7aqUiezxtaiv_dBp-QMvHLwulXUIW8sA,6525
672
673
  cognite_toolkit/_cdf_tk/resource_classes/transformation_schedule.py,sha256=eTU1pEtR9z2SGeZU4AcXAGNWGfqf1v3L-wPDEVCSD9s,406
673
674
  cognite_toolkit/_cdf_tk/resource_classes/transformations.py,sha256=dt1coxSflgb7-NGHomYt8jFHPZni-xHgbCzU8i5B7-Q,3555
675
+ cognite_toolkit/_cdf_tk/resource_classes/workflow.py,sha256=fMNfW93D8tdVwO7YgEYYiYvpktSMx4i0viIFg0gD2VY,512
674
676
  cognite_toolkit/_cdf_tk/tk_warnings/__init__.py,sha256=DmvCZhG59NZ7yepDm4s-gy92ysQBJ2t-LnlsLatNwOw,2262
675
677
  cognite_toolkit/_cdf_tk/tk_warnings/base.py,sha256=RMtJ_0iE1xfbTKkvWLkAo5AgscjkNDpT0lmDZ3I8aeI,4435
676
678
  cognite_toolkit/_cdf_tk/tk_warnings/fileread.py,sha256=9xE8i8F_HhrRaQi6IGcE240j4TRZQzrOd7gMByjsXEk,8924
@@ -699,12 +701,12 @@ cognite_toolkit/_repo_files/.gitignore,sha256=3exydcQPCJTldGFJoZy1RPHc1horbAprAo
699
701
  cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
700
702
  cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=KVBxW8urCRDtVlJ6HN-kYmw0NCpW6c4lD-nlxz9tZsQ,692
701
703
  cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=Cp4KYraeWPjP8SnnEIbJoJnjmrRUwc982DPjOOzy2iM,722
702
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=lxYJqdd3aR0ZHU-Xwf4r0tk8vh_yDZABoLHYozuJatA,667
703
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=pJjrtZvQ_1iIuKom7FAAGa4QxiwRwTSifrdNON5oLMs,2430
704
+ cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=r9DQ1_--OcUVFusWhTykM5NKmEn17ijBDatY5XNAoQ8,667
705
+ cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=Go3m_277I0Kv2kuBFjPhYQAf0defj2ORfStEchgy7aA,2430
704
706
  cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
705
707
  cognite_toolkit/demo/_base.py,sha256=63nWYI_MHU5EuPwEX_inEAQxxiD5P6k8IAmlgl4CxpE,8082
706
- cognite_toolkit-0.5.64.dist-info/METADATA,sha256=PulESZBXZQdjGl-QJlNBp0VFkJ6HgMlHjxKykzoREu8,4410
707
- cognite_toolkit-0.5.64.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
708
- cognite_toolkit-0.5.64.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
709
- cognite_toolkit-0.5.64.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
710
- cognite_toolkit-0.5.64.dist-info/RECORD,,
708
+ cognite_toolkit-0.5.65.dist-info/METADATA,sha256=J_4JmnX17kzzGuODbN5Ght5chcP3ZMZPtwKquCTxfUo,4410
709
+ cognite_toolkit-0.5.65.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
710
+ cognite_toolkit-0.5.65.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
711
+ cognite_toolkit-0.5.65.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
712
+ cognite_toolkit-0.5.65.dist-info/RECORD,,