cognite-toolkit 0.6.81__py3-none-any.whl → 0.6.82__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.6.81"
+version = "0.6.82"
 
 
 [plugins]
@@ -6,12 +6,14 @@ from cognite_toolkit._cdf_tk.commands.deploy import DeployCommand
 from cognite_toolkit._cdf_tk.cruds import (
     ContainerCRUD,
     DataModelCRUD,
+    ResourceCRUD,
     ResourceViewMappingCRUD,
     ResourceWorker,
     SpaceCRUD,
     ViewCRUD,
 )
 from cognite_toolkit._cdf_tk.data_classes import DeployResults
+from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
 
 from .data_model import COGNITE_MIGRATION_MODEL, CONTAINERS, MODEL_ID, SPACE, VIEWS
 from .default_mappings import create_default_mappings
@@ -27,27 +29,30 @@ class MigrationPrepareCommand(ToolkitCommand):
         verb = "Would deploy" if dry_run else "Deploying"
         print(f"{verb} {MODEL_ID!r}")
         results = DeployResults([], "deploy", dry_run=dry_run)
-        for loader_cls, resource_list in [
+        crud_cls: type[ResourceCRUD]
+        for crud_cls, resource_list in [  # type: ignore[assignment]
             (SpaceCRUD, [SPACE]),
             (ContainerCRUD, CONTAINERS),
             (ViewCRUD, VIEWS),
             (DataModelCRUD, [COGNITE_MIGRATION_MODEL]),
             (ResourceViewMappingCRUD, create_default_mappings()),
         ]:
-            # MyPy does not understand that `loader_cls` has a `create_loader` method.
-            loader = loader_cls.create_loader(client)  # type: ignore[attr-defined]
-            worker = ResourceWorker(loader, "deploy")
+            crud = crud_cls.create_loader(client)
+            if warning := crud.prerequisite_warning():
+                self.warn(HighSeverityWarning(warning))
+                continue
+            worker = ResourceWorker(crud, "deploy")
             # MyPy does not understand that `loader` has a `get_id` method.
-            dump_arg = {"context": "local"} if loader_cls is ResourceViewMappingCRUD else {}
-            local_by_id = {loader.get_id(item): (item.dump(**dump_arg), item) for item in resource_list}  # type: ignore[attr-defined]
+            dump_arg = {"context": "local"} if crud_cls is ResourceViewMappingCRUD else {}
+            local_by_id = {crud.get_id(item): (item.dump(**dump_arg), item) for item in resource_list}  # type: ignore[attr-defined]
             worker.validate_access(local_by_id, is_dry_run=dry_run)
-            cdf_resources = loader.retrieve(list(local_by_id.keys()))
+            cdf_resources = crud.retrieve(list(local_by_id.keys()))
             resources = worker.categorize_resources(local_by_id, cdf_resources, False, verbose)
 
             if dry_run:
-                result = deploy_cmd.dry_run_deploy(resources, loader, False, False)
+                result = deploy_cmd.dry_run_deploy(resources, crud, False, False)
             else:
-                result = deploy_cmd.actual_deploy(resources, loader)
+                result = deploy_cmd.actual_deploy(resources, crud)
             if result:
                 results[result.name] = result
         if results.has_counts:
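The rename from `loader` to `crud` aside, the behavioral change in this hunk is the early guard: each CRUD is asked for a `prerequisite_warning()` before any work is done, and a non-None answer both raises a `HighSeverityWarning` and skips that CRUD. A minimal sketch of the guard-and-skip idiom, with an illustrative `Crud` protocol standing in for the Toolkit's `ResourceCRUD`:

```python
from typing import Protocol


class Crud(Protocol):
    """Illustrative stand-in for the Toolkit's ResourceCRUD interface."""

    def prerequisite_warning(self) -> str | None: ...


def deploy_all(cruds: list[Crud]) -> None:
    for crud in cruds:
        # The walrus operator binds and tests the warning in one expression:
        # any non-empty string is truthy, so the CRUD is warned about and skipped.
        if warning := crud.prerequisite_warning():
            print(f"HighSeverityWarning: {warning}")
            continue
        print("deploying with", crud)  # placeholder for the real deploy steps
```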
@@ -1,14 +1,20 @@
-import dataclasses
 import uuid
-from collections.abc import Callable, Hashable, Iterable
+from abc import ABC, abstractmethod
+from collections.abc import Callable, Hashable, Iterable, Sequence
+from dataclasses import dataclass
 from functools import partial
 from graphlib import TopologicalSorter
-from typing import Any, cast
+from typing import Literal, cast
 
 import questionary
 from cognite.client.data_classes import AggregateResultItem, DataSetUpdate
 from cognite.client.data_classes._base import CogniteResourceList
-from cognite.client.data_classes.data_modeling import NodeId, NodeList
+from cognite.client.data_classes.data_modeling import (
+    EdgeList,
+    NodeId,
+    NodeList,
+)
+from cognite.client.data_classes.data_modeling.statistics import SpaceStatistics
 from cognite.client.exceptions import CogniteAPIError
 from cognite.client.utils._identifier import InstanceId
 from rich import print
@@ -67,13 +73,13 @@ from cognite_toolkit._cdf_tk.utils.validate_access import ValidateAccess
 from ._base import ToolkitCommand
 
 
-@dataclasses.dataclass
+@dataclass
 class DeleteResults:
     deleted: int = 0
     failed: int = 0
 
 
-@dataclasses.dataclass
+@dataclass
 class DeleteItem:
     item: JsonVal
     as_id_fun: Callable[[JsonVal], Hashable]
@@ -85,6 +91,80 @@ class DeleteItem:
         return self.as_id_fun(self.item)
 
 
+@dataclass
+class ToDelete(ABC):
+    crud: ResourceCRUD
+    total: int
+    delete_url: str
+
+    @property
+    def display_name(self) -> str:
+        return self.crud.display_name
+
+    @abstractmethod
+    def get_process_function(
+        self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
+    ) -> Callable[[CogniteResourceList], list[JsonVal]]:
+        raise NotImplementedError()
+
+
+@dataclass
+class DataModelingToDelete(ToDelete):
+    def get_process_function(
+        self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
+    ) -> Callable[[CogniteResourceList], list[JsonVal]]:
+        def as_id(chunk: CogniteResourceList) -> list[JsonVal]:
+            return [item.as_id().dump(include_type=False) for item in chunk]
+
+        return as_id
+
+
+@dataclass
+class EdgeToDelete(ToDelete):
+    def get_process_function(
+        self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
+    ) -> Callable[[CogniteResourceList], list[JsonVal]]:
+        def as_id(chunk: CogniteResourceList) -> list[JsonVal]:
+            return [
+                {"space": item.space, "externalId": item.external_id, "instanceType": "edge"}
+                for item in cast(EdgeList, chunk)
+            ]
+
+        return as_id
+
+
+@dataclass
+class NodesToDelete(ToDelete):
+    delete_datapoints: bool
+    delete_file_content: bool
+
+    def get_process_function(
+        self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
+    ) -> Callable[[CogniteResourceList], list[JsonVal]]:
+        def check_for_data(chunk: CogniteResourceList) -> list[JsonVal]:
+            node_ids = cast(NodeList, chunk).as_ids()
+            found_ids: set[InstanceId] = set()
+            if not self.delete_datapoints:
+                timeseries = client.time_series.retrieve_multiple(instance_ids=node_ids, ignore_unknown_ids=True)
+                found_ids |= {ts.instance_id for ts in timeseries if ts.instance_id is not None}
+            if not self.delete_file_content:
+                files = client.files.retrieve_multiple(instance_ids=node_ids, ignore_unknown_ids=True)
+                found_ids |= {f.instance_id for f in files if f.instance_id is not None}
+            if found_ids and verbose:
+                console.print(f"Skipping {found_ids} nodes as they have datapoints or file content")
+            process_results.unchanged += len(found_ids)
+            result: list[JsonVal] = []
+            for node_id in (n for n in node_ids if n not in found_ids):
+                dumped = node_id.dump(include_instance_type=True)
+                # The delete endpoint expects "instanceType" instead of "type"
+                dumped["instanceType"] = dumped.pop("type")
+                # MyPy think complains about invariant here, even though dict[str, str] is a type of JsonVal
+                result.append(dumped)  # type: ignore[arg-type]
+            return result
+
+        return check_for_data
+
+
 class PurgeCommand(ToolkitCommand):
     BATCH_SIZE_DM = 1000
 
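The new `ToDelete` hierarchy replaces the previous chain of `isinstance` checks with one strategy object per resource kind: each subclass knows how to turn a chunk of retrieved resources into the JSON payload its delete endpoint expects. A simplified, self-contained analogue of the design (the names below are illustrative, not the Toolkit's):

```python
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Callable


@dataclass
class Deletable(ABC):
    """One strategy per resource kind: a count plus a payload builder."""

    name: str
    total: int

    @abstractmethod
    def payload_builder(self) -> Callable[[list[dict]], list[dict]]: ...


@dataclass
class TypedDeletable(Deletable):
    """Keeps an explicit type marker on every item, as edges and nodes need."""

    type_marker: str

    def payload_builder(self) -> Callable[[list[dict]], list[dict]]:
        def build(chunk: list[dict]) -> list[dict]:
            return [{**item, "instanceType": self.type_marker} for item in chunk]

        return build


# The purge loop can then treat every resource kind uniformly:
for item in [TypedDeletable("edge", total=10, type_marker="edge")]:
    build = item.payload_builder()
    print(build([{"space": "sp", "externalId": "x1"}]))
```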
@@ -99,17 +179,16 @@ class PurgeCommand(ToolkitCommand):
         auto_yes: bool = False,
         verbose: bool = False,
     ) -> DeployResults:
-        results = DeployResults([], "purge", dry_run=dry_run)
-
         # Warning Messages
         if not dry_run:
             self._print_panel("space", selected_space)
+
         if not dry_run and not auto_yes:
             confirm = questionary.confirm(
                 f"Are you really sure you want to purge the {selected_space!r} space?", default=False
             ).ask()
             if not confirm:
-                return results
+                return DeployResults([], "purge", dry_run=dry_run)
 
         stats = client.data_modeling.statistics.spaces.retrieve(selected_space)
         if stats is None:
@@ -118,91 +197,123 @@ class PurgeCommand(ToolkitCommand):
         # ValidateAuth
         validator = ValidateAccess(client, "purge")
         if include_space or (stats.containers + stats.views + stats.data_models) > 0:
+            # We check for write even in dry-run mode. This is because dry-run is expected to fail
+            # if the user cannot perform the purge.
             validator.data_model(["read", "write"], spaces={selected_space})
         if (stats.nodes + stats.edges) > 0:
             validator.instances(["read", "write"], spaces={selected_space})
 
-        config = client.config
-        total_by_crud_cls = {
-            EdgeCRUD: (stats.edges, config.create_api_url("/models/instances/delete")),
-            NodeCRUD: (stats.nodes, config.create_api_url("/models/instances/delete")),
-            DataModelCRUD: (stats.data_models, config.create_api_url("/models/datamodels/delete")),
-            ViewCRUD: (stats.views, config.create_api_url("/models/views/delete")),
-            ContainerCRUD: (stats.containers, config.create_api_url("/models/containers/delete")),
-        }
-        console = Console()
+        to_delete = self._create_to_delete_list_purge_space(client, delete_datapoints, delete_file_content, stats)
         if dry_run:
-            for crud_cls, (total, _) in total_by_crud_cls.items():
-                crud = crud_cls.create_loader(client)  # type: ignore[attr-defined]
-                results[crud.display_name] = ResourceDeployResult(crud.display_name, deleted=total)
+            results = DeployResults([], "purge", dry_run=True)
+            for item in to_delete:
+                results[item.display_name] = ResourceDeployResult(item.display_name, deleted=item.total)
             if include_space:
                 space_loader = SpaceCRUD.create_loader(client)
                 results[space_loader.display_name] = ResourceDeployResult(space_loader.display_name, deleted=1)
         else:
-            with HTTPClient(client.config, max_retries=10) as delete_client:
-                for crud_cls, (total, URL) in total_by_crud_cls.items():
-                    crud = crud_cls.create_loader(client)  # type: ignore[attr-defined]
-                    if total == 0:
-                        results[crud.display_name] = ResourceDeployResult(crud.display_name, deleted=0)
-                        continue
-                    # Two results objects since they are updated concurrently
-                    process_results = ResourceDeployResult(crud.display_name)
-                    write_results = ResourceDeployResult(crud.display_name)
-                    # Containers, DataModels and Views need special handling to avoid type info in the delete call
-                    # While for instance we need the type info in delete.
-                    if isinstance(crud, ContainerCRUD | DataModelCRUD | ViewCRUD):
-                        dump_args = {"include_type": False}
-                    elif isinstance(crud, EdgeCRUD):
-                        dump_args = {"include_instance_type": True}
-                    else:
-                        dump_args = {}
-                    process = partial(self._as_id_batch, dump_args=dump_args)
-                    if isinstance(crud, NodeCRUD):
-                        process = partial(
-                            self._check_data,
-                            client=client,
-                            delete_datapoints=delete_datapoints,
-                            delete_file_content=delete_file_content,
-                            process_results=process_results,
-                            console=console,
-                            verbose=verbose,
-                        )
-
-                    executor = ProducerWorkerExecutor[CogniteResourceList, list[JsonVal]](
-                        download_iterable=self._iterate_batch(crud, selected_space, batch_size=self.BATCH_SIZE_DM),
-                        process=process,
-                        write=self._purge_batch(crud, URL, delete_client, write_results),
-                        max_queue_size=10,
-                        iteration_count=total // self.BATCH_SIZE_DM + (1 if total % self.BATCH_SIZE_DM > 0 else 0),
-                        download_description=f"Downloading {crud.display_name}",
-                        process_description=f"Preparing {crud.display_name} for deletion",
-                        write_description=f"Deleting {crud.display_name}",
-                        console=console,
-                    )
-                    executor.run()
-                    write_results += process_results
-                    results[crud.display_name] = write_results
-
-                    if executor.error_occurred:
-                        self.warn(
-                            HighSeverityWarning(f"Failed to delete all {crud.display_name}. {executor.error_message}")
-                        )
+            results = self._delete_resources(to_delete, client, verbose, selected_space, None)
         if include_space:
-            space_loader = SpaceCRUD.create_loader(client)
-            try:
-                space_loader.delete([selected_space])
-                print(f"Space {selected_space} deleted")
-            except CogniteAPIError as e:
-                self.warn(HighSeverityWarning(f"Failed to delete space {selected_space!r}: {e}"))
-            else:
-                results[space_loader.display_name] = ResourceDeployResult(space_loader.display_name, deleted=1)
+            self._delete_space(client, selected_space, results)
         print(results.counts_table(exclude_columns={"Created", "Changed", "Total"}))
         return results
 
+    def _create_to_delete_list_purge_space(
+        self, client: ToolkitClient, delete_datapoints: bool, delete_file_content: bool, stats: SpaceStatistics
+    ) -> list[ToDelete]:
+        config = client.config
+        to_delete = [
+            EdgeToDelete(
+                EdgeCRUD.create_loader(client), stats.edges, config.create_api_url("/models/instances/delete")
+            ),
+            NodesToDelete(
+                NodeCRUD.create_loader(client),
+                stats.nodes,
+                config.create_api_url(
+                    "/models/instances/delete",
+                ),
+                delete_datapoints=delete_datapoints,
+                delete_file_content=delete_file_content,
+            ),
+            DataModelingToDelete(
+                DataModelCRUD.create_loader(client),
+                stats.data_models,
+                config.create_api_url("/models/datamodels/delete"),
+            ),
+            DataModelingToDelete(
+                ViewCRUD.create_loader(client), stats.views, config.create_api_url("/models/views/delete")
+            ),
+            DataModelingToDelete(
+                ContainerCRUD.create_loader(client),
+                stats.containers,
+                config.create_api_url("/models/containers/delete"),
+            ),
+        ]
+        return to_delete
+
+    def _delete_space(self, client: ToolkitClient, selected_space: str, results: DeployResults) -> None:
+        space_loader = SpaceCRUD.create_loader(client)
+        try:
+            space_loader.delete([selected_space])
+            print(f"Space {selected_space} deleted")
+        except CogniteAPIError as e:
+            self.warn(HighSeverityWarning(f"Failed to delete space {selected_space!r}: {e}"))
+        else:
+            results[space_loader.display_name] = ResourceDeployResult(space_loader.display_name, deleted=1)
+
+    def _delete_resources(
+        self,
+        to_delete: list[ToDelete],
+        client: ToolkitClient,
+        verbose: bool,
+        space: str | None,
+        data_set_external_id: str | None,
+    ) -> DeployResults:
+        results = DeployResults([], "purge", dry_run=False)
+        console = Console()
+        with HTTPClient(client.config, max_retries=10) as delete_client:
+            for item in to_delete:
+                if item.total == 0:
+                    results[item.display_name] = ResourceDeployResult(item.display_name, deleted=0)
+                    continue
+                # Two results objects since they are updated concurrently
+                process_results = ResourceDeployResult(item.display_name)
+                write_results = ResourceDeployResult(item.display_name)
+                iteration_count: int | None = None
+                if item.total > 0:
+                    iteration_count = item.total // self.BATCH_SIZE_DM + (
+                        1 if item.total % self.BATCH_SIZE_DM > 0 else 0
+                    )
+                executor = ProducerWorkerExecutor[CogniteResourceList, list[JsonVal]](
+                    download_iterable=self._iterate_batch(
+                        item.crud, space, data_set_external_id, batch_size=self.BATCH_SIZE_DM
+                    ),
+                    process=item.get_process_function(client, console, verbose, process_results),
+                    write=self._purge_batch(item.crud, item.delete_url, delete_client, write_results),
+                    max_queue_size=10,
+                    iteration_count=iteration_count,
+                    download_description=f"Downloading {item.display_name}",
+                    process_description=f"Preparing {item.display_name} for deletion",
+                    write_description=f"Deleting {item.display_name}",
+                    console=console,
+                )
+                executor.run()
+                write_results += process_results
+                results[item.display_name] = write_results
+                if executor.error_occurred:
+                    if verbose and executor.error_traceback:
+                        executor.print_traceback()
+                    self.warn(
+                        HighSeverityWarning(f"Failed to delete all {item.display_name}. {executor.error_message}")
+                    )
+        return results
+
     @staticmethod
-    def _iterate_batch(crud: ResourceCRUD, selected_space: str, batch_size: int) -> Iterable[CogniteResourceList]:
+    def _iterate_batch(
+        crud: ResourceCRUD, selected_space: str | None, data_set_external_id: str | None, batch_size: int
+    ) -> Iterable[CogniteResourceList]:
         batch = crud.list_cls([])
-        for resource in crud.iterate(space=selected_space):
+        for resource in crud.iterate(space=selected_space, data_set_external_id=data_set_external_id):
            batch.append(resource)
            if len(batch) >= batch_size:
                yield batch
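Two small idioms carry this refactor: `_iterate_batch` accumulates streamed resources into fixed-size chunks, and the expected chunk count is a ceiling division written as `total // B + (1 if total % B > 0 else 0)`, equivalent to `(total + B - 1) // B`. A standalone sketch of both, assuming nothing beyond the standard library:

```python
from collections.abc import Iterable, Iterator

BATCH_SIZE = 1000


def iterate_batch(resources: Iterable[int], batch_size: int = BATCH_SIZE) -> Iterator[list[int]]:
    """Group a stream into lists of at most batch_size, yielding the remainder last."""
    batch: list[int] = []
    for resource in resources:
        batch.append(resource)
        if len(batch) >= batch_size:
            yield batch
            batch = []
    if batch:  # trailing partial batch
        yield batch


total = 2500
# Ceiling division: 2500 items in batches of 1000 -> 3 batches.
iteration_count = total // BATCH_SIZE + (1 if total % BATCH_SIZE > 0 else 0)
assert iteration_count == (total + BATCH_SIZE - 1) // BATCH_SIZE == 3
assert sum(len(b) for b in iterate_batch(range(total))) == total
```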
@@ -210,41 +321,6 @@ class PurgeCommand(ToolkitCommand):
         if batch:
             yield batch
 
-    @staticmethod
-    def _as_id_batch(chunk: CogniteResourceList, dump_args: dict[str, Any]) -> list[JsonVal]:
-        return [item.as_id().dump(**dump_args) for item in chunk]
-
-    @staticmethod
-    def _check_data(
-        chunk: NodeList,
-        client: ToolkitClient,
-        delete_datapoints: bool,
-        delete_file_content: bool,
-        process_results: ResourceDeployResult,
-        console: Console,
-        verbose: bool,
-    ) -> list[JsonVal]:
-        """Check if the node has timeseries or files and delete the data if requested."""
-        node_ids = chunk.as_ids()
-        found_ids: set[InstanceId] = set()
-        if not delete_datapoints:
-            timeseries = client.time_series.retrieve_multiple(instance_ids=node_ids, ignore_unknown_ids=True)
-            found_ids |= {ts.instance_id for ts in timeseries if ts.instance_id is not None}
-        if not delete_file_content:
-            files = client.files.retrieve_multiple(instance_ids=node_ids, ignore_unknown_ids=True)
-            found_ids |= {f.instance_id for f in files if f.instance_id is not None}
-        if found_ids and verbose:
-            console.print(f"Skipping {found_ids} nodes as they have datapoints or file content")
-        process_results.unchanged += len(found_ids)
-        result: list[JsonVal] = []
-        for node_id in (n for n in node_ids if n not in found_ids):
-            dumped = node_id.dump(include_instance_type=True)
-            # The delete endpoint expects "instanceType" instead of "type"
-            dumped["instanceType"] = dumped.pop("type")
-            # MyPy think complains about invariant here, even though dict[str, str] is a type of JsonVal
-            result.append(dumped)  # type: ignore[arg-type]
-        return result
-
     @staticmethod
     def _purge_batch(
         crud: ResourceCRUD, delete_url: str, delete_client: HTTPClient, result: ResourceDeployResult
@@ -275,24 +351,71 @@ class PurgeCommand(ToolkitCommand):
             if loader_cls in dep_cls.dependencies and (exclude is None or dep_cls not in exclude)
         }
 
-    @staticmethod
-    def _get_selected_space(space: str | None, client: ToolkitClient) -> str:
-        if space is None:
-            spaces = client.data_modeling.spaces.list(limit=-1, include_global=False)
-            selected_space = questionary.select(
-                "Which space do you want to purge"
-                " (including all data models, views, containers, nodes and edges within that space)?",
-                sorted([space.space for space in spaces]),
+    def dataset_v2(
+        self,
+        client: ToolkitClient,
+        selected_data_set_external_id: str,
+        include_dataset: bool = False,
+        include_data: bool = True,
+        include_configurations: bool = False,
+        dry_run: bool = False,
+        auto_yes: bool = False,
+        verbose: bool = False,
+    ) -> DeployResults:
+        # Warning Messages
+        if not dry_run:
+            self._print_panel("dataSet", selected_data_set_external_id)
+        if not dry_run and not auto_yes:
+            confirm = questionary.confirm(
+                f"Are you really sure you want to purge the {selected_data_set_external_id!r} dataSet?", default=False
             ).ask()
+            if not confirm:
+                return DeployResults([], "purge", dry_run=dry_run)
+
+        # Validate Auth
+        validator = ValidateAccess(client, "purge")
+        data_set_id = client.lookup.data_sets.id(selected_data_set_external_id)
+        action = cast(Sequence[Literal["read", "write"]], ["read"] if dry_run else ["read", "write"])
+        if include_data:
+            # Check asset, events, time series, files, and sequences access, relationships, labels, 3D access.
+            validator.dataset_data(action, dataset_ids={data_set_id})
+        if include_configurations:
+            # Check workflow, transformations, extraction pipeline access
+            validator.dataset_configurations(action, dataset_ids={data_set_id})
+
+        to_delete: list[ToDelete] = self._create_to_delete_list_purge_dataset(
+            client,
+            include_data,
+            include_configurations,
+        )
+        if dry_run:
+            results = DeployResults([], "purge", dry_run=True)
+            for item in to_delete:
+                results[item.display_name] = ResourceDeployResult(item.display_name, deleted=item.total)
         else:
-            retrieved = client.data_modeling.spaces.retrieve(space)
-            if retrieved is None:
-                raise ToolkitMissingResourceError(f"Space {space} does not exist")
-            selected_space = space
+            results = self._delete_resources(to_delete, client, verbose, None, selected_data_set_external_id)
+        print(results.counts_table(exclude_columns={"Created", "Changed", "Total"}))
+        if include_dataset and not dry_run:
+            self._archive_dataset(client, selected_data_set_external_id)
+        return results
 
-        if selected_space is None:
-            raise ToolkitValueError("No space selected")
-        return selected_space
+    def _archive_dataset(self, client: ToolkitClient, data_set: str) -> None:
+        archived = (
+            DataSetUpdate(external_id=data_set)
+            .external_id.set(str(uuid.uuid4()))
+            .metadata.add({"archived": "true"})
+            .write_protected.set(True)
+        )
+        client.data_sets.update(archived)
+        print(f"DataSet {data_set} archived")
+
+    def _create_to_delete_list_purge_dataset(
+        self,
+        client: ToolkitClient,
+        include_data: bool,
+        include_configurations: bool,
+    ) -> list[ToDelete]:
+        raise NotImplementedError()
 
     def dataset(
         self,
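CDF data sets cannot be hard-deleted, which is presumably why `dataset_v2` ends by archiving rather than deleting: the external ID is renamed to a random UUID (freeing the old ID for reuse), an `archived` metadata flag is added, and the data set is write-protected. The fluent update builder is the `cognite-sdk`'s `DataSetUpdate`, used exactly as in the hunk above; a usage sketch as a free function:

```python
import uuid

from cognite.client import CogniteClient
from cognite.client.data_classes import DataSetUpdate


def archive_dataset(client: CogniteClient, external_id: str) -> None:
    # Rename to a random UUID so the old external ID can be reused,
    # tag the data set as archived, and lock it against further writes.
    archived = (
        DataSetUpdate(external_id=external_id)
        .external_id.set(str(uuid.uuid4()))
        .metadata.add({"archived": "true"})
        .write_protected.set(True)
    )
    client.data_sets.update(archived)
```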
@@ -440,9 +440,11 @@ class AuthCommand(ToolkitCommand):
         loaders_by_capability_tuple: dict[tuple, list[str]] = defaultdict(list)
         capability_by_id: dict[frozenset[tuple], Capability] = {}
         project_type = client.project.status().this_project.data_modeling_status
-        for loader_cls in cruds.RESOURCE_CRUD_LIST:
-            loader = loader_cls.create_loader(client)
-            capability = loader_cls.get_required_capability(None, read_only=False)
+        for crud_cls in cruds.RESOURCE_CRUD_LIST:
+            crud = crud_cls.create_loader(client)
+            if crud.prerequisite_warning() is not None:
+                continue
+            capability = crud_cls.get_required_capability(None, read_only=False)
             capabilities = capability if isinstance(capability, list) else [capability]
             for cap in capabilities:
                 if project_type == "DATA_MODELING_ONLY" and isinstance(cap, AssetsAcl | RelationshipsAcl):
@@ -451,7 +453,7 @@
                 if id_ not in capability_by_id:
                     capability_by_id[id_] = cap
                 for cap_tuple in cap.as_tuples():
-                    loaders_by_capability_tuple[cap_tuple].append(loader.display_name)
+                    loaders_by_capability_tuple[cap_tuple].append(crud.display_name)
         return list(capability_by_id.values()), loaders_by_capability_tuple
 
     def check_has_any_access(self, client: ToolkitClient) -> TokenInspection:
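The rename aside, the interesting mechanics in this auth code are the dedup keys: capabilities are stored under a `frozenset` of their tuple form, so equivalent ACLs requested by different CRUDs collapse to a single entry while the reverse map still records every CRUD that needs each tuple. A minimal sketch of that technique, with plain tuples standing in for the SDK's `Capability` objects and `as_tuples()` output:

```python
from collections import defaultdict

# (acl, action, scope) triples standing in for Capability.as_tuples() output.
required = {
    "AssetCRUD": [("assetsAcl", "READ", "all"), ("assetsAcl", "WRITE", "all")],
    "EventCRUD": [("eventsAcl", "READ", "all")],
    "AssetMappingCRUD": [("assetsAcl", "READ", "all"), ("assetsAcl", "WRITE", "all")],
}

capability_by_id: dict[frozenset[tuple], list[tuple]] = {}
cruds_by_tuple: dict[tuple, list[str]] = defaultdict(list)

for crud_name, tuples in required.items():
    # A frozenset is hashable and order-insensitive, so two CRUDs that need
    # the same set of ACL tuples map to the same key and are stored once.
    id_ = frozenset(tuples)
    if id_ not in capability_by_id:
        capability_by_id[id_] = tuples
    for cap_tuple in tuples:
        cruds_by_tuple[cap_tuple].append(crud_name)

print(len(capability_by_id))  # 2: AssetCRUD and AssetMappingCRUD collapse to one entry
```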
@@ -257,6 +257,15 @@ class ResourceCRUD(
     ) -> Iterable[T_WritableCogniteResource]:
         raise NotImplementedError
 
+    ### These methods can be optionally overwritten in the subclass ###
+    def prerequisite_warning(self) -> str | None:
+        """ "Returns a warning message if there are any prerequisites that must be met before using this CRUD.
+
+        This is used for special resource CRUDs that for example require data models/views to be deployed in CDF
+        to work. For example, the InfieldV1CRUD and the ResourceViewMappingCRUD.
+        """
+        return None
+
     @classmethod
     def get_dependent_items(cls, item: dict) -> "Iterable[tuple[type[ResourceCRUD], Hashable]]":
         """Returns all items that this item requires.
@@ -75,6 +75,15 @@ class InfieldV1CRUD(ResourceCRUD[str, APMConfigWrite, APMConfig, APMConfigWriteL
 
         return DataModelInstancesAcl(actions, DataModelInstancesAcl.Scope.SpaceID([APMConfig.space]))
 
+    def prerequisite_warning(self) -> str | None:
+        views = self.client.data_modeling.views.retrieve(APMConfig.view_id)
+        if len(views) > 0:
+            return None
+        return (
+            f"{self.display_name} requires the {APMConfig.view_id!r} to be deployed. "
+            f"Install the infield options with cdf modules init/add to deploy it."
+        )
+
     def create(self, items: APMConfigWriteList) -> NodeApplyResultList:
         result = self.client.data_modeling.instances.apply(
             nodes=items.as_nodes(), auto_create_direct_relations=True, replace=False
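Both `prerequisite_warning` overrides in this release follow the same shape: retrieve the required view and warn only when the lookup comes back empty. `views.retrieve` accepts a view identifier and returns a list-like result, so `len(...) > 0` doubles as the existence check. A hedged sketch of the same probe as a free function (the `ViewId` value below is an example, not the APM config view):

```python
from cognite.client import CogniteClient
from cognite.client.data_classes.data_modeling import ViewId


def view_missing_warning(client: CogniteClient, view_id: ViewId, display_name: str) -> str | None:
    """Return a warning if the view backing this resource type is not deployed."""
    views = client.data_modeling.views.retrieve(view_id)
    if len(views) > 0:
        return None  # prerequisite met, no warning
    return f"{display_name} requires the {view_id!r} to be deployed."


# Example identifier; the real CRUDs use the view behind their own data model.
example_id = ViewId(space="my_space", external_id="MyView", version="v1")
```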
@@ -67,6 +67,13 @@ class ResourceViewMappingCRUD(
             actions=actions, scope=capabilities.DataModelInstancesAcl.Scope.SpaceID([COGNITE_MIGRATION_SPACE])
         )
 
+    def prerequisite_warning(self) -> str | None:
+        view_id = ResourceViewMapping.get_source()
+        views = self.client.data_modeling.views.retrieve(view_id)
+        if len(views) > 0:
+            return None
+        return f"{self.display_name} requires the {view_id!r} to be deployed. run `cdf migrate prepare` to deploy it."
+
     def create(self, items: NodeApplyList) -> Sized:
         return self.client.migration.resource_view_mapping.upsert(items)
 
@@ -193,18 +193,23 @@ class ProducerWorkerExecutor(Generic[T_Download, T_Processed]):
         """Raises an exception if an error occurred during execution."""
         if self._error_event.is_set():
             if self.verbose and self.error_traceback:
-                self.console.print(
-                    Panel(
-                        self.error_traceback,
-                        title="Traceback",
-                        expand=False,
-                        border_style="red",
-                    )
-                )
+                self.print_traceback()
             raise ToolkitRuntimeError(f"An error occurred during execution: {self.error_message}")
         if self._stop_event.is_set():
             raise ToolkitRuntimeError("Execution was stopped by the user.")
 
+    def print_traceback(self) -> None:
+        """Prints the traceback if an error occurred during execution."""
+        if self.error_traceback:
+            self.console.print(
+                Panel(
+                    self.error_traceback,
+                    title="Traceback",
+                    expand=False,
+                    border_style="red",
+                )
+            )
+
     def _download_worker(self, progress: Progress, download_task: TaskID) -> None:
         """Worker thread for downloading data."""
         try:
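Extracting `print_traceback` lets callers such as the purge command render a captured traceback without also raising. The rendering itself is plain Rich: the traceback text framed in a `Panel`. A self-contained sketch producing the same output:

```python
from rich.console import Console
from rich.panel import Panel

console = Console()
error_traceback = "Traceback (most recent call last):\n  ...\nValueError: boom"

# Same presentation as ProducerWorkerExecutor.print_traceback: the raw
# traceback text in a red, non-expanding panel titled "Traceback".
console.print(
    Panel(
        error_traceback,
        title="Traceback",
        expand=False,
        border_style="red",
    )
)
```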
@@ -98,6 +98,61 @@ class ValidateAccess:
                 f"Unexpected data model instance scope type: {type(instance_scope)}. Expected SpaceID or All."
             )
 
+    def dataset_data(
+        self,
+        action: Sequence[Literal["read", "write"]],
+        dataset_ids: set[int] | None = None,
+        operation: str | None = None,
+    ) -> list[int] | None:
+        """Validate access to dataset data.
+
+        Dataset data resources are:
+        - Assets
+        - Events
+        - Time series
+        - Files
+        - Sequences
+        - Relationships
+        - Labels
+        - 3D models
+
+        Args:
+            action (Sequence[Literal["read", "write"]]): The actions to validate access for
+            dataset_ids (Set[int] | None): The dataset IDs to check access for. If None, checks access for all datasets.
+            operation (str | None): The operation being performed, used for error messages.
+        Returns:
+            list[int] | None: Returns a list of dataset IDs if access is limited to these datasets, or None if access is granted to all datasets.
+        Raises:
+            ValueError: If the client.token.get_scope() returns an unexpected dataset data scope type.
+            AuthorizationError: If the user does not have permission to perform the specified action on the given dataset.
+        """
+        raise NotImplementedError()
+
+    def dataset_configurations(
+        self,
+        action: Sequence[Literal["read", "write"]],
+        dataset_ids: set[int] | None = None,
+        operation: str | None = None,
+    ) -> list[int] | None:
+        """Validate access configuration resources.
+
+        Configuration resources are:
+        - Transformations
+        - Workflows
+        - Extraction pipelines
+
+        Args:
+            action (Sequence[Literal["read", "write"]]): The actions to validate access for
+            dataset_ids (Set[int] | None): The dataset IDs to check access for. If None, checks access for all datasets.
+            operation (str | None): The operation being performed, used for error messages.
+        Returns:
+            list[int] | None: Returns a list of dataset IDs if access is limited to these datasets, or None if access is granted to all datasets.
+        Raises:
+            ValueError: If the client.token.get_scope() returns an unexpected dataset configuration scope type.
+            AuthorizationError: If the user does not have permission to perform the specified action on the given dataset.
+        """
+        raise NotImplementedError()
+
     def timeseries(
         self,
         action: Sequence[Literal["read", "write"]],
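Both validators are still stubs, but their signatures already fix the contract: `None` means access to all data sets, a list means access is limited to those IDs, and insufficient access raises. A sketch of how a caller might consume that contract once the methods are implemented; the `AuthorizationError` stand-in and the `check_dataset_scope` helper are illustrative, not Toolkit code:

```python
class AuthorizationError(Exception):
    """Stand-in for the Toolkit's authorization error named in the docstrings."""


def check_dataset_scope(validator, required_ids: set[int]) -> None:
    """Illustrative consumer of the dataset_data return contract."""
    try:
        allowed = validator.dataset_data(["read", "write"], dataset_ids=required_ids)
    except AuthorizationError as exc:
        raise SystemExit(f"Purge aborted: {exc}") from exc
    if allowed is None:
        return  # access to all data sets; nothing more to check
    missing = required_ids - set(allowed)
    if missing:
        raise SystemExit(f"Purge aborted: no access to data sets {sorted(missing)}")
```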
@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.6.81
+      image: cognite/toolkit:0.6.82
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
     container:
-      image: cognite/toolkit:0.6.81
+      image: cognite/toolkit:0.6.82
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.6.81"
+version = "0.6.82"
 
 
 [plugins]
@@ -1 +1 @@
-__version__ = "0.6.81"
+__version__ = "0.6.82"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.6.81
+Version: 0.6.82
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
@@ -1,9 +1,9 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=1OSAvbOeuIrnsczEG2BtGqRP3L3sq0VMPthmugnqCUw,5821
-cognite_toolkit/_version.py,sha256=DPAGF065ghV7UhfpWAzKRrfLdniUZxLxOK89394u27s,23
+cognite_toolkit/_version.py,sha256=58YuyHOJzttluV6y6hTf_TMuZ4H160Our5ZqrXkH-98,23
 cognite_toolkit/_builtin_modules/README.md,sha256=roU3G05E6ogP5yhw4hdIvVDKV831zCh2pzt9BVddtBg,307
 cognite_toolkit/_builtin_modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognite_toolkit/_builtin_modules/cdf.toml,sha256=wsSYJREZmIIx3ONuIlfeb1gn4m_1f3rJGHxvtcGjcAI,273
+cognite_toolkit/_builtin_modules/cdf.toml,sha256=HT23ji8yaWNQ_2s0H_i3E9BZ4Pfk9Kgeiip7Sole8C4,273
 cognite_toolkit/_builtin_modules/packages.toml,sha256=-z_dCOcZwhW86GV6SCB__XzKSQfZcAupGO7JoNB0TZQ,2735
 cognite_toolkit/_builtin_modules/bootcamp/README.md,sha256=iTVqoy3PLpC-xPi5pbuMIAEHILBSfWTGLexwa1AltpY,211
 cognite_toolkit/_builtin_modules/bootcamp/default.config.yaml,sha256=cBKReVJt2ZqFf5nBJl6mod_yo8iqSpXqh_7MQxi692U,94
@@ -574,11 +574,11 @@ cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=DIwuiRpDhWBDpsW3R3yqj0eWLAE3
 cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=TK6U_rm6VZT_V941kTyHMoulWgJzbDC8YIIQDPJ5x3w,1011
 cognite_toolkit/_cdf_tk/commands/_download.py,sha256=S5WuOAgbZTgU5kXyjqgTzm7e0hN9FI6IYOeQ9U-44Ow,6718
 cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=_4iX3AHAI6eLmRVUlWXCSvVHx1BZW2yDr_i2i9ECg6U,43120
-cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=yNkcjMcDHlHkTUyj-YUQuK-7Yc5jDsOUqvEKVy1PyMs,37167
+cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=aZ9c7iVrKlvgX62ZbYRKlRqx5eThNu6lheaUIFQ_FOo,41281
 cognite_toolkit/_cdf_tk/commands/_upload.py,sha256=kXYmP1YMg-JvsuN1iYaMuZH1qZfapya2j-RABGhqGHo,11860
 cognite_toolkit/_cdf_tk/commands/_utils.py,sha256=ARlbqA_5ZWlgN3-xF-zanzSx4B0-9ULnguA5QgHmKGA,1225
 cognite_toolkit/_cdf_tk/commands/_virtual_env.py,sha256=GFAid4hplixmj9_HkcXqU5yCLj-fTXm4cloGD6U2swY,2180
-cognite_toolkit/_cdf_tk/commands/auth.py,sha256=t1f1acFDokmFd6FPtVxghdgHa8xmj7mmbfne5Mx7goU,31567
+cognite_toolkit/_cdf_tk/commands/auth.py,sha256=N6JgtF0_Qoh-xM8VlBb_IK1n0Lo5I7bIkIHmXm1l7ug,31638
 cognite_toolkit/_cdf_tk/commands/build_cmd.py,sha256=wamXGF6Sa6G8LSmfm4uJtlxsFL5fs0fihmCKe-Zj7TI,30685
 cognite_toolkit/_cdf_tk/commands/clean.py,sha256=2VWZKp_AZ49AaUCCpvc1kezFA_je6y--zjglccqxsjQ,14346
 cognite_toolkit/_cdf_tk/commands/collect.py,sha256=zBMKhhvjOpuASMnwP0eeHRI02tANcvFEZgv0CQO1ECc,627
@@ -603,10 +603,10 @@ cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=i1eUsNX6Dueol9STI
 cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py,sha256=KkSq_4R6hQ15ccG-jHy7vVgPwC5IDd5OaXZLvz5mIZs,5547
 cognite_toolkit/_cdf_tk/commands/_migrate/issues.py,sha256=lWSnuS3CfRDbA7i1g12gJ2reJnQcLmZWxHDK19-Wxkk,5772
 cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py,sha256=J0Cb_TI_G5UoJIxpKGlCOLwjZ5VeDU93G_QP1-84lqU,9878
-cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=sTxeEgIp2xNJXW-RoOhtdAXlww5Wxe94GyzZG1RyYW4,2483
+cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=RfqaNoso5CyBwc-p6ckwcYqBfZXKhdJgdGIyd0TATaI,2635
 cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py,sha256=CYle1Gz69HHnKF4onTIFxrpiOoDApvVK01SFuQuHzP0,2130
 cognite_toolkit/_cdf_tk/cruds/__init__.py,sha256=j0yxDCwd4Cl9KG7SvGTDQg4Y2bHfYVEDv8CBxbFTWUM,6070
-cognite_toolkit/_cdf_tk/cruds/_base_cruds.py,sha256=L7JpH1e3XI7ZgH52zanCHVkO73r8zbe2sDLP83cIkY0,19651
+cognite_toolkit/_cdf_tk/cruds/_base_cruds.py,sha256=PxiteITG04_T9TDkuqaw-aaW9HVLDSkXBolWDe0Guuk,20112
 cognite_toolkit/_cdf_tk/cruds/_data_cruds.py,sha256=3FYKow5uOBWt2-6kzszRJE8YgqxJQlp6WrvFsgwxyD4,9092
 cognite_toolkit/_cdf_tk/cruds/_worker.py,sha256=-jbl4JV18Ow3y8BcecYPx8XxDIP897A80yD7frwuilc,9369
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/__init__.py,sha256=IE6WxGh9AthWxwHnDmj8EJDd7q6W2OGf1vMnr_lrKAY,2769
@@ -617,14 +617,14 @@ cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py,sha256=KrL7bj8q5q
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py,sha256=iXn9iAtwA8mhH-7j9GF-MlLomTcaw3GhEbFY28Wx0iA,9927
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py,sha256=KED-wNXTZbkrK4bNMsJPnP1d0j8hF5apDUPgaKj52TY,57130
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py,sha256=G27TZF1aTN3aruJ1HTfpvhI4fZyHkn-uD6NJpKgkSow,18605
-cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py,sha256=U5dbuHBAyyumq9sDqJVihEe_DY_dGWAiTlaWxHrkZRw,11263
+cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py,sha256=Gtcu-i-nejPCN0Uxn8O5_QakdX2wgDcVCJn1X7AMu-I,11638
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py,sha256=F3n2FOWAPder4z3OTYs81VB-6C6r3oUzJsHvigdhaD0,15500
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py,sha256=JSxkDw9yqdv_jGS_tS_BuqkpSvBSFTkirxp6CMX6TRc,28777
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/group_scoped.py,sha256=WEg8-CxMP64WfE_XXIlH114zM51K0uLaYa4atd992zI,1690
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/hosted_extractors.py,sha256=7y2ffuLjUAafGIXfZqqRkXopQKemmFr_IPi_lD4k-fo,15434
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/industrial_tool.py,sha256=x_hHlFZ1AURmixRKltWQ680JgrEr6CswMFyaG3N-fnk,8011
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/location.py,sha256=NWbL4JopbHWV-ydnTxYiFqULnyxd6mwnT0Hs4q441Gw,12522
-cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py,sha256=vTRw0a2WR6OnHuBWdoYts6srd3CGTTo3l3HqdbDY4cw,4406
+cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py,sha256=BPjlYYwU31DSKe0cs24hhYv4UCTHJ87mDSfmQDI_S2o,4744
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/raw.py,sha256=8Ojq7MpatpsiYRD0IzlljMFHTbNP3Iwv_OToxggNcNQ,12341
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/relationship.py,sha256=eXfMaUL7AIpvTBxTPgDn23oMLtrvNdYjrKWLGmuuzQc,6427
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/robotics.py,sha256=SgSFWxcNt0r5Bs-mn8Fm16RQVYMS1skOSDFkqrf1uBE,17489
@@ -736,7 +736,7 @@ cognite_toolkit/_cdf_tk/utils/graphql_parser.py,sha256=2i2wDjg_Uw3hJ-pHtPK8hczIu
 cognite_toolkit/_cdf_tk/utils/hashing.py,sha256=3NyNfljyYNTqAyAFBd6XlyWaj43jRzENxIuPdOY6nqo,2116
 cognite_toolkit/_cdf_tk/utils/interactive_select.py,sha256=veV93_O-gATbQ1PfRbZq0VotTgaXA4JcU34j_nLKpSU,36155
 cognite_toolkit/_cdf_tk/utils/modules.py,sha256=8VqyqaNyF-xJV5qSsVGT_9eb1TBUWlqloQPSKcLU9ec,6099
-cognite_toolkit/_cdf_tk/utils/producer_worker.py,sha256=L9i6l4SF0VMoMLiT3qJmyiRQnXTYQi3yRhG1zCNO9iM,13999
+cognite_toolkit/_cdf_tk/utils/producer_worker.py,sha256=n01Tr8ml8dHffjX45Vdqmzt2DFGZ0IrcgFembo8y9yM,14153
 cognite_toolkit/_cdf_tk/utils/progress_tracker.py,sha256=LGpC22iSTTlo6FWi38kqBu_E4XouTvZU_N953WAzZWA,3865
 cognite_toolkit/_cdf_tk/utils/repository.py,sha256=voQLZ6NiNvdAFxqeWHbvzDLsLHl6spjQBihiLyCsGW8,4104
 cognite_toolkit/_cdf_tk/utils/sentry_utils.py,sha256=Q3ekrR0bWMtlPVQrfUSsETlkLIaDUZ2u-RdNFFr9-dg,564
@@ -745,7 +745,7 @@ cognite_toolkit/_cdf_tk/utils/table_writers.py,sha256=Rxp_CZDDWrNPERNq6u1xsAX1Ov
 cognite_toolkit/_cdf_tk/utils/text.py,sha256=EpIXjaQ5C5q5fjbUjAW7tncXpdJfiQeV7CYSbr70Bl0,3106
 cognite_toolkit/_cdf_tk/utils/thread_safe_dict.py,sha256=NbRHcZvWpF9xHP5OkOMGFpxrPNbi0Q3Eea6PUNbGlt4,3426
 cognite_toolkit/_cdf_tk/utils/useful_types.py,sha256=tPZOcK1PR1hNogMCgF863APMK6p3528t5kKaKbVl0-s,1352
-cognite_toolkit/_cdf_tk/utils/validate_access.py,sha256=uB_EdlrMDteOgpZDzcSutOjwp3u9jQ91Xl7uuqrBRh0,12427
+cognite_toolkit/_cdf_tk/utils/validate_access.py,sha256=-0NVHg_dlLJ4GC1mhSagthU2BKGbCC2MGb_aX1j6uVg,14791
 cognite_toolkit/_cdf_tk/utils/fileio/__init__.py,sha256=_rZp6E2HaqixzPC57XQGaSm6xm1pFNXNJ4hBAnvGx1c,1137
 cognite_toolkit/_cdf_tk/utils/fileio/_base.py,sha256=MpWaD3lR9vrJ-kGzTiDOtChXhvFD7-xrP-Pzp7vjnLY,756
 cognite_toolkit/_cdf_tk/utils/fileio/_compression.py,sha256=8BAPgg5OKc3vkEEkqOvYsuyh12iXVNuEmC0omWwyJNQ,2355
@@ -761,13 +761,13 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=DqcjDhaYjY7CohwYQX-AQK1-lvxRHQWnGfZadqsMiKo,667
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=ljKhcf0mmrCoe589XPGr7QsdiM9xOllpLuOQaIVYmJM,2430
-cognite_toolkit/_resources/cdf.toml,sha256=wsSYJREZmIIx3ONuIlfeb1gn4m_1f3rJGHxvtcGjcAI,273
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=CAS099vzH_2mIGXVoKxLWhKv4QBUL2f5m_b24uGsLcY,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=COqGUUVpBIuUs0C19lT3kjjGkmtMDRXmXhdEpFV65rg,2430
+cognite_toolkit/_resources/cdf.toml,sha256=HT23ji8yaWNQ_2s0H_i3E9BZ4Pfk9Kgeiip7Sole8C4,273
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.6.81.dist-info/METADATA,sha256=-TkhaqYPoFGPkI9w2Ht4qn-Z23x7MpT2J689Y80FW8A,4501
-cognite_toolkit-0.6.81.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-cognite_toolkit-0.6.81.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
-cognite_toolkit-0.6.81.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
-cognite_toolkit-0.6.81.dist-info/RECORD,,
+cognite_toolkit-0.6.82.dist-info/METADATA,sha256=fDkToQr4Av5sjXWQp1gxY5Phck6QjM82-kDMmTR-lo0,4501
+cognite_toolkit-0.6.82.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognite_toolkit-0.6.82.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.6.82.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.6.82.dist-info/RECORD,,