cognite-toolkit 0.7.20__py3-none-any.whl → 0.7.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -437,4 +437,5 @@ class InFieldCDMLocationConfigCRUD(ResourceCRUD[NodeIdentifier, InFieldCDMLocati
         elif len(json_path) == 4 and json_path[:2] == ("dataExplorationConfig", "filters") and json_path[3] == "values":
             # Handles dataExplorationConfig.filters[i].values
             return diff_list_hashable(local, cdf)
+
         return super().diff_list(local, cdf, json_path)
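
The elif above matches JSON paths of the form ("dataExplorationConfig", "filters", <index>, "values"). A quick illustration of the tuple check with a hypothetical path:

    json_path = ("dataExplorationConfig", "filters", 2, "values")  # hypothetical path into the config
    matches = (
        len(json_path) == 4
        and json_path[:2] == ("dataExplorationConfig", "filters")
        and json_path[3] == "values"
    )
    assert matches  # such lists are diffed with diff_list_hashable
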
@@ -1,4 +1,5 @@
 from collections.abc import Iterable, Sequence
+from typing import Any
 
 from cognite_toolkit._cdf_tk.client.data_classes.canvas import (
     IndustrialCanvas,
@@ -6,7 +7,7 @@ from cognite_toolkit._cdf_tk.client.data_classes.canvas import (
 )
 from cognite_toolkit._cdf_tk.client.data_classes.charts import Chart, ChartList, ChartWrite
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
-from cognite_toolkit._cdf_tk.tk_warnings import MediumSeverityWarning
+from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning, MediumSeverityWarning
 from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, SimpleBodyRequest
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
@@ -218,17 +219,22 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
         references = dumped.get("containerReferences", [])
         if not isinstance(references, list):
             return dumped
+        new_container_references: list[Any] = []
         for container_ref in references:
             if not isinstance(container_ref, dict):
+                new_container_references.append(container_ref)
                 continue
             sources = container_ref.get("sources", [])
             if not isinstance(sources, list) or len(sources) == 0:
+                new_container_references.append(container_ref)
                 continue
             source = sources[0]
             if not isinstance(source, dict) or "properties" not in source:
+                new_container_references.append(container_ref)
                 continue
             properties = source["properties"]
             if not isinstance(properties, dict):
+                new_container_references.append(container_ref)
                 continue
             reference_type = properties.get("containerReferenceType")
             if (
@@ -238,9 +244,13 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
                     "dataGrid",
                 }
             ):  # These container reference types are special cases with a resourceId statically set to -1, which is why we skip them
+                new_container_references.append(container_ref)
                 continue
             resource_id = properties.pop("resourceId", None)
             if not isinstance(resource_id, int):
+                HighSeverityWarning(
+                    f"Invalid resourceId {resource_id!r} in Canvas {canvas.canvas.name}. Skipping."
+                ).print_warning(console=self.client.console)
                 continue
             if reference_type == "asset":
                 external_id = self.client.lookup.assets.external_id(resource_id)
@@ -251,9 +261,16 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
             elif reference_type == "file":
                 external_id = self.client.lookup.files.external_id(resource_id)
             else:
+                new_container_references.append(container_ref)
                 continue
-            if external_id is not None:
-                properties["resourceExternalId"] = external_id
+            if external_id is None:
+                HighSeverityWarning(
+                    f"Failed to look-up {reference_type} external ID for resource ID {resource_id!r}. Skipping resource in Canvas {canvas.canvas.name}"
+                ).print_warning(console=self.client.console)
+                continue
+            properties["resourceExternalId"] = external_id
+            new_container_references.append(container_ref)
+        dumped["containerReferences"] = new_container_references
         return dumped
 
     def json_chunk_to_data(
@@ -312,23 +329,30 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
         return self._load_resource(item_json)
 
     def _load_resource(self, item_json: dict[str, JsonVal]) -> IndustrialCanvasApply:
+        name = self._get_name(item_json)
         references = item_json.get("containerReferences", [])
         if not isinstance(references, list):
             return IndustrialCanvasApply._load(item_json)
+        new_container_references: list[Any] = []
         for container_ref in references:
             if not isinstance(container_ref, dict):
+                new_container_references.append(container_ref)
                 continue
             sources = container_ref.get("sources", [])
             if not isinstance(sources, list) or len(sources) == 0:
+                new_container_references.append(container_ref)
                 continue
             source = sources[0]
             if not isinstance(source, dict) or "properties" not in source:
+                new_container_references.append(container_ref)
                 continue
             properties = source["properties"]
             if not isinstance(properties, dict):
+                new_container_references.append(container_ref)
                 continue
             resource_external_id = properties.pop("resourceExternalId", None)
             if not isinstance(resource_external_id, str):
+                new_container_references.append(container_ref)
                 continue
             reference_type = properties.get("containerReferenceType")
             if reference_type == "asset":
@@ -340,8 +364,24 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
             elif reference_type == "file":
                 resource_id = self.client.lookup.files.id(resource_external_id)
             else:
+                new_container_references.append(container_ref)
                 continue
-            if resource_id is not None:
-                properties["resourceId"] = resource_id
-
-        return IndustrialCanvasApply._load(item_json)
+            if resource_id is None:
+                # Failed look-up, skip the resourceId setting
+                HighSeverityWarning(
+                    f"Failed to look-up {reference_type} ID for external ID {resource_external_id!r}. Skipping resource in Canvas {name}"
+                ).print_warning(console=self.client.console)
+                continue
+            properties["resourceId"] = resource_id
+            new_container_references.append(container_ref)
+        new_item = dict(item_json)
+        new_item["containerReferences"] = new_container_references
+
+        return IndustrialCanvasApply._load(new_item)
+
+    @classmethod
+    def _get_name(cls, item_json: dict[str, JsonVal]) -> str:
+        try:
+            return item_json["canvas"]["sources"][0]["properties"]["name"]  # type: ignore[index,return-value, call-overload]
+        except (KeyError, IndexError, TypeError):
+            return "<unknown>"
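
The Canvas changes above replace in-place mutation with a rebuild of containerReferences: entries that cannot be interpreted are carried over unchanged, entries whose ID lookup succeeds are enriched, and entries whose lookup fails are dropped with a HighSeverityWarning instead of being written back with a dangling reference. A minimal, self-contained sketch of that filter-and-rebuild pattern (hypothetical helper and lookup, not the toolkit API):

    from collections.abc import Callable
    from typing import Any


    def rebuild_references(
        references: list[dict[str, Any]],
        lookup_external_id: Callable[[int], str | None],  # hypothetical lookup; returns None on failure
    ) -> list[dict[str, Any]]:
        rebuilt: list[dict[str, Any]] = []
        for ref in references:
            resource_id = ref.get("resourceId")
            if not isinstance(resource_id, int):
                rebuilt.append(ref)  # entries we cannot interpret pass through unchanged
                continue
            external_id = lookup_external_id(resource_id)
            if external_id is None:
                continue  # failed lookup: drop the entry rather than keep a dangling ID
            rebuilt.append({**ref, "resourceExternalId": external_id})
        return rebuilt
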
@@ -12,7 +12,9 @@ from cognite.client.data_classes.data_modeling import NodeId, ViewId
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.cruds import FileMetadataCRUD
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
-from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
+from cognite_toolkit._cdf_tk.protocols import ResourceResponseProtocol
+from cognite_toolkit._cdf_tk.utils import sanitize_filename
+from cognite_toolkit._cdf_tk.utils.collection import chunker, chunker_sequence
 from cognite_toolkit._cdf_tk.utils.fileio import MultiFileReader
 from cognite_toolkit._cdf_tk.utils.http_client import (
     DataBodyRequest,
@@ -35,6 +37,7 @@ from .selectors._file_content import (
     FileIdentifier,
     FileInstanceID,
     FileInternalID,
+    FileTemplateSelector,
 )
 from .selectors._file_content import NodeId as SelectorNodeId
 
@@ -47,7 +50,16 @@ class UploadFileContentItem(UploadItem[FileMetadataWrite]):
     mime_type: str
 
 
-class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileMetadataWrite]):
+@dataclass
+class MetadataWithFilePath(ResourceResponseProtocol):
+    metadata: FileMetadata
+    file_path: Path
+
+    def as_write(self) -> FileMetadataWrite:
+        return self.metadata.as_write()
+
+
+class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePath, FileMetadataWrite]):
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".ndjson"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
     CHUNK_SIZE = 10
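
FileContentIO now moves MetadataWithFilePath through the generic storage pipeline instead of bare FileMetadata, so each downloaded file's metadata travels together with the relative path its content was written to; data_to_json_chunk later serialises that path into the manifest under the $FILEPATH key. A hypothetical manifest row (illustrative field values only, the exact dump format is defined by FileMetadataCRUD):

    row = {
        "externalId": "my_file",                  # hypothetical identifier
        "name": "report.pdf",
        "mimeType": "application/pdf",
        "$FILEPATH": "file_content/report.pdf",   # path relative to the download target directory
    }
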
@@ -61,10 +73,12 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
         self._crud = FileMetadataCRUD(client, None, None)
         self._target_dir = target_dir
 
-    def as_id(self, item: FileMetadata) -> str:
-        return item.external_id or str(item.id)
+    def as_id(self, item: MetadataWithFilePath) -> str:
+        return item.metadata.external_id or str(item.metadata.id)
 
-    def stream_data(self, selector: FileContentSelector, limit: int | None = None) -> Iterable[Page[FileMetadata]]:
+    def stream_data(
+        self, selector: FileContentSelector, limit: int | None = None
+    ) -> Iterable[Page[MetadataWithFilePath]]:
         if not isinstance(selector, FileIdentifierSelector):
             raise ToolkitNotImplementedError(
                 f"Download with the manifest, {type(selector).__name__}, is not supported for FileContentIO"
@@ -77,7 +91,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
             if metadata is None:
                 continue
             identifiers_map = self._as_metadata_map(metadata)
-            downloaded_files: list[FileMetadata] = []
+            downloaded_files: list[MetadataWithFilePath] = []
             for identifier in identifiers:
                 if identifier not in identifiers_map:
                     continue
@@ -94,7 +108,12 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
                 with filepath.open(mode="wb") as file_stream:
                     for chunk in response.iter_bytes(chunk_size=8192):
                         file_stream.write(chunk)
-                downloaded_files.append(meta)
+                downloaded_files.append(
+                    MetadataWithFilePath(
+                        metadata=meta,
+                        file_path=filepath.relative_to(self._target_dir),
+                    )
+                )
             yield Page(items=downloaded_files, worker_id="Main")
 
     def _retrieve_metadata(self, identifiers: Sequence[FileIdentifier]) -> Sequence[FileMetadata] | None:
@@ -142,13 +161,13 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
 
     def _create_filepath(self, meta: FileMetadata, selector: FileIdentifierSelector) -> Path:
         # We know that metadata always has name set
-        filename = Path(cast(str, meta.name))
+        filename = Path(sanitize_filename(cast(str, meta.name)))
         if len(filename.suffix) == 0 and meta.mime_type:
             if mime_ext := mimetypes.guess_extension(meta.mime_type):
                 filename = filename.with_suffix(mime_ext)
-        directory = selector.file_directory
+        directory = sanitize_filename(selector.file_directory)
         if isinstance(meta.directory, str) and meta.directory != "":
-            directory = Path(meta.directory.removeprefix("/"))
+            directory = sanitize_filename(meta.directory.removeprefix("/"))
 
         counter = 1
         filepath = self._target_dir / directory / filename
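
_create_filepath now runs both the metadata name and the target directory through sanitize_filename before composing the download path. The helper's implementation is not part of this diff; a minimal sketch of what such a helper might do, stated as an assumption rather than the actual cognite_toolkit code:

    import re


    def sanitize_filename(name: str) -> str:
        # Replace characters that are invalid on common filesystems and strip trailing dots/spaces.
        cleaned = re.sub(r'[<>:"|?*\x00-\x1f]', "_", name)
        return cleaned.rstrip(" .") or "_"
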
@@ -185,7 +204,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
         return None
 
     def data_to_json_chunk(
-        self, data_chunk: Sequence[FileMetadata], selector: FileContentSelector | None = None
+        self, data_chunk: Sequence[MetadataWithFilePath], selector: FileContentSelector | None = None
     ) -> list[dict[str, JsonVal]]:
         """Convert a writable Cognite resource list to a JSON-compatible chunk of data.
 
@@ -197,7 +216,8 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
         """
         result: list[dict[str, JsonVal]] = []
         for item in data_chunk:
-            item_json = self._crud.dump_resource(item)
+            item_json = self._crud.dump_resource(item.metadata)
+            item_json[FILEPATH] = item.file_path.as_posix()
             result.append(item_json)
         return result
 
@@ -213,7 +233,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
         result: list[UploadFileContentItem] = []
         for source_id, item_json in data_chunk:
             item = self.json_to_resource(item_json)
-            filepath = cast(Path, item_json[FILEPATH])
+            filepath = Path(cast(str | Path, item_json[FILEPATH]))
             mime_type, _ = mimetypes.guess_type(filepath)
             # application/octet-stream is the standard fallback for binary data when the type is unknown. (at least Claude thinks so)
             result.append(
@@ -236,7 +256,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
         selector: FileContentSelector | None = None,
     ) -> Sequence[HTTPMessage]:
         results: MutableSequence[HTTPMessage] = []
-        if isinstance(selector, FileMetadataTemplateSelector):
+        if isinstance(selector, FileMetadataTemplateSelector | FileIdentifierSelector):
             upload_url_getter = self._upload_url_asset_centric
         elif isinstance(selector, FileDataModelingTemplateSelector):
             upload_url_getter = self._upload_url_data_modeling
@@ -379,13 +399,35 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, FileMetadata, FileM
     def read_chunks(
         cls, reader: MultiFileReader, selector: FileContentSelector
     ) -> Iterable[list[tuple[str, dict[str, JsonVal]]]]:
-        for chunk in chunker_sequence(reader.input_files, cls.CHUNK_SIZE):
-            batch: list[tuple[str, dict[str, JsonVal]]] = []
-            for file_path in chunk:
-                metadata = selector.create_instance(file_path)
-                metadata[FILEPATH] = file_path
-                batch.append((str(file_path), metadata))
-            yield batch
+        if isinstance(selector, FileTemplateSelector):
+            for chunk in chunker_sequence(reader.input_files, cls.CHUNK_SIZE):
+                batch: list[tuple[str, dict[str, JsonVal]]] = []
+                for file_path in chunk:
+                    metadata = selector.create_instance(file_path)
+                    metadata[FILEPATH] = file_path
+                    batch.append((file_path.as_posix(), metadata))
+                yield batch
+        elif isinstance(selector, FileIdentifierSelector):
+            for item_chunk in chunker(reader.read_chunks(), cls.CHUNK_SIZE):
+                batch = []
+                for item in item_chunk:
+                    if FILEPATH not in item:
+                        # Todo Log warning
+                        continue
+                    try:
+                        file_path = Path(item[FILEPATH])
+                    except KeyError:
+                        # Todo Log warning
+                        continue
+                    if not file_path.is_absolute():
+                        file_path = reader.input_file.parent / file_path
+                    item[FILEPATH] = file_path
+                    batch.append((file_path.as_posix(), item))
+                yield batch
+        else:
+            raise ToolkitNotImplementedError(
+                f"Reading with the manifest, {type(selector).__name__}, is not supported for FileContentIO"
+            )
 
     @classmethod
     def count_chunks(cls, reader: MultiFileReader) -> int:
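
read_chunks now branches on the selector type: template selectors still batch the data files on disk with chunker_sequence, while FileIdentifierSelector batches the manifest rows streamed from reader.read_chunks() with chunker. The distinction matters because the manifest rows come from a lazy iterator rather than an in-memory sequence; a sketch of what a lazy batching helper of this kind could look like (assumed behaviour, not the toolkit's own implementation):

    from collections.abc import Iterable, Iterator
    from itertools import islice
    from typing import TypeVar

    T = TypeVar("T")


    def chunker(items: Iterable[T], chunk_size: int) -> Iterator[list[T]]:
        # Yield lists of at most chunk_size items without materialising the whole iterable.
        iterator = iter(items)
        while chunk := list(islice(iterator, chunk_size)):
            yield chunk
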
@@ -6,8 +6,6 @@ from typing import Annotated, Any, Literal
 
 from pydantic import ConfigDict, Field, field_validator, model_validator
 
-from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
-
 from ._base import DataSelector, SelectorObject
 from ._instances import SelectedView
 
@@ -17,6 +15,9 @@ FILEPATH = "$FILEPATH"
 
 class FileContentSelector(DataSelector, ABC):
     kind: Literal["FileContent"] = "FileContent"
+
+
+class FileTemplateSelector(FileContentSelector, ABC):
     file_directory: Path
 
     def find_data_files(self, input_dir: Path, manifest_file: Path) -> list[Path]:
@@ -52,7 +53,7 @@ class FileMetadataTemplate(FileTemplate):
         return v
 
 
-class FileMetadataTemplateSelector(FileContentSelector):
+class FileMetadataTemplateSelector(FileTemplateSelector):
     type: Literal["fileMetadataTemplate"] = "fileMetadataTemplate"
     template: FileMetadataTemplate
 
@@ -96,7 +97,7 @@ class FileDataModelingTemplate(FileTemplate):
         return data
 
 
-class FileDataModelingTemplateSelector(FileContentSelector):
+class FileDataModelingTemplateSelector(FileTemplateSelector):
     type: Literal["fileDataModelingTemplate"] = "fileDataModelingTemplate"
     view_id: SelectedView = SelectedView(space="cdf_cdm", external_id="CogniteFile", version="v1")
     template: FileDataModelingTemplate
@@ -150,7 +151,7 @@ FileIdentifier = Annotated[FileInstanceID | FileExternalID | FileInternalID, Fie
 
 class FileIdentifierSelector(FileContentSelector):
     type: Literal["fileIdentifier"] = "fileIdentifier"
-    file_directory: Path = Path("file_content")
+    file_directory: str = "file_content"
     use_metadata_directory: bool = True
     identifiers: tuple[FileIdentifier, ...]
 
@@ -161,6 +162,3 @@ class FileIdentifierSelector(FileContentSelector):
     def __str__(self) -> str:
         hash_ = hashlib.md5(",".join(sorted(str(self.identifiers))).encode()).hexdigest()[:8]
         return f"file_{len(self.identifiers)}_identifiers_{hash_}"
-
-    def create_instance(self, filepath: Path) -> dict[str, Any]:
-        raise ToolkitNotImplementedError("FileIdentifierSelector does not support creating instances from file paths.")
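
Taken together, the selector hunks split the hierarchy into a template branch and an identifier branch: file_directory as a Path moves down to the new FileTemplateSelector, both template selectors now derive from it, FileIdentifierSelector keeps a plain str file_directory, and its create_instance stub that only raised ToolkitNotImplementedError is removed. A simplified outline of the resulting hierarchy (pydantic details omitted):

    from abc import ABC
    from pathlib import Path


    class FileContentSelector(ABC):            # kind = "FileContent"
        ...


    class FileTemplateSelector(FileContentSelector, ABC):
        file_directory: Path                   # template selectors scan a directory of data files


    class FileMetadataTemplateSelector(FileTemplateSelector):
        type = "fileMetadataTemplate"


    class FileDataModelingTemplateSelector(FileTemplateSelector):
        type = "fileDataModelingTemplate"


    class FileIdentifierSelector(FileContentSelector):
        file_directory: str = "file_content"   # plain string after this change
        use_metadata_directory: bool = True
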
@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.7.20
+      image: cognite/toolkit:0.7.22
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
     container:
-      image: cognite/toolkit:0.7.20
+      image: cognite/toolkit:0.7.22
       env:
         CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
         CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.7.20"
+version = "0.7.22"
 
 
 [plugins]
@@ -1 +1 @@
-__version__ = "0.7.20"
+__version__ = "0.7.22"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.7.20
+Version: 0.7.22
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
@@ -1,6 +1,6 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=sefGD2JQuOTBZhEqSj_ECbNZ7nTRN4AwGwX1pSUhoow,5636
-cognite_toolkit/_version.py,sha256=k_NYgI0xLUB_dPJL4N6BdDmPFD0V9fwqtEqrNG-_EGQ,23
+cognite_toolkit/_version.py,sha256=05OpHzT8pDPhLRXPkdqyUUquJk9jolNT5B2C6ovgfac,23
 cognite_toolkit/config.dev.yaml,sha256=M33FiIKdS3XKif-9vXniQ444GTZ-bLXV8aFH86u9iUQ,332
 cognite_toolkit/_cdf_tk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=VSWV9h44HusWIaKpWgjrOMrc3hDoPTTXBXlp6-NOrIM,9079
@@ -152,7 +152,7 @@ cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py,sha256=plVGY-hvT0
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py,sha256=U0ItuoNr1KEtoFQAiIe-K19_72ht9-kGndFVgF-iC10,9524
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py,sha256=SagiSp3JERgEU3SnkjQ76vrxSM7gRA17lvoh0BW4KeQ,64867
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py,sha256=a2HywkruYNJGLZxqOjlp8mrpRGtJDPqIb6qY00eUbEI,17701
-cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py,sha256=2My16O11WrWomjanLalspZqlQBWG3H-ke2-MTG25bfI,20790
+cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py,sha256=Tzrkfp9mtseUgnMdpVy_iBTup8S_1caypR5tknedeH4,20791
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py,sha256=vyeRsiIOEbUeYslBsgXoyCk5hozDsubUilA7bdjqS5c,14855
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py,sha256=v3kjn3igwTF53LJ6pp0O8d4S1XFJ1eXQGCchWAcaAx0,28439
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/group_scoped.py,sha256=WEg8-CxMP64WfE_XXIlH114zM51K0uLaYa4atd992zI,1690
@@ -244,12 +244,12 @@ cognite_toolkit/_cdf_tk/resource_classes/robotics/location.py,sha256=dbc9HT-bc2Q
 cognite_toolkit/_cdf_tk/resource_classes/robotics/map.py,sha256=j77z7CzCMiMj8r94BdUKCum9EuZRUjaSlUAy9K9DL_Q,942
 cognite_toolkit/_cdf_tk/storageio/__init__.py,sha256=h5Wr4i7zNIgsslrsRJxmp7ls4bNRKl0uZzQ7GLRMP7g,1920
 cognite_toolkit/_cdf_tk/storageio/_annotations.py,sha256=JI_g18_Y9S7pbc9gm6dZMyo3Z-bCndJXF9C2lOva0bQ,4848
-cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=VlTRHqp8jVu16SW7LtrN05BNYZHSPtJ_wf9EsBAAsvE,16419
+cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=ozWeuTqay1_GSFuQZUxXJspsBhLEBIS7EksAi93B8_4,18722
 cognite_toolkit/_cdf_tk/storageio/_asset_centric.py,sha256=TirKLSNPoLqKjczsw0djWAsR0VvopwmU23aUxrBOJN8,32464
 cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=ElvqhIEBnhcz0yY1Ds164wVN0_7CFNK-uT0-z7LcR9U,13067
 cognite_toolkit/_cdf_tk/storageio/_data_classes.py,sha256=s3TH04BJ1q7rXndRhEbVMEnoOXjxrGg4n-w9Z5uUL-o,3480
 cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=xE1YgoP98-mJjIeF5536KwChzhVY90KYl-bW5sRVhFQ,20206
-cognite_toolkit/_cdf_tk/storageio/_file_content.py,sha256=qcJDk7YGUZ7YmVTYUDAi8eOK2sozEoxmehpmWrA45Ak,17127
+cognite_toolkit/_cdf_tk/storageio/_file_content.py,sha256=Wo31GekIP2VZAUpUy-f0qW2b4IjkRo7aiTXIZxbHoFo,18965
 cognite_toolkit/_cdf_tk/storageio/_instances.py,sha256=t9fNpHnT6kCk8LDoPj3qZXmHpyDbPF5BZ6pI8ziTyFw,10810
 cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=pgZN5MbqDwMZl9Ow1KouDJUO2Ngga8_b6hwv7H31SVQ,5161
 cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=VUK1A76zsu4a25A3oaPUrQEEuRcCpUBK6o8UMMKw7qg,2458
@@ -258,7 +258,7 @@ cognite_toolkit/_cdf_tk/storageio/selectors/_base.py,sha256=hjFkbmNGsK3QIW-jnJV_
 cognite_toolkit/_cdf_tk/storageio/selectors/_canvas.py,sha256=E9S-wr-JUqRosI_2cSCfR0tF8MdIFTrMxDItuWRcuO4,597
 cognite_toolkit/_cdf_tk/storageio/selectors/_charts.py,sha256=lQHuNtF3i6SEIMPAlziMm0QlqRcvZJ7MKIug6HMTDrs,1012
 cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py,sha256=qdR9wttPUoHZIRQjt2RiLO0cPH8C4CD09GsH1KA5KF4,2343
-cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py,sha256=A2ikNImKTC-WuG5c-WLEJ6LfaB7FytXS57-D2ORuA1k,5326
+cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py,sha256=e7riknOinuhJszkROHeg7iv3foiVz7mIJmva6lMTiOs,5116
 cognite_toolkit/_cdf_tk/storageio/selectors/_instances.py,sha256=NCFSJrAw52bNX6UTfOali8PvNjlqHnvxzL0hYBr7ZmA,4934
 cognite_toolkit/_cdf_tk/storageio/selectors/_raw.py,sha256=sZq9C4G9DMe3S46_usKet0FphQ6ow7cWM_PfXrEAakk,503
 cognite_toolkit/_cdf_tk/tk_warnings/__init__.py,sha256=U9bT-G2xKrX6mmtZ7nZ1FfQeCjNKfKP_p7pev90dwOE,2316
@@ -305,13 +305,13 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=sVWo0P7uvKyz1D1mrYhdiRak7DOy0o8GxTNtAR3tNrg,667
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=nLRRf5K6lur4MsjQPl3wSLMTFb4vAf8MBeAmq8bFBok,2430
-cognite_toolkit/_resources/cdf.toml,sha256=V0xczqShauGTujzM49gbP9aLYGTWf4SWo7ZDWDWkxEM,475
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=4IYuywiBl7e1weDj23sLxaod25dD28xNSdPyZAHpvVo,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=moRss4JbKJnWzdL-KLC9bfSrOIxCCU-WtnkQzyr_3_E,2430
+cognite_toolkit/_resources/cdf.toml,sha256=gRhan9ulqtot-f73Yh_fh0QqJ7cg8zRBwZTEwcZQTmg,475
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.7.20.dist-info/METADATA,sha256=ouWrKIy9SwM_KVNSweTz9FxFNOmODdelIly77qGRhy0,4501
-cognite_toolkit-0.7.20.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-cognite_toolkit-0.7.20.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
-cognite_toolkit-0.7.20.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
-cognite_toolkit-0.7.20.dist-info/RECORD,,
+cognite_toolkit-0.7.22.dist-info/METADATA,sha256=UstCMFrvlrg-rFKdD4X_ajXHLrHKOUGBwDYHfxiu_DM,4501
+cognite_toolkit-0.7.22.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+cognite_toolkit-0.7.22.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.7.22.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.7.22.dist-info/RECORD,,