cognite-toolkit 0.6.85__py3-none-any.whl → 0.6.87__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -1,4 +1,4 @@
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
 from functools import partial
 from pathlib import Path
 
@@ -42,7 +42,7 @@ class DownloadCommand(ToolkitCommand):
 
         console = io.client.console
         for selector in selectors:
-            target_dir = output_dir / selector.group
+            target_dir = output_dir / sanitize_filename(selector.group)
             if verbose:
                 console.print(f"Downloading {selector.display_name} '{selector!s}' to {target_dir.as_posix()!r}")
 
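The switch to sanitize_filename guards against selector groups containing path-unsafe characters. A minimal sketch of the failure mode, using a hypothetical group value and assuming sanitize_filename strips or replaces characters that are invalid in file names:

from pathlib import Path

output_dir = Path("downloads")

# Without sanitizing, a group such as "my-db/my-table" (hypothetical value)
# would silently create a nested "my-db/" directory, or fail outright on
# characters that are invalid on the target file system:
unsafe_dir = output_dir / "my-db/my-table"
print(unsafe_dir)  # downloads/my-db/my-table

# With the fix, the group is first passed through sanitize_filename:
# target_dir = output_dir / sanitize_filename(selector.group)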
@@ -57,9 +57,10 @@ class DownloadCommand(ToolkitCommand):
 
             selector.dump_to_file(target_dir)
             columns: list[SchemaColumn] | None = None
-            if file_format in TABLE_WRITE_CLS_BY_FORMAT and isinstance(io, TableStorageIO):
+            is_table = file_format in TABLE_WRITE_CLS_BY_FORMAT
+            if is_table and isinstance(io, TableStorageIO):
                 columns = io.get_schema(selector)
-            elif file_format in TABLE_WRITE_CLS_BY_FORMAT:
+            elif is_table:
                 raise ToolkitValueError(
                     f"Cannot download {selector.kind} in {file_format!r} format. The {selector.kind!r} storage type does not support table schemas."
                 )
@@ -69,7 +70,7 @@ class DownloadCommand(ToolkitCommand):
             ) as writer:
                 executor = ProducerWorkerExecutor[Page[T_CogniteResource], list[dict[str, JsonVal]]](
                     download_iterable=io.stream_data(selector, limit),
-                    process=partial(self.process_data_chunk, io=io, selector=selector),
+                    process=self.create_data_process(io=io, selector=selector, is_table=is_table),
                     write=partial(writer.write_chunks, filestem=filestem),
                     iteration_count=iteration_count,
                     # Limit queue size to avoid filling up memory before the workers can write to disk.
@@ -124,19 +125,20 @@ class DownloadCommand(ToolkitCommand):
         return False
 
     @staticmethod
-    def process_data_chunk(
-        data_page: Page[T_CogniteResource],
+    def create_data_process(
         io: StorageIO[T_Selector, T_CogniteResource],
         selector: T_Selector,
-    ) -> list[dict[str, JsonVal]]:
-        """Processes a chunk of data by converting it to a JSON-compatible format.
+        is_table: bool,
+    ) -> Callable[[Page[T_CogniteResource]], list[dict[str, JsonVal]]]:
+        """Creates a data processing function based on the IO type and whether the output is a table."""
+        if is_table and isinstance(io, TableStorageIO):
 
-        Args:
-            data_page: The page of data to process.
-            io: The StorageIO instance that defines how to process the data.
-            selector: The selection criteria used to identify the data.
+            def row_data_process(chunk: Page[T_CogniteResource]) -> list[dict[str, JsonVal]]:
+                return io.data_to_row(chunk.items, selector)
 
-        Returns:
-            A list of dictionaries representing the processed data in a JSON-compatible format.
-        """
-        return io.data_to_json_chunk(data_page.items, selector)
+            return row_data_process
+
+        def chunk_data_process(data_page: Page[T_CogniteResource]) -> list[dict[str, JsonVal]]:
+            return io.data_to_json_chunk(data_page.items, selector)
+
+        return chunk_data_process
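
This refactor replaces a partial over a static method with a factory returning a closure, so the table-versus-JSON branch is decided once when the pipeline is built rather than re-checked per chunk. A minimal, self-contained sketch of the pattern (names here are illustrative, not the toolkit's):

from collections.abc import Callable

def make_process(use_rows: bool) -> Callable[[list[str]], list[str]]:
    # The branch is evaluated once, at pipeline construction time.
    if use_rows:
        def row_process(page: list[str]) -> list[str]:
            return [f"row:{item}" for item in page]
        return row_process

    def json_process(page: list[str]) -> list[str]:
        return [f"json:{item}" for item in page]
    return json_process

process = make_process(use_rows=True)
print(process(["a", "b"]))  # ['row:a', 'row:b']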
@@ -8,15 +8,16 @@ from rich.console import Console
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.constants import DATA_MANIFEST_STEM, DATA_RESOURCE_DIR
+from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
 from cognite_toolkit._cdf_tk.storageio import T_Selector, UploadableStorageIO, are_same_kind, get_upload_io
-from cognite_toolkit._cdf_tk.storageio._base import T_WriteCogniteResource, UploadItem
+from cognite_toolkit._cdf_tk.storageio._base import T_WriteCogniteResource, TableUploadableStorageIO, UploadItem
 from cognite_toolkit._cdf_tk.storageio.selectors import Selector, SelectorAdapter
 from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning, MediumSeverityWarning
 from cognite_toolkit._cdf_tk.tk_warnings.fileread import ResourceFormatWarning
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._cdf_tk.utils.collection import chunker
 from cognite_toolkit._cdf_tk.utils.file import read_yaml_file
-from cognite_toolkit._cdf_tk.utils.fileio import FileReader
+from cognite_toolkit._cdf_tk.utils.fileio import TABLE_READ_CLS_BY_FORMAT, FileReader
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, ItemMessage, SuccessResponseItems
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
 from cognite_toolkit._cdf_tk.utils.progress_tracker import ProgressTracker
@@ -178,13 +179,19 @@ class UploadCommand(ToolkitCommand):
             if verbose:
                 console.print(f"{action} {selector.display_name} from {file_display.as_posix()!r}")
             reader = FileReader.from_filepath(data_file)
+            is_table = reader.format in TABLE_READ_CLS_BY_FORMAT
+            if is_table and not isinstance(io, TableUploadableStorageIO):
+                raise ToolkitValueError(f"{selector.display_name} does not support {reader.format!r} files.")
             tracker = ProgressTracker[str]([self._UPLOAD])
+            data_name = "row" if is_table else "line"
             executor = ProducerWorkerExecutor[list[tuple[str, dict[str, JsonVal]]], Sequence[UploadItem]](
                 download_iterable=chunker(
-                    ((f"line {line_no}", item) for line_no, item in enumerate(reader.read_chunks(), 1)),
+                    ((f"{data_name} {line_no}", item) for line_no, item in enumerate(reader.read_chunks(), 1)),
                     io.CHUNK_SIZE,
                 ),
-                process=io.json_chunk_to_data,
+                process=partial(io.rows_to_data, selector=selector)
+                if is_table and isinstance(io, TableUploadableStorageIO)
+                else io.json_chunk_to_data,
                 write=partial(
                     self._upload_items,
                     upload_client=upload_client,
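
Each input record is tagged with a human-readable source ID ("row N" for table formats, "line N" for line-based ones) before batching, so upload errors can point back to the offending record. A self-contained sketch of that producer, with a simplified chunker standing in for the toolkit's cognite_toolkit._cdf_tk.utils.collection.chunker:

from itertools import islice

def chunker(iterable, size):
    # Simplified stand-in: batch an iterable into lists of at most `size` items.
    it = iter(iterable)
    while batch := list(islice(it, size)):
        yield batch

records = [{"externalId": "a"}, {"externalId": "b"}, {"externalId": "c"}]
data_name = "row"  # "row" for table formats, "line" otherwise
labeled = ((f"{data_name} {no}", item) for no, item in enumerate(records, 1))

for batch in chunker(labeled, 2):
    print(batch)
# [('row 1', {'externalId': 'a'}), ('row 2', {'externalId': 'b'})]
# [('row 3', {'externalId': 'c'})]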
@@ -67,7 +67,14 @@ from cognite_toolkit._cdf_tk.utils.useful_types import (
     T_WritableCogniteResourceList,
 )
 
-from ._base import ConfigurableStorageIO, Page, StorageIOConfig, TableStorageIO, UploadableStorageIO, UploadItem
+from ._base import (
+    ConfigurableStorageIO,
+    Page,
+    StorageIOConfig,
+    TableStorageIO,
+    TableUploadableStorageIO,
+    UploadItem,
+)
 from .selectors import AssetCentricSelector, AssetSubtreeSelector, DataSetSelector
 
 
@@ -75,7 +82,7 @@ class BaseAssetCentricIO(
     Generic[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList],
     TableStorageIO[AssetCentricSelector, T_WritableCogniteResource],
     ConfigurableStorageIO[AssetCentricSelector, T_WritableCogniteResource],
-    UploadableStorageIO[AssetCentricSelector, T_WritableCogniteResource, T_WriteClass],
+    TableUploadableStorageIO[AssetCentricSelector, T_WritableCogniteResource, T_WriteClass],
     ABC,
 ):
     RESOURCE_TYPE: ClassVar[AssetCentricType]
@@ -179,6 +186,32 @@ class BaseAssetCentricIO(
                 asset_ids.update(item.asset_ids or [])
             self.client.lookup.assets.external_id(list(asset_ids))
 
+    def data_to_row(
+        self, data_chunk: Sequence[T_WritableCogniteResource], selector: AssetCentricSelector | None = None
+    ) -> list[dict[str, JsonVal]]:
+        rows: list[dict[str, JsonVal]] = []
+        for chunk in self.data_to_json_chunk(data_chunk, selector):
+            if "metadata" in chunk and isinstance(chunk["metadata"], dict):
+                metadata = chunk.pop("metadata")
+                # MyPy does not understand that metadata is a dict here, despite the check above.
+                for key, value in metadata.items():  # type: ignore[union-attr]
+                    chunk[f"metadata.{key}"] = value
+            rows.append(chunk)
+        return rows
+
+    def row_to_resource(self, row: dict[str, JsonVal], selector: AssetCentricSelector | None = None) -> T_WriteClass:
+        metadata: dict[str, JsonVal] = {}
+        cleaned_row: dict[str, JsonVal] = {}
+        for key, value in row.items():
+            if key.startswith("metadata."):
+                metadata_key = key[len("metadata.") :]
+                metadata[metadata_key] = value
+            else:
+                cleaned_row[key] = value
+        if metadata:
+            cleaned_row["metadata"] = metadata
+        return self.json_to_resource(cleaned_row)
+
 
 class AssetIO(BaseAssetCentricIO[str, AssetWrite, Asset, AssetWriteList, AssetList]):
     KIND = "Assets"
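
data_to_row and row_to_resource are inverses: nested metadata is flattened into dotted "metadata.<key>" columns for table output, and nested again on upload. A standalone sketch of the round trip:

nested = {"externalId": "pump-01", "metadata": {"site": "A", "vendor": "acme"}}

# data_to_row flattens nested metadata into dotted columns:
flat = {k: v for k, v in nested.items() if k != "metadata"}
flat.update({f"metadata.{k}": v for k, v in nested["metadata"].items()})
# {'externalId': 'pump-01', 'metadata.site': 'A', 'metadata.vendor': 'acme'}

# row_to_resource reverses it before building the write object:
rebuilt = {k: v for k, v in flat.items() if not k.startswith("metadata.")}
rebuilt["metadata"] = {k[len("metadata."):]: v for k, v in flat.items() if k.startswith("metadata.")}
assert rebuilt == nested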
@@ -206,6 +206,41 @@ class UploadableStorageIO(
         raise NotImplementedError()
 
 
+class TableUploadableStorageIO(UploadableStorageIO[T_Selector, T_CogniteResource, T_WriteCogniteResource], ABC):
+    """A base class for storage items that support uploading data with table schemas."""
+
+    def rows_to_data(
+        self, rows: list[tuple[str, dict[str, JsonVal]]], selector: T_Selector | None = None
+    ) -> Sequence[UploadItem[T_WriteCogniteResource]]:
+        """Convert a row-based JSON-compatible chunk of data back to a writable Cognite resource list.
+
+        Args:
+            rows: A list of tuples, each containing a source ID and a dictionary representing
+                the data in a JSON-compatible format.
+            selector: Optional selection criteria to identify where to upload the data. This is required for some storage types.
+
+        Returns:
+            A writable Cognite resource list representing the data.
+        """
+        result: list[UploadItem[T_WriteCogniteResource]] = []
+        for source_id, row in rows:
+            item = self.row_to_resource(row, selector=selector)
+            result.append(UploadItem(source_id=source_id, item=item))
+        return result
+
+    @abstractmethod
+    def row_to_resource(self, row: dict[str, JsonVal], selector: T_Selector | None = None) -> T_WriteCogniteResource:
+        """Convert a row-based JSON-compatible dictionary back to a writable Cognite resource.
+
+        Args:
+            row: A dictionary representing the data in a JSON-compatible format.
+            selector: Optional selection criteria to identify where to upload the data. This is required for some storage types.
+        Returns:
+            A writable Cognite resource representing the data.
+        """
+        raise NotImplementedError()
+
+
 class ConfigurableStorageIO(StorageIO[T_Selector, T_CogniteResource], ABC):
     """A base class for storage items that support configurations for different storage items."""
 
@@ -230,3 +265,19 @@ class TableStorageIO(StorageIO[T_Selector, T_CogniteResource], ABC):
 
         """
         raise NotImplementedError()
+
+    @abstractmethod
+    def data_to_row(
+        self, data_chunk: Sequence[T_CogniteResource], selector: T_Selector | None = None
+    ) -> list[dict[str, JsonVal]]:
+        """Convert a chunk of data to a row-based JSON-compatible format.
+
+        Args:
+            data_chunk: The chunk of data to convert, which should be a writable Cognite resource list.
+            selector: Optional selection criteria to identify the data. This is required for some storage types.
+
+        Returns:
+            A list of dictionaries representing the data in a JSON-compatible format.
+
+        """
+        raise NotImplementedError()
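
The default rows_to_data wraps each converted row in an UploadItem carrying its source ID; subclasses only implement row_to_resource. A simplified, self-contained sketch of that flow (UploadItem here is a stand-in for the toolkit's class):

from dataclasses import dataclass

@dataclass
class UploadItem:  # simplified stand-in for the toolkit's UploadItem
    source_id: str
    item: dict

def rows_to_data(rows, row_to_resource):
    # Mirrors TableUploadableStorageIO.rows_to_data: each (source_id, row)
    # pair becomes an UploadItem wrapping the converted resource.
    return [UploadItem(source_id, row_to_resource(row)) for source_id, row in rows]

items = rows_to_data([("row 1", {"name": "a"})], row_to_resource=dict)
print(items[0])  # UploadItem(source_id='row 1', item={'name': 'a'})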
@@ -1,4 +1,5 @@
 from collections.abc import Iterable, Sequence
+from uuid import uuid4
 
 from cognite.client.data_classes import Row, RowWrite
 
@@ -8,13 +9,19 @@ from cognite_toolkit._cdf_tk.utils import sanitize_filename
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, ItemsRequest
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
 
-from ._base import ConfigurableStorageIO, Page, StorageIOConfig, UploadableStorageIO, UploadItem
+from ._base import (
+    ConfigurableStorageIO,
+    Page,
+    StorageIOConfig,
+    TableUploadableStorageIO,
+    UploadItem,
+)
 from .selectors import RawTableSelector
 
 
 class RawIO(
     ConfigurableStorageIO[RawTableSelector, Row],
-    UploadableStorageIO[RawTableSelector, Row, RowWrite],
+    TableUploadableStorageIO[RawTableSelector, Row, RowWrite],
 ):
     KIND = "RawRows"
     DISPLAY_NAME = "Raw Rows"
@@ -81,3 +88,9 @@ class RawIO(
         yield StorageIOConfig(
             kind=RawTableCRUD.kind, folder_name=RawTableCRUD.folder_name, value=selector.table.model_dump(by_alias=True)
         )
+
+    def row_to_resource(self, row: dict[str, JsonVal], selector: RawTableSelector | None = None) -> RowWrite:
+        key = str(uuid4())
+        if selector is not None and selector.key is not None and selector.key in row:
+            key = str(row.pop(selector.key))
+        return RowWrite(key=key, columns=row)
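
For RAW uploads, the row key is taken from the column named by the selector's new key field when present; otherwise a random UUID is generated. A stand-in sketch of the key selection, without the SDK's RowWrite:

from uuid import uuid4

def row_key_and_columns(row: dict, key_column: str | None = None):
    # Mirrors RawIO.row_to_resource: pop the configured key column if the row
    # has it; otherwise fall back to a generated UUID key.
    key = str(uuid4())
    if key_column is not None and key_column in row:
        key = str(row.pop(key_column))
    return key, row

print(row_key_and_columns({"id": 42, "value": "x"}, key_column="id"))
# ('42', {'value': 'x'})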
@@ -14,6 +14,7 @@ class RawTableSelector(DataSelector):
     type: Literal["rawTable"] = "rawTable"
     kind: Literal["RawRows"] = "RawRows"
     table: SelectedTable
+    key: str | None = None
 
     @property
     def group(self) -> str:
@@ -8,6 +8,7 @@ from ._compression import (
 )
 from ._readers import (
     FILE_READ_CLS_BY_FORMAT,
+    TABLE_READ_CLS_BY_FORMAT,
     CSVReader,
     FailedParsing,
     FileReader,
@@ -32,6 +33,7 @@ __all__ = [
     "COMPRESSION_BY_SUFFIX",
     "FILE_READ_CLS_BY_FORMAT",
     "FILE_WRITE_CLS_BY_FORMAT",
+    "TABLE_READ_CLS_BY_FORMAT",
     "TABLE_WRITE_CLS_BY_FORMAT",
     "CSVReader",
     "CSVWriter",
@@ -87,7 +87,10 @@ class FailedParsing:
     error: str
 
 
-class CSVReader(FileReader):
+class TableReader(FileReader, ABC): ...
+
+
+class CSVReader(TableReader):
     """Reads CSV files and yields each row as a dictionary.
 
     Args:
@@ -229,7 +232,7 @@ class CSVReader(FileReader):
         yield from csv.DictReader(file)
 
 
-class ParquetReader(FileReader):
+class ParquetReader(TableReader):
     format = ".parquet"
 
     def read_chunks(self) -> Iterator[dict[str, JsonVal]]:
@@ -257,6 +260,7 @@ class ParquetReader(FileReader):
 
 
 FILE_READ_CLS_BY_FORMAT: Mapping[str, type[FileReader]] = {}
+TABLE_READ_CLS_BY_FORMAT: Mapping[str, type[TableReader]] = {}
 for subclass in get_concrete_subclasses(FileReader):  # type: ignore[type-abstract]
     if not getattr(subclass, "format", None):
         continue
@@ -267,3 +271,10 @@ for subclass in get_concrete_subclasses(FileReader):  # type: ignore[type-abstract]
         )
     # We know we have a dict, but we want to expose FILE_READ_CLS_BY_FORMAT as a Mapping
     FILE_READ_CLS_BY_FORMAT[subclass.format] = subclass  # type: ignore[index]
+    if issubclass(subclass, TableReader):
+        if subclass.format in TABLE_READ_CLS_BY_FORMAT:
+            raise TypeError(
+                f"Duplicate table file format {subclass.format!r} found for classes "
+                f"{TABLE_READ_CLS_BY_FORMAT[subclass.format].__name__!r} and {subclass.__name__!r}."
+            )
+        TABLE_READ_CLS_BY_FORMAT[subclass.format] = subclass  # type: ignore[index]
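
TABLE_READ_CLS_BY_FORMAT mirrors the existing reader registry but only admits TableReader subclasses, failing fast on duplicate formats. A self-contained sketch of the pattern; the toolkit discovers subclasses via get_concrete_subclasses, hard-coded here for illustration:

class FileReader: ...
class TableReader(FileReader): ...

class CSVReader(TableReader):
    format = ".csv"

class NDJsonReader(FileReader):  # line-based, not a table format
    format = ".ndjson"

TABLE_READ_CLS_BY_FORMAT: dict[str, type[TableReader]] = {}
for cls in (CSVReader, NDJsonReader):
    if issubclass(cls, TableReader):
        if cls.format in TABLE_READ_CLS_BY_FORMAT:
            raise TypeError(f"Duplicate table file format {cls.format!r}")
        TABLE_READ_CLS_BY_FORMAT[cls.format] = cls

print(TABLE_READ_CLS_BY_FORMAT)  # {'.csv': <class '__main__.CSVReader'>}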
@@ -235,7 +235,7 @@ class ProducerWorkerExecutor(Generic[T_Download, T_Processed]):
                 break
             except Exception as e:
                 self._error_event.set()
-                self.error_message = str(e)
+                self.error_message = f"{type(e).__name__} {e!s}"
                 self.error_traceback = traceback.format_exc()
                 self.console.print(f"[red]Error[/red] occurred while {self.download_description}: {self.error_message}")
                 break
@@ -275,7 +275,7 @@ class ProducerWorkerExecutor(Generic[T_Download, T_Processed]):
                 continue
             except Exception as e:
                 self._error_event.set()
-                self.error_message = str(e)
+                self.error_message = f"{type(e).__name__} {e!s}"
                 self.error_traceback = traceback.format_exc()
                 self.console.print(f"[red]Error[/red] occurred while {self.process_description}: {self.error_message}")
                 break
@@ -297,7 +297,7 @@ class ProducerWorkerExecutor(Generic[T_Download, T_Processed]):
                 continue
             except Exception as e:
                 self._error_event.set()
-                self.error_message = str(e)
+                self.error_message = f"{type(e).__name__} {e!s}"
                 self.error_traceback = traceback.format_exc()
                 self.console.print(f"[red]Error[/red] occurred while {self.write_description}: {self.error_message}")
                 break
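
Prefixing the exception's class name makes otherwise ambiguous error messages self-describing in the console output. A quick illustration:

try:
    int("not-a-number")
except Exception as e:
    print(str(e))                       # invalid literal for int() with base 10: 'not-a-number'
    print(f"{type(e).__name__} {e!s}")  # ValueError invalid literal for int() with base 10: 'not-a-number'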
@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.6.85
+      image: cognite/toolkit:0.6.87
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
    container:
-      image: cognite/toolkit:0.6.85
+      image: cognite/toolkit:0.6.87
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.6.85"
+version = "0.6.87"
 
 [alpha_flags]
 external-libraries = true
@@ -1 +1 @@
-__version__ = "0.6.85"
+__version__ = "0.6.87"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.6.85
+Version: 0.6.87
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
@@ -1,6 +1,6 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=1OSAvbOeuIrnsczEG2BtGqRP3L3sq0VMPthmugnqCUw,5821
-cognite_toolkit/_version.py,sha256=fSvL3Nga6Lczp2I4CiTq0ik_l-HhTaEENmTQBPEDsxU,23
+cognite_toolkit/_version.py,sha256=nMZoX2Jk52AMlhjMQEp4ob9g30rtUl1w3nR2aN9H9Js,23
 cognite_toolkit/_cdf_tk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=DAUmHf19ByVIGH4MDPdXKHZ0G97CxdD5J-EzHTq66C8,8025
 cognite_toolkit/_cdf_tk/constants.py,sha256=e9XmGvQCqGq7zYQrNoopU5e2KnYZYBPyUC5raGShK7k,6364
@@ -95,10 +95,10 @@ cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=OJYtHiERtUBXm3cjUTyPVaYIMVQp
 cognite_toolkit/_cdf_tk/commands/_base.py,sha256=m2hnXo_AAHhsoSayHZO_zUa4xEt5w5oMB4WCHmJr-AY,2595
 cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=DIwuiRpDhWBDpsW3R3yqj0eWLAE3c_kPbmCaUkxjFuo,24852
 cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=TK6U_rm6VZT_V941kTyHMoulWgJzbDC8YIIQDPJ5x3w,1011
-cognite_toolkit/_cdf_tk/commands/_download.py,sha256=qkbzHzd6FZydNiG83vwciBJEmQAG0t9EFfvZb0K89TA,6693
+cognite_toolkit/_cdf_tk/commands/_download.py,sha256=OBKPM_HGGA1i32th1SAgkQM_81CUFvm39kGqBuOeeTs,6816
 cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=_4iX3AHAI6eLmRVUlWXCSvVHx1BZW2yDr_i2i9ECg6U,43120
 cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=RadQHsmkPez3fZ5HCP9b82o2_fBx8P_-bTo7prkvWXU,32525
-cognite_toolkit/_cdf_tk/commands/_upload.py,sha256=kXYmP1YMg-JvsuN1iYaMuZH1qZfapya2j-RABGhqGHo,11860
+cognite_toolkit/_cdf_tk/commands/_upload.py,sha256=Y0k0q4Iu4F7g3Ax3slSrpll3AHxmODYNq55waHw4mzc,12473
 cognite_toolkit/_cdf_tk/commands/_utils.py,sha256=ARlbqA_5ZWlgN3-xF-zanzSx4B0-9ULnguA5QgHmKGA,1225
 cognite_toolkit/_cdf_tk/commands/_virtual_env.py,sha256=GFAid4hplixmj9_HkcXqU5yCLj-fTXm4cloGD6U2swY,2180
 cognite_toolkit/_cdf_tk/commands/auth.py,sha256=N6JgtF0_Qoh-xM8VlBb_IK1n0Lo5I7bIkIHmXm1l7ug,31638
@@ -227,18 +227,18 @@ cognite_toolkit/_cdf_tk/resource_classes/robotics/location.py,sha256=dbc9HT-bc2Q
 cognite_toolkit/_cdf_tk/resource_classes/robotics/map.py,sha256=j77z7CzCMiMj8r94BdUKCum9EuZRUjaSlUAy9K9DL_Q,942
 cognite_toolkit/_cdf_tk/storageio/__init__.py,sha256=aM-skaPnKTH1B7HG0faeTUNf7u1b-sT8l7hh5JRZ1E8,2288
 cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=bhyG1d2_9duPkX-otC2brVcpChvdXSPkYhBHS5T_72g,4343
-cognite_toolkit/_cdf_tk/storageio/_asset_centric.py,sha256=spFAdoHxGKDDEWItM4RovSddudwoPIa_uev8WCAQ9zs,27027
-cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=yqAaBlZcE53V_HKcKi_q-OjpC2Nnhkw13cbSvXjx2wY,8682
+cognite_toolkit/_cdf_tk/storageio/_asset_centric.py,sha256=Rhy64zUW4oxacq_vYomDeTRPmF6Vx-1mkYAFAqJE9vk,28312
+cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=NWXPdgzUnpBiav5Hi8XGHkWU9QiMjNzBQTxMcuxF-LA,11017
 cognite_toolkit/_cdf_tk/storageio/_data_classes.py,sha256=s3TH04BJ1q7rXndRhEbVMEnoOXjxrGg4n-w9Z5uUL-o,3480
 cognite_toolkit/_cdf_tk/storageio/_instances.py,sha256=_tKOdlo7tMJoh7y-47o7sySfDMRa-G-AFVprmzjn3EQ,9311
-cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=5UkIk5MkyyCyGRYmyD6qTIsEDKbI2EulPesYbqM8qAA,3466
+cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=gU4muTo5MGOu2FrxwQz5XX21m1rGS_ZsUwWOC8KDrkY,3839
 cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=fXzR99mN8wDVkaH9fstoiyIfYajXxeM8x3ZFeQdkIUI,1333
 cognite_toolkit/_cdf_tk/storageio/selectors/_asset_centric.py,sha256=7Iv_ccVX6Vzt3ZLFZ0Er3hN92iEsFTm9wgF-yermOWE,1467
 cognite_toolkit/_cdf_tk/storageio/selectors/_base.py,sha256=FsHF63HIcVstvXPGLJ2WeDiEIC7JUVHXfhBtbJk-dF8,2440
 cognite_toolkit/_cdf_tk/storageio/selectors/_canvas.py,sha256=fs0i5rI13x6IStOPFQ3E-n8ey1eoV_QORD1-RtPF4A4,182
 cognite_toolkit/_cdf_tk/storageio/selectors/_charts.py,sha256=vcSjubTSd6_x3bA72vqk-9enp35V_opk6ZtzV2ZE_UQ,602
 cognite_toolkit/_cdf_tk/storageio/selectors/_instances.py,sha256=NCFSJrAw52bNX6UTfOali8PvNjlqHnvxzL0hYBr7ZmA,4934
-cognite_toolkit/_cdf_tk/storageio/selectors/_raw.py,sha256=AlzUNXldZ_iRBH_95ATS5n9SIXUTJC_06N6rlxpr2T4,476
+cognite_toolkit/_cdf_tk/storageio/selectors/_raw.py,sha256=sZq9C4G9DMe3S46_usKet0FphQ6ow7cWM_PfXrEAakk,503
 cognite_toolkit/_cdf_tk/tk_warnings/__init__.py,sha256=U9bT-G2xKrX6mmtZ7nZ1FfQeCjNKfKP_p7pev90dwOE,2316
 cognite_toolkit/_cdf_tk/tk_warnings/base.py,sha256=cX8TCmb56gqx3lc7dankXuqpm5HGASJ4wTb07-MCJWs,4401
 cognite_toolkit/_cdf_tk/tk_warnings/fileread.py,sha256=d2Kx6YyLmCkyFNjK8MO6eKGceCIEaFLZ4LYcG-EjnuM,8947
@@ -259,7 +259,7 @@ cognite_toolkit/_cdf_tk/utils/graphql_parser.py,sha256=2i2wDjg_Uw3hJ-pHtPK8hczIu
 cognite_toolkit/_cdf_tk/utils/hashing.py,sha256=3NyNfljyYNTqAyAFBd6XlyWaj43jRzENxIuPdOY6nqo,2116
 cognite_toolkit/_cdf_tk/utils/interactive_select.py,sha256=veV93_O-gATbQ1PfRbZq0VotTgaXA4JcU34j_nLKpSU,36155
 cognite_toolkit/_cdf_tk/utils/modules.py,sha256=9RvOGUaGEi_-A7Qrq0E1tCx82QK8GbvEZXB7r1RnD_U,5974
-cognite_toolkit/_cdf_tk/utils/producer_worker.py,sha256=n01Tr8ml8dHffjX45Vdqmzt2DFGZ0IrcgFembo8y9yM,14153
+cognite_toolkit/_cdf_tk/utils/producer_worker.py,sha256=1l77HIehkq1ARCBH6SlZ_V-jd6QKijYKeWetcUmAXg0,14216
 cognite_toolkit/_cdf_tk/utils/progress_tracker.py,sha256=LGpC22iSTTlo6FWi38kqBu_E4XouTvZU_N953WAzZWA,3865
 cognite_toolkit/_cdf_tk/utils/repository.py,sha256=voQLZ6NiNvdAFxqeWHbvzDLsLHl6spjQBihiLyCsGW8,4104
 cognite_toolkit/_cdf_tk/utils/sentry_utils.py,sha256=Q3ekrR0bWMtlPVQrfUSsETlkLIaDUZ2u-RdNFFr9-dg,564
@@ -269,10 +269,10 @@ cognite_toolkit/_cdf_tk/utils/text.py,sha256=EpIXjaQ5C5q5fjbUjAW7tncXpdJfiQeV7CY
 cognite_toolkit/_cdf_tk/utils/thread_safe_dict.py,sha256=NbRHcZvWpF9xHP5OkOMGFpxrPNbi0Q3Eea6PUNbGlt4,3426
 cognite_toolkit/_cdf_tk/utils/useful_types.py,sha256=tPZOcK1PR1hNogMCgF863APMK6p3528t5kKaKbVl0-s,1352
 cognite_toolkit/_cdf_tk/utils/validate_access.py,sha256=1puswcpgEDNCwdk91dhLqCBSu_aaUAd3Hsw21d-YVFs,21955
-cognite_toolkit/_cdf_tk/utils/fileio/__init__.py,sha256=_rZp6E2HaqixzPC57XQGaSm6xm1pFNXNJ4hBAnvGx1c,1137
+cognite_toolkit/_cdf_tk/utils/fileio/__init__.py,sha256=ts5kYu_1Ks7xjnM6pIrVUrZe0nkYI6euYXeE4ox34xk,1199
 cognite_toolkit/_cdf_tk/utils/fileio/_base.py,sha256=MpWaD3lR9vrJ-kGzTiDOtChXhvFD7-xrP-Pzp7vjnLY,756
 cognite_toolkit/_cdf_tk/utils/fileio/_compression.py,sha256=8BAPgg5OKc3vkEEkqOvYsuyh12iXVNuEmC0omWwyJNQ,2355
-cognite_toolkit/_cdf_tk/utils/fileio/_readers.py,sha256=d6TdQlous0TnfzpsDjaLd6inHhUkh2TEdrlBrMayaCs,11094
+cognite_toolkit/_cdf_tk/utils/fileio/_readers.py,sha256=77Uq5r0pnD8gXLDVPzLT-1VPfzyOK7TZk85BtWxW7DQ,11613
 cognite_toolkit/_cdf_tk/utils/fileio/_writers.py,sha256=ghNGBZjkISAlbxe8o5YWWloLXG9QKOtF_qGA9JkvYss,17712
 cognite_toolkit/_cdf_tk/utils/http_client/__init__.py,sha256=H1T-cyIoVaPL4MvN1IuG-cHgj-cqB7eszu2kIN939lw,813
 cognite_toolkit/_cdf_tk/utils/http_client/_client.py,sha256=zsN5eP1spgMkIRN6qeQ-djAohJVVuacpD2fnQh5QYx0,10916
@@ -284,13 +284,13 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=oypvpptzzkRZcztIcOxkwipNPTJ54zZGTRUIHwpbTrQ,667
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=wOg0WNsPFO4Kd14lD9OTlLL7m-W7kF5YbgNZL_uv9zg,2430
-cognite_toolkit/_resources/cdf.toml,sha256=NwEvIORLNsZ8IaxYk-mUhrTuw3b7bKXswk79nxWjRvM,487
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=hDzsZty3J7SXnQx9Lcg61xUQ8ntSlCKFTJU9xn1662Y,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=b02Giaqbtk6qj9QBeSZ2JvEoKUtEnRmd2Vx3noqarTg,2430
+cognite_toolkit/_resources/cdf.toml,sha256=QySdcuiKfdavn44fE166L8WT9qlSJmpnbkjxuFlCVlM,487
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.6.85.dist-info/METADATA,sha256=xmey3u0EyVh4Ef5qgxa0VibUKZ_FLz6jAYQ5opMl_6g,4501
-cognite_toolkit-0.6.85.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-cognite_toolkit-0.6.85.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
-cognite_toolkit-0.6.85.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
-cognite_toolkit-0.6.85.dist-info/RECORD,,
+cognite_toolkit-0.6.87.dist-info/METADATA,sha256=kMPZ6AAe859YlluZLJZ4C-MlhyNrbzx2RFIx27xIOb4,4501
+cognite_toolkit-0.6.87.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognite_toolkit-0.6.87.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.6.87.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.6.87.dist-info/RECORD,,