cognite-toolkit 0.7.55__py3-none-any.whl → 0.7.57__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. cognite_toolkit/_cdf_tk/apps/_auth_app.py +2 -2
  2. cognite_toolkit/_cdf_tk/apps/_core_app.py +4 -4
  3. cognite_toolkit/_cdf_tk/apps/_dev_app.py +10 -1
  4. cognite_toolkit/_cdf_tk/apps/_download_app.py +13 -12
  5. cognite_toolkit/_cdf_tk/apps/_dump_app.py +13 -13
  6. cognite_toolkit/_cdf_tk/apps/_landing_app.py +10 -1
  7. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +13 -13
  8. cognite_toolkit/_cdf_tk/apps/_modules_app.py +29 -5
  9. cognite_toolkit/_cdf_tk/apps/_profile_app.py +4 -4
  10. cognite_toolkit/_cdf_tk/apps/_purge.py +4 -5
  11. cognite_toolkit/_cdf_tk/apps/_repo_app.py +9 -2
  12. cognite_toolkit/_cdf_tk/apps/_run.py +5 -4
  13. cognite_toolkit/_cdf_tk/apps/_upload_app.py +2 -2
  14. cognite_toolkit/_cdf_tk/client/api/agents.py +2 -4
  15. cognite_toolkit/_cdf_tk/client/api/annotations.py +2 -2
  16. cognite_toolkit/_cdf_tk/client/api/assets.py +3 -5
  17. cognite_toolkit/_cdf_tk/client/api/containers.py +2 -2
  18. cognite_toolkit/_cdf_tk/client/api/data_models.py +2 -2
  19. cognite_toolkit/_cdf_tk/client/api/datasets.py +3 -3
  20. cognite_toolkit/_cdf_tk/client/api/events.py +3 -5
  21. cognite_toolkit/_cdf_tk/client/api/extraction_pipelines.py +3 -3
  22. cognite_toolkit/_cdf_tk/client/api/filemetadata.py +6 -6
  23. cognite_toolkit/_cdf_tk/client/api/function_schedules.py +2 -2
  24. cognite_toolkit/_cdf_tk/client/api/functions.py +2 -2
  25. cognite_toolkit/_cdf_tk/client/api/graphql_data_models.py +5 -5
  26. cognite_toolkit/_cdf_tk/client/api/groups.py +5 -7
  27. cognite_toolkit/_cdf_tk/client/api/hosted_extractor_destinations.py +3 -3
  28. cognite_toolkit/_cdf_tk/client/api/hosted_extractor_jobs.py +3 -3
  29. cognite_toolkit/_cdf_tk/client/api/hosted_extractor_mappings.py +3 -3
  30. cognite_toolkit/_cdf_tk/client/api/hosted_extractor_sources.py +4 -4
  31. cognite_toolkit/_cdf_tk/client/api/infield.py +8 -8
  32. cognite_toolkit/_cdf_tk/client/api/instances.py +3 -3
  33. cognite_toolkit/_cdf_tk/client/api/labels.py +3 -5
  34. cognite_toolkit/_cdf_tk/client/api/legacy/extended_functions.py +3 -3
  35. cognite_toolkit/_cdf_tk/client/api/location_filters.py +8 -8
  36. cognite_toolkit/_cdf_tk/client/api/project.py +14 -2
  37. cognite_toolkit/_cdf_tk/client/api/raw.py +5 -5
  38. cognite_toolkit/_cdf_tk/client/api/relationships.py +2 -2
  39. cognite_toolkit/_cdf_tk/client/api/robotics_capabilities.py +3 -3
  40. cognite_toolkit/_cdf_tk/client/api/robotics_data_postprocessing.py +3 -3
  41. cognite_toolkit/_cdf_tk/client/api/robotics_frames.py +3 -3
  42. cognite_toolkit/_cdf_tk/client/api/robotics_locations.py +3 -3
  43. cognite_toolkit/_cdf_tk/client/api/robotics_maps.py +3 -3
  44. cognite_toolkit/_cdf_tk/client/api/robotics_robots.py +3 -5
  45. cognite_toolkit/_cdf_tk/client/api/search_config.py +5 -5
  46. cognite_toolkit/_cdf_tk/client/api/security_categories.py +3 -3
  47. cognite_toolkit/_cdf_tk/client/api/sequences.py +3 -3
  48. cognite_toolkit/_cdf_tk/client/api/simulator_models.py +3 -3
  49. cognite_toolkit/_cdf_tk/client/api/spaces.py +2 -4
  50. cognite_toolkit/_cdf_tk/client/api/streams.py +6 -6
  51. cognite_toolkit/_cdf_tk/client/api/three_d.py +5 -5
  52. cognite_toolkit/_cdf_tk/client/api/timeseries.py +3 -3
  53. cognite_toolkit/_cdf_tk/client/api/transformations.py +3 -3
  54. cognite_toolkit/_cdf_tk/client/api/views.py +2 -4
  55. cognite_toolkit/_cdf_tk/client/api/workflow_triggers.py +3 -3
  56. cognite_toolkit/_cdf_tk/client/api/workflow_versions.py +3 -3
  57. cognite_toolkit/_cdf_tk/client/api/workflows.py +3 -3
  58. cognite_toolkit/_cdf_tk/client/cdf_client/api.py +11 -11
  59. cognite_toolkit/_cdf_tk/client/http_client/__init__.py +13 -51
  60. cognite_toolkit/_cdf_tk/client/http_client/_client.py +48 -209
  61. cognite_toolkit/_cdf_tk/client/http_client/_data_classes.py +106 -383
  62. cognite_toolkit/_cdf_tk/client/http_client/_item_classes.py +16 -16
  63. cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py +7 -1
  64. cognite_toolkit/_cdf_tk/client/resource_classes/project.py +30 -0
  65. cognite_toolkit/_cdf_tk/commands/_base.py +18 -1
  66. cognite_toolkit/_cdf_tk/commands/_import_cmd.py +3 -1
  67. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +8 -8
  68. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +25 -24
  69. cognite_toolkit/_cdf_tk/commands/_profile.py +10 -5
  70. cognite_toolkit/_cdf_tk/commands/_purge.py +30 -35
  71. cognite_toolkit/_cdf_tk/commands/_upload.py +4 -6
  72. cognite_toolkit/_cdf_tk/commands/build_cmd.py +2 -1
  73. cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py +8 -2
  74. cognite_toolkit/_cdf_tk/commands/deploy.py +8 -2
  75. cognite_toolkit/_cdf_tk/commands/init.py +9 -2
  76. cognite_toolkit/_cdf_tk/commands/modules.py +3 -1
  77. cognite_toolkit/_cdf_tk/commands/pull.py +8 -2
  78. cognite_toolkit/_cdf_tk/commands/repo.py +3 -1
  79. cognite_toolkit/_cdf_tk/commands/resources.py +0 -3
  80. cognite_toolkit/_cdf_tk/data_classes/_tracking_info.py +1 -0
  81. cognite_toolkit/_cdf_tk/protocols.py +3 -1
  82. cognite_toolkit/_cdf_tk/storageio/_applications.py +9 -9
  83. cognite_toolkit/_cdf_tk/storageio/_base.py +15 -10
  84. cognite_toolkit/_cdf_tk/storageio/_datapoints.py +36 -24
  85. cognite_toolkit/_cdf_tk/storageio/_file_content.py +47 -43
  86. cognite_toolkit/_cdf_tk/storageio/_raw.py +5 -4
  87. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  88. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  89. cognite_toolkit/_resources/cdf.toml +1 -1
  90. cognite_toolkit/_version.py +1 -1
  91. {cognite_toolkit-0.7.55.dist-info → cognite_toolkit-0.7.57.dist-info}/METADATA +1 -1
  92. {cognite_toolkit-0.7.55.dist-info → cognite_toolkit-0.7.57.dist-info}/RECORD +94 -94
  93. cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +0 -151
  94. {cognite_toolkit-0.7.55.dist-info → cognite_toolkit-0.7.57.dist-info}/WHEEL +0 -0
  95. {cognite_toolkit-0.7.55.dist-info → cognite_toolkit-0.7.57.dist-info}/entry_points.txt +0 -0
cognite_toolkit/_cdf_tk/storageio/_applications.py

@@ -7,11 +7,11 @@ from pydantic import JsonValue
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.http_client import (
     HTTPClient,
-    HTTPMessage,
-    HTTPResult2,
-    RequestMessage2,
-    SuccessResponse2,
+    HTTPResult,
+    RequestMessage,
+    SuccessResponse,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsResultList
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.canvas import (
     IndustrialCanvas,
     IndustrialCanvasApply,
@@ -189,9 +189,9 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
         data_chunk: Sequence[UploadItem[IndustrialCanvasApply]],
         http_client: HTTPClient,
         selector: CanvasSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         config = http_client.config
-        results: list[HTTPMessage] = []
+        results = ItemsResultList()
         for item in data_chunk:
             instances = item.item.as_instances()
             upsert_items: list[dict[str, JsonValue]] = []
@@ -217,7 +217,7 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
             else:
                 to_delete = []

-            last_response: HTTPResult2 | None = None
+            last_response: HTTPResult | None = None
             for upsert_chunk, delete_chunk in zip_longest(
                 chunker_sequence(upsert_items, 1000), chunker_sequence(to_delete, 1000), fillvalue=None
             ):
@@ -229,13 +229,13 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
                     body_content["delete"] = delete_chunk

                 response = http_client.request_single_retries(
-                    message=RequestMessage2(
+                    message=RequestMessage(
                         endpoint_url=config.create_api_url("/models/instances"),
                         method="POST",
                         body_content=body_content,
                     )
                 )
-                if not isinstance(response, SuccessResponse2):
+                if not isinstance(response, SuccessResponse):
                     results.append(response.as_item_response(item.source_id))
                 last_response = response
             if last_response is not None:
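
Note: the pattern in the hunks above repeats throughout this release. The experimental "*2" suffix is dropped from the HTTP classes (RequestMessage2 → RequestMessage, SuccessResponse2 → SuccessResponse, HTTPResult2 → HTTPResult), request_single_retries returns a single HTTPResult rather than a sequence of HTTPMessages, and per-item outcomes are collected in an ItemsResultList. A minimal sketch of the new calling convention, using only names visible in the diff (the surrounding function is illustrative, not part of the package):

    def upsert_body(http_client: HTTPClient, body: dict, source_id: str) -> ItemsResultList:
        results = ItemsResultList()
        # One request now yields exactly one HTTPResult (success or failure).
        response = http_client.request_single_retries(
            message=RequestMessage(
                endpoint_url=http_client.config.create_api_url("/models/instances"),
                method="POST",
                body_content=body,
            )
        )
        if not isinstance(response, SuccessResponse):
            # Failures are attributed to the originating item via its source id.
            results.append(response.as_item_response(source_id))
        return results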
cognite_toolkit/_cdf_tk/storageio/_base.py

@@ -1,10 +1,14 @@
 from abc import ABC, abstractmethod
 from collections.abc import Iterable, Mapping, Sequence, Sized
 from dataclasses import dataclass
-from typing import ClassVar, Generic, Literal, TypeVar
+from typing import Any, ClassVar, Generic, Literal, TypeVar
+
+from pydantic import ConfigDict

 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, HTTPMessage, ItemsRequest
+from cognite_toolkit._cdf_tk.client._resource_base import RequestItem
+from cognite_toolkit._cdf_tk.client.http_client import HTTPClient
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsRequest, ItemsResultList
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
 from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.utils.collection import chunker
@@ -36,8 +40,7 @@ class Page(Generic[T_ResourceResponse], Sized):
         return len(self.items)


-@dataclass
-class UploadItem(Generic[T_ResourceRequest]):
+class UploadItem(RequestItem, Generic[T_ResourceRequest]):
     """An item to be uploaded to CDF, consisting of a source ID and the writable Cognite resource.

     Attributes:
@@ -45,14 +48,16 @@
         item: The writable Cognite resource to be uploaded.
     """

+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     source_id: str
     item: T_ResourceRequest

-    def as_id(self) -> str:
+    def __str__(self) -> str:
         return self.source_id

-    def dump(self) -> JsonVal:
-        return self.item.dump(camel_case=True)
+    def dump(self, camel_case: bool = True, exclude_extra: bool = False) -> dict[str, Any]:
+        return self.item.dump(camel_case=camel_case)


 class StorageIO(ABC, Generic[T_Selector, T_ResourceResponse]):
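
Note: in _base.py, UploadItem moves from a plain @dataclass to the pydantic-based RequestItem, which is why model_config = ConfigDict(arbitrary_types_allowed=True) appears and why dump now carries the camel_case/exclude_extra parameters of that interface; the old as_id() is replaced by __str__. A hypothetical usage sketch (my_request stands in for any T_ResourceRequest):

    item = UploadItem(source_id="rows/42.json", item=my_request)  # keyword construction via pydantic
    str(item)                   # "rows/42.json" - __str__ replaces the removed as_id()
    item.dump(camel_case=True)  # delegates to the wrapped resource's dump()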
@@ -155,7 +160,7 @@ class UploadableStorageIO(
         data_chunk: Sequence[UploadItem[T_ResourceRequest]],
         http_client: HTTPClient,
         selector: T_Selector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         """Upload a chunk of data to the storage using a custom HTTP client.
         This ensures that even if one item in the chunk fails, the rest will still be uploaded.

@@ -179,11 +184,11 @@
         else:
             raise ToolkitNotImplementedError(f"Unsupported UPLOAD_ENDPOINT_TYPE {self.UPLOAD_ENDPOINT_TYPE!r}.")

-        return http_client.request_with_retries(
+        return http_client.request_items_retries(
             message=ItemsRequest(
                 endpoint_url=url,
                 method=self.UPLOAD_ENDPOINT_METHOD,
-                items=list(data_chunk),
+                items=data_chunk,
                 extra_body_fields=dict(self.UPLOAD_EXTRA_ARGS or {}),
             )
         )
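
Note: the generic upload path now distinguishes single-request calls (request_single_retries) from item-batched calls (request_items_retries); the latter accepts the Sequence of UploadItems directly, so the defensive list(...) copy is gone. A condensed sketch of the item-batched form, with url and data_chunk assumed to be prepared as in the hunk above:

    results = http_client.request_items_retries(
        message=ItemsRequest(
            endpoint_url=url,          # built from the class's UPLOAD_ENDPOINT
            method="POST",
            items=data_chunk,          # Sequence[UploadItem[...]], passed as-is
            extra_body_fields={},      # merged into the request body
        )
    )
    # results is an ItemsResultList describing the per-item outcomes.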
cognite_toolkit/_cdf_tk/storageio/_datapoints.py

@@ -13,15 +13,16 @@ from cognite.client._proto.data_points_pb2 import (
 from cognite.client.data_classes import TimeSeriesFilter
 from cognite.client.data_classes.filters import Exists
 from cognite.client.data_classes.time_series import TimeSeriesProperty
+from pydantic import ConfigDict

 from cognite_toolkit._cdf_tk.client import ToolkitClient
+from cognite_toolkit._cdf_tk.client._resource_base import Identifier, RequestResource
 from cognite_toolkit._cdf_tk.client.http_client import (
-    DataBodyRequest,
     HTTPClient,
-    HTTPMessage,
-    SimpleBodyRequest,
+    RequestMessage,
     SuccessResponse,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsResultList
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
 from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
 from cognite_toolkit._cdf_tk.utils import humanize_collection
@@ -39,9 +40,22 @@ from ._base import Page, TableStorageIO, TableUploadableStorageIO, UploadItem
 from .selectors import DataPointsDataSetSelector, DataPointsFileSelector, DataPointsSelector


+class DatapointsRequestAdapter(RequestResource):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+    datapoints: DataPointInsertionRequest
+
+    def dump(self, camel_case: bool = True, exclude_extra: bool = False) -> dict[str, Any]:
+        return {"datapoints": self.datapoints.SerializeToString()}
+
+    def as_id(self) -> Identifier:
+        raise NotImplementedError(
+            "DatapointsRequestAdapter does not have an identifier. - it wraps multiple timeseries"
+        )
+
+
 class DatapointsIO(
     TableStorageIO[DataPointsSelector, DataPointListResponse],
-    TableUploadableStorageIO[DataPointsSelector, DataPointListResponse, DataPointInsertionRequest],
+    TableUploadableStorageIO[DataPointsSelector, DataPointListResponse, DatapointsRequestAdapter],
 ):
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".csv"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
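
Note: the new DatapointsRequestAdapter exists because the upload machinery now expects pydantic RequestResource objects, while datapoints remain protobuf messages; the adapter wraps the DataPointInsertionRequest without converting it. A sketch, assuming request is a pre-built DataPointInsertionRequest:

    adapter = DatapointsRequestAdapter(datapoints=request)
    adapter.dump()   # {"datapoints": b"..."} - serialized protobuf bytes, not JSON
    adapter.as_id()  # raises NotImplementedError: the wrapper spans many timeseries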
@@ -117,8 +131,8 @@
                 }
                 for ts in timeseries
             ]
-            responses = self.client.http_client.request_with_retries(
-                SimpleBodyRequest(
+            response = self.client.http_client.request_single_retries(
+                RequestMessage(
                     endpoint_url=config.create_api_url("/timeseries/data/list"),
                     method="POST",
                     accept="application/protobuf",
@@ -126,10 +140,9 @@
                     body_content={"items": items},  # type: ignore[dict-item]
                 )
             )
-            first_success = next((resp for resp in responses if isinstance(resp, SuccessResponse)), None)
-            if first_success is None:
+            if not isinstance(response, SuccessResponse):
                 continue
-            aggregate_response: DataPointListResponse = DataPointListResponse.FromString(first_success.content)
+            aggregate_response: DataPointListResponse = DataPointListResponse.FromString(response.content)
             timeseries_ids_with_data: dict[int, int] = {}
             for dp in aggregate_response.items:
                 if dp.aggregateDatapoints.datapoints:
@@ -176,8 +189,8 @@
             yield page

     def _fetch_datapoints_batch(self, batch: list[dict[str, Any]], config: Any) -> Page[DataPointListResponse] | None:
-        responses = self.client.http_client.request_with_retries(
-            SimpleBodyRequest(
+        response = self.client.http_client.request_single_retries(
+            RequestMessage(
                 endpoint_url=config.create_api_url("/timeseries/data/list"),
                 method="POST",
                 accept="application/protobuf",
@@ -185,10 +198,9 @@
                 body_content={"items": batch},  # type: ignore[dict-item]
             )
         )
-        first_success = next((resp for resp in responses if isinstance(resp, SuccessResponse)), None)
-        if first_success is None:
+        if not isinstance(response, SuccessResponse):
             return None
-        data_response: DataPointListResponse = DataPointListResponse.FromString(first_success.content)
+        data_response: DataPointListResponse = DataPointListResponse.FromString(response.content)
         return Page("Main", [data_response])

     def count(self, selector: DataPointsSelector) -> int | None:
@@ -238,26 +250,26 @@

     def upload_items(
         self,
-        data_chunk: Sequence[UploadItem[DataPointInsertionRequest]],
+        data_chunk: Sequence[UploadItem[DatapointsRequestAdapter]],
         http_client: HTTPClient,
         selector: DataPointsSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
-        results: list[HTTPMessage] = []
+    ) -> ItemsResultList:
+        results = ItemsResultList()
         for item in data_chunk:
-            response = http_client.request_with_retries(
-                DataBodyRequest(
+            response = http_client.request_single_retries(
+                RequestMessage(
                     endpoint_url=http_client.config.create_api_url(self.UPLOAD_ENDPOINT),
                     method="POST",
                     content_type="application/protobuf",
-                    data_content=item.item.SerializeToString(),
+                    data_content=item.item.datapoints.SerializeToString(),
                 )
             )
-            results.extend(response)
+            results.append(response.as_item_response(item.source_id))
         return results

     def row_to_resource(
         self, source_id: str, row: dict[str, JsonVal], selector: DataPointsSelector | None = None
-    ) -> DataPointInsertionRequest:
+    ) -> DatapointsRequestAdapter:
         if selector is None:
             raise ValueError("Selector must be provided to convert row to DataPointInsertionItem.")
         # We assume that the row was read using the read_chunks method.
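
Note: datapoints uploads still bypass JSON entirely; the protobuf payload travels through RequestMessage.data_content with an explicit content_type, replacing the removed DataBodyRequest. A sketch of one round trip, assuming item is an UploadItem[DatapointsRequestAdapter], results is an ItemsResultList, and "/timeseries/data" is the value of UPLOAD_ENDPOINT (not confirmed by this diff):

    response = http_client.request_single_retries(
        RequestMessage(
            endpoint_url=http_client.config.create_api_url("/timeseries/data"),  # assumed endpoint
            method="POST",
            content_type="application/protobuf",
            data_content=item.item.datapoints.SerializeToString(),  # raw protobuf bytes
        )
    )
    results.append(response.as_item_response(item.source_id))  # outcome recorded per item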
@@ -270,7 +282,7 @@
             raise RuntimeError(
                 f"Unsupported selector type {type(selector).__name__} for {type(self).__name__}. Trying to transform {source_id!r} from rows to DataPointInsertionRequest."
             )
-        return DataPointInsertionRequest(items=datapoints_items)
+        return DatapointsRequestAdapter(datapoints=DataPointInsertionRequest(items=datapoints_items))

     def _rows_to_datapoint_items_file_selector(
         self, rows: dict[str, list[Any]], selector: DataPointsFileSelector, source_id: str
@@ -406,7 +418,7 @@
             ).print_warning(console=self.client.console)
             self._warned_columns.add(column)

-    def json_to_resource(self, item_json: dict[str, JsonVal]) -> DataPointInsertionRequest:
+    def json_to_resource(self, item_json: dict[str, JsonVal]) -> DatapointsRequestAdapter:
         raise ToolkitNotImplementedError(
             f"Upload of {type(DatapointsIO).__name__.removesuffix('IO')} does not support json format."
         )
cognite_toolkit/_cdf_tk/storageio/_file_content.py

@@ -1,9 +1,9 @@
 import json
 import mimetypes
-from collections.abc import Iterable, MutableSequence, Sequence
+from collections.abc import Iterable, Sequence
 from dataclasses import dataclass
 from pathlib import Path
-from typing import cast
+from typing import Any, cast

 import httpx
 from cognite.client.data_classes.data_modeling import ViewId
@@ -11,15 +11,13 @@ from cognite.client.data_classes.data_modeling import ViewId
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.http_client import (
     ErrorDetails,
-    FailedResponse2,
-    FailedResponseItems,
+    FailedResponse,
     HTTPClient,
-    HTTPMessage,
-    HTTPResult2,
-    RequestMessage2,
-    SimpleBodyRequest,
-    SuccessResponse2,
+    HTTPResult,
+    RequestMessage,
+    SuccessResponse,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsFailedResponse, ItemsResultList
 from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataRequest, FileMetadataResponse
 from cognite_toolkit._cdf_tk.cruds import FileMetadataCRUD
@@ -46,13 +44,12 @@ from .selectors._file_content import NodeId as SelectorNodeId
 COGNITE_FILE_VIEW = ViewId("cdf_cdm", "CogniteFile", "v1")


-@dataclass
 class UploadFileContentItem(UploadItem[FileMetadataRequest]):
     file_path: Path
     mime_type: str

-    def dump(self) -> JsonVal:
-        return self.item.dump(camel_case=True, exclude_extra=True)
+    def dump(self, camel_case: bool = True, exclude_extra: bool = True) -> dict[str, Any]:
+        return self.item.dump(camel_case=camel_case, exclude_extra=exclude_extra)


 @dataclass
@@ -123,8 +120,8 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
     def _retrieve_metadata(self, identifiers: Sequence[FileIdentifier]) -> Sequence[FileMetadataResponse] | None:
         config = self.client.config
-        responses = self.client.http_client.request_with_retries(
-            message=SimpleBodyRequest(
+        response = self.client.http_client.request_single_retries(
+            message=RequestMessage(
                 endpoint_url=config.create_api_url("/files/byids"),
                 method="POST",
                 body_content={
@@ -136,9 +133,13 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
                 },
             )
         )
-        if responses.has_failed:
+        if not isinstance(response, SuccessResponse):
+            return None
+        try:
+            body = response.body_json
+        except ValueError:
             return None
-        body = responses.get_first_body()
+
         items_data = body.get("items", [])
         if not isinstance(items_data, list):
             return None
@@ -182,20 +183,24 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
     def _retrieve_download_url(self, identifier: FileIdentifier) -> str | None:
         config = self.client.config
-        responses = self.client.http_client.request_with_retries(
-            message=SimpleBodyRequest(
+        response = self.client.http_client.request_single_retries(
+            message=RequestMessage(
                 endpoint_url=config.create_api_url("/files/downloadlink"),
                 method="POST",
                 body_content={"items": [identifier.model_dump(mode="json", by_alias=True, exclude={"id_type"})]},
             )
         )
+        if not isinstance(response, SuccessResponse):
+            return None
+
         try:
-            body = responses.get_first_body()
+            body = response.body_json
         except ValueError:
             return None
+
         if "items" in body and isinstance(body["items"], list) and len(body["items"]) > 0:
             # The API responses is not following the API docs, this is a workaround
-            body = body["items"][0]  # type: ignore[assignment]
+            body = body["items"][0]
         try:
             return cast(str, body["downloadUrl"])
         except (KeyError, IndexError):
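
Note: response parsing in _file_content.py now follows a two-step pattern everywhere: first check the result type, then parse the body, since body_json raises ValueError when the body is not valid JSON. A condensed sketch of that pattern (url and payload are placeholders):

    response = http_client.request_single_retries(
        message=RequestMessage(endpoint_url=url, method="POST", body_content=payload)
    )
    if not isinstance(response, SuccessResponse):
        return None   # transport or API failure
    try:
        body = response.body_json   # raises ValueError on a non-JSON body
    except ValueError:
        return None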
@@ -257,8 +262,8 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
         data_chunk: Sequence[UploadItem[FileMetadataRequest]],
         http_client: HTTPClient,
         selector: FileContentSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
-        results: MutableSequence[HTTPMessage] = []
+    ) -> ItemsResultList:
+        results = ItemsResultList()
         if isinstance(selector, FileMetadataTemplateSelector | FileIdentifierSelector):
             upload_url_getter = self._upload_url_asset_centric
         elif isinstance(selector, FileDataModelingTemplateSelector):
@@ -276,7 +281,7 @@

             content_bytes = item.file_path.read_bytes()
             upload_response = http_client.request_single_retries(
-                RequestMessage2(
+                RequestMessage(
                     endpoint_url=upload_url,
                     method="PUT",
                     content_type=item.mime_type,
@@ -284,18 +289,17 @@
                     content_length=len(content_bytes),
                 )
             )
-            results.append(upload_response.as_item_response(item.as_id()))
+            results.append(upload_response.as_item_response(str(item)))
         return results

     def _upload_url_asset_centric(
-        self, item: UploadFileContentItem, http_client: HTTPClient, results: MutableSequence[HTTPMessage]
+        self, item: UploadFileContentItem, http_client: HTTPClient, results: ItemsResultList
     ) -> str | None:
         response = http_client.request_single_retries(
-            message=RequestMessage2(
+            message=RequestMessage(
                 endpoint_url=http_client.config.create_api_url(self.UPLOAD_ENDPOINT),
                 method="POST",
-                # MyPy does not understand that .dump is valid json
-                body_content=item.dump(),  # type: ignore[arg-type]
+                body_content=item.dump(),
             )
         )
         return self._parse_upload_link_response(response, item, results)
@@ -304,7 +308,7 @@
         self,
         item: UploadFileContentItem,
         http_client: HTTPClient,
-        results: MutableSequence[HTTPMessage],
+        results: ItemsResultList,
         created_node: bool = False,
     ) -> str | None:
         """Get upload URL for data modeling file upload.
@@ -326,14 +330,14 @@
         # We know that instance_id is always set for data modeling uploads
         instance_id = cast(NodeReference, item.item.instance_id)
         response = http_client.request_single_retries(
-            message=RequestMessage2(
+            message=RequestMessage(
                 endpoint_url=http_client.config.create_api_url("/files/uploadlink"),
                 method="POST",
                 body_content={"items": [{"instanceId": instance_id.dump()}]},
             )
         )
-        if isinstance(response, FailedResponse2) and response.error.missing and not created_node:
-            if self._create_cognite_file_node(instance_id, http_client, item.as_id(), results):
+        if isinstance(response, FailedResponse) and response.error.missing and not created_node:
+            if self._create_cognite_file_node(instance_id, http_client, item.source_id, results):
                 return self._upload_url_data_modeling(item, http_client, results, created_node=True)
             else:
                 return None
@@ -342,10 +346,10 @@

     @classmethod
     def _create_cognite_file_node(
-        cls, instance_id: NodeReference, http_client: HTTPClient, upload_id: str, results: MutableSequence[HTTPMessage]
+        cls, instance_id: NodeReference, http_client: HTTPClient, upload_id: str, results: ItemsResultList
     ) -> bool:
         node_creation = http_client.request_single_retries(
-            message=RequestMessage2(
+            message=RequestMessage(
                 endpoint_url=http_client.config.create_api_url("/models/instances"),
                 method="POST",
                 body_content={
@@ -365,28 +369,28 @@
                 },
             )
         )
-        if isinstance(node_creation, SuccessResponse2):
+        if isinstance(node_creation, SuccessResponse):
             # Node created successfully
             return True
-        results.append(node_creation.as_item_response(instance_id))
+        results.append(node_creation.as_item_response(upload_id))
        return False

     @classmethod
     def _parse_upload_link_response(
-        cls, response: HTTPResult2, item: UploadFileContentItem, results: MutableSequence[HTTPMessage]
+        cls, response: HTTPResult, item: UploadFileContentItem, results: ItemsResultList
     ) -> str | None:
-        if not isinstance(response, SuccessResponse2):
-            results.append(response.as_item_response(item.as_id()))
+        if not isinstance(response, SuccessResponse):
+            results.append(response.as_item_response(item.source_id))
             return None
         try:
             body = response.body_json
         except ValueError:
             results.append(
-                FailedResponseItems(
+                ItemsFailedResponse(
                     status_code=response.status_code,
                     body=response.body,
                     error=ErrorDetails(code=response.status_code, message="Invalid JSON response"),
-                    ids=[item.as_id()],
+                    ids=[item.source_id],
                 )
             )
             return None
@@ -396,11 +400,11 @@
             upload_url = cast(str, body["uploadUrl"])
         except (KeyError, IndexError):
             results.append(
-                FailedResponseItems(
+                ItemsFailedResponse(
                     status_code=200,
                     body=json.dumps(body),
                     error=ErrorDetails(code=200, message="Malformed response"),
-                    ids=[item.as_id()],
+                    ids=[item.source_id],
                 )
             )
             return None
cognite_toolkit/_cdf_tk/storageio/_raw.py

@@ -4,7 +4,8 @@ from uuid import uuid4

 from cognite.client.data_classes import Row, RowWrite

-from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, HTTPMessage, ItemsRequest
+from cognite_toolkit._cdf_tk.client.http_client import HTTPClient
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsRequest, ItemsResultList
 from cognite_toolkit._cdf_tk.cruds import RawDatabaseCRUD, RawTableCRUD
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
 from cognite_toolkit._cdf_tk.utils import sanitize_filename
@@ -60,16 +61,16 @@ class RawIO(
         data_chunk: Sequence[UploadItem[RowWrite]],
         http_client: HTTPClient,
         selector: RawTableSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         if selector is None:
             raise ToolkitValueError("Selector must be provided for RawIO upload_items")
         url = self.UPLOAD_ENDPOINT.format(dbName=selector.table.db_name, tableName=selector.table.table_name)
         config = http_client.config
-        return http_client.request_with_retries(
+        return http_client.request_items_retries(
             message=ItemsRequest(
                 endpoint_url=config.create_api_url(url),
                 method="POST",
-                items=list(data_chunk),
+                items=data_chunk,
             )
         )
cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml

@@ -12,7 +12,7 @@ jobs:
     environment: dev
     name: Deploy
     container:
-      image: cognite/toolkit:0.7.55
+      image: cognite/toolkit:0.7.57
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml

@@ -10,7 +10,7 @@ jobs:
     environment: dev
     name: Deploy Dry Run
     container:
-      image: cognite/toolkit:0.7.55
+      image: cognite/toolkit:0.7.57
     env:
       CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
       CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_resources/cdf.toml

@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.7.55"
+version = "0.7.57"


 [plugins]
cognite_toolkit/_version.py

@@ -1 +1 @@
-__version__ = "0.7.55"
+__version__ = "0.7.57"
{cognite_toolkit-0.7.55.dist-info → cognite_toolkit-0.7.57.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.7.55
+Version: 0.7.57
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Author: Cognite AS
 Author-email: Cognite AS <support@cognite.com>