cognite-toolkit 0.7.54__py3-none-any.whl → 0.7.56__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- cognite_toolkit/_cdf_tk/apps/_download_app.py +19 -42
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py +28 -36
- cognite_toolkit/_cdf_tk/apps/_purge.py +14 -15
- cognite_toolkit/_cdf_tk/apps/_upload_app.py +3 -9
- cognite_toolkit/_cdf_tk/client/http_client/__init__.py +0 -38
- cognite_toolkit/_cdf_tk/client/http_client/_client.py +4 -161
- cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +18 -18
- cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py +7 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/command.py +8 -8
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +26 -25
- cognite_toolkit/_cdf_tk/commands/_profile.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_purge.py +20 -21
- cognite_toolkit/_cdf_tk/commands/_upload.py +4 -6
- cognite_toolkit/_cdf_tk/commands/auth.py +12 -15
- cognite_toolkit/_cdf_tk/commands/clean.py +2 -1
- cognite_toolkit/_cdf_tk/commands/dump_resource.py +30 -19
- cognite_toolkit/_cdf_tk/commands/init.py +3 -3
- cognite_toolkit/_cdf_tk/commands/modules.py +17 -10
- cognite_toolkit/_cdf_tk/commands/pull.py +2 -2
- cognite_toolkit/_cdf_tk/commands/repo.py +1 -1
- cognite_toolkit/_cdf_tk/commands/resources.py +8 -5
- cognite_toolkit/_cdf_tk/commands/run.py +8 -7
- cognite_toolkit/_cdf_tk/protocols.py +3 -1
- cognite_toolkit/_cdf_tk/storageio/_applications.py +3 -3
- cognite_toolkit/_cdf_tk/storageio/_base.py +16 -11
- cognite_toolkit/_cdf_tk/storageio/_datapoints.py +37 -25
- cognite_toolkit/_cdf_tk/storageio/_file_content.py +39 -35
- cognite_toolkit/_cdf_tk/storageio/_raw.py +6 -5
- cognite_toolkit/_cdf_tk/utils/auth.py +7 -7
- cognite_toolkit/_cdf_tk/utils/interactive_select.py +49 -49
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/RECORD +38 -39
- cognite_toolkit/_cdf_tk/client/http_client/_data_classes.py +0 -428
- {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/entry_points.txt +0 -0
--- cognite_toolkit/_cdf_tk/storageio/_datapoints.py (0.7.54)
+++ cognite_toolkit/_cdf_tk/storageio/_datapoints.py (0.7.56)
@@ -13,15 +13,16 @@ from cognite.client._proto.data_points_pb2 import (
 from cognite.client.data_classes import TimeSeriesFilter
 from cognite.client.data_classes.filters import Exists
 from cognite.client.data_classes.time_series import TimeSeriesProperty
+from pydantic import ConfigDict
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
+from cognite_toolkit._cdf_tk.client._resource_base import Identifier, RequestResource
 from cognite_toolkit._cdf_tk.client.http_client import (
-    DataBodyRequest,
     HTTPClient,
-
-
-    SuccessResponse,
+    RequestMessage2,
+    SuccessResponse2,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsResultList
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
 from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
 from cognite_toolkit._cdf_tk.utils import humanize_collection
@@ -39,9 +40,22 @@ from ._base import Page, TableStorageIO, TableUploadableStorageIO, UploadItem
 from .selectors import DataPointsDataSetSelector, DataPointsFileSelector, DataPointsSelector
 
 
+class DatapointsRequestAdapter(RequestResource):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+    datapoints: DataPointInsertionRequest
+
+    def dump(self, camel_case: bool = True, exclude_extra: bool = False) -> dict[str, Any]:
+        return {"datapoints": self.datapoints.SerializeToString()}
+
+    def as_id(self) -> Identifier:
+        raise NotImplementedError(
+            "DatapointsRequestAdapter does not have an identifier. - it wraps multiple timeseries"
+        )
+
+
 class DatapointsIO(
     TableStorageIO[DataPointsSelector, DataPointListResponse],
-    TableUploadableStorageIO[DataPointsSelector, DataPointListResponse,
+    TableUploadableStorageIO[DataPointsSelector, DataPointListResponse, DatapointsRequestAdapter],
 ):
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".csv"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
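A note on the new `DatapointsRequestAdapter`: generated protobuf messages are not pydantic models, so they cannot be used directly where the upload pipeline expects a `RequestResource`. The hunk above wraps the protobuf request in a pydantic model with `arbitrary_types_allowed`. A minimal, self-contained sketch of that pattern, using a stand-in class for `DataPointInsertionRequest` and a plain `BaseModel` in place of the toolkit-internal `RequestResource`:

```python
from typing import Any

from pydantic import BaseModel, ConfigDict


class FakeProto:
    """Stand-in for a generated protobuf message such as DataPointInsertionRequest."""

    def SerializeToString(self) -> bytes:
        return b"\x08\x01"


class ProtoAdapter(BaseModel):
    # Protobuf classes are not pydantic-aware, so arbitrary types must be allowed.
    model_config = ConfigDict(arbitrary_types_allowed=True)
    datapoints: FakeProto

    def dump(self) -> dict[str, Any]:
        # The payload is raw protobuf bytes, not a JSON-serializable dict.
        return {"datapoints": self.datapoints.SerializeToString()}


print(ProtoAdapter(datapoints=FakeProto()).dump())  # {'datapoints': b'\x08\x01'}
```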
@@ -117,8 +131,8 @@ class DatapointsIO(
                 }
                 for ts in timeseries
             ]
-
-
+            response = self.client.http_client.request_single_retries(
+                RequestMessage2(
                     endpoint_url=config.create_api_url("/timeseries/data/list"),
                     method="POST",
                     accept="application/protobuf",
@@ -126,10 +140,9 @@ class DatapointsIO(
                     body_content={"items": items}, # type: ignore[dict-item]
                 )
             )
-
-            if first_success is None:
+            if not isinstance(response, SuccessResponse2):
                 continue
-            aggregate_response: DataPointListResponse = DataPointListResponse.FromString(
+            aggregate_response: DataPointListResponse = DataPointListResponse.FromString(response.content)
             timeseries_ids_with_data: dict[int, int] = {}
             for dp in aggregate_response.items:
                 if dp.aggregateDatapoints.datapoints:
@@ -176,8 +189,8 @@ class DatapointsIO(
             yield page
 
     def _fetch_datapoints_batch(self, batch: list[dict[str, Any]], config: Any) -> Page[DataPointListResponse] | None:
-
-
+        response = self.client.http_client.request_single_retries(
+            RequestMessage2(
                 endpoint_url=config.create_api_url("/timeseries/data/list"),
                 method="POST",
                 accept="application/protobuf",
@@ -185,10 +198,9 @@ class DatapointsIO(
                 body_content={"items": batch}, # type: ignore[dict-item]
             )
         )
-
-        if first_success is None:
+        if not isinstance(response, SuccessResponse2):
             return None
-        data_response: DataPointListResponse = DataPointListResponse.FromString(
+        data_response: DataPointListResponse = DataPointListResponse.FromString(response.content)
         return Page("Main", [data_response])
 
     def count(self, selector: DataPointsSelector) -> int | None:
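The hunks above replace the old `first_success` bookkeeping with a result-as-union style: `request_single_retries` returns either a success or a failure object, and callers branch with `isinstance` instead of unwinding the loop with an exception. A simplified sketch of the pattern, with stand-in types for the toolkit's `SuccessResponse2`/`FailedResponse2`:

```python
from dataclasses import dataclass


@dataclass
class Success:
    content: bytes


@dataclass
class Failure:
    status_code: int
    message: str


def fetch_batch(batch_no: int) -> Success | Failure:
    # Stand-in for http_client.request_single_retries(RequestMessage2(...)).
    return Success(b"proto-bytes") if batch_no % 2 == 0 else Failure(502, "bad gateway")


for batch_no in range(4):
    response = fetch_batch(batch_no)
    if not isinstance(response, Success):
        continue  # a failed batch is skipped, not raised, just as in the diff
    payload = response.content  # e.g. DataPointListResponse.FromString(payload)
```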
@@ -238,26 +250,26 @@ class DatapointsIO(
 
     def upload_items(
         self,
-        data_chunk: Sequence[UploadItem[
+        data_chunk: Sequence[UploadItem[DatapointsRequestAdapter]],
         http_client: HTTPClient,
         selector: DataPointsSelector | None = None,
-    ) ->
-        results
+    ) -> ItemsResultList:
+        results = ItemsResultList()
         for item in data_chunk:
-            response = http_client.
-
+            response = http_client.request_single_retries(
+                RequestMessage2(
                     endpoint_url=http_client.config.create_api_url(self.UPLOAD_ENDPOINT),
                     method="POST",
                     content_type="application/protobuf",
-                    data_content=item.item.SerializeToString(),
+                    data_content=item.item.datapoints.SerializeToString(),
                 )
             )
-            results.
+            results.append(response.as_item_response(item.source_id))
         return results
 
     def row_to_resource(
         self, source_id: str, row: dict[str, JsonVal], selector: DataPointsSelector | None = None
-    ) ->
+    ) -> DatapointsRequestAdapter:
         if selector is None:
             raise ValueError("Selector must be provided to convert row to DataPointInsertionItem.")
         # We assume that the row was read using the read_chunks method.
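For context on `content_type="application/protobuf"` with `data_content=...SerializeToString()`: the datapoints payload is posted as raw protobuf bytes rather than JSON. A hedged illustration in plain `httpx` terms (the cluster, project, token, and endpoint path are placeholders; the real call goes through the toolkit's retrying HTTP client):

```python
import httpx

payload: bytes = b"..."  # item.item.datapoints.SerializeToString() in the real code
httpx.post(
    "https://api.cognitedata.com/api/v1/projects/my-project/timeseries/data",  # illustrative URL
    content=payload,  # raw bytes body, not json=
    headers={"Content-Type": "application/protobuf", "Authorization": "Bearer <token>"},
)
```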
@@ -270,7 +282,7 @@ class DatapointsIO(
             raise RuntimeError(
                 f"Unsupported selector type {type(selector).__name__} for {type(self).__name__}. Trying to transform {source_id!r} from rows to DataPointInsertionRequest."
             )
-        return DataPointInsertionRequest(items=datapoints_items)
+        return DatapointsRequestAdapter(datapoints=DataPointInsertionRequest(items=datapoints_items))
 
     def _rows_to_datapoint_items_file_selector(
         self, rows: dict[str, list[Any]], selector: DataPointsFileSelector, source_id: str
@@ -406,7 +418,7 @@ class DatapointsIO(
             ).print_warning(console=self.client.console)
             self._warned_columns.add(column)
 
-    def json_to_resource(self, item_json: dict[str, JsonVal]) ->
+    def json_to_resource(self, item_json: dict[str, JsonVal]) -> DatapointsRequestAdapter:
         raise ToolkitNotImplementedError(
             f"Upload of {type(DatapointsIO).__name__.removesuffix('IO')} does not support json format."
         )
--- cognite_toolkit/_cdf_tk/storageio/_file_content.py (0.7.54)
+++ cognite_toolkit/_cdf_tk/storageio/_file_content.py (0.7.56)
@@ -1,25 +1,23 @@
 import json
 import mimetypes
-from collections.abc import Iterable,
+from collections.abc import Iterable, Sequence
 from dataclasses import dataclass
 from pathlib import Path
-from typing import cast
+from typing import Any, cast
 
 import httpx
 from cognite.client.data_classes.data_modeling import ViewId
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.http_client import (
-
+    ErrorDetails2,
     FailedResponse2,
-    FailedResponseItems,
     HTTPClient,
-    HTTPMessage,
     HTTPResult2,
     RequestMessage2,
-    SimpleBodyRequest,
     SuccessResponse2,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsFailedResponse2, ItemsResultList
 from cognite_toolkit._cdf_tk.client.resource_classes.data_modeling import NodeReference
 from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataRequest, FileMetadataResponse
 from cognite_toolkit._cdf_tk.cruds import FileMetadataCRUD
@@ -46,13 +44,12 @@ from .selectors._file_content import NodeId as SelectorNodeId
 COGNITE_FILE_VIEW = ViewId("cdf_cdm", "CogniteFile", "v1")
 
 
-@dataclass
 class UploadFileContentItem(UploadItem[FileMetadataRequest]):
     file_path: Path
     mime_type: str
 
-    def dump(self) ->
-        return self.item.dump(camel_case=
+    def dump(self, camel_case: bool = True, exclude_extra: bool = True) -> dict[str, Any]:
+        return self.item.dump(camel_case=camel_case, exclude_extra=exclude_extra)
 
 
 @dataclass
@@ -123,8 +120,8 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
 
     def _retrieve_metadata(self, identifiers: Sequence[FileIdentifier]) -> Sequence[FileMetadataResponse] | None:
         config = self.client.config
-
-        message=
+        response = self.client.http_client.request_single_retries(
+            message=RequestMessage2(
                 endpoint_url=config.create_api_url("/files/byids"),
                 method="POST",
                 body_content={
@@ -136,9 +133,13 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
                 },
             )
         )
-        if
+        if not isinstance(response, SuccessResponse2):
+            return None
+        try:
+            body = response.body_json
+        except ValueError:
             return None
-
+
         items_data = body.get("items", [])
         if not isinstance(items_data, list):
             return None
@@ -182,20 +183,24 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
 
     def _retrieve_download_url(self, identifier: FileIdentifier) -> str | None:
         config = self.client.config
-
-        message=
+        response = self.client.http_client.request_single_retries(
+            message=RequestMessage2(
                 endpoint_url=config.create_api_url("/files/downloadlink"),
                 method="POST",
                 body_content={"items": [identifier.model_dump(mode="json", by_alias=True, exclude={"id_type"})]},
             )
         )
+        if not isinstance(response, SuccessResponse2):
+            return None
+
         try:
-            body =
+            body = response.body_json
         except ValueError:
             return None
+
         if "items" in body and isinstance(body["items"], list) and len(body["items"]) > 0:
             # The API responses is not following the API docs, this is a workaround
-            body = body["items"][0]
+            body = body["items"][0]
         try:
             return cast(str, body["downloadUrl"])
         except (KeyError, IndexError):
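The `_retrieve_download_url` change above also tightens response parsing: a non-success result or unparseable JSON now returns `None` early, and the `items`-wrapped payload is unwrapped before reading `downloadUrl`. The same parsing logic in a runnable stand-alone form (the JSON shape mirrors the workaround noted in the code comment):

```python
import json
from typing import cast


def parse_download_url(raw_body: str) -> str | None:
    try:
        body = json.loads(raw_body)
    except ValueError:
        return None
    # The API sometimes wraps the payload in an "items" list; unwrap it first.
    if "items" in body and isinstance(body["items"], list) and len(body["items"]) > 0:
        body = body["items"][0]
    try:
        return cast(str, body["downloadUrl"])
    except (KeyError, IndexError):
        return None


print(parse_download_url('{"items": [{"downloadUrl": "https://example.com/file"}]}'))
```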
@@ -257,8 +262,8 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
         data_chunk: Sequence[UploadItem[FileMetadataRequest]],
         http_client: HTTPClient,
         selector: FileContentSelector | None = None,
-    ) ->
-        results
+    ) -> ItemsResultList:
+        results = ItemsResultList()
         if isinstance(selector, FileMetadataTemplateSelector | FileIdentifierSelector):
             upload_url_getter = self._upload_url_asset_centric
         elif isinstance(selector, FileDataModelingTemplateSelector):
@@ -284,18 +289,17 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
                     content_length=len(content_bytes),
                 )
             )
-            results.append(upload_response.as_item_response(item
+            results.append(upload_response.as_item_response(str(item)))
         return results
 
     def _upload_url_asset_centric(
-        self, item: UploadFileContentItem, http_client: HTTPClient, results:
+        self, item: UploadFileContentItem, http_client: HTTPClient, results: ItemsResultList
     ) -> str | None:
         response = http_client.request_single_retries(
             message=RequestMessage2(
                 endpoint_url=http_client.config.create_api_url(self.UPLOAD_ENDPOINT),
                 method="POST",
-
-                body_content=item.dump(), # type: ignore[arg-type]
+                body_content=item.dump(),
             )
         )
         return self._parse_upload_link_response(response, item, results)
@@ -304,7 +308,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
         self,
         item: UploadFileContentItem,
         http_client: HTTPClient,
-        results:
+        results: ItemsResultList,
         created_node: bool = False,
     ) -> str | None:
         """Get upload URL for data modeling file upload.
@@ -333,7 +337,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
             )
         )
         if isinstance(response, FailedResponse2) and response.error.missing and not created_node:
-            if self._create_cognite_file_node(instance_id, http_client, item.
+            if self._create_cognite_file_node(instance_id, http_client, item.source_id, results):
                 return self._upload_url_data_modeling(item, http_client, results, created_node=True)
             else:
                 return None
@@ -342,7 +346,7 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
 
     @classmethod
     def _create_cognite_file_node(
-        cls, instance_id: NodeReference, http_client: HTTPClient, upload_id: str, results:
+        cls, instance_id: NodeReference, http_client: HTTPClient, upload_id: str, results: ItemsResultList
     ) -> bool:
         node_creation = http_client.request_single_retries(
             message=RequestMessage2(
@@ -368,25 +372,25 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
         if isinstance(node_creation, SuccessResponse2):
             # Node created successfully
             return True
-        results.append(node_creation.as_item_response(
+        results.append(node_creation.as_item_response(upload_id))
         return False
 
     @classmethod
     def _parse_upload_link_response(
-        cls, response: HTTPResult2, item: UploadFileContentItem, results:
+        cls, response: HTTPResult2, item: UploadFileContentItem, results: ItemsResultList
    ) -> str | None:
         if not isinstance(response, SuccessResponse2):
-            results.append(response.as_item_response(item.
+            results.append(response.as_item_response(item.source_id))
             return None
         try:
             body = response.body_json
         except ValueError:
             results.append(
-
+                ItemsFailedResponse2(
                     status_code=response.status_code,
                     body=response.body,
-                    error=
-                    ids=[item.
+                    error=ErrorDetails2(code=response.status_code, message="Invalid JSON response"),
+                    ids=[item.source_id],
                 )
             )
             return None
@@ -396,11 +400,11 @@ class FileContentIO(UploadableStorageIO[FileContentSelector, MetadataWithFilePat
             upload_url = cast(str, body["uploadUrl"])
         except (KeyError, IndexError):
             results.append(
-
+                ItemsFailedResponse2(
                     status_code=200,
                     body=json.dumps(body),
-                    error=
-                    ids=[item.
+                    error=ErrorDetails2(code=200, message="Malformed response"),
+                    ids=[item.source_id],
                 )
             )
             return None
--- cognite_toolkit/_cdf_tk/storageio/_raw.py (0.7.54)
+++ cognite_toolkit/_cdf_tk/storageio/_raw.py (0.7.56)
@@ -4,7 +4,8 @@ from uuid import uuid4
 
 from cognite.client.data_classes import Row, RowWrite
 
-from cognite_toolkit._cdf_tk.client.http_client import HTTPClient
+from cognite_toolkit._cdf_tk.client.http_client import HTTPClient
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsRequest2, ItemsResultList
 from cognite_toolkit._cdf_tk.cruds import RawDatabaseCRUD, RawTableCRUD
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
 from cognite_toolkit._cdf_tk.utils import sanitize_filename
@@ -60,16 +61,16 @@ class RawIO(
         data_chunk: Sequence[UploadItem[RowWrite]],
         http_client: HTTPClient,
         selector: RawTableSelector | None = None,
-    ) ->
+    ) -> ItemsResultList:
         if selector is None:
             raise ToolkitValueError("Selector must be provided for RawIO upload_items")
         url = self.UPLOAD_ENDPOINT.format(dbName=selector.table.db_name, tableName=selector.table.table_name)
         config = http_client.config
-        return http_client.
-            message=
+        return http_client.request_items_retries(
+            message=ItemsRequest2(
                 endpoint_url=config.create_api_url(url),
                 method="POST",
-                items=
+                items=data_chunk,
             )
         )
 
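The RAW upload now goes through `request_items_retries` with an `ItemsRequest2`, i.e. rows are posted as a JSON `{"items": [...]}` body. Those helpers are toolkit-internal; as a rough equivalent, the same request in plain `httpx` would look like this (cluster, project, database, and table names are placeholders):

```python
import httpx

rows = [{"key": "row-1", "columns": {"pressure": 1.2, "unit": "bar"}}]
httpx.post(
    "https://api.cognitedata.com/api/v1/projects/my-project/raw/dbs/my-db/tables/my-table/rows",
    json={"items": rows},  # RAW rows are wrapped in an "items" envelope
    headers={"Authorization": "Bearer <token>"},
)
```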
--- cognite_toolkit/_cdf_tk/utils/auth.py (0.7.54)
+++ cognite_toolkit/_cdf_tk/utils/auth.py (0.7.56)
@@ -450,7 +450,7 @@ def prompt_user_environment_variables(current: EnvironmentVariables | None = Non
             if provider != "cdf"
         ],
         default=current.PROVIDER if current else "entra_id",
-    ).
+    ).unsafe_ask()
     exclude = set()
     if provider == "cdf":
         exclude = set(VALID_LOGIN_FLOWS) - {"client_credentials"}
@@ -467,10 +467,10 @@ def prompt_user_environment_variables(current: EnvironmentVariables | None = Non
         "Choose the login flow (How do you going to authenticate?)",
         choices=choices,
         default=current.LOGIN_FLOW if current else "client_credentials",
-    ).
+    ).unsafe_ask()
 
-    cdf_cluster = questionary.text("Enter the CDF cluster", default=current.CDF_CLUSTER if current else "").
-    cdf_project = questionary.text("Enter the CDF project", default=current.CDF_PROJECT if current else "").
+    cdf_cluster = questionary.text("Enter the CDF cluster", default=current.CDF_CLUSTER if current else "").unsafe_ask()
+    cdf_project = questionary.text("Enter the CDF project", default=current.CDF_PROJECT if current else "").unsafe_ask()
     args: dict[str, Any] = (
         current.dump(include_os=False)
         if current and _is_unchanged(current, provider, login_flow, cdf_project, cdf_cluster)  # type: ignore[arg-type]
@@ -490,7 +490,7 @@ def prompt_user_environment_variables(current: EnvironmentVariables | None = Non
     optional_values = env_vars.get_optional_with_value()
     for field_, value in optional_values:
         print(f"  {field_.name}={value}")
-    if questionary.confirm("Do you want to change any of these variables?", default=False).
+    if questionary.confirm("Do you want to change any of these variables?", default=False).unsafe_ask():
         for field_, value in optional_values:
             user_value = get_user_value(field_, value, provider, cdf_cluster, cdf_project, idp_tenant_id)
             setattr(env_vars, field_.name, user_value)
@@ -517,9 +517,9 @@ def get_user_value(
     elif value is not None and not isinstance(value, str):
         default = str(value)
     if is_secret:
-        user_value = questionary.password(f"Enter the {display_name}:", default=default).
+        user_value = questionary.password(f"Enter the {display_name}:", default=default).unsafe_ask()
     else:
-        user_value = questionary.text(f"Enter the {display_name}:", default=default).
+        user_value = questionary.text(f"Enter the {display_name}:", default=default).unsafe_ask()
     if user_value is None:
         raise typer.Exit(0)
     if field_.type is int:
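The repeated change in this file is prompt calls ending in `.unsafe_ask()`. In questionary, `.ask()` catches `KeyboardInterrupt` and returns `None`, forcing a `None` check after every prompt; `.unsafe_ask()` lets the interrupt propagate so a single handler can abort the whole prompt flow. A minimal sketch:

```python
import questionary

try:
    project = questionary.text("Enter the CDF project").unsafe_ask()
except KeyboardInterrupt:
    raise SystemExit(1)  # one Ctrl-C exit path instead of per-prompt None checks
```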
|