cognite-toolkit 0.7.54__py3-none-any.whl → 0.7.56__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. cognite_toolkit/_cdf_tk/apps/_download_app.py +19 -42
  2. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +28 -36
  3. cognite_toolkit/_cdf_tk/apps/_purge.py +14 -15
  4. cognite_toolkit/_cdf_tk/apps/_upload_app.py +3 -9
  5. cognite_toolkit/_cdf_tk/client/http_client/__init__.py +0 -38
  6. cognite_toolkit/_cdf_tk/client/http_client/_client.py +4 -161
  7. cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py +18 -18
  8. cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py +7 -1
  9. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +8 -8
  10. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +26 -25
  11. cognite_toolkit/_cdf_tk/commands/_profile.py +1 -1
  12. cognite_toolkit/_cdf_tk/commands/_purge.py +20 -21
  13. cognite_toolkit/_cdf_tk/commands/_upload.py +4 -6
  14. cognite_toolkit/_cdf_tk/commands/auth.py +12 -15
  15. cognite_toolkit/_cdf_tk/commands/clean.py +2 -1
  16. cognite_toolkit/_cdf_tk/commands/dump_resource.py +30 -19
  17. cognite_toolkit/_cdf_tk/commands/init.py +3 -3
  18. cognite_toolkit/_cdf_tk/commands/modules.py +17 -10
  19. cognite_toolkit/_cdf_tk/commands/pull.py +2 -2
  20. cognite_toolkit/_cdf_tk/commands/repo.py +1 -1
  21. cognite_toolkit/_cdf_tk/commands/resources.py +8 -5
  22. cognite_toolkit/_cdf_tk/commands/run.py +8 -7
  23. cognite_toolkit/_cdf_tk/protocols.py +3 -1
  24. cognite_toolkit/_cdf_tk/storageio/_applications.py +3 -3
  25. cognite_toolkit/_cdf_tk/storageio/_base.py +16 -11
  26. cognite_toolkit/_cdf_tk/storageio/_datapoints.py +37 -25
  27. cognite_toolkit/_cdf_tk/storageio/_file_content.py +39 -35
  28. cognite_toolkit/_cdf_tk/storageio/_raw.py +6 -5
  29. cognite_toolkit/_cdf_tk/utils/auth.py +7 -7
  30. cognite_toolkit/_cdf_tk/utils/interactive_select.py +49 -49
  31. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  32. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  33. cognite_toolkit/_resources/cdf.toml +1 -1
  34. cognite_toolkit/_version.py +1 -1
  35. {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/METADATA +1 -1
  36. {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/RECORD +38 -39
  37. cognite_toolkit/_cdf_tk/client/http_client/_data_classes.py +0 -428
  38. {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/WHEEL +0 -0
  39. {cognite_toolkit-0.7.54.dist-info → cognite_toolkit-0.7.56.dist-info}/entry_points.txt +0 -0

cognite_toolkit/_cdf_tk/client/http_client/_data_classes2.py
@@ -1,23 +1,17 @@
 import gzip
 from abc import ABC, abstractmethod
-from collections.abc import Hashable
 from typing import Any, Literal

 import httpx
 from cognite.client import global_config
-from pydantic import BaseModel, JsonValue, TypeAdapter, model_validator
-
-from cognite_toolkit._cdf_tk.client.http_client._data_classes import (
-    ErrorDetails,
-    FailedRequestItems,
-    FailedRequestMessage,
-    FailedResponseItems,
-    ResponseMessage,
-    SuccessResponseItems,
-)
+from pydantic import TYPE_CHECKING, BaseModel, JsonValue, TypeAdapter, model_validator
+
 from cognite_toolkit._cdf_tk.client.http_client._exception import ToolkitAPIError
 from cognite_toolkit._cdf_tk.utils.useful_types import PrimitiveType

+if TYPE_CHECKING:
+    from cognite_toolkit._cdf_tk.client.http_client._item_classes import ItemsResultMessage2
+

 class HTTPResult2(BaseModel):
     def get_success_or_raise(self) -> "SuccessResponse2":
@@ -35,18 +29,24 @@ class HTTPResult2(BaseModel):
         else:
             raise ToolkitAPIError("Unknown HTTPResult2 type")

-    # Todo: Remove when HTTPResult2 is renamed to HTTPResponse and the old HTTPResponse is deleted
-    def as_item_response(self, item_id: Hashable) -> ResponseMessage | FailedRequestMessage:
+    def as_item_response(self, item_id: str) -> "ItemsResultMessage2":
+        # Avoid circular import
+        from cognite_toolkit._cdf_tk.client.http_client._item_classes import (
+            ItemsFailedRequest2,
+            ItemsFailedResponse2,
+            ItemsSuccessResponse2,
+        )
+
         if isinstance(self, SuccessResponse2):
-            return SuccessResponseItems(
+            return ItemsSuccessResponse2(
                 status_code=self.status_code, content=self.content, ids=[item_id], body=self.body
             )
         elif isinstance(self, FailedResponse2):
-            return FailedResponseItems(
+            return ItemsFailedResponse2(
                 status_code=self.status_code,
                 ids=[item_id],
                 body=self.body,
-                error=ErrorDetails(
+                error=ErrorDetails2(
                     code=self.error.code,
                     message=self.error.message,
                     missing=self.error.missing,
@@ -54,9 +54,9 @@ class HTTPResult2(BaseModel):
                 ),
             )
         elif isinstance(self, FailedRequest2):
-            return FailedRequestItems(ids=[item_id], error=self.error)
+            return ItemsFailedRequest2(ids=[item_id], error_message=self.error)
         else:
-            raise ToolkitAPIError("Unknown HTTPResult2 type")
+            raise ToolkitAPIError(f"Unknown {type(self).__name__} type")


 class FailedRequest2(HTTPResult2):
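
The reworked as_item_response now always returns one of the new ItemsResultMessage2 subclasses and takes a plain string id. A minimal sketch of how a caller might fan per-item HTTP results out into these messages; the collect_item_messages helper and the results_by_id mapping are illustrative, not part of the Toolkit API:

    # Hypothetical helper; assumes HTTPResult2 and the _item_classes module shown above are importable.
    def collect_item_messages(results_by_id: dict[str, HTTPResult2]) -> list:
        messages = []
        for item_id, result in results_by_id.items():
            # Success, failed-response, and failed-request results all map into the same
            # ItemsResultMessage2 hierarchy, so callers can branch with isinstance().
            messages.append(result.as_item_response(item_id))
        return messages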

cognite_toolkit/_cdf_tk/client/resource_classes/filemetadata.py
@@ -1,4 +1,4 @@
-from typing import ClassVar, Literal
+from typing import Any, ClassVar, Literal

 from pydantic import Field, JsonValue

@@ -37,6 +37,12 @@ class FileMetadataRequest(FileMetadata, UpdatableRequestResource):
     # from response to request.
     instance_id: NodeReference | None = Field(default=None, exclude=True)

+    def as_update(self, mode: Literal["patch", "replace"]) -> dict[str, Any]:
+        update = super().as_update(mode)
+        # Name cannot be updated.
+        update["update"].pop("name", None)
+        return update
+

 class FileMetadataResponse(FileMetadata, ResponseResource[FileMetadataRequest]):
     created_time: int
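
The new as_update override keeps the immutable name field out of the generated update body. A small sketch of the effect, assuming as_update returns the usual identifier-plus-update payload; the field names and values here are made up for illustration:

    # Hypothetical instance; the real fields come from the FileMetadata model.
    request = FileMetadataRequest(external_id="my_file", name="report.pdf")
    payload = request.as_update(mode="patch")
    # Whatever else ends up in payload["update"], "name" has been popped out of it.
    assert "name" not in payload["update"]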

cognite_toolkit/_cdf_tk/commands/_migrate/command.py
@@ -9,10 +9,10 @@ from rich.table import Table

 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.http_client import (
-    FailedRequestItems,
-    FailedResponseItems,
     HTTPClient,
-    SuccessResponseItems,
+    ItemsFailedRequest2,
+    ItemsFailedResponse2,
+    ItemsSuccessResponse2,
 )
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.commands._migrate.creators import MigrationCreator
@@ -187,20 +187,20 @@ class MigrationCommand(ToolkitCommand):
         # Todo: Move logging into the UploadableStorageIO class
         issues: list[WriteIssue] = []
         for item in responses:
-            if isinstance(item, SuccessResponseItems):
+            if isinstance(item, ItemsSuccessResponse2):
                 target.logger.tracker.finalize_item(item.ids, "success")
                 continue
-            if isinstance(item, FailedResponseItems):
+            if isinstance(item, ItemsFailedResponse2):
                 error = item.error
                 for id_ in item.ids:
                     issue = WriteIssue(id=str(id_), status_code=error.code, message=error.message)
                     issues.append(issue)
-            elif isinstance(item, FailedRequestItems):
+            elif isinstance(item, ItemsFailedRequest2):
                 for id_ in item.ids:
-                    issue = WriteIssue(id=str(id_), status_code=0, message=item.error)
+                    issue = WriteIssue(id=str(id_), status_code=0, message=item.error_message)
                     issues.append(issue)

-            if isinstance(item, FailedResponseItems | FailedRequestItems):
+            if isinstance(item, ItemsFailedResponse2 | ItemsFailedRequest2):
                 target.logger.tracker.finalize_item(item.ids, "failure")
         if issues:
             target.logger.log(issues)

cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py
@@ -6,14 +6,16 @@ from cognite.client.data_classes.data_modeling import EdgeId, InstanceApply, Nod

 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.http_client import (
-    FailedResponse,
     HTTPClient,
-    HTTPMessage,
-    ItemsRequest,
     RequestMessage2,
-    SuccessResponseItems,
     ToolkitAPIError,
 )
+from cognite_toolkit._cdf_tk.client.http_client._item_classes import (
+    ItemsFailedResponse2,
+    ItemsRequest2,
+    ItemsResultList,
+    ItemsSuccessResponse2,
+)
 from cognite_toolkit._cdf_tk.client.resource_classes.legacy.pending_instances_ids import PendingInstanceId
 from cognite_toolkit._cdf_tk.client.resource_classes.three_d import (
     AssetMappingClassicResponse,
@@ -170,20 +172,20 @@ class AssetCentricMigrationIO(
         data_chunk: Sequence[UploadItem[InstanceApply]],
         http_client: HTTPClient,
         selector: AssetCentricMigrationSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         """Upload items by first linking them using files/set-pending-instance-ids and then uploading the instances."""
         if self.skip_linking:
-            return list(super().upload_items(data_chunk, http_client, None))
+            return super().upload_items(data_chunk, http_client, None)
         elif selector is None:
             raise ToolkitNotImplementedError(f"Selector must be provided for uploading {self.KIND} items.")
         elif selector.kind not in self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND:
-            return list(super().upload_items(data_chunk, http_client, None))
+            return super().upload_items(data_chunk, http_client, None)

         pending_instance_id_endpoint = self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND[selector.kind]
-        results: list[HTTPMessage] = []
+        results = ItemsResultList()
         to_upload = self.link_asset_centric(data_chunk, http_client, pending_instance_id_endpoint)
         if to_upload:
-            results.extend(list(super().upload_items(to_upload, http_client, None)))
+            results.extend(super().upload_items(to_upload, http_client, None))
         return results

     @classmethod
@@ -197,8 +199,8 @@ class AssetCentricMigrationIO(
         config = http_client.config
         successful_linked: set[str] = set()
         for batch in chunker_sequence(data_chunk, cls.CHUNK_SIZE):
-            batch_results = http_client.request_with_retries(
-                message=ItemsRequest(
+            batch_results = http_client.request_items_retries(
+                message=ItemsRequest2(
                     endpoint_url=config.create_api_url(pending_instance_id_endpoint),
                     method="POST",
                     api_version="alpha",
@@ -209,7 +211,7 @@
                 )
             )
             for res in batch_results:
-                if isinstance(res, SuccessResponseItems):
+                if isinstance(res, ItemsSuccessResponse2):
                     successful_linked.update(res.ids)
         to_upload = [item for item in data_chunk if item.source_id in successful_linked]
         return to_upload
@@ -441,26 +443,26 @@ class ThreeDMigrationIO(UploadableStorageIO[ThreeDSelector, ThreeDModelResponse,
         data_chunk: Sequence[UploadItem[ThreeDMigrationRequest]],
         http_client: HTTPClient,
         selector: ThreeDSelector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         """Migrate 3D models by uploading them to the migrate/models endpoint."""
         if len(data_chunk) > self.CHUNK_SIZE:
             raise RuntimeError(f"Uploading more than {self.CHUNK_SIZE} 3D models at a time is not supported.")

-        results: list[HTTPMessage] = []
-        responses = http_client.request_with_retries(
-            message=ItemsRequest(
+        results = ItemsResultList()
+        responses = http_client.request_items_retries(
+            message=ItemsRequest2(
                 endpoint_url=self.client.config.create_api_url(self.UPLOAD_ENDPOINT),
                 method="POST",
-                items=list(data_chunk),
+                items=data_chunk,
             )
         )
         if (
-            failed_response := next((res for res in responses if isinstance(res, FailedResponse)), None)
+            failed_response := next((res for res in responses if isinstance(res, ItemsFailedResponse2)), None)
         ) and failed_response.status_code == 400:
             raise ToolkitAPIError("3D model migration failed. You need to enable the 3D migration alpha feature flag.")

         results.extend(responses)
-        success_ids = {id for res in responses if isinstance(res, SuccessResponseItems) for id in res.ids}
+        success_ids = {id for res in responses if isinstance(res, ItemsSuccessResponse2) for id in res.ids}
         for data in data_chunk:
             if data.source_id not in success_ids:
                 continue
@@ -535,21 +537,21 @@ class ThreeDAssetMappingMigrationIO(
         data_chunk: Sequence[UploadItem[AssetMappingDMRequest]],
         http_client: HTTPClient,
         selector: T_Selector | None = None,
-    ) -> Sequence[HTTPMessage]:
+    ) -> ItemsResultList:
         """Migrate 3D asset mappings by uploading them to the migrate/asset-mappings endpoint."""
         if not data_chunk:
-            return []
+            return ItemsResultList()
         # Assume all items in the chunk belong to the same model and revision, they should
         # if the .stream_data method is used for downloading.
         first = data_chunk[0]
         model_id = first.item.model_id
         revision_id = first.item.revision_id
         endpoint = self.UPLOAD_ENDPOINT.format(modelId=model_id, revisionId=revision_id)
-        responses = http_client.request_with_retries(
-            ItemsRequest(
+        return http_client.request_items_retries(
+            ItemsRequest2(
                 endpoint_url=self.client.config.create_api_url(endpoint),
                 method="POST",
-                items=list(data_chunk),
+                items=data_chunk,
                 extra_body_fields={
                     "dmsContextualizationConfig": {
                         "object3DSpace": self.object_3D_space,
@@ -558,7 +560,6 @@
                 },
            )
        )
-        return responses

     def json_to_resource(self, item_json: dict[str, JsonVal]) -> AssetMappingDMRequest:
         raise NotImplementedError("Deserializing 3D Asset Mappings from JSON is not supported.")
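
All upload paths in this module now return the ItemsResultList produced by request_items_retries instead of hand-built HTTPMessage lists. A condensed sketch of the call shape these hunks converge on; the endpoint string and variable names are placeholders, not the actual Toolkit call sites:

    # Placeholder endpoint and chunk; mirrors the request_items_retries usage shown above.
    responses = http_client.request_items_retries(
        ItemsRequest2(
            endpoint_url=client.config.create_api_url("/some/endpoint"),
            method="POST",
            items=data_chunk,
        )
    )
    # Per-item success ids can then be collected directly from the typed result messages.
    success_ids = {id_ for res in responses if isinstance(res, ItemsSuccessResponse2) for id_ in res.ids}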

cognite_toolkit/_cdf_tk/commands/_profile.py
@@ -329,7 +329,7 @@ class ProfileCommand(ToolkitCommand, ABC, Generic[T_Index]):
         )

     def _ask_store_file(self) -> None:
-        if file_path := questionary.path("Where do you want to save the profile?").ask():
+        if file_path := questionary.path("Where do you want to save the profile?").unsafe_ask():
             self.output_spreadsheet = Path(file_path)

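
This and the prompts below all move from .ask() to .unsafe_ask(). In questionary, ask() catches KeyboardInterrupt and returns None, whereas unsafe_ask() lets Ctrl-C propagate so the command stops instead of continuing with a None answer. A standalone illustration of the difference:

    import questionary

    # ask() swallows Ctrl-C and returns None, so every caller must treat None as "cancelled".
    answer = questionary.confirm("Continue?").ask()

    # unsafe_ask() re-raises KeyboardInterrupt, so the CLI aborts (or handles it in one place).
    try:
        answer = questionary.confirm("Continue?").unsafe_ask()
    except KeyboardInterrupt:
        raise SystemExit(1)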

cognite_toolkit/_cdf_tk/commands/_purge.py
@@ -1,9 +1,9 @@
 import uuid
 from abc import ABC, abstractmethod
-from collections.abc import Callable, Hashable, Iterable, Sequence
+from collections.abc import Callable, Iterable, Sequence
 from dataclasses import dataclass
 from functools import partial
-from typing import Literal, cast
+from typing import Any, Literal, cast

 import questionary
 from cognite.client.data_classes import DataSetUpdate
@@ -11,17 +11,17 @@ from cognite.client.data_classes.data_modeling import Edge, NodeId
 from cognite.client.data_classes.data_modeling.statistics import SpaceStatistics
 from cognite.client.exceptions import CogniteAPIError
 from cognite.client.utils._identifier import InstanceId
+from pydantic import JsonValue
 from rich import print
 from rich.console import Console
 from rich.panel import Panel

 from cognite_toolkit._cdf_tk.client import ToolkitClient
+from cognite_toolkit._cdf_tk.client._resource_base import RequestItem
 from cognite_toolkit._cdf_tk.client.http_client import (
     HTTPClient,
-    ItemsRequest,
     ItemsRequest2,
     ItemsSuccessResponse2,
-    SuccessResponseItems,
 )
 from cognite_toolkit._cdf_tk.client.resource_classes.instance_api import TypedInstanceIdentifier
 from cognite_toolkit._cdf_tk.cruds import (
@@ -79,15 +79,14 @@ class DeleteResults:
     failed: int = 0


-@dataclass
-class DeleteItem:
-    item: JsonVal
-    as_id_fun: Callable[[JsonVal], Hashable]
+class DeleteItem(RequestItem):
+    item: JsonValue
+    as_id_fun: Callable[[JsonValue], str]

-    def dump(self) -> JsonVal:
-        return self.item
+    def dump(self, camel_case: bool = True, exclude_extra: bool = False) -> dict[str, Any]:
+        return self.item  # type: ignore[return-value]

-    def as_id(self) -> Hashable:
+    def __str__(self) -> str:
         return self.as_id_fun(self.item)


@@ -222,7 +221,7 @@ class PurgeCommand(ToolkitCommand):
         if not dry_run and not auto_yes:
             confirm = questionary.confirm(
                 f"Are you really sure you want to purge the {selected_space!r} space?", default=False
-            ).ask()
+            ).unsafe_ask()
             if not confirm:
                 return DeployResults([], "purge", dry_run=dry_run)

@@ -364,16 +363,16 @@ class PurgeCommand(ToolkitCommand):
     ) -> Callable[[list[JsonVal]], None]:
         crud = delete_item.crud

-        def as_id(item: JsonVal) -> Hashable:
+        def as_id(item: JsonVal) -> str:
             try:
-                return crud.get_id(item)
+                return str(crud.get_id(item))
             except KeyError:
                 # Fallback to internal ID
-                return crud.get_internal_id(item)
+                return str(crud.get_internal_id(item))

         def process(items: list[JsonVal]) -> None:
-            responses = delete_client.request_with_retries(
-                ItemsRequest(
+            responses = delete_client.request_items_retries(
+                ItemsRequest2(
                     endpoint_url=delete_url,
                     method="POST",
                     items=[DeleteItem(item=item, as_id_fun=as_id) for item in items],
@@ -381,10 +380,10 @@
                )
            )
            for response in responses:
-                if isinstance(response, SuccessResponseItems):
+                if isinstance(response, ItemsSuccessResponse2):
                    result.deleted += len(response.ids)
                else:
-                    result.unchanged += len(items)
+                    result.unchanged += len(response.ids)

         return process

@@ -423,7 +422,7 @@ class PurgeCommand(ToolkitCommand):
         if not dry_run and not auto_yes:
             confirm = questionary.confirm(
                 f"Are you really sure you want to purge the {selected_data_set_external_id!r} dataSet?", default=False
-            ).ask()
+            ).unsafe_ask()
             if not confirm:
                 return DeployResults([], "purge", dry_run=dry_run)

@@ -595,7 +594,7 @@ class PurgeCommand(ToolkitCommand):
            confirm = questionary.confirm(
                f"Are you really sure you want to purge all {total:,} instances in {selector!s}?",
                default=False,
-            ).ask()
+            ).unsafe_ask()
            if not confirm:
                return DeleteResults()

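
DeleteItem is now a RequestItem instead of a plain dataclass: dump() supplies the request body and __str__ supplies the identifier used for per-item progress reporting. A small sketch of how the purge closure builds these items; the payloads and the externalId-based id function are placeholders:

    # Placeholder payloads; in _purge.py the items come from the CRUD iterator and as_id
    # falls back from get_id to get_internal_id.
    def as_id(item):
        return str(item["externalId"])

    items = [{"externalId": "ts_001"}, {"externalId": "ts_002"}]
    request_items = [DeleteItem(item=item, as_id_fun=as_id) for item in items]
    assert str(request_items[0]) == "ts_001"  # __str__ is what the items client reports on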

cognite_toolkit/_cdf_tk/commands/_upload.py
@@ -3,13 +3,11 @@ from collections.abc import Sequence
 from functools import partial
 from pathlib import Path

-from cognite.client.data_classes.data_modeling import (
-    ViewId,
-)
+from cognite.client.data_classes.data_modeling import ViewId
 from rich.console import Console

 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, ItemMessage, SuccessResponseItems
+from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, ItemsResultMessage2, ItemsSuccessResponse2
 from cognite_toolkit._cdf_tk.constants import DATA_MANIFEST_SUFFIX, DATA_RESOURCE_DIR
 from cognite_toolkit._cdf_tk.cruds import ViewCRUD
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
@@ -279,10 +277,10 @@ class UploadCommand(ToolkitCommand):
             return
         results = io.upload_items(data_chunk, upload_client, selector)
         for message in results:
-            if isinstance(message, SuccessResponseItems):
+            if isinstance(message, ItemsSuccessResponse2):
                 for id_ in message.ids:
                     tracker.set_progress(id_, step=cls._UPLOAD, status="success")
-            elif isinstance(message, ItemMessage):
+            elif isinstance(message, ItemsResultMessage2):
                 for id_ in message.ids:
                     tracker.set_progress(id_, step=cls._UPLOAD, status="failed")
             else:
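
The success branch is checked before the broader ItemsResultMessage2 branch; assuming ItemsSuccessResponse2 derives from the same result-message base (as the shared import suggests), this ordering is what keeps successes from being counted as failures. A stripped-down sketch of the dispatch:

    # Most specific type first; the broader base class then catches the failure messages.
    for message in results:
        if isinstance(message, ItemsSuccessResponse2):
            status = "success"
        elif isinstance(message, ItemsResultMessage2):
            status = "failed"
        else:
            status = None  # non-item messages fall through to the original else branch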

cognite_toolkit/_cdf_tk/commands/auth.py
@@ -84,7 +84,7 @@ class AuthCommand(ToolkitCommand):
         ask_user = True
         if env_vars and not env_vars.get_missing_vars():
             print("Auth variables are already set.")
-            ask_user = questionary.confirm("Do you want to reconfigure the auth variables?", default=False).ask()
+            ask_user = questionary.confirm("Do you want to reconfigure the auth variables?", default=False).unsafe_ask()

         if ask_user or not env_vars:
             env_vars = prompt_user_environment_variables(env_vars)
@@ -111,10 +111,10 @@
         if questionary.confirm(
             f"Do you want to overwrite the existing '.env' file? The existing will be renamed to {filename}",
             default=False,
-        ).ask():
+        ).unsafe_ask():
             shutil.move(".env", filename)
             Path(".env").write_text(new_env_file, encoding="utf-8")
-        elif questionary.confirm("Do you want to save these to .env file for next time?", default=True).ask():
+        elif questionary.confirm("Do you want to save these to .env file for next time?", default=True).unsafe_ask():
             Path(".env").write_text(new_env_file, encoding="utf-8")

     def verify(
@@ -197,7 +197,7 @@ class AuthCommand(ToolkitCommand):
         if (
             is_interactive
             and missing_capabilities
-            and questionary.confirm("Do you want to update the group with the missing capabilities?").ask()
+            and questionary.confirm("Do you want to update the group with the missing capabilities?").unsafe_ask()
         ) or is_demo:
             has_added_capabilities = self._update_missing_capabilities(
                 client, cdf_toolkit_group, missing_capabilities, dry_run
@@ -213,7 +213,7 @@ class AuthCommand(ToolkitCommand):
         if (
             is_interactive
             and missing_capabilities
-            and questionary.confirm("Do you want to update the group with the missing capabilities?").ask()
+            and questionary.confirm("Do you want to update the group with the missing capabilities?").unsafe_ask()
         ):
             self._update_missing_capabilities(client, cdf_toolkit_group, missing_capabilities, dry_run)
         elif is_demo:
@@ -246,7 +246,7 @@ class AuthCommand(ToolkitCommand):
         if extra := self.check_duplicated_names(all_groups, cdf_toolkit_group):
             if (
                 is_interactive
-                and questionary.confirm("Do you want to delete the extra groups?", default=True).ask()
+                and questionary.confirm("Do you want to delete the extra groups?", default=True).unsafe_ask()
             ):
                 try:
                     client.iam.groups.delete(extra.as_ids())
@@ -274,7 +274,7 @@ class AuthCommand(ToolkitCommand):
         if not questionary.confirm(
             "Do you want to create it?",
             default=True,
-        ).ask():
+        ).unsafe_ask():
             return None

         if dry_run:
@@ -283,13 +283,10 @@
             )
             return None

-        while True:
-            source_id = questionary.text(
-                "What is the source id for the new group (typically a group id in the identity provider)?"
-            ).ask()
-            if source_id:
-                break
-            print("Source id cannot be empty.")
+        source_id = questionary.text(
+            "What is the source id for the new group (typically a group id in the identity provider)?",
+            validate=lambda value: value.strip() != "",
+        ).unsafe_ask()

         toolkit_group.source_id = source_id
         if already_used := [group.name for group in all_groups if group.source_id == source_id]:
@@ -298,7 +295,7 @@
                    f"The source id {source_id!r} is already used by the groups: {humanize_collection(already_used)!r}."
                )
            )
-            if not questionary.confirm("This is NOT recommended. Do you want to continue?", default=False).ask():
+            if not questionary.confirm("This is NOT recommended. Do you want to continue?", default=False).unsafe_ask():
                 return None

         return self._create_toolkit_group_in_cdf(client, toolkit_group)

cognite_toolkit/_cdf_tk/commands/clean.py
@@ -197,7 +197,8 @@ class CleanCommand(ToolkitCommand):
             "Which modules would you like to clean?",
             instruction="Use arrow up/down, press space to select item(s) and enter to save",
             choices=choices,
-        ).ask()
+            validate=lambda choice: "You must select at least one module." if len(choice) == 0 else True,
+        ).unsafe_ask()

         if not selected_modules:
             return None
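
The checkbox prompt now rejects an empty selection through questionary's validate hook, which returns True when the input is acceptable and an error-message string otherwise. A self-contained sketch of the same pattern; the module names are placeholders:

    import questionary

    selected_modules = questionary.checkbox(
        "Which modules would you like to clean?",
        choices=["module_a", "module_b"],  # placeholder choices
        validate=lambda choice: "You must select at least one module." if len(choice) == 0 else True,
    ).unsafe_ask()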