glaip-sdk 0.0.19__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- glaip_sdk/_version.py +2 -2
- glaip_sdk/branding.py +27 -2
- glaip_sdk/cli/auth.py +93 -28
- glaip_sdk/cli/commands/__init__.py +2 -2
- glaip_sdk/cli/commands/agents.py +127 -21
- glaip_sdk/cli/commands/configure.py +141 -90
- glaip_sdk/cli/commands/mcps.py +82 -31
- glaip_sdk/cli/commands/models.py +4 -3
- glaip_sdk/cli/commands/tools.py +27 -14
- glaip_sdk/cli/commands/update.py +66 -0
- glaip_sdk/cli/config.py +13 -2
- glaip_sdk/cli/display.py +35 -26
- glaip_sdk/cli/io.py +14 -5
- glaip_sdk/cli/main.py +185 -73
- glaip_sdk/cli/pager.py +2 -1
- glaip_sdk/cli/resolution.py +4 -1
- glaip_sdk/cli/slash/__init__.py +3 -4
- glaip_sdk/cli/slash/agent_session.py +88 -36
- glaip_sdk/cli/slash/prompt.py +20 -48
- glaip_sdk/cli/slash/session.py +437 -189
- glaip_sdk/cli/transcript/__init__.py +71 -0
- glaip_sdk/cli/transcript/cache.py +338 -0
- glaip_sdk/cli/transcript/capture.py +278 -0
- glaip_sdk/cli/transcript/export.py +38 -0
- glaip_sdk/cli/transcript/launcher.py +79 -0
- glaip_sdk/cli/transcript/viewer.py +794 -0
- glaip_sdk/cli/update_notifier.py +29 -5
- glaip_sdk/cli/utils.py +255 -74
- glaip_sdk/client/agents.py +3 -1
- glaip_sdk/client/run_rendering.py +126 -21
- glaip_sdk/icons.py +25 -0
- glaip_sdk/models.py +6 -0
- glaip_sdk/rich_components.py +29 -1
- glaip_sdk/utils/__init__.py +1 -1
- glaip_sdk/utils/client_utils.py +6 -4
- glaip_sdk/utils/display.py +61 -32
- glaip_sdk/utils/rendering/formatting.py +55 -11
- glaip_sdk/utils/rendering/models.py +15 -2
- glaip_sdk/utils/rendering/renderer/__init__.py +0 -2
- glaip_sdk/utils/rendering/renderer/base.py +1287 -227
- glaip_sdk/utils/rendering/renderer/config.py +3 -5
- glaip_sdk/utils/rendering/renderer/debug.py +73 -16
- glaip_sdk/utils/rendering/renderer/panels.py +27 -15
- glaip_sdk/utils/rendering/renderer/progress.py +61 -38
- glaip_sdk/utils/rendering/renderer/stream.py +3 -3
- glaip_sdk/utils/rendering/renderer/toggle.py +184 -0
- glaip_sdk/utils/rendering/step_tree_state.py +102 -0
- glaip_sdk/utils/rendering/steps.py +944 -16
- glaip_sdk/utils/serialization.py +5 -2
- glaip_sdk/utils/validation.py +1 -2
- {glaip_sdk-0.0.19.dist-info → glaip_sdk-0.1.0.dist-info}/METADATA +12 -1
- glaip_sdk-0.1.0.dist-info/RECORD +82 -0
- glaip_sdk/utils/rich_utils.py +0 -29
- glaip_sdk-0.0.19.dist-info/RECORD +0 -73
- {glaip_sdk-0.0.19.dist-info → glaip_sdk-0.1.0.dist-info}/WHEEL +0 -0
- {glaip_sdk-0.0.19.dist-info → glaip_sdk-0.1.0.dist-info}/entry_points.txt +0 -0
glaip_sdk/cli/update_notifier.py
CHANGED
@@ -8,12 +8,19 @@ from __future__ import annotations
 
 import os
 from collections.abc import Callable
+from typing import Any, Literal
 
 import httpx
 from packaging.version import InvalidVersion, Version
+from rich import box
 from rich.console import Console
 
-from glaip_sdk.
+from glaip_sdk.branding import (
+    ACCENT_STYLE,
+    SUCCESS_STYLE,
+    WARNING_STYLE,
+)
+from glaip_sdk.cli.utils import command_hint, format_command_hint
 from glaip_sdk.rich_components import AIPPanel
 
 FetchLatestVersion = Callable[[], str | None]
@@ -63,16 +70,20 @@ def _build_update_panel(
     command_text: str,
 ) -> AIPPanel:
     """Create a Rich panel that prompts the user to update."""
+    command_markup = format_command_hint(command_text) or command_text
     message = (
-        f"[
+        f"[{WARNING_STYLE}]✨ Update available![/] "
         f"{current_version} → {latest_version}\n\n"
         "See the latest release notes:\n"
         f"https://pypi.org/project/glaip-sdk/{latest_version}/\n\n"
-        f"[
+        f"[{ACCENT_STYLE}]Run[/] {command_markup} to install."
     )
     return AIPPanel(
        message,
-        title="[
+        title=f"[{SUCCESS_STYLE}]AIP SDK Update[/]",
+        box=box.ROUNDED,
+        padding=(0, 3),
+        expand=False,
     )
 
 
@@ -82,6 +93,9 @@ def maybe_notify_update(
     package_name: str = "glaip-sdk",
     console: Console | None = None,
     fetch_latest_version: FetchLatestVersion | None = None,
+    ctx: Any | None = None,
+    slash_command: str | None = None,
+    style: Literal["panel", "inline"] = "panel",
 ) -> None:
     """Check PyPI for a newer version and display a prompt if one exists.
 
@@ -101,11 +115,21 @@ def maybe_notify_update(
     if current is None or latest is None or latest <= current:
         return
 
-    command_text = command_hint("update")
+    command_text = command_hint("update", slash_command=slash_command, ctx=ctx)
     if command_text is None:
         return
 
     active_console = console or Console()
+    if style == "inline":
+        command_markup = format_command_hint(command_text) or command_text
+        message = (
+            f"[{WARNING_STYLE}]✨ Update[/] "
+            f"{current_version} → {latest_version} "
+            f"- {command_markup}"
+        )
+        active_console.print(message)
+        return
+
     panel = _build_update_panel(current_version, latest_version, command_text)
     active_console.print(panel)
 
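Based on the new parameters shown above, callers can now request a compact one-line notice instead of the boxed panel. A minimal sketch, assuming the function's leading argument is the currently installed version string (that part of the signature is not visible in this hunk) and using a stub fetcher so the check stays offline:

from rich.console import Console

from glaip_sdk.cli.update_notifier import maybe_notify_update


def fake_fetch_latest() -> str | None:
    # Stub standing in for the real PyPI lookup in this sketch.
    return "0.1.0"


# style="inline" prints a single "✨ Update 0.0.19 → 0.1.0 - <command hint>" line
# instead of the rounded AIPPanel; slash_command tailors the hint for slash sessions.
maybe_notify_update(
    current_version="0.0.19",  # assumed parameter name, not shown in the hunk
    console=Console(),
    fetch_latest_version=fake_fetch_latest,
    slash_command="/update",
    style="inline",
)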
glaip_sdk/cli/utils.py
CHANGED
@@ -7,29 +7,44 @@ Authors:
 
 from __future__ import annotations
 
+import importlib
 import json
 import logging
 import os
 import sys
-from collections.abc import Callable
+from collections.abc import Callable, Iterable
 from contextlib import AbstractContextManager, nullcontext
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 
 import click
 from rich.console import Console, Group
 from rich.markdown import Markdown
 from rich.pretty import Pretty
 
+from glaip_sdk.branding import (
+    ACCENT_STYLE,
+    HINT_COMMAND_STYLE,
+    HINT_DESCRIPTION_COLOR,
+    SUCCESS_STYLE,
+    WARNING_STYLE,
+)
 from glaip_sdk.cli.rich_helpers import markup_text
+from glaip_sdk.icons import ICON_AGENT
 from glaip_sdk.rich_components import AIPPanel
 
 # Optional interactive deps (fuzzy palette)
 try:
+    from prompt_toolkit.buffer import Buffer
     from prompt_toolkit.completion import Completion
-    from prompt_toolkit.
+    from prompt_toolkit.selection import SelectionType
+    from prompt_toolkit.shortcuts import PromptSession, prompt
 
     _HAS_PTK = True
 except Exception:  # pragma: no cover - optional dependency
+    Buffer = None  # type: ignore[assignment]
+    SelectionType = None  # type: ignore[assignment]
+    PromptSession = None  # type: ignore[assignment]
+    prompt = None  # type: ignore[assignment]
     _HAS_PTK = False
 
 try:
@@ -114,25 +129,54 @@ def command_hint(
     return f"aip {cli_command}"
 
 
+def format_command_hint(
+    command: str | None,
+    description: str | None = None,
+) -> str | None:
+    """Return a Rich markup string that highlights a command hint.
+
+    Args:
+        command: Command text to highlight (already formatted for the active mode).
+        description: Optional short description to display alongside the command.
+
+    Returns:
+        Markup string suitable for Rich rendering, or ``None`` when ``command`` is falsy.
+    """
+    if not command:
+        return None
+
+    highlighted = f"[{HINT_COMMAND_STYLE}]{command}[/]"
+    if description:
+        highlighted += (
+            f" [{HINT_DESCRIPTION_COLOR}]{description}[/{HINT_DESCRIPTION_COLOR}]"
+        )
+    return highlighted
+
+
 def spinner_context(
     ctx: Any | None,
     message: str,
     *,
     console_override: Console | None = None,
     spinner: str = "dots",
-    spinner_style: str =
+    spinner_style: str = ACCENT_STYLE,
 ) -> AbstractContextManager[Any]:
     """Return a context manager that renders a spinner when appropriate."""
     active_console = console_override or console
     if not _can_use_spinner(ctx, active_console):
         return nullcontext()
 
-
+    status = active_console.status(
         message,
         spinner=spinner,
         spinner_style=spinner_style,
     )
 
+    if not hasattr(status, "__enter__") or not hasattr(status, "__exit__"):
+        return nullcontext()
+
+    return status
+
 
 def _can_use_spinner(ctx: Any | None, active_console: Console) -> bool:
     """Check if spinner output is allowed in the current environment."""
@@ -189,8 +233,8 @@ _spinner_stop = stop_spinner
 
 def get_client(ctx: Any) -> Client:  # pragma: no cover
     """Get configured client from context, env, and config file (ctx > env > file)."""
-
-
+    module = importlib.import_module("glaip_sdk")
+    client_class = cast("type[Client]", getattr(module, "Client"))
     file_config = load_config() or {}
     context_config_obj = getattr(ctx, "obj", None)
     context_config = context_config_obj or {}
@@ -223,7 +267,7 @@ def get_client(ctx: Any) -> Client:  # pragma: no cover
         actions.append("set AIP_* env vars")
     raise click.ClickException(f"Missing api_url/api_key. {' or '.join(actions)}.")
 
-    return
+    return client_class(
        api_url=config.get("api_url"),
        api_key=config.get("api_key"),
        timeout=float(config.get("timeout") or 30.0),
@@ -335,6 +379,86 @@ def _build_unique_labels(
     return labels, by_label
 
 
+def _basic_prompt(
+    message: str,
+    completer: Any,
+) -> str | None:
+    """Fallback prompt handler when PromptSession is unavailable or fails."""
+    if prompt is None:  # pragma: no cover - optional dependency path
+        return None
+
+    try:
+        return prompt(
+            message=message,
+            completer=completer,
+            complete_in_thread=True,
+            complete_while_typing=True,
+        )
+    except (KeyboardInterrupt, EOFError):
+        return None
+    except Exception as exc:  # pragma: no cover - defensive
+        logger.debug("Fallback prompt failed: %s", exc)
+        return None
+
+
+def _prompt_with_auto_select(
+    message: str,
+    completer: Any,
+    choices: Iterable[str],
+) -> str | None:
+    """Prompt with fuzzy completer that auto-selects suggested matches."""
+    if not _HAS_PTK or PromptSession is None or Buffer is None or SelectionType is None:
+        return _basic_prompt(message, completer)
+
+    try:
+        session = PromptSession(
+            message,
+            completer=completer,
+            complete_in_thread=True,
+            complete_while_typing=True,
+            reserve_space_for_menu=8,
+        )
+    except Exception as exc:  # pragma: no cover - depends on prompt_toolkit
+        logger.debug(
+            "PromptSession init failed (%s); falling back to basic prompt.", exc
+        )
+        return _basic_prompt(message, completer)
+
+    buffer = session.default_buffer
+    valid_choices = set(choices)
+
+    def _auto_select(_: Buffer) -> None:
+        text = buffer.text
+        if not text or text not in valid_choices:
+            return
+        buffer.cursor_position = 0
+        buffer.start_selection(selection_type=SelectionType.CHARACTERS)
+        buffer.cursor_position = len(text)
+
+    handler_attached = False
+    try:
+        buffer.on_text_changed += _auto_select
+        handler_attached = True
+    except Exception as exc:  # pragma: no cover - defensive
+        logger.debug("Failed to attach auto-select handler: %s", exc)
+
+    try:
+        return session.prompt()
+    except (KeyboardInterrupt, EOFError):
+        return None
+    except Exception as exc:  # pragma: no cover - defensive
+        logger.debug(
+            "PromptSession prompt failed (%s); falling back to basic prompt.", exc
+        )
+        return _basic_prompt(message, completer)
+    finally:
+        if handler_attached:
+            try:
+                buffer.on_text_changed -= _auto_select
+            except Exception:  # pragma: no cover - defensive
+                pass
+
+
 class _FuzzyCompleter:
     """Fuzzy completer for prompt_toolkit."""
 
@@ -404,17 +528,12 @@ def _fuzzy_pick(
 
     # Create fuzzy completer
     completer = _FuzzyCompleter(labels)
-
-
-
-
-
-
-            complete_while_typing=True,
-        )
-    except (KeyboardInterrupt, EOFError):  # pragma: no cover - user cancelled input
-        return None
-    except Exception:  # pragma: no cover - prompt_toolkit not available in headless env
+    answer = _prompt_with_auto_select(
+        f"Find {title.rstrip('s')}: ",
+        completer,
+        labels,
+    )
+    if answer is None:
         return None
 
     return _perform_fuzzy_search(answer, labels, by_label) if answer else None
@@ -557,7 +676,7 @@ def output_result(
     if panel_title:
         console.print(AIPPanel(renderable, title=panel_title))
     else:
-        console.print(markup_text(f"[
+        console.print(markup_text(f"[{ACCENT_STYLE}]{title}:[/]"))
         console.print(renderable)
 
 
@@ -665,7 +784,7 @@ def _handle_markdown_output(
 
 def _handle_empty_items(title: str) -> None:
     """Handle case when no items are found."""
-    console.print(markup_text(f"[
+    console.print(markup_text(f"[{WARNING_STYLE}]No {title.lower()} found.[/]"))
 
 
 def _should_use_fuzzy_picker() -> bool:
@@ -816,6 +935,18 @@ def coerce_to_row(item: Any, keys: list[str]) -> dict[str, Any]:
     return result
 
 
+def _register_renderer_with_session(ctx: Any, renderer: RichStreamRenderer) -> None:
+    """Attach renderer to an active slash session when present."""
+    try:
+        ctx_obj = getattr(ctx, "obj", None)
+        session = ctx_obj.get("_slash_session") if isinstance(ctx_obj, dict) else None
+        if session and hasattr(session, "register_active_renderer"):
+            session.register_active_renderer(renderer)
+    except Exception:
+        # Never let session bookkeeping break renderer creation
+        pass
+
+
 def build_renderer(
     _ctx: Any,
     *,
@@ -841,21 +972,16 @@ def build_renderer(
         Tuple of (renderer, capturing_console) for streaming output.
     """
     # Use capturing console if saving output
-    working_console = console
-    if save_path:
-        working_console = CapturingConsole(console, capture=True)
+    working_console = CapturingConsole(console, capture=True) if save_path else console
 
     # Configure renderer based on verbose mode and explicit overrides
-    if live is None
-
-    else:
-        live_enabled = bool(live)
+    live_enabled = bool(live) if live is not None else not verbose
+    style = "debug" if verbose else "pretty"
 
     renderer_cfg = RendererConfig(
         theme=theme,
-        style=
+        style=style,
         live=live_enabled,
-        show_delegate_tool_panels=True,
         append_finished_snapshots=bool(snapshots)
         if snapshots is not None
         else RendererConfig.append_finished_snapshots,
@@ -871,15 +997,7 @@
     )
 
     # Link the renderer back to the slash session when running from the palette.
-
-        ctx_obj = getattr(_ctx, "obj", None)
-        if isinstance(ctx_obj, dict):
-            session = ctx_obj.get("_slash_session")
-            if session and hasattr(session, "register_active_renderer"):
-                session.register_active_renderer(renderer)
-    except Exception:
-        # Never let session bookkeeping break renderer creation
-        pass
+    _register_renderer_with_session(_ctx, renderer)
 
     return renderer, working_console
 
@@ -935,15 +1053,12 @@ def _fuzzy_pick_for_resources(
 
     # Create fuzzy completer
     completer = _FuzzyCompleter(labels)
-
-
-
-
-
-
-            complete_while_typing=True,
-        )
-    except (KeyboardInterrupt, EOFError):
+    answer = _prompt_with_auto_select(
+        f"Find {ICON_AGENT} {resource_type.title()}: ",
+        completer,
+        labels,
+    )
+    if answer is None:
         return None
 
     return _perform_fuzzy_search(answer, labels, by_label) if answer else None
@@ -967,19 +1082,19 @@ def _resolve_by_name_multiple_with_select(matches: list[Any], select: int) -> An
 def _resolve_by_name_multiple_fuzzy(
     ctx: Any, ref: str, matches: list[Any], label: str
 ) -> Any:
-    """Resolve multiple matches
-
-
-
-    # Fallback to original ambiguity handler if fuzzy picker fails
-    return handle_ambiguous_resource(ctx, label.lower(), ref, matches)
+    """Resolve multiple matches preferring the fuzzy picker interface."""
+    return handle_ambiguous_resource(
+        ctx, label.lower(), ref, matches, interface_preference="fuzzy"
+    )
 
 
 def _resolve_by_name_multiple_questionary(
     ctx: Any, ref: str, matches: list[Any], label: str
 ) -> Any:
-    """Resolve multiple matches
-    return handle_ambiguous_resource(
+    """Resolve multiple matches preferring the questionary interface."""
+    return handle_ambiguous_resource(
+        ctx, label.lower(), ref, matches, interface_preference="questionary"
+    )
 
 
 def resolve_resource(
@@ -1015,7 +1130,7 @@ def resolve_resource(
         _spinner_update(spinner, f"[bold blue]Fetching {label} by ID…[/bold blue]")
         result = _resolve_by_id(ref, get_by_id)
         if result is not None:
-            _spinner_update(spinner, f"[
+            _spinner_update(spinner, f"[{SUCCESS_STYLE}]{label} found[/]")
             return result
 
         # If get_by_id returned None, the resource doesn't exist
@@ -1033,7 +1148,7 @@
         raise click.ClickException(f"{label} '{ref}' not found")
 
     if len(matches) == 1:
-        _spinner_update(spinner, f"[
+        _spinner_update(spinner, f"[{SUCCESS_STYLE}]{label} found[/]")
         return matches[0]
 
     # Multiple matches found, handle ambiguity
@@ -1043,7 +1158,10 @@
 
     # Choose interface based on preference
     _spinner_stop(spinner)
-
+    preference = (interface_preference or "fuzzy").lower()
+    if preference not in {"fuzzy", "questionary"}:
+        preference = "fuzzy"
+    if preference == "fuzzy":
         return _resolve_by_name_multiple_fuzzy(ctx, ref, matches, label)
     else:
         return _resolve_by_name_multiple_questionary(ctx, ref, matches, label)
@@ -1093,7 +1211,7 @@ def _handle_fallback_numeric_ambiguity(
 
     console.print(
         markup_text(
-            f"[
+            f"[{WARNING_STYLE}]Multiple {safe_resource_type}s found matching '{safe_ref}':[/]"
        )
    )
    table = AIPTable(
@@ -1101,7 +1219,7 @@ def _handle_fallback_numeric_ambiguity(
    )
    table.add_column("#", style="dim", width=3)
    table.add_column("ID", style="dim", width=36)
-    table.add_column("Name", style=
+    table.add_column("Name", style=ACCENT_STYLE)
    for i, m in enumerate(matches, 1):
        table.add_row(str(i), str(getattr(m, "id", "")), str(getattr(m, "name", "")))
    console.print(table)
@@ -1127,22 +1245,85 @@ def _should_fallback_to_numeric_prompt(exception: Exception) -> bool:
     return True
 
 
+def _normalize_interface_preference(preference: str) -> str:
+    """Normalize and validate interface preference."""
+    normalized = (preference or "questionary").lower()
+    return normalized if normalized in {"fuzzy", "questionary"} else "questionary"
+
+
+def _get_interface_order(preference: str) -> tuple[str, str]:
+    """Get the ordered interface preferences."""
+    interface_orders = {
+        "fuzzy": ("fuzzy", "questionary"),
+        "questionary": ("questionary", "fuzzy"),
+    }
+    return interface_orders.get(preference, ("questionary", "fuzzy"))
+
+
+def _try_fuzzy_selection(
+    resource_type: str,
+    ref: str,
+    matches: list[Any],
+) -> Any | None:
+    """Try fuzzy interface selection."""
+    picked = _fuzzy_pick_for_resources(matches, resource_type, ref)
+    return picked if picked else None
+
+
+def _try_questionary_selection(
+    resource_type: str,
+    ref: str,
+    matches: list[Any],
+) -> Any | None:
+    """Try questionary interface selection."""
+    try:
+        return _handle_questionary_ambiguity(resource_type, ref, matches)
+    except Exception as exc:
+        if not _should_fallback_to_numeric_prompt(exc):
+            raise
+        return None
+
+
+def _try_interface_selection(
+    interface_order: tuple[str, str],
+    resource_type: str,
+    ref: str,
+    matches: list[Any],
+) -> Any | None:
+    """Try interface selection in order, return result or None if all failed."""
+    interface_handlers = {
+        "fuzzy": _try_fuzzy_selection,
+        "questionary": _try_questionary_selection,
+    }
+
+    for interface in interface_order:
+        handler = interface_handlers.get(interface)
+        if handler:
+            result = handler(resource_type, ref, matches)
+            if result:
+                return result
+
+    return None
+
+
 def handle_ambiguous_resource(
-    ctx: Any,
+    ctx: Any,
+    resource_type: str,
+    ref: str,
+    matches: list[Any],
+    *,
+    interface_preference: str = "questionary",
 ) -> Any:
     """Handle multiple resource matches gracefully."""
     if _get_view(ctx) == "json":
         return _handle_json_view_ambiguity(matches)
 
-
-
-
-
-
-
-
-
-
-    else:
-        # Re-raise cancellation exceptions
-        raise
+    preference = _normalize_interface_preference(interface_preference)
+    interface_order = _get_interface_order(preference)
+
+    result = _try_interface_selection(interface_order, resource_type, ref, matches)
+
+    if result is not None:
+        return result
+
+    return _handle_fallback_numeric_ambiguity(resource_type, ref, matches)
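For orientation, the new format_command_hint helper added above can be called directly; a small sketch of its behaviour as defined in this file (the Console instance is just an ordinary Rich console):

from rich.console import Console

from glaip_sdk.cli.utils import format_command_hint

console = Console()

# Command only: wrapped in HINT_COMMAND_STYLE markup.
console.print(format_command_hint("aip update"))

# Command plus a short description rendered in HINT_DESCRIPTION_COLOR.
console.print(format_command_hint("aip agents list", "show all agents"))

# Falsy input returns None, so callers can fall back to the raw command text.
assert format_command_hint(None) is None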
glaip_sdk/client/agents.py
CHANGED
@@ -928,6 +928,8 @@ class AgentClient(BaseClient):
         started_monotonic: float | None = None
         finished_monotonic: float | None = None
 
+        timeout_seconds = compute_timeout_seconds(kwargs)
+
         try:
             response = self.http_client.stream(
                 "POST",
@@ -936,12 +938,12 @@ class AgentClient(BaseClient):
                 data=data_payload,
                 files=files_payload,
                 headers=headers,
+                timeout=timeout_seconds,
             )
 
             with response as stream_response:
                 stream_response.raise_for_status()
 
-                timeout_seconds = compute_timeout_seconds(kwargs)
                 agent_name = kwargs.get("agent_name")
 
                 (