shotgun-sh 0.2.3.dev2__py3-none-any.whl → 0.2.11.dev5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of shotgun-sh might be problematic.
- shotgun/agents/agent_manager.py +664 -75
- shotgun/agents/common.py +76 -70
- shotgun/agents/config/constants.py +0 -6
- shotgun/agents/config/manager.py +78 -36
- shotgun/agents/config/models.py +41 -1
- shotgun/agents/config/provider.py +70 -15
- shotgun/agents/context_analyzer/__init__.py +28 -0
- shotgun/agents/context_analyzer/analyzer.py +471 -0
- shotgun/agents/context_analyzer/constants.py +9 -0
- shotgun/agents/context_analyzer/formatter.py +115 -0
- shotgun/agents/context_analyzer/models.py +212 -0
- shotgun/agents/conversation_history.py +125 -2
- shotgun/agents/conversation_manager.py +57 -19
- shotgun/agents/export.py +6 -7
- shotgun/agents/history/compaction.py +9 -4
- shotgun/agents/history/context_extraction.py +93 -6
- shotgun/agents/history/history_processors.py +14 -2
- shotgun/agents/history/token_counting/anthropic.py +49 -11
- shotgun/agents/history/token_counting/base.py +14 -3
- shotgun/agents/history/token_counting/openai.py +8 -0
- shotgun/agents/history/token_counting/sentencepiece_counter.py +8 -0
- shotgun/agents/history/token_counting/tokenizer_cache.py +3 -1
- shotgun/agents/history/token_counting/utils.py +0 -3
- shotgun/agents/models.py +50 -2
- shotgun/agents/plan.py +6 -7
- shotgun/agents/research.py +7 -8
- shotgun/agents/specify.py +6 -7
- shotgun/agents/tasks.py +6 -7
- shotgun/agents/tools/__init__.py +0 -2
- shotgun/agents/tools/codebase/codebase_shell.py +6 -0
- shotgun/agents/tools/codebase/directory_lister.py +6 -0
- shotgun/agents/tools/codebase/file_read.py +11 -2
- shotgun/agents/tools/codebase/query_graph.py +6 -0
- shotgun/agents/tools/codebase/retrieve_code.py +6 -0
- shotgun/agents/tools/file_management.py +82 -16
- shotgun/agents/tools/registry.py +217 -0
- shotgun/agents/tools/web_search/__init__.py +30 -18
- shotgun/agents/tools/web_search/anthropic.py +26 -5
- shotgun/agents/tools/web_search/gemini.py +23 -11
- shotgun/agents/tools/web_search/openai.py +22 -13
- shotgun/agents/tools/web_search/utils.py +2 -2
- shotgun/agents/usage_manager.py +16 -11
- shotgun/api_endpoints.py +7 -3
- shotgun/build_constants.py +1 -1
- shotgun/cli/clear.py +53 -0
- shotgun/cli/compact.py +186 -0
- shotgun/cli/config.py +8 -5
- shotgun/cli/context.py +111 -0
- shotgun/cli/export.py +1 -1
- shotgun/cli/feedback.py +4 -2
- shotgun/cli/models.py +1 -0
- shotgun/cli/plan.py +1 -1
- shotgun/cli/research.py +1 -1
- shotgun/cli/specify.py +1 -1
- shotgun/cli/tasks.py +1 -1
- shotgun/cli/update.py +16 -2
- shotgun/codebase/core/change_detector.py +5 -3
- shotgun/codebase/core/code_retrieval.py +4 -2
- shotgun/codebase/core/ingestor.py +10 -8
- shotgun/codebase/core/manager.py +13 -4
- shotgun/codebase/core/nl_query.py +1 -1
- shotgun/llm_proxy/__init__.py +5 -2
- shotgun/llm_proxy/clients.py +12 -7
- shotgun/logging_config.py +18 -27
- shotgun/main.py +73 -11
- shotgun/posthog_telemetry.py +23 -7
- shotgun/prompts/agents/export.j2 +18 -1
- shotgun/prompts/agents/partials/common_agent_system_prompt.j2 +5 -1
- shotgun/prompts/agents/partials/interactive_mode.j2 +24 -7
- shotgun/prompts/agents/plan.j2 +1 -1
- shotgun/prompts/agents/research.j2 +1 -1
- shotgun/prompts/agents/specify.j2 +270 -3
- shotgun/prompts/agents/state/system_state.j2 +4 -0
- shotgun/prompts/agents/tasks.j2 +1 -1
- shotgun/prompts/loader.py +2 -2
- shotgun/prompts/tools/web_search.j2 +14 -0
- shotgun/sentry_telemetry.py +7 -16
- shotgun/settings.py +238 -0
- shotgun/telemetry.py +18 -33
- shotgun/tui/app.py +243 -43
- shotgun/tui/commands/__init__.py +1 -1
- shotgun/tui/components/context_indicator.py +179 -0
- shotgun/tui/components/mode_indicator.py +70 -0
- shotgun/tui/components/status_bar.py +48 -0
- shotgun/tui/containers.py +91 -0
- shotgun/tui/dependencies.py +39 -0
- shotgun/tui/protocols.py +45 -0
- shotgun/tui/screens/chat/__init__.py +5 -0
- shotgun/tui/screens/chat/chat.tcss +54 -0
- shotgun/tui/screens/chat/chat_screen.py +1202 -0
- shotgun/tui/screens/chat/codebase_index_prompt_screen.py +64 -0
- shotgun/tui/screens/chat/codebase_index_selection.py +12 -0
- shotgun/tui/screens/chat/help_text.py +40 -0
- shotgun/tui/screens/chat/prompt_history.py +48 -0
- shotgun/tui/screens/chat.tcss +11 -0
- shotgun/tui/screens/chat_screen/command_providers.py +78 -2
- shotgun/tui/screens/chat_screen/history/__init__.py +22 -0
- shotgun/tui/screens/chat_screen/history/agent_response.py +66 -0
- shotgun/tui/screens/chat_screen/history/chat_history.py +116 -0
- shotgun/tui/screens/chat_screen/history/formatters.py +115 -0
- shotgun/tui/screens/chat_screen/history/partial_response.py +43 -0
- shotgun/tui/screens/chat_screen/history/user_question.py +42 -0
- shotgun/tui/screens/confirmation_dialog.py +151 -0
- shotgun/tui/screens/feedback.py +4 -4
- shotgun/tui/screens/github_issue.py +102 -0
- shotgun/tui/screens/model_picker.py +49 -24
- shotgun/tui/screens/onboarding.py +431 -0
- shotgun/tui/screens/pipx_migration.py +153 -0
- shotgun/tui/screens/provider_config.py +50 -27
- shotgun/tui/screens/shotgun_auth.py +2 -2
- shotgun/tui/screens/welcome.py +32 -10
- shotgun/tui/services/__init__.py +5 -0
- shotgun/tui/services/conversation_service.py +184 -0
- shotgun/tui/state/__init__.py +7 -0
- shotgun/tui/state/processing_state.py +185 -0
- shotgun/tui/utils/mode_progress.py +14 -7
- shotgun/tui/widgets/__init__.py +5 -0
- shotgun/tui/widgets/widget_coordinator.py +262 -0
- shotgun/utils/datetime_utils.py +77 -0
- shotgun/utils/file_system_utils.py +22 -2
- shotgun/utils/marketing.py +110 -0
- shotgun/utils/update_checker.py +69 -14
- shotgun_sh-0.2.11.dev5.dist-info/METADATA +130 -0
- shotgun_sh-0.2.11.dev5.dist-info/RECORD +193 -0
- {shotgun_sh-0.2.3.dev2.dist-info → shotgun_sh-0.2.11.dev5.dist-info}/entry_points.txt +1 -0
- {shotgun_sh-0.2.3.dev2.dist-info → shotgun_sh-0.2.11.dev5.dist-info}/licenses/LICENSE +1 -1
- shotgun/agents/tools/user_interaction.py +0 -37
- shotgun/tui/screens/chat.py +0 -804
- shotgun/tui/screens/chat_screen/history.py +0 -352
- shotgun_sh-0.2.3.dev2.dist-info/METADATA +0 -467
- shotgun_sh-0.2.3.dev2.dist-info/RECORD +0 -154
- {shotgun_sh-0.2.3.dev2.dist-info → shotgun_sh-0.2.11.dev5.dist-info}/WHEEL +0 -0
shotgun/tui/screens/chat/codebase_index_prompt_screen.py

@@ -0,0 +1,64 @@
+"""Modal dialog for codebase indexing prompts."""
+
+from pathlib import Path
+
+from textual import on
+from textual.app import ComposeResult
+from textual.containers import Container
+from textual.screen import ModalScreen
+from textual.widgets import Button, Label, Static
+
+
+class CodebaseIndexPromptScreen(ModalScreen[bool]):
+    """Modal dialog asking whether to index the detected codebase."""
+
+    DEFAULT_CSS = """
+    CodebaseIndexPromptScreen {
+        align: center middle;
+        background: rgba(0, 0, 0, 0.0);
+    }
+
+    CodebaseIndexPromptScreen > #index-prompt-dialog {
+        width: 60%;
+        max-width: 60;
+        height: auto;
+        border: wide $primary;
+        padding: 1 2;
+        layout: vertical;
+        background: $surface;
+        height: auto;
+    }
+
+    #index-prompt-buttons {
+        layout: horizontal;
+        align-horizontal: right;
+        height: auto;
+    }
+    """
+
+    def compose(self) -> ComposeResult:
+        with Container(id="index-prompt-dialog"):
+            yield Label("Index this codebase?", id="index-prompt-title")
+            yield Static(
+                f"Would you like to index the codebase at:\n{Path.cwd()}\n\n"
+                "This is required for the agent to understand your code and answer "
+                "questions about it. Without indexing, the agent cannot analyze "
+                "your codebase."
+            )
+            with Container(id="index-prompt-buttons"):
+                yield Button(
+                    "Index now",
+                    id="index-prompt-confirm",
+                    variant="primary",
+                )
+                yield Button("Not now", id="index-prompt-cancel")
+
+    @on(Button.Pressed, "#index-prompt-cancel")
+    def handle_cancel(self, event: Button.Pressed) -> None:
+        event.stop()
+        self.dismiss(False)
+
+    @on(Button.Pressed, "#index-prompt-confirm")
+    def handle_confirm(self, event: Button.Pressed) -> None:
+        event.stop()
+        self.dismiss(True)
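The dialog's result reaches the caller through Textual's screen-callback mechanism, since `dismiss(True)` / `dismiss(False)` carries the user's choice. A minimal sketch of wiring it up, assuming a throwaway demo app (`IndexPromptDemo` and its logging are illustrative, not part of the package):

from textual.app import App

from shotgun.tui.screens.chat.codebase_index_prompt_screen import (
    CodebaseIndexPromptScreen,
)


class IndexPromptDemo(App[None]):
    """Hypothetical harness for exercising the modal; not in the diff."""

    def on_mount(self) -> None:
        def on_decision(should_index: bool | None) -> None:
            # dismiss(True) -> "Index now", dismiss(False) -> "Not now"
            self.log(f"index requested: {should_index}")

        # push_screen delivers the ModalScreen[bool] result to the callback
        self.push_screen(CodebaseIndexPromptScreen(), callback=on_decision)


if __name__ == "__main__":
    IndexPromptDemo().run()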
shotgun/tui/screens/chat/help_text.py

@@ -0,0 +1,40 @@
+"""Helper functions for chat screen help text."""
+
+
+def help_text_with_codebase(already_indexed: bool = False) -> str:
+    """Generate help text for when a codebase is available.
+
+    Args:
+        already_indexed: Whether the codebase is already indexed.
+
+    Returns:
+        Formatted help text string.
+    """
+    return (
+        "Howdy! Welcome to Shotgun - Spec Driven Development for Developers and AI Agents.\n\n"
+        "Shotgun writes codebase-aware specs for your AI coding agents so they don't derail.\n\n"
+        f"{'It' if already_indexed else 'Once your codebase is indexed, it'} can help you:\n"
+        "- Research your codebase and spec out new features\n"
+        "- Create implementation plans that fit your architecture\n"
+        "- Generate AGENTS.md files for AI coding agents\n"
+        "- Onboard to existing projects or plan refactors\n\n"
+        "Ready to build something? Let's go.\n"
+    )
+
+
+def help_text_empty_dir() -> str:
+    """Generate help text for empty directory.
+
+    Returns:
+        Formatted help text string.
+    """
+    return (
+        "Howdy! Welcome to Shotgun - Spec Driven Development for Developers and AI Agents.\n\n"
+        "Shotgun writes codebase-aware specs for your AI coding agents so they don't derail.\n\n"
+        "It can help you:\n"
+        "- Research your codebase and spec out new features\n"
+        "- Create implementation plans that fit your architecture\n"
+        "- Generate AGENTS.md files for AI coding agents\n"
+        "- Onboard to existing projects or plan refactors\n\n"
+        "Ready to build something? Let's go.\n"
+    )
shotgun/tui/screens/chat/prompt_history.py

@@ -0,0 +1,48 @@
+"""Prompt history management for chat screen."""
+
+from pydantic import BaseModel, Field
+
+
+class PromptHistory(BaseModel):
+    """Manages prompt history for navigation in chat input."""
+
+    prompts: list[str] = Field(default_factory=lambda: ["Hello there!"])
+    curr: int | None = None
+
+    def next(self) -> str:
+        """Navigate to next prompt in history.
+
+        Returns:
+            The next prompt in history.
+        """
+        if self.curr is None:
+            self.curr = -1
+        else:
+            self.curr = -1
+        return self.prompts[self.curr]
+
+    def prev(self) -> str:
+        """Navigate to previous prompt in history.
+
+        Returns:
+            The previous prompt in history.
+
+        Raises:
+            Exception: If current entry is None.
+        """
+        if self.curr is None:
+            raise Exception("current entry is none")
+        if self.curr == -1:
+            self.curr = None
+            return ""
+        self.curr += 1
+        return ""
+
+    def append(self, text: str) -> None:
+        """Add a new prompt to history.
+
+        Args:
+            text: The prompt text to add.
+        """
+        self.prompts.append(text)
+        self.curr = None
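Note that in this revision `next()` assigns -1 in both branches and `prev()` always returns an empty string, so navigation effectively recalls only the most recent prompt. A quick sketch of the observable behaviour (the prompt strings are invented):

from shotgun.tui.screens.chat.prompt_history import PromptHistory

history = PromptHistory()                  # seeded with ["Hello there!"]
history.append("research the auth flow")
history.append("plan the refactor")

print(history.next())   # "plan the refactor" - curr jumps to -1 (latest prompt)
print(history.prev())   # "" - curr was -1, so the cursor resets to None
history.append("generate AGENTS.md")       # appending also clears the cursor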
shotgun/tui/screens/chat_screen/command_providers.py
CHANGED

@@ -130,6 +130,38 @@ class UsageProvider(Provider):
         )
 
 
+class ContextProvider(Provider):
+    """Command provider for showing conversation context analysis."""
+
+    @property
+    def chat_screen(self) -> "ChatScreen":
+        from shotgun.tui.screens.chat import ChatScreen
+
+        return cast(ChatScreen, self.screen)
+
+    async def discover(self) -> AsyncGenerator[DiscoveryHit, None]:
+        """Provide context command when palette opens."""
+        yield DiscoveryHit(
+            "Show context",
+            self.chat_screen.action_show_context,
+            help="Display conversation context composition and statistics",
+        )
+
+    async def search(self, query: str) -> AsyncGenerator[Hit, None]:
+        """Search for context command."""
+        matcher = self.matcher(query)
+
+        async for discovery_hit in self.discover():
+            score = matcher.match(discovery_hit.text or "")
+            if score > 0:
+                yield Hit(
+                    score,
+                    matcher.highlight(discovery_hit.text or ""),
+                    discovery_hit.command,
+                    help=discovery_hit.help,
+                )
+
+
 class ProviderSetupProvider(Provider):
     """Command palette entries for provider configuration."""
 

@@ -145,7 +177,9 @@ class ProviderSetupProvider(Provider):
 
     def open_model_picker(self) -> None:
         """Show the model picker screen."""
-        self.chat_screen.app.push_screen(
+        self.chat_screen.app.push_screen(
+            ModelPickerScreen(), callback=self.chat_screen.handle_model_selected
+        )
 
     async def discover(self) -> AsyncGenerator[DiscoveryHit, None]:
         yield DiscoveryHit(

@@ -288,11 +322,18 @@ class UnifiedCommandProvider(Provider):
 
     def open_model_picker(self) -> None:
         """Show the model picker screen."""
-        self.chat_screen.app.push_screen(
+        self.chat_screen.app.push_screen(
+            ModelPickerScreen(), callback=self.chat_screen.handle_model_selected
+        )
 
     async def discover(self) -> AsyncGenerator[DiscoveryHit, None]:
         """Provide commands in alphabetical order when palette opens."""
         # Alphabetically ordered commands
+        yield DiscoveryHit(
+            "Clear Conversation",
+            self.chat_screen.action_clear_conversation,
+            help="Clear the entire conversation history",
+        )
         yield DiscoveryHit(
             "Codebase: Delete Codebase Index",
             self.chat_screen.delete_codebase_command,

@@ -303,6 +344,11 @@ class UnifiedCommandProvider(Provider):
             self.chat_screen.index_codebase_command,
             help="Index a repository into the codebase graph",
         )
+        yield DiscoveryHit(
+            "Compact Conversation",
+            self.chat_screen.action_compact_conversation,
+            help="Reduce conversation size by compacting message history",
+        )
         yield DiscoveryHit(
             "Open Provider Setup",
             self.open_provider_config,

@@ -313,11 +359,21 @@ class UnifiedCommandProvider(Provider):
             self.open_model_picker,
             help="🤖 Choose which AI model to use",
         )
+        yield DiscoveryHit(
+            "Show context",
+            self.chat_screen.action_show_context,
+            help="Display conversation context composition and statistics",
+        )
         yield DiscoveryHit(
             "Show usage",
             self.chat_screen.action_show_usage,
             help="Display usage information for the current session",
        )
+        yield DiscoveryHit(
+            "View Onboarding",
+            self.chat_screen.action_view_onboarding,
+            help="View the onboarding tutorial and helpful resources",
+        )
 
     async def search(self, query: str) -> AsyncGenerator[Hit, None]:
         """Search for commands in alphabetical order."""

@@ -325,6 +381,11 @@ class UnifiedCommandProvider(Provider):
 
         # Define all commands in alphabetical order
         commands = [
+            (
+                "Clear Conversation",
+                self.chat_screen.action_clear_conversation,
+                "Clear the entire conversation history",
+            ),
             (
                 "Codebase: Delete Codebase Index",
                 self.chat_screen.delete_codebase_command,

@@ -335,6 +396,11 @@ class UnifiedCommandProvider(Provider):
                 self.chat_screen.index_codebase_command,
                 "Index a repository into the codebase graph",
             ),
+            (
+                "Compact Conversation",
+                self.chat_screen.action_compact_conversation,
+                "Reduce conversation size by compacting message history",
+            ),
             (
                 "Open Provider Setup",
                 self.open_provider_config,

@@ -345,11 +411,21 @@ class UnifiedCommandProvider(Provider):
                 self.open_model_picker,
                 "🤖 Choose which AI model to use",
             ),
+            (
+                "Show context",
+                self.chat_screen.action_show_context,
+                "Display conversation context composition and statistics",
+            ),
             (
                 "Show usage",
                 self.chat_screen.action_show_usage,
                 "Display usage information for the current session",
             ),
+            (
+                "View Onboarding",
+                self.chat_screen.action_view_onboarding,
+                "View the onboarding tutorial and helpful resources",
+            ),
         ]
 
         for title, callback, help_text in commands:
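These Provider subclasses only take effect once they are registered with Textual's command palette, normally by listing them in a screen's COMMANDS set. A sketch of how the chat screen could wire them up, assuming the import path below (the actual registration in chat_screen.py is not shown in this diff):

from textual.screen import Screen

from shotgun.tui.screens.chat_screen.command_providers import (
    ContextProvider,
    ProviderSetupProvider,
    UnifiedCommandProvider,
)


class ChatScreenSketch(Screen[None]):
    """Illustrative only; the real ChatScreen lives in chat/chat_screen.py."""

    # Textual merges these into the command palette whenever this screen is active
    COMMANDS = Screen.COMMANDS | {
        ContextProvider,
        ProviderSetupProvider,
        UnifiedCommandProvider,
    }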
shotgun/tui/screens/chat_screen/history/__init__.py

@@ -0,0 +1,22 @@
+"""Chat history package - displays conversation messages in the TUI.
+
+This package provides widgets for displaying chat history including:
+- User questions
+- Agent responses
+- Tool calls
+- Streaming/partial responses
+"""
+
+from .agent_response import AgentResponseWidget
+from .chat_history import ChatHistory
+from .formatters import ToolFormatter
+from .partial_response import PartialResponseWidget
+from .user_question import UserQuestionWidget
+
+__all__ = [
+    "ChatHistory",
+    "PartialResponseWidget",
+    "AgentResponseWidget",
+    "UserQuestionWidget",
+    "ToolFormatter",
+]
shotgun/tui/screens/chat_screen/history/agent_response.py

@@ -0,0 +1,66 @@
+"""Agent response widget for chat history."""
+
+from pydantic_ai.messages import (
+    BuiltinToolCallPart,
+    BuiltinToolReturnPart,
+    ModelResponse,
+    TextPart,
+    ThinkingPart,
+    ToolCallPart,
+)
+from textual.app import ComposeResult
+from textual.widget import Widget
+from textual.widgets import Markdown
+
+from .formatters import ToolFormatter
+
+
+class AgentResponseWidget(Widget):
+    """Widget that displays agent responses in the chat history."""
+
+    def __init__(self, item: ModelResponse | None) -> None:
+        super().__init__()
+        self.item = item
+
+    def compose(self) -> ComposeResult:
+        self.display = self.item is not None
+        if self.item is None:
+            yield Markdown(markdown="")
+        else:
+            yield Markdown(markdown=self.compute_output())
+
+    def compute_output(self) -> str:
+        """Compute the markdown output for the agent response."""
+        acc = ""
+        if self.item is None:
+            return ""
+
+        for idx, part in enumerate(self.item.parts):
+            if isinstance(part, TextPart):
+                # Only show the circle prefix if there's actual content
+                if part.content and part.content.strip():
+                    acc += f"**⏺** {part.content}\n\n"
+            elif isinstance(part, ToolCallPart):
+                parts_str = ToolFormatter.format_tool_call_part(part)
+                if parts_str:  # Only add if there's actual content
+                    acc += parts_str + "\n\n"
+            elif isinstance(part, BuiltinToolCallPart):
+                # Format builtin tool calls using registry
+                formatted = ToolFormatter.format_builtin_tool_call(part)
+                if formatted:  # Only add if not hidden
+                    acc += formatted + "\n\n"
+            elif isinstance(part, BuiltinToolReturnPart):
+                # Don't show tool return parts in the UI
+                pass
+            elif isinstance(part, ThinkingPart):
+                if (
+                    idx == len(self.item.parts) - 1
+                ):  # show the thinking part only if it's the last part
+                    acc += (
+                        f"thinking: {part.content}\n\n"
+                        if part.content
+                        else "Thinking..."
+                    )
+            else:
+                continue
+        return acc.strip()
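Because `compute_output()` only reads `self.item`, its formatting can be exercised without mounting the widget in an app. A small sketch with a hand-built response (the text and tool call below are invented):

from pydantic_ai.messages import ModelResponse, TextPart, ToolCallPart

from shotgun.tui.screens.chat_screen.history import AgentResponseWidget

response = ModelResponse(
    parts=[
        TextPart(content="Here is the plan."),
        ToolCallPart(tool_name="read_file", args={"filename": "README.md"}),
    ]
)

widget = AgentResponseWidget(response)
# The TextPart renders with the "**⏺**" prefix; the tool call is formatted by
# ToolFormatter (registry entry if one exists, otherwise the fallback shown
# later in this diff).
print(widget.compute_output())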
shotgun/tui/screens/chat_screen/history/chat_history.py

@@ -0,0 +1,116 @@
+"""Chat history widget - main container for message display."""
+
+from collections.abc import Generator, Sequence
+
+from pydantic_ai.messages import (
+    ModelMessage,
+    ModelRequest,
+    ModelResponse,
+    UserPromptPart,
+)
+from textual.app import ComposeResult
+from textual.reactive import reactive
+from textual.widget import Widget
+
+from shotgun.tui.components.vertical_tail import VerticalTail
+from shotgun.tui.screens.chat_screen.hint_message import HintMessage, HintMessageWidget
+
+from .agent_response import AgentResponseWidget
+from .partial_response import PartialResponseWidget
+from .user_question import UserQuestionWidget
+
+
+class ChatHistory(Widget):
+    """Main widget for displaying chat message history."""
+
+    DEFAULT_CSS = """
+    VerticalTail {
+        align: left bottom;
+
+    }
+    VerticalTail > * {
+        height: auto;
+    }
+
+    Horizontal {
+        height: auto;
+        background: $secondary-muted;
+    }
+
+    Markdown {
+        height: auto;
+    }
+    """
+    partial_response: reactive[ModelMessage | None] = reactive(None)
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.items: Sequence[ModelMessage | HintMessage] = []
+        self.vertical_tail: VerticalTail | None = None
+        self.partial_response = None
+        self._rendered_count = 0  # Track how many messages have been mounted
+
+    def compose(self) -> ComposeResult:
+        """Compose the chat history widget."""
+        self.vertical_tail = VerticalTail()
+
+        filtered = list(self.filtered_items())
+        with self.vertical_tail:
+            for item in filtered:
+                if isinstance(item, ModelRequest):
+                    yield UserQuestionWidget(item)
+                elif isinstance(item, HintMessage):
+                    yield HintMessageWidget(item)
+                elif isinstance(item, ModelResponse):
+                    yield AgentResponseWidget(item)
+            yield PartialResponseWidget(self.partial_response).data_bind(
+                item=ChatHistory.partial_response
+            )
+
+        # Track how many messages were rendered during initial compose
+        self._rendered_count = len(filtered)
+
+    def filtered_items(self) -> Generator[ModelMessage | HintMessage, None, None]:
+        """Filter and yield items for display."""
+        for item in self.items:
+            # Skip ModelRequest messages that only contain ToolReturnPart
+            # (these are internal tool results, not user prompts)
+            if isinstance(item, ModelRequest):
+                has_user_content = any(
+                    isinstance(part, UserPromptPart) for part in item.parts
+                )
+                if not has_user_content:
+                    # This is just a tool return, skip displaying it
+                    continue
+
+            yield item
+
+    def update_messages(self, messages: list[ModelMessage | HintMessage]) -> None:
+        """Update the displayed messages using incremental mounting."""
+        if not self.vertical_tail:
+            return
+
+        self.items = messages
+        filtered = list(self.filtered_items())
+
+        # Only mount new messages that haven't been rendered yet
+        if len(filtered) > self._rendered_count:
+            new_messages = filtered[self._rendered_count :]
+            for item in new_messages:
+                widget: Widget
+                if isinstance(item, ModelRequest):
+                    widget = UserQuestionWidget(item)
+                elif isinstance(item, HintMessage):
+                    widget = HintMessageWidget(item)
+                elif isinstance(item, ModelResponse):
+                    widget = AgentResponseWidget(item)
+                else:
+                    continue
+
+                # Mount before the PartialResponseWidget
+                self.vertical_tail.mount(widget, before=self.vertical_tail.children[-1])
+
+            self._rendered_count = len(filtered)
+
+        # Scroll to bottom to show newly added messages
+        self.vertical_tail.scroll_end(animate=False)
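The incremental strategy matters for long sessions: `update_messages()` mounts only the messages beyond `_rendered_count` and slots them in before the trailing `PartialResponseWidget`, instead of recomposing the whole list. A rough usage sketch from the owning screen's point of view, assuming the widget has been mounted in that screen's compose() (the message contents are invented):

from pydantic_ai.messages import ModelRequest, ModelResponse, TextPart, UserPromptPart

from shotgun.tui.screens.chat_screen.history import ChatHistory

history_widget = ChatHistory()  # mounted somewhere in the chat screen's compose()

messages = [
    ModelRequest(parts=[UserPromptPart(content="What does the ingestor do?")]),
    ModelResponse(parts=[TextPart(content="It walks the repo and builds the graph.")]),
]

# Stream a partial answer into the reactive attribute, then commit the finished
# turn; only the new widgets get mounted, earlier ones stay where they are.
history_widget.partial_response = messages[-1]
history_widget.update_messages(messages)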
shotgun/tui/screens/chat_screen/history/formatters.py

@@ -0,0 +1,115 @@
+"""Tool formatting utilities for chat history display."""
+
+import json
+
+from pydantic_ai.messages import BuiltinToolCallPart, ToolCallPart
+
+from shotgun.agents.tools.registry import get_tool_display_config
+
+
+class ToolFormatter:
+    """Formats tool calls for display in the TUI."""
+
+    @staticmethod
+    def truncate(text: str, max_length: int = 100) -> str:
+        """Truncate text to max_length characters, adding ellipsis if needed."""
+        if len(text) <= max_length:
+            return text
+        return text[: max_length - 3] + "..."
+
+    @staticmethod
+    def parse_args(args: dict[str, object] | str | None) -> dict[str, object]:
+        """Parse tool call arguments, handling both dict and JSON string formats."""
+        if args is None:
+            return {}
+        if isinstance(args, str):
+            try:
+                return json.loads(args) if args.strip() else {}
+            except json.JSONDecodeError:
+                return {}
+        return args if isinstance(args, dict) else {}
+
+    @classmethod
+    def format_tool_call_part(cls, part: ToolCallPart) -> str:
+        """Format a tool call part using the tool display registry."""
+        # Look up the display config for this tool
+        display_config = get_tool_display_config(part.tool_name)
+
+        if display_config:
+            # Tool is registered - use its display config
+            if display_config.hide:
+                return ""
+
+            # Parse args
+            args = cls.parse_args(part.args)
+
+            # Get the key argument value
+            if args and isinstance(args, dict) and display_config.key_arg in args:
+                # Special handling for codebase_shell which needs command + args
+                if part.tool_name == "codebase_shell" and "command" in args:
+                    command = args.get("command", "")
+                    cmd_args = args.get("args", [])
+                    if isinstance(cmd_args, list):
+                        args_str = " ".join(str(arg) for arg in cmd_args)
+                    else:
+                        args_str = ""
+                    key_value = f"{command} {args_str}".strip()
+                else:
+                    key_value = str(args[display_config.key_arg])
+
+                # Format: "display_text: key_value"
+                return f"{display_config.display_text}: {cls.truncate(key_value)}"
+            else:
+                # No key arg value available - show just display_text
+                return display_config.display_text
+
+        # Tool not registered - use fallback formatting
+        args = cls.parse_args(part.args)
+        if args and isinstance(args, dict):
+            # Try to extract common fields
+            if "query" in args:
+                return f"{part.tool_name}: {cls.truncate(str(args['query']))}"
+            elif "question" in args:
+                return f"{part.tool_name}: {cls.truncate(str(args['question']))}"
+            elif "filename" in args:
+                return f"{part.tool_name}: {args['filename']}"
+            else:
+                # Show tool name with truncated args
+                args_str = (
+                    str(part.args)[:50] + "..."
+                    if len(str(part.args)) > 50
+                    else str(part.args)
+                )
+                return f"{part.tool_name}({args_str})"
+        else:
+            return f"{part.tool_name}()"
+
+    @classmethod
+    def format_builtin_tool_call(cls, part: BuiltinToolCallPart) -> str:
+        """Format a builtin tool call part using the tool display registry."""
+        display_config = get_tool_display_config(part.tool_name or "")
+
+        if display_config:
+            if display_config.hide:
+                return ""
+
+            args = cls.parse_args(part.args)
+            # Get the key argument value
+            if args and isinstance(args, dict) and display_config.key_arg in args:
+                key_value = str(args[display_config.key_arg])
+                # Format: "display_text: key_value"
+                return f"{display_config.display_text}: {cls.truncate(key_value)}"
+            else:
+                # No key arg value available - show just display_text
+                return display_config.display_text
+        else:
+            # Fallback for unregistered builtin tools
+            if part.args:
+                args_str = (
+                    str(part.args)[:50] + "..."
+                    if len(str(part.args)) > 50
+                    else str(part.args)
+                )
+                return f"{part.tool_name}({args_str})"
+            else:
+                return f"{part.tool_name}()"
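For a tool that has no entry in the display registry, the fallback path formats from well-known argument names. A small sketch (the tool names and arguments below are invented, so the fallback branch is assumed to apply):

from pydantic_ai.messages import ToolCallPart

from shotgun.tui.screens.chat_screen.history import ToolFormatter

# JSON-string args are parsed; an unregistered tool with a "query" argument
# renders as "name: <query>"
part = ToolCallPart(
    tool_name="search_docs",
    args='{"query": "how does compaction decide what to keep?"}',
)
print(ToolFormatter.format_tool_call_part(part))
# expected (fallback path): search_docs: how does compaction decide what to keep?

# Dict args work too, and long values are truncated to 100 characters
part = ToolCallPart(tool_name="open_page", args={"query": "x" * 300})
print(ToolFormatter.format_tool_call_part(part))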