gobby-0.2.7-py3-none-any.whl → gobby-0.2.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
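
For readers who prefer to inspect the changes locally rather than through this viewer, the sketch below shows one way to diff the Python sources of two wheels using only the standard library (zipfile and difflib). It is illustrative only and not part of the gobby package; the local wheel filenames are assumptions, and the wheels must be downloaded first.

# Minimal sketch (not part of gobby): approximate a wheel-to-wheel diff locally.
# Assumes both wheels are already in the working directory, e.g. fetched with
# "pip download gobby==0.2.7 --no-deps" and "pip download gobby==0.2.9 --no-deps".
import difflib
import zipfile

OLD_WHEEL = "gobby-0.2.7-py3-none-any.whl"  # assumed local filename
NEW_WHEEL = "gobby-0.2.9-py3-none-any.whl"  # assumed local filename


def wheel_sources(path: str) -> dict[str, list[str]]:
    """Return {member path: decoded lines} for every .py file in the wheel."""
    with zipfile.ZipFile(path) as wheel:
        return {
            name: wheel.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
            for name in wheel.namelist()
            if name.endswith(".py")
        }


old, new = wheel_sources(OLD_WHEEL), wheel_sources(NEW_WHEEL)
for name in sorted(set(old) | set(new)):
    # Files present on only one side show up as fully added or fully deleted.
    for line in difflib.unified_diff(
        old.get(name, []),
        new.get(name, []),
        fromfile=f"0.2.7/{name}",
        tofile=f"0.2.9/{name}",
    ):
        print(line, end="")

A full comparison would also cover non-Python members (METADATA, RECORD, entry_points.txt), which this sketch skips for brevity.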
Files changed (125)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/claude_code.py +99 -61
  3. gobby/adapters/gemini.py +140 -38
  4. gobby/agents/isolation.py +130 -0
  5. gobby/agents/registry.py +11 -0
  6. gobby/agents/session.py +1 -0
  7. gobby/agents/spawn_executor.py +43 -13
  8. gobby/agents/spawners/macos.py +26 -1
  9. gobby/app_context.py +59 -0
  10. gobby/cli/__init__.py +0 -2
  11. gobby/cli/memory.py +185 -0
  12. gobby/cli/utils.py +5 -17
  13. gobby/clones/git.py +177 -0
  14. gobby/config/features.py +0 -20
  15. gobby/config/skills.py +31 -0
  16. gobby/config/tasks.py +4 -0
  17. gobby/hooks/event_handlers/__init__.py +155 -0
  18. gobby/hooks/event_handlers/_agent.py +175 -0
  19. gobby/hooks/event_handlers/_base.py +87 -0
  20. gobby/hooks/event_handlers/_misc.py +66 -0
  21. gobby/hooks/event_handlers/_session.py +573 -0
  22. gobby/hooks/event_handlers/_tool.py +196 -0
  23. gobby/hooks/hook_manager.py +21 -1
  24. gobby/install/gemini/hooks/hook_dispatcher.py +74 -15
  25. gobby/llm/claude.py +377 -42
  26. gobby/mcp_proxy/importer.py +4 -41
  27. gobby/mcp_proxy/instructions.py +2 -2
  28. gobby/mcp_proxy/manager.py +13 -3
  29. gobby/mcp_proxy/registries.py +35 -4
  30. gobby/mcp_proxy/services/recommendation.py +2 -28
  31. gobby/mcp_proxy/tools/agent_messaging.py +93 -44
  32. gobby/mcp_proxy/tools/agents.py +45 -9
  33. gobby/mcp_proxy/tools/artifacts.py +46 -12
  34. gobby/mcp_proxy/tools/sessions/_commits.py +31 -24
  35. gobby/mcp_proxy/tools/sessions/_crud.py +5 -5
  36. gobby/mcp_proxy/tools/sessions/_handoff.py +45 -41
  37. gobby/mcp_proxy/tools/sessions/_messages.py +35 -7
  38. gobby/mcp_proxy/tools/spawn_agent.py +44 -6
  39. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  40. gobby/mcp_proxy/tools/tasks/_context.py +18 -0
  41. gobby/mcp_proxy/tools/tasks/_crud.py +13 -6
  42. gobby/mcp_proxy/tools/tasks/_lifecycle.py +29 -14
  43. gobby/mcp_proxy/tools/tasks/_session.py +22 -7
  44. gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
  45. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  46. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  47. gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
  48. gobby/mcp_proxy/tools/workflows/_query.py +207 -0
  49. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  50. gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
  51. gobby/mcp_proxy/tools/worktrees.py +32 -7
  52. gobby/memory/components/__init__.py +0 -0
  53. gobby/memory/components/ingestion.py +98 -0
  54. gobby/memory/components/search.py +108 -0
  55. gobby/memory/extractor.py +15 -1
  56. gobby/memory/manager.py +16 -25
  57. gobby/paths.py +51 -0
  58. gobby/prompts/loader.py +1 -35
  59. gobby/runner.py +36 -10
  60. gobby/servers/http.py +186 -149
  61. gobby/servers/routes/admin.py +12 -0
  62. gobby/servers/routes/mcp/endpoints/execution.py +15 -7
  63. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  64. gobby/servers/routes/mcp/hooks.py +50 -3
  65. gobby/servers/websocket.py +57 -1
  66. gobby/sessions/analyzer.py +4 -4
  67. gobby/sessions/manager.py +9 -0
  68. gobby/sessions/transcripts/gemini.py +100 -34
  69. gobby/skills/parser.py +23 -0
  70. gobby/skills/sync.py +5 -4
  71. gobby/storage/artifacts.py +19 -0
  72. gobby/storage/database.py +9 -2
  73. gobby/storage/memories.py +32 -21
  74. gobby/storage/migrations.py +46 -4
  75. gobby/storage/sessions.py +4 -2
  76. gobby/storage/skills.py +87 -7
  77. gobby/tasks/external_validator.py +4 -17
  78. gobby/tasks/validation.py +13 -87
  79. gobby/tools/summarizer.py +18 -51
  80. gobby/utils/status.py +13 -0
  81. gobby/workflows/actions.py +5 -0
  82. gobby/workflows/context_actions.py +21 -24
  83. gobby/workflows/detection_helpers.py +38 -24
  84. gobby/workflows/enforcement/__init__.py +11 -1
  85. gobby/workflows/enforcement/blocking.py +109 -1
  86. gobby/workflows/enforcement/handlers.py +35 -1
  87. gobby/workflows/engine.py +96 -0
  88. gobby/workflows/evaluator.py +110 -0
  89. gobby/workflows/hooks.py +41 -0
  90. gobby/workflows/lifecycle_evaluator.py +2 -1
  91. gobby/workflows/memory_actions.py +11 -0
  92. gobby/workflows/safe_evaluator.py +8 -0
  93. gobby/workflows/summary_actions.py +123 -50
  94. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
  95. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/RECORD +99 -107
  96. gobby/cli/tui.py +0 -34
  97. gobby/hooks/event_handlers.py +0 -909
  98. gobby/mcp_proxy/tools/workflows.py +0 -973
  99. gobby/tui/__init__.py +0 -5
  100. gobby/tui/api_client.py +0 -278
  101. gobby/tui/app.py +0 -329
  102. gobby/tui/screens/__init__.py +0 -25
  103. gobby/tui/screens/agents.py +0 -333
  104. gobby/tui/screens/chat.py +0 -450
  105. gobby/tui/screens/dashboard.py +0 -377
  106. gobby/tui/screens/memory.py +0 -305
  107. gobby/tui/screens/metrics.py +0 -231
  108. gobby/tui/screens/orchestrator.py +0 -903
  109. gobby/tui/screens/sessions.py +0 -412
  110. gobby/tui/screens/tasks.py +0 -440
  111. gobby/tui/screens/workflows.py +0 -289
  112. gobby/tui/screens/worktrees.py +0 -174
  113. gobby/tui/widgets/__init__.py +0 -21
  114. gobby/tui/widgets/chat.py +0 -210
  115. gobby/tui/widgets/conductor.py +0 -104
  116. gobby/tui/widgets/menu.py +0 -132
  117. gobby/tui/widgets/message_panel.py +0 -160
  118. gobby/tui/widgets/review_gate.py +0 -224
  119. gobby/tui/widgets/task_tree.py +0 -99
  120. gobby/tui/widgets/token_budget.py +0 -166
  121. gobby/tui/ws_client.py +0 -258
  122. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
  123. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
  124. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
  125. {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
gobby/tui/screens/chat.py DELETED
@@ -1,450 +0,0 @@
- """Chat screen for LLM interface with conductor."""
-
- from __future__ import annotations
-
- from datetime import datetime
- from typing import Any
-
- from textual.app import ComposeResult
- from textual.containers import Container, Horizontal, VerticalScroll
- from textual.events import Key
- from textual.reactive import reactive
- from textual.widget import Widget
- from textual.widgets import (
-     Button,
-     LoadingIndicator,
-     Select,
-     Static,
-     TextArea,
- )
-
- from gobby.tui.api_client import GobbyAPIClient
- from gobby.tui.ws_client import GobbyWebSocketClient
-
-
- class ChatMessage(Static):
-     """A single chat message display."""
-
-     DEFAULT_CSS = """
-     ChatMessage {
-         padding: 1;
-         margin-bottom: 1;
-         height: auto;
-     }
-
-     ChatMessage.--user {
-         margin-left: 8;
-         background: #313244;
-         border: round #06b6d4;
-     }
-
-     ChatMessage.--conductor {
-         margin-right: 8;
-         background: #313244;
-         border: round #7c3aed;
-     }
-
-     ChatMessage .message-header {
-         layout: horizontal;
-         height: 1;
-         margin-bottom: 1;
-     }
-
-     ChatMessage .message-sender {
-         text-style: bold;
-         width: 1fr;
-     }
-
-     ChatMessage .message-time {
-         color: #6c7086;
-         width: auto;
-     }
-
-     ChatMessage.--user .message-sender {
-         color: #06b6d4;
-     }
-
-     ChatMessage.--conductor .message-sender {
-         color: #a78bfa;
-     }
-
-     ChatMessage .message-content {
-         color: #cdd6f4;
-     }
-     """
-
-     def __init__(
-         self,
-         sender: str,
-         content: str,
-         is_user: bool = True,
-         timestamp: str | None = None,
-         **kwargs: Any,
-     ) -> None:
-         super().__init__(**kwargs)
-         self.sender = sender
-         self.content = content
-         self.is_user = is_user
-         self.timestamp = timestamp or datetime.now().strftime("%H:%M")
-         self.add_class("--user" if is_user else "--conductor")
-
-     def compose(self) -> ComposeResult:
-         with Horizontal(classes="message-header"):
-             yield Static(self.sender, classes="message-sender")
-             yield Static(self.timestamp, classes="message-time")
-         yield Static(self.content, classes="message-content")
-
-
- class ChatHistory(VerticalScroll):
-     """Scrollable chat history container."""
-
-     DEFAULT_CSS = """
-     ChatHistory {
-         height: 1fr;
-         padding: 1;
-         background: #1e1e2e;
-     }
-     """
-
-     def add_message(
-         self,
-         sender: str,
-         content: str,
-         is_user: bool = True,
-         timestamp: str | None = None,
-     ) -> None:
-         """Add a new message to the chat history."""
-         message = ChatMessage(sender, content, is_user, timestamp)
-         self.mount(message)
-         # Scroll to bottom
-         self.scroll_end(animate=False)
-
-
- class ChatInputArea(Widget):
-     """Input area for composing chat messages."""
-
-     DEFAULT_CSS = """
-     ChatInputArea {
-         height: auto;
-         min-height: 4;
-         max-height: 10;
-         padding: 1;
-         border-top: solid #45475a;
-         background: #313244;
-     }
-
-     ChatInputArea .input-row {
-         layout: horizontal;
-         height: auto;
-     }
-
-     ChatInputArea #chat-input {
-         width: 1fr;
-         height: auto;
-         min-height: 3;
-         margin-right: 1;
-     }
-
-     ChatInputArea #send-button {
-         width: 10;
-         height: 3;
-     }
-
-     ChatInputArea .mode-row {
-         layout: horizontal;
-         height: 1;
-         margin-top: 1;
-     }
-
-     ChatInputArea .mode-label {
-         color: #a6adc8;
-         width: auto;
-         margin-right: 1;
-     }
-
-     ChatInputArea #mode-select {
-         width: 20;
-     }
-     """
-
-     def compose(self) -> ComposeResult:
-         with Horizontal(classes="input-row"):
-             yield TextArea(id="chat-input")
-             yield Button("Send", variant="primary", id="send-button")
-         with Horizontal(classes="mode-row"):
-             yield Static("Mode:", classes="mode-label")
-             yield Select(
-                 [
-                     ("Haiku", "haiku"),
-                     ("Prose", "prose"),
-                     ("Terse", "terse"),
-                 ],
-                 value="haiku",
-                 id="mode-select",
-             )
-
-     def get_message(self) -> str:
-         """Get the current message text."""
-         text_area = self.query_one("#chat-input", TextArea)
-         return text_area.text
-
-     def clear_input(self) -> None:
-         """Clear the input field."""
-         text_area = self.query_one("#chat-input", TextArea)
-         text_area.clear()
-
-     def get_mode(self) -> str:
-         """Get the current response mode."""
-         select = self.query_one("#mode-select", Select)
-         return str(select.value)
-
-
- class ChatScreen(Widget):
-     """Chat screen for interacting with the conductor LLM."""
-
-     DEFAULT_CSS = """
-     ChatScreen {
-         width: 1fr;
-         height: 1fr;
-         layout: vertical;
-     }
-
-     ChatScreen .chat-header {
-         height: 3;
-         padding: 1;
-         background: #313244;
-         border-bottom: solid #45475a;
-     }
-
-     ChatScreen .header-title {
-         text-style: bold;
-         color: #a78bfa;
-     }
-
-     ChatScreen .header-mode {
-         color: #a6adc8;
-         dock: right;
-     }
-
-     ChatScreen #chat-history {
-         height: 1fr;
-     }
-
-     ChatScreen #chat-input-area {
-         height: auto;
-     }
-
-     ChatScreen .loading-indicator {
-         height: 3;
-         content-align: center middle;
-         background: #313244;
-     }
-     """
-
-     sending = reactive(False)
-     messages: reactive[list[dict[str, Any]]] = reactive(list)
-
-     def __init__(
-         self,
-         api_client: GobbyAPIClient,
-         ws_client: GobbyWebSocketClient,
-         **kwargs: Any,
-     ) -> None:
-         super().__init__(**kwargs)
-         self.api_client = api_client
-         self.ws_client = ws_client
-
-     def compose(self) -> ComposeResult:
-         with Horizontal(classes="chat-header"):
-             yield Static("💬 Chat with Conductor", classes="header-title")
-         yield ChatHistory(id="chat-history")
-         if self.sending:
-             with Container(classes="loading-indicator"):
-                 yield LoadingIndicator()
-         yield ChatInputArea(id="chat-input-area")
-
-     async def on_mount(self) -> None:
-         """Initialize the chat screen."""
-         # Add welcome message
-         history = self.query_one("#chat-history", ChatHistory)
-         history.add_message(
-             "Conductor",
-             "Welcome to Gobby Chat. Ask me about tasks, status, or give commands.",
-             is_user=False,
-         )
-
-     async def on_button_pressed(self, event: Button.Pressed) -> None:
-         """Handle send button press."""
-         if event.button.id == "send-button":
-             await self._send_message()
-
-     async def on_key(self, event: Key) -> None:
-         """Handle key events for sending messages."""
-         # Check for Enter key in text area (without shift)
-         # In Textual, Shift+Enter would be key="shift+enter", not "enter"
-         if event.key == "enter":
-             # Check if focus is on the text area
-             input_area = self.query_one("#chat-input-area", ChatInputArea)
-             text_area = input_area.query_one("#chat-input", TextArea)
-             if text_area.has_focus:
-                 event.stop()
-                 await self._send_message()
-
-     async def _send_message(self) -> None:
-         """Send a message to the conductor."""
-         input_area = self.query_one("#chat-input-area", ChatInputArea)
-         message = input_area.get_message().strip()
-
-         if not message:
-             return
-
-         # Add user message to history
-         history = self.query_one("#chat-history", ChatHistory)
-         history.add_message("You", message, is_user=True)
-         input_area.clear_input()
-
-         # Show loading state
-         self.sending = True
-         await self.recompose()
-
-         try:
-             # Get response mode
-             mode = input_area.get_mode()
-
-             # Send to conductor (via LLM service)
-             response = await self._get_conductor_response(message, mode)
-
-             # Add conductor response
-             history.add_message("Conductor", response, is_user=False)
-
-         except Exception as e:
-             history.add_message(
-                 "System",
-                 f"Error: {e}",
-                 is_user=False,
-             )
-         finally:
-             self.sending = False
-             await self.recompose()
-
-     async def _get_conductor_response(self, message: str, mode: str) -> str:
-         """Get a response from the conductor LLM."""
-         # Build prompt based on mode
-         # Note: system_prompt would be used in full LLM implementation
-         # For now, we use simple pattern matching in _generate_response
-
-         try:
-             async with GobbyAPIClient(self.api_client.base_url) as client:
-                 # Get current status for context
-                 status = await client.get_status()
-
-                 # Build context
-                 tasks_info = status.get("tasks", {})
-                 agents_info = await client.list_agents()
-
-                 context = f"""System Status:
- - Open tasks: {tasks_info.get("open", 0)}
- - In progress: {tasks_info.get("in_progress", 0)}
- - Running agents: {len([a for a in agents_info if a.get("status") == "running"])}
- """
-
-                 # For now, generate a simple response
-                 # In a full implementation, this would call the LLM service
-                 response = await self._generate_response(message, mode, context, tasks_info)
-                 return response
-
-         except Exception as e:
-             raise Exception(f"Failed to get response: {e}") from e
-
-     async def _generate_response(
-         self,
-         message: str,
-         mode: str,
-         context: str,
-         tasks_info: dict[str, Any],
-     ) -> str:
-         """Generate a response based on the message and mode."""
-         message_lower = message.lower()
-
-         # Simple pattern matching for common queries
-         # In production, this would use an actual LLM
-
-         if "status" in message_lower or "what" in message_lower:
-             open_count = tasks_info.get("open", 0)
-             in_progress = tasks_info.get("in_progress", 0)
-
-             if mode == "haiku":
-                 if in_progress > 0:
-                     return f"{in_progress} task{'s' if in_progress != 1 else ''} in progress\nCode flows through busy hands\nWork carries on"
-                 elif open_count > 0:
-                     return (
-                         f"{open_count} tasks await you\nReady for your attention\nChoose and begin"
-                     )
-                 else:
-                     return "All is quiet now\nNo tasks need attention here\nRest or create more"
-             elif mode == "terse":
-                 return f"Open: {open_count}, In Progress: {in_progress}"
-             else:
-                 return f"Currently there are {open_count} open tasks and {in_progress} in progress. Use `/gobby-tasks` to see details or ask me to suggest the next task."
-
-         elif "next" in message_lower or "suggest" in message_lower:
-             if mode == "haiku":
-                 return "Check the task queue now\nPriority guides your path\nBegin with the first"
-             else:
-                 return "I recommend checking the Tasks screen (press T) to see prioritized tasks. The suggest_next_task tool can help identify what to work on next."
-
-         elif "autonomous" in message_lower or "auto" in message_lower:
-             if mode == "haiku":
-                 return "Autonomous mode\nI work while you observe\nTrust but verify"
-             else:
-                 return "To enable autonomous mode, use the Orchestrator screen (press O) and toggle the mode. I'll work through tasks independently and pause for reviews when needed."
-
-         elif "help" in message_lower:
-             if mode == "haiku":
-                 return "Ask me anything\nTasks, status, or guidance\nI am here to help"
-             else:
-                 return "I can help you with:\n- Checking task and agent status\n- Explaining system state\n- Suggesting next tasks\n- Enabling autonomous mode\n\nJust ask!"
-
-         else:
-             # Default response
-             if mode == "haiku":
-                 return "Your words reach my ears\nBut meaning escapes me now\nPlease ask once more"
-             else:
-                 return "I'm not sure how to help with that specific request. Try asking about task status, next tasks, or system state. You can also use the other screens (D/T/S/A/O) for direct interaction."
-
-     def on_ws_event(self, event_type: str, data: dict[str, Any]) -> None:
-         """Handle WebSocket events."""
-         # Show relevant events in chat
-         try:
-             history = self.query_one("#chat-history", ChatHistory)
-
-             if event_type == "autonomous_event":
-                 event = data.get("event", "")
-                 task_id = data.get("task_id", "")
-                 history.add_message(
-                     "System",
-                     f"Autonomous: {event} ({task_id})",
-                     is_user=False,
-                 )
-
-             elif event_type == "agent_event":
-                 event = data.get("event", "")
-                 run_id = data.get("run_id", "")[:8]
-                 if event in ["agent_started", "agent_completed", "agent_failed"]:
-                     history.add_message(
-                         "System",
-                         f"Agent {run_id}: {event}",
-                         is_user=False,
-                     )
-
-         except Exception:
-             pass # nosec B110 - TUI event handling failure is non-critical
-
-     def activate_search(self) -> None:
-         """Focus the chat input."""
-         try:
-             input_area = self.query_one("#chat-input-area", ChatInputArea)
-             text_area = input_area.query_one("#chat-input", TextArea)
-             text_area.focus()
-         except Exception:
-             pass # nosec B110 - widget may not be mounted yet