gobby 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/claude_code.py +3 -26
- gobby/app_context.py +59 -0
- gobby/cli/utils.py +5 -17
- gobby/config/features.py +0 -20
- gobby/config/tasks.py +4 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +87 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +573 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/hook_manager.py +2 -0
- gobby/llm/claude.py +377 -42
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/registries.py +14 -0
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/tools/artifacts.py +3 -3
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
- gobby/mcp_proxy/tools/workflows/_query.py +207 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +23 -10
- gobby/servers/http.py +186 -149
- gobby/servers/routes/admin.py +12 -0
- gobby/servers/routes/mcp/endpoints/execution.py +15 -7
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/sessions/analyzer.py +2 -2
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/migrations.py +25 -2
- gobby/storage/skills.py +47 -7
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +5 -0
- gobby/workflows/context_actions.py +21 -24
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +96 -0
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/engine.py +6 -3
- gobby/workflows/lifecycle_evaluator.py +2 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/RECORD +61 -45
- gobby/hooks/event_handlers.py +0 -1008
- gobby/mcp_proxy/tools/workflows.py +0 -1023
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
gobby/hooks/event_handlers.py
DELETED
|
@@ -1,1008 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Event handlers module for hook event processing.
|
|
3
|
-
|
|
4
|
-
This module is extracted from hook_manager.py using Strangler Fig pattern.
|
|
5
|
-
It provides centralized event handler registration and dispatch.
|
|
6
|
-
|
|
7
|
-
Classes:
|
|
8
|
-
EventHandlers: Manages event handler registration and dispatch.
|
|
9
|
-
"""
|
|
10
|
-
|
|
11
|
-
from __future__ import annotations
|
|
12
|
-
|
|
13
|
-
import logging
|
|
14
|
-
from collections.abc import Callable
|
|
15
|
-
from typing import TYPE_CHECKING, Any
|
|
16
|
-
|
|
17
|
-
from gobby.hooks.events import HookEvent, HookEventType, HookResponse
|
|
18
|
-
|
|
19
|
-
if TYPE_CHECKING:
|
|
20
|
-
from gobby.config.skills import SkillsConfig
|
|
21
|
-
from gobby.hooks.session_coordinator import SessionCoordinator
|
|
22
|
-
from gobby.hooks.skill_manager import HookSkillManager
|
|
23
|
-
from gobby.sessions.manager import SessionManager
|
|
24
|
-
from gobby.sessions.summary import SummaryFileGenerator
|
|
25
|
-
from gobby.storage.session_messages import LocalSessionMessageManager
|
|
26
|
-
from gobby.storage.session_tasks import SessionTaskManager
|
|
27
|
-
from gobby.storage.sessions import LocalSessionManager
|
|
28
|
-
from gobby.storage.tasks import LocalTaskManager
|
|
29
|
-
from gobby.workflows.hooks import WorkflowHookHandler
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
EDIT_TOOLS = {
|
|
33
|
-
"write_file",
|
|
34
|
-
"replace",
|
|
35
|
-
"edit_file",
|
|
36
|
-
"notebook_edit",
|
|
37
|
-
"edit",
|
|
38
|
-
"write",
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
class EventHandlers:
|
|
43
|
-
"""
|
|
44
|
-
Manages event handler registration and dispatch.
|
|
45
|
-
|
|
46
|
-
Provides handler methods for all HookEventType values and a registration
|
|
47
|
-
mechanism for looking up handlers by event type.
|
|
48
|
-
|
|
49
|
-
Extracted from HookManager to separate event handling concerns.
|
|
50
|
-
"""
|
|
51
|
-
|
|
52
|
-
def __init__(
|
|
53
|
-
self,
|
|
54
|
-
session_manager: SessionManager | None = None,
|
|
55
|
-
workflow_handler: WorkflowHookHandler | None = None,
|
|
56
|
-
session_storage: LocalSessionManager | None = None,
|
|
57
|
-
session_task_manager: SessionTaskManager | None = None,
|
|
58
|
-
message_processor: Any | None = None,
|
|
59
|
-
summary_file_generator: SummaryFileGenerator | None = None,
|
|
60
|
-
task_manager: LocalTaskManager | None = None,
|
|
61
|
-
session_coordinator: SessionCoordinator | None = None,
|
|
62
|
-
message_manager: LocalSessionMessageManager | None = None,
|
|
63
|
-
skill_manager: HookSkillManager | None = None,
|
|
64
|
-
skills_config: SkillsConfig | None = None,
|
|
65
|
-
get_machine_id: Callable[[], str] | None = None,
|
|
66
|
-
resolve_project_id: Callable[[str | None, str | None], str] | None = None,
|
|
67
|
-
logger: logging.Logger | None = None,
|
|
68
|
-
) -> None:
|
|
69
|
-
"""
|
|
70
|
-
Initialize EventHandlers.
|
|
71
|
-
|
|
72
|
-
Args:
|
|
73
|
-
session_manager: SessionManager for session operations
|
|
74
|
-
workflow_handler: WorkflowHookHandler for lifecycle workflows
|
|
75
|
-
session_storage: LocalSessionManager for session storage
|
|
76
|
-
session_task_manager: SessionTaskManager for session-task links
|
|
77
|
-
message_processor: SessionMessageProcessor for message handling
|
|
78
|
-
summary_file_generator: SummaryFileGenerator for summaries
|
|
79
|
-
task_manager: LocalTaskManager for task operations
|
|
80
|
-
session_coordinator: SessionCoordinator for session tracking
|
|
81
|
-
message_manager: LocalSessionMessageManager for messages
|
|
82
|
-
skill_manager: HookSkillManager for skill discovery
|
|
83
|
-
skills_config: SkillsConfig for skill injection settings
|
|
84
|
-
get_machine_id: Function to get machine ID
|
|
85
|
-
resolve_project_id: Function to resolve project ID from cwd
|
|
86
|
-
logger: Optional logger instance
|
|
87
|
-
"""
|
|
88
|
-
self._session_manager = session_manager
|
|
89
|
-
self._workflow_handler = workflow_handler
|
|
90
|
-
self._session_storage = session_storage
|
|
91
|
-
self._session_task_manager = session_task_manager
|
|
92
|
-
self._message_processor = message_processor
|
|
93
|
-
self._summary_file_generator = summary_file_generator
|
|
94
|
-
self._task_manager = task_manager
|
|
95
|
-
self._session_coordinator = session_coordinator
|
|
96
|
-
self._message_manager = message_manager
|
|
97
|
-
self._skill_manager = skill_manager
|
|
98
|
-
self._skills_config = skills_config
|
|
99
|
-
self._get_machine_id = get_machine_id or (lambda: "unknown-machine")
|
|
100
|
-
self._resolve_project_id = resolve_project_id or (lambda p, c: p or "")
|
|
101
|
-
self.logger = logger or logging.getLogger(__name__)
|
|
102
|
-
|
|
103
|
-
# Build handler map
|
|
104
|
-
self._handler_map: dict[HookEventType, Callable[[HookEvent], HookResponse]] = {
|
|
105
|
-
HookEventType.SESSION_START: self.handle_session_start,
|
|
106
|
-
HookEventType.SESSION_END: self.handle_session_end,
|
|
107
|
-
HookEventType.BEFORE_AGENT: self.handle_before_agent,
|
|
108
|
-
HookEventType.AFTER_AGENT: self.handle_after_agent,
|
|
109
|
-
HookEventType.BEFORE_TOOL: self.handle_before_tool,
|
|
110
|
-
HookEventType.AFTER_TOOL: self.handle_after_tool,
|
|
111
|
-
HookEventType.PRE_COMPACT: self.handle_pre_compact,
|
|
112
|
-
HookEventType.SUBAGENT_START: self.handle_subagent_start,
|
|
113
|
-
HookEventType.SUBAGENT_STOP: self.handle_subagent_stop,
|
|
114
|
-
HookEventType.NOTIFICATION: self.handle_notification,
|
|
115
|
-
HookEventType.BEFORE_TOOL_SELECTION: self.handle_before_tool_selection,
|
|
116
|
-
HookEventType.BEFORE_MODEL: self.handle_before_model,
|
|
117
|
-
HookEventType.AFTER_MODEL: self.handle_after_model,
|
|
118
|
-
HookEventType.PERMISSION_REQUEST: self.handle_permission_request,
|
|
119
|
-
HookEventType.STOP: self.handle_stop,
|
|
120
|
-
}
|
|
121
|
-
|
|
122
|
-
def get_handler(
|
|
123
|
-
self, event_type: HookEventType | str
|
|
124
|
-
) -> Callable[[HookEvent], HookResponse] | None:
|
|
125
|
-
"""
|
|
126
|
-
Get handler for an event type.
|
|
127
|
-
|
|
128
|
-
Args:
|
|
129
|
-
event_type: The event type to get handler for
|
|
130
|
-
|
|
131
|
-
Returns:
|
|
132
|
-
Handler callable or None if not found
|
|
133
|
-
"""
|
|
134
|
-
if isinstance(event_type, str):
|
|
135
|
-
try:
|
|
136
|
-
event_type = HookEventType(event_type)
|
|
137
|
-
except ValueError:
|
|
138
|
-
return None
|
|
139
|
-
return self._handler_map.get(event_type)
|
|
140
|
-
|
|
141
|
-
def get_handler_map(self) -> dict[HookEventType, Callable[[HookEvent], HookResponse]]:
|
|
142
|
-
"""
|
|
143
|
-
Get a copy of the handler map.
|
|
144
|
-
|
|
145
|
-
Returns:
|
|
146
|
-
Copy of handler map (modifications don't affect internal state)
|
|
147
|
-
"""
|
|
148
|
-
return dict(self._handler_map)
|
|
149
|
-
|
|
150
|
-
def _auto_activate_workflow(
|
|
151
|
-
self, workflow_name: str, session_id: str, project_path: str | None
|
|
152
|
-
) -> None:
|
|
153
|
-
"""Auto-activate a workflow for a session.
|
|
154
|
-
|
|
155
|
-
Args:
|
|
156
|
-
workflow_name: Name of the workflow to activate
|
|
157
|
-
session_id: Session ID to activate workflow for
|
|
158
|
-
project_path: Project path for workflow context
|
|
159
|
-
"""
|
|
160
|
-
if not self._workflow_handler:
|
|
161
|
-
return
|
|
162
|
-
|
|
163
|
-
try:
|
|
164
|
-
result = self._workflow_handler.activate_workflow(
|
|
165
|
-
workflow_name=workflow_name,
|
|
166
|
-
session_id=session_id,
|
|
167
|
-
project_path=project_path,
|
|
168
|
-
)
|
|
169
|
-
if result.get("success"):
|
|
170
|
-
self.logger.info(
|
|
171
|
-
"Auto-activated workflow for session",
|
|
172
|
-
extra={
|
|
173
|
-
"workflow_name": workflow_name,
|
|
174
|
-
"session_id": session_id,
|
|
175
|
-
"project_path": project_path,
|
|
176
|
-
},
|
|
177
|
-
)
|
|
178
|
-
else:
|
|
179
|
-
self.logger.warning(
|
|
180
|
-
"Failed to auto-activate workflow",
|
|
181
|
-
extra={
|
|
182
|
-
"workflow_name": workflow_name,
|
|
183
|
-
"session_id": session_id,
|
|
184
|
-
"project_path": project_path,
|
|
185
|
-
"error": result.get("error"),
|
|
186
|
-
},
|
|
187
|
-
)
|
|
188
|
-
except Exception as e:
|
|
189
|
-
self.logger.warning(
|
|
190
|
-
"Failed to auto-activate workflow",
|
|
191
|
-
extra={
|
|
192
|
-
"workflow_name": workflow_name,
|
|
193
|
-
"session_id": session_id,
|
|
194
|
-
"project_path": project_path,
|
|
195
|
-
"error": str(e),
|
|
196
|
-
},
|
|
197
|
-
exc_info=True,
|
|
198
|
-
)
|
|
199
|
-
|
|
200
|
-
# ==================== SESSION HANDLERS ====================
|
|
201
|
-
|
|
202
|
-
def handle_session_start(self, event: HookEvent) -> HookResponse:
|
|
203
|
-
"""
|
|
204
|
-
Handle SESSION_START event.
|
|
205
|
-
|
|
206
|
-
Register session and execute session-handoff workflow.
|
|
207
|
-
"""
|
|
208
|
-
external_id = event.session_id
|
|
209
|
-
input_data = event.data
|
|
210
|
-
transcript_path = input_data.get("transcript_path")
|
|
211
|
-
cli_source = event.source.value
|
|
212
|
-
cwd = input_data.get("cwd")
|
|
213
|
-
session_source = input_data.get("source", "startup")
|
|
214
|
-
|
|
215
|
-
# Resolve project_id (auto-creates if needed)
|
|
216
|
-
project_id = self._resolve_project_id(input_data.get("project_id"), cwd)
|
|
217
|
-
# Always use Gobby's machine_id for cross-CLI consistency
|
|
218
|
-
machine_id = self._get_machine_id()
|
|
219
|
-
|
|
220
|
-
self.logger.debug(
|
|
221
|
-
f"SESSION_START: cli={cli_source}, project={project_id}, source={session_source}"
|
|
222
|
-
)
|
|
223
|
-
|
|
224
|
-
# Step 0: Check if this is a pre-created session (terminal mode agent)
|
|
225
|
-
# When we spawn an agent in terminal mode, we pass --session-id <internal_id>
|
|
226
|
-
# to Claude, so external_id here might actually be our internal session ID
|
|
227
|
-
existing_session = None
|
|
228
|
-
if self._session_storage:
|
|
229
|
-
try:
|
|
230
|
-
# Try to find by internal ID first (terminal mode case)
|
|
231
|
-
existing_session = self._session_storage.get(external_id)
|
|
232
|
-
if existing_session:
|
|
233
|
-
self.logger.info(
|
|
234
|
-
f"Found pre-created session {external_id}, updating instead of creating"
|
|
235
|
-
)
|
|
236
|
-
# Update the session with actual runtime info
|
|
237
|
-
self._session_storage.update(
|
|
238
|
-
session_id=existing_session.id,
|
|
239
|
-
jsonl_path=transcript_path,
|
|
240
|
-
status="active",
|
|
241
|
-
)
|
|
242
|
-
# Return early with the pre-created session's context
|
|
243
|
-
session_id: str | None = existing_session.id
|
|
244
|
-
parent_session_id = existing_session.parent_session_id
|
|
245
|
-
|
|
246
|
-
# Track registered session
|
|
247
|
-
if transcript_path and self._session_coordinator:
|
|
248
|
-
try:
|
|
249
|
-
self._session_coordinator.register_session(external_id)
|
|
250
|
-
except Exception as e:
|
|
251
|
-
self.logger.error(f"Failed to setup session tracking: {e}")
|
|
252
|
-
|
|
253
|
-
# Start the agent run if this is a terminal-mode agent session
|
|
254
|
-
if existing_session.agent_run_id and self._session_coordinator:
|
|
255
|
-
try:
|
|
256
|
-
self._session_coordinator.start_agent_run(existing_session.agent_run_id)
|
|
257
|
-
except Exception as e:
|
|
258
|
-
self.logger.warning(f"Failed to start agent run: {e}")
|
|
259
|
-
|
|
260
|
-
# Auto-activate workflow if specified for this session
|
|
261
|
-
if existing_session.workflow_name and session_id:
|
|
262
|
-
self._auto_activate_workflow(
|
|
263
|
-
existing_session.workflow_name, session_id, cwd
|
|
264
|
-
)
|
|
265
|
-
|
|
266
|
-
# Update event metadata
|
|
267
|
-
event.metadata["_platform_session_id"] = session_id
|
|
268
|
-
|
|
269
|
-
# Register with Message Processor
|
|
270
|
-
if self._message_processor and transcript_path:
|
|
271
|
-
try:
|
|
272
|
-
self._message_processor.register_session(
|
|
273
|
-
session_id, transcript_path, source=cli_source
|
|
274
|
-
)
|
|
275
|
-
except Exception as e:
|
|
276
|
-
self.logger.warning(f"Failed to register with message processor: {e}")
|
|
277
|
-
|
|
278
|
-
# Execute lifecycle workflows
|
|
279
|
-
context_parts = [""]
|
|
280
|
-
wf_response = HookResponse(decision="allow", context="")
|
|
281
|
-
if self._workflow_handler:
|
|
282
|
-
try:
|
|
283
|
-
wf_response = self._workflow_handler.handle_all_lifecycles(event)
|
|
284
|
-
if wf_response.context:
|
|
285
|
-
context_parts.append(wf_response.context)
|
|
286
|
-
except Exception as e:
|
|
287
|
-
self.logger.warning(f"Workflow error: {e}")
|
|
288
|
-
|
|
289
|
-
# Build system message (terminal display only)
|
|
290
|
-
# Display #N format if seq_num available, fallback to UUID
|
|
291
|
-
session_ref = (
|
|
292
|
-
f"#{existing_session.seq_num}" if existing_session.seq_num else session_id
|
|
293
|
-
)
|
|
294
|
-
system_message = f"\nGobby Session ID: {session_ref}"
|
|
295
|
-
system_message += " <- Use this for MCP tool calls (session_id parameter)"
|
|
296
|
-
system_message += f"\nExternal ID: {external_id} (CLI-native, rarely needed)"
|
|
297
|
-
if parent_session_id:
|
|
298
|
-
context_parts.append(f"Parent session: {parent_session_id}")
|
|
299
|
-
|
|
300
|
-
# Add active lifecycle workflows
|
|
301
|
-
if wf_response.metadata and "discovered_workflows" in wf_response.metadata:
|
|
302
|
-
wf_list = wf_response.metadata["discovered_workflows"]
|
|
303
|
-
if wf_list:
|
|
304
|
-
system_message += "\nActive workflows:"
|
|
305
|
-
for w in wf_list:
|
|
306
|
-
source = "project" if w["is_project"] else "global"
|
|
307
|
-
system_message += (
|
|
308
|
-
f"\n - {w['name']} ({source}, priority={w['priority']})"
|
|
309
|
-
)
|
|
310
|
-
|
|
311
|
-
if wf_response.system_message:
|
|
312
|
-
system_message += f"\n\n{wf_response.system_message}"
|
|
313
|
-
|
|
314
|
-
return HookResponse(
|
|
315
|
-
decision="allow",
|
|
316
|
-
context="\n".join(context_parts) if context_parts else None,
|
|
317
|
-
system_message=system_message,
|
|
318
|
-
metadata={
|
|
319
|
-
"session_id": session_id,
|
|
320
|
-
"session_ref": session_ref,
|
|
321
|
-
"parent_session_id": parent_session_id,
|
|
322
|
-
"machine_id": machine_id,
|
|
323
|
-
"project_id": existing_session.project_id,
|
|
324
|
-
"external_id": external_id,
|
|
325
|
-
"task_id": event.task_id,
|
|
326
|
-
"is_pre_created": True,
|
|
327
|
-
},
|
|
328
|
-
)
|
|
329
|
-
except Exception as e:
|
|
330
|
-
self.logger.debug(f"No pre-created session found: {e}")
|
|
331
|
-
|
|
332
|
-
# Step 1: Find parent session
|
|
333
|
-
# Check env vars first (spawned agent case), then handoff (source='clear')
|
|
334
|
-
parent_session_id = input_data.get("parent_session_id")
|
|
335
|
-
workflow_name = input_data.get("workflow_name")
|
|
336
|
-
agent_depth = input_data.get("agent_depth")
|
|
337
|
-
|
|
338
|
-
if not parent_session_id and session_source == "clear" and self._session_storage:
|
|
339
|
-
try:
|
|
340
|
-
parent = self._session_storage.find_parent(
|
|
341
|
-
machine_id=machine_id,
|
|
342
|
-
project_id=project_id,
|
|
343
|
-
source=cli_source,
|
|
344
|
-
status="handoff_ready",
|
|
345
|
-
)
|
|
346
|
-
if parent:
|
|
347
|
-
parent_session_id = parent.id
|
|
348
|
-
self.logger.debug(f"Found parent session: {parent_session_id}")
|
|
349
|
-
except Exception as e:
|
|
350
|
-
self.logger.warning(f"Error finding parent session: {e}")
|
|
351
|
-
|
|
352
|
-
# Step 2: Register new session with parent if found
|
|
353
|
-
# Extract terminal context (injected by hook_dispatcher for terminal correlation)
|
|
354
|
-
terminal_context = input_data.get("terminal_context")
|
|
355
|
-
# Parse agent_depth as int if provided
|
|
356
|
-
agent_depth_val = 0
|
|
357
|
-
if agent_depth:
|
|
358
|
-
try:
|
|
359
|
-
agent_depth_val = int(agent_depth)
|
|
360
|
-
except (ValueError, TypeError):
|
|
361
|
-
pass
|
|
362
|
-
|
|
363
|
-
session_id = None
|
|
364
|
-
if self._session_manager:
|
|
365
|
-
session_id = self._session_manager.register_session(
|
|
366
|
-
external_id=external_id,
|
|
367
|
-
machine_id=machine_id,
|
|
368
|
-
project_id=project_id,
|
|
369
|
-
parent_session_id=parent_session_id,
|
|
370
|
-
jsonl_path=transcript_path,
|
|
371
|
-
source=cli_source,
|
|
372
|
-
project_path=cwd,
|
|
373
|
-
terminal_context=terminal_context,
|
|
374
|
-
workflow_name=workflow_name,
|
|
375
|
-
agent_depth=agent_depth_val,
|
|
376
|
-
)
|
|
377
|
-
|
|
378
|
-
# Step 2b: Mark parent session as expired after successful handoff
|
|
379
|
-
if parent_session_id and self._session_manager:
|
|
380
|
-
try:
|
|
381
|
-
self._session_manager.mark_session_expired(parent_session_id)
|
|
382
|
-
self.logger.debug(f"Marked parent session {parent_session_id} as expired")
|
|
383
|
-
except Exception as e:
|
|
384
|
-
self.logger.warning(f"Failed to mark parent session as expired: {e}")
|
|
385
|
-
|
|
386
|
-
# Step 2c: Auto-activate workflow if specified (for spawned agents)
|
|
387
|
-
if workflow_name and session_id:
|
|
388
|
-
self._auto_activate_workflow(workflow_name, session_id, cwd)
|
|
389
|
-
|
|
390
|
-
# Step 3: Track registered session
|
|
391
|
-
if transcript_path and self._session_coordinator:
|
|
392
|
-
try:
|
|
393
|
-
self._session_coordinator.register_session(external_id)
|
|
394
|
-
except Exception as e:
|
|
395
|
-
self.logger.error(f"Failed to setup session tracking: {e}", exc_info=True)
|
|
396
|
-
|
|
397
|
-
# Step 4: Update event metadata with the newly registered session_id
|
|
398
|
-
event.metadata["_platform_session_id"] = session_id
|
|
399
|
-
if parent_session_id:
|
|
400
|
-
event.metadata["_parent_session_id"] = parent_session_id
|
|
401
|
-
|
|
402
|
-
# Step 5: Register with Message Processor
|
|
403
|
-
if self._message_processor and transcript_path and session_id:
|
|
404
|
-
try:
|
|
405
|
-
self._message_processor.register_session(
|
|
406
|
-
session_id, transcript_path, source=cli_source
|
|
407
|
-
)
|
|
408
|
-
except Exception as e:
|
|
409
|
-
self.logger.warning(f"Failed to register session with message processor: {e}")
|
|
410
|
-
|
|
411
|
-
# Step 6: Execute lifecycle workflows
|
|
412
|
-
context_parts = [""]
|
|
413
|
-
wf_response = HookResponse(decision="allow", context="")
|
|
414
|
-
if self._workflow_handler:
|
|
415
|
-
try:
|
|
416
|
-
wf_response = self._workflow_handler.handle_all_lifecycles(event)
|
|
417
|
-
if wf_response.context:
|
|
418
|
-
context_parts.append(wf_response.context)
|
|
419
|
-
except Exception as e:
|
|
420
|
-
self.logger.warning(f"Workflow error: {e}")
|
|
421
|
-
|
|
422
|
-
if parent_session_id:
|
|
423
|
-
context_parts.append(f"Parent session: {parent_session_id}")
|
|
424
|
-
|
|
425
|
-
# Build system message (terminal display only)
|
|
426
|
-
# Fetch session to get seq_num for #N display
|
|
427
|
-
session_ref = session_id # fallback
|
|
428
|
-
if session_id and self._session_storage:
|
|
429
|
-
session_obj = self._session_storage.get(session_id)
|
|
430
|
-
if session_obj and session_obj.seq_num:
|
|
431
|
-
session_ref = f"#{session_obj.seq_num}"
|
|
432
|
-
# Format: "Gobby Session ID: #N" with usage hint
|
|
433
|
-
if session_ref and session_ref != session_id:
|
|
434
|
-
system_message = f"\nGobby Session ID: {session_ref}"
|
|
435
|
-
else:
|
|
436
|
-
system_message = f"\nGobby Session ID: {session_id}"
|
|
437
|
-
system_message += " <- Use this for MCP tool calls (session_id parameter)"
|
|
438
|
-
system_message += f"\nExternal ID: {external_id} (CLI-native, rarely needed)"
|
|
439
|
-
|
|
440
|
-
# Add active lifecycle workflows
|
|
441
|
-
if wf_response.metadata and "discovered_workflows" in wf_response.metadata:
|
|
442
|
-
wf_list = wf_response.metadata["discovered_workflows"]
|
|
443
|
-
if wf_list:
|
|
444
|
-
system_message += "\nActive workflows:"
|
|
445
|
-
for w in wf_list:
|
|
446
|
-
source = "project" if w["is_project"] else "global"
|
|
447
|
-
system_message += f"\n - {w['name']} ({source}, priority={w['priority']})"
|
|
448
|
-
|
|
449
|
-
if wf_response.system_message:
|
|
450
|
-
system_message += f"\n\n{wf_response.system_message}"
|
|
451
|
-
|
|
452
|
-
# Inject active task context if available
|
|
453
|
-
if event.task_id:
|
|
454
|
-
task_title = event.metadata.get("_task_title", "Unknown Task")
|
|
455
|
-
context_parts.append("\n## Active Task Context\n")
|
|
456
|
-
context_parts.append(f"You are working on task: {task_title} ({event.task_id})")
|
|
457
|
-
|
|
458
|
-
# Inject core skills if enabled (restoring from parent session if available)
|
|
459
|
-
skill_context = self._build_skill_injection_context(parent_session_id)
|
|
460
|
-
if skill_context:
|
|
461
|
-
context_parts.append(skill_context)
|
|
462
|
-
|
|
463
|
-
# Build metadata with terminal context (filter out nulls)
|
|
464
|
-
metadata: dict[str, Any] = {
|
|
465
|
-
"session_id": session_id,
|
|
466
|
-
"session_ref": session_ref,
|
|
467
|
-
"parent_session_id": parent_session_id,
|
|
468
|
-
"machine_id": machine_id,
|
|
469
|
-
"project_id": project_id,
|
|
470
|
-
"external_id": external_id,
|
|
471
|
-
"task_id": event.task_id,
|
|
472
|
-
}
|
|
473
|
-
if terminal_context:
|
|
474
|
-
# Only include non-null terminal values
|
|
475
|
-
for key, value in terminal_context.items():
|
|
476
|
-
if value is not None:
|
|
477
|
-
metadata[f"terminal_{key}"] = value
|
|
478
|
-
|
|
479
|
-
return HookResponse(
|
|
480
|
-
decision="allow",
|
|
481
|
-
context="\n".join(context_parts) if context_parts else None,
|
|
482
|
-
system_message=system_message,
|
|
483
|
-
metadata=metadata,
|
|
484
|
-
)
|
|
485
|
-
|
|
486
|
-
def handle_session_end(self, event: HookEvent) -> HookResponse:
|
|
487
|
-
"""Handle SESSION_END event."""
|
|
488
|
-
from gobby.tasks.commits import auto_link_commits
|
|
489
|
-
|
|
490
|
-
external_id = event.session_id
|
|
491
|
-
session_id = event.metadata.get("_platform_session_id")
|
|
492
|
-
|
|
493
|
-
if session_id:
|
|
494
|
-
self.logger.debug(f"SESSION_END: session {session_id}")
|
|
495
|
-
else:
|
|
496
|
-
self.logger.warning(f"SESSION_END: session_id not found for external_id={external_id}")
|
|
497
|
-
|
|
498
|
-
# If not in mapping, query database
|
|
499
|
-
if not session_id and external_id and self._session_manager:
|
|
500
|
-
self.logger.debug(f"external_id {external_id} not in mapping, querying database")
|
|
501
|
-
# Resolve context for lookup
|
|
502
|
-
machine_id = self._get_machine_id()
|
|
503
|
-
cwd = event.data.get("cwd")
|
|
504
|
-
project_id = self._resolve_project_id(event.data.get("project_id"), cwd)
|
|
505
|
-
# Lookup with full composite key
|
|
506
|
-
session_id = self._session_manager.lookup_session_id(
|
|
507
|
-
external_id,
|
|
508
|
-
source=event.source.value,
|
|
509
|
-
machine_id=machine_id,
|
|
510
|
-
project_id=project_id,
|
|
511
|
-
)
|
|
512
|
-
|
|
513
|
-
# Ensure session_id is available in event metadata for workflow actions
|
|
514
|
-
if session_id and not event.metadata.get("_platform_session_id"):
|
|
515
|
-
event.metadata["_platform_session_id"] = session_id
|
|
516
|
-
|
|
517
|
-
# Execute lifecycle workflow triggers
|
|
518
|
-
if self._workflow_handler:
|
|
519
|
-
try:
|
|
520
|
-
self._workflow_handler.handle_all_lifecycles(event)
|
|
521
|
-
except Exception as e:
|
|
522
|
-
self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)
|
|
523
|
-
|
|
524
|
-
# Auto-link commits made during this session to tasks
|
|
525
|
-
if session_id and self._session_storage and self._task_manager:
|
|
526
|
-
try:
|
|
527
|
-
session = self._session_storage.get(session_id)
|
|
528
|
-
if session:
|
|
529
|
-
cwd = event.data.get("cwd")
|
|
530
|
-
link_result = auto_link_commits(
|
|
531
|
-
task_manager=self._task_manager,
|
|
532
|
-
since=session.created_at,
|
|
533
|
-
cwd=cwd,
|
|
534
|
-
)
|
|
535
|
-
if link_result.total_linked > 0:
|
|
536
|
-
self.logger.info(
|
|
537
|
-
f"Auto-linked {link_result.total_linked} commits to tasks: "
|
|
538
|
-
f"{list(link_result.linked_tasks.keys())}"
|
|
539
|
-
)
|
|
540
|
-
except Exception as e:
|
|
541
|
-
self.logger.warning(f"Failed to auto-link session commits: {e}")
|
|
542
|
-
|
|
543
|
-
# Complete agent run if this is a terminal-mode agent session
|
|
544
|
-
if session_id and self._session_storage and self._session_coordinator:
|
|
545
|
-
try:
|
|
546
|
-
session = self._session_storage.get(session_id)
|
|
547
|
-
if session and session.agent_run_id:
|
|
548
|
-
self._session_coordinator.complete_agent_run(session)
|
|
549
|
-
except Exception as e:
|
|
550
|
-
self.logger.warning(f"Failed to complete agent run: {e}")
|
|
551
|
-
|
|
552
|
-
# Generate independent session summary file
|
|
553
|
-
if self._summary_file_generator:
|
|
554
|
-
try:
|
|
555
|
-
summary_input = {
|
|
556
|
-
"session_id": external_id,
|
|
557
|
-
"transcript_path": event.data.get("transcript_path"),
|
|
558
|
-
}
|
|
559
|
-
self._summary_file_generator.generate_session_summary(
|
|
560
|
-
session_id=session_id or external_id,
|
|
561
|
-
input_data=summary_input,
|
|
562
|
-
)
|
|
563
|
-
except Exception as e:
|
|
564
|
-
self.logger.error(f"Failed to generate failover summary: {e}")
|
|
565
|
-
|
|
566
|
-
# Unregister from message processor
|
|
567
|
-
if self._message_processor and (session_id or external_id):
|
|
568
|
-
try:
|
|
569
|
-
target_id = session_id or external_id
|
|
570
|
-
self._message_processor.unregister_session(target_id)
|
|
571
|
-
except Exception as e:
|
|
572
|
-
self.logger.warning(f"Failed to unregister session from message processor: {e}")
|
|
573
|
-
|
|
574
|
-
return HookResponse(decision="allow")
|
|
575
|
-
|
|
576
|
-
def _build_skill_injection_context(self, parent_session_id: str | None = None) -> str | None:
|
|
577
|
-
"""Build skill injection context for session-start.
|
|
578
|
-
|
|
579
|
-
Combines alwaysApply skills with skills restored from parent session.
|
|
580
|
-
|
|
581
|
-
Args:
|
|
582
|
-
parent_session_id: Optional parent session ID to restore skills from
|
|
583
|
-
|
|
584
|
-
Returns context string with available skills if injection is enabled,
|
|
585
|
-
or None if disabled.
|
|
586
|
-
"""
|
|
587
|
-
# Skip if no skill manager or config
|
|
588
|
-
if not self._skill_manager or not self._skills_config:
|
|
589
|
-
return None
|
|
590
|
-
|
|
591
|
-
# Check if injection is enabled
|
|
592
|
-
if not self._skills_config.inject_core_skills:
|
|
593
|
-
return None
|
|
594
|
-
|
|
595
|
-
# Check injection format
|
|
596
|
-
if self._skills_config.injection_format == "none":
|
|
597
|
-
return None
|
|
598
|
-
|
|
599
|
-
# Get alwaysApply skills
|
|
600
|
-
try:
|
|
601
|
-
core_skills = self._skill_manager.discover_core_skills()
|
|
602
|
-
always_apply_skills = [s for s in core_skills if s.is_always_apply()]
|
|
603
|
-
|
|
604
|
-
# Get restored skills from parent session
|
|
605
|
-
restored_skills = self._restore_skills_from_parent(parent_session_id)
|
|
606
|
-
|
|
607
|
-
# Combine: alwaysApply skills + any additional restored skills
|
|
608
|
-
skill_names = [s.name for s in always_apply_skills]
|
|
609
|
-
for skill_name in restored_skills:
|
|
610
|
-
if skill_name not in skill_names:
|
|
611
|
-
skill_names.append(skill_name)
|
|
612
|
-
|
|
613
|
-
if not skill_names:
|
|
614
|
-
return None
|
|
615
|
-
|
|
616
|
-
# Build context based on format
|
|
617
|
-
if self._skills_config.injection_format == "summary":
|
|
618
|
-
return (
|
|
619
|
-
"\n## Available Skills\n"
|
|
620
|
-
f"The following skills are always available: {', '.join(skill_names)}\n"
|
|
621
|
-
"Use the /skill-name syntax to invoke them."
|
|
622
|
-
)
|
|
623
|
-
elif self._skills_config.injection_format == "full":
|
|
624
|
-
parts = ["\n## Available Skills\n"]
|
|
625
|
-
# Build a map of always_apply skills for quick lookup
|
|
626
|
-
always_apply_map = {s.name: s for s in always_apply_skills}
|
|
627
|
-
# Iterate over combined skill_names list (always_apply + restored)
|
|
628
|
-
for skill_name in skill_names:
|
|
629
|
-
parts.append(f"### {skill_name}")
|
|
630
|
-
# Get description from always_apply skill if available
|
|
631
|
-
if skill_name in always_apply_map:
|
|
632
|
-
skill = always_apply_map[skill_name]
|
|
633
|
-
if skill.description:
|
|
634
|
-
parts.append(skill.description)
|
|
635
|
-
parts.append("")
|
|
636
|
-
return "\n".join(parts)
|
|
637
|
-
else:
|
|
638
|
-
return None
|
|
639
|
-
|
|
640
|
-
except Exception as e:
|
|
641
|
-
self.logger.warning(f"Failed to build skill injection context: {e}")
|
|
642
|
-
return None
|
|
643
|
-
|
|
644
|
-
def _restore_skills_from_parent(self, parent_session_id: str | None) -> list[str]:
|
|
645
|
-
"""Restore active skills from parent session's handoff context.
|
|
646
|
-
|
|
647
|
-
Args:
|
|
648
|
-
parent_session_id: Parent session ID to restore from
|
|
649
|
-
|
|
650
|
-
Returns:
|
|
651
|
-
List of skill names from the parent session
|
|
652
|
-
"""
|
|
653
|
-
if not parent_session_id or not self._session_storage:
|
|
654
|
-
return []
|
|
655
|
-
|
|
656
|
-
try:
|
|
657
|
-
parent = self._session_storage.get(parent_session_id)
|
|
658
|
-
if not parent:
|
|
659
|
-
return []
|
|
660
|
-
|
|
661
|
-
compact_md = getattr(parent, "compact_markdown", None)
|
|
662
|
-
if not compact_md:
|
|
663
|
-
return []
|
|
664
|
-
|
|
665
|
-
# Parse active skills from markdown
|
|
666
|
-
# Format: "### Active Skills\nSkills available: skill1, skill2, skill3"
|
|
667
|
-
import re
|
|
668
|
-
|
|
669
|
-
match = re.search(r"### Active Skills\s*\nSkills available:\s*([^\n]+)", compact_md)
|
|
670
|
-
if match:
|
|
671
|
-
skills_str = match.group(1).strip()
|
|
672
|
-
skills = [s.strip() for s in skills_str.split(",") if s.strip()]
|
|
673
|
-
self.logger.debug(f"Restored {len(skills)} skills from parent session")
|
|
674
|
-
return skills
|
|
675
|
-
|
|
676
|
-
return []
|
|
677
|
-
|
|
678
|
-
except Exception as e:
|
|
679
|
-
self.logger.warning(f"Failed to restore skills from parent: {e}")
|
|
680
|
-
return []
|
|
681
|
-
|
|
682
|
-
# ==================== AGENT HANDLERS ====================
|
|
683
|
-
|
|
684
|
-
def handle_before_agent(self, event: HookEvent) -> HookResponse:
    """Handle BEFORE_AGENT event (user prompt submit)."""
    prompt = event.data.get("prompt", "")
    transcript_path = event.data.get("transcript_path")
    session_id = event.metadata.get("_platform_session_id")

    context_parts: list[str] = []

    if session_id:
        self.logger.debug(f"BEFORE_AGENT: session {session_id}")
        self.logger.debug(f" Prompt: {prompt[:100]}...")

        normalized = prompt.strip().lower()
        is_terminal_command = normalized in ("/clear", "/exit")

        # Mark the session active again unless the user is clearing/exiting.
        if not is_terminal_command and self._session_manager:
            try:
                self._session_manager.update_session_status(session_id, "active")
                if self._session_storage:
                    self._session_storage.reset_transcript_processed(session_id)
            except Exception as e:
                self.logger.warning(f"Failed to update session status: {e}")

        # /clear and /exit: handoff is owned by the lifecycle workflows.
        if is_terminal_command and transcript_path:
            self.logger.debug(f"Detected {normalized} - lifecycle workflows handle handoff")

    # Run lifecycle workflow triggers; a non-allow decision wins outright.
    if self._workflow_handler:
        try:
            wf_response = self._workflow_handler.handle_all_lifecycles(event)
            if wf_response.context:
                context_parts.append(wf_response.context)
            if wf_response.decision != "allow":
                return wf_response
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(
        decision="allow",
        context="\n\n".join(context_parts) if context_parts else None,
    )
|
|
726
|
-
|
|
727
|
-
def handle_after_agent(self, event: HookEvent) -> HookResponse:
    """Handle AFTER_AGENT event."""
    session_id = event.metadata.get("_platform_session_id")
    cli_source = event.source.value

    if not session_id:
        self.logger.debug(f"AFTER_AGENT: cli={cli_source}")
    else:
        self.logger.debug(f"AFTER_AGENT: session {session_id}, cli={cli_source}")
        # The agent finished its turn; park the session until the next prompt.
        if self._session_manager:
            try:
                self._session_manager.update_session_status(session_id, "paused")
            except Exception as e:
                self.logger.warning(f"Failed to update session status: {e}")

    # Run lifecycle workflow triggers; surface the result if it blocks
    # or carries context to inject.
    if self._workflow_handler:
        try:
            wf_response = self._workflow_handler.handle_all_lifecycles(event)
            if wf_response.decision != "allow" or wf_response.context:
                return wf_response
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(decision="allow")
|
|
754
|
-
|
|
755
|
-
# ==================== TOOL HANDLERS ====================
|
|
756
|
-
|
|
757
|
-
def handle_before_tool(self, event: HookEvent) -> HookResponse:
    """Handle BEFORE_TOOL event."""
    tool_name = event.data.get("tool_name", "unknown")
    session_id = event.metadata.get("_platform_session_id")

    suffix = f", session {session_id}" if session_id else ""
    self.logger.debug(f"BEFORE_TOOL: {tool_name}{suffix}")

    context_parts: list[str] = []

    # Run lifecycle workflow triggers; a non-allow decision wins outright.
    if self._workflow_handler:
        try:
            wf_response = self._workflow_handler.handle_all_lifecycles(event)
            if wf_response.context:
                context_parts.append(wf_response.context)
            if wf_response.decision != "allow":
                return wf_response
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(
        decision="allow",
        context="\n\n".join(context_parts) if context_parts else None,
    )
|
|
785
|
-
|
|
786
|
-
def handle_after_tool(self, event: HookEvent) -> HookResponse:
    """Handle AFTER_TOOL event.

    Logs the tool outcome, records an edit high-water mark for sessions
    working on a claimed task, then runs lifecycle workflow triggers and
    returns their decision/context if any.
    """
    input_data = event.data
    tool_name = input_data.get("tool_name", "unknown")
    session_id = event.metadata.get("_platform_session_id")
    is_failure = event.metadata.get("is_failure", False)

    status = "FAIL" if is_failure else "OK"
    if session_id:
        self.logger.debug(f"AFTER_TOOL [{status}]: {tool_name}, session {session_id}")
        self._track_session_edit(session_id, tool_name, input_data, is_failure)
    else:
        self.logger.debug(f"AFTER_TOOL [{status}]: {tool_name}")

    context_parts = []

    # Execute lifecycle workflow triggers; a non-allow decision wins outright.
    if self._workflow_handler:
        try:
            wf_response = self._workflow_handler.handle_all_lifecycles(event)
            if wf_response.context:
                context_parts.append(wf_response.context)
            if wf_response.decision != "allow":
                return wf_response
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(
        decision="allow",
        context="\n\n".join(context_parts) if context_parts else None,
    )

def _track_session_edit(
    self, session_id: str, tool_name: str, input_data: dict, is_failure: bool
) -> None:
    """Mark the session as having edits (high-water mark) when appropriate.

    Applies only when the tool succeeded, is one of EDIT_TOOLS, did not
    touch .gobby/ internal files (tasks.jsonl, memories.jsonl, etc.), and
    the session currently has a claimed in-progress task. Failures are
    logged as warnings and never propagate.
    """
    tool_input = input_data.get("tool_input", {})
    file_path = tool_input.get("file_path", "")
    is_gobby_internal = "/.gobby/" in file_path or file_path.startswith(".gobby/")

    if (
        is_failure
        or not tool_name
        or tool_name.lower() not in EDIT_TOOLS
        or is_gobby_internal
        or not self._session_storage
        or not self._task_manager
    ):
        return

    try:
        # Check if session has any claimed tasks in progress
        claimed_tasks = self._task_manager.list_tasks(
            assignee=session_id, status="in_progress", limit=1
        )
        if claimed_tasks:
            self._session_storage.mark_had_edits(session_id)
            self.logger.debug(f"Marked session {session_id} as had_edits")
    except Exception as e:
        self.logger.warning(f"Failed to track edit history: {e}")
|
|
843
|
-
|
|
844
|
-
# ==================== STOP HANDLER ====================
|
|
845
|
-
|
|
846
|
-
def handle_stop(self, event: HookEvent) -> HookResponse:
    """Handle STOP event (Claude Code only)."""
    session_id = event.metadata.get("_platform_session_id")
    cli_source = event.source.value

    self.logger.debug(f"STOP: session {session_id}, cli={cli_source}")

    # on_stop lifecycle workflows may block the stop or inject context.
    if self._workflow_handler:
        try:
            wf_response = self._workflow_handler.handle_all_lifecycles(event)
            if wf_response.decision != "allow" or wf_response.context:
                return wf_response
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(decision="allow")
|
|
865
|
-
|
|
866
|
-
# ==================== COMPACT HANDLER ====================
|
|
867
|
-
|
|
868
|
-
def handle_pre_compact(self, event: HookEvent) -> HookResponse:
    """Handle PRE_COMPACT event.

    Note: Gemini fires PreCompress constantly during normal operation,
    unlike Claude which fires it only when approaching context limits.
    We skip handoff logic and workflow execution for Gemini to avoid
    excessive state changes and workflow interruptions.
    """
    from gobby.hooks.events import SessionSource

    trigger = event.data.get("trigger", "auto")
    session_id = event.metadata.get("_platform_session_id")

    # Skip handoff logic for Gemini - it fires PreCompress too frequently
    if event.source == SessionSource.GEMINI:
        self.logger.debug(f"PRE_COMPACT ({trigger}): session {session_id} [Gemini - skipped]")
        return HookResponse(decision="allow")

    if session_id:
        self.logger.debug(f"PRE_COMPACT ({trigger}): session {session_id}")
        # Mark session as handoff_ready so it can be found as parent after compact
        if self._session_manager:
            try:
                self._session_manager.update_session_status(session_id, "handoff_ready")
            except Exception as e:
                # Status bookkeeping must not break the compact hook; matches
                # the error handling used by the other session-status updates.
                self.logger.warning(f"Failed to update session status: {e}")
    else:
        self.logger.debug(f"PRE_COMPACT ({trigger})")

    # Execute lifecycle workflows
    if self._workflow_handler:
        try:
            return self._workflow_handler.handle_all_lifecycles(event)
        except Exception as e:
            self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)

    return HookResponse(decision="allow")
|
|
902
|
-
|
|
903
|
-
# ==================== SUBAGENT HANDLERS ====================
|
|
904
|
-
|
|
905
|
-
def handle_subagent_start(self, event: HookEvent) -> HookResponse:
    """Handle SUBAGENT_START event."""
    session_id = event.metadata.get("_platform_session_id")
    agent_id = event.data.get("agent_id")
    subagent_id = event.data.get("subagent_id")

    # Assemble the debug line from whichever identifiers are present.
    parts = [f"SUBAGENT_START: session {session_id}" if session_id else "SUBAGENT_START"]
    if agent_id:
        parts.append(f"agent_id={agent_id}")
    if subagent_id:
        parts.append(f"subagent_id={subagent_id}")
    self.logger.debug(", ".join(parts))

    return HookResponse(decision="allow")
|
|
920
|
-
|
|
921
|
-
def handle_subagent_stop(self, event: HookEvent) -> HookResponse:
    """Handle SUBAGENT_STOP event."""
    session_id = event.metadata.get("_platform_session_id")

    message = f"SUBAGENT_STOP: session {session_id}" if session_id else "SUBAGENT_STOP"
    self.logger.debug(message)

    return HookResponse(decision="allow")
|
|
931
|
-
|
|
932
|
-
# ==================== NOTIFICATION HANDLER ====================
|
|
933
|
-
|
|
934
|
-
def handle_notification(self, event: HookEvent) -> HookResponse:
    """Handle NOTIFICATION event."""
    input_data = event.data
    # Platforms disagree on the field name; take the first truthy candidate.
    notification_type = next(
        (
            value
            for key in ("notification_type", "notificationType", "type")
            if (value := input_data.get(key))
        ),
        "general",
    )
    session_id = event.metadata.get("_platform_session_id")

    if not session_id:
        self.logger.debug(f"NOTIFICATION ({notification_type})")
    else:
        self.logger.debug(f"NOTIFICATION ({notification_type}): session {session_id}")
        # Notifications imply the agent is waiting on the user; pause the session.
        if self._session_manager:
            try:
                self._session_manager.update_session_status(session_id, "paused")
            except Exception as e:
                self.logger.warning(f"Failed to update session status: {e}")

    return HookResponse(decision="allow")
|
|
956
|
-
|
|
957
|
-
# ==================== PERMISSION HANDLER ====================
|
|
958
|
-
|
|
959
|
-
def handle_permission_request(self, event: HookEvent) -> HookResponse:
    """Handle PERMISSION_REQUEST event (Claude Code only)."""
    permission_type = event.data.get("permission_type", "unknown")
    session_id = event.metadata.get("_platform_session_id")

    suffix = f": session {session_id}" if session_id else ""
    self.logger.debug(f"PERMISSION_REQUEST ({permission_type}){suffix}")

    return HookResponse(decision="allow")
|
|
971
|
-
|
|
972
|
-
# ==================== GEMINI-ONLY HANDLERS ====================
|
|
973
|
-
|
|
974
|
-
def handle_before_tool_selection(self, event: HookEvent) -> HookResponse:
    """Handle BEFORE_TOOL_SELECTION event (Gemini only)."""
    session_id = event.metadata.get("_platform_session_id")

    suffix = f": session {session_id}" if session_id else ""
    self.logger.debug(f"BEFORE_TOOL_SELECTION{suffix}")

    return HookResponse(decision="allow")
|
|
984
|
-
|
|
985
|
-
def handle_before_model(self, event: HookEvent) -> HookResponse:
    """Handle BEFORE_MODEL event (Gemini only)."""
    session_id = event.metadata.get("_platform_session_id")

    suffix = f": session {session_id}" if session_id else ""
    self.logger.debug(f"BEFORE_MODEL{suffix}")

    return HookResponse(decision="allow")
|
|
995
|
-
|
|
996
|
-
def handle_after_model(self, event: HookEvent) -> HookResponse:
    """Handle AFTER_MODEL event (Gemini only)."""
    session_id = event.metadata.get("_platform_session_id")

    suffix = f": session {session_id}" if session_id else ""
    self.logger.debug(f"AFTER_MODEL{suffix}")

    return HookResponse(decision="allow")
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
__all__ = ["EventHandlers"]
|