gobby 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/claude_code.py +3 -26
- gobby/app_context.py +59 -0
- gobby/cli/utils.py +5 -17
- gobby/config/features.py +0 -20
- gobby/config/tasks.py +4 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +87 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +573 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/hook_manager.py +2 -0
- gobby/llm/claude.py +377 -42
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/registries.py +14 -0
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/tools/artifacts.py +3 -3
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
- gobby/mcp_proxy/tools/workflows/_query.py +207 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +23 -10
- gobby/servers/http.py +186 -149
- gobby/servers/routes/admin.py +12 -0
- gobby/servers/routes/mcp/endpoints/execution.py +15 -7
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/sessions/analyzer.py +2 -2
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/migrations.py +25 -2
- gobby/storage/skills.py +47 -7
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +5 -0
- gobby/workflows/context_actions.py +21 -24
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +96 -0
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/engine.py +6 -3
- gobby/workflows/lifecycle_evaluator.py +2 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/RECORD +61 -45
- gobby/hooks/event_handlers.py +0 -1008
- gobby/mcp_proxy/tools/workflows.py +0 -1023
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
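The largest change in 0.2.9 is structural: the monolithic gobby/hooks/event_handlers.py (-1008 lines) and gobby/mcp_proxy/tools/workflows.py (-1023 lines) are removed and replaced by the new event_handlers/ and workflows/ packages listed above. The two hunks reproduced below show the new session and miscellaneous event-handler mixins, both built on a shared EventHandlersBase. A minimal, self-contained sketch of that mixin-over-shared-base pattern follows; apart from the two mixin names visible in this diff, the class and attribute names here are stand-ins, not taken from the wheel.

import logging


class EventHandlersBase:
    # Stand-in for gobby.hooks.event_handlers._base.EventHandlersBase:
    # holds the shared state (logger, managers) that every mixin reads.
    def __init__(self, logger: logging.Logger, session_manager=None) -> None:
        self.logger = logger
        self._session_manager = session_manager


class SessionEventHandlerMixin(EventHandlersBase):
    def handle_session_start(self, event) -> None:  # real method takes a HookEvent
        self.logger.debug("SESSION_START")


class MiscEventHandlerMixin(EventHandlersBase):
    def handle_notification(self, event) -> None:
        self.logger.debug("NOTIFICATION")


class EventHandlers(SessionEventHandlerMixin, MiscEventHandlerMixin):
    """One object exposing every handle_* method; state lives in the base."""


handlers = EventHandlers(logging.getLogger("gobby.hooks"))
handlers.handle_notification(event=None)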
gobby/hooks/event_handlers/_misc.py (new file)
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from gobby.hooks.event_handlers._base import EventHandlersBase
+from gobby.hooks.events import HookEvent, HookResponse
+
+
+class MiscEventHandlerMixin(EventHandlersBase):
+    """Mixin for handling miscellaneous events."""
+
+    def handle_notification(self, event: HookEvent) -> HookResponse:
+        """Handle NOTIFICATION event."""
+        input_data = event.data
+        notification_type = (
+            input_data.get("notification_type")
+            or input_data.get("notificationType")
+            or input_data.get("type")
+            or "general"
+        )
+        session_id = event.metadata.get("_platform_session_id")
+
+        if session_id:
+            self.logger.debug(f"NOTIFICATION ({notification_type}): session {session_id}")
+            if self._session_manager:
+                try:
+                    self._session_manager.update_session_status(session_id, "paused")
+                except Exception as e:
+                    self.logger.warning(f"Failed to update session status: {e}")
+        else:
+            self.logger.debug(f"NOTIFICATION ({notification_type})")
+
+        return HookResponse(decision="allow")
+
+    def handle_permission_request(self, event: HookEvent) -> HookResponse:
+        """Handle PERMISSION_REQUEST event (Claude Code only)."""
+        input_data = event.data
+        session_id = event.metadata.get("_platform_session_id")
+        permission_type = input_data.get("permission_type", "unknown")
+
+        if session_id:
+            self.logger.debug(f"PERMISSION_REQUEST ({permission_type}): session {session_id}")
+        else:
+            self.logger.debug(f"PERMISSION_REQUEST ({permission_type})")
+
+        return HookResponse(decision="allow")
+
+    def handle_before_model(self, event: HookEvent) -> HookResponse:
+        """Handle BEFORE_MODEL event (Gemini only)."""
+        session_id = event.metadata.get("_platform_session_id")
+
+        if session_id:
+            self.logger.debug(f"BEFORE_MODEL: session {session_id}")
+        else:
+            self.logger.debug("BEFORE_MODEL")
+
+        return HookResponse(decision="allow")
+
+    def handle_after_model(self, event: HookEvent) -> HookResponse:
+        """Handle AFTER_MODEL event (Gemini only)."""
+        session_id = event.metadata.get("_platform_session_id")
+
+        if session_id:
+            self.logger.debug(f"AFTER_MODEL: session {session_id}")
+        else:
+            self.logger.debug("AFTER_MODEL")
+
+        return HookResponse(decision="allow")
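A rough usage sketch for the mixin above. HookEvent's constructor is not part of this diff, so a stand-in object carrying only the attributes handle_notification reads (data and metadata) is used, and the stub subclass supplies the two base attributes the method touches; it also assumes HookResponse exposes its decision as an attribute.

import logging
from types import SimpleNamespace

from gobby.hooks.event_handlers._misc import MiscEventHandlerMixin


class _StubHandler(MiscEventHandlerMixin):
    # EventHandlersBase normally wires these up; stubbed here for the sketch.
    def __init__(self) -> None:
        self.logger = logging.getLogger("sketch")
        self._session_manager = None  # skip the update_session_status branch


event = SimpleNamespace(
    data={"notification_type": "permission_prompt"},
    metadata={"_platform_session_id": "sess-123"},
)
response = _StubHandler().handle_notification(event)
assert response.decision == "allow"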
gobby/hooks/event_handlers/_session.py (new file)
@@ -0,0 +1,573 @@
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING, Any
+
+from gobby.hooks.event_handlers._base import EventHandlersBase
+from gobby.hooks.events import HookEvent, HookResponse
+
+if TYPE_CHECKING:
+    from gobby.storage.sessions import Session
+
+
+class SessionEventHandlerMixin(EventHandlersBase):
+    """Mixin for handling session-related events."""
+
+    def handle_session_start(self, event: HookEvent) -> HookResponse:
+        """
+        Handle SESSION_START event.
+
+        Register session and execute session-handoff workflow.
+        """
+        external_id = event.session_id
+        input_data = event.data
+        transcript_path = input_data.get("transcript_path")
+        cli_source = event.source.value
+        cwd = input_data.get("cwd")
+        session_source = input_data.get("source", "startup")
+
+        # Resolve project_id (auto-creates if needed)
+        project_id = self._resolve_project_id(input_data.get("project_id"), cwd)
+        # Always use Gobby's machine_id for cross-CLI consistency
+        machine_id = self._get_machine_id()
+
+        self.logger.debug(
+            f"SESSION_START: cli={cli_source}, project={project_id}, source={session_source}"
+        )
+
+        # Step 0: Check if this is a pre-created session (terminal mode agent)
+        # When we spawn an agent in terminal mode, we pass --session-id <internal_id>
+        # to Claude, so external_id here might actually be our internal session ID
+        existing_session = None
+        if self._session_storage:
+            try:
+                # Try to find by internal ID first (terminal mode case)
+                existing_session = self._session_storage.get(external_id)
+                if existing_session:
+                    return self._handle_pre_created_session(
+                        existing_session=existing_session,
+                        external_id=external_id,
+                        transcript_path=transcript_path,
+                        cli_source=cli_source,
+                        event=event,
+                        cwd=cwd,
+                    )
+            except Exception as e:
+                self.logger.debug(f"No pre-created session found: {e}")
+
+        # Step 1: Find parent session
+        # Check env vars first (spawned agent case), then handoff (source='clear')
+        parent_session_id = input_data.get("parent_session_id")
+        workflow_name = input_data.get("workflow_name")
+        agent_depth = input_data.get("agent_depth")
+
+        if not parent_session_id and session_source == "clear" and self._session_storage:
+            try:
+                parent = self._session_storage.find_parent(
+                    machine_id=machine_id,
+                    project_id=project_id,
+                    source=cli_source,
+                    status="handoff_ready",
+                )
+                if parent:
+                    parent_session_id = parent.id
+                    self.logger.debug(f"Found parent session: {parent_session_id}")
+            except Exception as e:
+                self.logger.warning(f"Error finding parent session: {e}")
+
+        # Step 2: Register new session with parent if found
+        # Extract terminal context (injected by hook_dispatcher for terminal correlation)
+        terminal_context = input_data.get("terminal_context")
+        # Parse agent_depth as int if provided
+        agent_depth_val = 0
+        if agent_depth:
+            try:
+                agent_depth_val = int(agent_depth)
+            except (ValueError, TypeError):
+                pass
+
+        session_id = None
+        if self._session_manager:
+            session_id = self._session_manager.register_session(
+                external_id=external_id,
+                machine_id=machine_id,
+                project_id=project_id,
+                parent_session_id=parent_session_id,
+                jsonl_path=transcript_path,
+                source=cli_source,
+                project_path=cwd,
+                terminal_context=terminal_context,
+                workflow_name=workflow_name,
+                agent_depth=agent_depth_val,
+            )
+
+        # Step 2b: Mark parent session as expired after successful handoff
+        if parent_session_id and self._session_manager:
+            try:
+                self._session_manager.mark_session_expired(parent_session_id)
+                self.logger.debug(f"Marked parent session {parent_session_id} as expired")
+            except Exception as e:
+                self.logger.warning(f"Failed to mark parent session as expired: {e}")
+
+        # Step 2c: Auto-activate workflow if specified (for spawned agents)
+        if workflow_name and session_id:
+            self._auto_activate_workflow(workflow_name, session_id, cwd)
+
+        # Step 3: Track registered session
+        if transcript_path and self._session_coordinator:
+            try:
+                self._session_coordinator.register_session(external_id)
+            except Exception as e:
+                self.logger.error(f"Failed to setup session tracking: {e}", exc_info=True)
+
+        # Step 4: Update event metadata with the newly registered session_id
+        event.metadata["_platform_session_id"] = session_id
+        if parent_session_id:
+            event.metadata["_parent_session_id"] = parent_session_id
+
+        # Step 5: Register with Message Processor
+        if self._message_processor and transcript_path and session_id:
+            try:
+                self._message_processor.register_session(
+                    session_id, transcript_path, source=cli_source
+                )
+            except Exception as e:
+                self.logger.warning(f"Failed to register session with message processor: {e}")
+
+        # Step 6: Execute lifecycle workflows
+        wf_response = HookResponse(decision="allow", context="")
+        if self._workflow_handler:
+            try:
+                wf_response = self._workflow_handler.handle_all_lifecycles(event)
+            except Exception as e:
+                self.logger.warning(f"Workflow error: {e}")
+
+        # Build additional context (task and skill injection)
+        additional_context: list[str] = []
+        if event.task_id:
+            task_title = event.metadata.get("_task_title", "Unknown Task")
+            additional_context.append("\n## Active Task Context\n")
+            additional_context.append(f"You are working on task: {task_title} ({event.task_id})")
+
+        skill_context = self._build_skill_injection_context(parent_session_id)
+        if skill_context:
+            additional_context.append(skill_context)
+
+        # Fetch session to get seq_num for #N display
+        session_obj = None
+        if session_id and self._session_storage:
+            session_obj = self._session_storage.get(session_id)
+
+        return self._compose_session_response(
+            session=session_obj,
+            wf_response=wf_response,
+            session_id=session_id,
+            external_id=external_id,
+            parent_session_id=parent_session_id,
+            machine_id=machine_id,
+            project_id=project_id,
+            task_id=event.task_id,
+            additional_context=additional_context,
+            terminal_context=terminal_context,
+        )
+
+    def handle_session_end(self, event: HookEvent) -> HookResponse:
+        """Handle SESSION_END event."""
+        from gobby.tasks.commits import auto_link_commits
+
+        external_id = event.session_id
+        session_id = event.metadata.get("_platform_session_id")
+
+        if session_id:
+            self.logger.debug(f"SESSION_END: session {session_id}")
+        else:
+            self.logger.warning(f"SESSION_END: session_id not found for external_id={external_id}")
+
+        # If not in mapping, query database
+        if not session_id and external_id and self._session_manager:
+            self.logger.debug(f"external_id {external_id} not in mapping, querying database")
+            # Resolve context for lookup
+            machine_id = self._get_machine_id()
+            cwd = event.data.get("cwd")
+            project_id = self._resolve_project_id(event.data.get("project_id"), cwd)
+            # Lookup with full composite key
+            session_id = self._session_manager.lookup_session_id(
+                external_id,
+                source=event.source.value,
+                machine_id=machine_id,
+                project_id=project_id,
+            )
+
+        # Ensure session_id is available in event metadata for workflow actions
+        if session_id and not event.metadata.get("_platform_session_id"):
+            event.metadata["_platform_session_id"] = session_id
+
+        # Execute lifecycle workflow triggers
+        if self._workflow_handler:
+            try:
+                self._workflow_handler.handle_all_lifecycles(event)
+            except Exception as e:
+                self.logger.error(f"Failed to execute lifecycle workflows: {e}", exc_info=True)
+
+        # Auto-link commits made during this session to tasks
+        if session_id and self._session_storage and self._task_manager:
+            try:
+                session = self._session_storage.get(session_id)
+                if session:
+                    cwd = event.data.get("cwd")
+                    link_result = auto_link_commits(
+                        task_manager=self._task_manager,
+                        since=session.created_at,
+                        cwd=cwd,
+                    )
+                    if link_result.total_linked > 0:
+                        self.logger.info(
+                            f"Auto-linked {link_result.total_linked} commits to tasks: "
+                            f"{list(link_result.linked_tasks.keys())}"
+                        )
+            except Exception as e:
+                self.logger.warning(f"Failed to auto-link session commits: {e}")
+
+        # Complete agent run if this is a terminal-mode agent session
+        if session_id and self._session_storage and self._session_coordinator:
+            try:
+                session = self._session_storage.get(session_id)
+                if session and session.agent_run_id:
+                    self._session_coordinator.complete_agent_run(session)
+            except Exception as e:
+                self.logger.warning(f"Failed to complete agent run: {e}")
+
+        # Generate independent session summary file
+        if self._summary_file_generator:
+            try:
+                summary_input = {
+                    "session_id": external_id,
+                    "transcript_path": event.data.get("transcript_path"),
+                }
+                self._summary_file_generator.generate_session_summary(
+                    session_id=session_id or external_id,
+                    input_data=summary_input,
+                )
+            except Exception as e:
+                self.logger.error(f"Failed to generate failover summary: {e}")
+
+        # Unregister from message processor
+        if self._message_processor and (session_id or external_id):
+            try:
+                target_id = session_id or external_id
+                self._message_processor.unregister_session(target_id)
+            except Exception as e:
+                self.logger.warning(f"Failed to unregister session from message processor: {e}")
+
+        return HookResponse(decision="allow")
+
+    def _handle_pre_created_session(
+        self,
+        existing_session: Session,
+        external_id: str,
+        transcript_path: str | None,
+        cli_source: str,
+        event: HookEvent,
+        cwd: str | None,
+    ) -> HookResponse:
+        """Handle session start for a pre-created session (terminal mode agent).
+
+        Args:
+            existing_session: Pre-created session object
+            external_id: External (CLI-native) session ID
+            transcript_path: Path to transcript file
+            cli_source: CLI source (e.g., "claude-code")
+            event: Hook event
+            cwd: Current working directory
+
+        Returns:
+            HookResponse for the pre-created session
+        """
+        self.logger.info(f"Found pre-created session {external_id}, updating instead of creating")
+
+        # Update the session with actual runtime info
+        if self._session_storage:
+            self._session_storage.update(
+                session_id=existing_session.id,
+                jsonl_path=transcript_path,
+                status="active",
+            )
+
+        session_id = existing_session.id
+        parent_session_id = existing_session.parent_session_id
+        machine_id = self._get_machine_id()
+
+        # Track registered session
+        if transcript_path and self._session_coordinator:
+            try:
+                self._session_coordinator.register_session(external_id)
+            except Exception as e:
+                self.logger.error(f"Failed to setup session tracking: {e}")
+
+        # Start the agent run if this is a terminal-mode agent session
+        if existing_session.agent_run_id and self._session_coordinator:
+            try:
+                self._session_coordinator.start_agent_run(existing_session.agent_run_id)
+            except Exception as e:
+                self.logger.warning(f"Failed to start agent run: {e}")
+
+        # Auto-activate workflow if specified for this session
+        if existing_session.workflow_name and session_id:
+            self._auto_activate_workflow(existing_session.workflow_name, session_id, cwd)
+
+        # Update event metadata
+        event.metadata["_platform_session_id"] = session_id
+
+        # Register with Message Processor
+        if self._message_processor and transcript_path:
+            try:
+                self._message_processor.register_session(
+                    session_id, transcript_path, source=cli_source
+                )
+            except Exception as e:
+                self.logger.warning(f"Failed to register with message processor: {e}")
+
+        # Execute lifecycle workflows
+        wf_response = HookResponse(decision="allow", context="")
+        if self._workflow_handler:
+            try:
+                wf_response = self._workflow_handler.handle_all_lifecycles(event)
+            except Exception as e:
+                self.logger.warning(f"Workflow error: {e}")
+
+        return self._compose_session_response(
+            session=existing_session,
+            wf_response=wf_response,
+            session_id=session_id,
+            external_id=external_id,
+            parent_session_id=parent_session_id,
+            machine_id=machine_id,
+            project_id=existing_session.project_id,
+            task_id=event.task_id,
+            is_pre_created=True,
+        )
+
+    def _compose_session_response(
+        self,
+        session: Session | None,
+        wf_response: HookResponse,
+        session_id: str | None,
+        external_id: str,
+        parent_session_id: str | None,
+        machine_id: str,
+        project_id: str | None = None,
+        task_id: str | None = None,
+        additional_context: list[str] | None = None,
+        is_pre_created: bool = False,
+        terminal_context: dict[str, Any] | None = None,
+    ) -> HookResponse:
+        """Build HookResponse for session start.
+
+        Shared helper that builds the system message, context, and metadata
+        for both pre-created and newly-created sessions.
+
+        Args:
+            session: Session object (used for seq_num)
+            wf_response: Response from workflow handler
+            session_id: Session ID
+            external_id: External (CLI-native) session ID
+            parent_session_id: Parent session ID if any
+            machine_id: Machine ID
+            project_id: Project ID
+            task_id: Task ID if any
+            additional_context: Additional context strings to append (e.g., task/skill context)
+            is_pre_created: Whether this is a pre-created session
+            terminal_context: Terminal context dict to add to metadata
+
+        Returns:
+            HookResponse with system_message, context, and metadata
+        """
+        # Build context_parts
+        context_parts: list[str] = []
+        if wf_response.context:
+            context_parts.append(wf_response.context)
+        if parent_session_id:
+            context_parts.append(f"Parent session: {parent_session_id}")
+        if additional_context:
+            context_parts.extend(additional_context)
+
+        # Compute session_ref from session object or fallback to session_id
+        session_ref = session_id
+        if session and session.seq_num:
+            session_ref = f"#{session.seq_num}"
+
+        # Build system message (terminal display only)
+        if session_ref and session_ref != session_id:
+            system_message = f"\nGobby Session ID: {session_ref}"
+        else:
+            system_message = f"\nGobby Session ID: {session_id}"
+        system_message += " <- Use this for MCP tool calls (session_id parameter)"
+        system_message += f"\nExternal ID: {external_id} (CLI-native, rarely needed)"
+
+        # Add active lifecycle workflows
+        if wf_response.metadata and "discovered_workflows" in wf_response.metadata:
+            wf_list = wf_response.metadata["discovered_workflows"]
+            if wf_list:
+                system_message += "\nActive workflows:"
+                for w in wf_list:
+                    source = "project" if w["is_project"] else "global"
+                    system_message += f"\n - {w['name']} ({source}, priority={w['priority']})"
+
+        if wf_response.system_message:
+            system_message += f"\n\n{wf_response.system_message}"
+
+        # Build metadata
+        metadata: dict[str, Any] = {
+            "session_id": session_id,
+            "session_ref": session_ref,
+            "parent_session_id": parent_session_id,
+            "machine_id": machine_id,
+            "project_id": project_id,
+            "external_id": external_id,
+            "task_id": task_id,
+        }
+        if is_pre_created:
+            metadata["is_pre_created"] = True
+        if terminal_context:
+            # Only include non-null terminal values
+            for key, value in terminal_context.items():
+                if value is not None:
+                    metadata[f"terminal_{key}"] = value
+
+        final_context = "\n".join(context_parts) if context_parts else None
+
+        # Debug: echo additionalContext to system_message if enabled
+        # Workflow variable takes precedence over config
+        debug_echo = False
+        workflow_vars = (wf_response.metadata or {}).get("workflow_variables", {})
+        if workflow_vars.get("debug_echo_context") is not None:
+            debug_echo = bool(workflow_vars.get("debug_echo_context"))
+        elif self._workflow_config and self._workflow_config.debug_echo_context:
+            debug_echo = True
+
+        if debug_echo and final_context:
+            system_message += f"\n\n[DEBUG additionalContext]\n{final_context}"
+
+        return HookResponse(
+            decision="allow",
+            context=final_context,
+            system_message=system_message,
+            metadata=metadata,
+        )
+
+    def _build_skill_injection_context(self, parent_session_id: str | None = None) -> str | None:
+        """Build skill injection context for session-start.
+
+        Combines alwaysApply skills with skills restored from parent session.
+        Uses per-skill injection_format to control how each skill is injected:
+        - "summary": name + description only
+        - "full" or "content": name + description + full content
+
+        Args:
+            parent_session_id: Optional parent session ID to restore skills from
+
+        Returns context string with available skills if injection is enabled,
+        or None if disabled.
+        """
+        # Skip if no skill manager or config
+        if not self._skill_manager or not self._skills_config:
+            return None
+
+        # Check if injection is enabled
+        if not self._skills_config.inject_core_skills:
+            return None
+
+        # Check injection format (global config level)
+        if self._skills_config.injection_format == "none":
+            return None
+
+        # Get alwaysApply skills (efficiently via column query)
+        try:
+            always_apply_skills = self._skill_manager.discover_core_skills()
+
+            # Get restored skills from parent session
+            restored_skills = self._restore_skills_from_parent(parent_session_id)
+
+            # Build a map of always_apply skills for quick lookup
+            always_apply_map = {s.name: s for s in always_apply_skills}
+
+            # Combine: alwaysApply skills + any additional restored skills
+            skill_names = [s.name for s in always_apply_skills]
+            for skill_name in restored_skills:
+                if skill_name not in skill_names:
+                    skill_names.append(skill_name)
+
+            if not skill_names:
+                return None
+
+            # Build context with per-skill injection format
+            parts = ["\n## Available Skills\n"]
+
+            for skill_name in skill_names:
+                skill = always_apply_map.get(skill_name)
+                if not skill:
+                    # Restored skill not in always_apply - just list the name
+                    parts.append(f"- **{skill_name}**")
+                    continue
+
+                # Determine injection format for this skill
+                # Use per-skill injection_format, fallback to global config
+                skill_format = skill.injection_format or self._skills_config.injection_format
+
+                if skill_format in ("full", "content"):
+                    # Full injection: name + description + content
+                    parts.append(f"### {skill_name}")
+                    if skill.description:
+                        parts.append(f"*{skill.description}*\n")
+                    if skill.content:
+                        parts.append(skill.content)
+                    parts.append("")
+                else:
+                    # Summary injection: name + description only
+                    if skill.description:
+                        parts.append(f"- **{skill_name}**: {skill.description}")
+                    else:
+                        parts.append(f"- **{skill_name}**")
+
+            return "\n".join(parts)
+
+        except Exception as e:
+            self.logger.warning(f"Failed to build skill injection context: {e}")
+            return None
+
+    def _restore_skills_from_parent(self, parent_session_id: str | None) -> list[str]:
+        """Restore active skills from parent session's handoff context.
+
+        Args:
+            parent_session_id: Parent session ID to restore from
+
+        Returns:
+            List of skill names from the parent session
+        """
+        if not parent_session_id or not self._session_storage:
+            return []
+
+        try:
+            parent = self._session_storage.get(parent_session_id)
+            if not parent:
+                return []
+
+            compact_md = getattr(parent, "compact_markdown", None)
+            if not compact_md:
+                return []
+
+            # Parse active skills from markdown
+            # Format: "### Active Skills\nSkills available: skill1, skill2, skill3"
+
+            match = re.search(r"### Active Skills\s*\nSkills available:\s*([^\n]+)", compact_md)
+            if match:
+                skills_str = match.group(1).strip()
+                skills = [s.strip() for s in skills_str.split(",") if s.strip()]
+                self.logger.debug(f"Restored {len(skills)} skills from parent session")
+                return skills
+
+            return []
+
+        except Exception as e:
+            self.logger.warning(f"Failed to restore skills from parent: {e}")
+            return []
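For reference, the handoff round-trip that _restore_skills_from_parent relies on: the parent session's compact_markdown is expected to carry an "### Active Skills" block, which the regex above parses back into a list of skill names. A self-contained sketch with illustrative markdown (the skill names are made up):

import re

compact_md = (
    "## Handoff Summary\n"
    "### Active Skills\n"
    "Skills available: code-review, release-notes, db-migrations\n"
)

# Same pattern used in _restore_skills_from_parent above.
match = re.search(r"### Active Skills\s*\nSkills available:\s*([^\n]+)", compact_md)
skills = [s.strip() for s in match.group(1).split(",") if s.strip()] if match else []
print(skills)  # ['code-review', 'release-notes', 'db-migrations']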