gobby 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/claude_code.py +3 -26
  3. gobby/app_context.py +59 -0
  4. gobby/cli/utils.py +5 -17
  5. gobby/config/features.py +0 -20
  6. gobby/config/tasks.py +4 -0
  7. gobby/hooks/event_handlers/__init__.py +155 -0
  8. gobby/hooks/event_handlers/_agent.py +175 -0
  9. gobby/hooks/event_handlers/_base.py +87 -0
  10. gobby/hooks/event_handlers/_misc.py +66 -0
  11. gobby/hooks/event_handlers/_session.py +573 -0
  12. gobby/hooks/event_handlers/_tool.py +196 -0
  13. gobby/hooks/hook_manager.py +2 -0
  14. gobby/llm/claude.py +377 -42
  15. gobby/mcp_proxy/importer.py +4 -41
  16. gobby/mcp_proxy/manager.py +13 -3
  17. gobby/mcp_proxy/registries.py +14 -0
  18. gobby/mcp_proxy/services/recommendation.py +2 -28
  19. gobby/mcp_proxy/tools/artifacts.py +3 -3
  20. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  21. gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
  22. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  23. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  24. gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
  25. gobby/mcp_proxy/tools/workflows/_query.py +207 -0
  26. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  27. gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
  28. gobby/memory/components/__init__.py +0 -0
  29. gobby/memory/components/ingestion.py +98 -0
  30. gobby/memory/components/search.py +108 -0
  31. gobby/memory/manager.py +16 -25
  32. gobby/paths.py +51 -0
  33. gobby/prompts/loader.py +1 -35
  34. gobby/runner.py +23 -10
  35. gobby/servers/http.py +186 -149
  36. gobby/servers/routes/admin.py +12 -0
  37. gobby/servers/routes/mcp/endpoints/execution.py +15 -7
  38. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  39. gobby/sessions/analyzer.py +2 -2
  40. gobby/skills/parser.py +23 -0
  41. gobby/skills/sync.py +5 -4
  42. gobby/storage/artifacts.py +19 -0
  43. gobby/storage/migrations.py +25 -2
  44. gobby/storage/skills.py +47 -7
  45. gobby/tasks/external_validator.py +4 -17
  46. gobby/tasks/validation.py +13 -87
  47. gobby/tools/summarizer.py +18 -51
  48. gobby/utils/status.py +13 -0
  49. gobby/workflows/actions.py +5 -0
  50. gobby/workflows/context_actions.py +21 -24
  51. gobby/workflows/enforcement/__init__.py +11 -1
  52. gobby/workflows/enforcement/blocking.py +96 -0
  53. gobby/workflows/enforcement/handlers.py +35 -1
  54. gobby/workflows/engine.py +6 -3
  55. gobby/workflows/lifecycle_evaluator.py +2 -1
  56. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
  57. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/RECORD +61 -45
  58. gobby/hooks/event_handlers.py +0 -1008
  59. gobby/mcp_proxy/tools/workflows.py +0 -1023
  60. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
  61. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
  62. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
  63. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
@@ -143,6 +143,7 @@ def setup_internal_registries(
 
     workflows_registry = create_workflows_registry(
         session_manager=local_session_manager,
+        db=getattr(local_session_manager, "db", None) if local_session_manager else None,
     )
     manager.add_registry(workflows_registry)
     logger.debug("Workflows registry initialized")
@@ -281,6 +282,19 @@ def setup_internal_registries(
     else:
         logger.debug("Skills registry not initialized: task_manager is None")
 
+    # Initialize artifacts registry using the existing database from task_manager
+    if task_manager is not None:
+        from gobby.mcp_proxy.tools.artifacts import create_artifacts_registry
+
+        artifacts_registry = create_artifacts_registry(
+            db=task_manager.db,
+            session_manager=local_session_manager,
+        )
+        manager.add_registry(artifacts_registry)
+        logger.debug("Artifacts registry initialized")
+    else:
+        logger.debug("Artifacts registry not initialized: task_manager is None")
+
     logger.info(f"Internal registries initialized: {len(manager)} registries")
     return manager
 
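These two hunks to `setup_internal_registries` pass the session manager's database handle into the workflows registry and add an artifacts registry only when `task_manager` (and therefore its database) exists. A minimal standalone sketch of that guard-and-wire pattern follows; `FakeDb`, `FakeTaskManager`, and `build_registry` are illustrative stand-ins, not gobby APIs.

```python
# Illustrative sketch of the conditional registry wiring above.
# FakeDb, FakeTaskManager, and build_registry are hypothetical stand-ins, not gobby code.
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("sketch")


class FakeDb:
    """Stands in for the shared database handle exposed as task_manager.db."""


class FakeTaskManager:
    def __init__(self) -> None:
        self.db = FakeDb()


def build_registry(db: FakeDb | None = None) -> dict:
    # Mirrors create_artifacts_registry(db=...): without a db, no tools are wired.
    return {"db": db, "tools": ["get_artifact", "list_artifacts"] if db else []}


def setup(task_manager: FakeTaskManager | None) -> list[dict]:
    registries: list[dict] = []
    if task_manager is not None:
        registries.append(build_registry(db=task_manager.db))
        logger.debug("Artifacts registry initialized")
    else:
        logger.debug("Artifacts registry not initialized: task_manager is None")
    return registries


print(len(setup(FakeTaskManager())))  # 1 - registry created from the existing db
print(len(setup(None)))               # 0 - skipped when there is no task_manager
```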
@@ -15,22 +15,6 @@ logger = logging.getLogger("gobby.mcp.server")
 # Search mode type
 SearchMode = Literal["llm", "semantic", "hybrid"]
 
-DEFAULT_HYBRID_RERANK_PROMPT = """Re-rank the following tools for the task: "{task_description}"
-
-Candidates:
-{candidate_list}
-
-Select the best {top_k} tools. Return JSON:
-{{"recommendations": [{{"server": "...", "tool": "...", "reason": "..."}}]}}"""
-
-DEFAULT_LLM_PROMPT = """Recommend tools for the task: "{task_description}"
-
-Available Servers:
-{available_servers}
-
-Return JSON:
-{{"recommendations": [{{"server": "...", "tool": "...", "reason": "..."}}]}}"""
-
 
 class RecommendationService:
     """Service for recommending tools."""
@@ -49,10 +33,6 @@ class RecommendationService:
         self._project_id = project_id
         self._config = config
         self._loader = PromptLoader()
-        self._loader.register_fallback(
-            "features/recommend_hybrid", lambda: DEFAULT_HYBRID_RERANK_PROMPT
-        )
-        self._loader.register_fallback("features/recommend_llm", lambda: DEFAULT_LLM_PROMPT)
 
     def _get_config(self) -> RecommendToolsConfig:
         """Get config with fallback to defaults."""
@@ -181,10 +161,7 @@ class RecommendationService:
             "candidate_list": candidate_list,
             "top_k": top_k,
         }
-        try:
-            prompt = self._loader.render(prompt_path, context)
-        except Exception:
-            prompt = DEFAULT_HYBRID_RERANK_PROMPT.format(**context)
+        prompt = self._loader.render(prompt_path, context)
 
         provider = self._llm_service.get_default_provider()
         response = await provider.generate_text(prompt)
@@ -223,10 +200,7 @@ class RecommendationService:
            "task_description": task_description,
            "available_servers": ", ".join(available_servers),
         }
-        try:
-            prompt = self._loader.render(prompt_path, context)
-        except Exception:
-            prompt = DEFAULT_LLM_PROMPT.format(**context)
+        prompt = self._loader.render(prompt_path, context)
 
         provider = self._llm_service.get_default_provider()
         response = await provider.generate_text(prompt)
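With the in-module default prompts and their `register_fallback`/`try-except` paths removed, both recommendation modes now depend entirely on `PromptLoader.render(prompt_path, context)`, so a missing or broken template surfaces as an error instead of being silently papered over. A rough stand-in for what that rendering amounts to, using plain `str.format` with the same context keys built above (the template text here is illustrative, not the shipped prompt):

```python
# Hypothetical stand-in for PromptLoader.render: a template formatted with the
# same context keys the hybrid re-rank path builds above.
TEMPLATE = (
    'Re-rank the candidate tools for: "{task_description}"\n'
    "{candidate_list}\n"
    "Return the best {top_k} as JSON."
)


def render(template: str, context: dict) -> str:
    # A real loader reads the template from disk; render errors now propagate to the caller.
    return template.format(**context)


context = {
    "task_description": "summarize a git diff",
    "candidate_list": "- serverA/toolX\n- serverB/toolY",
    "top_k": 3,
}
print(render(TEMPLATE, context))
```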
@@ -19,11 +19,11 @@ from gobby.mcp_proxy.tools.internal import InternalToolRegistry
 
 if TYPE_CHECKING:
     from gobby.storage.artifacts import LocalArtifactManager
-    from gobby.storage.database import LocalDatabase
+    from gobby.storage.database import DatabaseProtocol
 
 
 def create_artifacts_registry(
-    db: LocalDatabase | None = None,
+    db: DatabaseProtocol | None = None,
     artifact_manager: LocalArtifactManager | None = None,
     session_manager: Any | None = None,
 ) -> InternalToolRegistry:
@@ -31,7 +31,7 @@ def create_artifacts_registry(
     Create an artifacts tool registry with all artifact-related tools.
 
     Args:
-        db: LocalDatabase instance (used to create artifact_manager if not provided)
+        db: DatabaseProtocol instance (used to create artifact_manager if not provided)
        artifact_manager: LocalArtifactManager instance
        session_manager: Session manager for resolving session references
 
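Swapping the annotation from `LocalDatabase` to `DatabaseProtocol` (here and in the new workflows modules) suggests registries now accept anything that structurally satisfies a database interface rather than one concrete class. A generic sketch of that `typing.Protocol` pattern; the `execute` method name is an assumption for illustration, not necessarily what gobby's protocol defines.

```python
# Generic Protocol-typing sketch; DatabaseLike and execute() are assumed names,
# not gobby's actual DatabaseProtocol definition.
from typing import Any, Protocol


class DatabaseLike(Protocol):
    def execute(self, sql: str, params: tuple[Any, ...] = ()) -> list[tuple]: ...


class InMemoryDb:
    """Satisfies the protocol structurally - no inheritance required."""

    def execute(self, sql: str, params: tuple[Any, ...] = ()) -> list[tuple]:
        return []


def create_registry(db: DatabaseLike | None = None) -> dict[str, Any]:
    # Callers may pass a production database, a test double, or nothing at all.
    return {"has_db": db is not None}


print(create_registry(InMemoryDb()))  # {'has_db': True}
print(create_registry(None))          # {'has_db': False}
```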
@@ -14,6 +14,7 @@ from collections.abc import Callable
 from typing import TYPE_CHECKING, Any
 
 from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.storage.sessions import LocalSessionManager
 from gobby.storage.tasks import TaskNotFoundError
 from gobby.utils.project_context import get_project_context
 from gobby.workflows.state_manager import WorkflowStateManager
@@ -227,6 +228,7 @@ def create_readiness_registry(
 
     # Create workflow state manager for session_task scoping
     workflow_state_manager = WorkflowStateManager(task_manager.db)
+    session_manager = LocalSessionManager(task_manager.db)
 
     # --- list_ready_tasks ---
 
@@ -376,7 +378,16 @@ def create_readiness_registry(
 
         # Auto-scope to session_task if session_id is provided and parent_task_id is not set
        if session_id and not parent_task_id:
-            workflow_state = workflow_state_manager.get_state(session_id)
+            # Resolve session_id from #N format to UUID
+            try:
+                resolved_session_id = session_manager.resolve_session_reference(
+                    session_id, project_id
+                )
+            except Exception as e:
+                logger.warning(f"Could not resolve session_id '{session_id}': {e}")
+                resolved_session_id = session_id
+
+            workflow_state = workflow_state_manager.get_state(resolved_session_id)
             if workflow_state:
                 session_task = workflow_state.variables.get("session_task")
                 if session_task and session_task != "*":
@@ -395,6 +406,19 @@ def create_readiness_registry(
             ready_tasks = _get_ready_descendants(
                 task_manager, parent_task_id, task_type, project_id
             )
+            # If no ready descendants, check if the parent task itself is ready
+            # This handles the case where session_task is a leaf task with no children
+            if not ready_tasks:
+                parent_task = task_manager.get_task(parent_task_id)
+                if parent_task and parent_task.status == "open":
+                    # Check if it matches task_type filter
+                    if task_type is None or parent_task.task_type == task_type:
+                        # Check if task is ready by seeing if it appears in ready list
+                        ready_check = task_manager.list_ready_tasks(
+                            project_id=project_id, limit=200
+                        )
+                        if any(t.id == parent_task_id for t in ready_check):
+                            ready_tasks = [parent_task]
         else:
             ready_tasks = task_manager.list_ready_tasks(
                 task_type=task_type, limit=50, project_id=project_id
@@ -492,7 +516,7 @@ def create_readiness_registry(
             "score": best_score,
             "reason": f"Selected because: {', '.join(reasons) if reasons else 'best available option'}",
             "alternatives": [
-                {"ref": t.to_brief()["ref"], "title": t.title, "score": s}
+                {"ref": t.to_brief().get("ref", t.id), "title": t.title, "score": s}
                for t, s, _, _ in scored[1:4]  # Show top 3 alternatives
             ],
             "recommended_skills": recommended_skills,
@@ -525,10 +549,9 @@ def create_readiness_registry(
                },
                "session_id": {
                    "type": "string",
-                    "description": "Your session ID (from system context). Used to auto-scope suggestions based on workflow's session_task variable.",
+                    "description": "Your session ID (from system context). When provided, auto-scopes suggestions based on workflow's session_task variable.",
                },
            },
-            "required": ["session_id"],
        },
        func=suggest_next_task,
    )
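The readiness hunks resolve human-friendly session references (for example `#7`) to UUIDs before reading workflow state, fall back to the raw value if resolution fails, and then also consider the parent task itself when it has no ready descendants. A standalone sketch of the resolve-with-fallback step; the reference table and helper below are invented for illustration.

```python
# Hypothetical resolve-with-fallback sketch mirroring the readiness change above;
# the reference table is made up for illustration.
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("sketch")

_REFS = {"#7": "6b1d2f3a-0000-0000-0000-000000000007"}


def resolve_session_reference(session_id: str) -> str:
    if session_id in _REFS:
        return _REFS[session_id]
    raise ValueError(f"unknown session reference: {session_id}")


def get_workflow_state(session_id: str) -> str:
    try:
        resolved = resolve_session_reference(session_id)
    except Exception as e:
        logger.warning(f"Could not resolve session_id '{session_id}': {e}")
        resolved = session_id  # fall back to the raw value
    return f"state for {resolved}"


print(get_workflow_state("#7"))      # resolved to the UUID before lookup
print(get_workflow_state("abc123"))  # warning logged, raw id used as-is
```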
@@ -0,0 +1,266 @@
+"""
+Internal MCP tools for Gobby Workflow System.
+
+Exposes functionality for:
+- get_workflow: Get details about a specific workflow definition
+- list_workflows: Discover available workflow definitions
+- activate_workflow: Start a step-based workflow (supports initial variables)
+- end_workflow: Complete/terminate active workflow
+- get_workflow_status: Get current workflow state
+- request_step_transition: Request transition to a different step
+- mark_artifact_complete: Register an artifact as complete
+- set_variable: Set a workflow variable for the session
+- get_variable: Get workflow variable(s) for the session
+- import_workflow: Import a workflow from a file path
+- reload_cache: Clear the workflow loader cache to pick up file changes
+- close_terminal: Agent self-termination
+
+These tools are registered with the InternalToolRegistry and accessed
+via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
+"""
+
+from typing import Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.mcp_proxy.tools.workflows._artifacts import (
+    get_variable,
+    mark_artifact_complete,
+    set_variable,
+)
+from gobby.mcp_proxy.tools.workflows._import import import_workflow, reload_cache
+from gobby.mcp_proxy.tools.workflows._lifecycle import (
+    activate_workflow,
+    end_workflow,
+    request_step_transition,
+)
+from gobby.mcp_proxy.tools.workflows._query import (
+    get_workflow,
+    get_workflow_status,
+    list_workflows,
+)
+from gobby.mcp_proxy.tools.workflows._terminal import close_terminal
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.sessions import LocalSessionManager
+from gobby.utils.project_context import get_workflow_project_path
+from gobby.workflows.loader import WorkflowLoader
+from gobby.workflows.state_manager import WorkflowStateManager
+
+__all__ = [
+    "create_workflows_registry",
+    "get_workflow_project_path",
+]
+
+
+def create_workflows_registry(
+    loader: WorkflowLoader | None = None,
+    state_manager: WorkflowStateManager | None = None,
+    session_manager: LocalSessionManager | None = None,
+    db: DatabaseProtocol | None = None,
+) -> InternalToolRegistry:
+    """
+    Create a workflow tool registry with all workflow-related tools.
+
+    Args:
+        loader: WorkflowLoader instance
+        state_manager: WorkflowStateManager instance (created from db if not provided)
+        session_manager: LocalSessionManager instance (created from db if not provided)
+        db: Database instance for creating default managers
+
+    Returns:
+        InternalToolRegistry with workflow tools registered
+
+    Note:
+        If db is None and state_manager/session_manager are not provided,
+        tools requiring database access will return errors when called.
+    """
+    _db = db
+    _loader = loader or WorkflowLoader()
+
+    # Create default managers only if db is provided
+    if state_manager is not None:
+        _state_manager = state_manager
+    elif _db is not None:
+        _state_manager = WorkflowStateManager(_db)
+    else:
+        _state_manager = None
+
+    if session_manager is not None:
+        _session_manager = session_manager
+    elif _db is not None:
+        _session_manager = LocalSessionManager(_db)
+    else:
+        _session_manager = None
+
+    registry = InternalToolRegistry(
+        name="gobby-workflows",
+        description="Workflow management - list, activate, status, transition, end",
+    )
+
+    @registry.tool(
+        name="get_workflow",
+        description="Get details about a specific workflow definition.",
+    )
+    def _get_workflow(
+        name: str,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return get_workflow(_loader, name, project_path)
+
+    @registry.tool(
+        name="list_workflows",
+        description="List available workflow definitions from project and global directories.",
+    )
+    def _list_workflows(
+        project_path: str | None = None,
+        workflow_type: str | None = None,
+        global_only: bool = False,
+    ) -> dict[str, Any]:
+        return list_workflows(_loader, project_path, workflow_type, global_only)
+
+    @registry.tool(
+        name="activate_workflow",
+        description="Activate a step-based workflow for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _activate_workflow(
+        name: str,
+        session_id: str | None = None,
+        initial_step: str | None = None,
+        variables: dict[str, Any] | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return activate_workflow(
+            _loader,
+            _state_manager,
+            _session_manager,
+            _db,
+            name,
+            session_id,
+            initial_step,
+            variables,
+            project_path,
+        )
+
+    @registry.tool(
+        name="end_workflow",
+        description="End the currently active step-based workflow. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _end_workflow(
+        session_id: str | None = None,
+        reason: str | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return end_workflow(
+            _loader, _state_manager, _session_manager, session_id, reason, project_path
+        )
+
+    @registry.tool(
+        name="get_workflow_status",
+        description="Get current workflow step and state. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_workflow_status(session_id: str | None = None) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_workflow_status(_state_manager, _session_manager, session_id)
+
+    @registry.tool(
+        name="request_step_transition",
+        description="Request transition to a different step. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _request_step_transition(
+        to_step: str,
+        reason: str | None = None,
+        session_id: str | None = None,
+        force: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return request_step_transition(
+            _loader,
+            _state_manager,
+            _session_manager,
+            to_step,
+            reason,
+            session_id,
+            force,
+            project_path,
+        )
+
+    @registry.tool(
+        name="mark_artifact_complete",
+        description="Register an artifact as complete (plan, spec, etc.). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _mark_artifact_complete(
+        artifact_type: str,
+        file_path: str,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return mark_artifact_complete(
+            _state_manager, _session_manager, artifact_type, file_path, session_id
+        )
+
+    @registry.tool(
+        name="set_variable",
+        description="Set a workflow variable for the current session (session-scoped, not persisted to YAML). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _set_variable(
+        name: str,
+        value: str | int | float | bool | None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return set_variable(_state_manager, _session_manager, _db, name, value, session_id)
+
+    @registry.tool(
+        name="get_variable",
+        description="Get workflow variable(s) for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_variable(
+        name: str | None = None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_variable(_state_manager, _session_manager, name, session_id)
+
+    @registry.tool(
+        name="import_workflow",
+        description="Import a workflow from a file path into the project or global directory.",
+    )
+    def _import_workflow(
+        source_path: str,
+        workflow_name: str | None = None,
+        is_global: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return import_workflow(_loader, source_path, workflow_name, is_global, project_path)
+
+    @registry.tool(
+        name="reload_cache",
+        description="Clear the workflow cache. Use this after modifying workflow YAML files.",
+    )
+    def _reload_cache() -> dict[str, Any]:
+        return reload_cache(_loader)
+
+    @registry.tool(
+        name="close_terminal",
+        description=(
+            "Close the current terminal window/pane (agent self-termination). "
+            "Launches ~/.gobby/scripts/agent_shutdown.sh which handles "
+            "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing."
+        ),
+    )
+    async def _close_terminal(
+        signal: str = "TERM",
+        delay_ms: int = 0,
+    ) -> dict[str, Any]:
+        return await close_terminal(signal, delay_ms)
+
+    return registry
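`create_workflows_registry` builds its state and session managers from `db` when they are not passed in, and each database-backed tool returns an error dict rather than raising when no database is available. A compact, self-contained sketch of that decorate-and-degrade pattern; the `Registry` class below is a stand-in, not gobby's `InternalToolRegistry`.

```python
# Stand-in registry sketch (not gobby's InternalToolRegistry) showing the
# "register tools via decorator, degrade gracefully without a db" pattern above.
from typing import Any, Callable


class Registry:
    def __init__(self, name: str) -> None:
        self.name = name
        self._tools: dict[str, Callable[..., dict[str, Any]]] = {}

    def tool(self, name: str) -> Callable:
        def decorator(func: Callable[..., dict[str, Any]]) -> Callable:
            self._tools[name] = func
            return func

        return decorator

    def call_tool(self, name: str, **kwargs: Any) -> dict[str, Any]:
        return self._tools[name](**kwargs)


def create_registry(db: object | None = None) -> Registry:
    registry = Registry("workflows-sketch")

    @registry.tool(name="get_workflow_status")
    def _status(session_id: str | None = None) -> dict[str, Any]:
        if db is None:
            return {"success": False, "error": "Workflow tools require database connection"}
        return {"success": True, "session_id": session_id, "step": "plan"}

    return registry


print(create_registry(db=None).call_tool("get_workflow_status", session_id="#1"))
print(create_registry(db=object()).call_tool("get_workflow_status", session_id="#1"))
```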
@@ -0,0 +1,225 @@
+"""
+Artifact and variable tools for workflows.
+"""
+
+import logging
+from datetime import UTC, datetime
+from typing import Any
+
+from gobby.mcp_proxy.tools.workflows._resolution import (
+    resolve_session_id,
+    resolve_session_task_value,
+)
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.sessions import LocalSessionManager
+from gobby.workflows.definitions import WorkflowState
+from gobby.workflows.state_manager import WorkflowStateManager
+
+logger = logging.getLogger(__name__)
+
+
+def mark_artifact_complete(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    artifact_type: str,
+    file_path: str,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Register an artifact as complete.
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        artifact_type: Type of artifact (e.g., "plan", "spec", "test")
+        file_path: Path to the artifact file
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Success status
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        return {"success": False, "error": "No workflow active for session"}
+
+    # Update artifacts
+    state.artifacts[artifact_type] = file_path
+    state_manager.save_state(state)
+
+    return {"success": True}
+
+
+def set_variable(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    db: DatabaseProtocol,
+    name: str,
+    value: str | int | float | bool | None,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Set a workflow variable for the current session.
+
+    Variables set this way are session-scoped - they persist in the database
+    for the duration of the session but do not modify the workflow YAML file.
+
+    This is useful for:
+    - Setting session_epic to enforce epic completion before stopping
+    - Setting is_worktree to mark a session as a worktree agent
+    - Dynamic configuration without modifying workflow definitions
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        db: LocalDatabase instance
+        name: Variable name (e.g., "session_epic", "is_worktree")
+        value: Variable value (string, number, boolean, or null)
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Success status and updated variables
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    # Get or create state
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        # Create a minimal lifecycle state for variable storage
+        state = WorkflowState(
+            session_id=resolved_session_id,
+            workflow_name="__lifecycle__",
+            step="",
+            step_entered_at=datetime.now(UTC),
+            variables={},
+        )
+
+    # Block modification of session_task when a real workflow is active
+    # This prevents circumventing workflows by changing the tracked task
+    if name == "session_task" and state.workflow_name != "__lifecycle__":
+        current_value = state.variables.get("session_task")
+        if current_value is not None and value != current_value:
+            return {
+                "success": False,
+                "error": (
+                    f"Cannot modify session_task while workflow '{state.workflow_name}' is active. "
+                    f"Current value: {current_value}. "
+                    f"Use end_workflow() first if you need to change the tracked task."
+                ),
+            }
+
+    # Resolve session_task references (#N or N) to UUIDs upfront
+    # This prevents repeated resolution failures in condition evaluation
+    if name == "session_task" and isinstance(value, str):
+        try:
+            value = resolve_session_task_value(value, resolved_session_id, session_manager, db)
+        except (ValueError, KeyError) as e:
+            logger.warning(
+                f"Failed to resolve session_task value '{value}' for session {resolved_session_id}: {e}"
+            )
+            return {
+                "success": False,
+                "error": f"Failed to resolve session_task value '{value}': {e}",
+            }
+
+    # Set the variable
+    state.variables[name] = value
+    state_manager.save_state(state)
+
+    # Add deprecation warning for session_task on __lifecycle__ workflow
+    if name == "session_task" and state.workflow_name == "__lifecycle__":
+        return {
+            "warning": (
+                "DEPRECATED: Setting session_task via set_variable on __lifecycle__ workflow. "
+                "Prefer using activate_workflow(variables={session_task: ...}) instead."
+            )
+        }
+
+    return {}
+
+
+def get_variable(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    name: str | None = None,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Get workflow variable(s) for the current session.
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        name: Variable name to get (if None, returns all variables)
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Variable value(s) and session info
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        if name:
+            return {
+                "success": True,
+                "session_id": resolved_session_id,
+                "variable": name,
+                "value": None,
+                "exists": False,
+            }
+        return {
+            "success": True,
+            "session_id": resolved_session_id,
+            "variables": {},
+        }
+
+    if name:
+        value = state.variables.get(name)
+        return {
+            "success": True,
+            "session_id": resolved_session_id,
+            "variable": name,
+            "value": value,
+            "exists": name in state.variables,
+        }
+
+    return {
+        "success": True,
+        "session_id": resolved_session_id,
+        "variables": state.variables,
+    }
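Each helper in `_artifacts` follows the same shape: refuse to run without an explicit `session_id`, resolve the reference, then read or write workflow state, always returning a structured dict. A hedged sketch of what a caller might see, with an invented in-memory variable store and resolver standing in for the real managers.

```python
# Standalone sketch of the guard-then-resolve-then-return-dict shape used above;
# resolve_ref and the in-memory store are illustrative, not gobby APIs.
from typing import Any

_STORE: dict[str, dict[str, Any]] = {"uuid-1234": {"session_task": "task-42"}}


def resolve_ref(session_id: str) -> str:
    return {"#1": "uuid-1234"}.get(session_id, session_id)


def get_variable(name: str | None = None, session_id: str | None = None) -> dict[str, Any]:
    if not session_id:
        return {"success": False, "error": "session_id is required."}
    resolved = resolve_ref(session_id)
    variables = _STORE.get(resolved, {})
    if name:
        return {
            "success": True,
            "session_id": resolved,
            "variable": name,
            "value": variables.get(name),
            "exists": name in variables,
        }
    return {"success": True, "session_id": resolved, "variables": variables}


print(get_variable())                                      # error: session_id missing
print(get_variable(name="session_task", session_id="#1"))  # resolved lookup
```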