gobby-0.2.7-py3-none-any.whl → gobby-0.2.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/claude_code.py +99 -61
- gobby/adapters/gemini.py +140 -38
- gobby/agents/isolation.py +130 -0
- gobby/agents/registry.py +11 -0
- gobby/agents/session.py +1 -0
- gobby/agents/spawn_executor.py +43 -13
- gobby/agents/spawners/macos.py +26 -1
- gobby/app_context.py +59 -0
- gobby/cli/__init__.py +0 -2
- gobby/cli/memory.py +185 -0
- gobby/cli/utils.py +5 -17
- gobby/clones/git.py +177 -0
- gobby/config/features.py +0 -20
- gobby/config/skills.py +31 -0
- gobby/config/tasks.py +4 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +87 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +573 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/hook_manager.py +21 -1
- gobby/install/gemini/hooks/hook_dispatcher.py +74 -15
- gobby/llm/claude.py +377 -42
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/instructions.py +2 -2
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/registries.py +35 -4
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/tools/agent_messaging.py +93 -44
- gobby/mcp_proxy/tools/agents.py +45 -9
- gobby/mcp_proxy/tools/artifacts.py +46 -12
- gobby/mcp_proxy/tools/sessions/_commits.py +31 -24
- gobby/mcp_proxy/tools/sessions/_crud.py +5 -5
- gobby/mcp_proxy/tools/sessions/_handoff.py +45 -41
- gobby/mcp_proxy/tools/sessions/_messages.py +35 -7
- gobby/mcp_proxy/tools/spawn_agent.py +44 -6
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/tasks/_context.py +18 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +13 -6
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +29 -14
- gobby/mcp_proxy/tools/tasks/_session.py +22 -7
- gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
- gobby/mcp_proxy/tools/workflows/_query.py +207 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
- gobby/mcp_proxy/tools/worktrees.py +32 -7
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/extractor.py +15 -1
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +36 -10
- gobby/servers/http.py +186 -149
- gobby/servers/routes/admin.py +12 -0
- gobby/servers/routes/mcp/endpoints/execution.py +15 -7
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/servers/routes/mcp/hooks.py +50 -3
- gobby/servers/websocket.py +57 -1
- gobby/sessions/analyzer.py +4 -4
- gobby/sessions/manager.py +9 -0
- gobby/sessions/transcripts/gemini.py +100 -34
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/database.py +9 -2
- gobby/storage/memories.py +32 -21
- gobby/storage/migrations.py +46 -4
- gobby/storage/sessions.py +4 -2
- gobby/storage/skills.py +87 -7
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +5 -0
- gobby/workflows/context_actions.py +21 -24
- gobby/workflows/detection_helpers.py +38 -24
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +109 -1
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/engine.py +96 -0
- gobby/workflows/evaluator.py +110 -0
- gobby/workflows/hooks.py +41 -0
- gobby/workflows/lifecycle_evaluator.py +2 -1
- gobby/workflows/memory_actions.py +11 -0
- gobby/workflows/safe_evaluator.py +8 -0
- gobby/workflows/summary_actions.py +123 -50
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/RECORD +99 -107
- gobby/cli/tui.py +0 -34
- gobby/hooks/event_handlers.py +0 -909
- gobby/mcp_proxy/tools/workflows.py +0 -973
- gobby/tui/__init__.py +0 -5
- gobby/tui/api_client.py +0 -278
- gobby/tui/app.py +0 -329
- gobby/tui/screens/__init__.py +0 -25
- gobby/tui/screens/agents.py +0 -333
- gobby/tui/screens/chat.py +0 -450
- gobby/tui/screens/dashboard.py +0 -377
- gobby/tui/screens/memory.py +0 -305
- gobby/tui/screens/metrics.py +0 -231
- gobby/tui/screens/orchestrator.py +0 -903
- gobby/tui/screens/sessions.py +0 -412
- gobby/tui/screens/tasks.py +0 -440
- gobby/tui/screens/workflows.py +0 -289
- gobby/tui/screens/worktrees.py +0 -174
- gobby/tui/widgets/__init__.py +0 -21
- gobby/tui/widgets/chat.py +0 -210
- gobby/tui/widgets/conductor.py +0 -104
- gobby/tui/widgets/menu.py +0 -132
- gobby/tui/widgets/message_panel.py +0 -160
- gobby/tui/widgets/review_gate.py +0 -224
- gobby/tui/widgets/task_tree.py +0 -99
- gobby/tui/widgets/token_budget.py +0 -166
- gobby/tui/ws_client.py +0 -258
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/workflows/__init__.py (new file)
@@ -0,0 +1,266 @@
+"""
+Internal MCP tools for Gobby Workflow System.
+
+Exposes functionality for:
+- get_workflow: Get details about a specific workflow definition
+- list_workflows: Discover available workflow definitions
+- activate_workflow: Start a step-based workflow (supports initial variables)
+- end_workflow: Complete/terminate active workflow
+- get_workflow_status: Get current workflow state
+- request_step_transition: Request transition to a different step
+- mark_artifact_complete: Register an artifact as complete
+- set_variable: Set a workflow variable for the session
+- get_variable: Get workflow variable(s) for the session
+- import_workflow: Import a workflow from a file path
+- reload_cache: Clear the workflow loader cache to pick up file changes
+- close_terminal: Agent self-termination
+
+These tools are registered with the InternalToolRegistry and accessed
+via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
+"""
+
+from typing import Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.mcp_proxy.tools.workflows._artifacts import (
+    get_variable,
+    mark_artifact_complete,
+    set_variable,
+)
+from gobby.mcp_proxy.tools.workflows._import import import_workflow, reload_cache
+from gobby.mcp_proxy.tools.workflows._lifecycle import (
+    activate_workflow,
+    end_workflow,
+    request_step_transition,
+)
+from gobby.mcp_proxy.tools.workflows._query import (
+    get_workflow,
+    get_workflow_status,
+    list_workflows,
+)
+from gobby.mcp_proxy.tools.workflows._terminal import close_terminal
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.sessions import LocalSessionManager
+from gobby.utils.project_context import get_workflow_project_path
+from gobby.workflows.loader import WorkflowLoader
+from gobby.workflows.state_manager import WorkflowStateManager
+
+__all__ = [
+    "create_workflows_registry",
+    "get_workflow_project_path",
+]
+
+
+def create_workflows_registry(
+    loader: WorkflowLoader | None = None,
+    state_manager: WorkflowStateManager | None = None,
+    session_manager: LocalSessionManager | None = None,
+    db: DatabaseProtocol | None = None,
+) -> InternalToolRegistry:
+    """
+    Create a workflow tool registry with all workflow-related tools.
+
+    Args:
+        loader: WorkflowLoader instance
+        state_manager: WorkflowStateManager instance (created from db if not provided)
+        session_manager: LocalSessionManager instance (created from db if not provided)
+        db: Database instance for creating default managers
+
+    Returns:
+        InternalToolRegistry with workflow tools registered
+
+    Note:
+        If db is None and state_manager/session_manager are not provided,
+        tools requiring database access will return errors when called.
+    """
+    _db = db
+    _loader = loader or WorkflowLoader()
+
+    # Create default managers only if db is provided
+    if state_manager is not None:
+        _state_manager = state_manager
+    elif _db is not None:
+        _state_manager = WorkflowStateManager(_db)
+    else:
+        _state_manager = None
+
+    if session_manager is not None:
+        _session_manager = session_manager
+    elif _db is not None:
+        _session_manager = LocalSessionManager(_db)
+    else:
+        _session_manager = None
+
+    registry = InternalToolRegistry(
+        name="gobby-workflows",
+        description="Workflow management - list, activate, status, transition, end",
+    )
+
+    @registry.tool(
+        name="get_workflow",
+        description="Get details about a specific workflow definition.",
+    )
+    def _get_workflow(
+        name: str,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return get_workflow(_loader, name, project_path)
+
+    @registry.tool(
+        name="list_workflows",
+        description="List available workflow definitions from project and global directories.",
+    )
+    def _list_workflows(
+        project_path: str | None = None,
+        workflow_type: str | None = None,
+        global_only: bool = False,
+    ) -> dict[str, Any]:
+        return list_workflows(_loader, project_path, workflow_type, global_only)
+
+    @registry.tool(
+        name="activate_workflow",
+        description="Activate a step-based workflow for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _activate_workflow(
+        name: str,
+        session_id: str | None = None,
+        initial_step: str | None = None,
+        variables: dict[str, Any] | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return activate_workflow(
+            _loader,
+            _state_manager,
+            _session_manager,
+            _db,
+            name,
+            session_id,
+            initial_step,
+            variables,
+            project_path,
+        )
+
+    @registry.tool(
+        name="end_workflow",
+        description="End the currently active step-based workflow. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _end_workflow(
+        session_id: str | None = None,
+        reason: str | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return end_workflow(
+            _loader, _state_manager, _session_manager, session_id, reason, project_path
+        )
+
+    @registry.tool(
+        name="get_workflow_status",
+        description="Get current workflow step and state. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_workflow_status(session_id: str | None = None) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_workflow_status(_state_manager, _session_manager, session_id)
+
+    @registry.tool(
+        name="request_step_transition",
+        description="Request transition to a different step. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _request_step_transition(
+        to_step: str,
+        reason: str | None = None,
+        session_id: str | None = None,
+        force: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return request_step_transition(
+            _loader,
+            _state_manager,
+            _session_manager,
+            to_step,
+            reason,
+            session_id,
+            force,
+            project_path,
+        )
+
+    @registry.tool(
+        name="mark_artifact_complete",
+        description="Register an artifact as complete (plan, spec, etc.). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _mark_artifact_complete(
+        artifact_type: str,
+        file_path: str,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return mark_artifact_complete(
+            _state_manager, _session_manager, artifact_type, file_path, session_id
+        )

+    @registry.tool(
+        name="set_variable",
+        description="Set a workflow variable for the current session (session-scoped, not persisted to YAML). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _set_variable(
+        name: str,
+        value: str | int | float | bool | None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return set_variable(_state_manager, _session_manager, _db, name, value, session_id)
+
+    @registry.tool(
+        name="get_variable",
+        description="Get workflow variable(s) for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_variable(
+        name: str | None = None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_variable(_state_manager, _session_manager, name, session_id)
+
+    @registry.tool(
+        name="import_workflow",
+        description="Import a workflow from a file path into the project or global directory.",
+    )
+    def _import_workflow(
+        source_path: str,
+        workflow_name: str | None = None,
+        is_global: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return import_workflow(_loader, source_path, workflow_name, is_global, project_path)
+
+    @registry.tool(
+        name="reload_cache",
+        description="Clear the workflow cache. Use this after modifying workflow YAML files.",
+    )
+    def _reload_cache() -> dict[str, Any]:
+        return reload_cache(_loader)
+
+    @registry.tool(
+        name="close_terminal",
+        description=(
+            "Close the current terminal window/pane (agent self-termination). "
+            "Launches ~/.gobby/scripts/agent_shutdown.sh which handles "
+            "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing."
+        ),
+    )
+    async def _close_terminal(
+        signal: str = "TERM",
+        delay_ms: int = 0,
+    ) -> dict[str, Any]:
+        return await close_terminal(signal, delay_ms)
+
+    return registry
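The hunk above replaces the removed monolithic `gobby/mcp_proxy/tools/workflows.py` (-973 lines) with a package whose `create_workflows_registry` factory wires the split-out submodules into a single `InternalToolRegistry`. A minimal usage sketch, relying only on what the diff shows (`my_database` is a placeholder; the `call_tool`/`list_tools` interface is mentioned in the module docstring but not defined here):

```python
# Sketch only: `my_database` stands in for any DatabaseProtocol implementation.
from gobby.mcp_proxy.tools.workflows import create_workflows_registry

# Without a database the registry still builds, but state-dependent tools
# (activate_workflow, set_variable, ...) answer with
# {"success": False, "error": "Workflow tools require database connection"}.
registry = create_workflows_registry()

# With a database, default WorkflowStateManager and LocalSessionManager
# instances are created from it:
# registry = create_workflows_registry(db=my_database)
```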
gobby/mcp_proxy/tools/workflows/_artifacts.py (new file)
@@ -0,0 +1,225 @@
+"""
+Artifact and variable tools for workflows.
+"""
+
+import logging
+from datetime import UTC, datetime
+from typing import Any
+
+from gobby.mcp_proxy.tools.workflows._resolution import (
+    resolve_session_id,
+    resolve_session_task_value,
+)
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.sessions import LocalSessionManager
+from gobby.workflows.definitions import WorkflowState
+from gobby.workflows.state_manager import WorkflowStateManager
+
+logger = logging.getLogger(__name__)
+
+
+def mark_artifact_complete(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    artifact_type: str,
+    file_path: str,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Register an artifact as complete.
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        artifact_type: Type of artifact (e.g., "plan", "spec", "test")
+        file_path: Path to the artifact file
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Success status
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        return {"success": False, "error": "No workflow active for session"}
+
+    # Update artifacts
+    state.artifacts[artifact_type] = file_path
+    state_manager.save_state(state)
+
+    return {"success": True}
+
+
+def set_variable(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    db: DatabaseProtocol,
+    name: str,
+    value: str | int | float | bool | None,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Set a workflow variable for the current session.
+
+    Variables set this way are session-scoped - they persist in the database
+    for the duration of the session but do not modify the workflow YAML file.
+
+    This is useful for:
+    - Setting session_epic to enforce epic completion before stopping
+    - Setting is_worktree to mark a session as a worktree agent
+    - Dynamic configuration without modifying workflow definitions
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        db: LocalDatabase instance
+        name: Variable name (e.g., "session_epic", "is_worktree")
+        value: Variable value (string, number, boolean, or null)
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Success status and updated variables
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    # Get or create state
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        # Create a minimal lifecycle state for variable storage
+        state = WorkflowState(
+            session_id=resolved_session_id,
+            workflow_name="__lifecycle__",
+            step="",
+            step_entered_at=datetime.now(UTC),
+            variables={},
+        )
+
+    # Block modification of session_task when a real workflow is active
+    # This prevents circumventing workflows by changing the tracked task
+    if name == "session_task" and state.workflow_name != "__lifecycle__":
+        current_value = state.variables.get("session_task")
+        if current_value is not None and value != current_value:
+            return {
+                "success": False,
+                "error": (
+                    f"Cannot modify session_task while workflow '{state.workflow_name}' is active. "
+                    f"Current value: {current_value}. "
+                    f"Use end_workflow() first if you need to change the tracked task."
+                ),
+            }
+
+    # Resolve session_task references (#N or N) to UUIDs upfront
+    # This prevents repeated resolution failures in condition evaluation
+    if name == "session_task" and isinstance(value, str):
+        try:
+            value = resolve_session_task_value(value, resolved_session_id, session_manager, db)
+        except (ValueError, KeyError) as e:
+            logger.warning(
+                f"Failed to resolve session_task value '{value}' for session {resolved_session_id}: {e}"
+            )
+            return {
+                "success": False,
+                "error": f"Failed to resolve session_task value '{value}': {e}",
+            }
+
+    # Set the variable
+    state.variables[name] = value
+    state_manager.save_state(state)
+
+    # Add deprecation warning for session_task on __lifecycle__ workflow
+    if name == "session_task" and state.workflow_name == "__lifecycle__":
+        return {
+            "warning": (
+                "DEPRECATED: Setting session_task via set_variable on __lifecycle__ workflow. "
+                "Prefer using activate_workflow(variables={session_task: ...}) instead."
+            )
+        }
+
+    return {}
+
+
+def get_variable(
+    state_manager: WorkflowStateManager,
+    session_manager: LocalSessionManager,
+    name: str | None = None,
+    session_id: str | None = None,
+) -> dict[str, Any]:
+    """
+    Get workflow variable(s) for the current session.
+
+    Args:
+        state_manager: WorkflowStateManager instance
+        session_manager: LocalSessionManager instance
+        name: Variable name to get (if None, returns all variables)
+        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+    Returns:
+        Variable value(s) and session info
+    """
+    # Require explicit session_id to prevent cross-session bleed
+    if not session_id:
+        return {
+            "success": False,
+            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+        }
+
+    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+    try:
+        resolved_session_id = resolve_session_id(session_manager, session_id)
+    except ValueError as e:
+        return {"success": False, "error": str(e)}
+
+    state = state_manager.get_state(resolved_session_id)
+    if not state:
+        if name:
+            return {
+                "success": True,
+                "session_id": resolved_session_id,
+                "variable": name,
+                "value": None,
+                "exists": False,
+            }
+        return {
+            "success": True,
+            "session_id": resolved_session_id,
+            "variables": {},
+        }
+
+    if name:
+        value = state.variables.get(name)
+        return {
+            "success": True,
+            "session_id": resolved_session_id,
+            "variable": name,
+            "value": value,
+            "exists": name in state.variables,
+        }
+
+    return {
+        "success": True,
+        "session_id": resolved_session_id,
+        "variables": state.variables,
+    }
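`_artifacts.py` rejects any call that omits an explicit session reference. A short sketch of the calling convention, assuming `state_manager`, `session_manager`, and `db` are already-constructed `WorkflowStateManager`, `LocalSessionManager`, and `DatabaseProtocol` instances (the `"#3"` reference and returned values are illustrative):

```python
from gobby.mcp_proxy.tools.workflows._artifacts import get_variable, set_variable

# Omitting session_id is rejected to prevent cross-session variable bleed:
set_variable(state_manager, session_manager, db, "is_worktree", True)
# -> {"success": False, "error": "session_id is required. ..."}

# With a session reference (#N, N, UUID, or prefix) the variable is stored on
# that session's workflow state and read back with an explicit exists flag:
set_variable(state_manager, session_manager, db, "is_worktree", True, session_id="#3")
get_variable(state_manager, session_manager, "is_worktree", session_id="#3")
# -> {"success": True, "session_id": "<uuid>", "variable": "is_worktree",
#     "value": True, "exists": True}
```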
gobby/mcp_proxy/tools/workflows/_import.py (new file)
@@ -0,0 +1,112 @@
+"""
+Import and cache tools for workflows.
+"""
+
+import logging
+import re
+import shutil
+from pathlib import Path
+from typing import Any
+
+import yaml
+
+from gobby.utils.project_context import get_workflow_project_path
+from gobby.workflows.loader import WorkflowLoader
+
+logger = logging.getLogger(__name__)
+
+
+def import_workflow(
+    loader: WorkflowLoader,
+    source_path: str,
+    workflow_name: str | None = None,
+    is_global: bool = False,
+    project_path: str | None = None,
+) -> dict[str, Any]:
+    """
+    Import a workflow from a file.
+
+    Args:
+        loader: WorkflowLoader instance
+        source_path: Path to the workflow YAML file
+        workflow_name: Override the workflow name (defaults to name in file)
+        is_global: Install to global ~/.gobby/workflows instead of project
+        project_path: Project directory path. Auto-discovered from cwd if not provided.
+
+    Returns:
+        Success status and destination path
+    """
+    source = Path(source_path)
+    if not source.exists():
+        return {"success": False, "error": f"File not found: {source_path}"}
+
+    if source.suffix != ".yaml":
+        return {"success": False, "error": "Workflow file must have .yaml extension"}
+
+    try:
+        with open(source, encoding="utf-8") as f:
+            data = yaml.safe_load(f)
+
+        if not data or "name" not in data:
+            return {"success": False, "error": "Invalid workflow: missing 'name' field"}
+
+    except yaml.YAMLError as e:
+        return {"success": False, "error": f"Invalid YAML: {e}"}
+
+    raw_name = workflow_name or data.get("name", source.stem)
+    # Sanitize name to prevent path traversal: strip path components, allow only safe chars
+    safe_name = Path(raw_name).name  # Strip any path components
+    safe_name = re.sub(r"[^a-zA-Z0-9_\-.]", "_", safe_name)  # Replace unsafe chars
+    safe_name = safe_name.strip("._")  # Remove leading/trailing dots and underscores
+    if not safe_name:
+        safe_name = source.stem  # Fallback to source filename
+    filename = f"{safe_name}.yaml"
+
+    if is_global:
+        dest_dir = Path.home() / ".gobby" / "workflows"
+    else:
+        # Auto-discover project path if not provided
+        if not project_path:
+            discovered = get_workflow_project_path()
+            if discovered:
+                project_path = str(discovered)
+
+        proj = Path(project_path) if project_path else None
+        if not proj:
+            return {
+                "success": False,
+                "error": "project_path required when not using is_global (could not auto-discover)",
+            }
+        dest_dir = proj / ".gobby" / "workflows"
+
+    dest_dir.mkdir(parents=True, exist_ok=True)
+    dest_path = dest_dir / filename
+
+    shutil.copy(source, dest_path)
+
+    # Clear loader cache so new workflow is discoverable
+    loader.clear_cache()
+
+    return {
+        "success": True,
+        "workflow_name": safe_name,
+        "destination": str(dest_path),
+        "is_global": is_global,
+    }
+
+
+def reload_cache(loader: WorkflowLoader) -> dict[str, Any]:
+    """
+    Clear the workflow loader cache.
+
+    This forces the daemon to re-read workflow YAML files from disk
+    on the next access. Use this when you've modified workflow files
+    and want the changes to take effect immediately without restarting
+    the daemon.
+
+    Returns:
+        Success status
+    """
+    loader.clear_cache()
+    logger.info("Workflow cache cleared via reload_cache tool")
+    return {"message": "Workflow cache cleared"}
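`_import.py` copies a validated YAML definition into either the global or the project workflow directory and then clears the loader cache so the new file is discoverable. An illustrative call, assuming the source file exists and its `name:` field is `review` (paths are examples only):

```python
from gobby.mcp_proxy.tools.workflows._import import import_workflow, reload_cache
from gobby.workflows.loader import WorkflowLoader

loader = WorkflowLoader()
import_workflow(loader, source_path="./review.yaml", is_global=True)
# -> {"success": True, "workflow_name": "review",
#     "destination": "<home>/.gobby/workflows/review.yaml", "is_global": True}

# After editing workflow YAML by hand, force the daemon to re-read it:
reload_cache(loader)
# -> {"message": "Workflow cache cleared"}
```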