gobby 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/claude_code.py +3 -26
- gobby/app_context.py +59 -0
- gobby/cli/utils.py +5 -17
- gobby/config/features.py +0 -20
- gobby/config/tasks.py +4 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +87 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +573 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/hook_manager.py +2 -0
- gobby/llm/claude.py +377 -42
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/registries.py +14 -0
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/tools/artifacts.py +3 -3
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
- gobby/mcp_proxy/tools/workflows/_query.py +207 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +23 -10
- gobby/servers/http.py +186 -149
- gobby/servers/routes/admin.py +12 -0
- gobby/servers/routes/mcp/endpoints/execution.py +15 -7
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/sessions/analyzer.py +2 -2
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/migrations.py +25 -2
- gobby/storage/skills.py +47 -7
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +5 -0
- gobby/workflows/context_actions.py +21 -24
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +96 -0
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/engine.py +6 -3
- gobby/workflows/lifecycle_evaluator.py +2 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/RECORD +61 -45
- gobby/hooks/event_handlers.py +0 -1008
- gobby/mcp_proxy/tools/workflows.py +0 -1023
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Import and cache tools for workflows.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import re
|
|
7
|
+
import shutil
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
import yaml
|
|
12
|
+
|
|
13
|
+
from gobby.utils.project_context import get_workflow_project_path
|
|
14
|
+
from gobby.workflows.loader import WorkflowLoader
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def import_workflow(
    loader: WorkflowLoader,
    source_path: str,
    workflow_name: str | None = None,
    is_global: bool = False,
    project_path: str | None = None,
) -> dict[str, Any]:
    """
    Import a workflow from a file.

    Validates the source YAML, sanitizes the workflow name to prevent
    path traversal, copies the file into the target workflows directory,
    and clears the loader cache so the new workflow is discoverable.

    Args:
        loader: WorkflowLoader instance
        source_path: Path to the workflow YAML file
        workflow_name: Override the workflow name (defaults to name in file)
        is_global: Install to global ~/.gobby/workflows instead of project
        project_path: Project directory path. Auto-discovered from cwd if not provided.

    Returns:
        Success status and destination path, or {"success": False, "error": ...}
        for every failure mode (this function reports errors via dicts, never
        by raising).
    """
    source = Path(source_path)
    # is_file() (not exists()) so a directory argument is rejected here
    # instead of crashing with IsADirectoryError at open() below.
    if not source.is_file():
        return {"success": False, "error": f"File not found: {source_path}"}

    if source.suffix != ".yaml":
        return {"success": False, "error": "Workflow file must have .yaml extension"}

    try:
        with open(source, encoding="utf-8") as f:
            data = yaml.safe_load(f)

        if not data or "name" not in data:
            return {"success": False, "error": "Invalid workflow: missing 'name' field"}

    except yaml.YAMLError as e:
        return {"success": False, "error": f"Invalid YAML: {e}"}
    except OSError as e:
        # Keep the error-dict contract even for filesystem races
        # (file deleted/unreadable between is_file() and open()).
        return {"success": False, "error": f"Could not read workflow file: {e}"}

    raw_name = workflow_name or data.get("name", source.stem)
    # Sanitize name to prevent path traversal: strip path components, allow only safe chars
    safe_name = Path(raw_name).name  # Strip any path components
    safe_name = re.sub(r"[^a-zA-Z0-9_\-.]", "_", safe_name)  # Replace unsafe chars
    safe_name = safe_name.strip("._")  # Remove leading/trailing dots and underscores
    if not safe_name:
        safe_name = source.stem  # Fallback to source filename
    filename = f"{safe_name}.yaml"

    if is_global:
        dest_dir = Path.home() / ".gobby" / "workflows"
    else:
        # Auto-discover project path if not provided
        if not project_path:
            discovered = get_workflow_project_path()
            if discovered:
                project_path = str(discovered)

        proj = Path(project_path) if project_path else None
        if not proj:
            return {
                "success": False,
                "error": "project_path required when not using is_global (could not auto-discover)",
            }
        dest_dir = proj / ".gobby" / "workflows"

    dest_path = dest_dir / filename
    try:
        dest_dir.mkdir(parents=True, exist_ok=True)
        shutil.copy(source, dest_path)
    except OSError as e:
        # Permission problems / read-only destinations surface as an error
        # dict instead of an unhandled exception.
        return {"success": False, "error": f"Could not install workflow: {e}"}

    # Clear loader cache so new workflow is discoverable
    loader.clear_cache()

    return {
        "success": True,
        "workflow_name": safe_name,
        "destination": str(dest_path),
        "is_global": is_global,
    }
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def reload_cache(loader: WorkflowLoader) -> dict[str, Any]:
    """
    Clear the workflow loader cache.

    Forces the daemon to re-read workflow YAML files from disk on the next
    access. Useful after editing workflow files when you want the changes
    picked up immediately, without restarting the daemon.

    Args:
        loader: WorkflowLoader whose cache should be dropped.

    Returns:
        Success status message.
    """
    result = {"message": "Workflow cache cleared"}
    loader.clear_cache()
    logger.info("Workflow cache cleared via reload_cache tool")
    return result
|
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Lifecycle tools for workflows (activate, end, transition).
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from datetime import UTC, datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from gobby.mcp_proxy.tools.workflows._resolution import (
|
|
10
|
+
resolve_session_id,
|
|
11
|
+
resolve_session_task_value,
|
|
12
|
+
)
|
|
13
|
+
from gobby.storage.database import DatabaseProtocol
|
|
14
|
+
from gobby.storage.sessions import LocalSessionManager
|
|
15
|
+
from gobby.utils.project_context import get_workflow_project_path
|
|
16
|
+
from gobby.workflows.definitions import WorkflowState
|
|
17
|
+
from gobby.workflows.loader import WorkflowLoader
|
|
18
|
+
from gobby.workflows.state_manager import WorkflowStateManager
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def activate_workflow(
    loader: WorkflowLoader,
    state_manager: WorkflowStateManager,
    session_manager: LocalSessionManager,
    db: DatabaseProtocol,
    name: str,
    session_id: str | None = None,
    initial_step: str | None = None,
    variables: dict[str, Any] | None = None,
    project_path: str | None = None,
) -> dict[str, Any]:
    """
    Activate a step-based workflow for the current session.

    Refuses lifecycle-type workflows (those auto-run on events) and refuses
    to stack a second step workflow on a session that already has one.
    All failures are reported via {"success": False, "error": ...} dicts;
    this function does not raise.

    Args:
        loader: WorkflowLoader instance
        state_manager: WorkflowStateManager instance
        session_manager: LocalSessionManager instance
        db: LocalDatabase instance
        name: Workflow name (e.g., "plan-act-reflect", "auto-task")
        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
        initial_step: Optional starting step (defaults to first step)
        variables: Optional initial variables to set (merged with workflow defaults)
        project_path: Project directory path. Auto-discovered from cwd if not provided.

    Returns:
        Success status, workflow info, and current step.
    """
    # Auto-discover project path if not provided
    if not project_path:
        discovered = get_workflow_project_path()
        if discovered:
            project_path = str(discovered)

    proj = Path(project_path) if project_path else None

    # Load workflow
    definition = loader.load_workflow(name, proj)
    if not definition:
        return {"success": False, "error": f"Workflow '{name}' not found"}

    if definition.type == "lifecycle":
        return {
            "success": False,
            "error": f"Workflow '{name}' is lifecycle type (auto-runs on events, not manually activated)",
        }

    # Require explicit session_id to prevent cross-session bleed
    if not session_id:
        return {
            "success": False,
            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
        }

    # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
    try:
        resolved_session_id = resolve_session_id(session_manager, session_id)
    except ValueError as e:
        return {"success": False, "error": str(e)}

    # Check for existing workflow
    # Allow if:
    # - No existing state
    # - Existing is __lifecycle__ placeholder
    # - Existing is a lifecycle-type workflow (they run concurrently with step workflows)
    existing = state_manager.get_state(resolved_session_id)
    if existing and existing.workflow_name != "__lifecycle__":
        # Check if existing workflow is a lifecycle type
        existing_def = loader.load_workflow(existing.workflow_name, proj)
        # Only allow if we can confirm it's a lifecycle workflow
        # If definition not found or it's a step workflow, block activation
        if not existing_def or existing_def.type != "lifecycle":
            # It's a step workflow (or unknown) - can only have one active
            return {
                "success": False,
                "error": f"Session already has step workflow '{existing.workflow_name}' active. Use end_workflow first.",
            }
        # Existing is a lifecycle workflow - allow step workflow to activate alongside it

    # Determine initial step
    if initial_step:
        if not any(s.name == initial_step for s in definition.steps):
            return {
                "success": False,
                "error": f"Step '{initial_step}' not found. Available: {[s.name for s in definition.steps]}",
            }
        step = initial_step
    else:
        if not definition.steps:
            return {
                "success": False,
                "error": f"Workflow '{name}' has no steps defined. Cannot activate a workflow without steps.",
            }
        step = definition.steps[0].name

    # Merge variables: preserve existing lifecycle variables, then apply workflow declarations
    # Priority: existing state < workflow defaults < passed-in variables
    # This preserves lifecycle variables (like unlocked_tools) that the step workflow doesn't declare
    merged_variables = dict(existing.variables) if existing else {}
    merged_variables.update(definition.variables)  # Override with workflow-declared defaults
    if variables:
        merged_variables.update(variables)  # Override with passed-in values

    # Resolve session_task references (#N or N) to UUIDs upfront
    # This prevents repeated resolution failures in condition evaluation
    if "session_task" in merged_variables:
        session_task_val = merged_variables["session_task"]
        if isinstance(session_task_val, str):
            merged_variables["session_task"] = resolve_session_task_value(
                session_task_val, resolved_session_id, session_manager, db
            )

    # Create state (fresh counters/artifacts; existing state is overwritten by save_state)
    state = WorkflowState(
        session_id=resolved_session_id,
        workflow_name=name,
        step=step,
        step_entered_at=datetime.now(UTC),
        step_action_count=0,
        total_action_count=0,
        artifacts={},
        observations=[],
        reflection_pending=False,
        context_injected=False,
        variables=merged_variables,
        task_list=None,
        current_task_index=0,
        files_modified_this_task=0,
    )

    state_manager.save_state(state)

    return {
        "success": True,
        "session_id": resolved_session_id,
        "workflow": name,
        "step": step,
        "steps": [s.name for s in definition.steps],
        "variables": merged_variables,
    }
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def end_workflow(
    loader: WorkflowLoader,
    state_manager: WorkflowStateManager,
    session_manager: LocalSessionManager,
    session_id: str | None = None,
    reason: str | None = None,
    project_path: str | None = None,
) -> dict[str, Any]:
    """
    End the currently active step-based workflow.

    After ending, a different workflow may be started for the session.
    Lifecycle workflows are unaffected (they keep running and cannot be
    ended through this tool).

    Args:
        loader: WorkflowLoader instance
        state_manager: WorkflowStateManager instance
        session_manager: LocalSessionManager instance
        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
        reason: Optional reason for ending
        project_path: Project directory path. Auto-discovered from cwd if not provided.

    Returns:
        Success status
    """
    # An explicit session reference is mandatory so one session cannot
    # accidentally tear down another session's workflow.
    if not session_id:
        return {
            "success": False,
            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
        }

    # Normalize the reference (#N, N, UUID, or prefix) to a session UUID.
    try:
        sid = resolve_session_id(session_manager, session_id)
    except ValueError as e:
        return {"success": False, "error": str(e)}

    active_state = state_manager.get_state(sid)
    if not active_state:
        return {"success": False, "error": "No workflow active for session"}

    # Discover the project directory when the caller did not supply one,
    # then load the definition to see whether this is a lifecycle workflow.
    if not project_path and (found := get_workflow_project_path()):
        project_path = str(found)

    project_dir = Path(project_path) if project_path else None
    wf_def = loader.load_workflow(active_state.workflow_name, project_dir)

    # Lifecycle workflows run on events and may not be ended by hand.
    if wf_def and wf_def.type == "lifecycle":
        return {
            "success": False,
            "error": f"Workflow '{active_state.workflow_name}' is lifecycle type (auto-runs on events, cannot be manually ended).",
        }

    state_manager.delete_state(sid)

    return {"success": True, "workflow": active_state.workflow_name, "reason": reason}
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def request_step_transition(
    loader: WorkflowLoader,
    state_manager: WorkflowStateManager,
    session_manager: LocalSessionManager,
    to_step: str,
    reason: str | None = None,
    session_id: str | None = None,
    force: bool = False,
    session_id_note: None = None,
) -> dict[str, Any]:
    """
    Request transition to a different step. May require approval.

    Args:
        loader: WorkflowLoader instance
        state_manager: WorkflowStateManager instance
        session_manager: LocalSessionManager instance
        to_step: Target step name
        reason: Reason for transition
        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
        force: Skip exit condition checks
        project_path: Project directory path. Auto-discovered from cwd if not provided.

    Returns:
        Success status and new step info
    """
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Query tools for workflows.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import yaml
|
|
10
|
+
|
|
11
|
+
from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_id
|
|
12
|
+
from gobby.storage.sessions import LocalSessionManager
|
|
13
|
+
from gobby.utils.project_context import get_workflow_project_path
|
|
14
|
+
from gobby.workflows.loader import WorkflowLoader
|
|
15
|
+
from gobby.workflows.state_manager import WorkflowStateManager
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_workflow(
    loader: WorkflowLoader,
    name: str,
    project_path: str | None = None,
) -> dict[str, Any]:
    """
    Get workflow details including steps, triggers, and settings.

    Args:
        loader: WorkflowLoader instance
        name: Workflow name (without .yaml extension)
        project_path: Project directory path. Auto-discovered from cwd if not provided.

    Returns:
        Workflow definition details, or an error dict if the workflow
        cannot be found.
    """
    # Fall back to cwd-based discovery when no project path was supplied.
    if not project_path and (found := get_workflow_project_path()):
        project_path = str(found)

    root = Path(project_path) if project_path else None
    definition = loader.load_workflow(name, root)

    if not definition:
        return {"success": False, "error": f"Workflow '{name}' not found"}

    # Summarize each step; empty/absent step lists yield [].
    step_summaries: list[dict[str, Any]] = []
    if definition.steps:
        for s in definition.steps:
            step_summaries.append(
                {
                    "name": s.name,
                    "description": s.description,
                    "allowed_tools": s.allowed_tools,
                    "blocked_tools": s.blocked_tools,
                }
            )

    # Triggers are reported as name -> number of actions, not full bodies.
    trigger_counts: dict[str, int] = {}
    if definition.triggers:
        trigger_counts = {trig: len(acts) for trig, acts in definition.triggers.items()}

    return {
        "success": True,
        "name": definition.name,
        "type": definition.type,
        "description": definition.description,
        "version": definition.version,
        "steps": step_summaries,
        "triggers": trigger_counts,
        "settings": definition.settings,
    }
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def list_workflows(
    loader: WorkflowLoader,
    project_path: str | None = None,
    workflow_type: str | None = None,
    global_only: bool = False,
) -> dict[str, Any]:
    """
    List available workflows.

    Lists workflows from both project (.gobby/workflows) and global (~/.gobby/workflows)
    directories. Project workflows shadow global ones with the same name —
    including when the shadowing project workflow is excluded by the
    workflow_type filter (the global one must not leak through).

    Args:
        loader: WorkflowLoader instance
        project_path: Project directory path. Auto-discovered from cwd if not provided.
        workflow_type: Filter by type ("step" or "lifecycle")
        global_only: If True, only show global workflows (ignore project)

    Returns:
        List of workflows with name, type, description, and source
    """

    # Auto-discover project path if not provided
    if not project_path:
        discovered = get_workflow_project_path()
        if discovered:
            project_path = str(discovered)

    search_dirs = list(loader.global_dirs)
    proj = Path(project_path) if project_path else None

    # Include project workflows unless global_only (project searched first to shadow global)
    if not global_only and proj:
        project_dir = proj / ".gobby" / "workflows"
        if project_dir.exists():
            search_dirs.insert(0, project_dir)

    workflows = []
    seen_names = set()

    for search_dir in search_dirs:
        if not search_dir.exists():
            continue

        is_project = proj and search_dir == (proj / ".gobby" / "workflows")

        for yaml_path in search_dir.glob("*.yaml"):
            name = yaml_path.stem
            if name in seen_names:
                continue

            try:
                with open(yaml_path, encoding="utf-8") as f:
                    data = yaml.safe_load(f)

                if not data:
                    continue

                # Mark the name as seen as soon as the file parses, BEFORE the
                # type filter: a project workflow must shadow a same-named
                # global one even when the project workflow is filtered out
                # of this listing. (Previously the name was only recorded
                # after the filter, letting the global version leak through.)
                seen_names.add(name)

                wf_type = data.get("type", "step")

                if workflow_type and wf_type != workflow_type:
                    continue

                workflows.append(
                    {
                        "name": name,
                        "type": wf_type,
                        "description": data.get("description", ""),
                        "source": "project" if is_project else "global",
                    }
                )

            except Exception as e:
                # Unparseable files are skipped with a debug trace; the global
                # fallback for a broken project file is deliberate best-effort.
                logger.debug(
                    "Skipping invalid workflow file %s: %s",
                    yaml_path,
                    e,
                    exc_info=True,
                )  # nosec B110

    return {"workflows": workflows, "count": len(workflows)}
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def get_workflow_status(
    state_manager: WorkflowStateManager,
    session_manager: LocalSessionManager,
    session_id: str | None = None,
) -> dict[str, Any]:
    """
    Get current workflow step and state.

    Args:
        state_manager: WorkflowStateManager instance
        session_manager: LocalSessionManager instance
        session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed

    Returns:
        Workflow state including step, action counts, artifacts; or
        {"has_workflow": False, ...} when no workflow is active.
    """
    # An explicit session reference is mandatory so status for one session
    # is never reported against another.
    if not session_id:
        return {
            "has_workflow": False,
            "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
        }

    # Normalize the reference (#N, N, UUID, or prefix) to a session UUID.
    try:
        sid = resolve_session_id(session_manager, session_id)
    except ValueError as e:
        return {"has_workflow": False, "error": str(e)}

    ws = state_manager.get_state(sid)
    if not ws:
        return {"has_workflow": False, "session_id": sid}

    artifact_keys = list(ws.artifacts.keys()) if ws.artifacts else []

    progress = None
    if ws.task_list:
        progress = f"{ws.current_task_index + 1}/{len(ws.task_list)}"

    last_update = ws.updated_at.isoformat() if ws.updated_at else None

    return {
        "has_workflow": True,
        "session_id": sid,
        "workflow_name": ws.workflow_name,
        "step": ws.step,
        "step_action_count": ws.step_action_count,
        "total_action_count": ws.total_action_count,
        "reflection_pending": ws.reflection_pending,
        "artifacts": artifact_keys,
        "variables": ws.variables,
        "task_progress": progress,
        "updated_at": last_update,
    }