gobby-0.2.9-py3-none-any.whl → gobby-0.2.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +6 -0
- gobby/adapters/base.py +11 -2
- gobby/adapters/claude_code.py +2 -2
- gobby/adapters/codex_impl/adapter.py +38 -43
- gobby/adapters/copilot.py +324 -0
- gobby/adapters/cursor.py +373 -0
- gobby/adapters/gemini.py +2 -26
- gobby/adapters/windsurf.py +359 -0
- gobby/agents/definitions.py +162 -2
- gobby/agents/isolation.py +33 -1
- gobby/agents/pty_reader.py +192 -0
- gobby/agents/registry.py +10 -1
- gobby/agents/runner.py +24 -8
- gobby/agents/sandbox.py +8 -3
- gobby/agents/session.py +4 -0
- gobby/agents/spawn.py +9 -2
- gobby/agents/spawn_executor.py +49 -61
- gobby/agents/spawners/command_builder.py +4 -4
- gobby/app_context.py +5 -0
- gobby/cli/__init__.py +4 -0
- gobby/cli/install.py +259 -4
- gobby/cli/installers/__init__.py +12 -0
- gobby/cli/installers/copilot.py +242 -0
- gobby/cli/installers/cursor.py +244 -0
- gobby/cli/installers/shared.py +3 -0
- gobby/cli/installers/windsurf.py +242 -0
- gobby/cli/pipelines.py +639 -0
- gobby/cli/sessions.py +3 -1
- gobby/cli/skills.py +209 -0
- gobby/cli/tasks/crud.py +6 -5
- gobby/cli/tasks/search.py +1 -1
- gobby/cli/ui.py +116 -0
- gobby/cli/workflows.py +38 -17
- gobby/config/app.py +5 -0
- gobby/config/skills.py +23 -2
- gobby/hooks/broadcaster.py +9 -0
- gobby/hooks/event_handlers/_base.py +6 -1
- gobby/hooks/event_handlers/_session.py +44 -130
- gobby/hooks/events.py +48 -0
- gobby/hooks/hook_manager.py +25 -3
- gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
- gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
- gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
- gobby/llm/__init__.py +14 -1
- gobby/llm/claude.py +217 -1
- gobby/llm/service.py +149 -0
- gobby/mcp_proxy/instructions.py +9 -27
- gobby/mcp_proxy/models.py +1 -0
- gobby/mcp_proxy/registries.py +56 -9
- gobby/mcp_proxy/server.py +6 -2
- gobby/mcp_proxy/services/tool_filter.py +7 -0
- gobby/mcp_proxy/services/tool_proxy.py +19 -1
- gobby/mcp_proxy/stdio.py +37 -21
- gobby/mcp_proxy/tools/agents.py +7 -0
- gobby/mcp_proxy/tools/hub.py +30 -1
- gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
- gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
- gobby/mcp_proxy/tools/orchestration/review.py +17 -4
- gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
- gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
- gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
- gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
- gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
- gobby/mcp_proxy/tools/skills/__init__.py +184 -30
- gobby/mcp_proxy/tools/spawn_agent.py +229 -14
- gobby/mcp_proxy/tools/tasks/_context.py +8 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
- gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
- gobby/mcp_proxy/tools/tasks/_search.py +1 -1
- gobby/mcp_proxy/tools/workflows/__init__.py +9 -2
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +12 -1
- gobby/mcp_proxy/tools/workflows/_query.py +45 -26
- gobby/mcp_proxy/tools/workflows/_terminal.py +39 -3
- gobby/mcp_proxy/tools/worktrees.py +54 -15
- gobby/memory/context.py +5 -5
- gobby/runner.py +108 -6
- gobby/servers/http.py +7 -1
- gobby/servers/routes/__init__.py +2 -0
- gobby/servers/routes/admin.py +44 -0
- gobby/servers/routes/mcp/endpoints/execution.py +18 -25
- gobby/servers/routes/mcp/hooks.py +10 -1
- gobby/servers/routes/pipelines.py +227 -0
- gobby/servers/websocket.py +314 -1
- gobby/sessions/analyzer.py +87 -1
- gobby/sessions/manager.py +5 -5
- gobby/sessions/transcripts/__init__.py +3 -0
- gobby/sessions/transcripts/claude.py +5 -0
- gobby/sessions/transcripts/codex.py +5 -0
- gobby/sessions/transcripts/gemini.py +5 -0
- gobby/skills/hubs/__init__.py +25 -0
- gobby/skills/hubs/base.py +234 -0
- gobby/skills/hubs/claude_plugins.py +328 -0
- gobby/skills/hubs/clawdhub.py +289 -0
- gobby/skills/hubs/github_collection.py +465 -0
- gobby/skills/hubs/manager.py +263 -0
- gobby/skills/hubs/skillhub.py +342 -0
- gobby/storage/memories.py +4 -4
- gobby/storage/migrations.py +95 -3
- gobby/storage/pipelines.py +367 -0
- gobby/storage/sessions.py +23 -4
- gobby/storage/skills.py +1 -1
- gobby/storage/tasks/_aggregates.py +2 -2
- gobby/storage/tasks/_lifecycle.py +4 -4
- gobby/storage/tasks/_models.py +7 -1
- gobby/storage/tasks/_queries.py +3 -3
- gobby/sync/memories.py +4 -3
- gobby/tasks/commits.py +48 -17
- gobby/workflows/actions.py +75 -0
- gobby/workflows/context_actions.py +246 -5
- gobby/workflows/definitions.py +119 -1
- gobby/workflows/detection_helpers.py +23 -11
- gobby/workflows/enforcement/task_policy.py +18 -0
- gobby/workflows/engine.py +20 -1
- gobby/workflows/evaluator.py +8 -5
- gobby/workflows/lifecycle_evaluator.py +57 -26
- gobby/workflows/loader.py +567 -30
- gobby/workflows/lobster_compat.py +147 -0
- gobby/workflows/pipeline_executor.py +801 -0
- gobby/workflows/pipeline_state.py +172 -0
- gobby/workflows/pipeline_webhooks.py +206 -0
- gobby/workflows/premature_stop.py +5 -0
- gobby/worktrees/git.py +135 -20
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/RECORD +134 -106
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/orchestration/orchestrate.py

```diff
@@ -35,7 +35,9 @@ def register_orchestrator(
 
     async def orchestrate_ready_tasks(
         parent_task_id: str,
-        provider: Literal[
+        provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ] = "gemini",
         model: str | None = None,
         terminal: str = "auto",
         mode: str = "terminal",
@@ -43,7 +45,10 @@ def register_orchestrator(
         max_concurrent: int = 3,
         parent_session_id: str | None = None,
         project_path: str | None = None,
-        coding_provider: Literal[
+        coding_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ]
+        | None = None,
         coding_model: str | None = None,
         base_branch: str | None = None,
     ) -> dict[str, Any]:
@@ -652,7 +657,7 @@ def register_orchestrator(
                 },
                 "provider": {
                     "type": "string",
-                    "description": "Fallback LLM provider (claude, gemini, codex, antigravity)",
+                    "description": "Fallback LLM provider (claude, gemini, codex, antigravity, cursor, windsurf, copilot)",
                     "default": "gemini",
                 },
                 "model": {
```
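The widened `Literal` union is the core of this release: three editor-integrated providers (cursor, windsurf, copilot) join the original four. A minimal, self-contained sketch of the same pattern — the alias `ProviderName` and helper `validate_provider` are illustrative names, not gobby APIs — showing why a static `Literal` is usually paired with a runtime check, since MCP tool arguments arrive as plain strings:

```python
from typing import Literal, get_args

# Mirrors the widened provider union from the hunks above; the alias name is ours.
ProviderName = Literal[
    "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
]

def validate_provider(value: str) -> ProviderName:
    # Literal only constrains static type checking, so runtime inputs
    # still need an explicit membership test.
    allowed = get_args(ProviderName)
    if value not in allowed:
        raise ValueError(f"provider must be one of {allowed}, got {value!r}")
    return value  # type: ignore[return-value]

print(validate_provider("windsurf"))  # accepted in 0.2.11; outside 0.2.9's narrower union
```

The review tools below do exactly this pairing, keeping an `allowed_providers` set alongside the annotation.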
gobby/mcp_proxy/tools/orchestration/review.py

```diff
@@ -30,7 +30,9 @@ def register_reviewer(
 
     async def spawn_review_agent(
         task_id: str,
-        review_provider: Literal[
+        review_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ] = "claude",
         review_model: str | None = "claude-opus-4-5",
         terminal: str = "auto",
         mode: str = "terminal",
@@ -61,7 +63,15 @@ def register_reviewer(
         """
         # Validate mode and review_provider
         allowed_modes = {"terminal", "embedded", "headless"}
-        allowed_providers = {
+        allowed_providers = {
+            "claude",
+            "gemini",
+            "codex",
+            "antigravity",
+            "cursor",
+            "windsurf",
+            "copilot",
+        }
 
         mode_lower = mode.lower() if mode else "terminal"
         if mode_lower not in allowed_modes:
@@ -314,7 +324,7 @@ def register_reviewer(
                 },
                 "review_provider": {
                     "type": "string",
-                    "description": "LLM provider for review (claude, gemini, codex, antigravity)",
+                    "description": "LLM provider for review (claude, gemini, codex, antigravity, cursor, windsurf, copilot)",
                     "default": "claude",
                 },
                 "review_model": {
@@ -350,7 +360,10 @@ def register_reviewer(
     async def process_completed_agents(
         parent_session_id: str,
         spawn_reviews: bool = True,
-        review_provider: Literal[
+        review_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ]
+        | None = None,
         review_model: str | None = None,
         terminal: str = "auto",
         mode: str = "terminal",
```
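A standalone sketch of the validation pattern `spawn_review_agent` now uses — the two sets are taken verbatim from the hunk above, while the function shape and the error dict are illustrative:

```python
allowed_modes = {"terminal", "embedded", "headless"}
allowed_providers = {
    "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot",
}

def check_review_args(mode: str | None, review_provider: str) -> dict[str, str]:
    # Mode defaults to "terminal" when unset, matching the hunk above.
    mode_lower = mode.lower() if mode else "terminal"
    if mode_lower not in allowed_modes:
        return {"error": f"invalid mode: {mode_lower}"}
    if review_provider not in allowed_providers:
        return {"error": f"invalid review_provider: {review_provider}"}
    return {"mode": mode_lower, "review_provider": review_provider}

assert check_review_args(None, "copilot") == {
    "mode": "terminal",
    "review_provider": "copilot",
}
```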
gobby/mcp_proxy/tools/orchestration/wait.py

```diff
@@ -62,7 +62,7 @@ def register_wait(
 
         # Consider task complete if status is "closed" or "review"
         # (review tasks have completed their work, just awaiting human approval)
-        is_complete = task.status in ("closed", "
+        is_complete = task.status in ("closed", "needs_review")
         return is_complete, task_info
 
     async def wait_for_task(
@@ -73,7 +73,7 @@ def register_wait(
         """
         Wait for a single task to complete.
 
-        Blocks until the task reaches "closed" or "
+        Blocks until the task reaches "closed" or "needs_review" status, or timeout expires.
 
         Args:
             task_id: Task reference (#N, N, path, or UUID)
@@ -170,7 +170,7 @@ def register_wait(
         name="wait_for_task",
         description=(
             "Wait for a single task to complete. "
-            "Blocks until task reaches 'closed' or '
+            "Blocks until task reaches 'closed' or 'needs_review' status, or timeout expires."
         ),
         input_schema={
             "type": "object",
@@ -201,7 +201,7 @@ def register_wait(
         """
         Wait for any one of multiple tasks to complete.
 
-        Blocks until at least one task reaches "closed" or "
+        Blocks until at least one task reaches "closed" or "needs_review" status, or timeout expires.
 
         Args:
             task_ids: List of task references (#N, N, path, or UUID)
@@ -290,7 +290,7 @@ def register_wait(
         name="wait_for_any_task",
         description=(
             "Wait for any one of multiple tasks to complete. "
-            "Returns as soon as the first task reaches 'closed' or '
+            "Returns as soon as the first task reaches 'closed' or 'needs_review' status."
        ),
         input_schema={
             "type": "object",
@@ -322,7 +322,7 @@ def register_wait(
         """
         Wait for all tasks to complete.
 
-        Blocks until all tasks reach "closed" or "
+        Blocks until all tasks reach "closed" or "needs_review" status, or timeout expires.
 
        Args:
             task_ids: List of task references (#N, N, path, or UUID)
@@ -442,7 +442,7 @@ def register_wait(
         name="wait_for_all_tasks",
         description=(
             "Wait for all tasks to complete. "
-            "Blocks until all tasks reach 'closed' or '
+            "Blocks until all tasks reach 'closed' or 'needs_review' status, or timeout expires."
         ),
         input_schema={
             "type": "object",
```
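All seven wait.py hunks are the same one-line change: the completion predicate and the docstrings that restate it now name the `needs_review` status. The predicate in isolation — the `Task` dataclass here is a stand-in for gobby's richer task model:

```python
from dataclasses import dataclass

@dataclass
class Task:
    status: str

def is_task_complete(task: Task) -> bool:
    # "needs_review" tasks have finished their work and only await human
    # approval, so the wait_* tools treat them as complete alongside "closed".
    return task.status in ("closed", "needs_review")

assert is_task_complete(Task(status="needs_review"))
assert not is_task_complete(Task(status="in_progress"))  # stand-in non-terminal status
```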
gobby/mcp_proxy/tools/pipelines/__init__.py (new file)

```diff
@@ -0,0 +1,254 @@
+"""
+Internal MCP tools for Gobby Pipeline System.
+
+Exposes functionality for:
+- list_pipelines: Discover available pipeline definitions
+- Dynamic pipeline tools: Pipelines with expose_as_tool=True are exposed as MCP tools
+
+These tools are registered with the InternalToolRegistry and accessed
+via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
+"""
+
+import logging
+from typing import Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.mcp_proxy.tools.pipelines._discovery import list_pipelines
+from gobby.mcp_proxy.tools.pipelines._execution import (
+    approve_pipeline,
+    get_pipeline_status,
+    reject_pipeline,
+    run_pipeline,
+)
+
+logger = logging.getLogger(__name__)
+
+__all__ = [
+    "create_pipelines_registry",
+]
+
+
+def create_pipelines_registry(
+    loader: Any | None = None,
+    executor: Any | None = None,
+    execution_manager: Any | None = None,
+) -> InternalToolRegistry:
+    """
+    Create a pipeline tool registry with all pipeline-related tools.
+
+    Args:
+        loader: WorkflowLoader instance for discovering pipelines
+        executor: PipelineExecutor instance for running pipelines
+        execution_manager: LocalPipelineExecutionManager for tracking executions
+
+    Returns:
+        InternalToolRegistry with pipeline tools registered
+    """
+    _loader = loader
+    _executor = executor
+    _execution_manager = execution_manager
+
+    registry = InternalToolRegistry(
+        name="gobby-pipelines",
+        description="Pipeline management - list, run, and monitor pipeline executions",
+    )
+
+    # Register dynamic tools for pipelines with expose_as_tool=True
+    _register_exposed_pipeline_tools(registry, _loader, _executor)
+
+    @registry.tool(
+        name="list_pipelines",
+        description="List available pipeline definitions from project and global directories.",
+    )
+    def _list_pipelines(
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return list_pipelines(_loader, project_path)
+
+    @registry.tool(
+        name="run_pipeline",
+        description="Run a pipeline by name with given inputs.",
+    )
+    async def _run_pipeline(
+        name: str,
+        inputs: dict[str, Any] | None = None,
+        project_id: str | None = None,
+    ) -> dict[str, Any]:
+        return await run_pipeline(
+            loader=_loader,
+            executor=_executor,
+            name=name,
+            inputs=inputs or {},
+            project_id=project_id or "",
+        )
+
+    @registry.tool(
+        name="approve_pipeline",
+        description="Approve a pipeline execution that is waiting for approval.",
+    )
+    async def _approve_pipeline(
+        token: str,
+        approved_by: str | None = None,
+    ) -> dict[str, Any]:
+        return await approve_pipeline(
+            executor=_executor,
+            token=token,
+            approved_by=approved_by,
+        )
+
+    @registry.tool(
+        name="reject_pipeline",
+        description="Reject a pipeline execution that is waiting for approval.",
+    )
+    async def _reject_pipeline(
+        token: str,
+        rejected_by: str | None = None,
+    ) -> dict[str, Any]:
+        return await reject_pipeline(
+            executor=_executor,
+            token=token,
+            rejected_by=rejected_by,
+        )
+
+    @registry.tool(
+        name="get_pipeline_status",
+        description="Get the status of a pipeline execution including step details.",
+    )
+    def _get_pipeline_status(
+        execution_id: str,
+    ) -> dict[str, Any]:
+        return get_pipeline_status(
+            execution_manager=_execution_manager,
+            execution_id=execution_id,
+        )
+
+    return registry
+
+
+def _register_exposed_pipeline_tools(
+    registry: InternalToolRegistry,
+    loader: Any | None,
+    executor: Any | None,
+) -> None:
+    """
+    Register dynamic tools for pipelines with expose_as_tool=True.
+
+    Each exposed pipeline becomes an MCP tool named "pipeline:<pipeline_name>".
+
+    Args:
+        registry: The registry to add tools to
+        loader: WorkflowLoader for discovering pipelines
+        executor: PipelineExecutor for running pipelines
+    """
+    if loader is None:
+        logger.debug("Skipping dynamic pipeline tools: no loader")
+        return
+
+    try:
+        discovered = loader.discover_pipeline_workflows()
+    except Exception:
+        logger.warning("Failed to discover pipelines for dynamic tools", exc_info=True)
+        return
+
+    for workflow in discovered:
+        pipeline = workflow.definition
+
+        # Only expose pipelines with expose_as_tool=True
+        if not getattr(pipeline, "expose_as_tool", False):
+            continue
+
+        _create_pipeline_tool(registry, pipeline, loader, executor)
+
+
+def _create_pipeline_tool(
+    registry: InternalToolRegistry,
+    pipeline: Any,
+    loader: Any,
+    executor: Any | None,
+) -> None:
+    """
+    Create a dynamic tool for a single pipeline.
+
+    Args:
+        registry: The registry to add the tool to
+        pipeline: The PipelineDefinition to expose
+        loader: WorkflowLoader for loading pipelines
+        executor: PipelineExecutor for running pipelines
+    """
+    tool_name = f"pipeline:{pipeline.name}"
+    description = pipeline.description or f"Run the {pipeline.name} pipeline"
+
+    # Build input schema from pipeline inputs
+    input_schema = _build_input_schema(pipeline)
+
+    # Create closure to capture pipeline name
+    pipeline_name = pipeline.name
+
+    async def _execute_pipeline(**kwargs: Any) -> dict[str, Any]:
+        return await run_pipeline(
+            loader=loader,
+            executor=executor,
+            name=pipeline_name,
+            inputs=kwargs,
+            project_id="",
+        )
+
+    # Register the tool with the schema
+    registry.register(
+        name=tool_name,
+        description=description,
+        func=_execute_pipeline,
+        input_schema=input_schema,
+    )
+
+    logger.debug(f"Registered dynamic pipeline tool: {tool_name}")
+
+
+def _build_input_schema(pipeline: Any) -> dict[str, Any]:
+    """
+    Build JSON Schema for pipeline inputs.
+
+    Args:
+        pipeline: The PipelineDefinition
+
+    Returns:
+        JSON Schema dict for the pipeline's inputs
+    """
+    properties = {}
+    required = []
+
+    for name, input_def in pipeline.inputs.items():
+        if isinstance(input_def, dict):
+            # Input is already a schema-like dict
+            prop = {}
+            if "type" in input_def:
+                prop["type"] = input_def["type"]
+            else:
+                prop["type"] = "string"
+
+            if "description" in input_def:
+                prop["description"] = input_def["description"]
+
+            if "default" in input_def:
+                prop["default"] = input_def["default"]
+            else:
+                # No default means required
+                required.append(name)
+
+            properties[name] = prop
+        else:
+            # Input is a simple default value
+            properties[name] = {
+                "type": "string",
+                "default": input_def,
+            }
+
+    schema: dict[str, Any] = {
+        "type": "object",
+        "properties": properties,
+    }
+
+    if required:
+        schema["required"] = required
+
+    return schema
```
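To make `_build_input_schema` concrete, here is a worked example with a stub pipeline — the input names and values are invented, and `SimpleNamespace` stands in for a real `PipelineDefinition`:

```python
from types import SimpleNamespace

stub = SimpleNamespace(inputs={
    "branch": {"type": "string", "description": "Branch to build"},  # no default -> required
    "env": "production",                                             # bare value -> string default
})

# Tracing the function above, _build_input_schema(stub) would return:
expected = {
    "type": "object",
    "properties": {
        "branch": {"type": "string", "description": "Branch to build"},
        "env": {"type": "string", "default": "production"},
    },
    "required": ["branch"],
}
```

Per `_create_pipeline_tool`, a pipeline named `deploy` (name invented here) with `expose_as_tool=True` would surface as the MCP tool `pipeline:deploy` carrying this schema.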
gobby/mcp_proxy/tools/pipelines/_discovery.py (new file)

```diff
@@ -0,0 +1,67 @@
+"""Pipeline discovery tools."""
+
+import logging
+from typing import Any
+
+logger = logging.getLogger(__name__)
+
+
+def list_pipelines(
+    loader: Any,
+    project_path: str | None = None,
+) -> dict[str, Any]:
+    """
+    List available pipeline definitions.
+
+    Args:
+        loader: WorkflowLoader instance
+        project_path: Optional project path for project-specific pipelines
+
+    Returns:
+        Dict with success status and list of pipeline info
+    """
+    if not loader:
+        return {
+            "success": False,
+            "error": "No loader configured",
+            "pipelines": [],
+        }
+
+    try:
+        discovered = loader.discover_pipeline_workflows(project_path=project_path)
+
+        pipelines = []
+        for workflow in discovered:
+            pipeline_info = {
+                "name": workflow.name,
+                "description": workflow.definition.description,
+                "is_project": workflow.is_project,
+                "path": str(workflow.path),
+                "priority": workflow.priority,
+            }
+
+            # Add step count if available
+            if hasattr(workflow.definition, "steps"):
+                pipeline_info["step_count"] = len(workflow.definition.steps)
+
+            pipelines.append(pipeline_info)
+
+        return {
+            "success": True,
+            "pipelines": pipelines,
+            "count": len(pipelines),
+        }
+
+    except (FileNotFoundError, ValueError) as e:
+        return {
+            "success": False,
+            "error": str(e),
+            "pipelines": [],
+        }
+    except Exception:
+        logger.exception("Unexpected error discovering pipelines")
+        return {
+            "success": False,
+            "error": "Internal error during pipeline discovery",
+            "pipelines": [],
+        }
```
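The result envelope is uniform: `success` and `pipelines` are always present, joined by `count` on success or `error` on failure. A successful response would look like this — entry values are invented for illustration:

```python
example_result = {
    "success": True,
    "pipelines": [
        {
            "name": "release",                               # illustrative
            "description": "Build and publish a release",    # illustrative
            "is_project": True,
            "path": "/repo/.gobby/workflows/release.yaml",   # illustrative
            "priority": 10,
            "step_count": 4,  # only present when the definition has steps
        }
    ],
    "count": 1,
}
```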