gobby-0.2.9-py3-none-any.whl → gobby-0.2.11-py3-none-any.whl
This diff covers the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +6 -0
- gobby/adapters/base.py +11 -2
- gobby/adapters/claude_code.py +2 -2
- gobby/adapters/codex_impl/adapter.py +38 -43
- gobby/adapters/copilot.py +324 -0
- gobby/adapters/cursor.py +373 -0
- gobby/adapters/gemini.py +2 -26
- gobby/adapters/windsurf.py +359 -0
- gobby/agents/definitions.py +162 -2
- gobby/agents/isolation.py +33 -1
- gobby/agents/pty_reader.py +192 -0
- gobby/agents/registry.py +10 -1
- gobby/agents/runner.py +24 -8
- gobby/agents/sandbox.py +8 -3
- gobby/agents/session.py +4 -0
- gobby/agents/spawn.py +9 -2
- gobby/agents/spawn_executor.py +49 -61
- gobby/agents/spawners/command_builder.py +4 -4
- gobby/app_context.py +5 -0
- gobby/cli/__init__.py +4 -0
- gobby/cli/install.py +259 -4
- gobby/cli/installers/__init__.py +12 -0
- gobby/cli/installers/copilot.py +242 -0
- gobby/cli/installers/cursor.py +244 -0
- gobby/cli/installers/shared.py +3 -0
- gobby/cli/installers/windsurf.py +242 -0
- gobby/cli/pipelines.py +639 -0
- gobby/cli/sessions.py +3 -1
- gobby/cli/skills.py +209 -0
- gobby/cli/tasks/crud.py +6 -5
- gobby/cli/tasks/search.py +1 -1
- gobby/cli/ui.py +116 -0
- gobby/cli/workflows.py +38 -17
- gobby/config/app.py +5 -0
- gobby/config/skills.py +23 -2
- gobby/hooks/broadcaster.py +9 -0
- gobby/hooks/event_handlers/_base.py +6 -1
- gobby/hooks/event_handlers/_session.py +44 -130
- gobby/hooks/events.py +48 -0
- gobby/hooks/hook_manager.py +25 -3
- gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
- gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
- gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
- gobby/llm/__init__.py +14 -1
- gobby/llm/claude.py +217 -1
- gobby/llm/service.py +149 -0
- gobby/mcp_proxy/instructions.py +9 -27
- gobby/mcp_proxy/models.py +1 -0
- gobby/mcp_proxy/registries.py +56 -9
- gobby/mcp_proxy/server.py +6 -2
- gobby/mcp_proxy/services/tool_filter.py +7 -0
- gobby/mcp_proxy/services/tool_proxy.py +19 -1
- gobby/mcp_proxy/stdio.py +37 -21
- gobby/mcp_proxy/tools/agents.py +7 -0
- gobby/mcp_proxy/tools/hub.py +30 -1
- gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
- gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
- gobby/mcp_proxy/tools/orchestration/review.py +17 -4
- gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
- gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
- gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
- gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
- gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
- gobby/mcp_proxy/tools/skills/__init__.py +184 -30
- gobby/mcp_proxy/tools/spawn_agent.py +229 -14
- gobby/mcp_proxy/tools/tasks/_context.py +8 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
- gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
- gobby/mcp_proxy/tools/tasks/_search.py +1 -1
- gobby/mcp_proxy/tools/workflows/__init__.py +9 -2
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +12 -1
- gobby/mcp_proxy/tools/workflows/_query.py +45 -26
- gobby/mcp_proxy/tools/workflows/_terminal.py +39 -3
- gobby/mcp_proxy/tools/worktrees.py +54 -15
- gobby/memory/context.py +5 -5
- gobby/runner.py +108 -6
- gobby/servers/http.py +7 -1
- gobby/servers/routes/__init__.py +2 -0
- gobby/servers/routes/admin.py +44 -0
- gobby/servers/routes/mcp/endpoints/execution.py +18 -25
- gobby/servers/routes/mcp/hooks.py +10 -1
- gobby/servers/routes/pipelines.py +227 -0
- gobby/servers/websocket.py +314 -1
- gobby/sessions/analyzer.py +87 -1
- gobby/sessions/manager.py +5 -5
- gobby/sessions/transcripts/__init__.py +3 -0
- gobby/sessions/transcripts/claude.py +5 -0
- gobby/sessions/transcripts/codex.py +5 -0
- gobby/sessions/transcripts/gemini.py +5 -0
- gobby/skills/hubs/__init__.py +25 -0
- gobby/skills/hubs/base.py +234 -0
- gobby/skills/hubs/claude_plugins.py +328 -0
- gobby/skills/hubs/clawdhub.py +289 -0
- gobby/skills/hubs/github_collection.py +465 -0
- gobby/skills/hubs/manager.py +263 -0
- gobby/skills/hubs/skillhub.py +342 -0
- gobby/storage/memories.py +4 -4
- gobby/storage/migrations.py +95 -3
- gobby/storage/pipelines.py +367 -0
- gobby/storage/sessions.py +23 -4
- gobby/storage/skills.py +1 -1
- gobby/storage/tasks/_aggregates.py +2 -2
- gobby/storage/tasks/_lifecycle.py +4 -4
- gobby/storage/tasks/_models.py +7 -1
- gobby/storage/tasks/_queries.py +3 -3
- gobby/sync/memories.py +4 -3
- gobby/tasks/commits.py +48 -17
- gobby/workflows/actions.py +75 -0
- gobby/workflows/context_actions.py +246 -5
- gobby/workflows/definitions.py +119 -1
- gobby/workflows/detection_helpers.py +23 -11
- gobby/workflows/enforcement/task_policy.py +18 -0
- gobby/workflows/engine.py +20 -1
- gobby/workflows/evaluator.py +8 -5
- gobby/workflows/lifecycle_evaluator.py +57 -26
- gobby/workflows/loader.py +567 -30
- gobby/workflows/lobster_compat.py +147 -0
- gobby/workflows/pipeline_executor.py +801 -0
- gobby/workflows/pipeline_state.py +172 -0
- gobby/workflows/pipeline_webhooks.py +206 -0
- gobby/workflows/premature_stop.py +5 -0
- gobby/worktrees/git.py +135 -20
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/RECORD +134 -106
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/cli/pipelines.py
ADDED
@@ -0,0 +1,639 @@
+"""
+CLI commands for managing Gobby pipelines.
+"""
+
+import asyncio
+import json
+import logging
+from pathlib import Path
+from typing import Any
+
+import click
+import yaml
+
+from gobby.workflows.loader import WorkflowLoader
+from gobby.workflows.lobster_compat import LobsterImporter
+from gobby.workflows.pipeline_state import ApprovalRequired
+
+logger = logging.getLogger(__name__)
+
+
+def get_workflow_loader() -> WorkflowLoader:
+    """Get workflow loader instance."""
+    return WorkflowLoader()
+
+
+def get_project_path() -> Path | None:
+    """Get current project path if in a gobby project."""
+    cwd = Path.cwd()
+    if (cwd / ".gobby").exists():
+        return cwd
+    return None
+
+
+def _get_project_id() -> str:
+    """Get project ID from current project if available."""
+    project_path = get_project_path()
+    if not project_path:
+        return ""
+    project_json = project_path / ".gobby" / "project.json"
+    if not project_json.exists():
+        return ""
+    try:
+        with open(project_json) as f:
+            project_data = json.load(f)
+        project_id = project_data.get("id", "")
+        return str(project_id) if project_id else ""
+    except Exception:
+        return ""
+
+
+def get_pipeline_executor() -> Any:
+    """Get pipeline executor instance.
+
+    Returns a mock executor for CLI use. In production, this would be
+    connected to the daemon via HTTP API.
+    """
+    # For CLI, we create a lightweight executor
+    # The actual execution happens through the daemon
+    from gobby.storage.database import LocalDatabase
+    from gobby.storage.pipelines import LocalPipelineExecutionManager
+    from gobby.workflows.pipeline_executor import PipelineExecutor
+
+    db = LocalDatabase()
+
+    project_id = _get_project_id()
+    execution_manager = LocalPipelineExecutionManager(db, project_id)
+
+    return PipelineExecutor(
+        db=db,
+        execution_manager=execution_manager,
+        llm_service=None,  # Not needed for exec steps
+        loader=get_workflow_loader(),
+    )
+
+
+def parse_input(input_str: str) -> tuple[str, str]:
+    """Parse a key=value input string."""
+    if "=" not in input_str:
+        raise click.BadParameter(f"Input must be in 'key=value' format: {input_str}")
+    key, value = input_str.split("=", 1)
+    return key.strip(), value.strip()
+
+
+@click.group()
+def pipelines() -> None:
+    """Manage Gobby pipelines."""
+    pass
+
+
+@pipelines.command("list")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def list_pipelines(ctx: click.Context, json_format: bool) -> None:
+    """List available pipeline definitions."""
+    loader = get_workflow_loader()
+    project_path = get_project_path()
+
+    discovered = loader.discover_pipeline_workflows(
+        project_path=str(project_path) if project_path else None
+    )
+
+    if json_format:
+        pipeline_list = []
+        for wf in discovered:
+            pipeline_list.append(
+                {
+                    "name": wf.name,
+                    "description": wf.definition.description,
+                    "is_project": wf.is_project,
+                    "path": str(wf.path),
+                    "step_count": len(wf.definition.steps),
+                }
+            )
+        click.echo(json.dumps({"pipelines": pipeline_list, "count": len(pipeline_list)}, indent=2))
+        return
+
+    if not discovered:
+        click.echo("No pipelines found.")
+        return
+
+    click.echo(f"Found {len(discovered)} pipeline(s):\n")
+    for wf in discovered:
+        source_tag = "[project]" if wf.is_project else ""
+        step_count = len(wf.definition.steps)
+        click.echo(f" {wf.name} ({step_count} steps) {source_tag}")
+        if wf.definition.description:
+            click.echo(f" {wf.definition.description[:80]}")
+
+
+@pipelines.command("show")
+@click.argument("name")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def show_pipeline(ctx: click.Context, name: str, json_format: bool) -> None:
+    """Show pipeline definition details."""
+    loader = get_workflow_loader()
+    project_path = get_project_path()
+
+    pipeline = loader.load_pipeline(name, project_path=project_path)
+    if not pipeline:
+        click.echo(f"Pipeline '{name}' not found.", err=True)
+        raise SystemExit(1)
+
+    if json_format:
+        pipeline_dict = {
+            "name": pipeline.name,
+            "description": pipeline.description,
+            "steps": [
+                {
+                    "id": step.id,
+                    "exec": step.exec,
+                    "prompt": step.prompt,
+                    "invoke_pipeline": step.invoke_pipeline,
+                    "condition": step.condition,
+                }
+                for step in pipeline.steps
+            ],
+            "inputs": pipeline.inputs,
+            "outputs": pipeline.outputs,
+        }
+        click.echo(json.dumps(pipeline_dict, indent=2, default=str))
+        return
+
+    click.echo(f"Pipeline: {pipeline.name}")
+    if pipeline.description:
+        click.echo(f"Description: {pipeline.description}")
+
+    if pipeline.inputs:
+        click.echo("\nInputs:")
+        for input_name, input_def in pipeline.inputs.items():
+            required = input_def.get("required", False)
+            req_tag = " (required)" if required else ""
+            click.echo(f" - {input_name}{req_tag}")
+            if input_def.get("description"):
+                click.echo(f" {input_def['description']}")
+
+    click.echo(f"\nSteps ({len(pipeline.steps)}):")
+    for step in pipeline.steps:
+        step_type = "exec" if step.exec else "prompt" if step.prompt else "pipeline"
+        click.echo(f" - {step.id} ({step_type})")
+        if step.exec:
+            cmd_preview = step.exec[:60] + "..." if len(step.exec) > 60 else step.exec
+            click.echo(f" {cmd_preview}")
+        elif step.prompt:
+            prompt_preview = step.prompt[:60] + "..." if len(step.prompt) > 60 else step.prompt
+            click.echo(f" {prompt_preview}")
+        elif step.invoke_pipeline:
+            click.echo(f" invoke: {step.invoke_pipeline}")
+        if step.condition:
+            click.echo(f" condition: {step.condition}")
+
+    if pipeline.outputs:
+        click.echo("\nOutputs:")
+        for output_name, output_expr in pipeline.outputs.items():
+            click.echo(f" - {output_name}: {output_expr}")
+
+
+@pipelines.command("run")
+@click.argument("name", required=False)
+@click.option(
+    "-i",
+    "--input",
+    "inputs",
+    multiple=True,
+    help="Input values as key=value (can be repeated)",
+)
+@click.option(
+    "--lobster",
+    "lobster_path",
+    type=click.Path(exists=True),
+    help="Run a Lobster file directly without saving",
+)
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def run_pipeline(
+    ctx: click.Context,
+    name: str | None,
+    inputs: tuple[str, ...],
+    lobster_path: str | None,
+    json_format: bool,
+) -> None:
+    """Run a pipeline by name or Lobster file.
+
+    Examples:
+
+        gobby pipelines run deploy
+
+        gobby pipelines run deploy -i env=prod -i version=1.0
+
+        gobby pipelines run --lobster ci.lobster
+    """
+    # Handle --lobster flag: import and run directly without saving
+    pipeline: Any = None  # Will be PipelineDefinition after loading
+    if lobster_path:
+        importer = LobsterImporter()
+        try:
+            pipeline = importer.import_file(lobster_path)
+        except FileNotFoundError:
+            click.echo(f"File not found: {lobster_path}", err=True)
+            raise SystemExit(1) from None
+        except Exception as e:
+            click.echo(f"Failed to import Lobster file: {e}", err=True)
+            raise SystemExit(1) from None
+    else:
+        # Standard mode: load by name
+        if not name:
+            click.echo("Pipeline name is required (or use --lobster).", err=True)
+            raise SystemExit(1)
+
+        loader = get_workflow_loader()
+        project_path = get_project_path()
+        pipeline = loader.load_pipeline(name, project_path=project_path)
+        if not pipeline:
+            click.echo(f"Pipeline '{name}' not found.", err=True)
+            raise SystemExit(1)
+
+    # Parse inputs
+    input_dict: dict[str, str] = {}
+    for input_str in inputs:
+        try:
+            key, value = parse_input(input_str)
+            input_dict[key] = value
+        except click.BadParameter as e:
+            click.echo(str(e), err=True)
+            raise SystemExit(1) from None
+
+    # Get executor and run
+    executor = get_pipeline_executor()
+
+    project_id = _get_project_id()
+
+    try:
+        # Run the pipeline
+        execution = asyncio.run(
+            executor.execute(
+                pipeline=pipeline,
+                inputs=input_dict,
+                project_id=project_id,
+            )
+        )
+
+        # Output result
+        if json_format:
+            result = {
+                "execution_id": execution.id,
+                "status": execution.status.value,
+                "pipeline_name": execution.pipeline_name,
+            }
+            if execution.outputs_json:
+                try:
+                    result["outputs"] = json.loads(execution.outputs_json)
+                except json.JSONDecodeError:
+                    result["outputs"] = execution.outputs_json
+            click.echo(json.dumps(result, indent=2))
+        else:
+            display_name = name or pipeline.name or "pipeline"
+            click.echo(f"✓ Pipeline '{display_name}' completed")
+            click.echo(f" Execution ID: {execution.id}")
+            click.echo(f" Status: {execution.status.value}")
+
+    except ApprovalRequired as e:
+        # Pipeline paused for approval
+        display_name = name or (pipeline.name if pipeline else None) or "pipeline"
+        if json_format:
+            result = {
+                "execution_id": e.execution_id,
+                "status": "waiting_approval",
+                "step_id": e.step_id,
+                "token": e.token,
+                "message": e.message,
+            }
+            click.echo(json.dumps(result, indent=2))
+        else:
+            click.echo(f"⏸ Pipeline '{display_name}' waiting for approval")
+            click.echo(f" Execution ID: {e.execution_id}")
+            click.echo(f" Step: {e.step_id}")
+            click.echo(f" Message: {e.message}")
+            click.echo(f"\nTo approve: gobby pipelines approve {e.token}")
+            click.echo(f"To reject: gobby pipelines reject {e.token}")
+
+    except Exception as e:
+        click.echo(f"Pipeline execution failed: {e}", err=True)
+        raise SystemExit(1) from None
+
+
+def get_execution_manager() -> Any:
+    """Get pipeline execution manager instance."""
+    from gobby.storage.database import LocalDatabase
+    from gobby.storage.pipelines import LocalPipelineExecutionManager
+
+    db = LocalDatabase()
+
+    project_id = _get_project_id()
+    return LocalPipelineExecutionManager(db, project_id)
+
+
+@pipelines.command("status")
+@click.argument("execution_id")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def status_pipeline(ctx: click.Context, execution_id: str, json_format: bool) -> None:
+    """Show status of a pipeline execution.
+
+    Examples:
+
+        gobby pipelines status pe-abc123
+
+        gobby pipelines status pe-abc123 --json
+    """
+    execution_manager = get_execution_manager()
+
+    # Fetch execution
+    execution = execution_manager.get_execution(execution_id)
+    if not execution:
+        click.echo(f"Execution '{execution_id}' not found.", err=True)
+        raise SystemExit(1)
+
+    # Fetch step executions
+    steps = execution_manager.get_steps_for_execution(execution_id)
+
+    if json_format:
+        exec_dict: dict[str, Any] = {
+            "id": execution.id,
+            "pipeline_name": execution.pipeline_name,
+            "status": execution.status.value,
+            "created_at": execution.created_at,
+            "updated_at": execution.updated_at,
+        }
+        if execution.inputs_json:
+            try:
+                exec_dict["inputs"] = json.loads(execution.inputs_json)
+            except json.JSONDecodeError:
+                exec_dict["inputs"] = execution.inputs_json
+        if execution.outputs_json:
+            try:
+                exec_dict["outputs"] = json.loads(execution.outputs_json)
+            except json.JSONDecodeError:
+                exec_dict["outputs"] = execution.outputs_json
+        result: dict[str, Any] = {
+            "execution": exec_dict,
+            "steps": [
+                {
+                    "id": step.id,
+                    "step_id": step.step_id,
+                    "status": step.status.value,
+                }
+                for step in steps
+            ],
+        }
+        click.echo(json.dumps(result, indent=2))
+        return
+
+    # Human-readable output
+    click.echo(f"Execution: {execution.id}")
+    click.echo(f"Pipeline: {execution.pipeline_name}")
+    click.echo(f"Status: {execution.status.value}")
+    click.echo(f"Created: {execution.created_at}")
+    click.echo(f"Updated: {execution.updated_at}")
+
+    if steps:
+        click.echo(f"\nSteps ({len(steps)}):")
+        for step in steps:
+            status_icon = (
+                "✓"
+                if step.status.value == "completed"
+                else "→"
+                if step.status.value == "running"
+                else "○"
+            )
+            click.echo(f" {status_icon} {step.step_id} ({step.status.value})")
+
+
+@pipelines.command("approve")
+@click.argument("token")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def approve_pipeline(ctx: click.Context, token: str, json_format: bool) -> None:
+    """Approve a pipeline execution waiting for approval.
+
+    Examples:
+
+        gobby pipelines approve approval-token-xyz
+
+        gobby pipelines approve approval-token-xyz --json
+    """
+    executor = get_pipeline_executor()
+
+    try:
+        execution = asyncio.run(executor.approve(token, approved_by=None))
+
+        if json_format:
+            result = {
+                "execution_id": execution.id,
+                "pipeline_name": execution.pipeline_name,
+                "status": execution.status.value,
+            }
+            click.echo(json.dumps(result, indent=2))
+        else:
+            click.echo("✓ Pipeline approved")
+            click.echo(f" Execution ID: {execution.id}")
+            click.echo(f" Status: {execution.status.value}")
+
+    except ValueError as e:
+        click.echo(f"Invalid token: {e}", err=True)
+        raise SystemExit(1) from None
+    except Exception as e:
+        click.echo(f"Approval failed: {e}", err=True)
+        raise SystemExit(1) from None
+
+
+@pipelines.command("reject")
+@click.argument("token")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def reject_pipeline(ctx: click.Context, token: str, json_format: bool) -> None:
+    """Reject a pipeline execution waiting for approval.
+
+    Examples:
+
+        gobby pipelines reject approval-token-xyz
+
+        gobby pipelines reject approval-token-xyz --json
+    """
+    executor = get_pipeline_executor()
+
+    try:
+        execution = asyncio.run(executor.reject(token, rejected_by=None))
+
+        if json_format:
+            result = {
+                "execution_id": execution.id,
+                "pipeline_name": execution.pipeline_name,
+                "status": execution.status.value,
+            }
+            click.echo(json.dumps(result, indent=2))
+        else:
+            click.echo("✗ Pipeline rejected")
+            click.echo(f" Execution ID: {execution.id}")
+            click.echo(f" Status: {execution.status.value}")
+
+    except ValueError as e:
+        click.echo(f"Invalid token: {e}", err=True)
+        raise SystemExit(1) from None
+    except Exception as e:
+        click.echo(f"Rejection failed: {e}", err=True)
+        raise SystemExit(1) from None
+
+
+@pipelines.command("history")
+@click.argument("name")
+@click.option("--limit", default=20, help="Maximum number of executions to show")
+@click.option("--json", "json_format", is_flag=True, help="Output as JSON")
+@click.pass_context
+def history_pipeline(ctx: click.Context, name: str, limit: int, json_format: bool) -> None:
+    """Show execution history for a pipeline.
+
+    Examples:
+
+        gobby pipelines history deploy
+
+        gobby pipelines history deploy --limit 10
+
+        gobby pipelines history deploy --json
+    """
+    execution_manager = get_execution_manager()
+
+    # List executions filtered by pipeline name
+    executions = execution_manager.list_executions(pipeline_name=name, limit=limit)
+
+    if json_format:
+        result = {
+            "pipeline_name": name,
+            "executions": [
+                {
+                    "id": ex.id,
+                    "status": ex.status.value,
+                    "created_at": ex.created_at,
+                    "updated_at": ex.updated_at,
+                }
+                for ex in executions
+            ],
+            "count": len(executions),
+        }
+        click.echo(json.dumps(result, indent=2))
+        return
+
+    if not executions:
+        click.echo(f"No executions found for pipeline '{name}'.")
+        return
+
+    click.echo(f"Execution history for '{name}' ({len(executions)} executions):\n")
+    for ex in executions:
+        status_icon = (
+            "✓"
+            if ex.status.value == "completed"
+            else "✗"
+            if ex.status.value == "failed"
+            else "→"
+            if ex.status.value == "running"
+            else "○"
+        )
+        click.echo(f" {status_icon} {ex.id} ({ex.status.value}) - {ex.created_at}")
+
+
+@pipelines.command("import")
+@click.argument("path", type=click.Path(exists=True))
+@click.option(
+    "-o",
+    "--output",
+    "output_path",
+    type=click.Path(),
+    help="Custom output path for converted pipeline",
+)
+@click.pass_context
+def import_pipeline(ctx: click.Context, path: str, output_path: str | None) -> None:
+    """Import a Lobster pipeline and convert to Gobby format.
+
+    Reads a .lobster file, converts it to Gobby's pipeline format,
+    and saves it to .gobby/workflows/ in the current project.
+
+    Examples:
+
+        gobby pipelines import ci.lobster
+
+        gobby pipelines import deploy.lobster -o custom/path.yaml
+    """
+    # Get project path
+    project_path = get_project_path()
+
+    if not output_path and not project_path:
+        click.echo("Not in a Gobby project. Use --output to specify destination.", err=True)
+        raise SystemExit(1)
+
+    # Import the Lobster file
+    importer = LobsterImporter()
+    try:
+        pipeline = importer.import_file(path)
+    except FileNotFoundError:
+        click.echo(f"File not found: {path}", err=True)
+        raise SystemExit(1) from None
+    except Exception as e:
+        click.echo(f"Failed to import: {e}", err=True)
+        raise SystemExit(1) from None
+
+    # Determine output path
+    if output_path:
+        dest_path = Path(output_path)
+        dest_path.parent.mkdir(parents=True, exist_ok=True)
+    else:
+        # Save to project workflows directory
+        workflows_dir = project_path / ".gobby" / "workflows"  # type: ignore
+        workflows_dir.mkdir(parents=True, exist_ok=True)
+        dest_path = workflows_dir / f"{pipeline.name}.yaml"
+
+    # Convert pipeline to dict for YAML serialization
+    pipeline_dict: dict[str, Any] = {
+        "name": pipeline.name,
+        "type": "pipeline",
+        "version": pipeline.version,
+    }
+    if pipeline.description:
+        pipeline_dict["description"] = pipeline.description
+    if pipeline.inputs:
+        pipeline_dict["inputs"] = pipeline.inputs
+    if pipeline.outputs:
+        pipeline_dict["outputs"] = pipeline.outputs
+
+    # Convert steps
+    steps = []
+    for step in pipeline.steps:
+        step_dict: dict[str, Any] = {"id": step.id}
+        if step.exec:
+            step_dict["exec"] = step.exec
+        if step.prompt:
+            step_dict["prompt"] = step.prompt
+        if step.invoke_pipeline:
+            step_dict["invoke_pipeline"] = step.invoke_pipeline
+        if step.condition:
+            step_dict["condition"] = step.condition
+        if step.input:
+            step_dict["input"] = step.input
+        if step.approval:
+            step_dict["approval"] = {
+                "required": step.approval.required,
+            }
+            if step.approval.message:
+                step_dict["approval"]["message"] = step.approval.message
+            if step.approval.timeout_seconds:
+                step_dict["approval"]["timeout_seconds"] = step.approval.timeout_seconds
+        if step.tools:
+            step_dict["tools"] = step.tools
+        steps.append(step_dict)
+    pipeline_dict["steps"] = steps
+
+    # Write YAML file
+    dest_path.write_text(yaml.dump(pipeline_dict, default_flow_style=False, sort_keys=False))
+
+    click.echo(f"✓ Imported '{pipeline.name}' from Lobster format")
+    click.echo(f" Saved to: {dest_path}")
gobby/cli/sessions.py
CHANGED
@@ -58,7 +58,9 @@ def sessions() -> None:
 @sessions.command("list")
 @click.option("--project", "-p", "project_ref", help="Filter by project (name or UUID)")
 @click.option("--status", "-s", help="Filter by status (active, completed, handoff_ready)")
-@click.option(
+@click.option(
+    "--source", help="Filter by source (claude, gemini, codex, cursor, windsurf, copilot)"
+)
 @click.option("--limit", "-n", default=20, help="Max sessions to show")
 @click.option("--json", "json_format", is_flag=True, help="Output as JSON")
 def list_sessions(