gobby 0.2.8__py3-none-any.whl → 0.2.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +6 -0
- gobby/adapters/base.py +11 -2
- gobby/adapters/claude_code.py +5 -28
- gobby/adapters/codex_impl/adapter.py +38 -43
- gobby/adapters/copilot.py +324 -0
- gobby/adapters/cursor.py +373 -0
- gobby/adapters/gemini.py +2 -26
- gobby/adapters/windsurf.py +359 -0
- gobby/agents/definitions.py +162 -2
- gobby/agents/isolation.py +33 -1
- gobby/agents/pty_reader.py +192 -0
- gobby/agents/registry.py +10 -1
- gobby/agents/runner.py +24 -8
- gobby/agents/sandbox.py +8 -3
- gobby/agents/session.py +4 -0
- gobby/agents/spawn.py +9 -2
- gobby/agents/spawn_executor.py +49 -61
- gobby/agents/spawners/command_builder.py +4 -4
- gobby/app_context.py +64 -0
- gobby/cli/__init__.py +4 -0
- gobby/cli/install.py +259 -4
- gobby/cli/installers/__init__.py +12 -0
- gobby/cli/installers/copilot.py +242 -0
- gobby/cli/installers/cursor.py +244 -0
- gobby/cli/installers/shared.py +3 -0
- gobby/cli/installers/windsurf.py +242 -0
- gobby/cli/pipelines.py +639 -0
- gobby/cli/sessions.py +3 -1
- gobby/cli/skills.py +209 -0
- gobby/cli/tasks/crud.py +6 -5
- gobby/cli/tasks/search.py +1 -1
- gobby/cli/ui.py +116 -0
- gobby/cli/utils.py +5 -17
- gobby/cli/workflows.py +38 -17
- gobby/config/app.py +5 -0
- gobby/config/features.py +0 -20
- gobby/config/skills.py +23 -2
- gobby/config/tasks.py +4 -0
- gobby/hooks/broadcaster.py +9 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +92 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +487 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/events.py +48 -0
- gobby/hooks/hook_manager.py +27 -3
- gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
- gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
- gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
- gobby/llm/__init__.py +14 -1
- gobby/llm/claude.py +594 -43
- gobby/llm/service.py +149 -0
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/instructions.py +9 -27
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/models.py +1 -0
- gobby/mcp_proxy/registries.py +66 -5
- gobby/mcp_proxy/server.py +6 -2
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/services/tool_filter.py +7 -0
- gobby/mcp_proxy/services/tool_proxy.py +19 -1
- gobby/mcp_proxy/stdio.py +37 -21
- gobby/mcp_proxy/tools/agents.py +7 -0
- gobby/mcp_proxy/tools/artifacts.py +3 -3
- gobby/mcp_proxy/tools/hub.py +30 -1
- gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
- gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
- gobby/mcp_proxy/tools/orchestration/review.py +17 -4
- gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
- gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
- gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
- gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
- gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
- gobby/mcp_proxy/tools/skills/__init__.py +184 -30
- gobby/mcp_proxy/tools/spawn_agent.py +229 -14
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/tasks/_context.py +8 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
- gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
- gobby/mcp_proxy/tools/tasks/_search.py +1 -1
- gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
- gobby/mcp_proxy/tools/workflows/_query.py +226 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
- gobby/mcp_proxy/tools/worktrees.py +54 -15
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/context.py +5 -5
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +131 -16
- gobby/servers/http.py +193 -150
- gobby/servers/routes/__init__.py +2 -0
- gobby/servers/routes/admin.py +56 -0
- gobby/servers/routes/mcp/endpoints/execution.py +33 -32
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/servers/routes/mcp/hooks.py +10 -1
- gobby/servers/routes/pipelines.py +227 -0
- gobby/servers/websocket.py +314 -1
- gobby/sessions/analyzer.py +89 -3
- gobby/sessions/manager.py +5 -5
- gobby/sessions/transcripts/__init__.py +3 -0
- gobby/sessions/transcripts/claude.py +5 -0
- gobby/sessions/transcripts/codex.py +5 -0
- gobby/sessions/transcripts/gemini.py +5 -0
- gobby/skills/hubs/__init__.py +25 -0
- gobby/skills/hubs/base.py +234 -0
- gobby/skills/hubs/claude_plugins.py +328 -0
- gobby/skills/hubs/clawdhub.py +289 -0
- gobby/skills/hubs/github_collection.py +465 -0
- gobby/skills/hubs/manager.py +263 -0
- gobby/skills/hubs/skillhub.py +342 -0
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/memories.py +4 -4
- gobby/storage/migrations.py +118 -3
- gobby/storage/pipelines.py +367 -0
- gobby/storage/sessions.py +23 -4
- gobby/storage/skills.py +48 -8
- gobby/storage/tasks/_aggregates.py +2 -2
- gobby/storage/tasks/_lifecycle.py +4 -4
- gobby/storage/tasks/_models.py +7 -1
- gobby/storage/tasks/_queries.py +3 -3
- gobby/sync/memories.py +4 -3
- gobby/tasks/commits.py +48 -17
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +80 -0
- gobby/workflows/context_actions.py +265 -27
- gobby/workflows/definitions.py +119 -1
- gobby/workflows/detection_helpers.py +23 -11
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +96 -0
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/enforcement/task_policy.py +18 -0
- gobby/workflows/engine.py +26 -4
- gobby/workflows/evaluator.py +8 -5
- gobby/workflows/lifecycle_evaluator.py +59 -27
- gobby/workflows/loader.py +567 -30
- gobby/workflows/lobster_compat.py +147 -0
- gobby/workflows/pipeline_executor.py +801 -0
- gobby/workflows/pipeline_state.py +172 -0
- gobby/workflows/pipeline_webhooks.py +206 -0
- gobby/workflows/premature_stop.py +5 -0
- gobby/worktrees/git.py +135 -20
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
- gobby/hooks/event_handlers.py +0 -1008
- gobby/mcp_proxy/tools/workflows.py +0 -1023
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
"""Pipeline execution state models.
|
|
2
|
+
|
|
3
|
+
This module defines the runtime state models for pipeline executions,
|
|
4
|
+
including execution status tracking, step execution records, and the
|
|
5
|
+
ApprovalRequired exception for approval gates.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
from enum import Enum
|
|
12
|
+
from typing import Any
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ExecutionStatus(Enum):
    """Lifecycle states a pipeline execution can move through."""

    PENDING = "pending"
    RUNNING = "running"
    WAITING_APPROVAL = "waiting_approval"  # paused at an approval gate
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class StepStatus(Enum):
    """Lifecycle states for a single step within a pipeline execution."""

    PENDING = "pending"
    RUNNING = "running"
    WAITING_APPROVAL = "waiting_approval"  # paused at an approval gate
    COMPLETED = "completed"
    FAILED = "failed"
    SKIPPED = "skipped"  # step was bypassed (e.g. condition not met)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@dataclass
class PipelineExecution:
    """Runtime record of a single pipeline run.

    Captures identity, lifecycle status, serialized inputs/outputs,
    timestamps, and the optional resume token used by approval gates.
    """

    id: str  # Format: pe-{12hex}
    pipeline_name: str
    project_id: str
    status: ExecutionStatus
    created_at: str
    updated_at: str
    inputs_json: str | None = None
    outputs_json: str | None = None
    completed_at: str | None = None
    resume_token: str | None = None  # Token for resuming after approval
    session_id: str | None = None  # Session that triggered execution
    parent_execution_id: str | None = None  # For nested pipeline invocations

    @classmethod
    def from_row(cls, row: Any) -> PipelineExecution:
        """Hydrate a PipelineExecution from a mapping-style database row."""
        # Nullable columns are copied straight through by name.
        nullable = (
            "inputs_json",
            "outputs_json",
            "completed_at",
            "resume_token",
            "session_id",
            "parent_execution_id",
        )
        return cls(
            id=row["id"],
            pipeline_name=row["pipeline_name"],
            project_id=row["project_id"],
            status=ExecutionStatus(row["status"]),
            created_at=row["created_at"],
            updated_at=row["updated_at"],
            **{name: row[name] for name in nullable},
        )

    def to_dict(self) -> dict[str, Any]:
        """Serialize for API responses (status flattened to its string value)."""
        ordered = (
            "id",
            "pipeline_name",
            "project_id",
            "status",
            "inputs_json",
            "outputs_json",
            "created_at",
            "updated_at",
            "completed_at",
            "resume_token",
            "session_id",
            "parent_execution_id",
        )
        return {
            name: self.status.value if name == "status" else getattr(self, name)
            for name in ordered
        }
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@dataclass
class StepExecution:
    """Runtime record of one step inside a pipeline execution.

    Tracks the step's input/output payloads, timing, any error, and the
    approval bookkeeping used when the step is gated.
    """

    id: int  # Auto-increment integer
    execution_id: str  # Parent pipeline execution ID
    step_id: str  # Step ID from pipeline definition
    status: StepStatus
    started_at: str | None = None
    completed_at: str | None = None
    input_json: str | None = None
    output_json: str | None = None
    error: str | None = None
    approval_token: str | None = None  # Unique token for this step's approval
    approved_by: str | None = None  # Who approved (email, user ID, etc.)
    approved_at: str | None = None

    @classmethod
    def from_row(cls, row: Any) -> StepExecution:
        """Hydrate a StepExecution from a mapping-style database row."""
        # Nullable columns are copied straight through by name.
        nullable = (
            "started_at",
            "completed_at",
            "input_json",
            "output_json",
            "error",
            "approval_token",
            "approved_by",
            "approved_at",
        )
        return cls(
            id=row["id"],
            execution_id=row["execution_id"],
            step_id=row["step_id"],
            status=StepStatus(row["status"]),
            **{name: row[name] for name in nullable},
        )

    def to_dict(self) -> dict[str, Any]:
        """Serialize for API responses (status flattened to its string value)."""
        ordered = (
            "id",
            "execution_id",
            "step_id",
            "status",
            "started_at",
            "completed_at",
            "input_json",
            "output_json",
            "error",
            "approval_token",
            "approved_by",
            "approved_at",
        )
        return {
            name: self.status.value if name == "status" else getattr(self, name)
            for name in ordered
        }
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
class ApprovalRequired(Exception):
    """Raised when a pipeline step hits an approval gate.

    Execution pauses at the gated step; an external actor later resumes
    (or rejects) it by presenting the resume token.
    """

    def __init__(
        self,
        execution_id: str,
        step_id: str,
        token: str,
        message: str,
    ) -> None:
        self.execution_id = execution_id
        self.step_id = step_id
        self.token = token
        self.message = message
        detail = f"Approval required for step '{step_id}' in execution '{execution_id}': {message}"
        super().__init__(detail)
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"""Pipeline webhook notifier for sending HTTP notifications.
|
|
2
|
+
|
|
3
|
+
This module provides the WebhookNotifier class for sending webhook
|
|
4
|
+
notifications during pipeline execution events (approval pending,
|
|
5
|
+
completion, failure).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import logging
|
|
12
|
+
import os
|
|
13
|
+
import re
|
|
14
|
+
from typing import TYPE_CHECKING, Any
|
|
15
|
+
|
|
16
|
+
import httpx
|
|
17
|
+
|
|
18
|
+
if TYPE_CHECKING:
|
|
19
|
+
from gobby.workflows.definitions import PipelineDefinition
|
|
20
|
+
from gobby.workflows.pipeline_state import PipelineExecution
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class WebhookNotifier:
    """Sends webhook notifications for pipeline execution events.

    Handles approval pending, completion, and failure notifications,
    expanding ${VAR} environment-variable placeholders in header values.
    """

    # Only these HTTP verbs are accepted for webhook delivery.
    _ALLOWED_METHODS = frozenset({"POST", "PUT"})

    # Matches ${VAR} placeholders in header values. Compiled once at class
    # creation instead of per call.
    _ENV_PATTERN = re.compile(r"\$\{([^}]+)\}")

    def __init__(self, base_url: str = "http://localhost:7778"):
        """Initialize the webhook notifier.

        Args:
            base_url: Base URL for generating approve/reject URLs.
                Defaults to localhost Gobby daemon URL.
        """
        self.base_url = base_url.rstrip("/")

    async def notify_approval_pending(
        self,
        execution: PipelineExecution,
        pipeline: PipelineDefinition,
        step_id: str,
        token: str,
        message: str,
    ) -> None:
        """Send notification when approval is required.

        Args:
            execution: The pipeline execution state
            pipeline: The pipeline definition (contains webhook config)
            step_id: The step ID requiring approval
            token: The approval token for approve/reject URLs
            message: The approval message to display
        """
        if not pipeline.webhooks or not pipeline.webhooks.on_approval_pending:
            logger.debug(f"No on_approval_pending webhook configured for {pipeline.name}")
            return

        endpoint = pipeline.webhooks.on_approval_pending
        payload = {
            "execution_id": execution.id,
            "pipeline_name": execution.pipeline_name,
            "step_id": step_id,
            "token": token,
            "message": message,
            "approve_url": f"{self.base_url}/api/pipelines/approve/{token}",
            "reject_url": f"{self.base_url}/api/pipelines/reject/{token}",
            "status": execution.status.value,
        }

        await self._send_webhook(endpoint.url, endpoint.method, endpoint.headers, payload)

    async def notify_complete(
        self,
        execution: PipelineExecution,
        pipeline: PipelineDefinition,
    ) -> None:
        """Send notification when pipeline completes successfully.

        Args:
            execution: The pipeline execution state
            pipeline: The pipeline definition (contains webhook config)
        """
        if not pipeline.webhooks or not pipeline.webhooks.on_complete:
            logger.debug(f"No on_complete webhook configured for {pipeline.name}")
            return

        endpoint = pipeline.webhooks.on_complete

        # Parse outputs JSON if present; fall back to the raw string when it
        # isn't valid JSON so the consumer still sees something.
        outputs = None
        if execution.outputs_json:
            try:
                outputs = json.loads(execution.outputs_json)
            except json.JSONDecodeError:
                outputs = execution.outputs_json

        payload = {
            "execution_id": execution.id,
            "pipeline_name": execution.pipeline_name,
            "status": execution.status.value,
            "outputs": outputs,
            "completed_at": execution.completed_at,
        }

        await self._send_webhook(endpoint.url, endpoint.method, endpoint.headers, payload)

    async def notify_failure(
        self,
        execution: PipelineExecution,
        pipeline: PipelineDefinition,
        error: str,
    ) -> None:
        """Send notification when pipeline fails.

        Args:
            execution: The pipeline execution state
            pipeline: The pipeline definition (contains webhook config)
            error: The error message describing the failure
        """
        if not pipeline.webhooks or not pipeline.webhooks.on_failure:
            logger.debug(f"No on_failure webhook configured for {pipeline.name}")
            return

        endpoint = pipeline.webhooks.on_failure
        payload = {
            "execution_id": execution.id,
            "pipeline_name": execution.pipeline_name,
            "status": execution.status.value,
            "error": error,
        }

        await self._send_webhook(endpoint.url, endpoint.method, endpoint.headers, payload)

    async def _send_webhook(
        self,
        url: str,
        method: str,
        headers: dict[str, str],
        payload: dict[str, Any],
    ) -> None:
        """Send HTTP webhook request.

        Args:
            url: Target URL
            method: HTTP method (POST or PUT; anything else is logged and skipped)
            headers: Request headers (supports ${VAR} expansion)
            payload: JSON payload to send
        """
        verb = method.upper()
        if verb not in self._ALLOWED_METHODS:
            logger.warning(f"Unsupported webhook method: {method}")
            return

        # Expand environment variables in headers
        expanded_headers = self._expand_env_vars(headers)

        try:
            async with httpx.AsyncClient() as client:
                # POST and PUT take identical arguments; dispatch via the
                # generic request() instead of duplicating both branches.
                response = await client.request(
                    verb,
                    url,
                    headers=expanded_headers,
                    json=payload,
                    timeout=30.0,
                )

            if response.status_code >= 400:
                logger.error(
                    f"Webhook request failed: {response.status_code} - {response.text}"
                )
            else:
                logger.debug(f"Webhook sent successfully to {url}")

        except Exception as e:
            # Webhooks are best-effort: never let a delivery failure break
            # pipeline execution; log and move on.
            logger.error(f"Failed to send webhook to {url}: {e}")

    def _expand_env_vars(self, headers: dict[str, str]) -> dict[str, str]:
        """Expand ${VAR} patterns in header values from environment.

        Unknown variables are left untouched (the literal ${VAR} text survives).

        Args:
            headers: Header dict with potential ${VAR} patterns

        Returns:
            New dict with expanded values
        """

        def replacer(match: re.Match[str]) -> str:
            var_name = match.group(1)
            # Keep the original placeholder when the variable is unset.
            return os.environ.get(var_name, match.group(0))

        # One closure and one precompiled pattern reused for every header,
        # rather than redefining them inside the loop.
        return {key: self._ENV_PATTERN.sub(replacer, value) for key, value in headers.items()}
|
|
@@ -12,6 +12,7 @@ from types import SimpleNamespace
|
|
|
12
12
|
from typing import TYPE_CHECKING, Any
|
|
13
13
|
|
|
14
14
|
from gobby.hooks.events import HookEvent, HookResponse
|
|
15
|
+
from gobby.workflows.definitions import WorkflowDefinition
|
|
15
16
|
|
|
16
17
|
if TYPE_CHECKING:
|
|
17
18
|
from .evaluator import ConditionEvaluator
|
|
@@ -68,6 +69,10 @@ async def check_premature_stop(
|
|
|
68
69
|
logger.warning(f"Workflow '{state.workflow_name}' not found for premature stop check")
|
|
69
70
|
return None
|
|
70
71
|
|
|
72
|
+
# Premature stop handling only applies to WorkflowDefinition, not PipelineDefinition
|
|
73
|
+
if not isinstance(workflow, WorkflowDefinition):
|
|
74
|
+
return None
|
|
75
|
+
|
|
71
76
|
# Check if workflow has exit_condition and on_premature_stop
|
|
72
77
|
if not workflow.exit_condition:
|
|
73
78
|
return None
|
gobby/worktrees/git.py
CHANGED
|
@@ -114,6 +114,7 @@ class WorktreeGitManager:
|
|
|
114
114
|
branch_name: str,
|
|
115
115
|
base_branch: str = "main",
|
|
116
116
|
create_branch: bool = True,
|
|
117
|
+
use_local: bool = False,
|
|
117
118
|
) -> GitOperationResult:
|
|
118
119
|
"""
|
|
119
120
|
Create a new git worktree.
|
|
@@ -123,6 +124,8 @@ class WorktreeGitManager:
|
|
|
123
124
|
branch_name: Name of the branch for the worktree
|
|
124
125
|
base_branch: Branch to base the new branch on (if create_branch=True)
|
|
125
126
|
create_branch: Whether to create a new branch or use existing
|
|
127
|
+
use_local: If True, create from local branch ref instead of origin/
|
|
128
|
+
This preserves unpushed commits in the worktree.
|
|
126
129
|
|
|
127
130
|
Returns:
|
|
128
131
|
GitOperationResult with success status and message
|
|
@@ -141,28 +144,52 @@ class WorktreeGitManager:
|
|
|
141
144
|
|
|
142
145
|
try:
|
|
143
146
|
if create_branch:
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
147
|
+
if use_local:
|
|
148
|
+
# Create worktree from local branch (preserves unpushed commits)
|
|
149
|
+
# Verify local branch exists
|
|
150
|
+
verify_result = self._run_git(["rev-parse", "--verify", base_branch], timeout=5)
|
|
151
|
+
if verify_result.returncode != 0:
|
|
152
|
+
return GitOperationResult(
|
|
153
|
+
success=False,
|
|
154
|
+
message=f"Local branch '{base_branch}' not found",
|
|
155
|
+
error=verify_result.stderr,
|
|
156
|
+
)
|
|
157
|
+
|
|
158
|
+
# Create worktree with new branch based on local ref
|
|
159
|
+
result = self._run_git(
|
|
160
|
+
[
|
|
161
|
+
"worktree",
|
|
162
|
+
"add",
|
|
163
|
+
"-b",
|
|
164
|
+
branch_name,
|
|
165
|
+
str(worktree_path),
|
|
166
|
+
base_branch, # Local ref, not origin/
|
|
167
|
+
],
|
|
168
|
+
timeout=60,
|
|
152
169
|
)
|
|
170
|
+
else:
|
|
171
|
+
# Create worktree with new branch based on origin (original behavior)
|
|
172
|
+
# First, fetch to ensure we have latest refs
|
|
173
|
+
fetch_result = self._run_git(["fetch", "origin", base_branch], timeout=60)
|
|
174
|
+
if fetch_result.returncode != 0:
|
|
175
|
+
return GitOperationResult(
|
|
176
|
+
success=False,
|
|
177
|
+
message=f"Failed to fetch origin/{base_branch}: {fetch_result.stderr}",
|
|
178
|
+
error=fetch_result.stderr,
|
|
179
|
+
)
|
|
153
180
|
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
181
|
+
# Create worktree with new branch
|
|
182
|
+
result = self._run_git(
|
|
183
|
+
[
|
|
184
|
+
"worktree",
|
|
185
|
+
"add",
|
|
186
|
+
"-b",
|
|
187
|
+
branch_name,
|
|
188
|
+
str(worktree_path),
|
|
189
|
+
f"origin/{base_branch}",
|
|
190
|
+
],
|
|
191
|
+
timeout=60,
|
|
192
|
+
)
|
|
166
193
|
else:
|
|
167
194
|
# Use existing branch
|
|
168
195
|
result = self._run_git(
|
|
@@ -688,3 +715,91 @@ class WorktreeGitManager:
|
|
|
688
715
|
# Method 3: Fall back to "main"
|
|
689
716
|
logger.debug("Could not detect default branch, falling back to 'main'")
|
|
690
717
|
return "main"
|
|
718
|
+
|
|
719
|
+
def get_current_branch(self) -> str | None:
    """
    Get the current branch of the repository.

    Returns:
        Branch name, or None if in detached HEAD state
    """
    try:
        proc = self._run_git(
            ["branch", "--show-current"],
            timeout=5,
        )
        if proc.returncode != 0:
            return None
        # `--show-current` prints nothing in detached HEAD state.
        name = proc.stdout.strip()
        return name or None
    except Exception:
        return None
|
|
737
|
+
|
|
738
|
+
def has_unpushed_commits(self, branch: str | None = None) -> tuple[bool, int]:
    """
    Check if the branch has commits not pushed to origin.

    Args:
        branch: Branch to check (defaults to current branch)

    Returns:
        Tuple of (has_unpushed, count) where:
        - has_unpushed: True if there are unpushed commits
        - count: Number of unpushed commits (0 if none or error)
    """
    if branch is None:
        branch = self.get_current_branch()
        if not branch:
            return False, 0

    try:
        # Does a remote tracking branch exist at all?
        remote_probe = self._run_git(
            ["rev-parse", "--verify", f"origin/{branch}"],
            timeout=5,
        )
        if remote_probe.returncode != 0:
            # No remote tracking branch - every local commit is "unpushed";
            # count the commits reachable from the branch tip.
            local_count = self._run_git(
                ["rev-list", "--count", branch],
                timeout=5,
            )
            if local_count.returncode != 0:
                # Can't count, but we know nothing has been pushed.
                return True, 0
            total = int(local_count.stdout.strip())
            return total > 0, total

        # Remote exists: count commits ahead of origin.
        ahead = self._run_git(
            ["rev-list", "--count", f"origin/{branch}..{branch}"],
            timeout=5,
        )
        if ahead.returncode != 0:
            return False, 0
        pending = int(ahead.stdout.strip())
        return pending > 0, pending
    except Exception as e:
        logger.warning(f"Error checking unpushed commits: {e}")
        return False, 0
|
|
785
|
+
|
|
786
|
+
def get_local_commit(self, branch: str) -> str | None:
    """
    Get the commit SHA of a local branch.

    Args:
        branch: Branch name

    Returns:
        Commit SHA, or None if branch doesn't exist
    """
    try:
        proc = self._run_git(
            ["rev-parse", branch],
            timeout=5,
        )
        # Non-zero exit means the ref could not be resolved.
        return proc.stdout.strip() if proc.returncode == 0 else None
    except Exception:
        return None
|