gobby 0.2.6__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +2 -1
- gobby/adapters/codex_impl/__init__.py +28 -0
- gobby/adapters/codex_impl/adapter.py +722 -0
- gobby/adapters/codex_impl/client.py +679 -0
- gobby/adapters/codex_impl/protocol.py +20 -0
- gobby/adapters/codex_impl/types.py +68 -0
- gobby/agents/definitions.py +11 -1
- gobby/agents/isolation.py +395 -0
- gobby/agents/sandbox.py +261 -0
- gobby/agents/spawn.py +42 -287
- gobby/agents/spawn_executor.py +385 -0
- gobby/agents/spawners/__init__.py +24 -0
- gobby/agents/spawners/command_builder.py +189 -0
- gobby/agents/spawners/embedded.py +21 -2
- gobby/agents/spawners/headless.py +21 -2
- gobby/agents/spawners/prompt_manager.py +125 -0
- gobby/cli/install.py +4 -4
- gobby/cli/installers/claude.py +6 -0
- gobby/cli/installers/gemini.py +6 -0
- gobby/cli/installers/shared.py +103 -4
- gobby/cli/sessions.py +1 -1
- gobby/cli/utils.py +9 -2
- gobby/config/__init__.py +12 -97
- gobby/config/app.py +10 -94
- gobby/config/extensions.py +2 -2
- gobby/config/features.py +7 -130
- gobby/config/tasks.py +4 -28
- gobby/hooks/__init__.py +0 -13
- gobby/hooks/event_handlers.py +45 -2
- gobby/hooks/hook_manager.py +2 -2
- gobby/hooks/plugins.py +1 -1
- gobby/hooks/webhooks.py +1 -1
- gobby/llm/resolver.py +3 -2
- gobby/mcp_proxy/importer.py +62 -4
- gobby/mcp_proxy/instructions.py +2 -0
- gobby/mcp_proxy/registries.py +1 -4
- gobby/mcp_proxy/services/recommendation.py +43 -11
- gobby/mcp_proxy/tools/agents.py +31 -731
- gobby/mcp_proxy/tools/clones.py +0 -385
- gobby/mcp_proxy/tools/memory.py +2 -2
- gobby/mcp_proxy/tools/sessions/__init__.py +14 -0
- gobby/mcp_proxy/tools/sessions/_commits.py +232 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +253 -0
- gobby/mcp_proxy/tools/sessions/_factory.py +63 -0
- gobby/mcp_proxy/tools/sessions/_handoff.py +499 -0
- gobby/mcp_proxy/tools/sessions/_messages.py +138 -0
- gobby/mcp_proxy/tools/skills/__init__.py +14 -29
- gobby/mcp_proxy/tools/spawn_agent.py +417 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +52 -18
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +1 -1
- gobby/mcp_proxy/tools/worktrees.py +0 -343
- gobby/memory/ingestion/__init__.py +5 -0
- gobby/memory/ingestion/multimodal.py +221 -0
- gobby/memory/manager.py +62 -283
- gobby/memory/search/__init__.py +10 -0
- gobby/memory/search/coordinator.py +248 -0
- gobby/memory/services/__init__.py +5 -0
- gobby/memory/services/crossref.py +142 -0
- gobby/prompts/loader.py +5 -2
- gobby/servers/http.py +1 -4
- gobby/servers/routes/admin.py +14 -0
- gobby/servers/routes/mcp/endpoints/__init__.py +61 -0
- gobby/servers/routes/mcp/endpoints/discovery.py +405 -0
- gobby/servers/routes/mcp/endpoints/execution.py +568 -0
- gobby/servers/routes/mcp/endpoints/registry.py +378 -0
- gobby/servers/routes/mcp/endpoints/server.py +304 -0
- gobby/servers/routes/mcp/hooks.py +1 -1
- gobby/servers/routes/mcp/tools.py +48 -1506
- gobby/sessions/lifecycle.py +1 -1
- gobby/sessions/processor.py +10 -0
- gobby/sessions/transcripts/base.py +1 -0
- gobby/sessions/transcripts/claude.py +15 -5
- gobby/skills/parser.py +30 -2
- gobby/storage/migrations.py +159 -372
- gobby/storage/sessions.py +43 -7
- gobby/storage/skills.py +37 -4
- gobby/storage/tasks/_lifecycle.py +18 -3
- gobby/sync/memories.py +1 -1
- gobby/tasks/external_validator.py +1 -1
- gobby/tasks/validation.py +22 -20
- gobby/tools/summarizer.py +91 -10
- gobby/utils/project_context.py +2 -3
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +221 -1217
- gobby/workflows/artifact_actions.py +31 -0
- gobby/workflows/autonomous_actions.py +11 -0
- gobby/workflows/context_actions.py +50 -1
- gobby/workflows/enforcement/__init__.py +47 -0
- gobby/workflows/enforcement/blocking.py +269 -0
- gobby/workflows/enforcement/commit_policy.py +283 -0
- gobby/workflows/enforcement/handlers.py +269 -0
- gobby/workflows/enforcement/task_policy.py +542 -0
- gobby/workflows/git_utils.py +106 -0
- gobby/workflows/llm_actions.py +30 -0
- gobby/workflows/mcp_actions.py +20 -1
- gobby/workflows/memory_actions.py +80 -0
- gobby/workflows/safe_evaluator.py +183 -0
- gobby/workflows/session_actions.py +44 -0
- gobby/workflows/state_actions.py +60 -1
- gobby/workflows/stop_signal_actions.py +55 -0
- gobby/workflows/summary_actions.py +94 -1
- gobby/workflows/task_sync_actions.py +347 -0
- gobby/workflows/todo_actions.py +34 -1
- gobby/workflows/webhook_actions.py +185 -0
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/METADATA +6 -1
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/RECORD +111 -111
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/WHEEL +1 -1
- gobby/adapters/codex.py +0 -1332
- gobby/install/claude/commands/gobby/bug.md +0 -51
- gobby/install/claude/commands/gobby/chore.md +0 -51
- gobby/install/claude/commands/gobby/epic.md +0 -52
- gobby/install/claude/commands/gobby/eval.md +0 -235
- gobby/install/claude/commands/gobby/feat.md +0 -49
- gobby/install/claude/commands/gobby/nit.md +0 -52
- gobby/install/claude/commands/gobby/ref.md +0 -52
- gobby/mcp_proxy/tools/session_messages.py +0 -1055
- gobby/prompts/defaults/expansion/system.md +0 -119
- gobby/prompts/defaults/expansion/user.md +0 -48
- gobby/prompts/defaults/external_validation/agent.md +0 -72
- gobby/prompts/defaults/external_validation/external.md +0 -63
- gobby/prompts/defaults/external_validation/spawn.md +0 -83
- gobby/prompts/defaults/external_validation/system.md +0 -6
- gobby/prompts/defaults/features/import_mcp.md +0 -22
- gobby/prompts/defaults/features/import_mcp_github.md +0 -17
- gobby/prompts/defaults/features/import_mcp_search.md +0 -16
- gobby/prompts/defaults/features/recommend_tools.md +0 -32
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +0 -35
- gobby/prompts/defaults/features/recommend_tools_llm.md +0 -30
- gobby/prompts/defaults/features/server_description.md +0 -20
- gobby/prompts/defaults/features/server_description_system.md +0 -6
- gobby/prompts/defaults/features/task_description.md +0 -31
- gobby/prompts/defaults/features/task_description_system.md +0 -6
- gobby/prompts/defaults/features/tool_summary.md +0 -17
- gobby/prompts/defaults/features/tool_summary_system.md +0 -6
- gobby/prompts/defaults/handoff/compact.md +0 -63
- gobby/prompts/defaults/handoff/session_end.md +0 -57
- gobby/prompts/defaults/memory/extract.md +0 -61
- gobby/prompts/defaults/research/step.md +0 -58
- gobby/prompts/defaults/validation/criteria.md +0 -47
- gobby/prompts/defaults/validation/validate.md +0 -38
- gobby/storage/migrations_legacy.py +0 -1359
- gobby/workflows/task_enforcement_actions.py +0 -1343
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,347 @@
|
|
|
1
|
+
"""Task sync workflow actions.
|
|
2
|
+
|
|
3
|
+
Extracted from actions.py as part of strangler fig decomposition.
|
|
4
|
+
These functions handle task sync import/export and workflow task operations.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import logging
|
|
9
|
+
from typing import TYPE_CHECKING, Any
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from gobby.storage.database import DatabaseProtocol
|
|
13
|
+
from gobby.storage.sessions import LocalSessionManager
|
|
14
|
+
from gobby.workflows.definitions import WorkflowState
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def task_sync_import(
    task_sync_manager: Any,
    session_manager: "LocalSessionManager",
    session_id: str,
) -> dict[str, Any]:
    """Import tasks from JSONL file.

    Reads .gobby/tasks.jsonl and imports tasks into SQLite using
    Last-Write-Wins conflict resolution based on updated_at.

    Args:
        task_sync_manager: TaskSyncManager instance
        session_manager: Session manager for project lookup
        session_id: Current session ID

    Returns:
        Dict with imported status or error
    """
    if not task_sync_manager:
        logger.debug("task_sync_import: No task_sync_manager available")
        return {"error": "Task Sync Manager not available"}

    try:
        # Resolve the session's project so the import is project-scoped.
        session = await asyncio.to_thread(session_manager.get, session_id)
        project_id = session.project_id if session else None

        await asyncio.to_thread(task_sync_manager.import_from_jsonl, project_id=project_id)
        logger.info("Task sync import completed")
        return {"imported": True}
    except Exception as e:
        logger.error(f"task_sync_import failed: {e}", exc_info=True)
        return {"error": str(e)}
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
async def task_sync_export(
    task_sync_manager: Any,
    session_manager: "LocalSessionManager",
    session_id: str,
) -> dict[str, Any]:
    """Export tasks to JSONL file.

    Writes tasks and dependencies to .gobby/tasks.jsonl for Git persistence.
    Uses content hashing to skip writes if nothing changed.

    Args:
        task_sync_manager: TaskSyncManager instance
        session_manager: Session manager for project lookup
        session_id: Current session ID

    Returns:
        Dict with exported status or error
    """
    if not task_sync_manager:
        logger.debug("task_sync_export: No task_sync_manager available")
        return {"error": "Task Sync Manager not available"}

    try:
        # Resolve the session's project so the export is project-scoped.
        session = await asyncio.to_thread(session_manager.get, session_id)
        project_id = session.project_id if session else None

        await asyncio.to_thread(task_sync_manager.export_to_jsonl, project_id=project_id)
        logger.info("Task sync export completed")
        return {"exported": True}
    except Exception as e:
        logger.error(f"task_sync_export failed: {e}", exc_info=True)
        return {"error": str(e)}
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
async def persist_tasks(
    db: "DatabaseProtocol",
    session_manager: "LocalSessionManager",
    session_id: str,
    state: "WorkflowState",
    tasks: list[dict[str, Any]] | None = None,
    source: str | None = None,
    workflow_name: str | None = None,
    parent_task_id: str | None = None,
) -> dict[str, Any]:
    """Persist a list of task dicts to Gobby task system.

    Enhanced to support workflow integration with ID mapping.

    Args:
        db: Database instance
        session_manager: Session manager
        session_id: Current session ID
        state: WorkflowState for variables access
        tasks: List of task dicts
        source: Variable name containing task list (alternative to tasks)
        workflow_name: Associate tasks with this workflow
        parent_task_id: Optional parent task for all created tasks

    Returns:
        Dict with tasks_persisted count, ids list, and id_mapping dict
    """
    # Tasks may come from the explicit kwarg or from a workflow variable.
    task_list = tasks or []

    if source and state.variables:
        resolved = state.variables.get(source)
        if resolved:
            if isinstance(resolved, dict) and "tasks" in resolved:
                # Nested structure such as task_list.tasks.
                task_list = resolved["tasks"]
            elif isinstance(resolved, list):
                task_list = resolved

    if not task_list:
        return {"tasks_persisted": 0, "ids": [], "id_mapping": {}}

    try:
        from gobby.workflows.task_actions import persist_decomposed_tasks

        current_session = await asyncio.to_thread(session_manager.get, session_id)
        project_id = current_session.project_id if current_session else "default"

        # Prefer the explicit kwarg, fall back to the workflow state's name.
        wf_name = workflow_name or state.workflow_name or None

        id_mapping = await asyncio.to_thread(
            persist_decomposed_tasks,
            db=db,
            project_id=project_id,
            tasks_data=task_list,
            workflow_name=wf_name or "unnamed",
            parent_task_id=parent_task_id,
            created_in_session_id=session_id,
        )

        # Expose the decomposed-ID -> real-ID mapping to later workflow steps.
        if not state.variables:
            state.variables = {}
        state.variables["task_id_mapping"] = id_mapping

        return {
            "tasks_persisted": len(id_mapping),
            "ids": list(id_mapping.values()),
            "id_mapping": id_mapping,
        }
    except Exception as e:
        logger.error(f"persist_tasks: Failed: {e}", exc_info=True)
        return {"error": str(e)}
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
async def get_workflow_tasks(
    db: "DatabaseProtocol",
    session_manager: "LocalSessionManager",
    session_id: str,
    state: "WorkflowState",
    workflow_name: str | None = None,
    include_closed: bool = False,
    output_as: str | None = None,
) -> dict[str, Any]:
    """Get tasks associated with the current workflow.

    Args:
        db: Database instance
        session_manager: Session manager
        session_id: Current session ID
        state: WorkflowState for variables access
        workflow_name: Override workflow name (defaults to current)
        include_closed: Include closed tasks (default: False)
        output_as: Variable name to store result in

    Returns:
        Dict with tasks list and count
    """
    from gobby.workflows.task_actions import get_workflow_tasks as _get_workflow_tasks

    # Explicit override wins; otherwise use the workflow's own name.
    wf_name = workflow_name or state.workflow_name or None
    if not wf_name:
        return {"error": "No workflow name specified"}

    try:
        current_session = await asyncio.to_thread(session_manager.get, session_id)
        project_id = current_session.project_id if current_session else None

        tasks = await asyncio.to_thread(
            _get_workflow_tasks,
            db=db,
            workflow_name=wf_name,
            project_id=project_id,
            include_closed=include_closed,
        )

        # Serialize for YAML/JSON consumers.
        tasks_data = [task.to_dict() for task in tasks]

        if output_as:
            # Stash the serialized list in the requested workflow variable.
            if not state.variables:
                state.variables = {}
            state.variables[output_as] = tasks_data

        # Keep a lightweight task_list on state for the workflow engine.
        state.task_list = [
            {"id": task.id, "title": task.title, "status": task.status} for task in tasks
        ]

        return {"tasks": tasks_data, "count": len(tasks)}
    except Exception as e:
        logger.error(f"get_workflow_tasks: Failed: {e}", exc_info=True)
        return {"error": str(e)}
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
async def update_workflow_task(
    db: "DatabaseProtocol",
    state: "WorkflowState",
    task_id: str | None = None,
    status: str | None = None,
    verification: str | None = None,
    validation_status: str | None = None,
    validation_feedback: str | None = None,
) -> dict[str, Any]:
    """Update a task from workflow context.

    Args:
        db: Database instance
        state: WorkflowState for task_list access
        task_id: ID of task to update (required)
        status: New status
        verification: Verification result
        validation_status: Validation status
        validation_feedback: Validation feedback

    Returns:
        Dict with updated task data
    """
    from gobby.workflows.task_actions import update_task_from_workflow

    tid = task_id
    if not tid and state.task_list and state.current_task_index is not None:
        # Fall back to the task the workflow is currently pointing at.
        index = state.current_task_index
        if 0 <= index < len(state.task_list):
            tid = state.task_list[index].get("id")

    if not tid:
        return {"error": "No task_id specified"}

    try:
        task = await asyncio.to_thread(
            update_task_from_workflow,
            db=db,
            task_id=tid,
            status=status,
            verification=verification,
            validation_status=validation_status,
            validation_feedback=validation_feedback,
        )
    except Exception as e:
        logger.error(f"update_workflow_task: Failed for task {tid}: {e}", exc_info=True)
        return {"updated": False, "error": str(e)}

    if task:
        return {"updated": True, "task": task.to_dict()}
    return {"updated": False, "error": "Task not found"}
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
# --- ActionHandler-compatible wrappers ---
|
|
289
|
+
# These match the ActionHandler protocol: (context: ActionContext, **kwargs) -> dict | None
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
async def handle_task_sync_import(context: Any, **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for task_sync_import."""
    # Positional forwarding; the signature order matches task_sync_import.
    return await task_sync_import(
        context.task_sync_manager,
        context.session_manager,
        context.session_id,
    )
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
async def handle_task_sync_export(context: Any, **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for task_sync_export."""
    # Positional forwarding; the signature order matches task_sync_export.
    return await task_sync_export(
        context.task_sync_manager,
        context.session_manager,
        context.session_id,
    )
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
async def handle_persist_tasks(context: Any, **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for persist_tasks."""
    # Forward only the action kwargs that persist_tasks understands;
    # missing keys become None, matching the underlying defaults.
    forwarded = {
        name: kwargs.get(name)
        for name in ("tasks", "source", "workflow_name", "parent_task_id")
    }
    return await persist_tasks(
        db=context.db,
        session_manager=context.session_manager,
        session_id=context.session_id,
        state=context.state,
        **forwarded,
    )
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
async def handle_get_workflow_tasks(context: Any, **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for get_workflow_tasks."""
    # The YAML-facing kwarg is "as" (a Python keyword), mapped to output_as.
    destination = kwargs.get("as")
    return await get_workflow_tasks(
        db=context.db,
        session_manager=context.session_manager,
        session_id=context.session_id,
        state=context.state,
        workflow_name=kwargs.get("workflow_name"),
        include_closed=kwargs.get("include_closed", False),
        output_as=destination,
    )
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
async def handle_update_workflow_task(context: Any, **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for update_workflow_task."""
    # Pull the supported fields out of kwargs; absent keys become None.
    fields = (
        "task_id",
        "status",
        "verification",
        "validation_status",
        "validation_feedback",
    )
    return await update_workflow_task(
        db=context.db,
        state=context.state,
        **{name: kwargs.get(name) for name in fields},
    )
|
gobby/workflows/todo_actions.py
CHANGED
|
@@ -4,9 +4,13 @@ Extracted from actions.py as part of strangler fig decomposition.
|
|
|
4
4
|
These functions handle TODO.md file operations.
|
|
5
5
|
"""
|
|
6
6
|
|
|
7
|
+
import asyncio
|
|
7
8
|
import logging
|
|
8
9
|
import os
|
|
9
|
-
from typing import Any
|
|
10
|
+
from typing import TYPE_CHECKING, Any
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from gobby.workflows.actions import ActionContext
|
|
10
14
|
|
|
11
15
|
logger = logging.getLogger(__name__)
|
|
12
16
|
|
|
@@ -82,3 +86,32 @@ def mark_todo_complete(
|
|
|
82
86
|
except Exception as e:
|
|
83
87
|
logger.error(f"mark_todo_complete: Failed: {e}")
|
|
84
88
|
return {"error": str(e)}
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# --- ActionHandler-compatible wrappers ---
|
|
92
|
+
# These match the ActionHandler protocol: (context: ActionContext, **kwargs) -> dict | None
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
async def handle_write_todos(context: "ActionContext", **kwargs: Any) -> dict[str, Any] | None:
    """ActionHandler wrapper for write_todos."""
    todos = kwargs.get("todos", [])
    filename = kwargs.get("filename", "TODO.md")
    mode = kwargs.get("mode", "w")
    # write_todos does blocking file I/O, so keep it off the event loop.
    return await asyncio.to_thread(write_todos, todos=todos, filename=filename, mode=mode)
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
async def handle_mark_todo_complete(
|
|
106
|
+
context: "ActionContext", **kwargs: Any
|
|
107
|
+
) -> dict[str, Any] | None:
|
|
108
|
+
"""ActionHandler wrapper for mark_todo_complete."""
|
|
109
|
+
todo_text = kwargs.get("todo_text")
|
|
110
|
+
if not todo_text:
|
|
111
|
+
return {"error": "Missing required parameter: todo_text"}
|
|
112
|
+
|
|
113
|
+
return await asyncio.to_thread(
|
|
114
|
+
mark_todo_complete,
|
|
115
|
+
todo_text,
|
|
116
|
+
kwargs.get("filename", "TODO.md"),
|
|
117
|
+
)
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
"""Webhook workflow actions.
|
|
2
|
+
|
|
3
|
+
Extracted from actions.py as part of strangler fig decomposition.
|
|
4
|
+
These functions handle webhook HTTP request execution from workflows.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
from typing import TYPE_CHECKING, Any
|
|
9
|
+
from urllib.parse import urlparse, urlunparse
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from gobby.workflows.definitions import WorkflowState
|
|
13
|
+
from gobby.workflows.templates import TemplateEngine
|
|
14
|
+
|
|
15
|
+
logger = logging.getLogger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def execute_webhook(
    template_engine: "TemplateEngine",
    state: "WorkflowState",
    config: Any | None,
    url: str | None = None,
    webhook_id: str | None = None,
    method: str = "POST",
    headers: dict[str, str] | None = None,
    payload: dict[str, Any] | str | None = None,
    timeout: int = 30,
    retry: dict[str, Any] | None = None,
    capture_response: dict[str, str] | None = None,
) -> dict[str, Any]:
    """Execute a webhook HTTP request.

    Args:
        template_engine: Template engine for interpolation
        state: WorkflowState for variables/artifacts access
        config: Daemon config for webhook_secrets
        url: Target URL for the request
        webhook_id: ID of a pre-configured webhook (alternative to url)
        method: HTTP method (GET, POST, PUT, PATCH, DELETE)
        headers: Request headers dict
        payload: Request body as dict or string
        timeout: Request timeout in seconds
        retry: Retry configuration dict
        capture_response: Response capture config

    Returns:
        Dict with success status, status_code, and captured response data.
    """
    from gobby.workflows.webhook import WebhookAction
    from gobby.workflows.webhook_executor import WebhookExecutor

    # Build kwargs dict for WebhookAction; only include optional keys that
    # were actually supplied so from_dict sees a minimal config.
    webhook_kwargs: dict[str, Any] = {
        "method": method,
        "timeout": timeout,
    }
    if url:
        webhook_kwargs["url"] = url
    if webhook_id:
        webhook_kwargs["webhook_id"] = webhook_id
    if headers:
        webhook_kwargs["headers"] = headers
    if payload:
        webhook_kwargs["payload"] = payload
    if retry:
        webhook_kwargs["retry"] = retry
    if capture_response:
        webhook_kwargs["capture_response"] = capture_response

    try:
        # Parse WebhookAction from kwargs to validate config
        webhook_action = WebhookAction.from_dict(webhook_kwargs)
    except ValueError as e:
        logger.error(f"Invalid webhook action config: {e}")
        return {"success": False, "error": str(e)}

    # Build context for variable interpolation
    interpolation_context: dict[str, Any] = {}
    if state.variables:
        interpolation_context["state"] = {"variables": state.variables}
    if state.artifacts:
        interpolation_context["artifacts"] = state.artifacts

    # Get secrets from config if available. Guard against the attribute
    # existing but being explicitly None, which would break the executor.
    secrets: dict[str, str] = {}
    if config:
        secrets = getattr(config, "webhook_secrets", None) or {}

    # Create executor with template engine for payload interpolation
    executor = WebhookExecutor(
        template_engine=template_engine,
        secrets=secrets,
    )

    # Execute the webhook
    if webhook_action.url:
        result = await executor.execute(
            url=webhook_action.url,
            method=webhook_action.method,
            headers=webhook_action.headers,
            payload=webhook_action.payload,
            timeout=webhook_action.timeout,
            retry_config=webhook_action.retry.to_dict() if webhook_action.retry else None,
            context=interpolation_context,
        )
    elif webhook_action.webhook_id:
        # webhook_id execution requires a registry which would be configured
        # at the daemon level - for now we return an error if no registry
        logger.warning("webhook_id execution not yet supported without registry")
        return {"success": False, "error": "webhook_id requires configured webhook registry"}
    else:
        return {"success": False, "error": "Either url or webhook_id is required"}

    # Capture response into workflow variables if configured
    if webhook_action.capture_response:
        if not state.variables:
            state.variables = {}

        capture = webhook_action.capture_response
        if capture.status_var and result.status_code is not None:
            state.variables[capture.status_var] = result.status_code
        if capture.body_var and result.body is not None:
            # Prefer the parsed JSON, falling back to the raw body only when
            # parsing failed. Compare against None rather than truthiness so
            # valid falsy JSON values (0, false, [], {}) are preserved.
            # NOTE(review): assumes json_body() returns None on parse failure
            # — confirm against WebhookExecutor's result type.
            json_body = result.json_body()
            state.variables[capture.body_var] = (
                json_body if json_body is not None else result.body
            )
        if capture.headers_var and result.headers is not None:
            state.variables[capture.headers_var] = result.headers

    # Sanitize URL for logging (remove query params which may contain secrets)
    def _sanitize_url(raw_url: str | None) -> str:
        if not raw_url:
            return "<no-url>"
        try:
            parsed = urlparse(raw_url)
            # Remove query string for logging
            sanitized = urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))
            return sanitized or raw_url
        except Exception:
            return "<invalid-url>"

    sanitized_url = _sanitize_url(webhook_action.url)

    # Log outcome
    if result.success:
        logger.info(
            f"Webhook {webhook_action.method} {sanitized_url} succeeded: {result.status_code}"
        )
    else:
        logger.warning(
            f"Webhook {webhook_action.method} {sanitized_url} failed: "
            f"{result.error or result.status_code}"
        )

    return {
        "success": result.success,
        "status_code": result.status_code,
        "error": result.error,
        "body": result.body if result.success else None,
    }
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
# --- ActionHandler-compatible wrappers ---
|
|
163
|
+
# These match the ActionHandler protocol: (context: ActionContext, **kwargs) -> dict | None
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
async def handle_webhook(
    context: Any, config: Any | None = None, **kwargs: Any
) -> dict[str, Any] | None:
    """ActionHandler wrapper for execute_webhook.

    Note: config is passed via closure from register_defaults.
    """
    # Optional pass-through kwargs default to None downstream.
    optional = {
        name: kwargs.get(name)
        for name in ("url", "webhook_id", "headers", "payload", "retry", "capture_response")
    }
    return await execute_webhook(
        template_engine=context.template_engine,
        state=context.state,
        config=config,
        method=kwargs.get("method", "POST"),
        timeout=kwargs.get("timeout", 30),
        **optional,
    )
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: gobby
|
|
3
|
-
Version: 0.2.6
|
|
3
|
+
Version: 0.2.7
|
|
4
4
|
Summary: A local-first daemon to unify your AI coding tools. Session tracking and handoffs across Claude Code, Gemini CLI, and Codex. An MCP proxy that discovers tools without flooding context. Task management with dependencies, validation, and TDD expansion. Agent spawning and worktree orchestration. Persistent memory, extensible workflows, and hooks.
|
|
5
5
|
Author-email: Josh Wilhelmi <josh@gobby.ai>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -39,6 +39,7 @@ Requires-Dist: gitingest>=0.3.1
|
|
|
39
39
|
Requires-Dist: scikit-learn>=1.0.0
|
|
40
40
|
Requires-Dist: textual>=7.3.0
|
|
41
41
|
Requires-Dist: memu-py>=1.0.0
|
|
42
|
+
Requires-Dist: python-multipart>=0.0.22
|
|
42
43
|
Provides-Extra: mem0
|
|
43
44
|
Requires-Dist: mem0ai; extra == "mem0"
|
|
44
45
|
Dynamic: license-file
|
|
@@ -378,6 +379,10 @@ See [ROADMAP.md](ROADMAP.md) for the full plan, but highlights:
|
|
|
378
379
|
|
|
379
380
|
**Vision:** Always local first, but Pro cloud features to keep the lights on: Fleet management (manage sessions across multiple machines), Plugin ecosystem, Team workflows, Enterprise hardening
|
|
380
381
|
|
|
382
|
+
## Changelog
|
|
383
|
+
|
|
384
|
+
See [CHANGELOG.md](CHANGELOG.md) for release history and detailed changes.
|
|
385
|
+
|
|
381
386
|
## Development
|
|
382
387
|
|
|
383
388
|
```bash
|