gobby 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import yaml
|
|
7
|
+
|
|
8
|
+
from .definitions import WorkflowDefinition
|
|
9
|
+
|
|
10
|
+
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class DiscoveredWorkflow:
    """A discovered workflow with metadata for ordering."""

    # Workflow name (YAML file stem, without extension)
    name: str
    # Parsed and validated workflow model
    definition: WorkflowDefinition
    priority: int  # Lower = higher priority (runs first)
    is_project: bool  # True if from project, False if global
    # Path to the source YAML file the definition was loaded from
    path: Path
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class WorkflowLoader:
    """Loads and caches workflow YAML definitions.

    Workflows are resolved from the project directory
    (``{project}/.gobby/workflows``) before the global directories
    (``~/.gobby/workflows`` by default).  Definitions may inherit from a
    parent via an ``extends`` key; inheritance is resolved recursively
    with explicit cycle detection.
    """

    def __init__(self, workflow_dirs: list[Path] | None = None):
        # Default global workflow directory
        self.global_dirs = workflow_dirs or [Path.home() / ".gobby" / "workflows"]
        # Loaded definitions, keyed by "{project_path or 'global'}:{name}"
        self._cache: dict[str, WorkflowDefinition] = {}
        # Cache for discovered workflows per project path
        self._discovery_cache: dict[str, list[DiscoveredWorkflow]] = {}

    def load_workflow(
        self,
        name: str,
        project_path: Path | str | None = None,
        _inheritance_chain: list[str] | None = None,
    ) -> WorkflowDefinition | None:
        """
        Load a workflow by name (without extension).
        Supports inheritance via 'extends' field with cycle detection.

        Args:
            name: Workflow name (without .yaml extension)
            project_path: Optional project directory for project-specific workflows.
                Searches: 1) {project_path}/.gobby/workflows/ 2) ~/.gobby/workflows/
            _inheritance_chain: Internal parameter for cycle detection. Do not pass directly.

        Returns:
            The parsed definition, or None if the file is missing or fails to load.

        Raises:
            ValueError: If circular inheritance is detected.
        """
        # Initialize or check inheritance chain for cycle detection
        if _inheritance_chain is None:
            _inheritance_chain = []

        if name in _inheritance_chain:
            cycle_path = " -> ".join(_inheritance_chain + [name])
            logger.error(f"Circular workflow inheritance detected: {cycle_path}")
            raise ValueError(f"Circular workflow inheritance detected: {cycle_path}")
        # Build cache key including project path for project-specific caching
        cache_key = f"{project_path or 'global'}:{name}"
        if cache_key in self._cache:
            return self._cache[cache_key]

        # Build search directories: project-specific first, then global
        search_dirs = list(self.global_dirs)
        if project_path:
            project_dir = Path(project_path) / ".gobby" / "workflows"
            search_dirs.insert(0, project_dir)

        # 1. Find file
        path = self._find_workflow_file(name, search_dirs)
        if not path:
            logger.warning(f"Workflow '{name}' not found in {search_dirs}")
            return None

        try:
            # 2. Parse YAML
            # NOTE(review): yaml.safe_load returns None for an empty file, so the
            # "extends" membership test below would raise TypeError, which is then
            # swallowed by the generic handler — confirm empty workflow files are
            # intended to be treated as load failures.
            with open(path) as f:
                data = yaml.safe_load(f)

            # 3. Handle inheritance with cycle detection
            if "extends" in data:
                parent_name = data["extends"]
                # Add current workflow to chain before loading parent
                parent = self.load_workflow(
                    parent_name,
                    project_path=project_path,
                    _inheritance_chain=_inheritance_chain + [name],
                )
                if parent:
                    data = self._merge_workflows(parent.model_dump(), data)
                else:
                    # Parent missing: log and continue with the child data alone.
                    logger.error(f"Parent workflow '{parent_name}' not found for '{name}'")

            # 4. Validate and create model
            definition = WorkflowDefinition(**data)
            self._cache[cache_key] = definition
            return definition

        except ValueError:
            # Re-raise ValueError (used for cycle detection)
            raise
        except Exception as e:
            logger.error(f"Failed to load workflow '{name}' from {path}: {e}", exc_info=True)
            return None

    def _find_workflow_file(self, name: str, search_dirs: list[Path]) -> Path | None:
        """Return the first '{name}.yaml' found in search_dirs (in order),
        checking each directory's root and its immediate subdirectories.

        Returns None when no matching file exists anywhere.
        """
        filename = f"{name}.yaml"
        for d in search_dirs:
            # Check root directory
            candidate = d / filename
            if candidate.exists():
                return candidate
            # Check subdirectories (lifecycle/, etc.)
            for subdir in d.iterdir() if d.exists() else []:
                if subdir.is_dir():
                    candidate = subdir / filename
                    if candidate.exists():
                        return candidate
        return None

    def _merge_workflows(self, parent: dict[str, Any], child: dict[str, Any]) -> dict[str, Any]:
        """
        Deep merge parent and child workflow dicts.
        Child overrides parent.
        """
        merged = parent.copy()

        for key, value in child.items():
            if key in merged and isinstance(merged[key], dict) and isinstance(value, dict):
                # Nested mapping: recurse so the child only overrides leaves it sets.
                merged[key] = self._merge_workflows(merged[key], value)
            elif key in ("phases", "steps") and ("phases" in merged or "steps" in merged):
                # Special handling for steps/phases: merge by name
                # Support both 'steps' (new) and 'phases' (legacy YAML)
                parent_list = merged.get("phases") or merged.get("steps", [])
                merged_key = "phases" if "phases" in merged else "steps"
                merged[merged_key] = self._merge_steps(parent_list, value)
            else:
                merged[key] = value

        return merged

    def _merge_steps(self, parent_steps: list[Any], child_steps: list[Any]) -> list[Any]:
        """
        Merge step lists by step name.

        A child step with the same 'name' as a parent step overrides that
        step's keys; other child steps are appended.  Steps lacking a
        'name' key are skipped with a warning.
        """
        # Convert parent list to dict by name, creating copies to avoid mutating originals
        parent_map: dict[str, dict[str, Any]] = {}
        for s in parent_steps:
            if "name" not in s:
                logger.warning("Skipping parent step without 'name' key")
                continue
            # Create a shallow copy to avoid mutating the original
            parent_map[s["name"]] = dict(s)

        for child_step in child_steps:
            if "name" not in child_step:
                logger.warning("Skipping child step without 'name' key")
                continue
            name = child_step["name"]
            if name in parent_map:
                # Merge existing step by updating the copy with child values
                parent_map[name].update(child_step)
            else:
                # Add new step as a copy
                parent_map[name] = dict(child_step)

        return list(parent_map.values())

    def discover_lifecycle_workflows(
        self, project_path: Path | str | None = None
    ) -> list[DiscoveredWorkflow]:
        """
        Discover all lifecycle workflows from project and global directories.

        Returns workflows sorted by:
        1. Project workflows first (is_project=True), then global
        2. Within each group: by priority (ascending), then alphabetically by name

        Project workflows shadow global workflows with the same name.

        Args:
            project_path: Optional project directory. If provided, searches
                {project_path}/.gobby/workflows/ first.

        Returns:
            List of DiscoveredWorkflow objects, sorted and deduplicated.
        """
        cache_key = str(project_path) if project_path else "global"

        # Check cache
        if cache_key in self._discovery_cache:
            return self._discovery_cache[cache_key]

        discovered: dict[str, DiscoveredWorkflow] = {}  # name -> workflow (for shadowing)

        # 1. Scan global lifecycle directory first (will be shadowed by project)
        for global_dir in self.global_dirs:
            self._scan_directory(global_dir / "lifecycle", is_project=False, discovered=discovered)

        # 2. Scan project lifecycle directory (shadows global)
        if project_path:
            project_dir = Path(project_path) / ".gobby" / "workflows" / "lifecycle"
            self._scan_directory(project_dir, is_project=True, discovered=discovered)

        # 3. Filter to lifecycle workflows only
        lifecycle_workflows = [w for w in discovered.values() if w.definition.type == "lifecycle"]

        # 4. Sort: project first, then by priority (asc), then by name (alpha)
        sorted_workflows = sorted(
            lifecycle_workflows,
            key=lambda w: (
                0 if w.is_project else 1,  # Project first
                w.priority,  # Lower priority = runs first
                w.name,  # Alphabetical
            ),
        )

        # Cache and return
        self._discovery_cache[cache_key] = sorted_workflows
        return sorted_workflows

    def _scan_directory(
        self,
        directory: Path,
        is_project: bool,
        discovered: dict[str, DiscoveredWorkflow],
    ) -> None:
        """
        Scan a directory for workflow YAML files and add to discovered dict.

        Files that are empty, fail validation, or have circular inheritance
        are skipped with a warning rather than aborting the scan.

        Args:
            directory: Directory to scan
            is_project: Whether this is a project directory (for shadowing)
            discovered: Dict to update (name -> DiscoveredWorkflow)
        """
        if not directory.exists():
            return

        for yaml_path in directory.glob("*.yaml"):
            name = yaml_path.stem
            try:
                with open(yaml_path) as f:
                    data = yaml.safe_load(f)

                if not data:
                    continue

                # Handle inheritance with cycle detection
                if "extends" in data:
                    parent_name = data["extends"]
                    try:
                        # NOTE(review): parent lookup here does not pass
                        # project_path, so only global parents resolve —
                        # confirm this asymmetry with load_workflow is intended.
                        parent = self.load_workflow(
                            parent_name,
                            _inheritance_chain=[name],
                        )
                        if parent:
                            data = self._merge_workflows(parent.model_dump(), data)
                    except ValueError as e:
                        logger.warning(f"Skipping workflow {name}: {e}")
                        continue

                definition = WorkflowDefinition(**data)

                # Get priority from workflow settings or default to 100
                priority = 100
                if definition.settings and "priority" in definition.settings:
                    priority = definition.settings["priority"]

                # Project workflows shadow global (overwrite in dict)
                # Global is scanned first, so project overwrites
                discovered[name] = DiscoveredWorkflow(
                    name=name,
                    definition=definition,
                    priority=priority,
                    is_project=is_project,
                    path=yaml_path,
                )

            except Exception as e:
                logger.warning(f"Failed to load workflow from {yaml_path}: {e}")

    def clear_cache(self) -> None:
        """
        Clear the workflow definitions and discovery cache.
        Call when workflows may have changed on disk.
        """
        self._cache.clear()
        self._discovery_cache.clear()

    def clear_discovery_cache(self) -> None:
        """Clear the discovery cache. Call when workflows may have changed."""
        # Deprecated: use clear_cache instead to clear everything
        self.clear_cache()

    def validate_workflow_for_agent(
        self,
        workflow_name: str,
        project_path: Path | str | None = None,
    ) -> tuple[bool, str | None]:
        """
        Validate that a workflow can be used for agent spawning.

        Lifecycle workflows run automatically via hooks and cannot be
        explicitly activated for agents. Only step workflows are valid.

        Args:
            workflow_name: Name of the workflow to validate
            project_path: Optional project path for workflow resolution

        Returns:
            Tuple of (is_valid, error_message).
            If valid, returns (True, None).
            If invalid, returns (False, error_message).
        """
        try:
            workflow = self.load_workflow(workflow_name, project_path=project_path)
        except ValueError as e:
            # Circular inheritance or other workflow loading errors
            return False, f"Failed to load workflow '{workflow_name}': {e}"

        if not workflow:
            # Workflow not found - let the caller decide if this is an error
            return True, None

        if workflow.type == "lifecycle":
            return False, (
                f"Cannot use lifecycle workflow '{workflow_name}' for agent spawning. "
                f"Lifecycle workflows run automatically on events. "
                f"Use a step workflow like 'plan-execute' instead."
            )

        return True, None
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"""MCP tool invocation workflow actions.
|
|
2
|
+
|
|
3
|
+
Extracted from actions.py as part of strangler fig decomposition.
|
|
4
|
+
These functions handle MCP tool calls from workflows.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
async def call_mcp_tool(
|
|
14
|
+
mcp_manager: Any,
|
|
15
|
+
state: Any,
|
|
16
|
+
server_name: str | None,
|
|
17
|
+
tool_name: str | None,
|
|
18
|
+
arguments: dict[str, Any] | None = None,
|
|
19
|
+
output_as: str | None = None,
|
|
20
|
+
) -> dict[str, Any]:
|
|
21
|
+
"""Call an MCP tool on a connected server.
|
|
22
|
+
|
|
23
|
+
Args:
|
|
24
|
+
mcp_manager: MCP client manager instance
|
|
25
|
+
state: WorkflowState object for storing results
|
|
26
|
+
server_name: Name of the MCP server
|
|
27
|
+
tool_name: Name of the tool to call
|
|
28
|
+
arguments: Arguments to pass to the tool
|
|
29
|
+
output_as: Optional variable name to store result
|
|
30
|
+
|
|
31
|
+
Returns:
|
|
32
|
+
Dict with result and stored_as, or error
|
|
33
|
+
"""
|
|
34
|
+
if not server_name or not tool_name:
|
|
35
|
+
return {"error": "Missing server_name or tool_name"}
|
|
36
|
+
|
|
37
|
+
if not mcp_manager:
|
|
38
|
+
logger.warning("call_mcp_tool: MCP manager not available")
|
|
39
|
+
return {"error": "MCP manager not available"}
|
|
40
|
+
|
|
41
|
+
try:
|
|
42
|
+
# Check connection
|
|
43
|
+
if server_name not in mcp_manager.connections:
|
|
44
|
+
return {"error": f"Server {server_name} not connected"}
|
|
45
|
+
|
|
46
|
+
# Call tool
|
|
47
|
+
result = await mcp_manager.call_tool(server_name, tool_name, arguments or {})
|
|
48
|
+
|
|
49
|
+
# Store result in workflow variable if 'as' specified
|
|
50
|
+
if output_as:
|
|
51
|
+
if state is None:
|
|
52
|
+
raise ValueError("state must be provided when output_as is specified")
|
|
53
|
+
if not state.variables:
|
|
54
|
+
state.variables = {}
|
|
55
|
+
state.variables[output_as] = result
|
|
56
|
+
|
|
57
|
+
return {"result": result, "stored_as": output_as}
|
|
58
|
+
except Exception as e:
|
|
59
|
+
logger.error(f"call_mcp_tool: Failed: {e}")
|
|
60
|
+
return {"error": str(e)}
|
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
"""Memory-related workflow actions.
|
|
2
|
+
|
|
3
|
+
Extracted from actions.py as part of strangler fig decomposition.
|
|
4
|
+
These functions handle memory injection, extraction, saving, and recall.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import hashlib
|
|
8
|
+
import logging
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _content_fingerprint(content: str) -> str:
|
|
15
|
+
"""Generate a secure fingerprint of content for logging (avoids PII exposure)."""
|
|
16
|
+
return hashlib.sha256(content.encode("utf-8")).hexdigest()[:16]
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def memory_sync_import(memory_sync_manager: Any) -> dict[str, Any]:
    """Import memories from the filesystem into the memory store.

    Args:
        memory_sync_manager: The memory sync manager instance

    Returns:
        Dict with the imported count, or an error dict when no manager is available
    """
    if not memory_sync_manager:
        return {"error": "Memory Sync Manager not available"}

    imported_count = await memory_sync_manager.import_from_files()
    logger.info(f"Memory sync import: {imported_count} memories imported")
    return {"imported": {"memories": imported_count}}
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
async def memory_sync_export(memory_sync_manager: Any) -> dict[str, Any]:
    """Export memories from the memory store to the filesystem.

    Args:
        memory_sync_manager: The memory sync manager instance

    Returns:
        Dict with the exported count, or an error dict when no manager is available
    """
    if not memory_sync_manager:
        return {"error": "Memory Sync Manager not available"}

    exported_count = await memory_sync_manager.export_to_files()
    logger.info(f"Memory sync export: {exported_count} memories exported")
    return {"exported": {"memories": exported_count}}
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
async def memory_save(
|
|
54
|
+
memory_manager: Any,
|
|
55
|
+
session_manager: Any,
|
|
56
|
+
session_id: str,
|
|
57
|
+
content: str | None = None,
|
|
58
|
+
memory_type: str = "fact",
|
|
59
|
+
importance: float = 0.5,
|
|
60
|
+
tags: list[str] | None = None,
|
|
61
|
+
project_id: str | None = None,
|
|
62
|
+
) -> dict[str, Any] | None:
|
|
63
|
+
"""Save a memory directly from workflow context.
|
|
64
|
+
|
|
65
|
+
Args:
|
|
66
|
+
memory_manager: The memory manager instance
|
|
67
|
+
session_manager: The session manager instance
|
|
68
|
+
session_id: Current session ID
|
|
69
|
+
content: The memory content to save (required)
|
|
70
|
+
memory_type: One of 'fact', 'preference', 'pattern', 'context'
|
|
71
|
+
importance: Float 0.0-1.0
|
|
72
|
+
tags: List of string tags
|
|
73
|
+
project_id: Override project ID
|
|
74
|
+
|
|
75
|
+
Returns:
|
|
76
|
+
Dict with saved status and memory_id, or error
|
|
77
|
+
"""
|
|
78
|
+
if not memory_manager:
|
|
79
|
+
return {"error": "Memory Manager not available"}
|
|
80
|
+
|
|
81
|
+
if not memory_manager.config.enabled:
|
|
82
|
+
return None
|
|
83
|
+
|
|
84
|
+
if not content:
|
|
85
|
+
return {"error": "Missing required 'content' parameter"}
|
|
86
|
+
|
|
87
|
+
# Resolve project_id
|
|
88
|
+
if not project_id:
|
|
89
|
+
session = session_manager.get(session_id)
|
|
90
|
+
if session:
|
|
91
|
+
project_id = session.project_id
|
|
92
|
+
|
|
93
|
+
if not project_id:
|
|
94
|
+
return {"error": "No project_id found"}
|
|
95
|
+
|
|
96
|
+
# Validate memory_type
|
|
97
|
+
if memory_type not in ("fact", "preference", "pattern", "context"):
|
|
98
|
+
memory_type = "fact"
|
|
99
|
+
|
|
100
|
+
# Validate importance
|
|
101
|
+
if not isinstance(importance, int | float):
|
|
102
|
+
importance = 0.5
|
|
103
|
+
importance = max(0.0, min(1.0, float(importance)))
|
|
104
|
+
|
|
105
|
+
# Validate tags
|
|
106
|
+
if tags is None:
|
|
107
|
+
tags = []
|
|
108
|
+
if not isinstance(tags, list):
|
|
109
|
+
tags = []
|
|
110
|
+
|
|
111
|
+
try:
|
|
112
|
+
if memory_manager.content_exists(content, project_id):
|
|
113
|
+
logger.debug(f"save_memory: Skipping duplicate: {content[:50]}...")
|
|
114
|
+
return {"saved": False, "reason": "duplicate"}
|
|
115
|
+
|
|
116
|
+
memory = await memory_manager.remember(
|
|
117
|
+
content=content,
|
|
118
|
+
memory_type=memory_type,
|
|
119
|
+
importance=importance,
|
|
120
|
+
project_id=project_id,
|
|
121
|
+
source_type="workflow",
|
|
122
|
+
source_session_id=session_id,
|
|
123
|
+
tags=tags,
|
|
124
|
+
)
|
|
125
|
+
|
|
126
|
+
logger.info(f"save_memory: Created {memory_type} memory: {content[:50]}...")
|
|
127
|
+
return {
|
|
128
|
+
"saved": True,
|
|
129
|
+
"memory_id": memory.id,
|
|
130
|
+
"memory_type": memory_type,
|
|
131
|
+
"importance": importance,
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
except Exception as e:
|
|
135
|
+
logger.error(f"save_memory: Failed: {e}", exc_info=True)
|
|
136
|
+
return {"error": str(e)}
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
async def memory_recall_relevant(
|
|
140
|
+
memory_manager: Any,
|
|
141
|
+
session_manager: Any,
|
|
142
|
+
session_id: str,
|
|
143
|
+
prompt_text: str | None = None,
|
|
144
|
+
project_id: str | None = None,
|
|
145
|
+
limit: int = 5,
|
|
146
|
+
min_importance: float = 0.3,
|
|
147
|
+
state: Any | None = None,
|
|
148
|
+
) -> dict[str, Any] | None:
|
|
149
|
+
"""Recall memories relevant to the current user prompt.
|
|
150
|
+
|
|
151
|
+
Args:
|
|
152
|
+
memory_manager: The memory manager instance
|
|
153
|
+
session_manager: The session manager instance
|
|
154
|
+
session_id: Current session ID
|
|
155
|
+
prompt_text: The user's prompt text
|
|
156
|
+
project_id: Override project ID
|
|
157
|
+
limit: Max memories to retrieve
|
|
158
|
+
min_importance: Minimum importance threshold
|
|
159
|
+
state: WorkflowState for tracking injected memory IDs (for deduplication)
|
|
160
|
+
|
|
161
|
+
Returns:
|
|
162
|
+
Dict with inject_context and count, or None if disabled
|
|
163
|
+
"""
|
|
164
|
+
if not memory_manager:
|
|
165
|
+
return None
|
|
166
|
+
|
|
167
|
+
if not memory_manager.config.enabled:
|
|
168
|
+
return None
|
|
169
|
+
|
|
170
|
+
if not prompt_text:
|
|
171
|
+
logger.debug("memory_recall_relevant: No prompt_text provided")
|
|
172
|
+
return None
|
|
173
|
+
|
|
174
|
+
# Skip for very short prompts or commands
|
|
175
|
+
if len(prompt_text.strip()) < 10 or prompt_text.strip().startswith("/"):
|
|
176
|
+
logger.debug("memory_recall_relevant: Skipping short/command prompt")
|
|
177
|
+
return None
|
|
178
|
+
|
|
179
|
+
# Resolve project_id
|
|
180
|
+
if not project_id:
|
|
181
|
+
session = session_manager.get(session_id)
|
|
182
|
+
if session:
|
|
183
|
+
project_id = session.project_id
|
|
184
|
+
|
|
185
|
+
# Get already-injected memory IDs from state for deduplication
|
|
186
|
+
injected_ids: set[str] = set()
|
|
187
|
+
if state is not None:
|
|
188
|
+
# Access variables dict, defaulting to empty if not set
|
|
189
|
+
variables = getattr(state, "variables", None) or {}
|
|
190
|
+
injected_ids = set(variables.get("_injected_memory_ids", []))
|
|
191
|
+
|
|
192
|
+
try:
|
|
193
|
+
memories = memory_manager.recall(
|
|
194
|
+
query=prompt_text,
|
|
195
|
+
project_id=project_id,
|
|
196
|
+
limit=limit,
|
|
197
|
+
min_importance=min_importance,
|
|
198
|
+
use_semantic=True,
|
|
199
|
+
)
|
|
200
|
+
|
|
201
|
+
if not memories:
|
|
202
|
+
logger.debug("memory_recall_relevant: No relevant memories found")
|
|
203
|
+
return {"injected": False, "count": 0}
|
|
204
|
+
|
|
205
|
+
# Filter out memories that have already been injected in this session
|
|
206
|
+
new_memories = [m for m in memories if m.id not in injected_ids]
|
|
207
|
+
|
|
208
|
+
if not new_memories:
|
|
209
|
+
logger.debug(
|
|
210
|
+
f"memory_recall_relevant: All {len(memories)} memories already injected, skipping"
|
|
211
|
+
)
|
|
212
|
+
return {"injected": False, "count": 0, "skipped": len(memories)}
|
|
213
|
+
|
|
214
|
+
from gobby.memory.context import build_memory_context
|
|
215
|
+
|
|
216
|
+
memory_context = build_memory_context(new_memories)
|
|
217
|
+
|
|
218
|
+
# Track newly injected memory IDs in state
|
|
219
|
+
if state is not None:
|
|
220
|
+
new_ids = {m.id for m in new_memories}
|
|
221
|
+
all_injected = injected_ids | new_ids
|
|
222
|
+
# Ensure variables dict exists
|
|
223
|
+
if not hasattr(state, "variables") or state.variables is None:
|
|
224
|
+
state.variables = {}
|
|
225
|
+
state.variables["_injected_memory_ids"] = list(all_injected)
|
|
226
|
+
logger.debug(
|
|
227
|
+
f"memory_recall_relevant: Tracking {len(new_ids)} new IDs, "
|
|
228
|
+
f"{len(all_injected)} total injected"
|
|
229
|
+
)
|
|
230
|
+
|
|
231
|
+
logger.info(f"memory_recall_relevant: Injecting {len(new_memories)} relevant memories")
|
|
232
|
+
|
|
233
|
+
return {
|
|
234
|
+
"inject_context": memory_context,
|
|
235
|
+
"injected": True,
|
|
236
|
+
"count": len(new_memories),
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
except Exception as e:
|
|
240
|
+
logger.error(f"memory_recall_relevant: Failed: {e}", exc_info=True)
|
|
241
|
+
return {"error": str(e)}
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def reset_memory_injection_tracking(state: Any | None = None) -> dict[str, Any]:
|
|
245
|
+
"""Reset the memory injection tracking, allowing previously injected memories to be recalled again.
|
|
246
|
+
|
|
247
|
+
This should be called on pre_compact hook or /clear command so memories can be
|
|
248
|
+
re-injected after context loss.
|
|
249
|
+
|
|
250
|
+
Args:
|
|
251
|
+
state: WorkflowState containing the injection tracking in variables
|
|
252
|
+
|
|
253
|
+
Returns:
|
|
254
|
+
Dict with cleared count and success status
|
|
255
|
+
"""
|
|
256
|
+
if state is None:
|
|
257
|
+
logger.debug("reset_memory_injection_tracking: No state provided")
|
|
258
|
+
return {"success": False, "cleared": 0, "reason": "no_state"}
|
|
259
|
+
|
|
260
|
+
variables = getattr(state, "variables", None)
|
|
261
|
+
if variables is None:
|
|
262
|
+
logger.debug("reset_memory_injection_tracking: No variables in state")
|
|
263
|
+
return {"success": True, "cleared": 0}
|
|
264
|
+
|
|
265
|
+
injected_ids = variables.get("_injected_memory_ids", [])
|
|
266
|
+
cleared_count = len(injected_ids)
|
|
267
|
+
|
|
268
|
+
if cleared_count > 0:
|
|
269
|
+
variables["_injected_memory_ids"] = []
|
|
270
|
+
logger.info(f"reset_memory_injection_tracking: Cleared {cleared_count} injected memory IDs")
|
|
271
|
+
|
|
272
|
+
return {"success": True, "cleared": cleared_count}
|