gobby 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
gobby/memory/manager.py
ADDED
|
@@ -0,0 +1,1001 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import mimetypes
|
|
5
|
+
from datetime import UTC, datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import TYPE_CHECKING, Any
|
|
8
|
+
|
|
9
|
+
from gobby.config.app import MemoryConfig
|
|
10
|
+
from gobby.memory.backends import get_backend
|
|
11
|
+
from gobby.memory.context import build_memory_context
|
|
12
|
+
from gobby.memory.protocol import MediaAttachment, MemoryBackendProtocol
|
|
13
|
+
from gobby.storage.database import DatabaseProtocol
|
|
14
|
+
from gobby.storage.memories import LocalMemoryManager, Memory
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from gobby.llm.service import LLMService
|
|
18
|
+
from gobby.memory.search import SearchBackend
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class MemoryManager:
|
|
24
|
+
"""
|
|
25
|
+
High-level manager for memory operations.
|
|
26
|
+
Handles storage, ranking, decay, and business logic.
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
def __init__(
    self,
    db: DatabaseProtocol,
    config: MemoryConfig,
    llm_service: LLMService | None = None,
):
    """Wire up storage, backend, and lazy search state.

    Args:
        db: Database connection implementing DatabaseProtocol.
        config: Memory subsystem configuration.
        llm_service: Optional LLM service used for image description.
    """
    self.db = db
    self.config = config
    self._llm_service = llm_service

    # Choose the storage backend from config (defaults to "sqlite").
    # Note: SQLiteBackend wraps LocalMemoryManager internally.
    self._backend: MemoryBackendProtocol = get_backend(
        getattr(config, "backend", "sqlite"), database=db
    )

    # Direct LocalMemoryManager handle retained for the synchronous API
    # paths (backward compatibility with sync methods).
    self.storage = LocalMemoryManager(db)

    # The search index is created and fitted lazily on first use.
    self._search_backend: SearchBackend | None = None
    self._search_backend_fitted = False
|
|
50
|
+
|
|
51
|
+
@property
def llm_service(self) -> LLMService | None:
    """The LLM service used for image description, or None if unset."""
    return self._llm_service

@llm_service.setter
def llm_service(self, service: LLMService | None) -> None:
    """Replace the LLM service used for image description."""
    self._llm_service = service
|
|
60
|
+
|
|
61
|
+
@property
def search_backend(self) -> SearchBackend:
    """Search backend, created on first access.

    The concrete implementation is selected by ``config.search_backend``:

    - "tfidf" (default): zero-dependency TF-IDF search
    - "text": simple text substring matching

    Any failure to build the configured backend falls back to TF-IDF.
    """
    if self._search_backend is not None:
        return self._search_backend

    from gobby.memory.search import get_search_backend

    chosen = getattr(self.config, "search_backend", "tfidf")
    log = logging.getLogger(__name__)
    log.debug(f"Initializing search backend: {chosen}")
    try:
        self._search_backend = get_search_backend(backend_type=chosen, db=self.db)
    except Exception as e:
        # NOTE(review): the fallback is built without db= — presumably the
        # tfidf backend needs no database handle; confirm.
        log.warning(f"Failed to initialize {chosen} backend: {e}. Falling back to tfidf")
        self._search_backend = get_search_backend("tfidf")
    return self._search_backend
|
|
88
|
+
|
|
89
|
+
def _ensure_search_backend_fitted(self) -> None:
|
|
90
|
+
"""Ensure the search backend is fitted with current memories."""
|
|
91
|
+
if self._search_backend_fitted:
|
|
92
|
+
return
|
|
93
|
+
|
|
94
|
+
backend = self.search_backend
|
|
95
|
+
if not backend.needs_refit():
|
|
96
|
+
self._search_backend_fitted = True
|
|
97
|
+
return
|
|
98
|
+
|
|
99
|
+
# Fit the backend with all memories
|
|
100
|
+
memories = self.storage.list_memories(limit=10000)
|
|
101
|
+
memory_tuples = [(m.id, m.content) for m in memories]
|
|
102
|
+
|
|
103
|
+
try:
|
|
104
|
+
backend.fit(memory_tuples)
|
|
105
|
+
self._search_backend_fitted = True
|
|
106
|
+
logger.info(f"Search backend fitted with {len(memory_tuples)} memories")
|
|
107
|
+
except Exception as e:
|
|
108
|
+
logger.error(f"Failed to fit search backend: {e}")
|
|
109
|
+
raise
|
|
110
|
+
|
|
111
|
+
def mark_search_refit_needed(self) -> None:
    """Invalidate the fitted flag so the next search triggers a refit."""
    self._search_backend_fitted = False
|
|
114
|
+
|
|
115
|
+
def reindex_search(self) -> dict[str, Any]:
    """Force a rebuild of the search index from all stored memories.

    Useful for initial index building, recovery after corruption, or
    after bulk memory operations.

    Returns:
        Dict with index statistics: ``success`` flag, ``memory_count``,
        ``backend_type``, ``fitted``, plus any backend-reported stats;
        on failure an ``error`` entry replaces the stats.
    """
    corpus = [(m.id, m.content) for m in self.storage.list_memories(limit=10000)]
    engine = self.search_backend
    kind = getattr(self.config, "search_backend", "tfidf")

    try:
        engine.fit(corpus)
        self._search_backend_fitted = True
        # Merge backend-specific statistics when the backend exposes them.
        extra = engine.get_stats() if hasattr(engine, "get_stats") else {}
        result: dict[str, Any] = {
            "success": True,
            "memory_count": len(corpus),
            "backend_type": kind,
            "fitted": True,
        }
        result.update(extra)
        return result
    except Exception as e:
        logging.getLogger(__name__).error(f"Failed to reindex search backend: {e}")
        return {
            "success": False,
            "error": str(e),
            "memory_count": len(corpus),
            "backend_type": kind,
        }
|
|
158
|
+
|
|
159
|
+
async def remember(
    self,
    content: str,
    memory_type: str = "fact",
    importance: float = 0.5,
    project_id: str | None = None,
    source_type: str = "user",
    source_session_id: str | None = None,
    tags: list[str] | None = None,
) -> Memory:
    """Store a new memory and invalidate the search index.

    Args:
        content: The memory content.
        memory_type: Type of memory (fact, preference, etc).
        importance: Importance score in [0.0, 1.0].
        project_id: Optional project context.
        source_type: Origin of memory.
        source_session_id: Origin session.
        tags: Optional tags.

    Returns:
        The created Memory object.
    """
    # Duplicate detection is delegated to the storage layer, which uses a
    # content-hash ID. (Future: embeddings or fuzzy matching.)
    stored = self.storage.create_memory(
        content=content,
        memory_type=memory_type,
        importance=importance,
        project_id=project_id,
        source_type=source_type,
        source_session_id=source_session_id,
        tags=tags,
    )

    # New content invalidates any fitted search index.
    self.mark_search_refit_needed()

    # Best-effort auto cross-referencing: a failure here must not lose
    # the memory we just stored.
    if getattr(self.config, "auto_crossref", False):
        try:
            self._create_crossrefs(stored)
        except Exception as e:
            logging.getLogger(__name__).warning(
                f"Auto-crossref failed for {stored.id}: {e}"
            )

    return stored
|
|
206
|
+
|
|
207
|
+
async def remember_with_image(
    self,
    image_path: str,
    context: str | None = None,
    memory_type: str = "fact",
    importance: float = 0.5,
    project_id: str | None = None,
    source_type: str = "user",
    source_session_id: str | None = None,
    tags: list[str] | None = None,
) -> Memory:
    """Store a memory with an image attachment.

    The configured LLM provider generates a description of the image;
    that description becomes the memory content and the image is stored
    as a media attachment.

    Args:
        image_path: Path to the image file.
        context: Optional context to guide the image description.
        memory_type: Type of memory (fact, preference, etc).
        importance: Importance score in [0.0, 1.0].
        project_id: Optional project context.
        source_type: Origin of memory.
        source_session_id: Origin session.
        tags: Optional tags.

    Returns:
        The created Memory object.

    Raises:
        ValueError: If the LLM service is not configured or the image
            file does not exist.
    """
    path = Path(image_path)
    if not path.exists():
        raise ValueError(f"Image not found: {image_path}")

    if not self._llm_service:
        raise ValueError(
            "LLM service not configured. Pass llm_service to MemoryManager "
            "to enable remember_with_image."
        )

    provider = self._llm_service.get_default_provider()
    description = await provider.describe_image(image_path, context=context)

    # MIME type from the file extension, with a generic binary fallback.
    guessed, _ = mimetypes.guess_type(str(path))
    attachment = MediaAttachment(
        media_type="image",
        content_path=str(path.absolute()),
        mime_type=guessed or "application/octet-stream",
        description=description,
        description_model=provider.provider_name,
    )

    # Persist via the configured backend with the image attached.
    record = await self._backend.create(
        content=description,
        memory_type=memory_type,
        importance=importance,
        project_id=project_id,
        source_type=source_type,
        source_session_id=source_session_id,
        tags=tags,
        media=[attachment],
    )

    self.mark_search_refit_needed()

    # Prefer the canonical row from local storage for backward
    # compatibility (the backend returns a MemoryRecord, not a Memory).
    memory = self.storage.get_memory(record.id)
    if memory is not None:
        return memory

    # Fallback: convert the backend record directly. This can happen
    # with synthetic records from failed backend calls.
    created = record.created_at.isoformat()
    updated = record.updated_at.isoformat() if record.updated_at else created
    return Memory(
        id=record.id,
        content=record.content,
        memory_type=record.memory_type,
        created_at=created,
        updated_at=updated,
        project_id=record.project_id,
        source_type=record.source_type,
        source_session_id=record.source_session_id,
        importance=record.importance,
        tags=record.tags,
    )
|
|
308
|
+
|
|
309
|
+
async def remember_screenshot(
    self,
    screenshot_bytes: bytes,
    context: str | None = None,
    memory_type: str = "observation",
    importance: float = 0.5,
    project_id: str | None = None,
    source_type: str = "user",
    source_session_id: str | None = None,
    tags: list[str] | None = None,
) -> Memory:
    """Store a memory from raw screenshot bytes.

    Writes the screenshot to .gobby/resources/ under a timestamp-based
    filename, then delegates to :meth:`remember_with_image` for LLM
    description and storage.

    Args:
        screenshot_bytes: Raw PNG screenshot bytes (from Playwright/Puppeteer).
        context: Optional context to guide the image description.
        memory_type: Type of memory (default: "observation").
        importance: Importance score in [0.0, 1.0].
        project_id: Optional project context.
        source_type: Origin of memory.
        source_session_id: Origin session.
        tags: Optional tags.

    Returns:
        The created Memory object.

    Raises:
        ValueError: If the LLM service is not configured or the
            screenshot bytes are empty.
    """
    if not screenshot_bytes:
        raise ValueError("Screenshot bytes cannot be empty")

    # Local imports keep the CLI/project-context machinery off the hot path.
    from datetime import datetime as dt

    from gobby.cli.utils import get_resources_dir
    from gobby.utils.project_context import get_project_context

    # Resolve the resources directory for the current project (if any).
    project = get_project_context()
    target_dir = get_resources_dir(project.get("path") if project else None)

    # Microsecond-resolution timestamp keeps concurrent captures distinct.
    stamp = dt.now().strftime("%Y%m%d_%H%M%S_%f")
    destination = target_dir / f"screenshot_{stamp}.png"
    destination.write_bytes(screenshot_bytes)
    logging.getLogger(__name__).debug(f"Saved screenshot to {destination}")

    return await self.remember_with_image(
        image_path=str(destination),
        context=context,
        memory_type=memory_type,
        importance=importance,
        project_id=project_id,
        source_type=source_type,
        source_session_id=source_session_id,
        tags=tags,
    )
|
|
375
|
+
|
|
376
|
+
def _create_crossrefs(
|
|
377
|
+
self,
|
|
378
|
+
memory: Memory,
|
|
379
|
+
threshold: float | None = None,
|
|
380
|
+
max_links: int | None = None,
|
|
381
|
+
) -> int:
|
|
382
|
+
"""
|
|
383
|
+
Find and link similar memories.
|
|
384
|
+
|
|
385
|
+
Uses the search backend to find memories similar to the given one
|
|
386
|
+
and creates cross-references for those above the threshold.
|
|
387
|
+
|
|
388
|
+
Args:
|
|
389
|
+
memory: The memory to find links for
|
|
390
|
+
threshold: Minimum similarity to create link (default from config)
|
|
391
|
+
max_links: Maximum links to create (default from config)
|
|
392
|
+
|
|
393
|
+
Returns:
|
|
394
|
+
Number of cross-references created
|
|
395
|
+
"""
|
|
396
|
+
# Get thresholds from config or use defaults
|
|
397
|
+
if threshold is None:
|
|
398
|
+
threshold = getattr(self.config, "crossref_threshold", None)
|
|
399
|
+
if threshold is None:
|
|
400
|
+
threshold = 0.3
|
|
401
|
+
if max_links is None:
|
|
402
|
+
max_links = getattr(self.config, "crossref_max_links", None)
|
|
403
|
+
if max_links is None:
|
|
404
|
+
max_links = 5
|
|
405
|
+
|
|
406
|
+
# Ensure search backend is fitted
|
|
407
|
+
self._ensure_search_backend_fitted()
|
|
408
|
+
|
|
409
|
+
# Search for similar memories
|
|
410
|
+
similar = self.search_backend.search(memory.content, top_k=max_links + 1)
|
|
411
|
+
|
|
412
|
+
# Create cross-references
|
|
413
|
+
created = 0
|
|
414
|
+
for other_id, score in similar:
|
|
415
|
+
# Skip self-reference
|
|
416
|
+
if other_id == memory.id:
|
|
417
|
+
continue
|
|
418
|
+
|
|
419
|
+
# Skip below threshold
|
|
420
|
+
if score < threshold:
|
|
421
|
+
continue
|
|
422
|
+
|
|
423
|
+
# Create the crossref
|
|
424
|
+
self.storage.create_crossref(memory.id, other_id, score)
|
|
425
|
+
created += 1
|
|
426
|
+
|
|
427
|
+
if created >= max_links:
|
|
428
|
+
break
|
|
429
|
+
|
|
430
|
+
if created > 0:
|
|
431
|
+
logger.debug(f"Created {created} crossrefs for memory {memory.id}")
|
|
432
|
+
|
|
433
|
+
return created
|
|
434
|
+
|
|
435
|
+
def get_related(
    self,
    memory_id: str,
    limit: int = 5,
    min_similarity: float = 0.0,
) -> list[Memory]:
    """
    Return memories connected to *memory_id* through stored cross-references.

    Args:
        memory_id: Anchor memory whose neighbours should be fetched
        limit: Cap on the number of cross-references considered
        min_similarity: Drop links whose similarity falls below this value

    Returns:
        The Memory objects on the far side of each link, in crossref order
    """
    links = self.storage.get_crossrefs(
        memory_id, limit=limit, min_similarity=min_similarity
    )

    related: list[Memory] = []
    for link in links:
        # A crossref has two endpoints; pick whichever one is not the anchor.
        if link.source_id == memory_id:
            neighbour_id = link.target_id
        else:
            neighbour_id = link.source_id
        neighbour = self.get_memory(neighbour_id)
        if neighbour:
            related.append(neighbour)

    return related
|
|
466
|
+
|
|
467
|
+
def recall(
    self,
    query: str | None = None,
    project_id: str | None = None,
    limit: int = 10,
    min_importance: float | None = None,
    memory_type: str | None = None,
    use_semantic: bool | None = None,
    search_mode: str | None = None,
    tags_all: list[str] | None = None,
    tags_any: list[str] | None = None,
    tags_none: list[str] | None = None,
) -> list[Memory]:
    """
    Retrieve memories.

    If query is provided, performs search/ranking.
    If no query, returns top important memories.

    Args:
        query: Optional search query for semantic/text search
        project_id: Filter by project
        limit: Maximum memories to return
        min_importance: Minimum importance threshold (defaults to the
            configured ``importance_threshold``)
        memory_type: Filter by memory type
        use_semantic: Use semantic search (deprecated, use search_mode instead)
        search_mode: Search mode - "auto" (default), "tfidf", "openai", "hybrid", "text"
        tags_all: Memory must have ALL of these tags
        tags_any: Memory must have at least ONE of these tags
        tags_none: Memory must have NONE of these tags

    Returns:
        Matching Memory objects; access stats are bumped as a side effect.
    """
    threshold = (
        min_importance if min_importance is not None else self.config.importance_threshold
    )

    if query:
        memories = self._recall_with_search(
            query=query,
            project_id=project_id,
            limit=limit,
            min_importance=threshold,
            use_semantic=use_semantic,
            search_mode=search_mode,
            tags_all=tags_all,
            tags_any=tags_any,
            tags_none=tags_none,
        )
        # Fix: the search path previously ignored memory_type entirely,
        # although the docstring promises the filter. Apply it to the
        # search results here (the search backend has no type filter).
        if memory_type:
            memories = [m for m in memories if m.memory_type == memory_type]
    else:
        # No query: just return the top memories by the storage ordering.
        memories = self.storage.list_memories(
            project_id=project_id,
            memory_type=memory_type,
            min_importance=threshold,
            limit=limit,
            tags_all=tags_all,
            tags_any=tags_any,
            tags_none=tags_none,
        )

    # Update access count/time for retrieved memories (debounced).
    self._update_access_stats(memories)

    return memories
|
|
530
|
+
|
|
531
|
+
def _recall_with_search(
|
|
532
|
+
self,
|
|
533
|
+
query: str,
|
|
534
|
+
project_id: str | None = None,
|
|
535
|
+
limit: int = 10,
|
|
536
|
+
min_importance: float | None = None,
|
|
537
|
+
use_semantic: bool | None = None,
|
|
538
|
+
search_mode: str | None = None,
|
|
539
|
+
tags_all: list[str] | None = None,
|
|
540
|
+
tags_any: list[str] | None = None,
|
|
541
|
+
tags_none: list[str] | None = None,
|
|
542
|
+
) -> list[Memory]:
|
|
543
|
+
"""
|
|
544
|
+
Perform search using the configured search backend.
|
|
545
|
+
|
|
546
|
+
Uses the new search backend by default (TF-IDF),
|
|
547
|
+
falling back to legacy semantic search if configured.
|
|
548
|
+
"""
|
|
549
|
+
# Determine search mode from config or parameters
|
|
550
|
+
if search_mode is None:
|
|
551
|
+
search_mode = getattr(self.config, "search_backend", "tfidf")
|
|
552
|
+
|
|
553
|
+
# Legacy compatibility: use_semantic is deprecated
|
|
554
|
+
if use_semantic is not None:
|
|
555
|
+
logger.warning("use_semantic argument is deprecated and ignored")
|
|
556
|
+
|
|
557
|
+
# Use the search backend
|
|
558
|
+
try:
|
|
559
|
+
self._ensure_search_backend_fitted()
|
|
560
|
+
# Fetch more results to allow for filtering
|
|
561
|
+
fetch_multiplier = 3 if (tags_all or tags_any or tags_none) else 2
|
|
562
|
+
results = self.search_backend.search(query, top_k=limit * fetch_multiplier)
|
|
563
|
+
|
|
564
|
+
# Get the actual Memory objects
|
|
565
|
+
memory_ids = [mid for mid, _ in results]
|
|
566
|
+
memories = []
|
|
567
|
+
for mid in memory_ids:
|
|
568
|
+
memory = self.get_memory(mid)
|
|
569
|
+
if memory:
|
|
570
|
+
# Apply filters
|
|
571
|
+
if project_id and memory.project_id != project_id:
|
|
572
|
+
if memory.project_id is not None: # Allow global memories
|
|
573
|
+
continue
|
|
574
|
+
if min_importance and memory.importance < min_importance:
|
|
575
|
+
continue
|
|
576
|
+
# Apply tag filters
|
|
577
|
+
if not self._passes_tag_filter(memory, tags_all, tags_any, tags_none):
|
|
578
|
+
continue
|
|
579
|
+
memories.append(memory)
|
|
580
|
+
if len(memories) >= limit:
|
|
581
|
+
break
|
|
582
|
+
|
|
583
|
+
return memories
|
|
584
|
+
|
|
585
|
+
except Exception as e:
|
|
586
|
+
logger.warning(f"Search backend failed, falling back to text search: {e}")
|
|
587
|
+
# Fall back to text search with tag filtering
|
|
588
|
+
memories = self.storage.search_memories(
|
|
589
|
+
query_text=query,
|
|
590
|
+
project_id=project_id,
|
|
591
|
+
limit=limit * 2,
|
|
592
|
+
tags_all=tags_all,
|
|
593
|
+
tags_any=tags_any,
|
|
594
|
+
tags_none=tags_none,
|
|
595
|
+
)
|
|
596
|
+
if min_importance:
|
|
597
|
+
memories = [m for m in memories if m.importance >= min_importance]
|
|
598
|
+
return memories[:limit]
|
|
599
|
+
|
|
600
|
+
def _passes_tag_filter(
|
|
601
|
+
self,
|
|
602
|
+
memory: Memory,
|
|
603
|
+
tags_all: list[str] | None = None,
|
|
604
|
+
tags_any: list[str] | None = None,
|
|
605
|
+
tags_none: list[str] | None = None,
|
|
606
|
+
) -> bool:
|
|
607
|
+
"""Check if a memory passes the tag filter criteria."""
|
|
608
|
+
memory_tags = set(memory.tags) if memory.tags else set()
|
|
609
|
+
|
|
610
|
+
# Check tags_all: memory must have ALL specified tags
|
|
611
|
+
if tags_all and not set(tags_all).issubset(memory_tags):
|
|
612
|
+
return False
|
|
613
|
+
|
|
614
|
+
# Check tags_any: memory must have at least ONE specified tag
|
|
615
|
+
if tags_any and not memory_tags.intersection(tags_any):
|
|
616
|
+
return False
|
|
617
|
+
|
|
618
|
+
# Check tags_none: memory must have NONE of the specified tags
|
|
619
|
+
if tags_none and memory_tags.intersection(tags_none):
|
|
620
|
+
return False
|
|
621
|
+
|
|
622
|
+
return True
|
|
623
|
+
|
|
624
|
+
def recall_as_context(
    self,
    project_id: str | None = None,
    limit: int = 10,
    min_importance: float | None = None,
) -> str:
    """
    Fetch top memories and render them as context for LLM prompts.

    Thin convenience wrapper: recall() followed by build_memory_context().

    Args:
        project_id: Filter by project
        limit: Maximum memories to include
        min_importance: Minimum importance threshold

    Returns:
        Markdown wrapped in <project-memory> tags, or an empty string
        when no memories matched.
    """
    top_memories = self.recall(
        project_id=project_id,
        limit=limit,
        min_importance=min_importance,
    )
    return build_memory_context(top_memories)
|
|
651
|
+
|
|
652
|
+
def _update_access_stats(self, memories: list[Memory]) -> None:
|
|
653
|
+
"""
|
|
654
|
+
Update access count and time for memories.
|
|
655
|
+
|
|
656
|
+
Implements debouncing to avoid excessive database writes when the same
|
|
657
|
+
memory is accessed multiple times in quick succession.
|
|
658
|
+
"""
|
|
659
|
+
if not memories:
|
|
660
|
+
return
|
|
661
|
+
|
|
662
|
+
now = datetime.now(UTC)
|
|
663
|
+
debounce_seconds = getattr(self.config, "access_debounce_seconds", 60)
|
|
664
|
+
|
|
665
|
+
for memory in memories:
|
|
666
|
+
# Check if we should debounce this update
|
|
667
|
+
if memory.last_accessed_at:
|
|
668
|
+
try:
|
|
669
|
+
last_access = datetime.fromisoformat(memory.last_accessed_at)
|
|
670
|
+
if last_access.tzinfo is None:
|
|
671
|
+
last_access = last_access.replace(tzinfo=UTC)
|
|
672
|
+
seconds_since = (now - last_access).total_seconds()
|
|
673
|
+
if seconds_since < debounce_seconds:
|
|
674
|
+
# Skip update - accessed too recently
|
|
675
|
+
continue
|
|
676
|
+
except (ValueError, TypeError):
|
|
677
|
+
# Invalid timestamp, proceed with update
|
|
678
|
+
pass
|
|
679
|
+
|
|
680
|
+
# Update access stats
|
|
681
|
+
try:
|
|
682
|
+
self.storage.update_access_stats(memory.id, now.isoformat())
|
|
683
|
+
except Exception as e:
|
|
684
|
+
logger.warning(f"Failed to update access stats for {memory.id}: {e}")
|
|
685
|
+
|
|
686
|
+
def forget(self, memory_id: str) -> bool:
    """Delete a memory; on success, flag the search index for a refit."""
    deleted = self.storage.delete_memory(memory_id)
    if deleted:
        # The index still references the removed content until refit.
        self.mark_search_refit_needed()
    return deleted
|
|
693
|
+
|
|
694
|
+
def list_memories(
    self,
    project_id: str | None = None,
    memory_type: str | None = None,
    min_importance: float | None = None,
    limit: int = 50,
    offset: int = 0,
    tags_all: list[str] | None = None,
    tags_any: list[str] | None = None,
    tags_none: list[str] | None = None,
) -> list[Memory]:
    """
    List memories with optional filtering; delegates straight to storage.

    Args:
        project_id: Filter by project ID (or None for global)
        memory_type: Filter by memory type
        min_importance: Minimum importance threshold
        limit: Maximum results
        offset: Offset for pagination
        tags_all: Memory must have ALL of these tags
        tags_any: Memory must have at least ONE of these tags
        tags_none: Memory must have NONE of these tags
    """
    filters = dict(
        project_id=project_id,
        memory_type=memory_type,
        min_importance=min_importance,
        limit=limit,
        offset=offset,
        tags_all=tags_all,
        tags_any=tags_any,
        tags_none=tags_none,
    )
    return self.storage.list_memories(**filters)
|
|
728
|
+
|
|
729
|
+
def content_exists(self, content: str, project_id: str | None = None) -> bool:
|
|
730
|
+
"""Check if a memory with identical content already exists."""
|
|
731
|
+
return self.storage.content_exists(content, project_id)
|
|
732
|
+
|
|
733
|
+
def get_memory(self, memory_id: str) -> Memory | None:
    """Fetch a memory by ID, returning None instead of raising when absent."""
    try:
        return self.storage.get_memory(memory_id)
    except ValueError:
        # Storage signals "not found" with ValueError; normalise to None.
        return None
|
|
739
|
+
|
|
740
|
+
def find_by_prefix(self, prefix: str, limit: int = 5) -> list[Memory]:
    """
    Find memories whose IDs start with the given prefix.

    Used for resolving short ID references (e.g., "abc123" -> full UUID).

    Args:
        prefix: ID prefix to search for
        limit: Maximum number of results

    Returns:
        List of Memory objects with matching ID prefixes
    """
    # Fix: escape LIKE metacharacters so a prefix containing "%" or "_"
    # matches those characters literally instead of acting as wildcards.
    escaped = (
        prefix.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    )
    rows = self.db.fetchall(
        "SELECT * FROM memories WHERE id LIKE ? ESCAPE '\\' LIMIT ?",
        (f"{escaped}%", limit),
    )
    return [Memory.from_row(row) for row in rows]
|
|
758
|
+
|
|
759
|
+
def update_memory(
    self,
    memory_id: str,
    content: str | None = None,
    importance: float | None = None,
    tags: list[str] | None = None,
) -> Memory:
    """
    Update an existing memory in place.

    Args:
        memory_id: The memory to update
        content: New content (optional)
        importance: New importance (optional)
        tags: New tags (optional)

    Returns:
        Updated Memory object

    Raises:
        ValueError: If memory not found
    """
    updated = self.storage.update_memory(
        memory_id=memory_id,
        content=content,
        importance=importance,
        tags=tags,
    )

    # Content edits invalidate the search index; metadata-only edits don't.
    if content is not None:
        self.mark_search_refit_needed()

    return updated
|
|
793
|
+
|
|
794
|
+
def get_stats(self, project_id: str | None = None) -> dict[str, Any]:
    """
    Summarise stored memories: count, per-type breakdown, mean importance.

    Args:
        project_id: Optional project to filter stats by

    Returns:
        Dictionary with memory statistics
    """
    # Snapshot everything in one query (capped at 10k rows).
    memories = self.storage.list_memories(project_id=project_id, limit=10000)

    if not memories:
        return {
            "total_count": 0,
            "by_type": {},
            "avg_importance": 0.0,
            "project_id": project_id,
        }

    by_type: dict[str, int] = {}
    for entry in memories:
        by_type[entry.memory_type] = by_type.get(entry.memory_type, 0) + 1

    mean_importance = sum(m.importance for m in memories) / len(memories)

    return {
        "total_count": len(memories),
        "by_type": by_type,
        "avg_importance": round(mean_importance, 3),
        "project_id": project_id,
    }
|
|
829
|
+
|
|
830
|
+
def decay_memories(self) -> int:
|
|
831
|
+
"""
|
|
832
|
+
Apply importance decay to all memories.
|
|
833
|
+
|
|
834
|
+
Returns:
|
|
835
|
+
Number of memories updated.
|
|
836
|
+
"""
|
|
837
|
+
if not self.config.decay_enabled:
|
|
838
|
+
return 0
|
|
839
|
+
|
|
840
|
+
rate = self.config.decay_rate
|
|
841
|
+
floor = self.config.decay_floor
|
|
842
|
+
|
|
843
|
+
# This is a potentially expensive operation if there are many memories.
|
|
844
|
+
# Ideally we'd do this in the database with SQL, but SQLite math functions
|
|
845
|
+
# might be limited or we want Python control.
|
|
846
|
+
# Or we only decay memories accessed > X days ago.
|
|
847
|
+
|
|
848
|
+
# Simple implementation: fetch all > floor, decay them, update if changed.
|
|
849
|
+
# Optimization: Only process a batch or do it entirely in SQL.
|
|
850
|
+
|
|
851
|
+
# Let's do a SQL-based update for efficiency if possible, but
|
|
852
|
+
# LocalMemoryManager doesn't expose a raw execute.
|
|
853
|
+
# Let's iterate for now (simplest, robust), but limit to 100 at a time maybe?
|
|
854
|
+
# Or better: Add a `decay_all` method to storage layer?
|
|
855
|
+
|
|
856
|
+
# For now, let's just implement the logic here iterating over ALL memories
|
|
857
|
+
# which is fine for < 1000 memories.
|
|
858
|
+
|
|
859
|
+
# Use snapshot-based iteration to avoid pagination issues during updates
|
|
860
|
+
count = 0
|
|
861
|
+
|
|
862
|
+
# Note: listing all memories (limit=10000) to avoid pagination drift when modifying them.
|
|
863
|
+
# If dataset grows larger, we should implement a cursor-based approach or add list_memories_ids.
|
|
864
|
+
memories = self.storage.list_memories(min_importance=floor + 0.001, limit=10000)
|
|
865
|
+
|
|
866
|
+
for memory in memories:
|
|
867
|
+
# Calculate simple linear decay since last update
|
|
868
|
+
last_update = datetime.fromisoformat(memory.updated_at)
|
|
869
|
+
# Ensure last_update is timezone-aware for subtraction
|
|
870
|
+
if last_update.tzinfo is None:
|
|
871
|
+
last_update = last_update.replace(tzinfo=UTC)
|
|
872
|
+
hours_since = (datetime.now(UTC) - last_update).total_seconds() / 3600
|
|
873
|
+
|
|
874
|
+
# If it's been less than 24h, skip to avoid over-decaying if called frequently
|
|
875
|
+
if hours_since < 24:
|
|
876
|
+
continue
|
|
877
|
+
|
|
878
|
+
# Decay factor: rate * (days since) / 30
|
|
879
|
+
# Linear decay
|
|
880
|
+
months_passed = hours_since / (24 * 30)
|
|
881
|
+
decay_amount = rate * months_passed
|
|
882
|
+
|
|
883
|
+
if decay_amount < 0.001:
|
|
884
|
+
continue
|
|
885
|
+
|
|
886
|
+
new_importance = max(floor, memory.importance - decay_amount)
|
|
887
|
+
|
|
888
|
+
if new_importance != memory.importance:
|
|
889
|
+
self.storage.update_memory(
|
|
890
|
+
memory.id,
|
|
891
|
+
importance=new_importance,
|
|
892
|
+
)
|
|
893
|
+
count += 1
|
|
894
|
+
|
|
895
|
+
return count
|
|
896
|
+
|
|
897
|
+
def export_markdown(
|
|
898
|
+
self,
|
|
899
|
+
project_id: str | None = None,
|
|
900
|
+
include_metadata: bool = True,
|
|
901
|
+
include_stats: bool = True,
|
|
902
|
+
) -> str:
|
|
903
|
+
"""
|
|
904
|
+
Export memories as a formatted markdown document.
|
|
905
|
+
|
|
906
|
+
Creates a human-readable markdown export of memories, suitable for
|
|
907
|
+
backup, documentation, or sharing.
|
|
908
|
+
|
|
909
|
+
Args:
|
|
910
|
+
project_id: Filter by project ID (None for all memories)
|
|
911
|
+
include_metadata: Include memory metadata (type, importance, tags)
|
|
912
|
+
include_stats: Include summary statistics at the top
|
|
913
|
+
|
|
914
|
+
Returns:
|
|
915
|
+
Formatted markdown string with all memories
|
|
916
|
+
|
|
917
|
+
Example output:
|
|
918
|
+
# Memory Export
|
|
919
|
+
|
|
920
|
+
**Exported:** 2026-01-19 12:34:56 UTC
|
|
921
|
+
**Total memories:** 42
|
|
922
|
+
|
|
923
|
+
---
|
|
924
|
+
|
|
925
|
+
## Memory: abc123
|
|
926
|
+
|
|
927
|
+
User prefers dark mode for all applications.
|
|
928
|
+
|
|
929
|
+
- **Type:** preference
|
|
930
|
+
- **Importance:** 0.8
|
|
931
|
+
- **Tags:** ui, settings
|
|
932
|
+
- **Created:** 2026-01-15 10:00:00
|
|
933
|
+
"""
|
|
934
|
+
memories = self.storage.list_memories(project_id=project_id, limit=10000)
|
|
935
|
+
|
|
936
|
+
lines: list[str] = []
|
|
937
|
+
|
|
938
|
+
# Header
|
|
939
|
+
lines.append("# Memory Export")
|
|
940
|
+
lines.append("")
|
|
941
|
+
|
|
942
|
+
# Stats section
|
|
943
|
+
if include_stats:
|
|
944
|
+
now = datetime.now(UTC)
|
|
945
|
+
lines.append(f"**Exported:** {now.strftime('%Y-%m-%d %H:%M:%S')} UTC")
|
|
946
|
+
lines.append(f"**Total memories:** {len(memories)}")
|
|
947
|
+
if project_id:
|
|
948
|
+
lines.append(f"**Project:** {project_id}")
|
|
949
|
+
|
|
950
|
+
# Type breakdown
|
|
951
|
+
if memories:
|
|
952
|
+
by_type: dict[str, int] = {}
|
|
953
|
+
for m in memories:
|
|
954
|
+
by_type[m.memory_type] = by_type.get(m.memory_type, 0) + 1
|
|
955
|
+
type_str = ", ".join(f"{k}: {v}" for k, v in sorted(by_type.items()))
|
|
956
|
+
lines.append(f"**By type:** {type_str}")
|
|
957
|
+
|
|
958
|
+
lines.append("")
|
|
959
|
+
lines.append("---")
|
|
960
|
+
lines.append("")
|
|
961
|
+
|
|
962
|
+
# Individual memories
|
|
963
|
+
for memory in memories:
|
|
964
|
+
# Memory header with short ID
|
|
965
|
+
short_id = memory.id[:8] if len(memory.id) > 8 else memory.id
|
|
966
|
+
lines.append(f"## Memory: {short_id}")
|
|
967
|
+
lines.append("")
|
|
968
|
+
|
|
969
|
+
# Content
|
|
970
|
+
lines.append(memory.content)
|
|
971
|
+
lines.append("")
|
|
972
|
+
|
|
973
|
+
# Metadata
|
|
974
|
+
if include_metadata:
|
|
975
|
+
lines.append(f"- **Type:** {memory.memory_type}")
|
|
976
|
+
lines.append(f"- **Importance:** {memory.importance}")
|
|
977
|
+
|
|
978
|
+
if memory.tags:
|
|
979
|
+
tags_str = ", ".join(memory.tags)
|
|
980
|
+
lines.append(f"- **Tags:** {tags_str}")
|
|
981
|
+
|
|
982
|
+
if memory.source_type:
|
|
983
|
+
lines.append(f"- **Source:** {memory.source_type}")
|
|
984
|
+
|
|
985
|
+
# Parse and format created_at
|
|
986
|
+
try:
|
|
987
|
+
created = datetime.fromisoformat(memory.created_at)
|
|
988
|
+
created_str = created.strftime("%Y-%m-%d %H:%M:%S")
|
|
989
|
+
except (ValueError, TypeError):
|
|
990
|
+
created_str = memory.created_at
|
|
991
|
+
lines.append(f"- **Created:** {created_str}")
|
|
992
|
+
|
|
993
|
+
if memory.access_count > 0:
|
|
994
|
+
lines.append(f"- **Accessed:** {memory.access_count} times")
|
|
995
|
+
|
|
996
|
+
lines.append("")
|
|
997
|
+
|
|
998
|
+
lines.append("---")
|
|
999
|
+
lines.append("")
|
|
1000
|
+
|
|
1001
|
+
return "\n".join(lines)
|