gobby-0.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (383)
  1. gobby/__init__.py +3 -0
  2. gobby/adapters/__init__.py +30 -0
  3. gobby/adapters/base.py +93 -0
  4. gobby/adapters/claude_code.py +276 -0
  5. gobby/adapters/codex.py +1292 -0
  6. gobby/adapters/gemini.py +343 -0
  7. gobby/agents/__init__.py +37 -0
  8. gobby/agents/codex_session.py +120 -0
  9. gobby/agents/constants.py +112 -0
  10. gobby/agents/context.py +362 -0
  11. gobby/agents/definitions.py +133 -0
  12. gobby/agents/gemini_session.py +111 -0
  13. gobby/agents/registry.py +618 -0
  14. gobby/agents/runner.py +968 -0
  15. gobby/agents/session.py +259 -0
  16. gobby/agents/spawn.py +916 -0
  17. gobby/agents/spawners/__init__.py +77 -0
  18. gobby/agents/spawners/base.py +142 -0
  19. gobby/agents/spawners/cross_platform.py +266 -0
  20. gobby/agents/spawners/embedded.py +225 -0
  21. gobby/agents/spawners/headless.py +226 -0
  22. gobby/agents/spawners/linux.py +125 -0
  23. gobby/agents/spawners/macos.py +277 -0
  24. gobby/agents/spawners/windows.py +308 -0
  25. gobby/agents/tty_config.py +319 -0
  26. gobby/autonomous/__init__.py +32 -0
  27. gobby/autonomous/progress_tracker.py +447 -0
  28. gobby/autonomous/stop_registry.py +269 -0
  29. gobby/autonomous/stuck_detector.py +383 -0
  30. gobby/cli/__init__.py +67 -0
  31. gobby/cli/__main__.py +8 -0
  32. gobby/cli/agents.py +529 -0
  33. gobby/cli/artifacts.py +266 -0
  34. gobby/cli/daemon.py +329 -0
  35. gobby/cli/extensions.py +526 -0
  36. gobby/cli/github.py +263 -0
  37. gobby/cli/init.py +53 -0
  38. gobby/cli/install.py +614 -0
  39. gobby/cli/installers/__init__.py +37 -0
  40. gobby/cli/installers/antigravity.py +65 -0
  41. gobby/cli/installers/claude.py +363 -0
  42. gobby/cli/installers/codex.py +192 -0
  43. gobby/cli/installers/gemini.py +294 -0
  44. gobby/cli/installers/git_hooks.py +377 -0
  45. gobby/cli/installers/shared.py +737 -0
  46. gobby/cli/linear.py +250 -0
  47. gobby/cli/mcp.py +30 -0
  48. gobby/cli/mcp_proxy.py +698 -0
  49. gobby/cli/memory.py +304 -0
  50. gobby/cli/merge.py +384 -0
  51. gobby/cli/projects.py +79 -0
  52. gobby/cli/sessions.py +622 -0
  53. gobby/cli/tasks/__init__.py +30 -0
  54. gobby/cli/tasks/_utils.py +658 -0
  55. gobby/cli/tasks/ai.py +1025 -0
  56. gobby/cli/tasks/commits.py +169 -0
  57. gobby/cli/tasks/crud.py +685 -0
  58. gobby/cli/tasks/deps.py +135 -0
  59. gobby/cli/tasks/labels.py +63 -0
  60. gobby/cli/tasks/main.py +273 -0
  61. gobby/cli/tasks/search.py +178 -0
  62. gobby/cli/tui.py +34 -0
  63. gobby/cli/utils.py +513 -0
  64. gobby/cli/workflows.py +927 -0
  65. gobby/cli/worktrees.py +481 -0
  66. gobby/config/__init__.py +129 -0
  67. gobby/config/app.py +551 -0
  68. gobby/config/extensions.py +167 -0
  69. gobby/config/features.py +472 -0
  70. gobby/config/llm_providers.py +98 -0
  71. gobby/config/logging.py +66 -0
  72. gobby/config/mcp.py +346 -0
  73. gobby/config/persistence.py +247 -0
  74. gobby/config/servers.py +141 -0
  75. gobby/config/sessions.py +250 -0
  76. gobby/config/tasks.py +784 -0
  77. gobby/hooks/__init__.py +104 -0
  78. gobby/hooks/artifact_capture.py +213 -0
  79. gobby/hooks/broadcaster.py +243 -0
  80. gobby/hooks/event_handlers.py +723 -0
  81. gobby/hooks/events.py +218 -0
  82. gobby/hooks/git.py +169 -0
  83. gobby/hooks/health_monitor.py +171 -0
  84. gobby/hooks/hook_manager.py +856 -0
  85. gobby/hooks/hook_types.py +575 -0
  86. gobby/hooks/plugins.py +813 -0
  87. gobby/hooks/session_coordinator.py +396 -0
  88. gobby/hooks/verification_runner.py +268 -0
  89. gobby/hooks/webhooks.py +339 -0
  90. gobby/install/claude/commands/gobby/bug.md +51 -0
  91. gobby/install/claude/commands/gobby/chore.md +51 -0
  92. gobby/install/claude/commands/gobby/epic.md +52 -0
  93. gobby/install/claude/commands/gobby/eval.md +235 -0
  94. gobby/install/claude/commands/gobby/feat.md +49 -0
  95. gobby/install/claude/commands/gobby/nit.md +52 -0
  96. gobby/install/claude/commands/gobby/ref.md +52 -0
  97. gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
  98. gobby/install/claude/hooks/hook_dispatcher.py +364 -0
  99. gobby/install/claude/hooks/validate_settings.py +102 -0
  100. gobby/install/claude/hooks-template.json +118 -0
  101. gobby/install/codex/hooks/hook_dispatcher.py +153 -0
  102. gobby/install/codex/prompts/forget.md +7 -0
  103. gobby/install/codex/prompts/memories.md +7 -0
  104. gobby/install/codex/prompts/recall.md +7 -0
  105. gobby/install/codex/prompts/remember.md +13 -0
  106. gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
  107. gobby/install/gemini/hooks-template.json +138 -0
  108. gobby/install/shared/plugins/code_guardian.py +456 -0
  109. gobby/install/shared/plugins/example_notify.py +331 -0
  110. gobby/integrations/__init__.py +10 -0
  111. gobby/integrations/github.py +145 -0
  112. gobby/integrations/linear.py +145 -0
  113. gobby/llm/__init__.py +40 -0
  114. gobby/llm/base.py +120 -0
  115. gobby/llm/claude.py +578 -0
  116. gobby/llm/claude_executor.py +503 -0
  117. gobby/llm/codex.py +322 -0
  118. gobby/llm/codex_executor.py +513 -0
  119. gobby/llm/executor.py +316 -0
  120. gobby/llm/factory.py +34 -0
  121. gobby/llm/gemini.py +258 -0
  122. gobby/llm/gemini_executor.py +339 -0
  123. gobby/llm/litellm.py +287 -0
  124. gobby/llm/litellm_executor.py +303 -0
  125. gobby/llm/resolver.py +499 -0
  126. gobby/llm/service.py +236 -0
  127. gobby/mcp_proxy/__init__.py +29 -0
  128. gobby/mcp_proxy/actions.py +175 -0
  129. gobby/mcp_proxy/daemon_control.py +198 -0
  130. gobby/mcp_proxy/importer.py +436 -0
  131. gobby/mcp_proxy/lazy.py +325 -0
  132. gobby/mcp_proxy/manager.py +798 -0
  133. gobby/mcp_proxy/metrics.py +609 -0
  134. gobby/mcp_proxy/models.py +139 -0
  135. gobby/mcp_proxy/registries.py +215 -0
  136. gobby/mcp_proxy/schema_hash.py +381 -0
  137. gobby/mcp_proxy/semantic_search.py +706 -0
  138. gobby/mcp_proxy/server.py +549 -0
  139. gobby/mcp_proxy/services/__init__.py +0 -0
  140. gobby/mcp_proxy/services/fallback.py +306 -0
  141. gobby/mcp_proxy/services/recommendation.py +224 -0
  142. gobby/mcp_proxy/services/server_mgmt.py +214 -0
  143. gobby/mcp_proxy/services/system.py +72 -0
  144. gobby/mcp_proxy/services/tool_filter.py +231 -0
  145. gobby/mcp_proxy/services/tool_proxy.py +309 -0
  146. gobby/mcp_proxy/stdio.py +565 -0
  147. gobby/mcp_proxy/tools/__init__.py +27 -0
  148. gobby/mcp_proxy/tools/agents.py +1103 -0
  149. gobby/mcp_proxy/tools/artifacts.py +207 -0
  150. gobby/mcp_proxy/tools/hub.py +335 -0
  151. gobby/mcp_proxy/tools/internal.py +337 -0
  152. gobby/mcp_proxy/tools/memory.py +543 -0
  153. gobby/mcp_proxy/tools/merge.py +422 -0
  154. gobby/mcp_proxy/tools/metrics.py +283 -0
  155. gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
  156. gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
  157. gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
  158. gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
  159. gobby/mcp_proxy/tools/orchestration/review.py +736 -0
  160. gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
  161. gobby/mcp_proxy/tools/session_messages.py +1056 -0
  162. gobby/mcp_proxy/tools/task_dependencies.py +219 -0
  163. gobby/mcp_proxy/tools/task_expansion.py +591 -0
  164. gobby/mcp_proxy/tools/task_github.py +393 -0
  165. gobby/mcp_proxy/tools/task_linear.py +379 -0
  166. gobby/mcp_proxy/tools/task_orchestration.py +77 -0
  167. gobby/mcp_proxy/tools/task_readiness.py +522 -0
  168. gobby/mcp_proxy/tools/task_sync.py +351 -0
  169. gobby/mcp_proxy/tools/task_validation.py +843 -0
  170. gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
  171. gobby/mcp_proxy/tools/tasks/_context.py +112 -0
  172. gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
  173. gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
  174. gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
  175. gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
  176. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
  177. gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
  178. gobby/mcp_proxy/tools/tasks/_search.py +215 -0
  179. gobby/mcp_proxy/tools/tasks/_session.py +125 -0
  180. gobby/mcp_proxy/tools/workflows.py +973 -0
  181. gobby/mcp_proxy/tools/worktrees.py +1264 -0
  182. gobby/mcp_proxy/transports/__init__.py +0 -0
  183. gobby/mcp_proxy/transports/base.py +95 -0
  184. gobby/mcp_proxy/transports/factory.py +44 -0
  185. gobby/mcp_proxy/transports/http.py +139 -0
  186. gobby/mcp_proxy/transports/stdio.py +213 -0
  187. gobby/mcp_proxy/transports/websocket.py +136 -0
  188. gobby/memory/backends/__init__.py +116 -0
  189. gobby/memory/backends/mem0.py +408 -0
  190. gobby/memory/backends/memu.py +485 -0
  191. gobby/memory/backends/null.py +111 -0
  192. gobby/memory/backends/openmemory.py +537 -0
  193. gobby/memory/backends/sqlite.py +304 -0
  194. gobby/memory/context.py +87 -0
  195. gobby/memory/manager.py +1001 -0
  196. gobby/memory/protocol.py +451 -0
  197. gobby/memory/search/__init__.py +66 -0
  198. gobby/memory/search/text.py +127 -0
  199. gobby/memory/viz.py +258 -0
  200. gobby/prompts/__init__.py +13 -0
  201. gobby/prompts/defaults/expansion/system.md +119 -0
  202. gobby/prompts/defaults/expansion/user.md +48 -0
  203. gobby/prompts/defaults/external_validation/agent.md +72 -0
  204. gobby/prompts/defaults/external_validation/external.md +63 -0
  205. gobby/prompts/defaults/external_validation/spawn.md +83 -0
  206. gobby/prompts/defaults/external_validation/system.md +6 -0
  207. gobby/prompts/defaults/features/import_mcp.md +22 -0
  208. gobby/prompts/defaults/features/import_mcp_github.md +17 -0
  209. gobby/prompts/defaults/features/import_mcp_search.md +16 -0
  210. gobby/prompts/defaults/features/recommend_tools.md +32 -0
  211. gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
  212. gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
  213. gobby/prompts/defaults/features/server_description.md +20 -0
  214. gobby/prompts/defaults/features/server_description_system.md +6 -0
  215. gobby/prompts/defaults/features/task_description.md +31 -0
  216. gobby/prompts/defaults/features/task_description_system.md +6 -0
  217. gobby/prompts/defaults/features/tool_summary.md +17 -0
  218. gobby/prompts/defaults/features/tool_summary_system.md +6 -0
  219. gobby/prompts/defaults/research/step.md +58 -0
  220. gobby/prompts/defaults/validation/criteria.md +47 -0
  221. gobby/prompts/defaults/validation/validate.md +38 -0
  222. gobby/prompts/loader.py +346 -0
  223. gobby/prompts/models.py +113 -0
  224. gobby/py.typed +0 -0
  225. gobby/runner.py +488 -0
  226. gobby/search/__init__.py +23 -0
  227. gobby/search/protocol.py +104 -0
  228. gobby/search/tfidf.py +232 -0
  229. gobby/servers/__init__.py +7 -0
  230. gobby/servers/http.py +636 -0
  231. gobby/servers/models.py +31 -0
  232. gobby/servers/routes/__init__.py +23 -0
  233. gobby/servers/routes/admin.py +416 -0
  234. gobby/servers/routes/dependencies.py +118 -0
  235. gobby/servers/routes/mcp/__init__.py +24 -0
  236. gobby/servers/routes/mcp/hooks.py +135 -0
  237. gobby/servers/routes/mcp/plugins.py +121 -0
  238. gobby/servers/routes/mcp/tools.py +1337 -0
  239. gobby/servers/routes/mcp/webhooks.py +159 -0
  240. gobby/servers/routes/sessions.py +582 -0
  241. gobby/servers/websocket.py +766 -0
  242. gobby/sessions/__init__.py +13 -0
  243. gobby/sessions/analyzer.py +322 -0
  244. gobby/sessions/lifecycle.py +240 -0
  245. gobby/sessions/manager.py +563 -0
  246. gobby/sessions/processor.py +225 -0
  247. gobby/sessions/summary.py +532 -0
  248. gobby/sessions/transcripts/__init__.py +41 -0
  249. gobby/sessions/transcripts/base.py +125 -0
  250. gobby/sessions/transcripts/claude.py +386 -0
  251. gobby/sessions/transcripts/codex.py +143 -0
  252. gobby/sessions/transcripts/gemini.py +195 -0
  253. gobby/storage/__init__.py +21 -0
  254. gobby/storage/agents.py +409 -0
  255. gobby/storage/artifact_classifier.py +341 -0
  256. gobby/storage/artifacts.py +285 -0
  257. gobby/storage/compaction.py +67 -0
  258. gobby/storage/database.py +357 -0
  259. gobby/storage/inter_session_messages.py +194 -0
  260. gobby/storage/mcp.py +680 -0
  261. gobby/storage/memories.py +562 -0
  262. gobby/storage/merge_resolutions.py +550 -0
  263. gobby/storage/migrations.py +860 -0
  264. gobby/storage/migrations_legacy.py +1359 -0
  265. gobby/storage/projects.py +166 -0
  266. gobby/storage/session_messages.py +251 -0
  267. gobby/storage/session_tasks.py +97 -0
  268. gobby/storage/sessions.py +817 -0
  269. gobby/storage/task_dependencies.py +223 -0
  270. gobby/storage/tasks/__init__.py +42 -0
  271. gobby/storage/tasks/_aggregates.py +180 -0
  272. gobby/storage/tasks/_crud.py +449 -0
  273. gobby/storage/tasks/_id.py +104 -0
  274. gobby/storage/tasks/_lifecycle.py +311 -0
  275. gobby/storage/tasks/_manager.py +889 -0
  276. gobby/storage/tasks/_models.py +300 -0
  277. gobby/storage/tasks/_ordering.py +119 -0
  278. gobby/storage/tasks/_path_cache.py +110 -0
  279. gobby/storage/tasks/_queries.py +343 -0
  280. gobby/storage/tasks/_search.py +143 -0
  281. gobby/storage/workflow_audit.py +393 -0
  282. gobby/storage/worktrees.py +547 -0
  283. gobby/sync/__init__.py +29 -0
  284. gobby/sync/github.py +333 -0
  285. gobby/sync/linear.py +304 -0
  286. gobby/sync/memories.py +284 -0
  287. gobby/sync/tasks.py +641 -0
  288. gobby/tasks/__init__.py +8 -0
  289. gobby/tasks/build_verification.py +193 -0
  290. gobby/tasks/commits.py +633 -0
  291. gobby/tasks/context.py +747 -0
  292. gobby/tasks/criteria.py +342 -0
  293. gobby/tasks/enhanced_validator.py +226 -0
  294. gobby/tasks/escalation.py +263 -0
  295. gobby/tasks/expansion.py +626 -0
  296. gobby/tasks/external_validator.py +764 -0
  297. gobby/tasks/issue_extraction.py +171 -0
  298. gobby/tasks/prompts/expand.py +327 -0
  299. gobby/tasks/research.py +421 -0
  300. gobby/tasks/tdd.py +352 -0
  301. gobby/tasks/tree_builder.py +263 -0
  302. gobby/tasks/validation.py +712 -0
  303. gobby/tasks/validation_history.py +357 -0
  304. gobby/tasks/validation_models.py +89 -0
  305. gobby/tools/__init__.py +0 -0
  306. gobby/tools/summarizer.py +170 -0
  307. gobby/tui/__init__.py +5 -0
  308. gobby/tui/api_client.py +281 -0
  309. gobby/tui/app.py +327 -0
  310. gobby/tui/screens/__init__.py +25 -0
  311. gobby/tui/screens/agents.py +333 -0
  312. gobby/tui/screens/chat.py +450 -0
  313. gobby/tui/screens/dashboard.py +377 -0
  314. gobby/tui/screens/memory.py +305 -0
  315. gobby/tui/screens/metrics.py +231 -0
  316. gobby/tui/screens/orchestrator.py +904 -0
  317. gobby/tui/screens/sessions.py +412 -0
  318. gobby/tui/screens/tasks.py +442 -0
  319. gobby/tui/screens/workflows.py +289 -0
  320. gobby/tui/screens/worktrees.py +174 -0
  321. gobby/tui/widgets/__init__.py +21 -0
  322. gobby/tui/widgets/chat.py +210 -0
  323. gobby/tui/widgets/conductor.py +104 -0
  324. gobby/tui/widgets/menu.py +132 -0
  325. gobby/tui/widgets/message_panel.py +160 -0
  326. gobby/tui/widgets/review_gate.py +224 -0
  327. gobby/tui/widgets/task_tree.py +99 -0
  328. gobby/tui/widgets/token_budget.py +166 -0
  329. gobby/tui/ws_client.py +258 -0
  330. gobby/utils/__init__.py +3 -0
  331. gobby/utils/daemon_client.py +235 -0
  332. gobby/utils/git.py +222 -0
  333. gobby/utils/id.py +38 -0
  334. gobby/utils/json_helpers.py +161 -0
  335. gobby/utils/logging.py +376 -0
  336. gobby/utils/machine_id.py +135 -0
  337. gobby/utils/metrics.py +589 -0
  338. gobby/utils/project_context.py +182 -0
  339. gobby/utils/project_init.py +263 -0
  340. gobby/utils/status.py +256 -0
  341. gobby/utils/validation.py +80 -0
  342. gobby/utils/version.py +23 -0
  343. gobby/workflows/__init__.py +4 -0
  344. gobby/workflows/actions.py +1310 -0
  345. gobby/workflows/approval_flow.py +138 -0
  346. gobby/workflows/artifact_actions.py +103 -0
  347. gobby/workflows/audit_helpers.py +110 -0
  348. gobby/workflows/autonomous_actions.py +286 -0
  349. gobby/workflows/context_actions.py +394 -0
  350. gobby/workflows/definitions.py +130 -0
  351. gobby/workflows/detection_helpers.py +208 -0
  352. gobby/workflows/engine.py +485 -0
  353. gobby/workflows/evaluator.py +669 -0
  354. gobby/workflows/git_utils.py +96 -0
  355. gobby/workflows/hooks.py +169 -0
  356. gobby/workflows/lifecycle_evaluator.py +613 -0
  357. gobby/workflows/llm_actions.py +70 -0
  358. gobby/workflows/loader.py +333 -0
  359. gobby/workflows/mcp_actions.py +60 -0
  360. gobby/workflows/memory_actions.py +272 -0
  361. gobby/workflows/premature_stop.py +164 -0
  362. gobby/workflows/session_actions.py +139 -0
  363. gobby/workflows/state_actions.py +123 -0
  364. gobby/workflows/state_manager.py +104 -0
  365. gobby/workflows/stop_signal_actions.py +163 -0
  366. gobby/workflows/summary_actions.py +344 -0
  367. gobby/workflows/task_actions.py +249 -0
  368. gobby/workflows/task_enforcement_actions.py +901 -0
  369. gobby/workflows/templates.py +52 -0
  370. gobby/workflows/todo_actions.py +84 -0
  371. gobby/workflows/webhook.py +223 -0
  372. gobby/workflows/webhook_executor.py +399 -0
  373. gobby/worktrees/__init__.py +5 -0
  374. gobby/worktrees/git.py +690 -0
  375. gobby/worktrees/merge/__init__.py +20 -0
  376. gobby/worktrees/merge/conflict_parser.py +177 -0
  377. gobby/worktrees/merge/resolver.py +485 -0
  378. gobby-0.2.5.dist-info/METADATA +351 -0
  379. gobby-0.2.5.dist-info/RECORD +383 -0
  380. gobby-0.2.5.dist-info/WHEEL +5 -0
  381. gobby-0.2.5.dist-info/entry_points.txt +2 -0
  382. gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
  383. gobby-0.2.5.dist-info/top_level.txt +1 -0
gobby/memory/manager.py
@@ -0,0 +1,1001 @@
+from __future__ import annotations
+
+import logging
+import mimetypes
+from datetime import UTC, datetime
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+from gobby.config.app import MemoryConfig
+from gobby.memory.backends import get_backend
+from gobby.memory.context import build_memory_context
+from gobby.memory.protocol import MediaAttachment, MemoryBackendProtocol
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.memories import LocalMemoryManager, Memory
+
+if TYPE_CHECKING:
+    from gobby.llm.service import LLMService
+    from gobby.memory.search import SearchBackend
+
+logger = logging.getLogger(__name__)
+
+
+class MemoryManager:
+    """
+    High-level manager for memory operations.
+    Handles storage, ranking, decay, and business logic.
+    """
+
+    def __init__(
+        self,
+        db: DatabaseProtocol,
+        config: MemoryConfig,
+        llm_service: LLMService | None = None,
+    ):
+        self.db = db
+        self.config = config
+        self._llm_service = llm_service
+
+        # Initialize storage backend based on config
+        # Note: SQLiteBackend wraps LocalMemoryManager internally
+        backend_type = getattr(config, "backend", "sqlite")
+        self._backend: MemoryBackendProtocol = get_backend(backend_type, database=db)
+
+        # Keep storage reference for backward compatibility with sync methods
+        # The SQLiteBackend uses LocalMemoryManager internally
+        self.storage = LocalMemoryManager(db)
+
+        self._search_backend: SearchBackend | None = None
+        self._search_backend_fitted = False
+
+    @property
+    def llm_service(self) -> LLMService | None:
+        """Get the LLM service for image description."""
+        return self._llm_service
+
+    @llm_service.setter
+    def llm_service(self, service: LLMService | None) -> None:
+        """Set the LLM service for image description."""
+        self._llm_service = service
+
+    @property
+    def search_backend(self) -> SearchBackend:
+        """
+        Lazy-init search backend based on configuration.
+
+        The backend type is determined by config.search_backend:
+        - "tfidf" (default): Zero-dependency TF-IDF search
+        - "text": Simple text substring matching
+        """
+        if self._search_backend is None:
+            from gobby.memory.search import get_search_backend
+
+            backend_type = getattr(self.config, "search_backend", "tfidf")
+            logger.debug(f"Initializing search backend: {backend_type}")
+
+            try:
+                self._search_backend = get_search_backend(
+                    backend_type=backend_type,
+                    db=self.db,
+                )
+            except Exception as e:
+                logger.warning(
+                    f"Failed to initialize {backend_type} backend: {e}. Falling back to tfidf"
+                )
+                self._search_backend = get_search_backend("tfidf")
+
+        return self._search_backend
+
+    def _ensure_search_backend_fitted(self) -> None:
+        """Ensure the search backend is fitted with current memories."""
+        if self._search_backend_fitted:
+            return
+
+        backend = self.search_backend
+        if not backend.needs_refit():
+            self._search_backend_fitted = True
+            return
+
+        # Fit the backend with all memories
+        memories = self.storage.list_memories(limit=10000)
+        memory_tuples = [(m.id, m.content) for m in memories]
+
+        try:
+            backend.fit(memory_tuples)
+            self._search_backend_fitted = True
+            logger.info(f"Search backend fitted with {len(memory_tuples)} memories")
+        except Exception as e:
+            logger.error(f"Failed to fit search backend: {e}")
+            raise
+
+    def mark_search_refit_needed(self) -> None:
+        """Mark that the search backend needs to be refitted."""
+        self._search_backend_fitted = False
+
+    def reindex_search(self) -> dict[str, Any]:
+        """
+        Force rebuild of the search index.
+
+        This method explicitly rebuilds the TF-IDF (or other configured)
+        search index from all stored memories. Useful for:
+        - Initial index building
+        - Recovery after corruption
+        - After bulk memory operations
+
+        Returns:
+            Dict with index statistics including memory_count, backend_type, etc.
+        """
+        # Get all memories
+        memories = self.storage.list_memories(limit=10000)
+        memory_tuples = [(m.id, m.content) for m in memories]
+
+        # Force refit the backend
+        backend = self.search_backend
+        backend_type = getattr(self.config, "search_backend", "tfidf")
+
+        try:
+            backend.fit(memory_tuples)
+            self._search_backend_fitted = True
+
+            # Get backend stats
+            stats = backend.get_stats() if hasattr(backend, "get_stats") else {}
+
+            return {
+                "success": True,
+                "memory_count": len(memory_tuples),
+                "backend_type": backend_type,
+                "fitted": True,
+                **stats,
+            }
+        except Exception as e:
+            logger.error(f"Failed to reindex search backend: {e}")
+            return {
+                "success": False,
+                "error": str(e),
+                "memory_count": len(memory_tuples),
+                "backend_type": backend_type,
+            }
+
+    async def remember(
+        self,
+        content: str,
+        memory_type: str = "fact",
+        importance: float = 0.5,
+        project_id: str | None = None,
+        source_type: str = "user",
+        source_session_id: str | None = None,
+        tags: list[str] | None = None,
+    ) -> Memory:
+        """
+        Store a new memory.
+
+        Args:
+            content: The memory content
+            memory_type: Type of memory (fact, preference, etc)
+            importance: 0.0-1.0 importance score
+            project_id: Optional project context
+            source_type: Origin of memory
+            source_session_id: Origin session
+            tags: Optional tags
+        """
+        # Future: Duplicate detection via embeddings or fuzzy match?
+        # For now, rely on storage layer (which uses content-hash ID for dedup)
+
+        memory = self.storage.create_memory(
+            content=content,
+            memory_type=memory_type,
+            importance=importance,
+            project_id=project_id,
+            source_type=source_type,
+            source_session_id=source_session_id,
+            tags=tags,
+        )
+
+        # Mark search index for refit since we added new content
+        self.mark_search_refit_needed()
+
+        # Auto cross-reference if enabled
+        if getattr(self.config, "auto_crossref", False):
+            try:
+                self._create_crossrefs(memory)
+            except Exception as e:
+                # Don't fail the remember if crossref fails
+                logger.warning(f"Auto-crossref failed for {memory.id}: {e}")
+
+        return memory
+
+    async def remember_with_image(
+        self,
+        image_path: str,
+        context: str | None = None,
+        memory_type: str = "fact",
+        importance: float = 0.5,
+        project_id: str | None = None,
+        source_type: str = "user",
+        source_session_id: str | None = None,
+        tags: list[str] | None = None,
+    ) -> Memory:
+        """
+        Store a memory with an image attachment.
+
+        Uses the configured LLM provider to generate a description of the image,
+        then stores the memory with the description as content and the image
+        as a media attachment.
+
+        Args:
+            image_path: Path to the image file
+            context: Optional context to guide the image description
+            memory_type: Type of memory (fact, preference, etc)
+            importance: 0.0-1.0 importance score
+            project_id: Optional project context
+            source_type: Origin of memory
+            source_session_id: Origin session
+            tags: Optional tags
+
+        Returns:
+            The created Memory object
+
+        Raises:
+            ValueError: If LLM service is not configured or image not found
+        """
+        path = Path(image_path)
+        if not path.exists():
+            raise ValueError(f"Image not found: {image_path}")
+
+        # Get LLM provider for image description
+        if not self._llm_service:
+            raise ValueError(
+                "LLM service not configured. Pass llm_service to MemoryManager "
+                "to enable remember_with_image."
+            )
+
+        provider = self._llm_service.get_default_provider()
+
+        # Generate image description
+        description = await provider.describe_image(image_path, context=context)
+
+        # Determine MIME type
+        mime_type, _ = mimetypes.guess_type(str(path))
+        if not mime_type:
+            mime_type = "application/octet-stream"
+
+        # Create media attachment
+        media = MediaAttachment(
+            media_type="image",
+            content_path=str(path.absolute()),
+            mime_type=mime_type,
+            description=description,
+            description_model=provider.provider_name,
+        )
+
+        # Store memory with media attachment via backend
+        record = await self._backend.create(
+            content=description,
+            memory_type=memory_type,
+            importance=importance,
+            project_id=project_id,
+            source_type=source_type,
+            source_session_id=source_session_id,
+            tags=tags,
+            media=[media],
+        )
+
+        # Mark search index for refit
+        self.mark_search_refit_needed()
+
+        # Return as Memory object for backward compatibility
+        # Note: The backend returns MemoryRecord, but we need Memory
+        memory = self.storage.get_memory(record.id)
+        if memory is not None:
+            return memory
+
+        # Fallback: construct Memory from MemoryRecord if storage lookup fails
+        # This can happen with synthetic records from failed backend calls
+        return Memory(
+            id=record.id,
+            content=record.content,
+            memory_type=record.memory_type,
+            created_at=record.created_at.isoformat(),
+            updated_at=record.updated_at.isoformat()
+            if record.updated_at
+            else record.created_at.isoformat(),
+            project_id=record.project_id,
+            source_type=record.source_type,
+            source_session_id=record.source_session_id,
+            importance=record.importance,
+            tags=record.tags,
+        )
+
+    async def remember_screenshot(
+        self,
+        screenshot_bytes: bytes,
+        context: str | None = None,
+        memory_type: str = "observation",
+        importance: float = 0.5,
+        project_id: str | None = None,
+        source_type: str = "user",
+        source_session_id: str | None = None,
+        tags: list[str] | None = None,
+    ) -> Memory:
+        """
+        Store a memory from raw screenshot bytes.
+
+        Saves the screenshot to .gobby/resources/ with a timestamp-based filename,
+        then delegates to remember_with_image() for LLM description and storage.
+
+        Args:
+            screenshot_bytes: Raw PNG screenshot bytes (from Playwright/Puppeteer)
+            context: Optional context to guide the image description
+            memory_type: Type of memory (default: "observation")
+            importance: 0.0-1.0 importance score
+            project_id: Optional project context
+            source_type: Origin of memory
+            source_session_id: Origin session
+            tags: Optional tags
+
+        Returns:
+            The created Memory object
+
+        Raises:
+            ValueError: If LLM service is not configured or screenshot bytes are empty
+        """
+        if not screenshot_bytes:
+            raise ValueError("Screenshot bytes cannot be empty")
+
+        # Determine resources directory using centralized utility
+        from datetime import datetime as dt
+
+        from gobby.cli.utils import get_resources_dir
+        from gobby.utils.project_context import get_project_context
+
+        ctx = get_project_context()
+        project_path = ctx.get("path") if ctx else None
+        resources_dir = get_resources_dir(project_path)
+
+        # Generate timestamp-based filename
+        timestamp = dt.now().strftime("%Y%m%d_%H%M%S_%f")
+        filename = f"screenshot_{timestamp}.png"
+        filepath = resources_dir / filename
+
+        # Write screenshot to file
+        filepath.write_bytes(screenshot_bytes)
+        logger.debug(f"Saved screenshot to {filepath}")
+
+        # Delegate to remember_with_image
+        return await self.remember_with_image(
+            image_path=str(filepath),
+            context=context,
+            memory_type=memory_type,
+            importance=importance,
+            project_id=project_id,
+            source_type=source_type,
+            source_session_id=source_session_id,
+            tags=tags,
+        )
+
+    def _create_crossrefs(
+        self,
+        memory: Memory,
+        threshold: float | None = None,
+        max_links: int | None = None,
+    ) -> int:
+        """
+        Find and link similar memories.
+
+        Uses the search backend to find memories similar to the given one
+        and creates cross-references for those above the threshold.
+
+        Args:
+            memory: The memory to find links for
+            threshold: Minimum similarity to create link (default from config)
+            max_links: Maximum links to create (default from config)
+
+        Returns:
+            Number of cross-references created
+        """
+        # Get thresholds from config or use defaults
+        if threshold is None:
+            threshold = getattr(self.config, "crossref_threshold", None)
+        if threshold is None:
+            threshold = 0.3
+        if max_links is None:
+            max_links = getattr(self.config, "crossref_max_links", None)
+        if max_links is None:
+            max_links = 5
+
+        # Ensure search backend is fitted
+        self._ensure_search_backend_fitted()
+
+        # Search for similar memories
+        similar = self.search_backend.search(memory.content, top_k=max_links + 1)
+
+        # Create cross-references
+        created = 0
+        for other_id, score in similar:
+            # Skip self-reference
+            if other_id == memory.id:
+                continue
+
+            # Skip below threshold
+            if score < threshold:
+                continue
+
+            # Create the crossref
+            self.storage.create_crossref(memory.id, other_id, score)
+            created += 1
+
+            if created >= max_links:
+                break
+
+        if created > 0:
+            logger.debug(f"Created {created} crossrefs for memory {memory.id}")
+
+        return created
+
+    def get_related(
+        self,
+        memory_id: str,
+        limit: int = 5,
+        min_similarity: float = 0.0,
+    ) -> list[Memory]:
+        """
+        Get memories linked to this one via cross-references.
+
+        Args:
+            memory_id: The memory ID to find related memories for
+            limit: Maximum number of results
+            min_similarity: Minimum similarity threshold
+
+        Returns:
+            List of related Memory objects, sorted by similarity
+        """
+        crossrefs = self.storage.get_crossrefs(
+            memory_id, limit=limit, min_similarity=min_similarity
+        )
+
+        # Get the actual Memory objects
+        memories = []
+        for ref in crossrefs:
+            # Get the "other" memory in the relationship
+            other_id = ref.target_id if ref.source_id == memory_id else ref.source_id
+            memory = self.get_memory(other_id)
+            if memory:
+                memories.append(memory)
+
+        return memories
+
+    def recall(
+        self,
+        query: str | None = None,
+        project_id: str | None = None,
+        limit: int = 10,
+        min_importance: float | None = None,
+        memory_type: str | None = None,
+        use_semantic: bool | None = None,
+        search_mode: str | None = None,
+        tags_all: list[str] | None = None,
+        tags_any: list[str] | None = None,
+        tags_none: list[str] | None = None,
+    ) -> list[Memory]:
+        """
+        Retrieve memories.
+
+        If query is provided, performs search/ranking.
+        If no query, returns top important memories.
+
+        Args:
+            query: Optional search query for semantic/text search
+            project_id: Filter by project
+            limit: Maximum memories to return
+            min_importance: Minimum importance threshold
+            memory_type: Filter by memory type
+            use_semantic: Use semantic search (deprecated, use search_mode instead)
+            search_mode: Search mode - "auto" (default), "tfidf", "openai", "hybrid", "text"
+            tags_all: Memory must have ALL of these tags
+            tags_any: Memory must have at least ONE of these tags
+            tags_none: Memory must have NONE of these tags
+        """
+        threshold = (
+            min_importance if min_importance is not None else self.config.importance_threshold
+        )
+
+        if query:
+            memories = self._recall_with_search(
+                query=query,
+                project_id=project_id,
+                limit=limit,
+                min_importance=threshold,
+                use_semantic=use_semantic,
+                search_mode=search_mode,
+                tags_all=tags_all,
+                tags_any=tags_any,
+                tags_none=tags_none,
+            )
+        else:
+            # Just get top memories
+            memories = self.storage.list_memories(
+                project_id=project_id,
+                memory_type=memory_type,
+                min_importance=threshold,
+                limit=limit,
+                tags_all=tags_all,
+                tags_any=tags_any,
+                tags_none=tags_none,
+            )
+
+        # Update access stats for retrieved memories
+        self._update_access_stats(memories)
+
+        return memories
+
+    def _recall_with_search(
+        self,
+        query: str,
+        project_id: str | None = None,
+        limit: int = 10,
+        min_importance: float | None = None,
+        use_semantic: bool | None = None,
+        search_mode: str | None = None,
+        tags_all: list[str] | None = None,
+        tags_any: list[str] | None = None,
+        tags_none: list[str] | None = None,
+    ) -> list[Memory]:
+        """
+        Perform search using the configured search backend.
+
+        Uses the new search backend by default (TF-IDF),
+        falling back to legacy semantic search if configured.
+        """
+        # Determine search mode from config or parameters
+        if search_mode is None:
+            search_mode = getattr(self.config, "search_backend", "tfidf")
+
+        # Legacy compatibility: use_semantic is deprecated
+        if use_semantic is not None:
+            logger.warning("use_semantic argument is deprecated and ignored")
+
+        # Use the search backend
+        try:
+            self._ensure_search_backend_fitted()
+            # Fetch more results to allow for filtering
+            fetch_multiplier = 3 if (tags_all or tags_any or tags_none) else 2
+            results = self.search_backend.search(query, top_k=limit * fetch_multiplier)
+
+            # Get the actual Memory objects
+            memory_ids = [mid for mid, _ in results]
+            memories = []
+            for mid in memory_ids:
+                memory = self.get_memory(mid)
+                if memory:
+                    # Apply filters
+                    if project_id and memory.project_id != project_id:
+                        if memory.project_id is not None:  # Allow global memories
+                            continue
+                    if min_importance and memory.importance < min_importance:
+                        continue
+                    # Apply tag filters
+                    if not self._passes_tag_filter(memory, tags_all, tags_any, tags_none):
+                        continue
+                    memories.append(memory)
+                    if len(memories) >= limit:
+                        break
+
+            return memories
+
+        except Exception as e:
+            logger.warning(f"Search backend failed, falling back to text search: {e}")
+            # Fall back to text search with tag filtering
+            memories = self.storage.search_memories(
+                query_text=query,
+                project_id=project_id,
+                limit=limit * 2,
+                tags_all=tags_all,
+                tags_any=tags_any,
+                tags_none=tags_none,
+            )
+            if min_importance:
+                memories = [m for m in memories if m.importance >= min_importance]
+            return memories[:limit]
+
+    def _passes_tag_filter(
+        self,
+        memory: Memory,
+        tags_all: list[str] | None = None,
+        tags_any: list[str] | None = None,
+        tags_none: list[str] | None = None,
+    ) -> bool:
+        """Check if a memory passes the tag filter criteria."""
+        memory_tags = set(memory.tags) if memory.tags else set()
+
+        # Check tags_all: memory must have ALL specified tags
+        if tags_all and not set(tags_all).issubset(memory_tags):
+            return False
+
+        # Check tags_any: memory must have at least ONE specified tag
+        if tags_any and not memory_tags.intersection(tags_any):
+            return False
+
+        # Check tags_none: memory must have NONE of the specified tags
+        if tags_none and memory_tags.intersection(tags_none):
+            return False
+
+        return True
+
+    def recall_as_context(
+        self,
+        project_id: str | None = None,
+        limit: int = 10,
+        min_importance: float | None = None,
+    ) -> str:
+        """
+        Retrieve memories and format them as context for LLM prompts.
+
+        Convenience method that combines recall() with build_memory_context().
+
+        Args:
+            project_id: Filter by project
+            limit: Maximum memories to return
+            min_importance: Minimum importance threshold
+
+        Returns:
+            Formatted markdown string wrapped in <project-memory> tags,
+            or empty string if no memories found
+        """
+        memories = self.recall(
+            project_id=project_id,
+            limit=limit,
+            min_importance=min_importance,
+        )
+
+        return build_memory_context(memories)
+
+    def _update_access_stats(self, memories: list[Memory]) -> None:
+        """
+        Update access count and time for memories.
+
+        Implements debouncing to avoid excessive database writes when the same
+        memory is accessed multiple times in quick succession.
+        """
+        if not memories:
+            return
+
+        now = datetime.now(UTC)
+        debounce_seconds = getattr(self.config, "access_debounce_seconds", 60)
+
+        for memory in memories:
+            # Check if we should debounce this update
+            if memory.last_accessed_at:
+                try:
+                    last_access = datetime.fromisoformat(memory.last_accessed_at)
+                    if last_access.tzinfo is None:
+                        last_access = last_access.replace(tzinfo=UTC)
+                    seconds_since = (now - last_access).total_seconds()
+                    if seconds_since < debounce_seconds:
+                        # Skip update - accessed too recently
+                        continue
+                except (ValueError, TypeError):
+                    # Invalid timestamp, proceed with update
+                    pass
+
+            # Update access stats
+            try:
+                self.storage.update_access_stats(memory.id, now.isoformat())
+            except Exception as e:
+                logger.warning(f"Failed to update access stats for {memory.id}: {e}")
+
+    def forget(self, memory_id: str) -> bool:
+        """Forget a memory."""
+        result = self.storage.delete_memory(memory_id)
+        if result:
+            # Mark search index for refit since we removed content
+            self.mark_search_refit_needed()
+        return result
+
+    def list_memories(
+        self,
+        project_id: str | None = None,
+        memory_type: str | None = None,
+        min_importance: float | None = None,
+        limit: int = 50,
+        offset: int = 0,
+        tags_all: list[str] | None = None,
+        tags_any: list[str] | None = None,
+        tags_none: list[str] | None = None,
+    ) -> list[Memory]:
+        """
+        List memories with optional filtering.
+
+        Args:
+            project_id: Filter by project ID (or None for global)
+            memory_type: Filter by memory type
+            min_importance: Minimum importance threshold
+            limit: Maximum results
+            offset: Offset for pagination
+            tags_all: Memory must have ALL of these tags
+            tags_any: Memory must have at least ONE of these tags
+            tags_none: Memory must have NONE of these tags
+        """
+        return self.storage.list_memories(
+            project_id=project_id,
+            memory_type=memory_type,
+            min_importance=min_importance,
+            limit=limit,
+            offset=offset,
+            tags_all=tags_all,
+            tags_any=tags_any,
+            tags_none=tags_none,
+        )
+
+    def content_exists(self, content: str, project_id: str | None = None) -> bool:
+        """Check if a memory with identical content already exists."""
+        return self.storage.content_exists(content, project_id)
+
+    def get_memory(self, memory_id: str) -> Memory | None:
+        """Get a specific memory by ID."""
+        try:
+            return self.storage.get_memory(memory_id)
+        except ValueError:
+            return None
+
+    def find_by_prefix(self, prefix: str, limit: int = 5) -> list[Memory]:
+        """
+        Find memories whose IDs start with the given prefix.
+
+        Used for resolving short ID references (e.g., "abc123" -> full UUID).
+
+        Args:
+            prefix: ID prefix to search for
+            limit: Maximum number of results
+
+        Returns:
+            List of Memory objects with matching ID prefixes
+        """
+        rows = self.db.fetchall(
+            "SELECT * FROM memories WHERE id LIKE ? LIMIT ?",
+            (f"{prefix}%", limit),
+        )
+        return [Memory.from_row(row) for row in rows]
+
+    def update_memory(
+        self,
+        memory_id: str,
+        content: str | None = None,
+        importance: float | None = None,
+        tags: list[str] | None = None,
+    ) -> Memory:
+        """
+        Update an existing memory.
+
+        Args:
+            memory_id: The memory to update
+            content: New content (optional)
+            importance: New importance (optional)
+            tags: New tags (optional)
+
+        Returns:
+            Updated Memory object
+
+        Raises:
+            ValueError: If memory not found
+        """
+        result = self.storage.update_memory(
+            memory_id=memory_id,
+            content=content,
+            importance=importance,
+            tags=tags,
+        )
+
+        # Mark search index for refit if content changed
+        if content is not None:
+            self.mark_search_refit_needed()
+
+        return result
+
+    def get_stats(self, project_id: str | None = None) -> dict[str, Any]:
+        """
+        Get statistics about stored memories.
+
+        Args:
+            project_id: Optional project to filter stats by
+
+        Returns:
+            Dictionary with memory statistics
+        """
+        # Get all memories (use large limit)
+        memories = self.storage.list_memories(project_id=project_id, limit=10000)
+
+        if not memories:
+            return {
+                "total_count": 0,
+                "by_type": {},
+                "avg_importance": 0.0,
+                "project_id": project_id,
+            }
+
+        # Count by type
+        by_type: dict[str, int] = {}
+        total_importance = 0.0
+
+        for m in memories:
+            by_type[m.memory_type] = by_type.get(m.memory_type, 0) + 1
+            total_importance += m.importance
+
+        return {
+            "total_count": len(memories),
+            "by_type": by_type,
+            "avg_importance": round(total_importance / len(memories), 3),
+            "project_id": project_id,
+        }
+
+    def decay_memories(self) -> int:
+        """
+        Apply importance decay to all memories.
+
+        Returns:
+            Number of memories updated.
+        """
+        if not self.config.decay_enabled:
+            return 0
+
+        rate = self.config.decay_rate
+        floor = self.config.decay_floor
+
+        # This is a potentially expensive operation if there are many memories.
+        # Ideally we'd do this in the database with SQL, but SQLite math functions
+        # might be limited or we want Python control.
+        # Or we only decay memories accessed > X days ago.
+
+        # Simple implementation: fetch all > floor, decay them, update if changed.
+        # Optimization: Only process a batch or do it entirely in SQL.
+
+        # Let's do a SQL-based update for efficiency if possible, but
+        # LocalMemoryManager doesn't expose a raw execute.
+        # Let's iterate for now (simplest, robust), but limit to 100 at a time maybe?
+        # Or better: Add a `decay_all` method to storage layer?
+
+        # For now, let's just implement the logic here iterating over ALL memories
+        # which is fine for < 1000 memories.
+
+        # Use snapshot-based iteration to avoid pagination issues during updates
+        count = 0
+
+        # Note: listing all memories (limit=10000) to avoid pagination drift when modifying them.
+        # If dataset grows larger, we should implement a cursor-based approach or add list_memories_ids.
+        memories = self.storage.list_memories(min_importance=floor + 0.001, limit=10000)
+
+        for memory in memories:
+            # Calculate simple linear decay since last update
+            last_update = datetime.fromisoformat(memory.updated_at)
+            # Ensure last_update is timezone-aware for subtraction
+            if last_update.tzinfo is None:
+                last_update = last_update.replace(tzinfo=UTC)
+            hours_since = (datetime.now(UTC) - last_update).total_seconds() / 3600
+
+            # If it's been less than 24h, skip to avoid over-decaying if called frequently
+            if hours_since < 24:
+                continue
+
+            # Decay factor: rate * (days since) / 30
+            # Linear decay
+            months_passed = hours_since / (24 * 30)
+            decay_amount = rate * months_passed
+
+            if decay_amount < 0.001:
+                continue
+
+            new_importance = max(floor, memory.importance - decay_amount)
+
+            if new_importance != memory.importance:
+                self.storage.update_memory(
+                    memory.id,
+                    importance=new_importance,
+                )
+                count += 1
+
+        return count
+
+    def export_markdown(
+        self,
+        project_id: str | None = None,
+        include_metadata: bool = True,
+        include_stats: bool = True,
+    ) -> str:
+        """
+        Export memories as a formatted markdown document.
+
+        Creates a human-readable markdown export of memories, suitable for
+        backup, documentation, or sharing.
+
+        Args:
+            project_id: Filter by project ID (None for all memories)
+            include_metadata: Include memory metadata (type, importance, tags)
+            include_stats: Include summary statistics at the top
+
+        Returns:
+            Formatted markdown string with all memories
+
+        Example output:
+            # Memory Export
+
+            **Exported:** 2026-01-19 12:34:56 UTC
+            **Total memories:** 42
+
+            ---
+
+            ## Memory: abc123
+
+            User prefers dark mode for all applications.
+
+            - **Type:** preference
+            - **Importance:** 0.8
+            - **Tags:** ui, settings
+            - **Created:** 2026-01-15 10:00:00
+        """
+        memories = self.storage.list_memories(project_id=project_id, limit=10000)
+
+        lines: list[str] = []
+
+        # Header
+        lines.append("# Memory Export")
+        lines.append("")
+
+        # Stats section
+        if include_stats:
+            now = datetime.now(UTC)
+            lines.append(f"**Exported:** {now.strftime('%Y-%m-%d %H:%M:%S')} UTC")
+            lines.append(f"**Total memories:** {len(memories)}")
+            if project_id:
+                lines.append(f"**Project:** {project_id}")
+
+            # Type breakdown
+            if memories:
+                by_type: dict[str, int] = {}
+                for m in memories:
+                    by_type[m.memory_type] = by_type.get(m.memory_type, 0) + 1
+                type_str = ", ".join(f"{k}: {v}" for k, v in sorted(by_type.items()))
+                lines.append(f"**By type:** {type_str}")
+
+        lines.append("")
+        lines.append("---")
+        lines.append("")
+
+        # Individual memories
+        for memory in memories:
+            # Memory header with short ID
+            short_id = memory.id[:8] if len(memory.id) > 8 else memory.id
+            lines.append(f"## Memory: {short_id}")
+            lines.append("")
+
+            # Content
+            lines.append(memory.content)
+            lines.append("")
+
+            # Metadata
+            if include_metadata:
+                lines.append(f"- **Type:** {memory.memory_type}")
+                lines.append(f"- **Importance:** {memory.importance}")
+
+                if memory.tags:
+                    tags_str = ", ".join(memory.tags)
+                    lines.append(f"- **Tags:** {tags_str}")
+
+                if memory.source_type:
+                    lines.append(f"- **Source:** {memory.source_type}")
+
+                # Parse and format created_at
+                try:
+                    created = datetime.fromisoformat(memory.created_at)
+                    created_str = created.strftime("%Y-%m-%d %H:%M:%S")
+                except (ValueError, TypeError):
+                    created_str = memory.created_at
+                lines.append(f"- **Created:** {created_str}")

+                if memory.access_count > 0:
+                    lines.append(f"- **Accessed:** {memory.access_count} times")

+                lines.append("")
+
+            lines.append("---")
+            lines.append("")
+
+        return "\n".join(lines)
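For orientation, here is a minimal usage sketch of the MemoryManager API added above. It is illustrative only: the database setup and the bare MemoryConfig() defaults are assumptions (the diff only shows that MemoryManager takes a DatabaseProtocol implementation and a MemoryConfig), so the wiring below will differ from the package's real entry points.

import asyncio

from gobby.config.app import MemoryConfig
from gobby.memory.manager import MemoryManager


async def main() -> None:
    # Assumed setup: any object satisfying DatabaseProtocol works here.
    # make_database() is a hypothetical stand-in, not part of this diff.
    db = make_database()
    manager = MemoryManager(db=db, config=MemoryConfig())

    # Store a memory; the search index is marked for refit automatically.
    await manager.remember(
        "User prefers dark mode for all applications.",
        memory_type="preference",
        importance=0.8,
        tags=["ui", "settings"],
    )

    # Query via the configured search backend (TF-IDF by default).
    for memory in manager.recall(query="dark mode", limit=5):
        print(memory.id, memory.content)


asyncio.run(main())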