agent-cli 0.70.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_cli/__init__.py +5 -0
- agent_cli/__main__.py +6 -0
- agent_cli/_extras.json +14 -0
- agent_cli/_requirements/.gitkeep +0 -0
- agent_cli/_requirements/audio.txt +79 -0
- agent_cli/_requirements/faster-whisper.txt +215 -0
- agent_cli/_requirements/kokoro.txt +425 -0
- agent_cli/_requirements/llm.txt +183 -0
- agent_cli/_requirements/memory.txt +355 -0
- agent_cli/_requirements/mlx-whisper.txt +222 -0
- agent_cli/_requirements/piper.txt +176 -0
- agent_cli/_requirements/rag.txt +402 -0
- agent_cli/_requirements/server.txt +154 -0
- agent_cli/_requirements/speed.txt +77 -0
- agent_cli/_requirements/vad.txt +155 -0
- agent_cli/_requirements/wyoming.txt +71 -0
- agent_cli/_tools.py +368 -0
- agent_cli/agents/__init__.py +23 -0
- agent_cli/agents/_voice_agent_common.py +136 -0
- agent_cli/agents/assistant.py +383 -0
- agent_cli/agents/autocorrect.py +284 -0
- agent_cli/agents/chat.py +496 -0
- agent_cli/agents/memory/__init__.py +31 -0
- agent_cli/agents/memory/add.py +190 -0
- agent_cli/agents/memory/proxy.py +160 -0
- agent_cli/agents/rag_proxy.py +128 -0
- agent_cli/agents/speak.py +209 -0
- agent_cli/agents/transcribe.py +671 -0
- agent_cli/agents/transcribe_daemon.py +499 -0
- agent_cli/agents/voice_edit.py +291 -0
- agent_cli/api.py +22 -0
- agent_cli/cli.py +106 -0
- agent_cli/config.py +503 -0
- agent_cli/config_cmd.py +307 -0
- agent_cli/constants.py +27 -0
- agent_cli/core/__init__.py +1 -0
- agent_cli/core/audio.py +461 -0
- agent_cli/core/audio_format.py +299 -0
- agent_cli/core/chroma.py +88 -0
- agent_cli/core/deps.py +191 -0
- agent_cli/core/openai_proxy.py +139 -0
- agent_cli/core/process.py +195 -0
- agent_cli/core/reranker.py +120 -0
- agent_cli/core/sse.py +87 -0
- agent_cli/core/transcription_logger.py +70 -0
- agent_cli/core/utils.py +526 -0
- agent_cli/core/vad.py +175 -0
- agent_cli/core/watch.py +65 -0
- agent_cli/dev/__init__.py +14 -0
- agent_cli/dev/cli.py +1588 -0
- agent_cli/dev/coding_agents/__init__.py +19 -0
- agent_cli/dev/coding_agents/aider.py +24 -0
- agent_cli/dev/coding_agents/base.py +167 -0
- agent_cli/dev/coding_agents/claude.py +39 -0
- agent_cli/dev/coding_agents/codex.py +24 -0
- agent_cli/dev/coding_agents/continue_dev.py +15 -0
- agent_cli/dev/coding_agents/copilot.py +24 -0
- agent_cli/dev/coding_agents/cursor_agent.py +48 -0
- agent_cli/dev/coding_agents/gemini.py +28 -0
- agent_cli/dev/coding_agents/opencode.py +15 -0
- agent_cli/dev/coding_agents/registry.py +49 -0
- agent_cli/dev/editors/__init__.py +19 -0
- agent_cli/dev/editors/base.py +89 -0
- agent_cli/dev/editors/cursor.py +15 -0
- agent_cli/dev/editors/emacs.py +46 -0
- agent_cli/dev/editors/jetbrains.py +56 -0
- agent_cli/dev/editors/nano.py +31 -0
- agent_cli/dev/editors/neovim.py +33 -0
- agent_cli/dev/editors/registry.py +59 -0
- agent_cli/dev/editors/sublime.py +20 -0
- agent_cli/dev/editors/vim.py +42 -0
- agent_cli/dev/editors/vscode.py +15 -0
- agent_cli/dev/editors/zed.py +20 -0
- agent_cli/dev/project.py +568 -0
- agent_cli/dev/registry.py +52 -0
- agent_cli/dev/skill/SKILL.md +141 -0
- agent_cli/dev/skill/examples.md +571 -0
- agent_cli/dev/terminals/__init__.py +19 -0
- agent_cli/dev/terminals/apple_terminal.py +82 -0
- agent_cli/dev/terminals/base.py +56 -0
- agent_cli/dev/terminals/gnome.py +51 -0
- agent_cli/dev/terminals/iterm2.py +84 -0
- agent_cli/dev/terminals/kitty.py +77 -0
- agent_cli/dev/terminals/registry.py +48 -0
- agent_cli/dev/terminals/tmux.py +58 -0
- agent_cli/dev/terminals/warp.py +132 -0
- agent_cli/dev/terminals/zellij.py +78 -0
- agent_cli/dev/worktree.py +856 -0
- agent_cli/docs_gen.py +417 -0
- agent_cli/example-config.toml +185 -0
- agent_cli/install/__init__.py +5 -0
- agent_cli/install/common.py +89 -0
- agent_cli/install/extras.py +174 -0
- agent_cli/install/hotkeys.py +48 -0
- agent_cli/install/services.py +87 -0
- agent_cli/memory/__init__.py +7 -0
- agent_cli/memory/_files.py +250 -0
- agent_cli/memory/_filters.py +63 -0
- agent_cli/memory/_git.py +157 -0
- agent_cli/memory/_indexer.py +142 -0
- agent_cli/memory/_ingest.py +408 -0
- agent_cli/memory/_persistence.py +182 -0
- agent_cli/memory/_prompt.py +91 -0
- agent_cli/memory/_retrieval.py +294 -0
- agent_cli/memory/_store.py +169 -0
- agent_cli/memory/_streaming.py +44 -0
- agent_cli/memory/_tasks.py +48 -0
- agent_cli/memory/api.py +113 -0
- agent_cli/memory/client.py +272 -0
- agent_cli/memory/engine.py +361 -0
- agent_cli/memory/entities.py +43 -0
- agent_cli/memory/models.py +112 -0
- agent_cli/opts.py +433 -0
- agent_cli/py.typed +0 -0
- agent_cli/rag/__init__.py +3 -0
- agent_cli/rag/_indexer.py +67 -0
- agent_cli/rag/_indexing.py +226 -0
- agent_cli/rag/_prompt.py +30 -0
- agent_cli/rag/_retriever.py +156 -0
- agent_cli/rag/_store.py +48 -0
- agent_cli/rag/_utils.py +218 -0
- agent_cli/rag/api.py +175 -0
- agent_cli/rag/client.py +299 -0
- agent_cli/rag/engine.py +302 -0
- agent_cli/rag/models.py +55 -0
- agent_cli/scripts/.runtime/.gitkeep +0 -0
- agent_cli/scripts/__init__.py +1 -0
- agent_cli/scripts/check_plugin_skill_sync.py +50 -0
- agent_cli/scripts/linux-hotkeys/README.md +63 -0
- agent_cli/scripts/linux-hotkeys/toggle-autocorrect.sh +45 -0
- agent_cli/scripts/linux-hotkeys/toggle-transcription.sh +58 -0
- agent_cli/scripts/linux-hotkeys/toggle-voice-edit.sh +58 -0
- agent_cli/scripts/macos-hotkeys/README.md +45 -0
- agent_cli/scripts/macos-hotkeys/skhd-config-example +5 -0
- agent_cli/scripts/macos-hotkeys/toggle-autocorrect.sh +12 -0
- agent_cli/scripts/macos-hotkeys/toggle-transcription.sh +37 -0
- agent_cli/scripts/macos-hotkeys/toggle-voice-edit.sh +37 -0
- agent_cli/scripts/nvidia-asr-server/README.md +99 -0
- agent_cli/scripts/nvidia-asr-server/pyproject.toml +27 -0
- agent_cli/scripts/nvidia-asr-server/server.py +255 -0
- agent_cli/scripts/nvidia-asr-server/shell.nix +32 -0
- agent_cli/scripts/nvidia-asr-server/uv.lock +4654 -0
- agent_cli/scripts/run-openwakeword.sh +11 -0
- agent_cli/scripts/run-piper-windows.ps1 +30 -0
- agent_cli/scripts/run-piper.sh +24 -0
- agent_cli/scripts/run-whisper-linux.sh +40 -0
- agent_cli/scripts/run-whisper-macos.sh +6 -0
- agent_cli/scripts/run-whisper-windows.ps1 +51 -0
- agent_cli/scripts/run-whisper.sh +9 -0
- agent_cli/scripts/run_faster_whisper_server.py +136 -0
- agent_cli/scripts/setup-linux-hotkeys.sh +72 -0
- agent_cli/scripts/setup-linux.sh +108 -0
- agent_cli/scripts/setup-macos-hotkeys.sh +61 -0
- agent_cli/scripts/setup-macos.sh +76 -0
- agent_cli/scripts/setup-windows.ps1 +63 -0
- agent_cli/scripts/start-all-services-windows.ps1 +53 -0
- agent_cli/scripts/start-all-services.sh +178 -0
- agent_cli/scripts/sync_extras.py +138 -0
- agent_cli/server/__init__.py +3 -0
- agent_cli/server/cli.py +721 -0
- agent_cli/server/common.py +222 -0
- agent_cli/server/model_manager.py +288 -0
- agent_cli/server/model_registry.py +225 -0
- agent_cli/server/proxy/__init__.py +3 -0
- agent_cli/server/proxy/api.py +444 -0
- agent_cli/server/streaming.py +67 -0
- agent_cli/server/tts/__init__.py +3 -0
- agent_cli/server/tts/api.py +335 -0
- agent_cli/server/tts/backends/__init__.py +82 -0
- agent_cli/server/tts/backends/base.py +139 -0
- agent_cli/server/tts/backends/kokoro.py +403 -0
- agent_cli/server/tts/backends/piper.py +253 -0
- agent_cli/server/tts/model_manager.py +201 -0
- agent_cli/server/tts/model_registry.py +28 -0
- agent_cli/server/tts/wyoming_handler.py +249 -0
- agent_cli/server/whisper/__init__.py +3 -0
- agent_cli/server/whisper/api.py +413 -0
- agent_cli/server/whisper/backends/__init__.py +89 -0
- agent_cli/server/whisper/backends/base.py +97 -0
- agent_cli/server/whisper/backends/faster_whisper.py +225 -0
- agent_cli/server/whisper/backends/mlx.py +270 -0
- agent_cli/server/whisper/languages.py +116 -0
- agent_cli/server/whisper/model_manager.py +157 -0
- agent_cli/server/whisper/model_registry.py +28 -0
- agent_cli/server/whisper/wyoming_handler.py +203 -0
- agent_cli/services/__init__.py +343 -0
- agent_cli/services/_wyoming_utils.py +64 -0
- agent_cli/services/asr.py +506 -0
- agent_cli/services/llm.py +228 -0
- agent_cli/services/tts.py +450 -0
- agent_cli/services/wake_word.py +142 -0
- agent_cli-0.70.5.dist-info/METADATA +2118 -0
- agent_cli-0.70.5.dist-info/RECORD +196 -0
- agent_cli-0.70.5.dist-info/WHEEL +4 -0
- agent_cli-0.70.5.dist-info/entry_points.txt +4 -0
- agent_cli-0.70.5.dist-info/licenses/LICENSE +21 -0
agent_cli/memory/_git.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
"""Git integration for memory versioning."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import logging
|
|
7
|
+
import shutil
|
|
8
|
+
import subprocess
|
|
9
|
+
from typing import TYPE_CHECKING, NamedTuple
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
LOGGER = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class GitCommandResult(NamedTuple):
    """Result of a git command execution."""

    # Process exit status; 0 indicates success.
    returncode: int
    # Captured standard output, decoded as UTF-8 with undecodable bytes replaced.
    stdout: str
    # Captured standard error, decoded as UTF-8 with undecodable bytes replaced.
    stderr: str
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _is_git_installed() -> bool:
|
|
26
|
+
"""Check if git is available in the path."""
|
|
27
|
+
return shutil.which("git") is not None
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _run_git_sync(
    args: list[str],
    cwd: Path,
    check: bool = True,
) -> GitCommandResult:
    """Execute ``git`` with *args* inside *cwd* and capture its output.

    When *check* is true, a non-zero exit status raises
    ``subprocess.CalledProcessError``.
    """
    completed = subprocess.run(
        ["git", *args],  # noqa: S607
        cwd=cwd,
        check=check,
        capture_output=True,
        text=True,
        encoding="utf-8",
        errors="replace",
    )
    return GitCommandResult(
        completed.returncode,
        completed.stdout,
        completed.stderr,
    )
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
async def _run_git_async(
    args: list[str],
    cwd: Path,
    check: bool = True,
) -> GitCommandResult:
    """Execute ``git`` with *args* inside *cwd* without blocking the event loop.

    When *check* is true, a non-zero exit status raises
    ``subprocess.CalledProcessError`` carrying the captured output.
    """
    process = await asyncio.create_subprocess_exec(
        "git",
        *args,
        cwd=cwd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    raw_out, raw_err = await process.communicate()
    # communicate() waits for process exit, so the return code is set here.
    assert process.returncode is not None
    out_text = raw_out.decode("utf-8", errors="replace")
    err_text = raw_err.decode("utf-8", errors="replace")

    if check and process.returncode != 0:
        raise subprocess.CalledProcessError(
            process.returncode,
            ["git", *args],
            output=out_text,
            stderr=err_text,
        )

    return GitCommandResult(process.returncode, out_text, err_text)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def init_repo(path: Path) -> None:
    """Initialize a git repository in *path* if one does not exist.

    Creates the repository, ensures a local committer identity, seeds a
    ``.gitignore`` and ``README.md``, and makes an initial (possibly empty)
    commit. All failures are logged and swallowed so the memory store keeps
    working without git.

    Args:
        path: Root directory of the memory store.
    """
    if not _is_git_installed():
        LOGGER.warning("Git is not installed; skipping repository initialization.")
        return

    if (path / ".git").exists():
        return  # Already a repository; nothing to do.

    try:
        LOGGER.info("Initializing git repository in %s", path)
        _run_git_sync(["init"], cwd=path)

        # Configure local user if not set (to avoid commit errors).
        # Check email and name independently: a user may have one configured
        # globally but not the other, and commits require both. (The previous
        # behavior only set user.name when user.email was missing, so a
        # global-email/no-name setup silently failed the initial commit.)
        try:
            _run_git_sync(["config", "user.email"], cwd=path)
        except subprocess.CalledProcessError:
            # No email configured, set local config
            _run_git_sync(["config", "user.email", "agent-cli@local"], cwd=path)
        try:
            _run_git_sync(["config", "user.name"], cwd=path)
        except subprocess.CalledProcessError:
            # No name configured, set local config
            _run_git_sync(["config", "user.name", "Agent CLI"], cwd=path)

        # Create .gitignore to exclude derived data (vector db, cache)
        gitignore_path = path / ".gitignore"
        if not gitignore_path.exists():
            gitignore_content = "chroma/\nmemory_index.json\n__pycache__/\n*.tmp\n.DS_Store\n"
            gitignore_path.write_text(gitignore_content, encoding="utf-8")

        # Create README.md describing the store layout.
        readme_path = path / "README.md"
        if not readme_path.exists():
            readme_content = (
                "# Agent Memory Store\n\n"
                "This repository contains the long-term memory for the Agent CLI.\n"
                "Files are automatically managed and versioned by the memory proxy.\n\n"
                "- `entries/`: Markdown files containing facts and conversation logs.\n"
                "- `deleted/`: Soft-deleted memories (tombstones).\n"
            )
            readme_path.write_text(readme_content, encoding="utf-8")

        # Initial commit. check=False: an identity problem here should not
        # abort initialization; later commits will surface it again.
        _run_git_sync(["add", "."], cwd=path)
        _run_git_sync(
            ["commit", "--allow-empty", "-m", "Initial commit"],
            cwd=path,
            check=False,
        )

    except subprocess.CalledProcessError:
        LOGGER.exception("Failed to initialize git repo")
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
async def commit_changes(path: Path, message: str) -> None:
    """Stage and commit all changes in the given path."""
    if not _is_git_installed():
        return

    if not (path / ".git").exists():
        LOGGER.warning("Not a git repository: %s", path)
        return

    try:
        # A clean working tree means there is nothing to record.
        status = await _run_git_async(["status", "--porcelain"], cwd=path, check=False)
        if status.returncode != 0:
            LOGGER.error("Failed to check git status")
            return
        if not status.stdout.strip():
            return

        LOGGER.info("Committing changes to memory store: %s", message)
        await _run_git_async(["add", "."], cwd=path)
        await _run_git_async(["commit", "-m", message], cwd=path)

    except Exception:
        # Best-effort versioning: never let a git failure break the caller.
        LOGGER.exception("Failed to commit changes")
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"""File watcher and indexing for file-backed memories."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from agent_cli.core.watch import watch_directory
|
|
10
|
+
from agent_cli.memory._files import (
|
|
11
|
+
_DELETED_DIRNAME,
|
|
12
|
+
MemoryFileRecord,
|
|
13
|
+
ensure_store_dirs,
|
|
14
|
+
load_memory_files,
|
|
15
|
+
load_snapshot,
|
|
16
|
+
read_memory_file,
|
|
17
|
+
write_snapshot,
|
|
18
|
+
)
|
|
19
|
+
from agent_cli.memory._store import delete_entries, upsert_memories
|
|
20
|
+
|
|
21
|
+
if TYPE_CHECKING:
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from chromadb import Collection
|
|
25
|
+
from watchfiles import Change
|
|
26
|
+
|
|
27
|
+
LOGGER = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class MemoryIndex:
    """In-memory view of memory files plus a JSON snapshot on disk."""

    # Mapping of document id -> record; mirrors the on-disk snapshot.
    entries: dict[str, MemoryFileRecord] = field(default_factory=dict)
    # Where the JSON snapshot is written; persistence is skipped when unset.
    snapshot_path: Path | None = None

    @classmethod
    def from_snapshot(cls, snapshot_path: Path) -> MemoryIndex:
        """Restore index state from a snapshot file if present."""
        return cls(entries=load_snapshot(snapshot_path), snapshot_path=snapshot_path)

    def replace(self, records: list[MemoryFileRecord]) -> None:
        """Replace the in-memory index with the given records."""
        rebuilt: dict[str, MemoryFileRecord] = {}
        for rec in records:
            rebuilt[rec.id] = rec
        self.entries = rebuilt
        self._persist()

    def upsert(self, record: MemoryFileRecord) -> None:
        """Insert or update a record and persist the snapshot."""
        self.entries[record.id] = record
        self._persist()

    def remove(self, doc_id: str) -> None:
        """Remove a record by id (no-op if absent) and persist the snapshot."""
        self.entries.pop(doc_id, None)
        self._persist()

    def find_id_by_path(self, path: Path) -> str | None:
        """Find a record id by its file path, if present."""
        return next(
            (doc_id for doc_id, record in self.entries.items() if record.path == path),
            None,
        )

    def _persist(self) -> None:
        """Write the snapshot to disk when a snapshot path is configured."""
        if self.snapshot_path:
            write_snapshot(self.snapshot_path, self.entries.values())
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def initial_index(collection: Collection, root: Path, *, index: MemoryIndex) -> None:
    """Load memory files, reconcile against snapshot, and index into Chroma."""
    entries_dir, snapshot_path = ensure_store_dirs(root)
    if index.snapshot_path is None:
        index.snapshot_path = snapshot_path

    records = load_memory_files(root)
    live_ids = {rec.id for rec in records}

    # Drop docs the snapshot remembers but that no longer exist on disk.
    stale_ids = set(index.entries) - live_ids
    if stale_ids:
        LOGGER.info("Removing %d stale memory docs from index", len(stale_ids))
        delete_entries(collection, list(stale_ids))

    if not records:
        LOGGER.info("No memory files found in %s", entries_dir)
    else:
        upsert_memories(
            collection,
            ids=[rec.id for rec in records],
            contents=[rec.content for rec in records],
            metadatas=[rec.metadata for rec in records],
        )
        LOGGER.info("Indexed %d memory docs from %s", len(records), entries_dir)

    index.replace(records)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
async def watch_memory_store(collection: Collection, root: Path, *, index: MemoryIndex) -> None:
    """Watch the memory entries folder and keep Chroma in sync."""
    entries_dir, snapshot_path = ensure_store_dirs(root)
    if index.snapshot_path is None:
        index.snapshot_path = snapshot_path

    LOGGER.info("📁 Watching memory store: %s", entries_dir)

    def _on_change(change: Change, path: Path) -> None:
        # Forward each filesystem event to the shared handler.
        _handle_change(change, path, collection, index)

    await watch_directory(entries_dir, _on_change)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _handle_change(change: Change, path: Path, collection: Collection, index: MemoryIndex) -> None:
    """Mirror a single filesystem event into the Chroma collection and the index."""
    from watchfiles import Change  # noqa: PLC0415

    # Skip atomic-write temp files and anything under the tombstone directory.
    if path.suffix == ".tmp" or _DELETED_DIRNAME in path.parts:
        return

    if change == Change.deleted:
        doc_id = index.find_id_by_path(path)
        if not doc_id:
            # Fallback: try to parse ID from filename (timestamp__uuid.md)
            stem_parts = path.stem.split("__")
            doc_id = stem_parts[-1] if len(stem_parts) > 1 else path.stem

        LOGGER.info("[deleted] %s", path.name)
        delete_entries(collection, [doc_id])
        index.remove(doc_id)
        return

    if change not in {Change.added, Change.modified}:
        return

    action = "added" if change == Change.added else "modified"
    LOGGER.info("[%s] %s", action, path.name)
    record = read_memory_file(path)
    if not record:
        return
    upsert_memories(
        collection,
        ids=[record.id],
        contents=[record.content],
        metadatas=[record.metadata],
    )
    index.upsert(record)
|
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
"""Ingestion logic for memory (LLM Extraction, Reconciliation, Summarization)."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
from datetime import UTC, datetime
|
|
8
|
+
from time import perf_counter
|
|
9
|
+
from typing import TYPE_CHECKING
|
|
10
|
+
from uuid import uuid4
|
|
11
|
+
|
|
12
|
+
from agent_cli.memory._git import commit_changes
|
|
13
|
+
from agent_cli.memory._persistence import delete_memory_files, persist_entries, persist_summary
|
|
14
|
+
from agent_cli.memory._prompt import (
|
|
15
|
+
FACT_INSTRUCTIONS,
|
|
16
|
+
FACT_SYSTEM_PROMPT,
|
|
17
|
+
SUMMARY_PROMPT,
|
|
18
|
+
UPDATE_MEMORY_PROMPT,
|
|
19
|
+
)
|
|
20
|
+
from agent_cli.memory._retrieval import gather_relevant_existing_memories
|
|
21
|
+
from agent_cli.memory._store import delete_entries, get_summary_entry
|
|
22
|
+
from agent_cli.memory.entities import Fact, Summary
|
|
23
|
+
from agent_cli.memory.models import (
|
|
24
|
+
MemoryAdd,
|
|
25
|
+
MemoryDecision,
|
|
26
|
+
MemoryDelete,
|
|
27
|
+
MemoryIgnore,
|
|
28
|
+
MemoryUpdate,
|
|
29
|
+
SummaryOutput,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
if TYPE_CHECKING:
|
|
33
|
+
from pathlib import Path
|
|
34
|
+
|
|
35
|
+
from chromadb import Collection
|
|
36
|
+
|
|
37
|
+
LOGGER = logging.getLogger(__name__)
|
|
38
|
+
|
|
39
|
+
_SUMMARY_ROLE = "summary"
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _elapsed_ms(start: float) -> float:
|
|
43
|
+
"""Return elapsed milliseconds since start."""
|
|
44
|
+
return (perf_counter() - start) * 1000
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
async def extract_salient_facts(
    *,
    user_message: str | None,
    assistant_message: str | None,
    openai_base_url: str,
    api_key: str | None,
    model: str,
) -> list[str]:
    """Run an LLM agent to extract facts from the transcript.

    Args:
        user_message: Latest user turn. This is the only text the extractor
            actually reads (see the note at the transcript assignment below).
        assistant_message: Latest assistant turn; only consulted to decide
            whether there is anything to process at all.
        openai_base_url: Base URL of the OpenAI-compatible endpoint.
        api_key: API key for the endpoint; ``"dummy"`` is sent when ``None``.
        model: Model name passed to the endpoint.

    Returns:
        Extracted fact strings; empty when there is no input or on a
        transient LLM/network failure.

    Raises:
        Exception: Unexpected internal errors are logged and re-raised.
    """
    if not user_message and not assistant_message:
        return []

    # Heavy third-party imports deferred so module import stays cheap.
    import httpx  # noqa: PLC0415
    from pydantic_ai import Agent  # noqa: PLC0415
    from pydantic_ai.exceptions import AgentRunError, UnexpectedModelBehavior  # noqa: PLC0415
    from pydantic_ai.models.openai import OpenAIChatModel  # noqa: PLC0415
    from pydantic_ai.providers.openai import OpenAIProvider  # noqa: PLC0415

    # Extract facts from the latest user turn only (ignore assistant/system).
    transcript = user_message or ""
    LOGGER.info("Extracting facts from transcript: %r", transcript)

    provider = OpenAIProvider(api_key=api_key or "dummy", base_url=openai_base_url)
    model_cfg = OpenAIChatModel(model_name=model, provider=provider)
    agent = Agent(
        model=model_cfg,
        system_prompt=FACT_SYSTEM_PROMPT,
        output_type=list[str],  # structured output: a plain list of fact strings
        retries=2,
    )
    instructions = FACT_INSTRUCTIONS

    try:
        facts = await agent.run(transcript, instructions=instructions)
        LOGGER.info("Raw fact extraction output: %s", facts.output)
        return facts.output
    except (httpx.HTTPError, AgentRunError, UnexpectedModelBehavior):
        # Transient network/model failures degrade to "no facts" rather than crash.
        LOGGER.warning("PydanticAI fact extraction transient failure", exc_info=True)
        return []
    except Exception:
        # Anything else is an internal error: surface it to the caller.
        LOGGER.exception("PydanticAI fact extraction internal error")
        raise
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def process_reconciliation_decisions(
    decisions: list[MemoryDecision],
    id_map: dict[int, str],
    conversation_id: str,
    source_id: str,
    created_at: datetime,
) -> tuple[list[Fact], list[str], dict[str, str]]:
    """Process LLM decisions into actionable changes.

    Returns a tuple of (facts to add, memory ids to delete, old-id -> new-id
    replacement map for updated facts).
    """
    additions: list[Fact] = []
    deletions: list[str] = []
    replacements: dict[str, str] = {}

    LOGGER.info(
        "Reconcile decisions raw: %s",
        [d.model_dump() for d in decisions],
    )

    def _make_fact(fact_id: str, text: str) -> Fact:
        # All facts produced here share the same conversation/source/timestamp.
        return Fact(
            id=fact_id,
            conversation_id=conversation_id,
            content=text,
            source_id=source_id,
            created_at=created_at,
        )

    for decision in decisions:
        if isinstance(decision, MemoryAdd):
            text = decision.text.strip()
            if text:
                additions.append(_make_fact(str(uuid4()), text))
        elif isinstance(decision, MemoryUpdate):
            text = decision.text.strip()
            if text:
                # Update existing memory: delete old, add new
                original_id = id_map[decision.id]  # Guaranteed valid by output_validator
                replacement_id = str(uuid4())
                deletions.append(original_id)
                additions.append(_make_fact(replacement_id, text))
                replacements[original_id] = replacement_id
        elif isinstance(decision, MemoryDelete):
            deletions.append(id_map[decision.id])  # Guaranteed valid by output_validator
        elif isinstance(decision, MemoryIgnore):
            pass  # NONE ignored
    return additions, deletions, replacements
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
async def reconcile_facts(
    collection: Collection,
    conversation_id: str,
    new_facts: list[str],
    source_id: str,
    created_at: datetime,
    *,
    openai_base_url: str,
    api_key: str | None,
    model: str,
) -> tuple[list[Fact], list[str], dict[str, str]]:
    """Use an LLM to decide add/update/delete/none for facts, with id remapping.

    The LLM sees compact integer ids (0..n-1) for the existing memories;
    ``id_map`` translates those back to real document ids. On any transient
    failure the function degrades gracefully by adding all new facts.

    Returns:
        A tuple of (facts to add, document ids to delete, old-id -> new-id
        replacement map for updated facts).
    """
    if not new_facts:
        return [], [], {}

    existing = gather_relevant_existing_memories(collection, conversation_id, new_facts)
    LOGGER.info("Reconcile: Found %d existing memories for new facts %s", len(existing), new_facts)
    if not existing:
        # Nothing to reconcile against: every non-empty new fact becomes an add.
        LOGGER.info("Reconcile: no existing memory facts; defaulting to add all new facts")
        entries = [
            Fact(
                id=str(uuid4()),
                conversation_id=conversation_id,
                content=f,
                source_id=source_id,
                created_at=created_at,
            )
            for f in new_facts
            if f.strip()
        ]
        return entries, [], {}

    # Heavy third-party imports deferred so module import stays cheap.
    import httpx  # noqa: PLC0415
    from pydantic_ai import Agent, ModelRetry, PromptedOutput  # noqa: PLC0415
    from pydantic_ai.exceptions import AgentRunError, UnexpectedModelBehavior  # noqa: PLC0415
    from pydantic_ai.models.openai import OpenAIChatModel  # noqa: PLC0415
    from pydantic_ai.providers.openai import OpenAIProvider  # noqa: PLC0415
    from pydantic_ai.settings import ModelSettings  # noqa: PLC0415

    # Present existing memories to the LLM under small integer ids; keep a map
    # back to the real document ids for translating its decisions.
    id_map: dict[int, str] = {idx: mem.id for idx, mem in enumerate(existing)}
    existing_json = [{"id": idx, "text": mem.content} for idx, mem in enumerate(existing)]
    existing_ids = set(id_map.keys())

    provider = OpenAIProvider(api_key=api_key or "dummy", base_url=openai_base_url)
    model_cfg = OpenAIChatModel(
        model_name=model,
        provider=provider,
        # Deterministic, bounded output for a structured-decision task.
        settings=ModelSettings(temperature=0.0, max_tokens=512),
    )
    agent = Agent(
        model=model_cfg,
        system_prompt=UPDATE_MEMORY_PROMPT,
        output_type=PromptedOutput(list[MemoryDecision]),  # JSON mode instead of tool calls
        retries=3,
    )

    @agent.output_validator
    def validate_decisions(decisions: list[MemoryDecision]) -> list[MemoryDecision]:
        """Validate LLM decisions and provide feedback for retry."""
        errors = []
        for dec in decisions:
            # Only decisions that reference an existing memory carry an id to check.
            if (
                isinstance(dec, (MemoryUpdate, MemoryDelete, MemoryIgnore))
                and dec.id not in existing_ids
            ):
                if isinstance(dec, MemoryUpdate):
                    errors.append(
                        f"UPDATE with id={dec.id} is invalid: that ID doesn't exist. "
                        f"Valid existing IDs are: {sorted(existing_ids)}. "
                        f"For NEW facts, use ADD with a new ID.",
                    )
                elif isinstance(dec, MemoryDelete):
                    errors.append(f"DELETE with id={dec.id} is invalid: that ID doesn't exist.")
                else:  # MemoryIgnore (NONE)
                    errors.append(f"NONE with id={dec.id} is invalid: that ID doesn't exist.")
        if errors:
            # ModelRetry feeds the error list back to the model for another attempt.
            msg = "Invalid memory decisions:\n" + "\n".join(f"- {e}" for e in errors)
            raise ModelRetry(msg)
        return decisions

    # Format with separate sections for existing and new facts
    existing_str = json.dumps(existing_json, ensure_ascii=False, indent=2)
    new_facts_str = json.dumps(new_facts, ensure_ascii=False, indent=2)
    payload = f"""Current memory:
```
{existing_str}
```

New facts to process:
```
{new_facts_str}
```"""
    LOGGER.info("Reconcile payload: %s", payload)
    try:
        result = await agent.run(payload)
        decisions = result.output
    except (httpx.HTTPError, AgentRunError, UnexpectedModelBehavior):
        # Transient failure: fall back to adding every non-empty new fact.
        LOGGER.warning(
            "Update memory agent transient failure; defaulting to add all new facts",
            exc_info=True,
        )
        entries = [
            Fact(
                id=str(uuid4()),
                conversation_id=conversation_id,
                content=f,
                source_id=source_id,
                created_at=created_at,
            )
            for f in new_facts
            if f.strip()
        ]
        return entries, [], {}
    except Exception:
        # Anything else is an internal error: surface it to the caller.
        LOGGER.exception("Update memory agent internal error")
        raise

    to_add, to_delete, replacement_map = process_reconciliation_decisions(
        decisions,
        id_map,
        conversation_id=conversation_id,
        source_id=source_id,
        created_at=created_at,
    )

    LOGGER.info(
        "Reconcile decisions: add=%d, delete=%d, events=%s",
        len(to_add),
        len(to_delete),
        [dec.event for dec in decisions],
    )
    return to_add, to_delete, replacement_map
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
async def update_summary(
    *,
    prior_summary: str | None,
    new_facts: list[str],
    openai_base_url: str,
    api_key: str | None,
    model: str,
    max_tokens: int = 256,
) -> str | None:
    """Update the conversation summary based on new facts."""
    if not new_facts:
        return prior_summary

    # Heavy third-party imports deferred so module import stays cheap.
    from pydantic_ai import Agent  # noqa: PLC0415
    from pydantic_ai.models.openai import OpenAIChatModel  # noqa: PLC0415
    from pydantic_ai.providers.openai import OpenAIProvider  # noqa: PLC0415
    from pydantic_ai.settings import ModelSettings  # noqa: PLC0415

    # Build the prompt: previous summary (if any) followed by the new facts.
    sections: list[str] = []
    if prior_summary:
        sections.append(f"Previous summary:\n{prior_summary}")
    sections.append("New facts:\n" + "\n".join(f"- {fact}" for fact in new_facts))
    prompt_text = "\n\n".join(sections)

    provider = OpenAIProvider(api_key=api_key or "dummy", base_url=openai_base_url)
    llm = OpenAIChatModel(
        model_name=model,
        provider=provider,
        settings=ModelSettings(temperature=0.2, max_tokens=max_tokens),
    )
    agent = Agent(model=llm, system_prompt=SUMMARY_PROMPT, output_type=SummaryOutput)
    result = await agent.run(prompt_text)
    # Keep the prior summary when the model returns an empty one.
    return result.output.summary or prior_summary
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
async def extract_and_store_facts_and_summaries(
    *,
    collection: Collection,
    memory_root: Path,
    conversation_id: str,
    user_message: str | None,
    assistant_message: str | None,
    openai_base_url: str,
    api_key: str | None,
    model: str,
    enable_git_versioning: bool = False,
    source_id: str | None = None,
    enable_summarization: bool = True,
) -> None:
    """Run fact extraction and summary updates, persisting results.

    Pipeline: extract facts from the turn, reconcile them against existing
    memories, apply deletions then additions to both Chroma and the file
    store, optionally refresh the conversation summary, and finally commit
    everything to git when versioning is enabled.
    """
    fact_start = perf_counter()
    # All facts from this run share one source id and creation timestamp.
    effective_source_id = source_id or str(uuid4())
    fact_created_at = datetime.now(UTC)

    facts = await extract_salient_facts(
        user_message=user_message,
        assistant_message=assistant_message,
        openai_base_url=openai_base_url,
        api_key=api_key,
        model=model,
    )
    LOGGER.info(
        "Fact extraction produced %d facts in %.1f ms (conversation=%s)",
        len(facts),
        _elapsed_ms(fact_start),
        conversation_id,
    )
    to_add, to_delete, replacement_map = await reconcile_facts(
        collection,
        conversation_id,
        facts,
        source_id=effective_source_id,
        created_at=fact_created_at,
        openai_base_url=openai_base_url,
        api_key=api_key,
        model=model,
    )

    # Deletions first so an updated fact's old file is tombstoned before its
    # replacement is written.
    if to_delete:
        delete_entries(collection, ids=list(to_delete))
        delete_memory_files(
            memory_root,
            conversation_id,
            list(to_delete),
            replacement_map=replacement_map,
        )

    if to_add:
        persist_entries(
            collection,
            memory_root=memory_root,
            conversation_id=conversation_id,
            entries=list(to_add),
        )

    if enable_summarization:
        # Merge the newly extracted facts into the rolling conversation summary.
        prior_summary_entry = get_summary_entry(
            collection,
            conversation_id,
            role=_SUMMARY_ROLE,
        )
        prior_summary = prior_summary_entry.content if prior_summary_entry else None

        summary_start = perf_counter()
        new_summary = await update_summary(
            prior_summary=prior_summary,
            new_facts=facts,
            openai_base_url=openai_base_url,
            api_key=api_key,
            model=model,
        )
        LOGGER.info(
            "Summary update completed in %.1f ms (conversation=%s)",
            _elapsed_ms(summary_start),
            conversation_id,
        )
        if new_summary:
            summary_obj = Summary(
                conversation_id=conversation_id,
                content=new_summary,
                created_at=datetime.now(UTC),
            )
            persist_summary(
                collection,
                memory_root=memory_root,
                summary=summary_obj,
            )

    if enable_git_versioning:
        # Record the whole batch (deletes, adds, summary) as one commit.
        await commit_changes(memory_root, f"Add facts to conversation {conversation_id}")
|