agent-cli 0.61.2__py3-none-any.whl → 0.70.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_cli/_extras.json +13 -0
- agent_cli/_requirements/.gitkeep +0 -0
- agent_cli/_requirements/audio.txt +79 -0
- agent_cli/_requirements/faster-whisper.txt +215 -0
- agent_cli/_requirements/kokoro.txt +425 -0
- agent_cli/_requirements/llm.txt +183 -0
- agent_cli/_requirements/memory.txt +355 -0
- agent_cli/_requirements/mlx-whisper.txt +222 -0
- agent_cli/_requirements/piper.txt +176 -0
- agent_cli/_requirements/rag.txt +402 -0
- agent_cli/_requirements/server.txt +154 -0
- agent_cli/_requirements/speed.txt +77 -0
- agent_cli/_requirements/vad.txt +155 -0
- agent_cli/agents/assistant.py +3 -1
- agent_cli/agents/autocorrect.py +5 -2
- agent_cli/agents/chat.py +3 -1
- agent_cli/agents/memory/__init__.py +2 -1
- agent_cli/agents/memory/add.py +2 -0
- agent_cli/agents/memory/proxy.py +7 -12
- agent_cli/agents/rag_proxy.py +5 -10
- agent_cli/agents/speak.py +3 -1
- agent_cli/agents/transcribe.py +7 -2
- agent_cli/agents/transcribe_daemon.py +3 -1
- agent_cli/agents/voice_edit.py +3 -1
- agent_cli/cli.py +19 -3
- agent_cli/config_cmd.py +1 -0
- agent_cli/core/chroma.py +4 -4
- agent_cli/core/deps.py +177 -25
- agent_cli/core/openai_proxy.py +9 -4
- agent_cli/core/process.py +2 -2
- agent_cli/core/reranker.py +5 -4
- agent_cli/core/utils.py +5 -3
- agent_cli/core/vad.py +2 -1
- agent_cli/core/watch.py +8 -6
- agent_cli/dev/cli.py +31 -34
- agent_cli/dev/coding_agents/base.py +1 -2
- agent_cli/dev/skill/SKILL.md +141 -0
- agent_cli/dev/skill/examples.md +571 -0
- agent_cli/dev/worktree.py +53 -5
- agent_cli/docs_gen.py +12 -42
- agent_cli/install/__init__.py +1 -1
- agent_cli/install/extras.py +174 -0
- agent_cli/memory/__init__.py +1 -18
- agent_cli/memory/_files.py +4 -1
- agent_cli/memory/_indexer.py +3 -2
- agent_cli/memory/_ingest.py +6 -5
- agent_cli/memory/_retrieval.py +18 -8
- agent_cli/memory/_streaming.py +2 -2
- agent_cli/memory/api.py +1 -1
- agent_cli/memory/client.py +1 -1
- agent_cli/memory/engine.py +1 -1
- agent_cli/rag/__init__.py +0 -19
- agent_cli/rag/_indexer.py +3 -2
- agent_cli/rag/api.py +1 -0
- agent_cli/scripts/.runtime/.gitkeep +0 -0
- agent_cli/scripts/check_plugin_skill_sync.py +50 -0
- agent_cli/scripts/sync_extras.py +138 -0
- agent_cli/server/cli.py +26 -24
- agent_cli/server/common.py +3 -4
- agent_cli/server/tts/api.py +1 -1
- agent_cli/server/whisper/backends/faster_whisper.py +30 -23
- agent_cli/server/whisper/wyoming_handler.py +22 -27
- agent_cli/services/_wyoming_utils.py +4 -2
- agent_cli/services/asr.py +13 -3
- agent_cli/services/llm.py +2 -1
- agent_cli/services/tts.py +5 -2
- agent_cli/services/wake_word.py +6 -3
- {agent_cli-0.61.2.dist-info → agent_cli-0.70.2.dist-info}/METADATA +168 -73
- {agent_cli-0.61.2.dist-info → agent_cli-0.70.2.dist-info}/RECORD +72 -54
- {agent_cli-0.61.2.dist-info → agent_cli-0.70.2.dist-info}/WHEEL +1 -2
- agent_cli-0.61.2.dist-info/top_level.txt +0 -1
- {agent_cli-0.61.2.dist-info → agent_cli-0.70.2.dist-info}/entry_points.txt +0 -0
- {agent_cli-0.61.2.dist-info → agent_cli-0.70.2.dist-info}/licenses/LICENSE +0 -0
agent_cli/dev/worktree.py
CHANGED

```diff
@@ -55,6 +55,15 @@ def is_git_repo(path: Path | None = None) -> bool:
         return False
 
 
+def has_origin_remote(path: Path | None = None) -> bool:
+    """Check if the repository has an 'origin' remote configured."""
+    try:
+        result = _run_git("remote", "get-url", "origin", cwd=path, check=False)
+        return result.returncode == 0
+    except Exception:
+        return False
+
+
 def get_repo_root(path: Path | None = None) -> Path | None:
     """Get the root directory of the git repository."""
     try:
@@ -461,6 +470,35 @@ def _init_submodules(
     )
 
 
+def _pull_lfs(
+    worktree_path: Path,
+    *,
+    on_log: Callable[[str], None] | None = None,
+    capture_output: bool = True,
+) -> None:
+    """Pull Git LFS files in a worktree if LFS is used.
+
+    Evidence: https://git-lfs.com/ - `git lfs pull` fetches LFS objects.
+    This is a no-op if LFS is not used or files are already present.
+    """
+    # Check if .gitattributes contains LFS filters
+    gitattributes = worktree_path / ".gitattributes"
+    if not gitattributes.exists():
+        return
+
+    if "filter=lfs" not in gitattributes.read_text():
+        return
+
+    # Check if git-lfs is installed
+    if not shutil.which("git-lfs"):
+        return
+
+    if on_log:
+        on_log("Pulling Git LFS files...")
+
+    _run_git("lfs", "pull", cwd=worktree_path, check=False, capture_output=capture_output)
+
+
 def _add_worktree(
     branch_name: str,
     worktree_path: Path,
@@ -586,8 +624,11 @@ def create_worktree(
     # Create base directory if needed
     base_dir.mkdir(parents=True, exist_ok=True)
 
-    #
-
+    # Check if origin remote exists
+    origin_exists = has_origin_remote(repo_root)
+
+    # Fetch latest refs (only if origin exists)
+    if fetch and origin_exists:
         if on_log:
             on_log("Running: git fetch origin")
         _run_git("fetch", "origin", cwd=repo_root, check=False, capture_output=capture_output)
@@ -596,10 +637,10 @@ def create_worktree(
     from_ref_explicit = from_ref is not None
 
     # Determine the reference to create from
-    # Use origin/{branch}
-    # not a potentially stale local branch
+    # Use origin/{branch} if origin exists, otherwise use local branch
     if from_ref is None:
-
+        default_branch = get_default_branch(repo_root)
+        from_ref = f"origin/{default_branch}" if origin_exists else default_branch
 
     # Check if branch exists remotely or locally
     remote_exists, local_exists = _check_branch_exists(branch_name, repo_root)
@@ -634,6 +675,13 @@ def create_worktree(
         capture_output=capture_output,
     )
 
+    # Pull Git LFS files if the repo uses LFS
+    _pull_lfs(
+        worktree_path,
+        on_log=on_log,
+        capture_output=capture_output,
+    )
+
     return CreateWorktreeResult(
         success=True,
         path=worktree_path,
```
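The new `_pull_lfs` helper only shells out when the worktree actually tracks LFS files and the `git-lfs` binary is on the PATH. A minimal standalone sketch of the same detection logic, using `subprocess` directly instead of the package's private `_run_git` wrapper (the function name and the worktree path below are hypothetical, for illustration only):

```python
import shutil
import subprocess
from pathlib import Path


def pull_lfs_if_needed(worktree: Path) -> bool:
    """Run `git lfs pull` in `worktree` only when LFS is in use. Returns True if a pull ran."""
    gitattributes = worktree / ".gitattributes"
    # LFS-tracked patterns are declared via `filter=lfs` lines in .gitattributes
    if not gitattributes.exists() or "filter=lfs" not in gitattributes.read_text():
        return False
    # Skip silently when git-lfs is not installed
    if not shutil.which("git-lfs"):
        return False
    subprocess.run(["git", "lfs", "pull"], cwd=worktree, check=False)
    return True


if __name__ == "__main__":
    # Hypothetical worktree path for illustration
    print(pull_lfs_if_needed(Path(".")))
```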
agent_cli/docs_gen.py
CHANGED

```diff
@@ -16,7 +16,6 @@ Example usage in Markdown files:
 
 from __future__ import annotations
 
-from pathlib import Path
 from typing import Any, get_origin
 
 import click
@@ -24,6 +23,7 @@ from typer.main import get_command
 
 from agent_cli import opts
 from agent_cli.cli import app
+from agent_cli.install.extras import EXTRAS
 
 
 def _get_type_str(annotation: Any) -> str:
@@ -368,47 +368,6 @@ def config_example(command_path: str | None = None) -> str:
     return "\n".join(lines)
 
 
-def readme_section(section_name: str) -> str:
-    """Extract a section from README.md for reuse in other docs.
-
-    Sections are marked with HTML comments like:
-    <!-- SECTION:section_name:START -->
-    Content here...
-    <!-- SECTION:section_name:END -->
-
-    Args:
-        section_name: The name of the section to extract (e.g., "why-i-built-this")
-
-    Returns:
-        The content between the section markers (without the markers themselves)
-
-    """
-    # Find the README.md relative to this module
-    readme_path = Path(__file__).parent.parent / "README.md"
-    if not readme_path.exists():
-        return f"*Could not find README.md at {readme_path}*"
-
-    content = readme_path.read_text()
-
-    # Look for section markers
-    start_marker = f"<!-- SECTION:{section_name}:START -->"
-    end_marker = f"<!-- SECTION:{section_name}:END -->"
-
-    start_idx = content.find(start_marker)
-    if start_idx == -1:
-        return f"*Section '{section_name}' not found in README.md*"
-
-    end_idx = content.find(end_marker, start_idx)
-    if end_idx == -1:
-        return f"*End marker for section '{section_name}' not found in README.md*"
-
-    # Extract content between markers (excluding the markers themselves)
-    section_content = content[start_idx + len(start_marker) : end_idx]
-
-    # Strip leading/trailing whitespace but preserve internal formatting
-    return section_content.strip()
-
-
 def all_options_for_docs(command_path: str) -> str:
     """Generate complete options documentation for a command page.
 
@@ -430,6 +389,17 @@ def all_options_for_docs(command_path: str) -> str:
     )
 
 
+def extras_table() -> str:
+    """Generate a table of available extras for install-extras command."""
+    lines = [
+        "| Extra | Description |",
+        "|-------|-------------|",
+    ]
+    for name, description in EXTRAS.items():
+        lines.append(f"| `{name}` | {description} |")
+    return "\n".join(lines)
+
+
 if __name__ == "__main__":
     # Demo: print options for transcribe command
     print("=== Available Commands ===")
```
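The added `extras_table()` simply renders the `EXTRAS` mapping as a Markdown table for the docs. A quick sketch with a made-up mapping (the real descriptions come from `agent_cli.core.deps`, so the entries below are illustrative only):

```python
# Hypothetical subset of the EXTRAS mapping, for illustration.
EXTRAS = {
    "rag": "Retrieval-augmented generation support",
    "memory": "Persistent memory backend",
}


def extras_table() -> str:
    """Render the extras mapping as a Markdown table (mirrors docs_gen.extras_table)."""
    lines = ["| Extra | Description |", "|-------|-------------|"]
    for name, description in EXTRAS.items():
        lines.append(f"| `{name}` | {description} |")
    return "\n".join(lines)


print(extras_table())
# | Extra | Description |
# |-------|-------------|
# | `rag` | Retrieval-augmented generation support |
# | `memory` | Persistent memory backend |
```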
agent_cli/install/extras.py
ADDED

```diff
@@ -0,0 +1,174 @@
+"""Install optional extras at runtime with pinned versions."""
+
+from __future__ import annotations
+
+import shutil
+import subprocess
+import sys
+import tomllib
+from importlib.metadata import version as get_version
+from pathlib import Path
+from typing import Annotated
+
+import typer
+
+from agent_cli.cli import app
+from agent_cli.core.deps import EXTRAS as _EXTRAS_META
+from agent_cli.core.utils import console, print_error_message
+
+# Extract descriptions from the centralized EXTRAS metadata
+EXTRAS: dict[str, str] = {name: desc for name, (desc, _) in _EXTRAS_META.items()}
+
+
+def _requirements_dir() -> Path:
+    return Path(__file__).parent.parent / "_requirements"
+
+
+def _available_extras() -> list[str]:
+    """List available extras based on requirements files."""
+    req_dir = _requirements_dir()
+    if not req_dir.exists():
+        return []
+    return sorted(p.stem for p in req_dir.glob("*.txt"))
+
+
+def _requirements_path(extra: str) -> Path:
+    return _requirements_dir() / f"{extra}.txt"
+
+
+def _in_virtualenv() -> bool:
+    """Check if running inside a virtual environment."""
+    return sys.prefix != sys.base_prefix
+
+
+def _is_uv_tool_install() -> bool:
+    """Check if running from a uv tool environment."""
+    receipt = Path(sys.prefix) / "uv-receipt.toml"
+    return receipt.exists()
+
+
+def _get_current_uv_tool_extras() -> list[str]:
+    """Get extras currently configured in uv-receipt.toml."""
+    receipt = Path(sys.prefix) / "uv-receipt.toml"
+    if not receipt.exists():
+        return []
+    data = tomllib.loads(receipt.read_text())
+    requirements = data.get("tool", {}).get("requirements", [])
+    for req in requirements:
+        if req.get("name") == "agent-cli":
+            return req.get("extras", [])
+    return []
+
+
+def _install_via_uv_tool(extras: list[str], *, quiet: bool = False) -> bool:
+    """Reinstall agent-cli via uv tool with the specified extras."""
+    current_version = get_version("agent-cli").split("+")[0]  # Strip local version
+    extras_str = ",".join(extras)
+    package_spec = f"agent-cli[{extras_str}]=={current_version}"
+    python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+    cmd = ["uv", "tool", "install", package_spec, "--force", "--python", python_version]
+    if quiet:
+        cmd.append("-q")
+    console.print(f"Running: [cyan]{' '.join(cmd)}[/]")
+    result = subprocess.run(cmd, check=False)
+    return result.returncode == 0
+
+
+def _install_cmd() -> list[str]:
+    """Build the install command with appropriate flags."""
+    in_venv = _in_virtualenv()
+    if shutil.which("uv"):
+        cmd = ["uv", "pip", "install", "--python", sys.executable]
+        if not in_venv:
+            # Allow installing to system Python when not in a venv
+            cmd.append("--system")
+        return cmd
+    cmd = [sys.executable, "-m", "pip", "install"]
+    if not in_venv:
+        # Install to user site-packages when not in a venv
+        cmd.append("--user")
+    return cmd
+
+
+def _install_extras_impl(extras: list[str], *, quiet: bool = False) -> bool:
+    """Install extras. Returns True on success, False on failure."""
+    if _is_uv_tool_install():
+        current_extras = _get_current_uv_tool_extras()
+        new_extras = sorted(set(current_extras) | set(extras))
+        return _install_via_uv_tool(new_extras, quiet=quiet)
+
+    cmd = _install_cmd()
+    for extra in extras:
+        req_file = _requirements_path(extra)
+        if not quiet:
+            console.print(f"Installing [cyan]{extra}[/]...")
+        result = subprocess.run(
+            [*cmd, "-r", str(req_file)],
+            check=False,
+            capture_output=quiet,
+        )
+        if result.returncode != 0:
+            return False
+    return True
+
+
+def install_extras_programmatic(extras: list[str], *, quiet: bool = False) -> bool:
+    """Install extras programmatically (for auto-install feature)."""
+    available = _available_extras()
+    valid = [e for e in extras if e in available]
+    invalid = [e for e in extras if e not in available]
+    if invalid:
+        console.print(f"[yellow]Unknown extras (skipped): {', '.join(invalid)}[/]")
+    return bool(valid) and _install_extras_impl(valid, quiet=quiet)
+
+
+@app.command("install-extras", rich_help_panel="Installation", no_args_is_help=True)
+def install_extras(
+    extras: Annotated[list[str] | None, typer.Argument(help="Extras to install")] = None,
+    list_extras: Annotated[
+        bool,
+        typer.Option("--list", "-l", help="List available extras"),
+    ] = False,
+    all_extras: Annotated[
+        bool,
+        typer.Option("--all", "-a", help="Install all available extras"),
+    ] = False,
+) -> None:
+    """Install optional extras (rag, memory, vad, etc.) with pinned versions.
+
+    Examples:
+    - `agent-cli install-extras rag`  # Install RAG dependencies
+    - `agent-cli install-extras memory vad`  # Install multiple extras
+    - `agent-cli install-extras --list`  # Show available extras
+    - `agent-cli install-extras --all`  # Install all extras
+
+    """
+    available = _available_extras()
+
+    if list_extras:
+        console.print("[bold]Available extras:[/]")
+        for name in available:
+            desc = EXTRAS.get(name, "")
+            console.print(f"  [cyan]{name}[/]: {desc}")
+        return
+
+    if all_extras:
+        extras = available
+
+    if not extras:
+        print_error_message("No extras specified. Use --list to see available, or --all.")
+        raise typer.Exit(1)
+
+    invalid = [e for e in extras if e not in available]
+    if invalid:
+        print_error_message(f"Unknown extras: {invalid}. Use --list to see available.")
+        raise typer.Exit(1)
+
+    if not _install_extras_impl(extras):
+        print_error_message("Failed to install extras")
+        raise typer.Exit(1)
+
+    if _is_uv_tool_install():
+        console.print("[green]Done! Extras will persist across uv tool upgrade.[/]")
+    else:
+        console.print("[green]Done![/]")
```
agent_cli/memory/__init__.py
CHANGED

```diff
@@ -2,23 +2,6 @@
 
 from __future__ import annotations
 
-from agent_cli.core.deps import ensure_optional_dependencies
-
-_REQUIRED_DEPS = {
-    "chromadb": "chromadb",
-    "fastapi": "fastapi",
-    "uvicorn": "uvicorn",
-    "onnxruntime": "onnxruntime",
-    "huggingface_hub": "huggingface-hub",
-    "transformers": "transformers",
-}
-
-ensure_optional_dependencies(
-    _REQUIRED_DEPS,
-    extra_name="memory",
-    install_hint="`pip install agent-cli[memory]` or `uv sync --extra memory`",
-)
-
-from agent_cli.memory.client import MemoryClient  # noqa: E402
+from agent_cli.memory.client import MemoryClient
 
 __all__ = ["MemoryClient"]
```
agent_cli/memory/_files.py
CHANGED

```diff
@@ -9,7 +9,6 @@ from pathlib import Path
 from typing import TYPE_CHECKING
 from uuid import uuid4
 
-import yaml
 from pydantic import ValidationError
 
 from agent_cli.core.utils import atomic_write_text
@@ -218,6 +217,8 @@ def load_snapshot(snapshot_path: Path) -> dict[str, MemoryFileRecord]:
 
 def _render_front_matter(doc_id: str, metadata: MemoryMetadata) -> str:
     """Return YAML front matter string."""
+    import yaml  # noqa: PLC0415
+
     meta_dict = metadata.model_dump(exclude_none=True)
     meta_dict = {"id": doc_id, **meta_dict}
     yaml_block = yaml.safe_dump(meta_dict, sort_keys=False)
@@ -233,6 +234,8 @@ def _split_front_matter(text: str) -> tuple[dict | None, str]:
         return None, text
     yaml_part = text[3:end]
     try:
+        import yaml  # noqa: PLC0415
+
         meta = yaml.safe_load(yaml_part) or {}
     except Exception:
         return None, text
```
agent_cli/memory/_indexer.py
CHANGED

```diff
@@ -6,8 +6,6 @@ import logging
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING
 
-from watchfiles import Change
-
 from agent_cli.core.watch import watch_directory
 from agent_cli.memory._files import (
     _DELETED_DIRNAME,
@@ -24,6 +22,7 @@ if TYPE_CHECKING:
     from pathlib import Path
 
     from chromadb import Collection
+    from watchfiles import Change
 
 LOGGER = logging.getLogger(__name__)
 
@@ -108,6 +107,8 @@ async def watch_memory_store(collection: Collection, root: Path, *, index: MemoryIndex
 
 
 def _handle_change(change: Change, path: Path, collection: Collection, index: MemoryIndex) -> None:
+    from watchfiles import Change  # noqa: PLC0415
+
     if path.suffix == ".tmp":
         return
 
```
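Both indexers now keep `watchfiles.Change` under `if TYPE_CHECKING:` for annotations and re-import it at runtime inside the change handler. A minimal sketch of that split (function name is illustrative); note it imports cleanly without watchfiles installed, and only calling the handler requires it:

```python
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; watchfiles is not needed to import this module.
    from watchfiles import Change


def handle_change(change: Change, path: Path) -> str:
    """Classify a filesystem event (mirrors the deferred-import pattern)."""
    # Deferred runtime import: required only once an event is actually processed.
    from watchfiles import Change

    if change == Change.deleted:
        return f"deleted: {path.name}"
    return f"changed: {path.name}"
```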
agent_cli/memory/_ingest.py
CHANGED

```diff
@@ -9,8 +9,6 @@ from time import perf_counter
 from typing import TYPE_CHECKING
 from uuid import uuid4
 
-import httpx
-
 from agent_cli.memory._git import commit_changes
 from agent_cli.memory._persistence import delete_memory_files, persist_entries, persist_summary
 from agent_cli.memory._prompt import (
@@ -58,6 +56,7 @@ async def extract_salient_facts(
     if not user_message and not assistant_message:
         return []
 
+    import httpx  # noqa: PLC0415
     from pydantic_ai import Agent  # noqa: PLC0415
     from pydantic_ai.exceptions import AgentRunError, UnexpectedModelBehavior  # noqa: PLC0415
     from pydantic_ai.models.openai import OpenAIChatModel  # noqa: PLC0415
@@ -174,16 +173,18 @@ async def reconcile_facts(
             if f.strip()
         ]
         return entries, [], {}
-    id_map: dict[int, str] = {idx: mem.id for idx, mem in enumerate(existing)}
-    existing_json = [{"id": idx, "text": mem.content} for idx, mem in enumerate(existing)]
-    existing_ids = set(id_map.keys())
 
+    import httpx  # noqa: PLC0415
     from pydantic_ai import Agent, ModelRetry, PromptedOutput  # noqa: PLC0415
     from pydantic_ai.exceptions import AgentRunError, UnexpectedModelBehavior  # noqa: PLC0415
     from pydantic_ai.models.openai import OpenAIChatModel  # noqa: PLC0415
     from pydantic_ai.providers.openai import OpenAIProvider  # noqa: PLC0415
     from pydantic_ai.settings import ModelSettings  # noqa: PLC0415
 
+    id_map: dict[int, str] = {idx: mem.id for idx, mem in enumerate(existing)}
+    existing_json = [{"id": idx, "text": mem.content} for idx, mem in enumerate(existing)]
+    existing_ids = set(id_map.keys())
+
     provider = OpenAIProvider(api_key=api_key or "dummy", base_url=openai_base_url)
     model_cfg = OpenAIChatModel(
         model_name=model,
```
agent_cli/memory/_retrieval.py
CHANGED

```diff
@@ -25,6 +25,7 @@ LOGGER = logging.getLogger(__name__)
 
 _DEFAULT_MMR_LAMBDA = 0.7
 _SUMMARY_ROLE = "summary"
+_MIN_MAX_EPSILON = 1e-8  # Avoid division by zero in min-max normalization
 
 
 def gather_relevant_existing_memories(
@@ -135,7 +136,7 @@ def retrieve_memory(
     include_summary: bool = True,
     mmr_lambda: float = _DEFAULT_MMR_LAMBDA,
     recency_weight: float = 0.2,
-    score_threshold: float =
+    score_threshold: float | None = None,
     filters: dict[str, Any] | None = None,
 ) -> tuple[MemoryRetrieval, list[str]]:
     """Execute search + rerank + recency + MMR."""
@@ -161,8 +162,15 @@ def retrieve_memory(
         seen_ids.add(rec_id)
         raw_candidates.append(rec)
 
-    def
-
+    def _min_max_normalize(scores: list[float]) -> list[float]:
+        """Normalize scores to 0-1 range using min-max scaling."""
+        if not scores:
+            return scores
+        min_score = min(scores)
+        max_score = max(scores)
+        if max_score - min_score < _MIN_MAX_EPSILON:
+            return [0.5] * len(scores)  # All scores equal
+        return [(s - min_score) / (max_score - min_score) for s in scores]
 
     def recency_score(meta: MemoryMetadata) -> float:
         dt = datetime.fromisoformat(meta.created_at)
@@ -176,10 +184,12 @@ def retrieve_memory(
     if raw_candidates:
         pairs = [(query, mem.content) for mem in raw_candidates]
         rr_scores = predict_relevance(reranker_model, pairs)
-
-
-
-
+        # Normalize raw reranker scores to 0-1 range
+        normalized_scores = _min_max_normalize(rr_scores)
+
+        for mem, relevance in zip(raw_candidates, normalized_scores, strict=False):
+            # Filter out low-relevance memories if threshold is set
+            if score_threshold is not None and relevance < score_threshold:
                 continue
 
             recency = recency_score(mem.metadata)
@@ -235,7 +245,7 @@ async def augment_chat_request(
     include_global: bool = True,
     mmr_lambda: float = _DEFAULT_MMR_LAMBDA,
     recency_weight: float = 0.2,
-    score_threshold: float =
+    score_threshold: float | None = None,
     filters: dict[str, Any] | None = None,
 ) -> tuple[ChatRequest, MemoryRetrieval | None, str, list[str]]:
     """Retrieve memory context and augment the chat request."""
```
agent_cli/memory/_streaming.py
CHANGED

```diff
@@ -4,8 +4,6 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-import httpx
-
 from agent_cli.core.sse import extract_content_from_chunk, parse_chunk
 
 if TYPE_CHECKING:
@@ -20,6 +18,8 @@ async def stream_chat_sse(
     request_timeout: float = 120.0,
 ) -> AsyncGenerator[str, None]:
     """Stream Server-Sent Events from an OpenAI-compatible chat completion endpoint."""
+    import httpx  # noqa: PLC0415
+
     url = f"{openai_base_url.rstrip('/')}/chat/completions"
     async with (
         httpx.AsyncClient(timeout=request_timeout) as client,
```
agent_cli/memory/api.py
CHANGED

```diff
@@ -30,7 +30,7 @@ def create_app(
     max_entries: int = 500,
     mmr_lambda: float = 0.7,
     recency_weight: float = 0.2,
-    score_threshold: float =
+    score_threshold: float | None = None,
     enable_git_versioning: bool = True,
 ) -> FastAPI:
     """Create the FastAPI app for memory-backed chat."""
```
agent_cli/memory/client.py
CHANGED

```diff
@@ -49,7 +49,7 @@ class MemoryClient:
         max_entries: int = 500,
         mmr_lambda: float = 0.7,
         recency_weight: float = 0.2,
-        score_threshold: float =
+        score_threshold: float | None = None,
         start_watcher: bool = False,
         enable_git_versioning: bool = True,
     ) -> None:
```
agent_cli/memory/engine.py
CHANGED

```diff
@@ -236,7 +236,7 @@ async def process_chat_request(
     max_entries: int = _DEFAULT_MAX_ENTRIES,
     mmr_lambda: float = _DEFAULT_MMR_LAMBDA,
     recency_weight: float = 0.2,
-    score_threshold: float =
+    score_threshold: float | None = None,
     postprocess_in_background: bool = True,
     enable_git_versioning: bool = False,
     filters: dict[str, Any] | None = None,
```
agent_cli/rag/__init__.py
CHANGED

```diff
@@ -1,22 +1,3 @@
 """RAG module."""
 
 from __future__ import annotations
-
-from agent_cli.core.deps import ensure_optional_dependencies
-
-_REQUIRED_DEPS = {
-    "chromadb": "chromadb",
-    "watchfiles": "watchfiles",
-    "markitdown": "markitdown",
-    "fastapi": "fastapi",
-    "uvicorn": "uvicorn",
-    "onnxruntime": "onnxruntime",
-    "huggingface_hub": "huggingface-hub",
-    "transformers": "transformers",
-}
-
-ensure_optional_dependencies(
-    _REQUIRED_DEPS,
-    extra_name="rag",
-    install_hint="`pip install agent-cli[rag]` or `uv sync --extra rag`",
-)
```
agent_cli/rag/_indexer.py
CHANGED

```diff
@@ -5,8 +5,6 @@ from __future__ import annotations
 import logging
 from typing import TYPE_CHECKING
 
-from watchfiles import Change
-
 from agent_cli.core.watch import watch_directory
 from agent_cli.rag._indexing import index_file, remove_file
 from agent_cli.rag._utils import should_ignore_path
@@ -15,6 +13,7 @@ if TYPE_CHECKING:
     from pathlib import Path
 
     from chromadb import Collection
+    from watchfiles import Change
 
 LOGGER = logging.getLogger(__name__)
 
@@ -50,6 +49,8 @@ def _handle_change(
     file_hashes: dict[str, str],
     file_mtimes: dict[str, float],
 ) -> None:
+    from watchfiles import Change  # noqa: PLC0415
+
     try:
         if change == Change.deleted:
             LOGGER.info("[deleted] Removing from index: %s", file_path.name)
```
agent_cli/rag/api.py
CHANGED
File without changes