tribalmemory 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tribalmemory/__init__.py +3 -0
- tribalmemory/a21/__init__.py +38 -0
- tribalmemory/a21/config/__init__.py +20 -0
- tribalmemory/a21/config/providers.py +104 -0
- tribalmemory/a21/config/system.py +184 -0
- tribalmemory/a21/container/__init__.py +8 -0
- tribalmemory/a21/container/container.py +212 -0
- tribalmemory/a21/providers/__init__.py +32 -0
- tribalmemory/a21/providers/base.py +241 -0
- tribalmemory/a21/providers/deduplication.py +99 -0
- tribalmemory/a21/providers/lancedb.py +232 -0
- tribalmemory/a21/providers/memory.py +128 -0
- tribalmemory/a21/providers/mock.py +54 -0
- tribalmemory/a21/providers/openai.py +151 -0
- tribalmemory/a21/providers/timestamp.py +88 -0
- tribalmemory/a21/system.py +293 -0
- tribalmemory/cli.py +298 -0
- tribalmemory/interfaces.py +306 -0
- tribalmemory/mcp/__init__.py +9 -0
- tribalmemory/mcp/__main__.py +6 -0
- tribalmemory/mcp/server.py +484 -0
- tribalmemory/performance/__init__.py +1 -0
- tribalmemory/performance/benchmarks.py +285 -0
- tribalmemory/performance/corpus_generator.py +171 -0
- tribalmemory/portability/__init__.py +1 -0
- tribalmemory/portability/embedding_metadata.py +320 -0
- tribalmemory/server/__init__.py +9 -0
- tribalmemory/server/__main__.py +6 -0
- tribalmemory/server/app.py +187 -0
- tribalmemory/server/config.py +115 -0
- tribalmemory/server/models.py +206 -0
- tribalmemory/server/routes.py +378 -0
- tribalmemory/services/__init__.py +15 -0
- tribalmemory/services/deduplication.py +115 -0
- tribalmemory/services/embeddings.py +273 -0
- tribalmemory/services/import_export.py +506 -0
- tribalmemory/services/memory.py +275 -0
- tribalmemory/services/vector_store.py +360 -0
- tribalmemory/testing/__init__.py +22 -0
- tribalmemory/testing/embedding_utils.py +110 -0
- tribalmemory/testing/fixtures.py +123 -0
- tribalmemory/testing/metrics.py +256 -0
- tribalmemory/testing/mocks.py +560 -0
- tribalmemory/testing/semantic_expansions.py +91 -0
- tribalmemory/utils.py +23 -0
- tribalmemory-0.1.0.dist-info/METADATA +275 -0
- tribalmemory-0.1.0.dist-info/RECORD +51 -0
- tribalmemory-0.1.0.dist-info/WHEEL +5 -0
- tribalmemory-0.1.0.dist-info/entry_points.txt +3 -0
- tribalmemory-0.1.0.dist-info/licenses/LICENSE +190 -0
- tribalmemory-0.1.0.dist-info/top_level.txt +1 -0
tribalmemory/cli.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
"""Tribal Memory CLI — init, serve, and MCP entry points.
|
|
2
|
+
|
|
3
|
+
Usage:
|
|
4
|
+
tribalmemory init [--local] # Set up config + MCP integration
|
|
5
|
+
tribalmemory serve # Start the HTTP server
|
|
6
|
+
tribalmemory mcp # Start the MCP server (stdio)
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import argparse
|
|
10
|
+
import json
|
|
11
|
+
import os
|
|
12
|
+
import sys
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
try:
|
|
16
|
+
import tomllib # Python 3.11+
|
|
17
|
+
except ImportError:
|
|
18
|
+
tomllib = None # type: ignore
|
|
19
|
+
|
|
20
|
+
try:
|
|
21
|
+
import tomli_w # For writing TOML
|
|
22
|
+
except ImportError:
|
|
23
|
+
tomli_w = None # type: ignore
|
|
24
|
+
|
|
25
|
+
TRIBAL_DIR = Path.home() / ".tribal-memory"
|
|
26
|
+
CONFIG_FILE = TRIBAL_DIR / "config.yaml"
|
|
27
|
+
DEFAULT_INSTANCE_ID = "default"
|
|
28
|
+
|
|
29
|
+
# MCP config for Claude Code (claude_desktop_config.json)
|
|
30
|
+
CLAUDE_CODE_MCP_CONFIG = {
|
|
31
|
+
"mcpServers": {
|
|
32
|
+
"tribal-memory": {
|
|
33
|
+
"command": "tribalmemory-mcp",
|
|
34
|
+
"env": {}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
OPENAI_CONFIG_TEMPLATE = """\
|
|
40
|
+
# Tribal Memory Configuration
|
|
41
|
+
# Docs: https://github.com/abbudjoe/TribalMemory
|
|
42
|
+
|
|
43
|
+
instance_id: {instance_id}
|
|
44
|
+
|
|
45
|
+
embedding:
|
|
46
|
+
provider: openai
|
|
47
|
+
model: text-embedding-3-small
|
|
48
|
+
dimensions: 1536
|
|
49
|
+
# api_key: sk-... # Or set OPENAI_API_KEY env var
|
|
50
|
+
|
|
51
|
+
db:
|
|
52
|
+
provider: lancedb
|
|
53
|
+
path: {db_path}
|
|
54
|
+
|
|
55
|
+
server:
|
|
56
|
+
host: 127.0.0.1
|
|
57
|
+
port: 18790
|
|
58
|
+
"""
|
|
59
|
+
|
|
60
|
+
LOCAL_CONFIG_TEMPLATE = """\
|
|
61
|
+
# Tribal Memory Configuration — Local Mode (Zero Cloud)
|
|
62
|
+
# Uses Ollama for embeddings — no API keys needed!
|
|
63
|
+
# Docs: https://github.com/abbudjoe/TribalMemory
|
|
64
|
+
|
|
65
|
+
instance_id: {instance_id}
|
|
66
|
+
|
|
67
|
+
embedding:
|
|
68
|
+
provider: openai # Uses OpenAI-compatible API
|
|
69
|
+
model: nomic-embed-text # Run: ollama pull nomic-embed-text
|
|
70
|
+
api_base: http://localhost:11434/v1
|
|
71
|
+
dimensions: 768
|
|
72
|
+
# api_key not needed for local Ollama
|
|
73
|
+
|
|
74
|
+
db:
|
|
75
|
+
provider: lancedb
|
|
76
|
+
path: {db_path}
|
|
77
|
+
|
|
78
|
+
server:
|
|
79
|
+
host: 127.0.0.1
|
|
80
|
+
port: 18790
|
|
81
|
+
"""
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def cmd_init(args: argparse.Namespace) -> int:
    """Initialize Tribal Memory config and MCP integration.

    Writes ~/.tribal-memory/config.yaml (OpenAI or local/Ollama flavor),
    then optionally wires up Claude Code and/or Codex MCP integration.
    Returns a process exit code (0 on success, 1 if config already exists).
    """
    instance_id = args.instance_id or DEFAULT_INSTANCE_ID
    db_path = str(TRIBAL_DIR / "lancedb")

    # Make sure the state directory exists before writing anything into it.
    TRIBAL_DIR.mkdir(parents=True, exist_ok=True)

    # Pick the template for the requested mode and fill in the placeholders.
    template = LOCAL_CONFIG_TEMPLATE if args.local else OPENAI_CONFIG_TEMPLATE
    config_content = template.format(instance_id=instance_id, db_path=db_path)

    # Refuse to clobber an existing config unless --force was given.
    if CONFIG_FILE.exists() and not args.force:
        print(f"⚠️ Config already exists: {CONFIG_FILE}")
        print(" Use --force to overwrite.")
        return 1

    CONFIG_FILE.write_text(config_content)
    print(f"✅ Config written: {CONFIG_FILE}")

    if args.local:
        print()
        print("📦 Local mode — make sure Ollama is running:")
        print(" curl -fsSL https://ollama.com/install.sh | sh")
        print(" ollama pull nomic-embed-text")
        print(" ollama serve # if not already running")

    # Optional MCP client integrations.
    if args.claude_code:
        _setup_claude_code_mcp(args.local)
    if args.codex:
        _setup_codex_mcp(args.local)

    print()
    print("🚀 Start the server:")
    print(" tribalmemory serve")
    print()
    print("🧠 Or use with Claude Code (MCP):")
    print(" tribalmemory-mcp")

    return 0
def _setup_claude_code_mcp(is_local: bool) -> None:
    """Register the tribal-memory MCP server in Claude Code's config file.

    Finds the first existing Claude config file among the known per-platform
    locations (or creates the CLI default), merges in an mcpServers entry,
    and writes the file back as pretty-printed JSON.
    """
    # Known config locations, most-preferred first.
    candidates = [
        Path.home() / ".claude" / "claude_desktop_config.json",  # Claude Code CLI (all platforms)
        Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json",  # Claude Desktop (macOS)
        Path.home() / "AppData" / "Roaming" / "Claude" / "claude_desktop_config.json",  # Claude Desktop (Windows)
    ]

    config_path = next((p for p in candidates if p.exists()), None)
    if config_path is None:
        # Nothing found — fall back to the CLI default location.
        config_path = candidates[0]
        config_path.parent.mkdir(parents=True, exist_ok=True)

    # Load the existing config; a corrupt file is replaced rather than fatal.
    existing: dict = {}
    if config_path.exists():
        try:
            existing = json.loads(config_path.read_text())
        except json.JSONDecodeError as e:
            print(f"⚠️ Existing config has invalid JSON: {e}")
            print(f" Creating fresh config at {config_path}")
            existing = {}

    # Merge our server entry without disturbing other registered servers.
    servers = existing.setdefault("mcpServers", {})
    entry: dict = {
        "command": "tribalmemory-mcp",
        "env": {},
    }
    if is_local:
        # Point the MCP server at the local Ollama endpoint.
        entry["env"]["TRIBAL_MEMORY_EMBEDDING_API_BASE"] = "http://localhost:11434/v1"
    servers["tribal-memory"] = entry

    config_path.write_text(json.dumps(existing, indent=2) + "\n")
    print(f"✅ Claude Code MCP config updated: {config_path}")
def _setup_codex_mcp(is_local: bool) -> None:
    """Add Tribal Memory to Codex CLI's MCP configuration (~/.codex/config.toml).

    Appends an [mcp_servers.tribal-memory] section, creating the file if
    needed. Skips (with a warning) when the section is already present.
    """
    config_path = Path.home() / ".codex" / "config.toml"
    config_path.parent.mkdir(parents=True, exist_ok=True)

    # Codex uses [mcp_servers.name] sections; the TOML is assembled by hand
    # so we don't need a TOML-writer dependency.
    marker = "[mcp_servers.tribal-memory]"
    lines = [
        "",
        "# Tribal Memory — shared memory for AI agents",
        marker,
        'command = "tribalmemory-mcp"',
    ]
    if is_local:
        # Route embeddings through the local Ollama endpoint.
        lines += [
            "",
            "[mcp_servers.tribal-memory.env]",
            'TRIBAL_MEMORY_EMBEDDING_API_BASE = "http://localhost:11434/v1"',
        ]
    block = "\n".join(lines) + "\n"

    if not config_path.exists():
        # Fresh file: drop the leading blank separator line.
        config_path.write_text(block.lstrip("\n"))
    else:
        current = config_path.read_text()
        if marker in current:
            print(f"⚠️ Codex config already has tribal-memory: {config_path}")
            print(" Remove the existing section first, or edit manually.")
            return
        # Append, making sure the existing content ends with a newline first.
        if not current.endswith("\n"):
            current += "\n"
        config_path.write_text(current + block)

    print(f"✅ Codex CLI MCP config updated: {config_path}")
def cmd_serve(args: argparse.Namespace) -> None:
    """Start the HTTP server.

    Config comes from --config when given, otherwise from the environment.
    Host/port/log-level CLI flags are forwarded to run_server.
    """
    # Imported lazily so `tribalmemory init` doesn't pay server import cost.
    from .server.app import run_server
    from .server.config import TribalMemoryConfig

    if args.config:
        config = TribalMemoryConfig.from_file(args.config)
    else:
        config = TribalMemoryConfig.from_env()

    run_server(
        config=config,
        host=args.host,
        port=args.port,
        log_level=args.log_level or "info",
    )
def cmd_mcp(args: argparse.Namespace) -> None:
    """Start the MCP server (stdio)."""
    # Lazy import keeps MCP dependencies out of the other subcommands.
    from .mcp.server import main as mcp_main

    mcp_main()
def main() -> None:
    """Main CLI entry point: parse arguments and dispatch to a subcommand."""
    parser = argparse.ArgumentParser(
        prog="tribalmemory",
        description="Tribal Memory — Shared memory for AI agents",
    )
    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # init: write config + optional MCP client integration
    init_parser = subparsers.add_parser("init", help="Initialize config and MCP integration")
    init_parser.add_argument(
        "--local",
        action="store_true",
        help="Use local Ollama embeddings (no API key needed)",
    )
    init_parser.add_argument(
        "--claude-code",
        action="store_true",
        help="Configure Claude Code MCP integration",
    )
    init_parser.add_argument(
        "--codex",
        action="store_true",
        help="Configure Codex CLI MCP integration",
    )
    init_parser.add_argument(
        "--instance-id",
        type=str,
        default=None,
        help="Instance identifier (default: 'default')",
    )
    init_parser.add_argument(
        "--force",
        action="store_true",
        help="Overwrite existing config",
    )

    # serve: run the HTTP server
    serve_parser = subparsers.add_parser("serve", help="Start the HTTP server")
    serve_parser.add_argument("--host", type=str, default=None)
    serve_parser.add_argument("--port", "-p", type=int, default=None)
    serve_parser.add_argument("--config", "-c", type=str, default=None)
    serve_parser.add_argument(
        "--log-level",
        type=str,
        default=None,
        choices=["debug", "info", "warning", "error"],
    )

    # mcp: run the MCP server over stdio
    subparsers.add_parser("mcp", help="Start the MCP server (stdio transport)")

    args = parser.parse_args()

    # Dispatch with guard clauses; only `init` returns an exit code.
    if args.command == "init":
        sys.exit(cmd_init(args))
    if args.command == "serve":
        cmd_serve(args)
        return
    if args.command == "mcp":
        cmd_mcp(args)
        return

    # No subcommand given: show usage and fail.
    parser.print_help()
    sys.exit(1)


if __name__ == "__main__":
    main()
@@ -0,0 +1,306 @@
|
|
|
1
|
+
"""Core interfaces for Tribal Memory system.
|
|
2
|
+
|
|
3
|
+
These interfaces define the contract that both A2.1 and A2.2 implementations must satisfy.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from enum import Enum
|
|
10
|
+
from typing import Optional
|
|
11
|
+
import uuid
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class MemorySource(Enum):
    """Source of a memory entry.

    Records how a memory came into being, for provenance tracking.
    """

    # Explicitly requested by the user ("remember X").
    USER_EXPLICIT = "user_explicit"
    # Automatically captured when the system detected important info.
    AUTO_CAPTURE = "auto_capture"
    # A correction superseding an earlier memory.
    CORRECTION = "correction"
    # Propagated from another agent instance.
    CROSS_INSTANCE = "cross_instance"
    # Imported from before tribal-memory existed.
    LEGACY = "legacy"
    # Provenance not recorded.
    UNKNOWN = "unknown"
@dataclass
class MemoryEntry:
    """A single memory entry with full provenance.

    Attributes:
        id: Unique identifier (UUID by default)
        content: The actual memory content
        embedding: Vector embedding (None until generated)
        source_instance: Which agent instance created this memory
        source_type: How this memory was captured (user_explicit, auto_capture, etc.)
        created_at: When the memory was first created
        updated_at: When the memory was last modified
        tags: Categorization tags for filtering
        context: What triggered this memory (conversation context, etc.)
        confidence: How confident we are in this memory (0.0-1.0).
            Currently always 1.0; reserved for future use with
            uncertain inferences or low-confidence auto-captures.
        supersedes: ID of memory this corrects (for correction chains)
        related_to: IDs of related memories. Reserved for future use with
            knowledge graphs and memory clustering.
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4()))
    content: str = ""
    embedding: Optional[list[float]] = None

    # Provenance
    source_instance: str = "unknown"
    source_type: MemorySource = MemorySource.UNKNOWN
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12 and yields
    # naive datetimes. datetime.now(timezone.utc) would produce aware values —
    # confirm stored/compared timestamps elsewhere tolerate that before changing.
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)

    # Metadata
    tags: list[str] = field(default_factory=list)
    context: Optional[str] = None
    confidence: float = 1.0

    # Relationships
    supersedes: Optional[str] = None
    related_to: list[str] = field(default_factory=list)

    def __repr__(self) -> str:
        """Concise repr for debugging."""
        # Truncate long content so the repr stays one line in logs.
        content_preview = self.content[:50] + "..." if len(self.content) > 50 else self.content
        return f"MemoryEntry(id={self.id[:8]}..., content='{content_preview}', source={self.source_type.value})"
@dataclass
class RecallResult:
    """Result of a memory recall query.

    Bundles the matched memory with how similar it was to the query and
    how long retrieval took.
    """

    memory: MemoryEntry
    similarity_score: float
    retrieval_time_ms: float

    def __repr__(self) -> str:
        # Shortened memory id keeps the repr compact for logging.
        short_id = self.memory.id[:8]
        return f"RecallResult(score={self.similarity_score:.3f}, memory_id={short_id}...)"
@dataclass
class StoreResult:
    """Result of storing a memory.

    Exactly one of the failure fields is meaningful when success is False:
    duplicate_of when the store was rejected as a duplicate, error otherwise.
    """

    success: bool
    memory_id: Optional[str] = None
    duplicate_of: Optional[str] = None  # If rejected as duplicate
    error: Optional[str] = None

    def __repr__(self) -> str:
        # Guard-clause style: success, then duplicate rejection, then error.
        if self.success:
            short = self.memory_id[:8] if self.memory_id else None
            return f"StoreResult(success=True, id={short}...)"
        if self.duplicate_of:
            return f"StoreResult(success=False, duplicate_of={self.duplicate_of[:8]}...)"
        return f"StoreResult(success=False, error='{self.error}')"
class IEmbeddingService(ABC):
    """Interface for embedding generation.

    Implementations map text to dense float vectors and expose a cosine
    similarity helper for comparing two such vectors.
    """

    @abstractmethod
    async def embed(self, text: str) -> list[float]:
        """Generate the embedding vector for a single text."""
        ...

    @abstractmethod
    async def embed_batch(self, texts: list[str]) -> list[list[float]]:
        """Generate embeddings for multiple texts in one call."""
        ...

    @abstractmethod
    def similarity(self, a: list[float], b: list[float]) -> float:
        """Calculate cosine similarity between two embeddings."""
        ...
class IVectorStore(ABC):
    """Interface for vector storage and retrieval.

    Implementations persist MemoryEntry records alongside their embeddings
    and support similarity search, lookup, soft deletion, listing, and
    counting. `upsert` has a default (non-atomic) implementation.
    """

    @abstractmethod
    async def store(self, entry: MemoryEntry) -> StoreResult:
        """Store a memory entry.

        Returns:
            StoreResult describing success or the reason for rejection.
        """
        pass

    @abstractmethod
    async def recall(
        self,
        query_embedding: list[float],
        limit: int = 10,
        min_similarity: float = 0.7,
        filters: Optional[dict] = None,
    ) -> list[RecallResult]:
        """Recall memories similar to query.

        Args:
            query_embedding: Vector to search for
            limit: Maximum results
            min_similarity: Minimum similarity threshold
            filters: Optional metadata filters (e.g., tags, source_instance)

        Returns:
            Matching memories with similarity scores.
        """
        pass

    @abstractmethod
    async def get(self, memory_id: str) -> Optional[MemoryEntry]:
        """Get a specific memory by ID, or None if not found."""
        pass

    @abstractmethod
    async def delete(self, memory_id: str) -> bool:
        """Delete a memory (soft delete with tombstone).

        Returns:
            True if a memory was deleted.
        """
        pass

    async def upsert(self, entry: MemoryEntry) -> StoreResult:
        """Insert or replace a memory entry by ID.

        Default implementation: delete existing + store new.
        Subclasses may override for atomic upsert support.
        """
        # NOTE: not atomic — a failure between delete and store can lose the
        # previous entry without writing the new one. Override if the backend
        # supports a real atomic upsert.
        await self.delete(entry.id)
        return await self.store(entry)

    @abstractmethod
    async def list(
        self,
        limit: int = 1000,
        offset: int = 0,
        filters: Optional[dict] = None,
    ) -> list[MemoryEntry]:
        """List memories with pagination and optional filtering."""
        pass

    @abstractmethod
    async def count(self, filters: Optional[dict] = None) -> int:
        """Count memories matching filters."""
        pass
class IDeduplicationService(ABC):
    """Interface for detecting duplicate memories.

    Duplicate detection is similarity-based: candidates are compared
    against existing memories via their embeddings against a threshold.
    """

    @abstractmethod
    async def is_duplicate(
        self,
        content: str,
        embedding: list[float],
        threshold: float = 0.95
    ) -> tuple[bool, Optional[str]]:
        """Check if content is duplicate.

        Args:
            content: Candidate memory text
            embedding: Embedding vector of the candidate
            threshold: Similarity above which the candidate counts as a duplicate

        Returns:
            Tuple of (is_duplicate, duplicate_of_id)
            - is_duplicate: True if content exceeds threshold similarity
            - duplicate_of_id: ID of the matching memory (or None)

        .. versionchanged:: 0.2.0
            Return type changed from 3-tuple to 2-tuple. The similarity_score
            was removed from the return value. If you need the similarity score,
            use :meth:`find_similar` to get scored results, or use
            :meth:`get_duplicate_report` for detailed duplicate analysis.

            Migration guide:
                Old: ``is_dup, dup_id, score = await dedup.is_duplicate(...)``
                New: ``is_dup, dup_id = await dedup.is_duplicate(...)``

                To get score: ``results = await dedup.find_similar(content, embedding, threshold)``
                The first result's score is the duplicate's similarity.
        """
        pass

    @abstractmethod
    async def find_similar(
        self,
        content: str,
        embedding: list[float],
        threshold: float = 0.85,
        limit: int = 10,
    ) -> list[tuple[str, float]]:
        """Find similar memories.

        Args:
            content: Candidate memory text
            embedding: Embedding vector of the candidate
            threshold: Minimum similarity to include a match
            limit: Maximum number of matches to return

        Returns:
            List of (memory_id, similarity_score) tuples
        """
        pass
class ITimestampService(ABC):
    """Interface for RFC 3161 timestamping.

    Implementations obtain and verify trusted timestamp tokens over
    arbitrary byte payloads.
    """

    @abstractmethod
    async def timestamp(self, data: bytes) -> bytes:
        """Obtain an RFC 3161 timestamp token for *data*."""
        ...

    @abstractmethod
    async def verify(self, data: bytes, token: bytes) -> tuple[bool, Optional[datetime]]:
        """Verify a timestamp token against *data*.

        Returns:
            (valid, timestamp) — the timestamp is None when invalid.
        """
        ...
class IMemoryService(ABC):
    """High-level interface for memory operations.

    This is the main interface that agents (LLMs) interact with.
    Designed for intuitive use with simple, verb-based methods.
    """

    @abstractmethod
    async def remember(
        self,
        content: str,
        source_type: MemorySource = MemorySource.AUTO_CAPTURE,
        context: Optional[str] = None,
        tags: Optional[list[str]] = None,
        skip_dedup: bool = False,
    ) -> StoreResult:
        """Store a new memory.

        Args:
            content: The memory content
            source_type: How this memory was captured
            context: Additional context about capture
            tags: Tags for categorization and filtering
            skip_dedup: If True, store even if similar memory exists

        Returns:
            StoreResult describing success, duplicate rejection, or error.
        """
        pass

    @abstractmethod
    async def recall(
        self,
        query: str,
        limit: int = 5,
        min_relevance: float = 0.7,
        tags: Optional[list[str]] = None,
    ) -> list[RecallResult]:
        """Recall relevant memories for a query.

        Args:
            query: Natural language query
            limit: Maximum results
            min_relevance: Minimum similarity score
            tags: Filter by tags (e.g., ["work", "preferences"])

        Returns:
            Matching memories with similarity scores, best first.
        """
        pass

    @abstractmethod
    async def correct(
        self,
        original_id: str,
        corrected_content: str,
        context: Optional[str] = None
    ) -> StoreResult:
        """Store a correction to an existing memory.

        Creates a correction chain - the new memory supersedes the original.

        Args:
            original_id: ID of the memory being corrected
            corrected_content: Replacement content
            context: Optional context about why the correction was made
        """
        pass

    @abstractmethod
    async def forget(self, memory_id: str) -> bool:
        """Forget (soft delete) a memory.

        Returns:
            True if a memory was forgotten.
        """
        pass

    @abstractmethod
    async def get(self, memory_id: str) -> Optional[MemoryEntry]:
        """Get a memory by ID with full provenance, or None if not found."""
        pass
@@ -0,0 +1,9 @@
|
|
|
1
|
+
"""MCP server for Tribal Memory.
|
|
2
|
+
|
|
3
|
+
This module provides an MCP (Model Context Protocol) server that exposes
|
|
4
|
+
Tribal Memory as tools for Claude Code and other MCP-compatible clients.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from .server import create_server, main
|
|
8
|
+
|
|
9
|
+
__all__ = ["create_server", "main"]
|