tribalmemory 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tribalmemory/__init__.py +3 -0
- tribalmemory/a21/__init__.py +38 -0
- tribalmemory/a21/config/__init__.py +20 -0
- tribalmemory/a21/config/providers.py +104 -0
- tribalmemory/a21/config/system.py +184 -0
- tribalmemory/a21/container/__init__.py +8 -0
- tribalmemory/a21/container/container.py +212 -0
- tribalmemory/a21/providers/__init__.py +32 -0
- tribalmemory/a21/providers/base.py +241 -0
- tribalmemory/a21/providers/deduplication.py +99 -0
- tribalmemory/a21/providers/lancedb.py +232 -0
- tribalmemory/a21/providers/memory.py +128 -0
- tribalmemory/a21/providers/mock.py +54 -0
- tribalmemory/a21/providers/openai.py +151 -0
- tribalmemory/a21/providers/timestamp.py +88 -0
- tribalmemory/a21/system.py +293 -0
- tribalmemory/cli.py +298 -0
- tribalmemory/interfaces.py +306 -0
- tribalmemory/mcp/__init__.py +9 -0
- tribalmemory/mcp/__main__.py +6 -0
- tribalmemory/mcp/server.py +484 -0
- tribalmemory/performance/__init__.py +1 -0
- tribalmemory/performance/benchmarks.py +285 -0
- tribalmemory/performance/corpus_generator.py +171 -0
- tribalmemory/portability/__init__.py +1 -0
- tribalmemory/portability/embedding_metadata.py +320 -0
- tribalmemory/server/__init__.py +9 -0
- tribalmemory/server/__main__.py +6 -0
- tribalmemory/server/app.py +187 -0
- tribalmemory/server/config.py +115 -0
- tribalmemory/server/models.py +206 -0
- tribalmemory/server/routes.py +378 -0
- tribalmemory/services/__init__.py +15 -0
- tribalmemory/services/deduplication.py +115 -0
- tribalmemory/services/embeddings.py +273 -0
- tribalmemory/services/import_export.py +506 -0
- tribalmemory/services/memory.py +275 -0
- tribalmemory/services/vector_store.py +360 -0
- tribalmemory/testing/__init__.py +22 -0
- tribalmemory/testing/embedding_utils.py +110 -0
- tribalmemory/testing/fixtures.py +123 -0
- tribalmemory/testing/metrics.py +256 -0
- tribalmemory/testing/mocks.py +560 -0
- tribalmemory/testing/semantic_expansions.py +91 -0
- tribalmemory/utils.py +23 -0
- tribalmemory-0.1.0.dist-info/METADATA +275 -0
- tribalmemory-0.1.0.dist-info/RECORD +51 -0
- tribalmemory-0.1.0.dist-info/WHEEL +5 -0
- tribalmemory-0.1.0.dist-info/entry_points.txt +3 -0
- tribalmemory-0.1.0.dist-info/licenses/LICENSE +190 -0
- tribalmemory-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,484 @@
|
|
|
1
|
+
"""MCP server for Tribal Memory.
|
|
2
|
+
|
|
3
|
+
Exposes Tribal Memory as MCP tools for Claude Code and other MCP clients.
|
|
4
|
+
Uses stdio transport for integration with Claude Code.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import os
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Optional
|
|
13
|
+
|
|
14
|
+
from mcp.server.fastmcp import FastMCP
|
|
15
|
+
|
|
16
|
+
from ..interfaces import MemorySource
|
|
17
|
+
from ..server.config import TribalMemoryConfig
|
|
18
|
+
from ..services import create_memory_service, TribalMemoryService
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
# Global service instance (initialized on first use)
|
|
23
|
+
_memory_service: Optional[TribalMemoryService] = None
|
|
24
|
+
_service_lock = asyncio.Lock()
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
async def get_memory_service() -> TribalMemoryService:
    """Return the process-wide memory service, creating it on first use.

    Implements double-checked locking around an asyncio.Lock: an unlocked
    fast path serves the common already-initialized case, and the check is
    repeated once the lock is held so concurrent first callers never build
    two services.
    """
    global _memory_service

    # Common case: singleton already built — skip the lock entirely.
    if _memory_service is not None:
        return _memory_service

    async with _service_lock:
        # Another coroutine may have finished initializing while we waited.
        if _memory_service is not None:
            return _memory_service

        config = TribalMemoryConfig.from_env()

        # MCP runs get a dedicated instance id unless one is set explicitly.
        instance_id = os.environ.get("TRIBAL_MEMORY_INSTANCE_ID", "mcp-claude-code")

        # Make sure the database's parent directory exists before first use.
        Path(config.db.path).parent.mkdir(parents=True, exist_ok=True)

        _memory_service = create_memory_service(
            instance_id=instance_id,
            db_path=config.db.path,
            openai_api_key=config.embedding.api_key,
            api_base=config.embedding.api_base,
            embedding_model=config.embedding.model,
            embedding_dimensions=config.embedding.dimensions,
        )
        logger.info(f"Memory service initialized (instance: {instance_id}, db: {config.db.path})")

        return _memory_service
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def create_server() -> FastMCP:
|
|
64
|
+
"""Create and configure the MCP server with all tools."""
|
|
65
|
+
mcp = FastMCP("tribal-memory")
|
|
66
|
+
|
|
67
|
+
@mcp.tool()
async def tribal_remember(
    content: str,
    source_type: str = "auto_capture",
    context: Optional[str] = None,
    tags: Optional[list[str]] = None,
    skip_dedup: bool = False,
) -> str:
    """Store a new memory with semantic deduplication.

    Args:
        content: Memory content to store (required)
        source_type: How this memory was captured - one of:
            - "user_explicit": User explicitly asked to remember
            - "auto_capture": Automatically detected important info
            - "cross_instance": From another agent instance
            Unknown values fall back to "auto_capture".
        context: Additional context about when/why this was captured
        tags: Categorization tags for filtering (e.g., ["preferences", "work"])
        skip_dedup: If True, store even if a similar memory exists

    Returns:
        JSON with: success, memory_id, duplicate_of (if rejected), error
    """
    # Input validation
    if not content or not content.strip():
        return json.dumps({
            "success": False,
            "memory_id": None,
            "duplicate_of": None,
            "error": "Content cannot be empty",
        })

    service = await get_memory_service()

    # Map string to MemorySource enum; unrecognized values degrade to
    # AUTO_CAPTURE rather than failing, preserving lenient tool behavior.
    source_map = {
        "user_explicit": MemorySource.USER_EXPLICIT,
        "auto_capture": MemorySource.AUTO_CAPTURE,
        "cross_instance": MemorySource.CROSS_INSTANCE,
    }
    source = source_map.get(source_type, MemorySource.AUTO_CAPTURE)

    # Report service failures as a structured JSON error instead of letting
    # the exception escape — matches tribal_export/tribal_import and the
    # documented return contract (which always includes an "error" field).
    try:
        result = await service.remember(
            content=content,
            source_type=source,
            context=context,
            tags=tags,
            skip_dedup=skip_dedup,
        )
    except Exception as e:
        return json.dumps({
            "success": False,
            "memory_id": None,
            "duplicate_of": None,
            "error": str(e),
        })

    return json.dumps({
        "success": result.success,
        "memory_id": result.memory_id,
        "duplicate_of": result.duplicate_of,
        "error": result.error,
    })
|
|
123
|
+
|
|
124
|
+
@mcp.tool()
async def tribal_recall(
    query: str,
    limit: int = 5,
    min_relevance: float = 0.3,
    tags: Optional[list[str]] = None,
) -> str:
    """Search memories by semantic similarity.

    Args:
        query: Natural language search query (required)
        limit: Maximum number of results (1-50, default 5; out-of-range
            values are clamped, not rejected)
        min_relevance: Minimum similarity score (0.0-1.0, default 0.3; clamped)
        tags: Filter results to only memories with these tags

    Returns:
        JSON with: results (list of memories with similarity scores), query, count
    """
    # Input validation
    if not query or not query.strip():
        return json.dumps({
            "results": [],
            "query": query,
            "count": 0,
            "error": "Query cannot be empty",
        })

    service = await get_memory_service()

    # Clamp numeric parameters to their documented ranges.
    limit = max(1, min(50, limit))
    min_relevance = max(0.0, min(1.0, min_relevance))

    # Surface search failures as a structured JSON error rather than raising,
    # consistent with the error style used by tribal_export/tribal_import.
    try:
        results = await service.recall(
            query=query,
            limit=limit,
            min_relevance=min_relevance,
            tags=tags,
        )
    except Exception as e:
        return json.dumps({
            "results": [],
            "query": query,
            "count": 0,
            "error": str(e),
        })

    return json.dumps({
        "results": [
            {
                "memory_id": r.memory.id,
                "content": r.memory.content,
                "similarity_score": round(r.similarity_score, 4),
                "source_type": r.memory.source_type.value,
                "source_instance": r.memory.source_instance,
                "tags": r.memory.tags,
                "created_at": r.memory.created_at.isoformat(),
                "context": r.memory.context,
            }
            for r in results
        ],
        "query": query,
        "count": len(results),
    })
|
|
181
|
+
|
|
182
|
+
@mcp.tool()
async def tribal_correct(
    original_id: str,
    corrected_content: str,
    context: Optional[str] = None,
) -> str:
    """Update/correct an existing memory.

    A correction is stored as a new memory that supersedes the original;
    the original remains for the audit trail, while searches prefer the
    newer entry.

    Args:
        original_id: ID of the memory to correct (required)
        corrected_content: The corrected information (required)
        context: Why this correction was made

    Returns:
        JSON with: success, memory_id (of new correction), error
    """
    def _reject(message: str) -> str:
        # Uniform shape for validation failures.
        return json.dumps({
            "success": False,
            "memory_id": None,
            "error": message,
        })

    # Guard clauses: both required fields must be non-blank.
    if not original_id or not original_id.strip():
        return _reject("Original ID cannot be empty")
    if not corrected_content or not corrected_content.strip():
        return _reject("Corrected content cannot be empty")

    service = await get_memory_service()

    outcome = await service.correct(
        original_id=original_id,
        corrected_content=corrected_content,
        context=context,
    )

    return json.dumps({
        "success": outcome.success,
        "memory_id": outcome.memory_id,
        "error": outcome.error,
    })
|
|
228
|
+
|
|
229
|
+
@mcp.tool()
async def tribal_forget(memory_id: str) -> str:
    """Delete a memory (GDPR-compliant soft delete).

    Args:
        memory_id: ID of the memory to delete (required)

    Returns:
        JSON with: success, memory_id
    """
    # Reject blank / whitespace-only ids before touching the service.
    if not (memory_id and memory_id.strip()):
        return json.dumps({
            "success": False,
            "memory_id": memory_id,
            "error": "Memory ID cannot be empty",
        })

    service = await get_memory_service()
    deleted = await service.forget(memory_id)

    return json.dumps({
        "success": deleted,
        "memory_id": memory_id,
    })
|
|
255
|
+
|
|
256
|
+
@mcp.tool()
async def tribal_stats() -> str:
    """Get memory statistics.

    Returns:
        JSON with: total_memories, by_source_type, by_tag, by_instance, corrections
    """
    # No arguments to validate — fetch the stats and serialize directly.
    service = await get_memory_service()
    return json.dumps(await service.get_stats())
|
|
268
|
+
|
|
269
|
+
@mcp.tool()
async def tribal_export(
    tags: Optional[list[str]] = None,
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
    output_path: Optional[str] = None,
) -> str:
    """Export memories to a portable JSON bundle.

    Args:
        tags: Only export memories with any of these tags.
        date_from: ISO 8601 lower bound (created_at).
        date_to: ISO 8601 upper bound (created_at).
        output_path: Write bundle to file (else inline).

    Returns:
        JSON with: success, memory_count, manifest, entries.
    """
    # NOTE(review): imported locally rather than at module top, presumably
    # to keep the export machinery off the server's import path — confirm.
    from ..portability.embedding_metadata import (
        create_embedding_metadata,
    )
    from ..services.import_export import (
        ExportFilter,
        export_memories as do_export,
        parse_iso_datetime,
    )

    # Validate dates: each bad date short-circuits with a JSON error.
    parsed_from, err = parse_iso_datetime(
        date_from, "date_from",
    )
    if err:
        return json.dumps({"success": False, "error": err})
    parsed_to, err = parse_iso_datetime(
        date_to, "date_to",
    )
    if err:
        return json.dumps({"success": False, "error": err})

    # Stamp the bundle with the current embedding configuration; the getattr
    # fallbacks ("unknown" / 1536) cover services that don't expose these
    # attributes — TODO confirm 1536 matches the default embedding model.
    service = await get_memory_service()
    emb = service.embedding_service
    meta = create_embedding_metadata(
        model_name=getattr(emb, "model", "unknown"),
        dimensions=getattr(emb, "dimensions", 1536),
        provider="openai",
    )

    # Build a filter only when at least one criterion was supplied;
    # flt=None means "export everything".
    flt = None
    if tags or parsed_from or parsed_to:
        flt = ExportFilter(
            tags=tags,
            date_from=parsed_from,
            date_to=parsed_to,
        )

    try:
        bundle = await do_export(
            store=service.vector_store,
            embedding_metadata=meta,
            filters=flt,
        )
    except Exception as e:
        # Export failures are reported as structured JSON, never raised.
        return json.dumps({
            "success": False, "error": str(e),
        })

    bundle_dict = bundle.to_dict()

    # File mode: write the bundle to disk and return only a small summary
    # (avoids echoing a potentially large bundle back to the client).
    if output_path:
        try:
            with open(output_path, "w") as f:
                # default=str stringifies non-JSON types (e.g. datetimes).
                json.dump(bundle_dict, f, default=str)
            return json.dumps({
                "success": True,
                "memory_count": bundle.manifest.memory_count,
                "output_path": output_path,
            })
        except Exception as e:
            return json.dumps({
                "success": False,
                "error": f"Write failed: {e}",
            })

    # Inline mode: return the full manifest and entries in the response.
    return json.dumps({
        "success": True,
        "memory_count": bundle.manifest.memory_count,
        "manifest": bundle_dict["manifest"],
        "entries": bundle_dict["entries"],
    }, default=str)
|
|
358
|
+
|
|
359
|
+
@mcp.tool()
async def tribal_import(
    input_path: Optional[str] = None,
    bundle_json: Optional[str] = None,
    conflict_resolution: str = "skip",
    embedding_strategy: str = "auto",
    dry_run: bool = False,
) -> str:
    """Import memories from a portable JSON bundle.

    Exactly one of input_path / bundle_json should be given; input_path
    wins when both are provided.

    Args:
        input_path: Path to a bundle JSON file.
        bundle_json: Inline JSON string of the bundle.
        conflict_resolution: skip | overwrite | merge.
        embedding_strategy: auto | keep | drop.
        dry_run: Preview what would change without writing.

    Returns:
        JSON with import summary.
    """
    # NOTE(review): local imports mirror tribal_export — presumably to keep
    # import/export machinery off the module import path; confirm.
    from ..portability.embedding_metadata import (
        PortableBundle,
        ReembeddingStrategy,
        create_embedding_metadata,
    )
    from ..services.import_export import (
        ConflictResolution,
        import_memories as do_import,
        validate_conflict_resolution,
        validate_embedding_strategy,
    )

    # At least one bundle source is required.
    if not input_path and not bundle_json:
        return json.dumps({
            "success": False,
            "error": "Provide input_path or bundle_json",
        })

    # Validate enum params before doing any I/O.
    err = validate_conflict_resolution(conflict_resolution)
    if err:
        return json.dumps({"success": False, "error": err})
    err = validate_embedding_strategy(embedding_strategy)
    if err:
        return json.dumps({"success": False, "error": err})

    # Parse bundle — file read, JSON decode, and schema errors all collapse
    # into a single structured error response.
    try:
        if input_path:
            with open(input_path) as f:
                raw = json.load(f)
        else:
            raw = json.loads(bundle_json)
        bundle = PortableBundle.from_dict(raw)
    except Exception as e:
        return json.dumps({
            "success": False,
            "error": f"Failed to parse bundle: {e}",
        })

    # Describe this instance's embedding config so the importer can decide
    # whether bundled embeddings are compatible; getattr fallbacks
    # ("unknown" / 1536) cover services lacking these attributes.
    service = await get_memory_service()
    emb = service.embedding_service
    target_meta = create_embedding_metadata(
        model_name=getattr(emb, "model", "unknown"),
        dimensions=getattr(emb, "dimensions", 1536),
        provider="openai",
    )

    # String -> enum maps; keys match the values accepted by the
    # validate_* helpers above, so direct indexing below is safe.
    cr_map = {
        "skip": ConflictResolution.SKIP,
        "overwrite": ConflictResolution.OVERWRITE,
        "merge": ConflictResolution.MERGE,
    }
    es_map = {
        "auto": ReembeddingStrategy.AUTO,
        "keep": ReembeddingStrategy.KEEP,
        "drop": ReembeddingStrategy.DROP,
    }

    try:
        summary = await do_import(
            bundle=bundle,
            store=service.vector_store,
            target_metadata=target_meta,
            conflict_resolution=cr_map[conflict_resolution],
            embedding_strategy=es_map[embedding_strategy],
            dry_run=dry_run,
        )
    except Exception as e:
        # Import failures are reported as structured JSON, never raised.
        return json.dumps({
            "success": False, "error": str(e),
        })

    # Flatten the summary object into a plain JSON report.
    return json.dumps({
        "success": True,
        "dry_run": summary.dry_run,
        "total": summary.total,
        "imported": summary.imported,
        "skipped": summary.skipped,
        "overwritten": summary.overwritten,
        "errors": summary.errors,
        "needs_reembedding": summary.needs_reembedding,
        "duration_ms": round(summary.duration_ms, 1),
        "error_details": summary.error_details,
    })
|
|
464
|
+
|
|
465
|
+
return mcp
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
def main():
    """Entry point for the MCP server."""
    import sys

    # stdout carries the MCP stdio protocol, so all log output must go to
    # stderr instead.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        stream=sys.stderr,
    )

    create_server().run()


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Performance testing utilities for Tribal Memory."""
|