adaptive-memory-multi-model-router 1.2.2 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +146 -66
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/integrations/airtable.js +20 -0
- package/dist/integrations/discord.js +18 -0
- package/dist/integrations/github.js +23 -0
- package/dist/integrations/gmail.js +19 -0
- package/dist/integrations/google-calendar.js +18 -0
- package/dist/integrations/index.js +61 -0
- package/dist/integrations/jira.js +21 -0
- package/dist/integrations/linear.js +19 -0
- package/dist/integrations/notion.js +19 -0
- package/dist/integrations/slack.js +18 -0
- package/dist/integrations/telegram.js +19 -0
- package/dist/providers/registry.js +7 -3
- package/docs/ARCHITECTURAL-IMPROVEMENTS-2025.md +1391 -0
- package/docs/ARCHITECTURAL-IMPROVEMENTS-REVISED-2025.md +1051 -0
- package/docs/CONFIGURATION.md +476 -0
- package/docs/COUNCIL_DECISION.json +308 -0
- package/docs/COUNCIL_SUMMARY.md +265 -0
- package/docs/COUNCIL_V2.2_DECISION.md +416 -0
- package/docs/IMPROVEMENT_ROADMAP.md +515 -0
- package/docs/LLM_COUNCIL_DECISION.md +508 -0
- package/docs/QUICK_START_VISIBILITY.md +782 -0
- package/docs/REDDIT_GAP_ANALYSIS.md +299 -0
- package/docs/RESEARCH_BACKED_IMPROVEMENTS.md +1180 -0
- package/docs/TMLPD_QNA.md +751 -0
- package/docs/TMLPD_V2.1_COMPLETE.md +763 -0
- package/docs/TMLPD_V2.2_RESEARCH_ROADMAP.md +754 -0
- package/docs/V2.2_IMPLEMENTATION_COMPLETE.md +446 -0
- package/docs/V2_IMPLEMENTATION_GUIDE.md +388 -0
- package/docs/VISIBILITY_ADOPTION_PLAN.md +1005 -0
- package/docs/launch-content/LAUNCH_EXECUTION_CHECKLIST.md +421 -0
- package/docs/launch-content/README.md +457 -0
- package/docs/launch-content/assets/cost_comparison_100_tasks.png +0 -0
- package/docs/launch-content/assets/cumulative_savings.png +0 -0
- package/docs/launch-content/assets/parallel_speedup.png +0 -0
- package/docs/launch-content/assets/provider_pricing_comparison.png +0 -0
- package/docs/launch-content/assets/task_breakdown_comparison.png +0 -0
- package/docs/launch-content/generate_charts.py +313 -0
- package/docs/launch-content/hn_show_post.md +139 -0
- package/docs/launch-content/partner_outreach_templates.md +745 -0
- package/docs/launch-content/reddit_posts.md +467 -0
- package/docs/launch-content/twitter_thread.txt +460 -0
- package/examples/QUICKSTART.md +1 -1
- package/openclaw-alexa-bridge/ALL_REMAINING_FIXES_PLAN.md +313 -0
- package/openclaw-alexa-bridge/REMAINING_FIXES_SUMMARY.md +277 -0
- package/openclaw-alexa-bridge/src/alexa_handler_no_tmlpd.js +1234 -0
- package/openclaw-alexa-bridge/test_fixes.js +77 -0
- package/package.json +120 -29
- package/package.json.tmp +0 -0
- package/qna/TMLPD_QNA.md +3 -3
- package/skill/SKILL.md +2 -2
- package/src/__tests__/integration/tmpld_integration.test.py +540 -0
- package/src/agents/skill_enhanced_agent.py +318 -0
- package/src/memory/__init__.py +15 -0
- package/src/memory/agentic_memory.py +353 -0
- package/src/memory/semantic_memory.py +444 -0
- package/src/memory/simple_memory.py +466 -0
- package/src/memory/working_memory.py +447 -0
- package/src/orchestration/__init__.py +52 -0
- package/src/orchestration/execution_engine.py +353 -0
- package/src/orchestration/halo_orchestrator.py +367 -0
- package/src/orchestration/mcts_workflow.py +498 -0
- package/src/orchestration/role_assigner.py +473 -0
- package/src/orchestration/task_planner.py +522 -0
- package/src/providers/__init__.py +67 -0
- package/src/providers/anthropic.py +304 -0
- package/src/providers/base.py +241 -0
- package/src/providers/cerebras.py +373 -0
- package/src/providers/registry.py +476 -0
- package/src/routing/__init__.py +30 -0
- package/src/routing/universal_router.py +621 -0
- package/src/skills/TMLPD-QUICKREF.md +210 -0
- package/src/skills/TMLPD-SETUP-SUMMARY.md +157 -0
- package/src/skills/TMLPD.md +540 -0
- package/src/skills/__tests__/skill_manager.test.ts +328 -0
- package/src/skills/skill_manager.py +385 -0
- package/src/skills/test-tmlpd.sh +108 -0
- package/src/skills/tmlpd-category.yaml +67 -0
- package/src/skills/tmlpd-monitoring.yaml +188 -0
- package/src/skills/tmlpd-phase.yaml +132 -0
- package/src/state/__init__.py +17 -0
- package/src/state/simple_checkpoint.py +508 -0
- package/src/tmlpd_agent.py +464 -0
- package/src/tmpld_v2.py +427 -0
- package/src/workflows/__init__.py +18 -0
- package/src/workflows/advanced_difficulty_classifier.py +377 -0
- package/src/workflows/chaining_executor.py +417 -0
- package/src/workflows/difficulty_integration.py +209 -0
- package/src/workflows/orchestrator.py +469 -0
- package/src/workflows/orchestrator_executor.py +456 -0
- package/src/workflows/parallelization_executor.py +382 -0
- package/src/workflows/router.py +311 -0
- package/test_integration_simple.py +86 -0
- package/test_mcts_workflow.py +150 -0
- package/test_templd_integration.py +262 -0
- package/test_universal_router.py +275 -0
- package/tmlpd-pi-extension/README.md +36 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.d.ts +114 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.js +285 -0
- package/tmlpd-pi-extension/dist/cache/prefixCache.js.map +1 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.d.ts +58 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.js +153 -0
- package/tmlpd-pi-extension/dist/cache/responseCache.js.map +1 -0
- package/tmlpd-pi-extension/dist/cli.js +59 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.d.ts +95 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.js +240 -0
- package/tmlpd-pi-extension/dist/cost/costTracker.js.map +1 -0
- package/tmlpd-pi-extension/dist/index.d.ts +723 -0
- package/tmlpd-pi-extension/dist/index.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/index.js +239 -0
- package/tmlpd-pi-extension/dist/index.js.map +1 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.d.ts +82 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.js +145 -0
- package/tmlpd-pi-extension/dist/memory/episodicMemory.js.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.d.ts +102 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.js +207 -0
- package/tmlpd-pi-extension/dist/orchestration/haloOrchestrator.js.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.d.ts +85 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.js +210 -0
- package/tmlpd-pi-extension/dist/orchestration/mctsWorkflow.js.map +1 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.d.ts +102 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.js +338 -0
- package/tmlpd-pi-extension/dist/providers/localProvider.js.map +1 -0
- package/tmlpd-pi-extension/dist/providers/registry.d.ts +55 -0
- package/tmlpd-pi-extension/dist/providers/registry.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/providers/registry.js +138 -0
- package/tmlpd-pi-extension/dist/providers/registry.js.map +1 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.d.ts +68 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.js +332 -0
- package/tmlpd-pi-extension/dist/routing/advancedRouter.js.map +1 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.d.ts +101 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.js +368 -0
- package/tmlpd-pi-extension/dist/tools/tmlpdTools.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.d.ts +96 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.js +170 -0
- package/tmlpd-pi-extension/dist/utils/batchProcessor.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/compression.d.ts +61 -0
- package/tmlpd-pi-extension/dist/utils/compression.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/compression.js +281 -0
- package/tmlpd-pi-extension/dist/utils/compression.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/reliability.d.ts +74 -0
- package/tmlpd-pi-extension/dist/utils/reliability.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/reliability.js +177 -0
- package/tmlpd-pi-extension/dist/utils/reliability.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.d.ts +117 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.js +246 -0
- package/tmlpd-pi-extension/dist/utils/speculativeDecoding.js.map +1 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.d.ts +50 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.d.ts.map +1 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.js +124 -0
- package/tmlpd-pi-extension/dist/utils/tokenUtils.js.map +1 -0
- package/tmlpd-pi-extension/examples/QUICKSTART.md +183 -0
- package/tmlpd-pi-extension/package-lock.json +75 -0
- package/tmlpd-pi-extension/package.json +172 -0
- package/tmlpd-pi-extension/python/examples.py +53 -0
- package/tmlpd-pi-extension/python/integrations.py +330 -0
- package/tmlpd-pi-extension/python/setup.py +28 -0
- package/tmlpd-pi-extension/python/tmlpd.py +369 -0
- package/tmlpd-pi-extension/qna/REDDIT_GAP_ANALYSIS.md +299 -0
- package/tmlpd-pi-extension/qna/TMLPD_QNA.md +751 -0
- package/tmlpd-pi-extension/skill/SKILL.md +238 -0
- package/{src → tmlpd-pi-extension/src}/index.ts +1 -1
- package/tmlpd-pi-extension/tsconfig.json +18 -0
- package/demo/research-demo.js +0 -266
- package/notebooks/quickstart.ipynb +0 -157
- package/rust/tmlpd.h +0 -268
- package/src/cache/prefixCache.ts +0 -365
- package/src/routing/advancedRouter.ts +0 -406
- package/src/utils/speculativeDecoding.ts +0 -344
- /package/{src → tmlpd-pi-extension/src}/cache/responseCache.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/cost/costTracker.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/memory/episodicMemory.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/orchestration/haloOrchestrator.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/orchestration/mctsWorkflow.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/providers/localProvider.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/providers/registry.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/tools/tmlpdTools.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/batchProcessor.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/compression.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/reliability.ts +0 -0
- /package/{src → tmlpd-pi-extension/src}/utils/tokenUtils.ts +0 -0
|
@@ -0,0 +1,444 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Phase 3b: Semantic Memory Store
|
|
3
|
+
|
|
4
|
+
Vector-based semantic memory using optional ChromaDB integration.
|
|
5
|
+
Based on Memoria framework (arXiv:2512.12686).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
from typing import Dict, List, Any, Optional
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
import uuid
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class SemanticMemoryStore:
    """
    Semantic memory: stores generalized knowledge and patterns.

    Based on Memoria framework (arXiv:2512.12686)

    Features:
    - Vector-based similarity search (optional ChromaDB)
    - Pattern extraction and storage
    - Cross-task generalization
    - Fallback to keyword-based search

    Patterns are always persisted to a JSON index on disk; ChromaDB (when
    enabled and installed) additionally holds embeddings for vector recall.
    """

    def __init__(
        self,
        base_dir: str = ".taskmaster/memory/semantic",
        use_chromadb: bool = False,
        collection_name: str = "tmlpd_semantic"
    ):
        """
        Initialize semantic memory store.

        Args:
            base_dir: Directory to store semantic memories
            use_chromadb: Whether to use ChromaDB for vector search
            collection_name: ChromaDB collection name
        """
        self.base_dir = Path(base_dir)
        self.base_dir.mkdir(parents=True, exist_ok=True)

        self.use_chromadb = use_chromadb
        self.collection_name = collection_name
        self.chroma_client = None
        self.collection = None

        # Initialize ChromaDB if requested; on failure this silently
        # downgrades self.use_chromadb to False (keyword fallback).
        if use_chromadb:
            self._init_chromadb()

        # JSON pattern index: {"patterns": [pattern_dict, ...],
        #                      "categories": {category: [pattern_id, ...]}}
        self.patterns_file = self.base_dir / "patterns.json"
        self.patterns = self._load_patterns()

    def _init_chromadb(self):
        """Initialize ChromaDB client and collection.

        Degrades gracefully: if chromadb is not importable, vector search is
        disabled and the store falls back to keyword-based recall.
        """
        try:
            import chromadb

            # Initialize client
            self.chroma_client = chromadb.Client()

            # Reuse an existing collection when present; otherwise create one
            # with cosine distance (matches the 1 - distance similarity
            # conversion in _recall_with_chromadb).
            try:
                self.collection = self.chroma_client.get_collection(
                    name=self.collection_name
                )
                print(f"✓ Loaded existing ChromaDB collection: {self.collection_name}")
            except Exception:
                # Fix: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit.
                self.collection = self.chroma_client.create_collection(
                    name=self.collection_name,
                    metadata={"hnsw:space": "cosine"}
                )
                print(f"✓ Created new ChromaDB collection: {self.collection_name}")

        except ImportError:
            print("⚠ ChromaDB not installed. Falling back to keyword-based search.")
            print("  Install with: pip install chromadb")
            self.use_chromadb = False

    def _load_patterns(self) -> Dict[str, Any]:
        """Load the pattern index from disk, or return an empty index."""
        if self.patterns_file.exists():
            with open(self.patterns_file, 'r') as f:
                return json.load(f)
        return {"patterns": [], "categories": {}}

    def _save_patterns(self):
        """Persist the pattern index to disk."""
        with open(self.patterns_file, 'w') as f:
            json.dump(self.patterns, f, indent=2)

    def store_pattern(
        self,
        pattern: str,
        category: str,
        source_task: str,
        success_rate: float = 1.0,
        embeddings: Optional[List[float]] = None,
        metadata: Optional[Dict] = None
    ) -> str:
        """
        Store a generalized pattern.

        Args:
            pattern: Pattern description (e.g., "REST API with JWT auth")
            category: Pattern category (e.g., "api", "auth", "database")
            source_task: Original task that generated this pattern
            success_rate: Historical success rate (0-1)
            embeddings: Vector embeddings (if using ChromaDB)
            metadata: Additional metadata

        Returns:
            Pattern ID
        """
        pattern_id = f"pattern_{uuid.uuid4().hex[:12]}"

        pattern_data = {
            "id": pattern_id,
            "pattern": pattern,
            "category": category,
            "source_task": source_task,
            "success_rate": success_rate,
            "usage_count": 0,
            "created_at": datetime.now().isoformat(),
            "last_used": None,
            "metadata": metadata or {}
        }

        # Mirror into ChromaDB only when it is active AND the caller supplied
        # embeddings; the JSON index below is always updated regardless.
        if self.use_chromadb and self.collection and embeddings:
            self.collection.add(
                ids=[pattern_id],
                embeddings=[embeddings],
                documents=[pattern],
                metadatas=[{
                    "category": category,
                    # ChromaDB metadata values are stored as strings here.
                    "success_rate": str(success_rate),
                    "created_at": pattern_data["created_at"]
                }]
            )

        # Update pattern index
        self.patterns["patterns"].append(pattern_data)

        # Update category -> pattern-id index
        self.patterns["categories"].setdefault(category, []).append(pattern_id)

        self._save_patterns()

        return pattern_id

    def recall_patterns(
        self,
        query: str,
        category: Optional[str] = None,
        top_k: int = 5,
        min_success_rate: float = 0.5,
        query_embeddings: Optional[List[float]] = None
    ) -> List[Dict[str, Any]]:
        """
        Recall relevant patterns.

        Args:
            query: Query description
            category: Filter by category
            top_k: Maximum patterns to return
            min_success_rate: Minimum success rate
            query_embeddings: Query embeddings (if using ChromaDB)

        Returns:
            List of {"pattern", "similarity", "matched_via"} dicts, sorted by
            descending similarity.
        """
        # Vector search only when ChromaDB is live and the caller provided
        # query embeddings; otherwise keyword overlap is used.
        if self.use_chromadb and self.collection and query_embeddings:
            return self._recall_with_chromadb(
                query_embeddings,
                category,
                top_k,
                min_success_rate
            )

        # Fallback to keyword-based search
        return self._recall_keywords(query, category, top_k, min_success_rate)

    def _recall_with_chromadb(
        self,
        query_embeddings: List[float],
        category: Optional[str],
        top_k: int,
        min_success_rate: float
    ) -> List[Dict[str, Any]]:
        """Recall patterns using ChromaDB vector search."""
        # Optional metadata filter on category
        where_clause = {}
        if category:
            where_clause["category"] = category

        # Over-fetch so that post-hoc success-rate filtering can still
        # yield up to top_k results.
        results = self.collection.query(
            query_embeddings=[query_embeddings],
            n_results=top_k * 2,
            where=where_clause if where_clause else None
        )

        # Join ChromaDB hits back to full pattern records from the JSON index.
        patterns_with_scores = []
        for i, pattern_id in enumerate(results["ids"][0]):
            pattern = self._get_pattern_by_id(pattern_id)
            if not pattern:
                # Vector store and JSON index can drift; skip orphans.
                continue
            if pattern["success_rate"] < min_success_rate:
                continue

            patterns_with_scores.append({
                "pattern": pattern,
                # Cosine distance -> similarity
                "similarity": 1 - results["distances"][0][i],
                "matched_via": "vector"
            })

        # Fix: sort BEFORE updating usage counts, so the usage bump applies
        # to the patterns actually returned (the original bumped the first
        # top_k in pre-sort order).
        patterns_with_scores.sort(key=lambda x: x["similarity"], reverse=True)
        top_results = patterns_with_scores[:top_k]

        now = datetime.now().isoformat()
        for item in top_results:
            item["pattern"]["usage_count"] += 1
            item["pattern"]["last_used"] = now

        self._save_patterns()

        return top_results

    def _recall_keywords(
        self,
        query: str,
        category: Optional[str],
        top_k: int,
        min_success_rate: float
    ) -> List[Dict[str, Any]]:
        """Recall patterns by word-overlap between query and pattern text."""
        query_words = set(query.lower().split())

        scored_patterns = []

        # Fix: the original iterated the pattern dicts under the name
        # `pattern_id` and then referenced an undefined `pattern`,
        # raising NameError on the first recall.
        for pattern in self.patterns["patterns"]:
            # Filter by category
            if category and pattern["category"] != category:
                continue

            # Filter by success rate
            if pattern["success_rate"] < min_success_rate:
                continue

            # Score = fraction of query words present in the pattern text.
            # (overlap > 0 also guards the division: an empty query yields
            # zero overlap and is never scored.)
            pattern_words = set(pattern["pattern"].lower().split())
            overlap = len(query_words & pattern_words)

            if overlap > 0:
                scored_patterns.append({
                    "pattern": pattern,
                    "similarity": overlap / len(query_words),
                    "matched_via": "keywords"
                })

        # Sort by similarity
        scored_patterns.sort(key=lambda x: x["similarity"], reverse=True)

        # Update usage bookkeeping for the returned patterns only
        now = datetime.now().isoformat()
        for item in scored_patterns[:top_k]:
            item["pattern"]["usage_count"] += 1
            item["pattern"]["last_used"] = now

        self._save_patterns()

        return scored_patterns[:top_k]

    def _get_pattern_by_id(self, pattern_id: str) -> Optional[Dict[str, Any]]:
        """Linear scan for a pattern record by its ID; None if absent."""
        for pattern in self.patterns["patterns"]:
            if pattern["id"] == pattern_id:
                return pattern
        return None

    def extract_pattern_from_episode(
        self,
        episode: Dict[str, Any],
        category: str,
        pattern_template: str
    ) -> str:
        """
        Extract generalized pattern from specific episode.

        Args:
            episode: Episode dict from episodic memory; must contain
                ``episode["task"]["description"]``, ``episode["result"]["success"]``
                and ``episode["id"]``.
            category: Pattern category
            pattern_template: Template for pattern (e.g., "Create {X} with {Y}").
                NOTE(review): currently unused by the heuristic generalizer;
                kept for interface compatibility.

        Returns:
            Pattern ID
        """
        task_desc = episode["task"]["description"]

        # Simple heuristic generalization (can be enhanced with an LLM):
        # strip task-specific details to produce a reusable pattern.
        generalized = self._generalize_task(task_desc)

        return self.store_pattern(
            pattern=generalized,
            category=category,
            source_task=task_desc,
            # Fix: coerce to float — episodes typically record success as a
            # bool, and the pattern schema documents a 0-1 float.
            success_rate=float(episode["result"]["success"]),
            metadata={"episode_id": episode["id"]}
        )

    def _generalize_task(self, task_description: str) -> str:
        """
        Generalize task description by removing specific details.

        Example: "Create React component for user profile with name and email"
        -> "Create React component for displaying user information"
        """
        # Simple heuristic: remove quoted text (specific values).
        # In production, use an LLM for better generalization.

        # Fix: guard empty input — the capitalization step below indexed
        # [0] and raised IndexError on "".
        if not task_description:
            return task_description

        generalized = task_description

        # Remove quoted strings (specific values)
        generalized = re.sub(r'"[^"]*"', "'X'", generalized)
        generalized = re.sub(r"'[^']*'", "'X'", generalized)

        # Capitalize first letter
        generalized = generalized[0].upper() + generalized[1:]

        return generalized

    def get_patterns_by_category(self, category: str) -> List[Dict[str, Any]]:
        """Get all patterns in a category, sorted by descending success rate."""
        pattern_ids = self.patterns["categories"].get(category, [])

        patterns = []
        for pattern_id in pattern_ids:
            pattern = self._get_pattern_by_id(pattern_id)
            if pattern:
                patterns.append(pattern)

        return sorted(patterns, key=lambda p: p["success_rate"], reverse=True)

    def get_stats(self) -> Dict[str, Any]:
        """Get semantic memory statistics (counts, per-category sizes, averages)."""
        total_patterns = len(self.patterns["patterns"])

        # Category distribution
        category_counts = {
            cat: len(ids)
            for cat, ids in self.patterns["categories"].items()
        }

        # Average success rate (0.0 when the store is empty)
        if total_patterns > 0:
            avg_success = sum(
                p["success_rate"] for p in self.patterns["patterns"]
            ) / total_patterns
        else:
            avg_success = 0.0

        return {
            "total_patterns": total_patterns,
            "categories": category_counts,
            "avg_success_rate": avg_success,
            "using_chromadb": self.use_chromadb,
            "chromadb_collection": self.collection_name if self.use_chromadb else None
        }

    def export_patterns(
        self,
        output_path: str,
        category: Optional[str] = None
    ):
        """
        Export patterns to JSON file.

        Args:
            output_path: Path to output file (parent dirs are created)
            category: Filter by category; None exports everything
        """
        patterns = [
            p for p in self.patterns["patterns"]
            if not (category and p["category"] != category)
        ]

        # Write to output
        output_path = Path(output_path)
        output_path.parent.mkdir(parents=True, exist_ok=True)

        with open(output_path, 'w') as f:
            json.dump(patterns, f, indent=2)

        print(f"Exported {len(patterns)} patterns to {output_path}")

    def cleanup_low_success_patterns(self, min_success_rate: float = 0.3):
        """
        Remove patterns with low success rates.

        Args:
            min_success_rate: Minimum success rate threshold (inclusive keep)
        """
        original_count = len(self.patterns["patterns"])

        # Keep only patterns at or above the threshold
        self.patterns["patterns"] = [
            p for p in self.patterns["patterns"]
            if p["success_rate"] >= min_success_rate
        ]

        # Rebuild the category index from the surviving patterns
        self.patterns["categories"] = {}
        for pattern in self.patterns["patterns"]:
            self.patterns["categories"].setdefault(pattern["category"], []).append(pattern["id"])

        self._save_patterns()

        removed_count = original_count - len(self.patterns["patterns"])
        print(f"Removed {removed_count} low-success patterns")
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
# Import regex at module level
|
|
444
|
+
import re
|