@yamo/memory-mesh 2.3.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/memory_mesh.js +1 -1
- package/lib/llm/client.d.ts +111 -0
- package/lib/llm/client.js +299 -357
- package/lib/llm/client.ts +413 -0
- package/lib/llm/index.d.ts +17 -0
- package/lib/llm/index.js +15 -8
- package/lib/llm/index.ts +19 -0
- package/lib/memory/adapters/client.d.ts +183 -0
- package/lib/memory/adapters/client.js +518 -0
- package/lib/memory/adapters/client.ts +678 -0
- package/lib/memory/adapters/config.d.ts +137 -0
- package/lib/memory/adapters/config.js +189 -0
- package/lib/memory/adapters/config.ts +259 -0
- package/lib/memory/adapters/errors.d.ts +76 -0
- package/lib/memory/adapters/errors.js +128 -0
- package/lib/memory/adapters/errors.ts +166 -0
- package/lib/memory/context-manager.d.ts +44 -0
- package/lib/memory/context-manager.js +344 -0
- package/lib/memory/context-manager.ts +432 -0
- package/lib/memory/embeddings/factory.d.ts +59 -0
- package/lib/memory/embeddings/factory.js +148 -0
- package/lib/{embeddings/factory.js → memory/embeddings/factory.ts} +69 -28
- package/lib/memory/embeddings/index.d.ts +2 -0
- package/lib/memory/embeddings/index.js +2 -0
- package/lib/memory/embeddings/index.ts +2 -0
- package/lib/memory/embeddings/service.d.ts +164 -0
- package/lib/memory/embeddings/service.js +515 -0
- package/lib/{embeddings/service.js → memory/embeddings/service.ts} +223 -156
- package/lib/memory/index.d.ts +9 -0
- package/lib/memory/index.js +9 -1
- package/lib/memory/index.ts +20 -0
- package/lib/memory/memory-mesh.d.ts +274 -0
- package/lib/memory/memory-mesh.js +1469 -678
- package/lib/memory/memory-mesh.ts +1803 -0
- package/lib/memory/memory-translator.d.ts +19 -0
- package/lib/memory/memory-translator.js +125 -0
- package/lib/memory/memory-translator.ts +158 -0
- package/lib/memory/schema.d.ts +111 -0
- package/lib/memory/schema.js +183 -0
- package/lib/memory/schema.ts +267 -0
- package/lib/memory/scorer.d.ts +26 -0
- package/lib/memory/scorer.js +77 -0
- package/lib/memory/scorer.ts +95 -0
- package/lib/memory/search/index.d.ts +1 -0
- package/lib/memory/search/index.js +1 -0
- package/lib/memory/search/index.ts +1 -0
- package/lib/memory/search/keyword-search.d.ts +62 -0
- package/lib/memory/search/keyword-search.js +135 -0
- package/lib/{search/keyword-search.js → memory/search/keyword-search.ts} +66 -36
- package/lib/scrubber/config/defaults.d.ts +53 -0
- package/lib/scrubber/config/defaults.js +49 -57
- package/lib/scrubber/config/defaults.ts +117 -0
- package/lib/scrubber/index.d.ts +6 -0
- package/lib/scrubber/index.js +3 -23
- package/lib/scrubber/index.ts +7 -0
- package/lib/scrubber/scrubber.d.ts +61 -0
- package/lib/scrubber/scrubber.js +99 -121
- package/lib/scrubber/scrubber.ts +168 -0
- package/lib/scrubber/stages/chunker.d.ts +13 -0
- package/lib/scrubber/stages/metadata-annotator.d.ts +18 -0
- package/lib/scrubber/stages/normalizer.d.ts +13 -0
- package/lib/scrubber/stages/semantic-filter.d.ts +13 -0
- package/lib/scrubber/stages/structural-cleaner.d.ts +13 -0
- package/lib/scrubber/stages/validator.d.ts +18 -0
- package/lib/scrubber/telemetry.d.ts +36 -0
- package/lib/scrubber/telemetry.js +53 -58
- package/lib/scrubber/telemetry.ts +99 -0
- package/lib/utils/logger.d.ts +29 -0
- package/lib/utils/logger.js +64 -0
- package/lib/utils/logger.ts +85 -0
- package/lib/utils/skill-metadata.d.ts +32 -0
- package/lib/utils/skill-metadata.js +132 -0
- package/lib/utils/skill-metadata.ts +147 -0
- package/lib/yamo/emitter.d.ts +73 -0
- package/lib/yamo/emitter.js +78 -143
- package/lib/yamo/emitter.ts +249 -0
- package/lib/yamo/schema.d.ts +58 -0
- package/lib/yamo/schema.js +81 -108
- package/lib/yamo/schema.ts +165 -0
- package/package.json +11 -8
- package/index.d.ts +0 -111
- package/lib/embeddings/index.js +0 -2
- package/lib/index.js +0 -6
- package/lib/lancedb/client.js +0 -633
- package/lib/lancedb/config.js +0 -215
- package/lib/lancedb/errors.js +0 -144
- package/lib/lancedb/index.js +0 -4
- package/lib/lancedb/schema.js +0 -217
- package/lib/scrubber/errors/scrubber-error.js +0 -43
- package/lib/scrubber/stages/chunker.js +0 -103
- package/lib/scrubber/stages/metadata-annotator.js +0 -74
- package/lib/scrubber/stages/normalizer.js +0 -59
- package/lib/scrubber/stages/semantic-filter.js +0 -61
- package/lib/scrubber/stages/structural-cleaner.js +0 -82
- package/lib/scrubber/stages/validator.js +0 -66
- package/lib/scrubber/utils/hash.js +0 -39
- package/lib/scrubber/utils/html-parser.js +0 -45
- package/lib/scrubber/utils/pattern-matcher.js +0 -63
- package/lib/scrubber/utils/token-counter.js +0 -31
- package/lib/search/index.js +0 -1
- package/lib/utils/index.js +0 -1
- package/lib/yamo/index.js +0 -15
|
@@ -0,0 +1,1803 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory Mesh - Vector Memory Storage with LanceDB
|
|
3
|
+
* Provides persistent semantic memory for YAMO OS using LanceDB backend
|
|
4
|
+
*
|
|
5
|
+
* CLI Interface:
|
|
6
|
+
* node tools/memory_mesh.js ingest '{"content": "...", "metadata": {...}}'
|
|
7
|
+
* node tools/memory_mesh.js search '{"query": "...", "limit": 10}'
|
|
8
|
+
* node tools/memory_mesh.js get '{"id": "..."}'
|
|
9
|
+
* node tools/memory_mesh.js delete '{"id": "..."}'
|
|
10
|
+
* node tools/memory_mesh.js stats '{}'
|
|
11
|
+
*
|
|
12
|
+
* Also supports STDIN input for YAMO skill compatibility:
|
|
13
|
+
* echo '{"action": "ingest", "content": "..."}' | node tools/memory_mesh.js
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import { fileURLToPath } from "url";
|
|
17
|
+
import fs from "fs";
|
|
18
|
+
import path from "path";
|
|
19
|
+
import crypto from "crypto";
|
|
20
|
+
import { LanceDBClient } from "./adapters/client.js";
|
|
21
|
+
import { getConfig, Config } from "./adapters/config.js";
|
|
22
|
+
import {
|
|
23
|
+
getEmbeddingDimension,
|
|
24
|
+
createSynthesizedSkillSchema,
|
|
25
|
+
} from "./schema.js";
|
|
26
|
+
import { handleError } from "./adapters/errors.js";
|
|
27
|
+
import EmbeddingFactory from "./embeddings/factory.js";
|
|
28
|
+
import { Scrubber } from "../scrubber/scrubber.js";
|
|
29
|
+
import {
|
|
30
|
+
extractSkillIdentity,
|
|
31
|
+
extractSkillTags,
|
|
32
|
+
} from "../utils/skill-metadata.js";
|
|
33
|
+
import { KeywordSearch } from "./search/keyword-search.js";
|
|
34
|
+
import { YamoEmitter } from "../yamo/emitter.js";
|
|
35
|
+
import { LLMClient } from "../llm/client.js";
|
|
36
|
+
import * as lancedb from "@lancedb/lancedb";
|
|
37
|
+
import { createLogger } from "../utils/logger.js";
|
|
38
|
+
|
|
39
|
+
const logger = createLogger("brain");
|
|
40
|
+
|
|
41
|
+
/**
 * Construction options for {@link MemoryMesh}.
 */
export interface MemoryMeshOptions {
  // Emit YAMO provenance blocks on retain/reflect (default: true).
  enableYamo?: boolean;
  // Create an LLMClient for reflection/synthesis (default: true).
  enableLLM?: boolean;
  // Connect to the vector store during init() (default: true).
  enableMemory?: boolean;
  // Agent identifier embedded in emitted YAMO blocks (default: "YAMO_AGENT").
  agentId?: string;
  // LLM settings forwarded to LLMClient when enableLLM is true.
  llmProvider?: string;
  llmApiKey?: string;
  llmModel?: string;
  llmMaxTokens?: number;
  // Directory (or directories) where synthesized skills are persisted.
  skill_directories?: string | string[];
  // Override database location (used for test isolation / ":memory:").
  dbDir?: string;
}
|
|
53
|
+
|
|
54
|
+
/**
 * Row shape stored in the LanceDB memory table.
 */
export interface MemoryEntry {
  // Unique record id (e.g. "mem_<timestamp>_<random>").
  id: string;
  // Sanitized text content.
  content: string;
  // Embedding vector; length must match the configured vector dimension.
  vector: number[];
  // JSON-serialized metadata object (stringified before storage).
  metadata: string;
}
|
|
60
|
+
|
|
61
|
+
/**
 * A memory entry returned from search, annotated with a relevance score.
 * The index signature admits extra backend-specific fields.
 */
export interface SearchResult extends MemoryEntry {
  score: number;
  [key: string]: any;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Value stored in the search query cache: the cached results plus the
 * insertion timestamp used for TTL expiry.
 */
export interface CacheEntry {
  result: SearchResult[];
  timestamp: number;
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* MemoryMesh class for managing vector memory storage
|
|
73
|
+
*/
|
|
74
|
+
export class MemoryMesh {
|
|
75
|
+
client: LanceDBClient | null;
|
|
76
|
+
config: Config | null;
|
|
77
|
+
embeddingFactory: EmbeddingFactory;
|
|
78
|
+
keywordSearch: KeywordSearch;
|
|
79
|
+
isInitialized: boolean;
|
|
80
|
+
vectorDimension: number;
|
|
81
|
+
|
|
82
|
+
enableYamo: boolean;
|
|
83
|
+
enableLLM: boolean;
|
|
84
|
+
enableMemory: boolean;
|
|
85
|
+
agentId: string;
|
|
86
|
+
yamoTable: lancedb.Table | null;
|
|
87
|
+
skillTable: lancedb.Table | null;
|
|
88
|
+
llmClient: LLMClient | null;
|
|
89
|
+
scrubber: Scrubber;
|
|
90
|
+
queryCache: Map<string, CacheEntry>;
|
|
91
|
+
cacheConfig: {
|
|
92
|
+
maxSize: number;
|
|
93
|
+
ttlMs: number;
|
|
94
|
+
};
|
|
95
|
+
skillDirectories: string[]; // Store skill directories for synthesis
|
|
96
|
+
dbDir?: string; // Store custom dbDir for in-memory databases
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Create a new MemoryMesh instance
|
|
100
|
+
* @param {Object} [options={}]
|
|
101
|
+
*/
|
|
102
|
+
constructor(options: MemoryMeshOptions = {}) {
|
|
103
|
+
this.client = null;
|
|
104
|
+
this.config = null;
|
|
105
|
+
this.embeddingFactory = new EmbeddingFactory();
|
|
106
|
+
this.keywordSearch = new KeywordSearch();
|
|
107
|
+
this.isInitialized = false;
|
|
108
|
+
this.vectorDimension = 384; // Will be set during init()
|
|
109
|
+
|
|
110
|
+
// YAMO and LLM support
|
|
111
|
+
this.enableYamo = options.enableYamo !== false;
|
|
112
|
+
this.enableLLM = options.enableLLM !== false;
|
|
113
|
+
this.enableMemory = options.enableMemory !== false;
|
|
114
|
+
this.agentId = options.agentId || "YAMO_AGENT";
|
|
115
|
+
this.yamoTable = null;
|
|
116
|
+
this.skillTable = null;
|
|
117
|
+
this.llmClient = this.enableLLM ? new LLMClient() : null;
|
|
118
|
+
|
|
119
|
+
// Store skill directories for synthesis
|
|
120
|
+
if (Array.isArray(options.skill_directories)) {
|
|
121
|
+
this.skillDirectories = options.skill_directories;
|
|
122
|
+
} else if (options.skill_directories) {
|
|
123
|
+
this.skillDirectories = [options.skill_directories];
|
|
124
|
+
} else {
|
|
125
|
+
this.skillDirectories = ["skills"];
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Initialize LLM client if enabled
|
|
129
|
+
if (this.enableLLM) {
|
|
130
|
+
this.llmClient = new LLMClient({
|
|
131
|
+
provider: options.llmProvider,
|
|
132
|
+
apiKey: options.llmApiKey,
|
|
133
|
+
model: options.llmModel,
|
|
134
|
+
maxTokens: options.llmMaxTokens,
|
|
135
|
+
});
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
// Scrubber for Layer 0 sanitization
|
|
139
|
+
this.scrubber = new Scrubber({
|
|
140
|
+
enabled: true,
|
|
141
|
+
chunking: {
|
|
142
|
+
minTokens: 1, // Allow short memories
|
|
143
|
+
} as any, // Type cast for partial config
|
|
144
|
+
validation: {
|
|
145
|
+
enforceMinLength: false, // Disable strict length validation
|
|
146
|
+
} as any,
|
|
147
|
+
});
|
|
148
|
+
|
|
149
|
+
// Simple LRU cache for search queries (5 minute TTL)
|
|
150
|
+
this.queryCache = new Map();
|
|
151
|
+
this.cacheConfig = {
|
|
152
|
+
maxSize: 500,
|
|
153
|
+
ttlMs: 5 * 60 * 1000, // 5 minutes
|
|
154
|
+
};
|
|
155
|
+
|
|
156
|
+
// Store custom dbDir for test isolation
|
|
157
|
+
this.dbDir = options.dbDir;
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
/**
|
|
161
|
+
* Generate a cache key from query and options
|
|
162
|
+
* @private
|
|
163
|
+
*/
|
|
164
|
+
_generateCacheKey(query: string, options: any = {}): string {
|
|
165
|
+
const normalizedOptions = {
|
|
166
|
+
limit: options.limit || 10,
|
|
167
|
+
filter: options.filter || null,
|
|
168
|
+
// Normalize options that affect results
|
|
169
|
+
};
|
|
170
|
+
return `search:${query}:${JSON.stringify(normalizedOptions)}`;
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
/**
|
|
174
|
+
* Get cached result if valid
|
|
175
|
+
* @private
|
|
176
|
+
*
|
|
177
|
+
* Race condition fix: The delete-then-set pattern for LRU tracking creates a window
|
|
178
|
+
* where another operation could observe the key as missing. We use a try-finally
|
|
179
|
+
* pattern to ensure atomicity at the application level.
|
|
180
|
+
*/
|
|
181
|
+
_getCachedResult(key: string): SearchResult[] | null {
|
|
182
|
+
const entry = this.queryCache.get(key);
|
|
183
|
+
if (!entry) {
|
|
184
|
+
return null;
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// Check TTL - must be done before any mutation
|
|
188
|
+
const now = Date.now();
|
|
189
|
+
if (now - entry.timestamp > this.cacheConfig.ttlMs) {
|
|
190
|
+
this.queryCache.delete(key);
|
|
191
|
+
return null;
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
// Move to end (most recently used) - delete and re-add with updated timestamp
|
|
195
|
+
// While not truly atomic, the key remains accessible during the operation
|
|
196
|
+
// since we already have the entry reference
|
|
197
|
+
this.queryCache.delete(key);
|
|
198
|
+
this.queryCache.set(key, {
|
|
199
|
+
...entry,
|
|
200
|
+
timestamp: now, // Update timestamp for LRU tracking
|
|
201
|
+
});
|
|
202
|
+
|
|
203
|
+
return entry.result;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
/**
|
|
207
|
+
* Cache a search result
|
|
208
|
+
* @private
|
|
209
|
+
*/
|
|
210
|
+
_cacheResult(key: string, result: SearchResult[]): void {
|
|
211
|
+
// Evict oldest if at max size
|
|
212
|
+
if (this.queryCache.size >= this.cacheConfig.maxSize) {
|
|
213
|
+
const firstKey = this.queryCache.keys().next().value;
|
|
214
|
+
if (firstKey !== undefined) {
|
|
215
|
+
this.queryCache.delete(firstKey);
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
this.queryCache.set(key, {
|
|
220
|
+
result,
|
|
221
|
+
timestamp: Date.now(),
|
|
222
|
+
});
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
/**
|
|
226
|
+
* Clear all cached results
|
|
227
|
+
*/
|
|
228
|
+
clearCache(): void {
|
|
229
|
+
this.queryCache.clear();
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
/**
|
|
233
|
+
* Get cache statistics
|
|
234
|
+
*/
|
|
235
|
+
getCacheStats(): any {
|
|
236
|
+
return {
|
|
237
|
+
size: this.queryCache.size,
|
|
238
|
+
maxSize: this.cacheConfig.maxSize,
|
|
239
|
+
ttlMs: this.cacheConfig.ttlMs,
|
|
240
|
+
};
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
/**
|
|
244
|
+
* Validate and sanitize metadata to prevent prototype pollution
|
|
245
|
+
* @private
|
|
246
|
+
*/
|
|
247
|
+
_validateMetadata(metadata: any): Record<string, any> {
|
|
248
|
+
if (typeof metadata !== "object" || metadata === null) {
|
|
249
|
+
throw new Error("Metadata must be a non-null object");
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
// Sanitize keys to prevent prototype pollution
|
|
253
|
+
const sanitized: Record<string, any> = {};
|
|
254
|
+
for (const [key, value] of Object.entries(metadata)) {
|
|
255
|
+
// Skip dangerous keys that could pollute prototype
|
|
256
|
+
if (key === "__proto__" || key === "constructor" || key === "prototype") {
|
|
257
|
+
continue;
|
|
258
|
+
}
|
|
259
|
+
// Skip inherited properties
|
|
260
|
+
if (!Object.prototype.hasOwnProperty.call(metadata, key)) {
|
|
261
|
+
continue;
|
|
262
|
+
}
|
|
263
|
+
sanitized[key] = value;
|
|
264
|
+
}
|
|
265
|
+
return sanitized;
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
/**
|
|
269
|
+
* Sanitize and validate content before storage
|
|
270
|
+
* @private
|
|
271
|
+
*/
|
|
272
|
+
_sanitizeContent(content: string): string {
|
|
273
|
+
if (typeof content !== "string") {
|
|
274
|
+
throw new Error("Content must be a string");
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
// Limit content length
|
|
278
|
+
const MAX_CONTENT_LENGTH = 100000; // 100KB limit
|
|
279
|
+
if (content.length > MAX_CONTENT_LENGTH) {
|
|
280
|
+
throw new Error(
|
|
281
|
+
`Content exceeds maximum length of ${MAX_CONTENT_LENGTH} characters`,
|
|
282
|
+
);
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
return content.trim();
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
/**
|
|
289
|
+
* Initialize the LanceDB client
|
|
290
|
+
*/
|
|
291
|
+
  /**
   * Initialize the LanceDB client, embedding factory, keyword index and
   * optional extension tables. Idempotent: subsequent calls return
   * immediately once initialization has succeeded.
   *
   * When enableMemory is false, initialization is a no-op (the instance
   * is marked initialized without touching any database).
   *
   * @throws {Error} If configuration loading, the database connection,
   *   or embedding-factory initialization fails. Extension-table
   *   failures are logged but not thrown.
   */
  async init(): Promise<void> {
    if (this.isInitialized) {
      return;
    }

    if (!this.enableMemory) {
      this.isInitialized = true;
      if (process.env.YAMO_DEBUG === "true") {
        logger.debug("MemoryMesh initialization skipped (enableMemory=false)");
      }
      return;
    }

    try {
      // Load configuration
      this.config = getConfig();

      // Detect vector dimension from embedding model configuration;
      // an explicit EMBEDDING_DIMENSION env var wins over the model lookup.
      const modelName =
        process.env.EMBEDDING_MODEL_NAME || "Xenova/all-MiniLM-L6-v2";
      const envDimension =
        parseInt(process.env.EMBEDDING_DIMENSION || "0") || null;
      this.vectorDimension = envDimension || getEmbeddingDimension(modelName);

      // Only log in debug mode to avoid corrupting spinner/REPL display
      if (process.env.YAMO_DEBUG === "true") {
        logger.debug(
          { dimension: this.vectorDimension, model: modelName },
          "Using vector dimension",
        );
      }

      // Use custom dbDir if provided (for test isolation), otherwise use config
      const dbUri = this.dbDir || this.config.LANCEDB_URI;

      // Create LanceDBClient with detected dimension
      this.client = new LanceDBClient({
        uri: dbUri,
        tableName: this.config.LANCEDB_MEMORY_TABLE,
        vectorDimension: this.vectorDimension,
        maxRetries: 3,
        retryDelay: 1000,
      });

      // Connect to database
      await this.client.connect();

      // Configure embedding factory from environment
      const embeddingConfigs = this._parseEmbeddingConfig();
      this.embeddingFactory.configure(embeddingConfigs);
      await this.embeddingFactory.init();

      // Hydrate the in-memory keyword search index from existing records.
      if (this.client) {
        try {
          const allRecords = await this.client.getAll({ limit: 10000 });
          this.keywordSearch.load(allRecords as any);
        } catch (_e) {
          // Ignore if table doesn't exist yet
        }
      }

      // Initialize extension tables if enabled. These are best-effort:
      // failure here must not block core memory functionality.
      if (this.enableYamo && this.client && this.client.db) {
        try {
          // Dynamic import avoids a static circular dependency with ../yamo.
          const { createYamoTable } = await import("../yamo/schema.js");
          this.yamoTable = await createYamoTable(this.client.db, "yamo_blocks");

          // Initialize synthesized skills table (Recursive Skill Synthesis):
          // open it if present, otherwise create it with the skill schema.
          const existingTables = await this.client.db.tableNames();

          if (existingTables.includes("synthesized_skills")) {
            this.skillTable =
              await this.client.db.openTable("synthesized_skills");
          } else {
            const skillSchema = createSynthesizedSkillSchema(
              this.vectorDimension,
            );
            this.skillTable = await this.client.db.createTable(
              "synthesized_skills",
              [],
              {
                schema: skillSchema,
              } as any,
            );
          }

          if (process.env.YAMO_DEBUG === "true") {
            logger.debug(
              "YAMO blocks and synthesized skills tables initialized",
            );
          }
        } catch (e) {
          logger.warn({ err: e }, "Failed to initialize extension tables");
        }
      }

      this.isInitialized = true;
    } catch (error) {
      // Normalize non-Error throws before propagating.
      const e = error instanceof Error ? error : new Error(String(error));
      throw e;
    }
  }
|
|
395
|
+
|
|
396
|
+
/**
|
|
397
|
+
* Add content to memory with auto-generated embedding and scrubbing.
|
|
398
|
+
*
|
|
399
|
+
* This is the primary method for storing information in the memory mesh.
|
|
400
|
+
* The content goes through several processing steps:
|
|
401
|
+
*
|
|
402
|
+
* 1. **Scrubbing**: PII and sensitive data are sanitized (if enabled)
|
|
403
|
+
* 2. **Validation**: Content length and metadata are validated
|
|
404
|
+
* 3. **Embedding**: Content is converted to a vector representation
|
|
405
|
+
* 4. **Storage**: Record is stored in LanceDB with metadata
|
|
406
|
+
* 5. **Emission**: Optional YAMO block emitted for provenance tracking
|
|
407
|
+
*
|
|
408
|
+
* @param content - The text content to store in memory
|
|
409
|
+
* @param metadata - Optional metadata (type, source, tags, etc.)
|
|
410
|
+
* @returns Promise with memory record containing id, content, metadata, created_at
|
|
411
|
+
*
|
|
412
|
+
* @example
|
|
413
|
+
* ```typescript
|
|
414
|
+
* const memory = await mesh.add("User likes TypeScript", {
|
|
415
|
+
* type: "preference",
|
|
416
|
+
* source: "chat",
|
|
417
|
+
* tags: ["programming", "languages"]
|
|
418
|
+
* });
|
|
419
|
+
* ```
|
|
420
|
+
*
|
|
421
|
+
* @throws {Error} If content exceeds max length (100KB)
|
|
422
|
+
* @throws {Error} If embedding generation fails
|
|
423
|
+
* @throws {Error} If database client is not initialized
|
|
424
|
+
*/
|
|
425
|
+
async add(content: string, metadata: any = {}): Promise<any> {
|
|
426
|
+
await this.init();
|
|
427
|
+
|
|
428
|
+
const type = metadata.type || "event";
|
|
429
|
+
const enrichedMetadata = { ...metadata, type };
|
|
430
|
+
|
|
431
|
+
try {
|
|
432
|
+
let processedContent = content;
|
|
433
|
+
let scrubbedMetadata = {};
|
|
434
|
+
|
|
435
|
+
try {
|
|
436
|
+
const scrubbedResult = await this.scrubber.process({
|
|
437
|
+
content: content,
|
|
438
|
+
source: "memory-api",
|
|
439
|
+
type: "txt",
|
|
440
|
+
});
|
|
441
|
+
|
|
442
|
+
if (scrubbedResult.success && scrubbedResult.chunks.length > 0) {
|
|
443
|
+
processedContent = scrubbedResult.chunks
|
|
444
|
+
.map((c: any) => c.text)
|
|
445
|
+
.join("\n\n");
|
|
446
|
+
if (scrubbedResult.metadata) {
|
|
447
|
+
scrubbedMetadata = {
|
|
448
|
+
...scrubbedResult.metadata,
|
|
449
|
+
scrubber_telemetry: JSON.stringify(scrubbedResult.telemetry),
|
|
450
|
+
};
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
} catch (scrubError: any) {
|
|
454
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
455
|
+
logger.error({ err: scrubError }, "Scrubber failed");
|
|
456
|
+
}
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
const sanitizedContent = this._sanitizeContent(processedContent);
|
|
460
|
+
const sanitizedMetadata = this._validateMetadata({
|
|
461
|
+
...scrubbedMetadata,
|
|
462
|
+
...enrichedMetadata,
|
|
463
|
+
});
|
|
464
|
+
|
|
465
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
466
|
+
console.error(
|
|
467
|
+
"[DEBUG] brain.add() scrubbedMetadata.type:",
|
|
468
|
+
(scrubbedMetadata as any).type,
|
|
469
|
+
);
|
|
470
|
+
console.error(
|
|
471
|
+
"[DEBUG] brain.add() enrichedMetadata.type:",
|
|
472
|
+
enrichedMetadata.type,
|
|
473
|
+
);
|
|
474
|
+
console.error(
|
|
475
|
+
"[DEBUG] brain.add() sanitizedMetadata.type:",
|
|
476
|
+
sanitizedMetadata.type,
|
|
477
|
+
);
|
|
478
|
+
}
|
|
479
|
+
|
|
480
|
+
const vector = await this.embeddingFactory.embed(sanitizedContent);
|
|
481
|
+
|
|
482
|
+
// Dedup: search by the already-computed vector before inserting.
|
|
483
|
+
// Catches exact duplicates regardless of which write path is used,
|
|
484
|
+
// protecting callers that bypass captureInteraction()'s dedup guard.
|
|
485
|
+
if (this.client) {
|
|
486
|
+
const nearest = await this.client.search(vector, { limit: 1 });
|
|
487
|
+
if (nearest.length > 0 && nearest[0].content === sanitizedContent) {
|
|
488
|
+
return {
|
|
489
|
+
id: nearest[0].id,
|
|
490
|
+
content: sanitizedContent,
|
|
491
|
+
metadata: sanitizedMetadata,
|
|
492
|
+
created_at: new Date().toISOString(),
|
|
493
|
+
};
|
|
494
|
+
}
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
const id = `mem_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
|
498
|
+
|
|
499
|
+
const record: MemoryEntry = {
|
|
500
|
+
id,
|
|
501
|
+
vector,
|
|
502
|
+
content: sanitizedContent,
|
|
503
|
+
metadata: JSON.stringify(sanitizedMetadata),
|
|
504
|
+
};
|
|
505
|
+
|
|
506
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
507
|
+
console.error(
|
|
508
|
+
"[DEBUG] record.metadata.type:",
|
|
509
|
+
JSON.parse(record.metadata).type,
|
|
510
|
+
);
|
|
511
|
+
}
|
|
512
|
+
|
|
513
|
+
if (!this.client) {
|
|
514
|
+
throw new Error("Database client not initialized");
|
|
515
|
+
}
|
|
516
|
+
const result = await this.client.add(record);
|
|
517
|
+
|
|
518
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
519
|
+
try {
|
|
520
|
+
console.error(
|
|
521
|
+
"[DEBUG] result.metadata.type:",
|
|
522
|
+
JSON.parse((result as any).metadata).type,
|
|
523
|
+
);
|
|
524
|
+
} catch {
|
|
525
|
+
console.error("[DEBUG] result.metadata:", (result as any).metadata);
|
|
526
|
+
}
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
this.keywordSearch.add(record.id, record.content, sanitizedMetadata);
|
|
530
|
+
|
|
531
|
+
if (this.enableYamo) {
|
|
532
|
+
this._emitYamoBlock(
|
|
533
|
+
"retain",
|
|
534
|
+
result.id,
|
|
535
|
+
YamoEmitter.buildRetainBlock({
|
|
536
|
+
content: sanitizedContent,
|
|
537
|
+
metadata: sanitizedMetadata,
|
|
538
|
+
id: result.id,
|
|
539
|
+
agentId: this.agentId,
|
|
540
|
+
memoryType: sanitizedMetadata.type || "event",
|
|
541
|
+
}),
|
|
542
|
+
).catch((error) => {
|
|
543
|
+
// Log emission failures in debug mode but don't throw
|
|
544
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
545
|
+
logger.warn({ err: error }, "Failed to emit YAMO block (retain)");
|
|
546
|
+
}
|
|
547
|
+
});
|
|
548
|
+
}
|
|
549
|
+
|
|
550
|
+
return {
|
|
551
|
+
id: result.id,
|
|
552
|
+
content: sanitizedContent,
|
|
553
|
+
metadata: sanitizedMetadata,
|
|
554
|
+
created_at: new Date().toISOString(),
|
|
555
|
+
};
|
|
556
|
+
} catch (error) {
|
|
557
|
+
throw error instanceof Error ? error : new Error(String(error));
|
|
558
|
+
}
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
/**
|
|
562
|
+
* Reflect on recent memories
|
|
563
|
+
*/
|
|
564
|
+
async reflect(options: any = {}): Promise<any> {
|
|
565
|
+
await this.init();
|
|
566
|
+
const lookback = options.lookback || 10;
|
|
567
|
+
const topic = options.topic;
|
|
568
|
+
const generate = options.generate !== false;
|
|
569
|
+
|
|
570
|
+
let memories: any[] = [];
|
|
571
|
+
if (topic) {
|
|
572
|
+
memories = await this.search(topic, { limit: lookback });
|
|
573
|
+
} else {
|
|
574
|
+
const all = await this.getAll();
|
|
575
|
+
memories = all
|
|
576
|
+
.sort(
|
|
577
|
+
(a: any, b: any) =>
|
|
578
|
+
new Date(b.created_at).getTime() - new Date(a.created_at).getTime(),
|
|
579
|
+
)
|
|
580
|
+
.slice(0, lookback);
|
|
581
|
+
}
|
|
582
|
+
|
|
583
|
+
const prompt = `Review these memories. Synthesize a high-level "belief" or "observation".`;
|
|
584
|
+
|
|
585
|
+
if (!generate || !this.enableLLM || !this.llmClient) {
|
|
586
|
+
return {
|
|
587
|
+
topic,
|
|
588
|
+
count: memories.length,
|
|
589
|
+
context: memories.map((m) => ({
|
|
590
|
+
content: m.content,
|
|
591
|
+
type: m.metadata?.type || "event",
|
|
592
|
+
id: m.id,
|
|
593
|
+
})),
|
|
594
|
+
prompt,
|
|
595
|
+
};
|
|
596
|
+
}
|
|
597
|
+
|
|
598
|
+
let reflection: string = "";
|
|
599
|
+
let confidence = 0;
|
|
600
|
+
|
|
601
|
+
try {
|
|
602
|
+
const result = await this.llmClient!.reflect(prompt, memories);
|
|
603
|
+
reflection = result.reflection;
|
|
604
|
+
confidence = result.confidence;
|
|
605
|
+
} catch (_error) {
|
|
606
|
+
reflection = `Aggregated from ${memories.length} memories on topic: ${topic || "general"}`;
|
|
607
|
+
confidence = 0.5;
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
const reflectionId = `reflect_${Date.now()}_${crypto.randomBytes(4).toString("hex")}`;
|
|
611
|
+
await this.add(reflection, {
|
|
612
|
+
type: "reflection",
|
|
613
|
+
topic: topic || "general",
|
|
614
|
+
source_memory_count: memories.length,
|
|
615
|
+
confidence,
|
|
616
|
+
generated_at: new Date().toISOString(),
|
|
617
|
+
});
|
|
618
|
+
|
|
619
|
+
let yamoBlock: string | null = null;
|
|
620
|
+
if (this.enableYamo) {
|
|
621
|
+
yamoBlock = YamoEmitter.buildReflectBlock({
|
|
622
|
+
topic: topic || "general",
|
|
623
|
+
memoryCount: memories.length,
|
|
624
|
+
agentId: this.agentId,
|
|
625
|
+
reflection,
|
|
626
|
+
confidence,
|
|
627
|
+
});
|
|
628
|
+
await this._emitYamoBlock("reflect", reflectionId, yamoBlock);
|
|
629
|
+
}
|
|
630
|
+
|
|
631
|
+
return {
|
|
632
|
+
id: reflectionId,
|
|
633
|
+
topic: topic || "general",
|
|
634
|
+
reflection,
|
|
635
|
+
confidence,
|
|
636
|
+
sourceMemoryCount: memories.length,
|
|
637
|
+
yamoBlock,
|
|
638
|
+
createdAt: new Date().toISOString(),
|
|
639
|
+
};
|
|
640
|
+
}
|
|
641
|
+
|
|
642
|
+
/**
|
|
643
|
+
* Ingest synthesized skill
|
|
644
|
+
* @param sourceFilePath - If provided, skip file write (file already exists)
|
|
645
|
+
*/
|
|
646
|
+
  /**
   * Ingest a synthesized skill: extract its identity from YAMO text,
   * guard against recursive-name loops, embed a tag-aware summary,
   * store it in the synthesized_skills table, and persist the YAMO
   * source to disk (unless the file already exists or the database is
   * in-memory).
   *
   * @param yamoText - Raw YAMO skill definition text.
   * @param metadata - Extra metadata merged over defaults
   *   (reliability: 0.5, use_count: 0, source: "manual").
   * @param sourceFilePath - If provided, skip file write (file already exists)
   *   and record the path as `source_file` in metadata.
   * @returns { id, name, intent } of the ingested skill.
   * @throws {Error} If the skill table is uninitialized, the name matches
   *   a recursive pattern, or any ingestion step fails (wrapped as
   *   "Skill ingestion failed: ...").
   */
  async ingestSkill(
    yamoText: string,
    metadata: any = {},
    sourceFilePath?: string,
  ): Promise<any> {
    await this.init();
    if (!this.skillTable) {
      throw new Error("Skill table not initialized");
    }

    // DEBUG: Trace sourceFilePath parameter
    if (process.env.YAMO_DEBUG_PATHS === "true") {
      console.error(
        `[BRAIN.ingestSkill] sourceFilePath parameter: ${sourceFilePath || "undefined"}`,
      );
    }

    try {
      const identity = extractSkillIdentity(yamoText);
      // Caller-supplied name takes precedence over the extracted one.
      const name = metadata.name || identity.name;
      const intent = identity.intent;
      const description = identity.description;

      // RECURSION DETECTION: Check for recursive naming patterns.
      // Patterns like "SkillSkill", "SkillSkillSkill" indicate
      // filename-derived names that would trigger re-synthesis loops.
      const recursivePattern = /^(Skill|skill){2,}/;
      if (recursivePattern.test(name)) {
        logger.warn(
          { originalName: name },
          "Detected recursive naming pattern, rejecting ingestion to prevent loop",
        );
        throw new Error(
          `Recursive naming pattern detected: ${name}. Skills must have proper name: field.`,
        );
      }

      // Extract tags for tag-aware embeddings (improves semantic search)
      const tags = extractSkillTags(yamoText);
      const tagText = tags.length > 0 ? `\nTags: ${tags.join(", ")}` : "";

      // Embed a compact identity summary rather than the full YAMO text.
      const embeddingText = `Skill: ${name}\nIntent: ${intent}${tagText}\nDescription: ${description}`;
      const vector = await this.embeddingFactory.embed(embeddingText);

      const id = `skill_${Date.now()}_${crypto.randomBytes(2).toString("hex")}`;
      const skillMetadata = {
        reliability: 0.5,
        use_count: 0,
        source: "manual",
        ...metadata,
        // Store source file path for policy loading and parent discovery
        ...(sourceFilePath && { source_file: sourceFilePath }),
      };
      const record = {
        id,
        name,
        intent,
        yamo_text: yamoText,
        vector,
        metadata: JSON.stringify(skillMetadata),
        created_at: new Date(),
      };
      await this.skillTable.add([record]);

      // Persist to filesystem for longevity and visibility.
      // Skip if sourceFilePath provided (file already exists from SkillCreator)
      // Skip if using in-memory database (:memory:)
      if (!sourceFilePath && this.dbDir !== ":memory:") {
        try {
          const skillsDir = path.resolve(
            process.cwd(),
            this.skillDirectories[0] || "skills",
          );
          if (!fs.existsSync(skillsDir)) {
            fs.mkdirSync(skillsDir, { recursive: true });
          }
          // Robust filename with length limit to prevent ENAMETOOLONG
          const safeName = name
            .toLowerCase()
            .replace(/[^a-z0-9]/g, "-")
            .replace(/-+/g, "-")
            .substring(0, 50);
          const fileName = `skill-${safeName}.yamo`;
          const filePath = path.join(skillsDir, fileName);
          // Only write if file doesn't already exist to prevent duplicates
          if (!fs.existsSync(filePath)) {
            fs.writeFileSync(filePath, yamoText, "utf8");
            if (process.env.YAMO_DEBUG === "true") {
              logger.debug({ filePath }, "Skill persisted to file");
            }
          }
        } catch (fileError: any) {
          // File persistence is best-effort; the DB record already exists.
          logger.warn({ err: fileError }, "Failed to persist skill to file");
        }
      }

      return { id, name, intent };
    } catch (error: any) {
      throw new Error(`Skill ingestion failed: ${error.message}`);
    }
  }
|
|
746
|
+
|
|
747
|
+
  /**
   * Recursive Skill Synthesis.
   *
   * Dispatches skill creation to the SkillCreator agent through the kernel's
   * `_kernel_execute` hook, detects any new `.yamo` file it produced by
   * diffing the skill directories before/after execution, optionally expands
   * a compressed skill to canonical form, guarantees a metadata header with a
   * meaningful name, and finally ingests the result into the skill table.
   *
   * @param options - Supports `topic` (default "general_improvement") and
   *   optional `enrichedPrompt`, used verbatim in the SkillCreator instruction.
   * @returns A status object; on success it includes `skill_id`, `skill_name`
   *   and `yamo_text`. SkillCreator failures return `status: "error"` rather
   *   than throwing.
   * @throws {Error} When no `_kernel_execute` hook is attached (i.e. MemoryMesh
   *   is used without a YamoKernel).
   */
  async synthesize(options: any = {}): Promise<any> {
    await this.init();
    const topic = options.topic || "general_improvement";
    const enrichedPrompt = options.enrichedPrompt || topic; // PHASE 4: Use enriched prompt
    // const lookback = options.lookback || 20;

    logger.info({ topic, enrichedPrompt }, "Synthesizing logic");

    // OPTIMIZATION: If we have an execution engine (kernel), use SkillCreator!
    if ((this as any)._kernel_execute) {
      logger.info("Dispatching to SkillCreator agent...");
      try {
        // Use stored skill directories
        const skillDirs = this.skillDirectories;

        // Track existing .yamo files before SkillCreator runs so we can
        // identify which file (if any) the agent creates.
        const filesBefore = new Set<string>();
        for (const dir of skillDirs) {
          if (fs.existsSync(dir)) {
            const walk = (currentDir: string) => {
              try {
                const entries = fs.readdirSync(currentDir, {
                  withFileTypes: true,
                });
                for (const entry of entries) {
                  const fullPath = path.join(currentDir, entry.name);
                  if (entry.isDirectory()) {
                    walk(fullPath);
                  } else if (entry.isFile() && entry.name.endsWith(".yamo")) {
                    filesBefore.add(fullPath);
                  }
                }
              } catch (e) {
                // Skip directories we can't read
                logger.debug({ dir, error: e }, "Could not read directory");
              }
            };
            walk(dir);
          }
        }

        // PHASE 4: Use enriched prompt for SkillCreator
        await (this as any)._kernel_execute(
          `SkillCreator: design a new skill to handle ${enrichedPrompt}`,
          {
            v1_1_enabled: true,
          },
        );

        // Find newly created .yamo file: walk the same directories again and
        // take any .yamo path not seen in the "before" snapshot. If several
        // were created, the last one visited wins.
        let newSkillFile: string | undefined;
        for (const dir of skillDirs) {
          if (fs.existsSync(dir)) {
            const walk = (currentDir: string) => {
              try {
                const entries = fs.readdirSync(currentDir, {
                  withFileTypes: true,
                });
                for (const entry of entries) {
                  const fullPath = path.join(currentDir, entry.name);
                  if (entry.isDirectory()) {
                    walk(fullPath);
                  } else if (entry.isFile() && entry.name.endsWith(".yamo")) {
                    if (!filesBefore.has(fullPath)) {
                      newSkillFile = fullPath;
                    }
                  }
                }
              } catch (e) {
                logger.debug({ dir, error: e }, "Could not read directory");
              }
            };
            walk(dir);
          }
        }

        // Ingest the newly created skill file
        if (newSkillFile) {
          logger.info(
            { skillFile: newSkillFile },
            "Ingesting newly synthesized skill",
          );
          let skillContent = fs.readFileSync(newSkillFile, "utf8");

          // PHASE 4: Expand compressed → canonical for disk storage
          // Skills created by evolution are typically compressed; expand to canonical for readability
          // Skip expansion in test environment or when disabled
          const expansionEnabled =
            process.env.YAMO_EXPANSION_ENABLED !== "false";
          // NOTE(review): the second disjunct is unreachable — if the content
          // includes "---", split("---").length is >= 2, so this reduces to
          // !skillContent.includes("---"). Confirm intent before simplifying.
          const isCompressed =
            !skillContent.includes("---") ||
            (skillContent.includes("---") &&
              skillContent.split("---").length <= 1);
          if (expansionEnabled && isCompressed) {
            logger.info(
              { skillFile: newSkillFile },
              "Expanding compressed skill to canonical format",
            );
            try {
              const expanded = await (this as any)._kernel_execute(
                "skill-expansion-system-prompt.yamo",
                {
                  input_yamo: skillContent,
                },
              );
              if (expanded && expanded.canonical_yamo) {
                skillContent = expanded.canonical_yamo;
                // Write expanded canonical format back to disk
                fs.writeFileSync(newSkillFile, skillContent, "utf8");
                logger.info(
                  { skillFile: newSkillFile },
                  "Skill expanded to canonical format on disk",
                );
              }
            } catch (e) {
              // Expansion is best-effort: fall through with the compressed text.
              logger.warn(
                { err: e },
                "Failed to expand skill to canonical, using compressed format",
              );
            }
          }

          // ENSURE: Synthesized skills always have proper metadata with meaningful name
          // This prevents duplicate skill-agent-{timestamp}.yamo files
          const synIdentity = extractSkillIdentity(skillContent);
          const hasName = !synIdentity.name.startsWith("Unnamed_");
          if (!skillContent.includes("---") || !hasName) {
            logger.info(
              { skillFile: newSkillFile },
              "Adding metadata block to synthesized skill",
            );
            const intent =
              synIdentity.intent !== "general_procedure"
                ? synIdentity.intent.replace(/[^a-zA-Z0-9]/g, "")
                : "Synthesized";
            const PascalCase = intent.charAt(0).toUpperCase() + intent.slice(1);
            // Base36 timestamp suffix keeps generated names unique per run.
            const skillName = `${PascalCase}_${Date.now().toString(36)}`;

            const metadata = `---
name: ${skillName}
version: 1.0.0
author: YAMO Evolution
license: MIT
tags: synthesized, evolution, auto-generated
description: Auto-generated skill to handle: ${enrichedPrompt || topic}
---
`;

            // Prepend metadata if skill doesn't have it
            if (!skillContent.startsWith("---")) {
              skillContent = metadata + skillContent;
              // Write back to disk with proper metadata
              fs.writeFileSync(newSkillFile, skillContent, "utf8");
              logger.info(
                { skillFile: newSkillFile, skillName },
                "Added metadata block to synthesized skill",
              );
            }
          }

          // Passing newSkillFile tells ingestSkill the file already exists,
          // so it records source_file and skips re-writing to disk.
          const skill = await this.ingestSkill(
            skillContent,
            {
              source: "synthesized",
              trigger_topic: topic,
            },
            newSkillFile,
          );
          return {
            status: "success",
            analysis: "SkillCreator orchestrated evolution",
            skill_id: skill.id,
            skill_name: skill.name,
            yamo_text: skillContent,
          };
        }

        // Fallback if no new file found
        return {
          status: "success",
          analysis: "SkillCreator orchestrated evolution (no file detected)",
          skill_name: topic.split(" ")[0],
        };
      } catch (e: any) {
        logger.error({ err: e }, "SkillCreator agent failed");
        return {
          status: "error",
          error: e.message,
          analysis: "SkillCreator agent failed",
        };
      }
    }

    // SkillCreator is required for synthesis
    if (!(this as any)._kernel_execute) {
      throw new Error(
        "Kernel execution (_kernel_execute) is required for synthesis. Use YamoKernel instead of MemoryMesh directly.",
      );
    }

    // Should never reach here
    return {
      status: "error",
      analysis: "Unexpected state in synthesis",
    };
  }
|
|
956
|
+
|
|
957
|
+
/**
|
|
958
|
+
* Update reliability
|
|
959
|
+
*/
|
|
960
|
+
async updateSkillReliability(id: string, success: boolean): Promise<any> {
|
|
961
|
+
await this.init();
|
|
962
|
+
if (!this.skillTable) {
|
|
963
|
+
throw new Error("Skill table not initialized");
|
|
964
|
+
}
|
|
965
|
+
try {
|
|
966
|
+
const results = await this.skillTable
|
|
967
|
+
.query()
|
|
968
|
+
.filter(`id == '${id}'`)
|
|
969
|
+
.toArray();
|
|
970
|
+
if (results.length === 0) {
|
|
971
|
+
throw new Error(`Skill ${id} not found`);
|
|
972
|
+
}
|
|
973
|
+
const record: any = results[0];
|
|
974
|
+
const metadata = JSON.parse(record.metadata);
|
|
975
|
+
const adjustment = success ? 0.1 : -0.2;
|
|
976
|
+
metadata.reliability = Math.max(
|
|
977
|
+
0,
|
|
978
|
+
Math.min(1.0, (metadata.reliability || 0.5) + adjustment),
|
|
979
|
+
);
|
|
980
|
+
metadata.use_count = (metadata.use_count || 0) + 1;
|
|
981
|
+
metadata.last_used = new Date().toISOString();
|
|
982
|
+
await this.skillTable.update({
|
|
983
|
+
where: `id == '${id}'`,
|
|
984
|
+
values: { metadata: JSON.stringify(metadata) },
|
|
985
|
+
} as any);
|
|
986
|
+
return {
|
|
987
|
+
id,
|
|
988
|
+
reliability: metadata.reliability,
|
|
989
|
+
use_count: metadata.use_count,
|
|
990
|
+
};
|
|
991
|
+
} catch (error: any) {
|
|
992
|
+
throw new Error(`Failed to update skill reliability: ${error.message}`);
|
|
993
|
+
}
|
|
994
|
+
}
|
|
995
|
+
|
|
996
|
+
/**
|
|
997
|
+
* Prune skills
|
|
998
|
+
*/
|
|
999
|
+
async pruneSkills(threshold: number = 0.3): Promise<any> {
|
|
1000
|
+
await this.init();
|
|
1001
|
+
if (!this.skillTable) {
|
|
1002
|
+
throw new Error("Skill table not initialized");
|
|
1003
|
+
}
|
|
1004
|
+
try {
|
|
1005
|
+
const allSkills = await this.skillTable.query().toArray();
|
|
1006
|
+
let prunedCount = 0;
|
|
1007
|
+
for (const skill of allSkills) {
|
|
1008
|
+
const metadata = JSON.parse(skill.metadata);
|
|
1009
|
+
if (metadata.reliability < threshold) {
|
|
1010
|
+
await this.skillTable.delete(`id == '${skill.id}'`);
|
|
1011
|
+
prunedCount++;
|
|
1012
|
+
}
|
|
1013
|
+
}
|
|
1014
|
+
return {
|
|
1015
|
+
pruned_count: prunedCount,
|
|
1016
|
+
total_remaining: allSkills.length - prunedCount,
|
|
1017
|
+
};
|
|
1018
|
+
} catch (error: any) {
|
|
1019
|
+
throw new Error(`Pruning failed: ${error.message}`);
|
|
1020
|
+
}
|
|
1021
|
+
}
|
|
1022
|
+
|
|
1023
|
+
/**
|
|
1024
|
+
* List all synthesized skills
|
|
1025
|
+
* @param {Object} [options={}] - Search options
|
|
1026
|
+
* @returns {Promise<Array>} Normalized skill results
|
|
1027
|
+
*/
|
|
1028
|
+
async listSkills(options: any = {}): Promise<any[]> {
|
|
1029
|
+
await this.init();
|
|
1030
|
+
if (!this.skillTable) {
|
|
1031
|
+
return [];
|
|
1032
|
+
}
|
|
1033
|
+
|
|
1034
|
+
try {
|
|
1035
|
+
const limit = options.limit || 10;
|
|
1036
|
+
const results = await this.skillTable.query().limit(limit).toArray();
|
|
1037
|
+
|
|
1038
|
+
return results.map((r) => ({
|
|
1039
|
+
...r,
|
|
1040
|
+
score: 1.0, // Full score for direct listing
|
|
1041
|
+
// Parse metadata JSON string to object
|
|
1042
|
+
metadata:
|
|
1043
|
+
typeof r.metadata === "string" ? JSON.parse(r.metadata) : r.metadata,
|
|
1044
|
+
}));
|
|
1045
|
+
} catch (error: any) {
|
|
1046
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
1047
|
+
logger.error({ err: error }, "Skill list failed");
|
|
1048
|
+
}
|
|
1049
|
+
return [];
|
|
1050
|
+
}
|
|
1051
|
+
}
|
|
1052
|
+
|
|
1053
|
+
/**
|
|
1054
|
+
* Search for synthesized skills by semantic intent
|
|
1055
|
+
* @param {string} query - Search query (intent description)
|
|
1056
|
+
* @param {Object} [options={}] - Search options
|
|
1057
|
+
* @returns {Promise<Array>} Normalized skill results
|
|
1058
|
+
*/
|
|
1059
|
+
  async searchSkills(query: string, options: any = {}): Promise<any[]> {
    await this.init();
    if (!this.skillTable) {
      return [];
    }

    try {
      // 1. Check for explicit skill targeting (e.g., "Architect: ...")
      // The captured name is constrained to [a-zA-Z0-9_-], so the value
      // interpolated into the filter below cannot break out of the quotes.
      const explicitMatch = query.match(/^([a-zA-Z0-9_-]+):/);
      if (explicitMatch) {
        const targetName = explicitMatch[1];
        const directResults = await this.skillTable
          .query()
          .where(`name == '${targetName}'`)
          .limit(1)
          .toArray();

        if (directResults.length > 0) {
          return directResults.map((r) => ({
            ...r,
            score: 1.0, // Maximum score for explicit target
          }));
        }
        // No exact-name hit: fall through to hybrid search below.
      }

      // 2. Hybrid search: vector + keyword matching
      const limit = options.limit || 5;

      // 2a. Vector search (get more candidates for fusion)
      const vector = await this.embeddingFactory.embed(query);
      const vectorResults = await this.skillTable
        .search(vector)
        .limit(limit * 3)
        .toArray();

      // 2b. Keyword matching against skill fields (including tags)
      // Only vector-search candidates are keyword-scored; a skill absent from
      // the vector top-(limit*3) cannot be recovered by keywords alone.
      const queryTokens = this._tokenizeQuery(query);
      const keywordScores = new Map<string, number>();
      let maxKeywordScore = 0;

      for (const result of vectorResults) {
        let score = 0;
        const nameTokens = this._tokenizeQuery(result.name);
        const intentTokens = this._tokenizeQuery(result.intent || "");
        const tags = extractSkillTags(result.yamo_text);
        const tagTokens = tags.flatMap((t) => this._tokenizeQuery(t));
        const descTokens = this._tokenizeQuery(
          result.yamo_text.substring(0, 500),
        ); // First 500 chars

        // Token matching with field-based weights
        // Support both exact and partial matches (for compound words)
        // Each query token can contribute at most 10+7+5+1 = 23 points.
        for (const qToken of queryTokens) {
          // Exact or partial match in name
          if (
            nameTokens.some(
              (nt) =>
                nt === qToken || qToken.includes(nt) || nt.includes(qToken),
            )
          ) {
            score += 10.0; // Highest: name match
          }
          // Exact or partial match in tags
          if (
            tagTokens.some(
              (tt) =>
                tt === qToken || qToken.includes(tt) || tt.includes(qToken),
            )
          ) {
            score += 7.0; // High: tag match
          }
          // Exact match in intent
          if (intentTokens.some((it) => it === qToken)) {
            score += 5.0; // Medium: intent match
          }
          // Exact match in description
          if (descTokens.some((dt) => dt === qToken)) {
            score += 1.0; // Low: description match
          }
        }

        if (score > 0) {
          keywordScores.set(result.id, score);
          maxKeywordScore = Math.max(maxKeywordScore, score);
        }
      }

      // 2c. Combine scores using weighted fusion
      const fusedResults = vectorResults.map((r) => {
        // Normalize vector distance to [0, 1] similarity score
        // LanceDB cosine distance ranges from 0 (identical) to 2 (opposite)
        const rawDistance = r._distance !== undefined ? r._distance : 1.0;
        const vectorScore = Math.max(0, Math.min(1.0, 1 - rawDistance / 2));

        const keywordScore = keywordScores.get(r.id) || 0;

        // Normalize keyword score by max observed (or use fixed max to avoid division by zero)
        const normalizedKeyword =
          maxKeywordScore > 0 ? keywordScore / maxKeywordScore : 0;

        // Weighted combination: 70% keyword, 30% vector
        // Keywords get higher weight to prioritize exact matches
        const combinedScore = 0.7 * normalizedKeyword + 0.3 * vectorScore;

        return {
          ...r,
          score: combinedScore,
          _vectorScore: vectorScore,
          _keywordScore: keywordScore,
        };
      });

      // Sort by combined score and return top results
      // Don't normalize - we already calculated hybrid scores
      return fusedResults
        .sort((a, b) => b.score - a.score)
        .slice(0, limit)
        .map((r) => ({
          ...r,
          // Parse metadata JSON string to object for policy loading
          metadata:
            typeof r.metadata === "string"
              ? JSON.parse(r.metadata)
              : r.metadata,
        }))
        .map((r) => ({
          ...r,
          score: parseFloat(r.score.toFixed(2)), // Round for consistency
        }));
    } catch (error: any) {
      // Search is best-effort; report only in debug mode and return empty.
      if (process.env.YAMO_DEBUG === "true") {
        logger.error({ err: error }, "Skill search failed");
      }
      return [];
    }
  }
|
|
1195
|
+
|
|
1196
|
+
  /**
   * Get recent YAMO log blocks for the heartbeat, newest first.
   *
   * Retries up to 5 times with exponential backoff (500ms, 1s, 2s, 4s) when
   * the failure looks like a retryable LanceDB I/O problem, reconnecting and
   * re-opening the table between attempts to drop stale file handles.
   *
   * @param {Object} options - `limit` caps the number of rows (default 10).
   * @returns {Promise<Array>} Entries of `{ id, yamoText, timestamp }`;
   *   `[]` if the table is missing or all attempts fail.
   */
  async getYamoLog(options: any = {}): Promise<any[]> {
    if (!this.yamoTable) {
      return [];
    }
    const limit = options.limit || 10;
    const maxRetries = 5;

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        // orderBy might not be in LanceDB types but is supported in runtime
        const query = this.yamoTable.query();
        let results;
        try {
          results = await (query as any)
            .orderBy("timestamp", "desc")
            .limit(limit)
            .toArray();
        } catch (_e) {
          // Fallback if orderBy not supported
          results = await query.limit(1000).toArray(); // Get more and sort manually
        }

        // Sort newest first in memory (also re-sorts the orderBy path, which
        // is harmless; timestamps may be Date objects or numeric values).
        return results
          .sort((a: any, b: any) => {
            const tA =
              a.timestamp instanceof Date
                ? a.timestamp.getTime()
                : Number(a.timestamp);
            const tB =
              b.timestamp instanceof Date
                ? b.timestamp.getTime()
                : Number(b.timestamp);
            return tB - tA;
          })
          .slice(0, limit)
          .map((r: any) => ({
            id: r.id,
            yamoText: r.yamo_text,
            timestamp: r.timestamp,
          }));
      } catch (error: any) {
        const msg = error.message || "";
        // Heuristic classification of transient LanceDB I/O failures.
        const isRetryable =
          msg.includes("LanceError(IO)") ||
          msg.includes("next batch") ||
          msg.includes("No such file") ||
          msg.includes("busy");

        if (isRetryable && attempt < maxRetries) {
          // If we suspect stale table handle, try to refresh it
          try {
            // Re-open table to get fresh file handles
            const { createYamoTable } = await import("../yamo/schema.js");

            if (this.dbDir) {
              const db = await lancedb.connect(this.dbDir);
              this.yamoTable = await createYamoTable(db, "yamo_blocks");
              if (process.env.YAMO_DEBUG === "true") {
                logger.debug(
                  { attempt, msg: msg.substring(0, 100) },
                  "Refreshed yamoTable handle during retry",
                );
              }
            }
          } catch (e) {
            // Refresh failure is non-fatal; the retry proceeds with the old handle.
            logger.warn(
              { err: e },
              "Failed to refresh table handle during retry",
            );
          }

          const delay = 500 * Math.pow(2, attempt - 1); // 500ms, 1000ms, 2000ms, 4000ms
          await new Promise((resolve) => setTimeout(resolve, delay));
          continue;
        }

        // Only log warning on final failure
        if (attempt === maxRetries) {
          logger.warn({ err: error }, "Failed to get log after retries");
        } else if (!isRetryable) {
          // Non-retryable error
          logger.warn({ err: error }, "Failed to get log (non-retryable)");
          break;
        }
      }
    }
    return [];
  }
|
|
1289
|
+
|
|
1290
|
+
/**
|
|
1291
|
+
* Emit a YAMO block to the YAMO blocks table
|
|
1292
|
+
* @private
|
|
1293
|
+
*
|
|
1294
|
+
* Note: YAMO emission is non-critical - failures are logged but don't throw
|
|
1295
|
+
* to prevent disrupting the main operation.
|
|
1296
|
+
*/
|
|
1297
|
+
async _emitYamoBlock(
|
|
1298
|
+
operationType: string,
|
|
1299
|
+
memoryId: string | undefined,
|
|
1300
|
+
yamoText: string,
|
|
1301
|
+
): Promise<void> {
|
|
1302
|
+
if (!this.yamoTable) {
|
|
1303
|
+
return;
|
|
1304
|
+
}
|
|
1305
|
+
const yamoId = `yamo_${operationType}_${Date.now()}_${crypto.randomBytes(4).toString("hex")}`;
|
|
1306
|
+
try {
|
|
1307
|
+
await this.yamoTable.add([
|
|
1308
|
+
{
|
|
1309
|
+
id: yamoId,
|
|
1310
|
+
agent_id: this.agentId,
|
|
1311
|
+
operation_type: operationType,
|
|
1312
|
+
yamo_text: yamoText,
|
|
1313
|
+
timestamp: new Date(),
|
|
1314
|
+
block_hash: null,
|
|
1315
|
+
prev_hash: null,
|
|
1316
|
+
metadata: JSON.stringify({
|
|
1317
|
+
memory_id: memoryId || null,
|
|
1318
|
+
timestamp: new Date().toISOString(),
|
|
1319
|
+
}),
|
|
1320
|
+
},
|
|
1321
|
+
]);
|
|
1322
|
+
} catch (error) {
|
|
1323
|
+
// Log emission failures in debug mode
|
|
1324
|
+
// Emission is non-critical, so we don't throw
|
|
1325
|
+
if (process.env.YAMO_DEBUG === "true") {
|
|
1326
|
+
logger.warn({ err: error, operationType }, "YAMO emission failed");
|
|
1327
|
+
}
|
|
1328
|
+
}
|
|
1329
|
+
}
|
|
1330
|
+
|
|
1331
|
+
/**
|
|
1332
|
+
* Search memory using hybrid vector + keyword search with Reciprocal Rank Fusion (RRF).
|
|
1333
|
+
*
|
|
1334
|
+
* This method performs semantic search by combining:
|
|
1335
|
+
* 1. **Vector Search**: Uses embeddings to find semantically similar content
|
|
1336
|
+
* 2. **Keyword Search**: Uses BM25-style keyword matching
|
|
1337
|
+
* 3. **RRF Fusion**: Combines both result sets using Reciprocal Rank Fusion
|
|
1338
|
+
*
|
|
1339
|
+
* The RRF algorithm scores each document as: `sum(1 / (k + rank))` where k=60.
|
|
1340
|
+
* This gives higher scores to documents that rank well in BOTH searches.
|
|
1341
|
+
*
|
|
1342
|
+
* **Performance**: Uses adaptive sorting strategy
|
|
1343
|
+
* - Small datasets (≤ 2× limit): Full sort O(n log n)
|
|
1344
|
+
* - Large datasets: Partial selection sort O(n×k) where k=limit
|
|
1345
|
+
*
|
|
1346
|
+
* **Caching**: Results are cached for 5 minutes by default (configurable via options)
|
|
1347
|
+
*
|
|
1348
|
+
* @param query - The search query text
|
|
1349
|
+
* @param options - Search options
|
|
1350
|
+
* @param options.limit - Maximum results to return (default: 10)
|
|
1351
|
+
* @param options.filter - LanceDB filter expression (e.g., "type == 'preference'")
|
|
1352
|
+
* @param options.useCache - Enable/disable result caching (default: true)
|
|
1353
|
+
* @returns Promise with array of search results, sorted by relevance score
|
|
1354
|
+
*
|
|
1355
|
+
* @example
|
|
1356
|
+
* ```typescript
|
|
1357
|
+
* // Simple search
|
|
1358
|
+
* const results = await mesh.search("TypeScript preferences");
|
|
1359
|
+
*
|
|
1360
|
+
* // Search with filter
|
|
1361
|
+
* const code = await mesh.search("bug fix", { filter: "type == 'error'" });
|
|
1362
|
+
*
|
|
1363
|
+
* // Search with limit
|
|
1364
|
+
* const top3 = await mesh.search("security issues", { limit: 3 });
|
|
1365
|
+
* ```
|
|
1366
|
+
*
|
|
1367
|
+
* @throws {Error} If embedding generation fails
|
|
1368
|
+
* @throws {Error} If database client is not initialized
|
|
1369
|
+
*/
|
|
1370
|
+
  async search(query: string, options: any = {}): Promise<SearchResult[]> {
    await this.init();
    try {
      const limit = options.limit || 10;
      const filter = options.filter || null;
      const useCache = options.useCache !== undefined ? options.useCache : true;

      // Serve from cache when an identical (query, limit, filter) was seen.
      if (useCache) {
        const cacheKey = this._generateCacheKey(query, { limit, filter });
        const cached = this._getCachedResult(cacheKey);
        if (cached) {
          return cached;
        }
      }

      const vector = await this.embeddingFactory.embed(query);
      if (!this.client) {
        throw new Error("Database client not initialized");
      }
      // Over-fetch (2x limit) from both retrievers so fusion has candidates.
      const vectorResults: any[] = await this.client.search(vector, {
        limit: limit * 2,
        metric: "cosine",
        filter,
      });
      const keywordResults = this.keywordSearch.search(query, {
        limit: limit * 2,
      });

      // Reciprocal Rank Fusion (RRF): each document accumulates
      // 1 / (k + rank + 1) from every result list it appears in, so items
      // ranking well in BOTH searches score highest.
      const k = 60; // RRF constant
      const scores = new Map<string, number>();
      const docMap = new Map<string, any>();

      // Process vector results - O(m) where m = vectorResults.length
      for (let rank = 0; rank < vectorResults.length; rank++) {
        const doc = vectorResults[rank];
        const rrf = 1 / (k + rank + 1);
        scores.set(doc.id, (scores.get(doc.id) || 0) + rrf);
        docMap.set(doc.id, doc);
      }

      // Process keyword results - O(n) where n = keywordResults.length
      for (let rank = 0; rank < keywordResults.length; rank++) {
        const doc = keywordResults[rank];
        const rrf = 1 / (k + rank + 1);
        scores.set(doc.id, (scores.get(doc.id) || 0) + rrf);
        // Keyword-only hits get a minimal synthetic record (no vector doc).
        if (!docMap.has(doc.id)) {
          docMap.set(doc.id, {
            id: doc.id,
            content: doc.content,
            metadata: doc.metadata,
            score: 0,
            created_at: new Date().toISOString(),
          });
        }
      }

      // Select the top `limit` entries by fused score. Strategy is adaptive:
      // a full sort for small candidate sets, otherwise a partial selection
      // that keeps only a `limit`-sized running top list.
      const scoreEntries = Array.from(scores.entries());

      let mergedResults: SearchResult[];
      if (scoreEntries.length <= limit * 2) {
        // Small dataset: standard sort is fine
        mergedResults = scoreEntries
          .sort((a, b) => b[1] - a[1]) // O(n log n) but n is small
          .slice(0, limit)
          .map(([id, score]) => {
            const doc = docMap.get(id);
            return doc ? { ...doc, score } : null;
          })
          .filter((d): d is SearchResult => d !== null);
      } else {
        // Large dataset: keep a running top-`limit` list, re-sorting it on
        // each insertion/replacement. Roughly O(n * limit log limit) — cheap
        // because `limit` is small; avoids sorting all n candidates.
        const topK: [string, number][] = [];
        for (const entry of scoreEntries) {
          if (topK.length < limit) {
            topK.push(entry);
            // Keep topK sorted in descending order
            topK.sort((a, b) => b[1] - a[1]);
          } else if (entry[1] > topK[topK.length - 1][1]) {
            // Replace smallest in topK if current is larger
            topK[limit - 1] = entry;
            topK.sort((a, b) => b[1] - a[1]);
          }
        }
        mergedResults = topK
          .map(([id, score]) => {
            const doc = docMap.get(id);
            return doc ? { ...doc, score } : null;
          })
          .filter((d): d is SearchResult => d !== null);
      }

      // NOTE(review): _normalizeScores replaces the RRF score with a
      // similarity derived from each doc's _distance (keyword-only docs have
      // none and default to 0.5). RRF thus decides selection/order while the
      // reported score is vector similarity — presumably intentional; confirm.
      const normalizedResults = this._normalizeScores(mergedResults);
      if (useCache) {
        const cacheKey = this._generateCacheKey(query, { limit, filter });
        this._cacheResult(cacheKey, normalizedResults);
      }

      // Fire-and-forget audit emission; never blocks or fails the search.
      if (this.enableYamo) {
        this._emitYamoBlock(
          "recall",
          undefined,
          YamoEmitter.buildRecallBlock({
            query,
            resultCount: normalizedResults.length,
            limit,
            agentId: this.agentId,
            searchType: "hybrid",
          }),
        ).catch((error) => {
          // Log emission failures in debug mode but don't throw
          if (process.env.YAMO_DEBUG === "true") {
            logger.warn({ err: error }, "Failed to emit YAMO block (recall)");
          }
        });
      }

      return normalizedResults;
    } catch (error) {
      // Re-wrap non-Error throwables so callers always receive an Error.
      throw error instanceof Error ? error : new Error(String(error));
    }
  }
|
|
1497
|
+
|
|
1498
|
+
_normalizeScores(results: SearchResult[]): SearchResult[] {
|
|
1499
|
+
if (results.length === 0) {
|
|
1500
|
+
return [];
|
|
1501
|
+
}
|
|
1502
|
+
|
|
1503
|
+
return results.map((r) => {
|
|
1504
|
+
// LanceDB _distance is squared L2 or cosine distance
|
|
1505
|
+
// For cosine distance in MiniLM, it ranges from 0 to 2
|
|
1506
|
+
const rawDistance = r._distance !== undefined ? r._distance : 1.0;
|
|
1507
|
+
// Convert to similarity score [0, 1]
|
|
1508
|
+
const score = Math.max(0, Math.min(1.0, 1 - rawDistance / 2));
|
|
1509
|
+
return {
|
|
1510
|
+
...r,
|
|
1511
|
+
score: parseFloat(score.toFixed(2)),
|
|
1512
|
+
};
|
|
1513
|
+
});
|
|
1514
|
+
}
|
|
1515
|
+
|
|
1516
|
+
/**
|
|
1517
|
+
* Tokenize query for keyword matching (private helper for searchSkills)
|
|
1518
|
+
* Converts text to lowercase tokens, filtering out short tokens and punctuation.
|
|
1519
|
+
* Handles camelCase/PascalCase by splitting on uppercase letters.
|
|
1520
|
+
*/
|
|
1521
|
+
private _tokenizeQuery(text: string): string[] {
|
|
1522
|
+
return text
|
|
1523
|
+
.replace(/([a-z])([A-Z])/g, "$1 $2") // Split camelCase: "targetSkill" → "target Skill"
|
|
1524
|
+
.toLowerCase()
|
|
1525
|
+
.replace(/[^\w\s]/g, "")
|
|
1526
|
+
.split(/\s+/)
|
|
1527
|
+
.filter((t) => t.length > 2); // Filter out very short tokens
|
|
1528
|
+
}
|
|
1529
|
+
|
|
1530
|
+
/**
 * Render search results as an LLM-facing attention-weighted context block.
 *
 * Each memory is emitted with its importance score and type/source
 * metadata. Metadata stored as a JSON string is parsed; missing or
 * null metadata falls back to an empty object (previously this threw
 * a TypeError on `metadata.type`).
 *
 * @param results - Scored search hits (as produced by _normalizeScores).
 * @returns A formatted prompt string, or a "no memories" sentinel message.
 */
formatResults(results: SearchResult[]): string {
  if (results.length === 0) {
    return "No relevant memories found.";
  }
  let output = `[ATTENTION DIRECTIVE]\nThe following [MEMORY CONTEXT] is weighted by relevance.
- ALIGN attention to entries with [IMPORTANCE >= 0.8].
- TREAT entries with [IMPORTANCE <= 0.4] as auxiliary background info.

[MEMORY CONTEXT]`;
  results.forEach((res, i) => {
    const parsed =
      typeof res.metadata === "string"
        ? JSON.parse(res.metadata)
        : res.metadata;
    // Guard against records stored without metadata.
    const metadata = parsed ?? {};
    output += `\n\n--- MEMORY ${i + 1}: ${res.id} [IMPORTANCE: ${(res as any).score}] ---\nType: ${metadata.type || "event"} | Source: ${metadata.source || "unknown"}\n${res.content}`;
  });
  return output;
}
|
|
1548
|
+
|
|
1549
|
+
async get(id: string): Promise<any> {
|
|
1550
|
+
await this.init();
|
|
1551
|
+
if (!this.client) {
|
|
1552
|
+
throw new Error("Database client not initialized");
|
|
1553
|
+
}
|
|
1554
|
+
const record = await this.client.getById(id);
|
|
1555
|
+
return record
|
|
1556
|
+
? {
|
|
1557
|
+
id: record.id,
|
|
1558
|
+
content: record.content,
|
|
1559
|
+
metadata: record.metadata,
|
|
1560
|
+
created_at: record.created_at,
|
|
1561
|
+
updated_at: record.updated_at,
|
|
1562
|
+
}
|
|
1563
|
+
: null;
|
|
1564
|
+
}
|
|
1565
|
+
|
|
1566
|
+
async getAll(options: any = {}): Promise<any> {
|
|
1567
|
+
await this.init();
|
|
1568
|
+
if (!this.client) {
|
|
1569
|
+
throw new Error("Database client not initialized");
|
|
1570
|
+
}
|
|
1571
|
+
return this.client.getAll(options);
|
|
1572
|
+
}
|
|
1573
|
+
|
|
1574
|
+
async stats(): Promise<any> {
|
|
1575
|
+
await this.init();
|
|
1576
|
+
if (!this.enableMemory || !this.client) {
|
|
1577
|
+
return {
|
|
1578
|
+
count: 0,
|
|
1579
|
+
totalMemories: 0,
|
|
1580
|
+
totalSkills: 0,
|
|
1581
|
+
tableName: "N/A",
|
|
1582
|
+
uri: "N/A",
|
|
1583
|
+
isConnected: false,
|
|
1584
|
+
embedding: { configured: false, primary: null, fallbacks: [] },
|
|
1585
|
+
status: "disabled",
|
|
1586
|
+
};
|
|
1587
|
+
}
|
|
1588
|
+
const dbStats = await this.client.getStats();
|
|
1589
|
+
|
|
1590
|
+
// Enrich embedding stats with total persisted count
|
|
1591
|
+
const embeddingStats = this.embeddingFactory.getStats();
|
|
1592
|
+
if (embeddingStats.primary) {
|
|
1593
|
+
(embeddingStats.primary as any).totalPersisted = dbStats.count;
|
|
1594
|
+
}
|
|
1595
|
+
|
|
1596
|
+
// Get skill count
|
|
1597
|
+
let totalSkills = 0;
|
|
1598
|
+
if (this.skillTable) {
|
|
1599
|
+
try {
|
|
1600
|
+
const skills = await this.skillTable.query().limit(10000).toArray();
|
|
1601
|
+
totalSkills = skills.length;
|
|
1602
|
+
} catch (_e) {
|
|
1603
|
+
// Ignore errors
|
|
1604
|
+
}
|
|
1605
|
+
}
|
|
1606
|
+
|
|
1607
|
+
return {
|
|
1608
|
+
count: dbStats.count,
|
|
1609
|
+
totalMemories: dbStats.count,
|
|
1610
|
+
totalSkills,
|
|
1611
|
+
tableName: dbStats.tableName,
|
|
1612
|
+
uri: dbStats.uri,
|
|
1613
|
+
isConnected: dbStats.isConnected,
|
|
1614
|
+
embedding: embeddingStats,
|
|
1615
|
+
};
|
|
1616
|
+
}
|
|
1617
|
+
|
|
1618
|
+
/**
 * Build the ordered embedding-provider config list from environment variables.
 *
 * The primary provider comes from EMBEDDING_MODEL_TYPE / EMBEDDING_MODEL_NAME
 * / EMBEDDING_DIMENSION (defaulting to local MiniLM at 384 dims). When the
 * primary is non-local, a local MiniLM fallback is appended at priority 2.
 *
 * @returns Provider configs sorted by priority (lowest number first).
 */
_parseEmbeddingConfig(): any[] {
  // Always pass a radix: parseInt without one is a classic footgun.
  const parsedDimension = Number.parseInt(
    process.env.EMBEDDING_DIMENSION || "384",
    10,
  );
  const configs = [
    {
      modelType: process.env.EMBEDDING_MODEL_TYPE || "local",
      modelName:
        process.env.EMBEDDING_MODEL_NAME || "Xenova/all-MiniLM-L6-v2",
      // Fall back to the MiniLM default when the env var is not a number.
      dimension: Number.isFinite(parsedDimension) ? parsedDimension : 384,
      priority: 1,
      apiKey:
        process.env.EMBEDDING_API_KEY ||
        process.env.OPENAI_API_KEY ||
        process.env.COHERE_API_KEY,
    },
  ];
  if (configs[0].modelType !== "local") {
    // Local fallback so embedding still works if the remote provider fails.
    configs.push({
      modelType: "local",
      modelName: "Xenova/all-MiniLM-L6-v2",
      dimension: 384,
      priority: 2,
      apiKey: undefined,
    });
  }
  return configs;
}
|
|
1643
|
+
|
|
1644
|
+
/**
|
|
1645
|
+
* Close database connections and release resources
|
|
1646
|
+
*
|
|
1647
|
+
* This should be called when done with the MemoryMesh to properly:
|
|
1648
|
+
* - Close LanceDB connections
|
|
1649
|
+
* - Release file handles
|
|
1650
|
+
* - Clean up resources
|
|
1651
|
+
*
|
|
1652
|
+
* Important for tests and cleanup to prevent connection leaks.
|
|
1653
|
+
*
|
|
1654
|
+
* @returns {Promise<void>}
|
|
1655
|
+
*
|
|
1656
|
+
* @example
|
|
1657
|
+
* ```typescript
|
|
1658
|
+
* const mesh = new MemoryMesh();
|
|
1659
|
+
* await mesh.init();
|
|
1660
|
+
* // ... use mesh ...
|
|
1661
|
+
* await mesh.close(); // Clean up
|
|
1662
|
+
* ```
|
|
1663
|
+
*/
|
|
1664
|
+
// eslint-disable-next-line @typescript-eslint/require-await
|
|
1665
|
+
async close(): Promise<void> {
|
|
1666
|
+
try {
|
|
1667
|
+
// Close LanceDB client connection
|
|
1668
|
+
if (this.client) {
|
|
1669
|
+
this.client.disconnect();
|
|
1670
|
+
this.client = null;
|
|
1671
|
+
}
|
|
1672
|
+
|
|
1673
|
+
// Clear extension table references
|
|
1674
|
+
this.yamoTable = null;
|
|
1675
|
+
this.skillTable = null;
|
|
1676
|
+
|
|
1677
|
+
// Reset initialization state
|
|
1678
|
+
this.isInitialized = false;
|
|
1679
|
+
|
|
1680
|
+
logger.debug("MemoryMesh closed successfully");
|
|
1681
|
+
} catch (error) {
|
|
1682
|
+
const e = error instanceof Error ? error : new Error(String(error));
|
|
1683
|
+
logger.warn({ err: e }, "Error closing MemoryMesh");
|
|
1684
|
+
// Don't throw - cleanup should always succeed
|
|
1685
|
+
}
|
|
1686
|
+
}
|
|
1687
|
+
}
|
|
1688
|
+
|
|
1689
|
+
/**
|
|
1690
|
+
* Main CLI handler
|
|
1691
|
+
*/
|
|
1692
|
+
export async function run() {
|
|
1693
|
+
let action, input;
|
|
1694
|
+
if (process.argv.length > 3) {
|
|
1695
|
+
action = process.argv[2];
|
|
1696
|
+
try {
|
|
1697
|
+
input = JSON.parse(process.argv[3]);
|
|
1698
|
+
} catch (e: any) {
|
|
1699
|
+
logger.error({ err: e }, "Invalid JSON argument");
|
|
1700
|
+
process.exit(1);
|
|
1701
|
+
}
|
|
1702
|
+
} else {
|
|
1703
|
+
try {
|
|
1704
|
+
const rawInput = fs.readFileSync(0, "utf8");
|
|
1705
|
+
input = JSON.parse(rawInput);
|
|
1706
|
+
action = input.action || action;
|
|
1707
|
+
} catch (_e) {
|
|
1708
|
+
logger.error("No input provided");
|
|
1709
|
+
process.exit(1);
|
|
1710
|
+
}
|
|
1711
|
+
}
|
|
1712
|
+
|
|
1713
|
+
const mesh = new MemoryMesh({
|
|
1714
|
+
llmProvider:
|
|
1715
|
+
process.env.LLM_PROVIDER ||
|
|
1716
|
+
(process.env.OPENAI_API_KEY ? "openai" : "ollama"),
|
|
1717
|
+
llmApiKey: process.env.LLM_API_KEY || process.env.OPENAI_API_KEY,
|
|
1718
|
+
llmModel: process.env.LLM_MODEL,
|
|
1719
|
+
});
|
|
1720
|
+
|
|
1721
|
+
try {
|
|
1722
|
+
if (action === "ingest" || action === "store") {
|
|
1723
|
+
const record = await mesh.add(input.content, input.metadata || {});
|
|
1724
|
+
process.stdout.write(
|
|
1725
|
+
`[MemoryMesh] Ingested record ${record.id}\n${JSON.stringify({ status: "ok", record })}\n`,
|
|
1726
|
+
);
|
|
1727
|
+
} else if (action === "search") {
|
|
1728
|
+
const results = await mesh.search(input.query, {
|
|
1729
|
+
limit: input.limit || 10,
|
|
1730
|
+
filter: input.filter || null,
|
|
1731
|
+
});
|
|
1732
|
+
process.stdout.write(
|
|
1733
|
+
`[MemoryMesh] Found ${results.length} matches.\n**Formatted Context**:\n\`\`\`yamo\n${mesh.formatResults(results)}\n\`\`\`\n**Output**: memory_results.json\n\`\`\`json\n${JSON.stringify(results, null, 2)}\n\`\`\`\n${JSON.stringify({ status: "ok", results })}\n`,
|
|
1734
|
+
);
|
|
1735
|
+
} else if (action === "synthesize") {
|
|
1736
|
+
const result = await mesh.synthesize({
|
|
1737
|
+
topic: input.topic,
|
|
1738
|
+
lookback: input.limit || 20,
|
|
1739
|
+
});
|
|
1740
|
+
process.stdout.write(
|
|
1741
|
+
`[MemoryMesh] Synthesis Outcome: ${result.status}\n${JSON.stringify(result, null, 2)}\n`,
|
|
1742
|
+
);
|
|
1743
|
+
} else if (action === "ingest-skill") {
|
|
1744
|
+
const record = await mesh.ingestSkill(
|
|
1745
|
+
input.yamo_text,
|
|
1746
|
+
input.metadata || {},
|
|
1747
|
+
);
|
|
1748
|
+
process.stdout.write(
|
|
1749
|
+
`[MemoryMesh] Ingested skill ${record.name} (${record.id})\n${JSON.stringify({ status: "ok", record })}\n`,
|
|
1750
|
+
);
|
|
1751
|
+
} else if (action === "search-skills") {
|
|
1752
|
+
await mesh.init();
|
|
1753
|
+
const vector = await mesh.embeddingFactory.embed(input.query);
|
|
1754
|
+
if (mesh.skillTable) {
|
|
1755
|
+
const results = await mesh.skillTable
|
|
1756
|
+
.search(vector)
|
|
1757
|
+
.limit(input.limit || 5)
|
|
1758
|
+
.toArray();
|
|
1759
|
+
process.stdout.write(
|
|
1760
|
+
`[MemoryMesh] Found ${results.length} synthesized skills.\n${JSON.stringify({ status: "ok", results }, null, 2)}\n`,
|
|
1761
|
+
);
|
|
1762
|
+
} else {
|
|
1763
|
+
process.stdout.write(`[MemoryMesh] Skill table not initialized.\n`);
|
|
1764
|
+
}
|
|
1765
|
+
} else if (action === "skill-feedback") {
|
|
1766
|
+
const result = await mesh.updateSkillReliability(
|
|
1767
|
+
input.id,
|
|
1768
|
+
input.success !== false,
|
|
1769
|
+
);
|
|
1770
|
+
process.stdout.write(
|
|
1771
|
+
`[MemoryMesh] Feedback recorded for ${input.id}: Reliability now ${result.reliability}\n${JSON.stringify({ status: "ok", ...result })}\n`,
|
|
1772
|
+
);
|
|
1773
|
+
} else if (action === "skill-prune") {
|
|
1774
|
+
const result = await mesh.pruneSkills(input.threshold || 0.3);
|
|
1775
|
+
process.stdout.write(
|
|
1776
|
+
`[MemoryMesh] Pruning complete. Removed ${result.pruned_count} unreliable skills.\n${JSON.stringify({ status: "ok", ...result })}\n`,
|
|
1777
|
+
);
|
|
1778
|
+
} else if (action === "stats") {
|
|
1779
|
+
process.stdout.write(
|
|
1780
|
+
`[MemoryMesh] Database Statistics:\n${JSON.stringify({ status: "ok", stats: await mesh.stats() }, null, 2)}\n`,
|
|
1781
|
+
);
|
|
1782
|
+
} else {
|
|
1783
|
+
logger.error({ action }, "Unknown action");
|
|
1784
|
+
process.exit(1);
|
|
1785
|
+
}
|
|
1786
|
+
} catch (error) {
|
|
1787
|
+
const errorResponse = handleError(error, {
|
|
1788
|
+
action,
|
|
1789
|
+
input: { ...input, content: input.content ? "[REDACTED]" : undefined },
|
|
1790
|
+
});
|
|
1791
|
+
logger.error({ err: error, errorResponse }, "Fatal Error");
|
|
1792
|
+
process.exit(1);
|
|
1793
|
+
}
|
|
1794
|
+
}
|
|
1795
|
+
|
|
1796
|
+
// Default export so consumers can `import MemoryMesh from "@yamo/memory-mesh"`.
export default MemoryMesh;
|
|
1797
|
+
|
|
1798
|
+
if (process.argv[1] === fileURLToPath(import.meta.url)) {
|
|
1799
|
+
run().catch((err) => {
|
|
1800
|
+
logger.error({ err }, "Fatal Error");
|
|
1801
|
+
process.exit(1);
|
|
1802
|
+
});
|
|
1803
|
+
}
|