lancedb-opencode-pro 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +103 -4
- package/dist/config.js +8 -0
- package/dist/extract.d.ts +2 -2
- package/dist/extract.js +9 -6
- package/dist/index.d.ts +1 -1
- package/dist/index.js +196 -12
- package/dist/store.d.ts +12 -1
- package/dist/store.js +298 -5
- package/dist/types.d.ts +68 -0
- package/package.json +3 -2
package/README.md
CHANGED
|
@@ -41,7 +41,11 @@ If you already use other plugins, keep them and append `"lancedb-opencode-pro"`.
|
|
|
41
41
|
"mode": "hybrid",
|
|
42
42
|
"vectorWeight": 0.7,
|
|
43
43
|
"bm25Weight": 0.3,
|
|
44
|
-
"minScore": 0.2
|
|
44
|
+
"minScore": 0.2,
|
|
45
|
+
"rrfK": 60,
|
|
46
|
+
"recencyBoost": true,
|
|
47
|
+
"recencyHalfLifeHours": 72,
|
|
48
|
+
"importanceWeight": 0.4
|
|
45
49
|
},
|
|
46
50
|
"includeGlobalScope": true,
|
|
47
51
|
"minCaptureChars": 80,
|
|
@@ -173,7 +177,11 @@ Create `~/.config/opencode/lancedb-opencode-pro.json`:
|
|
|
173
177
|
"mode": "hybrid",
|
|
174
178
|
"vectorWeight": 0.7,
|
|
175
179
|
"bm25Weight": 0.3,
|
|
176
|
-
"minScore": 0.2
|
|
180
|
+
"minScore": 0.2,
|
|
181
|
+
"rrfK": 60,
|
|
182
|
+
"recencyBoost": true,
|
|
183
|
+
"recencyHalfLifeHours": 72,
|
|
184
|
+
"importanceWeight": 0.4
|
|
177
185
|
},
|
|
178
186
|
"includeGlobalScope": true,
|
|
179
187
|
"minCaptureChars": 80,
|
|
@@ -216,6 +224,10 @@ Supported environment variables:
|
|
|
216
224
|
- `LANCEDB_OPENCODE_PRO_VECTOR_WEIGHT`
|
|
217
225
|
- `LANCEDB_OPENCODE_PRO_BM25_WEIGHT`
|
|
218
226
|
- `LANCEDB_OPENCODE_PRO_MIN_SCORE`
|
|
227
|
+
- `LANCEDB_OPENCODE_PRO_RRF_K`
|
|
228
|
+
- `LANCEDB_OPENCODE_PRO_RECENCY_BOOST`
|
|
229
|
+
- `LANCEDB_OPENCODE_PRO_RECENCY_HALF_LIFE_HOURS`
|
|
230
|
+
- `LANCEDB_OPENCODE_PRO_IMPORTANCE_WEIGHT`
|
|
219
231
|
- `LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE`
|
|
220
232
|
- `LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS`
|
|
221
233
|
- `LANCEDB_OPENCODE_PRO_MAX_ENTRIES_PER_SCOPE`
|
|
@@ -230,8 +242,90 @@ Supported environment variables:
|
|
|
230
242
|
- `memory_delete`
|
|
231
243
|
- `memory_clear`
|
|
232
244
|
- `memory_stats`
|
|
245
|
+
- `memory_feedback_missing`
|
|
246
|
+
- `memory_feedback_wrong`
|
|
247
|
+
- `memory_feedback_useful`
|
|
248
|
+
- `memory_effectiveness`
|
|
233
249
|
- `memory_port_plan`
|
|
234
250
|
|
|
251
|
+
## Memory Effectiveness Feedback
|
|
252
|
+
|
|
253
|
+
The provider can now record structured feedback about long-memory quality in addition to storing and recalling memories.
|
|
254
|
+
|
|
255
|
+
- `memory_feedback_missing`: report information that should have been stored but was missed
|
|
256
|
+
- `memory_feedback_wrong`: report a stored memory that should not have been kept
|
|
257
|
+
- `memory_feedback_useful`: report whether a recalled memory was helpful
|
|
258
|
+
- `memory_effectiveness`: return machine-readable capture, recall, and feedback metrics for the active scope
|
|
259
|
+
|
|
260
|
+
Use `memory_search` or recalled memory ids from injected context when you need to reference a specific memory entry in feedback.
|
|
261
|
+
|
|
262
|
+
### Viewing Metrics
|
|
263
|
+
|
|
264
|
+
Use `memory_effectiveness` to inspect machine-readable effectiveness data for the active scope.
|
|
265
|
+
|
|
266
|
+
```text
|
|
267
|
+
memory_effectiveness
|
|
268
|
+
```
|
|
269
|
+
|
|
270
|
+
Example output:
|
|
271
|
+
|
|
272
|
+
```json
|
|
273
|
+
{
|
|
274
|
+
"scope": "project:my-project",
|
|
275
|
+
"totalEvents": 12,
|
|
276
|
+
"capture": {
|
|
277
|
+
"considered": 4,
|
|
278
|
+
"stored": 3,
|
|
279
|
+
"skipped": 1,
|
|
280
|
+
"successRate": 0.75,
|
|
281
|
+
"skipReasons": {
|
|
282
|
+
"below-min-chars": 1
|
|
283
|
+
}
|
|
284
|
+
},
|
|
285
|
+
"recall": {
|
|
286
|
+
"requested": 3,
|
|
287
|
+
"injected": 2,
|
|
288
|
+
"returnedResults": 2,
|
|
289
|
+
"hitRate": 0.67,
|
|
290
|
+
"injectionRate": 0.67
|
|
291
|
+
},
|
|
292
|
+
"feedback": {
|
|
293
|
+
"missing": 1,
|
|
294
|
+
"wrong": 0,
|
|
295
|
+
"useful": {
|
|
296
|
+
"positive": 2,
|
|
297
|
+
"negative": 0,
|
|
298
|
+
"helpfulRate": 1
|
|
299
|
+
},
|
|
300
|
+
"falsePositiveRate": 0,
|
|
301
|
+
"falseNegativeRate": 0.25
|
|
302
|
+
}
|
|
303
|
+
}
|
|
304
|
+
```
|
|
305
|
+
|
|
306
|
+
Key fields:
|
|
307
|
+
|
|
308
|
+
- `capture.successRate`: how often a considered candidate was stored.
|
|
309
|
+
- `recall.hitRate`: how often a recall request returned at least one result.
|
|
310
|
+
- `feedback.falsePositiveRate`: wrong-memory reports divided by stored memories.
|
|
311
|
+
- `feedback.falseNegativeRate`: missing-memory reports relative to capture attempts.
|
|
312
|
+
|
|
313
|
+
### Interpreting Low-Feedback Results
|
|
314
|
+
|
|
315
|
+
In real OpenCode usage, auto-capture and recall happen in the background, so explicit `memory_feedback_*` events are often sparse.
|
|
316
|
+
|
|
317
|
+
- Treat `capture.*` and `recall.*` as system-health metrics: they show whether the memory pipeline is running.
|
|
318
|
+
- Treat repeated-context reduction, clarification burden, manual memory rescue, correction signals, and sampled audits as product-value signals: they show whether memory actually helped the user.
|
|
319
|
+
- Treat `feedback.* = 0` as insufficient evidence, not proof that memory quality is good.
|
|
320
|
+
- Treat a high `recall.hitRate` or `recall.injectionRate` as recall availability only; those values do not prove usefulness by themselves.
|
|
321
|
+
|
|
322
|
+
Recommended review order in low-feedback environments:
|
|
323
|
+
|
|
324
|
+
1. Check `capture.successRate`, `capture.skipReasons`, `recall.hitRate`, and `recall.injectionRate` for operational health.
|
|
325
|
+
2. Review whether users repeated background context less often or needed fewer clarification turns.
|
|
326
|
+
3. Check whether users still needed manual rescue through `memory_search` or issued correction-like responses.
|
|
327
|
+
4. Run a bounded audit of recalled memories or skipped captures before concluding the system is helping.
|
|
328
|
+
|
|
235
329
|
## OpenAI Embedding Configuration
|
|
236
330
|
|
|
237
331
|
Default behavior stays on Ollama. To use OpenAI embeddings, set `embedding.provider` to `openai` and provide API key + model.
|
|
@@ -252,7 +346,11 @@ Example sidecar:
|
|
|
252
346
|
"mode": "hybrid",
|
|
253
347
|
"vectorWeight": 0.7,
|
|
254
348
|
"bm25Weight": 0.3,
|
|
255
|
-
"minScore": 0.2
|
|
349
|
+
"minScore": 0.2,
|
|
350
|
+
"rrfK": 60,
|
|
351
|
+
"recencyBoost": true,
|
|
352
|
+
"recencyHalfLifeHours": 72,
|
|
353
|
+
"importanceWeight": 0.4
|
|
256
354
|
},
|
|
257
355
|
"includeGlobalScope": true,
|
|
258
356
|
"minCaptureChars": 80,
|
|
@@ -362,9 +460,10 @@ The project provides layered validation workflows that can run locally or inside
|
|
|
362
460
|
|---|---|
|
|
363
461
|
| `npm run test:foundation` | Write-read persistence, scope isolation, vector compatibility, timestamp ordering |
|
|
364
462
|
| `npm run test:regression` | Auto-capture extraction, search output shape, delete/clear safety, pruning |
|
|
463
|
+
| `npm run test:effectiveness` | Foundation + regression workflows covering effectiveness events, feedback commands, and summary output |
|
|
365
464
|
| `npm run test:retrieval` | Recall@K and Robustness-δ@K against synthetic fixtures |
|
|
366
465
|
| `npm run benchmark:latency` | Search p50/p99, insert avg, list avg with hard-gate enforcement |
|
|
367
|
-
| `npm run verify` | Typecheck + build +
|
|
466
|
+
| `npm run verify` | Typecheck + build + effectiveness workflow + retrieval (quick release check) |
|
|
368
467
|
| `npm run verify:full` | All of the above + benchmark + `npm pack` (full release gate) |
|
|
369
468
|
|
|
370
469
|
Threshold policy and benchmark profiles are documented in `docs/benchmark-thresholds.md`.
|
package/dist/config.js
CHANGED
|
@@ -20,6 +20,10 @@ export function resolveMemoryConfig(config, worktree) {
|
|
|
20
20
|
const weightSum = vectorWeight + bm25Weight;
|
|
21
21
|
const normalizedVectorWeight = weightSum > 0 ? vectorWeight / weightSum : 0.7;
|
|
22
22
|
const normalizedBm25Weight = weightSum > 0 ? bm25Weight / weightSum : 0.3;
|
|
23
|
+
const rrfK = Math.max(1, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_RRF_K ?? retrievalRaw.rrfK, 60)));
|
|
24
|
+
const recencyBoost = toBoolean(process.env.LANCEDB_OPENCODE_PRO_RECENCY_BOOST ?? retrievalRaw.recencyBoost, true);
|
|
25
|
+
const recencyHalfLifeHours = Math.max(1, toNumber(process.env.LANCEDB_OPENCODE_PRO_RECENCY_HALF_LIFE_HOURS ?? retrievalRaw.recencyHalfLifeHours, 72));
|
|
26
|
+
const importanceWeight = clamp(toNumber(process.env.LANCEDB_OPENCODE_PRO_IMPORTANCE_WEIGHT ?? retrievalRaw.importanceWeight, 0.4), 0, 2);
|
|
23
27
|
const embeddingProvider = resolveEmbeddingProvider(firstString(process.env.LANCEDB_OPENCODE_PRO_EMBEDDING_PROVIDER, embeddingRaw.provider));
|
|
24
28
|
const embeddingModel = embeddingProvider === "openai"
|
|
25
29
|
? firstString(process.env.LANCEDB_OPENCODE_PRO_OPENAI_MODEL, process.env.LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL, embeddingRaw.model)
|
|
@@ -49,6 +53,10 @@ export function resolveMemoryConfig(config, worktree) {
|
|
|
49
53
|
vectorWeight: normalizedVectorWeight,
|
|
50
54
|
bm25Weight: normalizedBm25Weight,
|
|
51
55
|
minScore: clamp(toNumber(process.env.LANCEDB_OPENCODE_PRO_MIN_SCORE ?? retrievalRaw.minScore, 0.2), 0, 1),
|
|
56
|
+
rrfK,
|
|
57
|
+
recencyBoost,
|
|
58
|
+
recencyHalfLifeHours,
|
|
59
|
+
importanceWeight,
|
|
52
60
|
},
|
|
53
61
|
includeGlobalScope: toBoolean(process.env.LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE ?? raw.includeGlobalScope, true),
|
|
54
62
|
minCaptureChars: Math.max(30, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS ?? raw.minCaptureChars, 80))),
|
package/dist/extract.d.ts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import type {
|
|
2
|
-
export declare function extractCaptureCandidate(text: string, minChars: number):
|
|
1
|
+
import type { CaptureCandidateResult } from "./types.js";
|
|
2
|
+
export declare function extractCaptureCandidate(text: string, minChars: number): CaptureCandidateResult;
|
package/dist/extract.js
CHANGED
|
@@ -14,18 +14,21 @@ const FACT_SIGNALS = ["because", "root cause", "原因", "由於"];
|
|
|
14
14
|
const PREF_SIGNALS = ["prefer", "preference", "偏好", "習慣"];
|
|
15
15
|
export function extractCaptureCandidate(text, minChars) {
|
|
16
16
|
const normalized = text.trim();
|
|
17
|
-
if (normalized.length < minChars)
|
|
18
|
-
return null;
|
|
17
|
+
if (normalized.length < minChars) {
|
|
18
|
+
return { candidate: null, skipReason: "below-min-chars" };
|
|
19
|
+
}
|
|
19
20
|
const lower = normalized.toLowerCase();
|
|
20
21
|
if (!POSITIVE_SIGNALS.some((signal) => lower.includes(signal.toLowerCase()))) {
|
|
21
|
-
return null;
|
|
22
|
+
return { candidate: null, skipReason: "no-positive-signal" };
|
|
22
23
|
}
|
|
23
24
|
const category = classifyCategory(lower);
|
|
24
25
|
const importance = category === "decision" ? 0.9 : category === "fact" ? 0.75 : 0.65;
|
|
25
26
|
return {
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
27
|
+
candidate: {
|
|
28
|
+
text: clipText(normalized, 1200),
|
|
29
|
+
category,
|
|
30
|
+
importance,
|
|
31
|
+
},
|
|
29
32
|
};
|
|
30
33
|
}
|
|
31
34
|
function classifyCategory(text) {
|
package/dist/index.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
1
|
import type { Plugin } from "@opencode-ai/plugin";
|
|
2
2
|
declare const plugin: Plugin;
|
|
3
3
|
export default plugin;
|
|
4
|
-
export type {
|
|
4
|
+
export type { EffectivenessSummary, FeedbackEvent, MemoryEffectivenessEvent, MemoryRecord, MemoryRuntimeConfig, RecallEvent, SearchResult, } from "./types.js";
|
package/dist/index.js
CHANGED
|
@@ -56,12 +56,29 @@ const plugin = async (input) => {
|
|
|
56
56
|
vectorWeight: state.config.retrieval.mode === "vector" ? 1 : state.config.retrieval.vectorWeight,
|
|
57
57
|
bm25Weight: state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight,
|
|
58
58
|
minScore: state.config.retrieval.minScore,
|
|
59
|
+
rrfK: state.config.retrieval.rrfK,
|
|
60
|
+
recencyBoost: state.config.retrieval.recencyBoost,
|
|
61
|
+
recencyHalfLifeHours: state.config.retrieval.recencyHalfLifeHours,
|
|
62
|
+
importanceWeight: state.config.retrieval.importanceWeight,
|
|
63
|
+
});
|
|
64
|
+
await state.store.putEvent({
|
|
65
|
+
id: generateId(),
|
|
66
|
+
type: "recall",
|
|
67
|
+
scope: activeScope,
|
|
68
|
+
sessionID: eventInput.sessionID,
|
|
69
|
+
timestamp: Date.now(),
|
|
70
|
+
resultCount: results.length,
|
|
71
|
+
injected: results.length > 0,
|
|
72
|
+
metadataJson: JSON.stringify({
|
|
73
|
+
source: "system-transform",
|
|
74
|
+
includeGlobalScope: state.config.includeGlobalScope,
|
|
75
|
+
}),
|
|
59
76
|
});
|
|
60
77
|
if (results.length === 0)
|
|
61
78
|
return;
|
|
62
79
|
const memoryBlock = [
|
|
63
80
|
"[Memory Recall - optional historical context]",
|
|
64
|
-
...results.map((item, index) => `${index + 1}. (${item.record.scope}) ${item.record.text}`),
|
|
81
|
+
...results.map((item, index) => `${index + 1}. [${item.record.id}] (${item.record.scope}) ${item.record.text}`),
|
|
65
82
|
"Use these as optional hints only; prioritize current user intent and current repo state.",
|
|
66
83
|
].join("\n");
|
|
67
84
|
eventOutput.system.push(memoryBlock);
|
|
@@ -95,6 +112,10 @@ const plugin = async (input) => {
|
|
|
95
112
|
vectorWeight: state.config.retrieval.mode === "vector" ? 1 : state.config.retrieval.vectorWeight,
|
|
96
113
|
bm25Weight: state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight,
|
|
97
114
|
minScore: state.config.retrieval.minScore,
|
|
115
|
+
rrfK: state.config.retrieval.rrfK,
|
|
116
|
+
recencyBoost: state.config.retrieval.recencyBoost,
|
|
117
|
+
recencyHalfLifeHours: state.config.retrieval.recencyHalfLifeHours,
|
|
118
|
+
importanceWeight: state.config.retrieval.importanceWeight,
|
|
98
119
|
});
|
|
99
120
|
if (results.length === 0)
|
|
100
121
|
return "No relevant memory found.";
|
|
@@ -167,6 +188,108 @@ const plugin = async (input) => {
|
|
|
167
188
|
}, null, 2);
|
|
168
189
|
},
|
|
169
190
|
}),
|
|
191
|
+
memory_feedback_missing: tool({
|
|
192
|
+
description: "Record feedback for memory that should have been stored",
|
|
193
|
+
args: {
|
|
194
|
+
text: tool.schema.string().min(1),
|
|
195
|
+
labels: tool.schema.array(tool.schema.string().min(1)).default([]),
|
|
196
|
+
scope: tool.schema.string().optional(),
|
|
197
|
+
},
|
|
198
|
+
execute: async (args, context) => {
|
|
199
|
+
await state.ensureInitialized();
|
|
200
|
+
if (!state.initialized)
|
|
201
|
+
return unavailableMessage(state.config.embedding.provider);
|
|
202
|
+
const scope = args.scope ?? deriveProjectScope(context.worktree);
|
|
203
|
+
await state.store.putEvent({
|
|
204
|
+
id: generateId(),
|
|
205
|
+
type: "feedback",
|
|
206
|
+
feedbackType: "missing",
|
|
207
|
+
scope,
|
|
208
|
+
sessionID: context.sessionID,
|
|
209
|
+
timestamp: Date.now(),
|
|
210
|
+
text: args.text,
|
|
211
|
+
labels: args.labels,
|
|
212
|
+
metadataJson: JSON.stringify({ source: "memory_feedback_missing" }),
|
|
213
|
+
});
|
|
214
|
+
return "Recorded missing-memory feedback.";
|
|
215
|
+
},
|
|
216
|
+
}),
|
|
217
|
+
memory_feedback_wrong: tool({
|
|
218
|
+
description: "Record feedback for memory that should not be stored",
|
|
219
|
+
args: {
|
|
220
|
+
id: tool.schema.string().min(6),
|
|
221
|
+
reason: tool.schema.string().optional(),
|
|
222
|
+
scope: tool.schema.string().optional(),
|
|
223
|
+
},
|
|
224
|
+
execute: async (args, context) => {
|
|
225
|
+
await state.ensureInitialized();
|
|
226
|
+
if (!state.initialized)
|
|
227
|
+
return unavailableMessage(state.config.embedding.provider);
|
|
228
|
+
const scope = args.scope ?? deriveProjectScope(context.worktree);
|
|
229
|
+
const scopes = buildScopeFilter(scope, state.config.includeGlobalScope);
|
|
230
|
+
const exists = await state.store.hasMemory(args.id, scopes);
|
|
231
|
+
if (!exists) {
|
|
232
|
+
return `Memory ${args.id} not found in current scope.`;
|
|
233
|
+
}
|
|
234
|
+
await state.store.putEvent({
|
|
235
|
+
id: generateId(),
|
|
236
|
+
type: "feedback",
|
|
237
|
+
feedbackType: "wrong",
|
|
238
|
+
scope,
|
|
239
|
+
sessionID: context.sessionID,
|
|
240
|
+
timestamp: Date.now(),
|
|
241
|
+
memoryId: args.id,
|
|
242
|
+
reason: args.reason,
|
|
243
|
+
metadataJson: JSON.stringify({ source: "memory_feedback_wrong" }),
|
|
244
|
+
});
|
|
245
|
+
return `Recorded wrong-memory feedback for ${args.id}.`;
|
|
246
|
+
},
|
|
247
|
+
}),
|
|
248
|
+
memory_feedback_useful: tool({
|
|
249
|
+
description: "Record whether a recalled memory was helpful",
|
|
250
|
+
args: {
|
|
251
|
+
id: tool.schema.string().min(6),
|
|
252
|
+
helpful: tool.schema.boolean(),
|
|
253
|
+
scope: tool.schema.string().optional(),
|
|
254
|
+
},
|
|
255
|
+
execute: async (args, context) => {
|
|
256
|
+
await state.ensureInitialized();
|
|
257
|
+
if (!state.initialized)
|
|
258
|
+
return unavailableMessage(state.config.embedding.provider);
|
|
259
|
+
const scope = args.scope ?? deriveProjectScope(context.worktree);
|
|
260
|
+
const scopes = buildScopeFilter(scope, state.config.includeGlobalScope);
|
|
261
|
+
const exists = await state.store.hasMemory(args.id, scopes);
|
|
262
|
+
if (!exists) {
|
|
263
|
+
return `Memory ${args.id} not found in current scope.`;
|
|
264
|
+
}
|
|
265
|
+
await state.store.putEvent({
|
|
266
|
+
id: generateId(),
|
|
267
|
+
type: "feedback",
|
|
268
|
+
feedbackType: "useful",
|
|
269
|
+
scope,
|
|
270
|
+
sessionID: context.sessionID,
|
|
271
|
+
timestamp: Date.now(),
|
|
272
|
+
memoryId: args.id,
|
|
273
|
+
helpful: args.helpful,
|
|
274
|
+
metadataJson: JSON.stringify({ source: "memory_feedback_useful" }),
|
|
275
|
+
});
|
|
276
|
+
return `Recorded recall usefulness feedback for ${args.id}.`;
|
|
277
|
+
},
|
|
278
|
+
}),
|
|
279
|
+
memory_effectiveness: tool({
|
|
280
|
+
description: "Show effectiveness metrics for capture recall and feedback",
|
|
281
|
+
args: {
|
|
282
|
+
scope: tool.schema.string().optional(),
|
|
283
|
+
},
|
|
284
|
+
execute: async (args, context) => {
|
|
285
|
+
await state.ensureInitialized();
|
|
286
|
+
if (!state.initialized)
|
|
287
|
+
return unavailableMessage(state.config.embedding.provider);
|
|
288
|
+
const scope = args.scope ?? deriveProjectScope(context.worktree);
|
|
289
|
+
const summary = await state.store.summarizeEvents(scope, state.config.includeGlobalScope);
|
|
290
|
+
return JSON.stringify(summary, null, 2);
|
|
291
|
+
},
|
|
292
|
+
}),
|
|
170
293
|
memory_port_plan: tool({
|
|
171
294
|
description: "Plan non-conflicting host ports for compose services and optionally persist reservations",
|
|
172
295
|
args: {
|
|
@@ -313,36 +436,74 @@ async function getLastUserText(sessionID, client) {
|
|
|
313
436
|
}
|
|
314
437
|
async function flushAutoCapture(sessionID, state, client) {
|
|
315
438
|
const fragments = state.captureBuffer.get(sessionID) ?? [];
|
|
316
|
-
if (fragments.length === 0)
|
|
439
|
+
if (fragments.length === 0) {
|
|
440
|
+
await recordCaptureEvent(state, {
|
|
441
|
+
sessionID,
|
|
442
|
+
scope: state.defaultScope,
|
|
443
|
+
outcome: "skipped",
|
|
444
|
+
skipReason: "empty-buffer",
|
|
445
|
+
text: "",
|
|
446
|
+
});
|
|
317
447
|
return;
|
|
448
|
+
}
|
|
318
449
|
state.captureBuffer.delete(sessionID);
|
|
319
450
|
const combined = fragments.join("\n").trim();
|
|
320
|
-
const
|
|
321
|
-
if (!candidate)
|
|
322
|
-
return;
|
|
451
|
+
const activeScope = await resolveSessionScope(sessionID, client, state.defaultScope);
|
|
323
452
|
await state.ensureInitialized();
|
|
324
|
-
if (!state.initialized)
|
|
453
|
+
if (!state.initialized) {
|
|
325
454
|
return;
|
|
455
|
+
}
|
|
456
|
+
await recordCaptureEvent(state, {
|
|
457
|
+
sessionID,
|
|
458
|
+
scope: activeScope,
|
|
459
|
+
outcome: "considered",
|
|
460
|
+
text: combined,
|
|
461
|
+
});
|
|
462
|
+
const result = extractCaptureCandidate(combined, state.config.minCaptureChars);
|
|
463
|
+
if (!result.candidate) {
|
|
464
|
+
await recordCaptureEvent(state, {
|
|
465
|
+
sessionID,
|
|
466
|
+
scope: activeScope,
|
|
467
|
+
outcome: "skipped",
|
|
468
|
+
skipReason: result.skipReason,
|
|
469
|
+
text: combined,
|
|
470
|
+
});
|
|
471
|
+
return;
|
|
472
|
+
}
|
|
326
473
|
let vector = [];
|
|
327
474
|
try {
|
|
328
|
-
vector = await state.embedder.embed(candidate.text);
|
|
475
|
+
vector = await state.embedder.embed(result.candidate.text);
|
|
329
476
|
}
|
|
330
477
|
catch (error) {
|
|
331
478
|
console.warn(`[lancedb-opencode-pro] embedding unavailable during auto-capture: ${toErrorMessage(error)}`);
|
|
479
|
+
await recordCaptureEvent(state, {
|
|
480
|
+
sessionID,
|
|
481
|
+
scope: activeScope,
|
|
482
|
+
outcome: "skipped",
|
|
483
|
+
skipReason: "embedding-unavailable",
|
|
484
|
+
text: combined,
|
|
485
|
+
});
|
|
332
486
|
vector = [];
|
|
333
487
|
}
|
|
334
488
|
if (vector.length === 0) {
|
|
335
489
|
console.warn("[lancedb-opencode-pro] auto-capture skipped because embedding vector is empty");
|
|
490
|
+
await recordCaptureEvent(state, {
|
|
491
|
+
sessionID,
|
|
492
|
+
scope: activeScope,
|
|
493
|
+
outcome: "skipped",
|
|
494
|
+
skipReason: "empty-embedding",
|
|
495
|
+
text: combined,
|
|
496
|
+
});
|
|
336
497
|
return;
|
|
337
498
|
}
|
|
338
|
-
const
|
|
499
|
+
const memoryId = generateId();
|
|
339
500
|
await state.store.put({
|
|
340
|
-
id:
|
|
341
|
-
text: candidate.text,
|
|
501
|
+
id: memoryId,
|
|
502
|
+
text: result.candidate.text,
|
|
342
503
|
vector,
|
|
343
|
-
category: candidate.category,
|
|
504
|
+
category: result.candidate.category,
|
|
344
505
|
scope: activeScope,
|
|
345
|
-
importance: candidate.importance,
|
|
506
|
+
importance: result.candidate.importance,
|
|
346
507
|
timestamp: Date.now(),
|
|
347
508
|
schemaVersion: SCHEMA_VERSION,
|
|
348
509
|
embeddingModel: state.config.embedding.model,
|
|
@@ -352,8 +513,31 @@ async function flushAutoCapture(sessionID, state, client) {
|
|
|
352
513
|
sessionID,
|
|
353
514
|
}),
|
|
354
515
|
});
|
|
516
|
+
await recordCaptureEvent(state, {
|
|
517
|
+
sessionID,
|
|
518
|
+
scope: activeScope,
|
|
519
|
+
outcome: "stored",
|
|
520
|
+
memoryId,
|
|
521
|
+
text: result.candidate.text,
|
|
522
|
+
});
|
|
355
523
|
await state.store.pruneScope(activeScope, state.config.maxEntriesPerScope);
|
|
356
524
|
}
|
|
525
|
+
async function recordCaptureEvent(state, input) {
|
|
526
|
+
if (!state.initialized)
|
|
527
|
+
return;
|
|
528
|
+
await state.store.putEvent({
|
|
529
|
+
id: generateId(),
|
|
530
|
+
type: "capture",
|
|
531
|
+
scope: input.scope,
|
|
532
|
+
sessionID: input.sessionID,
|
|
533
|
+
timestamp: Date.now(),
|
|
534
|
+
outcome: input.outcome,
|
|
535
|
+
skipReason: input.skipReason,
|
|
536
|
+
memoryId: input.memoryId,
|
|
537
|
+
text: input.text,
|
|
538
|
+
metadataJson: JSON.stringify({ source: "auto-capture" }),
|
|
539
|
+
});
|
|
540
|
+
}
|
|
357
541
|
async function resolveSessionScope(sessionID, client, fallback) {
|
|
358
542
|
try {
|
|
359
543
|
const response = await client.session.get({ path: { id: sessionID } });
|
package/dist/store.d.ts
CHANGED
|
@@ -1,14 +1,16 @@
|
|
|
1
|
-
import type { MemoryRecord, SearchResult } from "./types.js";
|
|
1
|
+
import type { EffectivenessSummary, MemoryEffectivenessEvent, MemoryRecord, SearchResult } from "./types.js";
|
|
2
2
|
export declare class MemoryStore {
|
|
3
3
|
private readonly dbPath;
|
|
4
4
|
private lancedb;
|
|
5
5
|
private connection;
|
|
6
6
|
private table;
|
|
7
|
+
private eventTable;
|
|
7
8
|
private indexState;
|
|
8
9
|
private scopeCache;
|
|
9
10
|
constructor(dbPath: string);
|
|
10
11
|
init(vectorDim: number): Promise<void>;
|
|
11
12
|
put(record: MemoryRecord): Promise<void>;
|
|
13
|
+
putEvent(event: MemoryEffectivenessEvent): Promise<void>;
|
|
12
14
|
search(params: {
|
|
13
15
|
query: string;
|
|
14
16
|
queryVector: number[];
|
|
@@ -17,12 +19,19 @@ export declare class MemoryStore {
|
|
|
17
19
|
vectorWeight: number;
|
|
18
20
|
bm25Weight: number;
|
|
19
21
|
minScore: number;
|
|
22
|
+
rrfK?: number;
|
|
23
|
+
recencyBoost?: boolean;
|
|
24
|
+
recencyHalfLifeHours?: number;
|
|
25
|
+
importanceWeight?: number;
|
|
20
26
|
}): Promise<SearchResult[]>;
|
|
21
27
|
deleteById(id: string, scopes: string[]): Promise<boolean>;
|
|
22
28
|
clearScope(scope: string): Promise<number>;
|
|
23
29
|
list(scope: string, limit: number): Promise<MemoryRecord[]>;
|
|
24
30
|
pruneScope(scope: string, maxEntries: number): Promise<number>;
|
|
25
31
|
countIncompatibleVectors(scopes: string[], expectedDim: number): Promise<number>;
|
|
32
|
+
hasMemory(id: string, scopes: string[]): Promise<boolean>;
|
|
33
|
+
listEvents(scopes: string[], limit: number): Promise<MemoryEffectivenessEvent[]>;
|
|
34
|
+
summarizeEvents(scope: string, includeGlobalScope: boolean): Promise<EffectivenessSummary>;
|
|
26
35
|
getIndexHealth(): {
|
|
27
36
|
vector: boolean;
|
|
28
37
|
fts: boolean;
|
|
@@ -31,6 +40,8 @@ export declare class MemoryStore {
|
|
|
31
40
|
private invalidateScope;
|
|
32
41
|
private getCachedScopes;
|
|
33
42
|
private requireTable;
|
|
43
|
+
private requireEventTable;
|
|
44
|
+
private readEventsByScopes;
|
|
34
45
|
private readByScopes;
|
|
35
46
|
private ensureIndexes;
|
|
36
47
|
}
|
package/dist/store.js
CHANGED
|
@@ -2,11 +2,13 @@ import { mkdir } from "node:fs/promises";
|
|
|
2
2
|
import { dirname } from "node:path";
|
|
3
3
|
import { tokenize } from "./utils.js";
|
|
4
4
|
const TABLE_NAME = "memories";
|
|
5
|
+
const EVENTS_TABLE_NAME = "effectiveness_events";
|
|
5
6
|
export class MemoryStore {
|
|
6
7
|
dbPath;
|
|
7
8
|
lancedb = null;
|
|
8
9
|
connection = null;
|
|
9
10
|
table = null;
|
|
11
|
+
eventTable = null;
|
|
10
12
|
indexState = {
|
|
11
13
|
vector: false,
|
|
12
14
|
fts: false,
|
|
@@ -41,6 +43,31 @@ export class MemoryStore {
|
|
|
41
43
|
this.table = await this.connection.createTable(TABLE_NAME, [bootstrap]);
|
|
42
44
|
await this.table.delete("id = '__bootstrap__'");
|
|
43
45
|
}
|
|
46
|
+
try {
|
|
47
|
+
this.eventTable = await this.connection.openTable(EVENTS_TABLE_NAME);
|
|
48
|
+
}
|
|
49
|
+
catch {
|
|
50
|
+
const bootstrapEvent = {
|
|
51
|
+
id: "__bootstrap__",
|
|
52
|
+
type: "capture",
|
|
53
|
+
scope: "global",
|
|
54
|
+
sessionID: "",
|
|
55
|
+
timestamp: 0,
|
|
56
|
+
memoryId: "",
|
|
57
|
+
text: "",
|
|
58
|
+
outcome: "considered",
|
|
59
|
+
skipReason: "",
|
|
60
|
+
resultCount: 0,
|
|
61
|
+
injected: false,
|
|
62
|
+
feedbackType: "",
|
|
63
|
+
helpful: -1,
|
|
64
|
+
reason: "",
|
|
65
|
+
labelsJson: "[]",
|
|
66
|
+
metadataJson: "{}",
|
|
67
|
+
};
|
|
68
|
+
this.eventTable = await this.connection.createTable(EVENTS_TABLE_NAME, [bootstrapEvent]);
|
|
69
|
+
await this.eventTable.delete("id = '__bootstrap__'");
|
|
70
|
+
}
|
|
44
71
|
await this.ensureIndexes();
|
|
45
72
|
}
|
|
46
73
|
async put(record) {
|
|
@@ -48,20 +75,78 @@ export class MemoryStore {
|
|
|
48
75
|
await table.add([record]);
|
|
49
76
|
this.invalidateScope(record.scope);
|
|
50
77
|
}
|
|
78
|
+
async putEvent(event) {
|
|
79
|
+
await this.requireEventTable().add([
|
|
80
|
+
{
|
|
81
|
+
id: event.id,
|
|
82
|
+
type: event.type,
|
|
83
|
+
scope: event.scope,
|
|
84
|
+
sessionID: event.sessionID ?? "",
|
|
85
|
+
timestamp: event.timestamp,
|
|
86
|
+
memoryId: event.memoryId ?? "",
|
|
87
|
+
text: event.text ?? "",
|
|
88
|
+
outcome: event.type === "capture" ? event.outcome : "",
|
|
89
|
+
skipReason: event.type === "capture" ? event.skipReason ?? "" : "",
|
|
90
|
+
resultCount: event.type === "recall" ? event.resultCount : 0,
|
|
91
|
+
injected: event.type === "recall" ? event.injected : false,
|
|
92
|
+
feedbackType: event.type === "feedback" ? event.feedbackType : "",
|
|
93
|
+
helpful: event.type === "feedback" ? (event.helpful === undefined ? -1 : event.helpful ? 1 : 0) : -1,
|
|
94
|
+
reason: event.type === "feedback" ? event.reason ?? "" : "",
|
|
95
|
+
labelsJson: event.type === "feedback" ? JSON.stringify(event.labels ?? []) : "[]",
|
|
96
|
+
metadataJson: event.metadataJson,
|
|
97
|
+
},
|
|
98
|
+
]);
|
|
99
|
+
}
|
|
51
100
|
async search(params) {
|
|
52
101
|
const cached = await this.getCachedScopes(params.scopes);
|
|
53
102
|
if (cached.records.length === 0)
|
|
54
103
|
return [];
|
|
55
104
|
const queryTokens = tokenize(params.query);
|
|
56
105
|
const queryNorm = vecNorm(params.queryVector);
|
|
57
|
-
const
|
|
106
|
+
const useVectorChannel = params.queryVector.length > 0 && params.vectorWeight > 0;
|
|
107
|
+
const useBm25Channel = queryTokens.length > 0 && params.bm25Weight > 0;
|
|
108
|
+
const { vectorWeight, bm25Weight } = normalizeChannelWeights(useVectorChannel ? params.vectorWeight : 0, useBm25Channel ? params.bm25Weight : 0);
|
|
109
|
+
const rrfK = Math.max(1, Math.floor(params.rrfK ?? 60));
|
|
110
|
+
const recencyBoostEnabled = params.recencyBoost ?? true;
|
|
111
|
+
const recencyHalfLifeHours = Math.max(1, params.recencyHalfLifeHours ?? 72);
|
|
112
|
+
const importanceWeight = clampImportanceWeight(params.importanceWeight ?? 0.4);
|
|
113
|
+
const candidates = cached.records
|
|
58
114
|
.filter((record) => params.queryVector.length === 0 || record.vector.length === params.queryVector.length)
|
|
59
115
|
.map((record, index) => {
|
|
60
116
|
const recordNorm = cached.norms.get(record.id) ?? vecNorm(record.vector);
|
|
61
|
-
const vectorScore = fastCosine(params.queryVector, record.vector, queryNorm, recordNorm);
|
|
62
|
-
const bm25Score = bm25LikeScore(queryTokens, cached.tokenized[index], cached.idf);
|
|
63
|
-
|
|
64
|
-
|
|
117
|
+
const vectorScore = useVectorChannel ? fastCosine(params.queryVector, record.vector, queryNorm, recordNorm) : 0;
|
|
118
|
+
const bm25Score = useBm25Channel ? bm25LikeScore(queryTokens, cached.tokenized[index], cached.idf) : 0;
|
|
119
|
+
return { record, vectorScore, bm25Score };
|
|
120
|
+
});
|
|
121
|
+
if (candidates.length === 0)
|
|
122
|
+
return [];
|
|
123
|
+
const vectorRanks = useVectorChannel ? buildRankMap(candidates, (item) => item.vectorScore) : null;
|
|
124
|
+
const bm25Ranks = useBm25Channel ? buildRankMap(candidates, (item) => item.bm25Score) : null;
|
|
125
|
+
const scored = candidates
|
|
126
|
+
.map((item) => {
|
|
127
|
+
let rrfScore = 0;
|
|
128
|
+
if (vectorRanks) {
|
|
129
|
+
const rank = vectorRanks.get(item.record.id);
|
|
130
|
+
if (rank !== undefined)
|
|
131
|
+
rrfScore += vectorWeight / (rrfK + rank);
|
|
132
|
+
}
|
|
133
|
+
if (bm25Ranks) {
|
|
134
|
+
const rank = bm25Ranks.get(item.record.id);
|
|
135
|
+
if (rank !== undefined)
|
|
136
|
+
rrfScore += bm25Weight / (rrfK + rank);
|
|
137
|
+
}
|
|
138
|
+
rrfScore *= rrfK + 1;
|
|
139
|
+
const recencyFactor = recencyBoostEnabled
|
|
140
|
+
? computeRecencyMultiplier(item.record.timestamp, recencyHalfLifeHours)
|
|
141
|
+
: 1;
|
|
142
|
+
const importanceFactor = 1 + importanceWeight * clampImportance(item.record.importance);
|
|
143
|
+
const score = rrfScore * recencyFactor * importanceFactor;
|
|
144
|
+
return {
|
|
145
|
+
record: item.record,
|
|
146
|
+
score,
|
|
147
|
+
vectorScore: item.vectorScore,
|
|
148
|
+
bm25Score: item.bm25Score,
|
|
149
|
+
};
|
|
65
150
|
})
|
|
66
151
|
.filter((item) => item.score >= params.minScore)
|
|
67
152
|
.sort((a, b) => b.score - a.score)
|
|
@@ -104,6 +189,93 @@ export class MemoryStore {
|
|
|
104
189
|
const rows = await this.readByScopes(scopes);
|
|
105
190
|
return rows.filter((row) => row.vectorDim !== expectedDim).length;
|
|
106
191
|
}
|
|
192
|
+
async hasMemory(id, scopes) {
|
|
193
|
+
const rows = await this.readByScopes(scopes);
|
|
194
|
+
return rows.some((row) => row.id === id);
|
|
195
|
+
}
|
|
196
|
+
async listEvents(scopes, limit) {
|
|
197
|
+
const rows = await this.readEventsByScopes(scopes);
|
|
198
|
+
return rows.sort((a, b) => b.timestamp - a.timestamp).slice(0, limit);
|
|
199
|
+
}
|
|
200
|
+
async summarizeEvents(scope, includeGlobalScope) {
|
|
201
|
+
const scopes = includeGlobalScope && scope !== "global" ? [scope, "global"] : [scope];
|
|
202
|
+
const events = await this.readEventsByScopes(scopes);
|
|
203
|
+
const captureSkipReasons = {};
|
|
204
|
+
let captureConsidered = 0;
|
|
205
|
+
let captureStored = 0;
|
|
206
|
+
let captureSkipped = 0;
|
|
207
|
+
let recallRequested = 0;
|
|
208
|
+
let recallInjected = 0;
|
|
209
|
+
let recallReturnedResults = 0;
|
|
210
|
+
let feedbackMissing = 0;
|
|
211
|
+
let feedbackWrong = 0;
|
|
212
|
+
let feedbackUsefulPositive = 0;
|
|
213
|
+
let feedbackUsefulNegative = 0;
|
|
214
|
+
for (const event of events) {
|
|
215
|
+
if (event.type === "capture") {
|
|
216
|
+
if (event.outcome === "considered")
|
|
217
|
+
captureConsidered += 1;
|
|
218
|
+
if (event.outcome === "stored")
|
|
219
|
+
captureStored += 1;
|
|
220
|
+
if (event.outcome === "skipped") {
|
|
221
|
+
captureSkipped += 1;
|
|
222
|
+
if (event.skipReason) {
|
|
223
|
+
captureSkipReasons[event.skipReason] = (captureSkipReasons[event.skipReason] ?? 0) + 1;
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
if (event.type === "recall") {
|
|
228
|
+
recallRequested += 1;
|
|
229
|
+
if (event.resultCount > 0)
|
|
230
|
+
recallReturnedResults += 1;
|
|
231
|
+
if (event.injected)
|
|
232
|
+
recallInjected += 1;
|
|
233
|
+
}
|
|
234
|
+
if (event.type === "feedback") {
|
|
235
|
+
if (event.feedbackType === "missing")
|
|
236
|
+
feedbackMissing += 1;
|
|
237
|
+
if (event.feedbackType === "wrong")
|
|
238
|
+
feedbackWrong += 1;
|
|
239
|
+
if (event.feedbackType === "useful") {
|
|
240
|
+
if (event.helpful)
|
|
241
|
+
feedbackUsefulPositive += 1;
|
|
242
|
+
else
|
|
243
|
+
feedbackUsefulNegative += 1;
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
const totalCaptureAttempts = captureStored + captureSkipped;
|
|
248
|
+
const totalUsefulFeedback = feedbackUsefulPositive + feedbackUsefulNegative;
|
|
249
|
+
return {
|
|
250
|
+
scope,
|
|
251
|
+
totalEvents: events.length,
|
|
252
|
+
capture: {
|
|
253
|
+
considered: captureConsidered,
|
|
254
|
+
stored: captureStored,
|
|
255
|
+
skipped: captureSkipped,
|
|
256
|
+
successRate: totalCaptureAttempts === 0 ? 0 : captureStored / totalCaptureAttempts,
|
|
257
|
+
skipReasons: captureSkipReasons,
|
|
258
|
+
},
|
|
259
|
+
recall: {
|
|
260
|
+
requested: recallRequested,
|
|
261
|
+
injected: recallInjected,
|
|
262
|
+
returnedResults: recallReturnedResults,
|
|
263
|
+
hitRate: recallRequested === 0 ? 0 : recallReturnedResults / recallRequested,
|
|
264
|
+
injectionRate: recallRequested === 0 ? 0 : recallInjected / recallRequested,
|
|
265
|
+
},
|
|
266
|
+
feedback: {
|
|
267
|
+
missing: feedbackMissing,
|
|
268
|
+
wrong: feedbackWrong,
|
|
269
|
+
useful: {
|
|
270
|
+
positive: feedbackUsefulPositive,
|
|
271
|
+
negative: feedbackUsefulNegative,
|
|
272
|
+
helpfulRate: totalUsefulFeedback === 0 ? 0 : feedbackUsefulPositive / totalUsefulFeedback,
|
|
273
|
+
},
|
|
274
|
+
falsePositiveRate: captureStored === 0 ? 0 : feedbackWrong / captureStored,
|
|
275
|
+
falseNegativeRate: totalCaptureAttempts === 0 ? 0 : feedbackMissing / totalCaptureAttempts,
|
|
276
|
+
},
|
|
277
|
+
};
|
|
278
|
+
}
|
|
107
279
|
getIndexHealth() {
|
|
108
280
|
return {
|
|
109
281
|
vector: this.indexState.vector,
|
|
@@ -148,6 +320,44 @@ export class MemoryStore {
|
|
|
148
320
|
}
|
|
149
321
|
return this.table;
|
|
150
322
|
}
|
|
323
|
+
requireEventTable() {
|
|
324
|
+
if (!this.eventTable) {
|
|
325
|
+
throw new Error("MemoryStore event table is not initialized");
|
|
326
|
+
}
|
|
327
|
+
return this.eventTable;
|
|
328
|
+
}
|
|
329
|
+
async readEventsByScopes(scopes) {
|
|
330
|
+
const table = this.requireEventTable();
|
|
331
|
+
if (scopes.length === 0)
|
|
332
|
+
return [];
|
|
333
|
+
const whereExpr = scopes.map((scope) => `scope = '${escapeSql(scope)}'`).join(" OR ");
|
|
334
|
+
const rows = await table
|
|
335
|
+
.query()
|
|
336
|
+
.where(`(${whereExpr})`)
|
|
337
|
+
.select([
|
|
338
|
+
"id",
|
|
339
|
+
"type",
|
|
340
|
+
"scope",
|
|
341
|
+
"sessionID",
|
|
342
|
+
"timestamp",
|
|
343
|
+
"memoryId",
|
|
344
|
+
"text",
|
|
345
|
+
"outcome",
|
|
346
|
+
"skipReason",
|
|
347
|
+
"resultCount",
|
|
348
|
+
"injected",
|
|
349
|
+
"feedbackType",
|
|
350
|
+
"helpful",
|
|
351
|
+
"reason",
|
|
352
|
+
"labelsJson",
|
|
353
|
+
"metadataJson",
|
|
354
|
+
])
|
|
355
|
+
.limit(100000)
|
|
356
|
+
.toArray();
|
|
357
|
+
return rows
|
|
358
|
+
.map((row) => normalizeEventRow(row))
|
|
359
|
+
.filter((row) => row !== null);
|
|
360
|
+
}
|
|
151
361
|
async readByScopes(scopes) {
|
|
152
362
|
const table = this.requireTable();
|
|
153
363
|
if (scopes.length === 0)
|
|
@@ -222,9 +432,92 @@ function normalizeRow(row) {
|
|
|
222
432
|
metadataJson: String(row.metadataJson ?? "{}"),
|
|
223
433
|
};
|
|
224
434
|
}
|
|
435
|
+
/**
 * Converts a raw LanceDB event row into a typed effectiveness event.
 * Returns null when the core fields (id/type/scope) are missing or the
 * type discriminator is unknown.
 *
 * Fix: the original called JSON.parse on labelsJson unguarded, so a single
 * corrupt feedback row would throw and abort the entire event scan. Parsing
 * is now defensive and falls back to an empty labels list.
 */
function normalizeEventRow(row) {
    if (typeof row.id !== "string" || typeof row.type !== "string" || typeof row.scope !== "string") {
        return null;
    }
    const base = {
        id: row.id,
        scope: row.scope,
        sessionID: typeof row.sessionID === "string" && row.sessionID.length > 0 ? row.sessionID : undefined,
        timestamp: Number(row.timestamp ?? Date.now()),
        memoryId: typeof row.memoryId === "string" && row.memoryId.length > 0 ? row.memoryId : undefined,
        text: typeof row.text === "string" && row.text.length > 0 ? row.text : undefined,
        metadataJson: String(row.metadataJson ?? "{}"),
    };
    if (row.type === "capture") {
        return {
            ...base,
            type: "capture",
            // Unknown outcomes normalize to the weakest state, "considered".
            outcome: row.outcome === "stored" || row.outcome === "skipped" ? row.outcome : "considered",
            skipReason: typeof row.skipReason === "string" && row.skipReason.length > 0
                ? row.skipReason
                : undefined,
        };
    }
    if (row.type === "recall") {
        return {
            ...base,
            type: "recall",
            resultCount: Number(row.resultCount ?? 0),
            injected: Boolean(row.injected),
        };
    }
    if (row.type === "feedback") {
        // Parse defensively: a malformed labelsJson cell must not crash reads.
        let labels = [];
        if (typeof row.labelsJson === "string") {
            try {
                const parsed = JSON.parse(row.labelsJson);
                if (Array.isArray(parsed)) {
                    labels = parsed.filter((item) => typeof item === "string");
                }
            }
            catch {
                labels = [];
            }
        }
        // helpful is tri-state on disk: negative means "not recorded".
        const helpfulValue = Number(row.helpful ?? -1);
        return {
            ...base,
            type: "feedback",
            feedbackType: row.feedbackType === "missing" || row.feedbackType === "wrong" ? row.feedbackType : "useful",
            helpful: helpfulValue < 0 ? undefined : helpfulValue === 1,
            labels,
            reason: typeof row.reason === "string" && row.reason.length > 0 ? row.reason : undefined,
        };
    }
    return null;
}
|
|
225
481
|
/** Doubles single quotes so a value can be embedded in a SQL string literal. */
function escapeSql(value) {
    return value.split("'").join("''");
}
|
|
484
|
+
/**
 * Orders items by descending score and returns a Map from each item's
 * record id to its 1-based rank. The input array is not mutated.
 */
function buildRankMap(items, scoreOf) {
    const ordered = items.slice().sort((left, right) => scoreOf(right) - scoreOf(left));
    return new Map(ordered.map((item, index) => [item.record.id, index + 1]));
}
|
|
492
|
+
/**
 * Scales the vector/BM25 channel weights so they sum to 1.
 * Falls back to an even 0.5/0.5 split for degenerate configurations.
 *
 * Fix: the original guard `sum <= 0` let NaN weights through (NaN <= 0 is
 * false), producing NaN normalized weights that would poison every hybrid
 * score. Non-finite sums now also take the even-split fallback.
 */
function normalizeChannelWeights(vectorWeight, bm25Weight) {
    const combined = vectorWeight + bm25Weight;
    if (!Number.isFinite(combined) || combined <= 0) {
        return { vectorWeight: 0.5, bm25Weight: 0.5 };
    }
    return {
        vectorWeight: vectorWeight / combined,
        bm25Weight: bm25Weight / combined,
    };
}
|
|
502
|
+
/**
 * Maps an entry's age onto a multiplier in (0.5, 1]: brand-new (or future)
 * timestamps score exactly 1, and the bonus above the 0.5 floor halves every
 * `halfLifeHours`, decaying asymptotically toward 0.5.
 */
function computeRecencyMultiplier(timestamp, halfLifeHours) {
    const ageHours = Math.max(0, Date.now() - timestamp) / 3_600_000;
    if (ageHours === 0) {
        return 1;
    }
    const halvings = ageHours / halfLifeHours;
    return 0.5 + 0.5 * Math.pow(0.5, halvings);
}
|
|
511
|
+
/** Clamps an importance value into [0, 1]; non-finite inputs collapse to 0. */
function clampImportance(value) {
    return Number.isFinite(value) ? Math.min(1, Math.max(0, value)) : 0;
}
|
|
516
|
+
/** Clamps the importance weight into [0, 2]; non-finite inputs fall back to 0.4. */
function clampImportanceWeight(value) {
    if (!Number.isFinite(value)) {
        return 0.4;
    }
    return Math.min(2, Math.max(0, value));
}
|
|
228
521
|
function computeIdf(docs) {
|
|
229
522
|
const df = new Map();
|
|
230
523
|
for (const doc of docs) {
|
package/dist/types.d.ts
CHANGED
|
@@ -1,6 +1,9 @@
|
|
|
1
1
|
export type EmbeddingProvider = "ollama" | "openai";
|
|
2
2
|
export type RetrievalMode = "hybrid" | "vector";
|
|
3
3
|
export type MemoryCategory = "preference" | "fact" | "decision" | "entity" | "other";
|
|
4
|
+
/** Lifecycle states of a capture attempt. */
export type CaptureOutcome = "considered" | "skipped" | "stored";
/** Reasons a capture candidate was not stored. */
export type CaptureSkipReason = "empty-buffer" | "below-min-chars" | "no-positive-signal" | "initialization-unavailable" | "embedding-unavailable" | "empty-embedding";
/** Categories of feedback about memory behavior. */
export type FeedbackType = "missing" | "wrong" | "useful";
|
|
4
7
|
export interface EmbeddingConfig {
|
|
5
8
|
provider: EmbeddingProvider;
|
|
6
9
|
model: string;
|
|
@@ -13,6 +16,10 @@ export interface RetrievalConfig {
|
|
|
13
16
|
vectorWeight: number;
|
|
14
17
|
bm25Weight: number;
|
|
15
18
|
minScore: number;
|
|
19
|
+
rrfK: number;
|
|
20
|
+
recencyBoost: boolean;
|
|
21
|
+
recencyHalfLifeHours: number;
|
|
22
|
+
importanceWeight: number;
|
|
16
23
|
}
|
|
17
24
|
export interface MemoryRuntimeConfig {
|
|
18
25
|
provider: string;
|
|
@@ -47,3 +54,64 @@ export interface CaptureCandidate {
|
|
|
47
54
|
category: MemoryCategory;
|
|
48
55
|
importance: number;
|
|
49
56
|
}
|
|
57
|
+
/** Outcome of evaluating a message as a memory-capture candidate. */
export interface CaptureCandidateResult {
    /** The accepted candidate, or null when capture was skipped. */
    candidate: CaptureCandidate | null;
    /** Set when `candidate` is null, explaining why capture was skipped. */
    skipReason?: CaptureSkipReason;
}
/** Fields shared by every effectiveness telemetry event. */
interface MemoryEffectivenessEventBase {
    id: string;
    scope: string;
    sessionID?: string;
    /** Epoch milliseconds when the event was recorded. */
    timestamp: number;
    memoryId?: string;
    text?: string;
    /** JSON-encoded free-form metadata; defaults to "{}". */
    metadataJson: string;
}
/** Records the outcome of one capture attempt. */
export interface CaptureEvent extends MemoryEffectivenessEventBase {
    type: "capture";
    outcome: CaptureOutcome;
    skipReason?: CaptureSkipReason;
}
/** Records one recall request and whether its results were injected. */
export interface RecallEvent extends MemoryEffectivenessEventBase {
    type: "recall";
    resultCount: number;
    injected: boolean;
}
/** Records explicit feedback about memory quality. */
export interface FeedbackEvent extends MemoryEffectivenessEventBase {
    type: "feedback";
    feedbackType: FeedbackType;
    /** Present only for "useful" feedback that recorded a verdict. */
    helpful?: boolean;
    labels?: string[];
    reason?: string;
}
/** Discriminated union over all effectiveness events (tag: `type`). */
export type MemoryEffectivenessEvent = CaptureEvent | RecallEvent | FeedbackEvent;
/** Aggregated effectiveness metrics for one scope. */
export interface EffectivenessSummary {
    scope: string;
    totalEvents: number;
    capture: {
        considered: number;
        stored: number;
        skipped: number;
        /** stored / (stored + skipped); 0 when there were no attempts. */
        successRate: number;
        skipReasons: Partial<Record<CaptureSkipReason, number>>;
    };
    recall: {
        requested: number;
        injected: number;
        returnedResults: number;
        /** Fraction of recall requests that returned at least one result. */
        hitRate: number;
        /** Fraction of recall requests whose results were injected. */
        injectionRate: number;
    };
    feedback: {
        missing: number;
        wrong: number;
        useful: {
            positive: number;
            negative: number;
            /** positive / (positive + negative); 0 when no useful feedback. */
            helpfulRate: number;
        };
        /** "wrong" feedback count relative to stored captures. */
        falsePositiveRate: number;
        /** "missing" feedback count relative to capture attempts. */
        falseNegativeRate: number;
    };
}
export {};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "lancedb-opencode-pro",
|
|
3
|
-
"version": "0.1.2",
|
|
3
|
+
"version": "0.1.4",
|
|
4
4
|
"description": "LanceDB-backed long-term memory provider for OpenCode",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -46,10 +46,11 @@
|
|
|
46
46
|
"test": "npm run typecheck",
|
|
47
47
|
"test:foundation": "npm run build:test && node --test dist-test/test/foundation/foundation.test.js",
|
|
48
48
|
"test:regression": "npm run build:test && node --test dist-test/test/regression/plugin.test.js",
|
|
49
|
+
"test:effectiveness": "npm run test:foundation && npm run test:regression",
|
|
49
50
|
"test:retrieval": "npm run build:test && node --test dist-test/test/retrieval/retrieval.test.js",
|
|
50
51
|
"benchmark:latency": "npm run build:test && node dist-test/test/benchmark/latency.js",
|
|
51
52
|
"test:e2e": "node scripts/e2e-opencode-memory.mjs",
|
|
52
|
-
"verify": "npm run typecheck && npm run build && npm run test:
|
|
53
|
+
"verify": "npm run typecheck && npm run build && npm run test:effectiveness && npm run test:retrieval",
|
|
53
54
|
"verify:full": "npm run verify && npm run benchmark:latency && npm pack",
|
|
54
55
|
"release:check": "npm run verify:full && npm publish --dry-run",
|
|
55
56
|
"prepublishOnly": "npm run verify:full"
|