lancedb-opencode-pro 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +501 -0
- package/dist/config.d.ts +3 -0
- package/dist/config.js +93 -0
- package/dist/embedder.d.ts +14 -0
- package/dist/embedder.js +72 -0
- package/dist/extract.d.ts +2 -0
- package/dist/extract.js +44 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +372 -0
- package/dist/ports.d.ts +34 -0
- package/dist/ports.js +129 -0
- package/dist/scope.d.ts +2 -0
- package/dist/scope.js +24 -0
- package/dist/store.d.ts +36 -0
- package/dist/store.js +283 -0
- package/dist/types.d.ts +48 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +9 -0
- package/dist/utils.js +73 -0
- package/package.json +66 -0
package/dist/extract.js
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
// Heuristic keyword lists (English plus Traditional Chinese) used by the
// capture pipeline below. Matching is case-insensitive substring search
// against the candidate text.
//
// A message must contain at least one POSITIVE_SIGNAL (a completed or
// resolved outcome) to be considered for memory capture at all.
const POSITIVE_SIGNALS = [
    "fixed",
    "resolved",
    "works now",
    "successful",
    "done",
    "完成",
    "已解決",
    "修復",
    "成功",
];
// Marks text that records an engineering decision or tradeoff.
const DECISION_SIGNALS = ["decide", "decision", "tradeoff", "architecture", "採用", "決定", "架構"];
// Marks text that states a cause / root-cause fact.
const FACT_SIGNALS = ["because", "root cause", "原因", "由於"];
// Marks text that expresses a user preference or habit.
const PREF_SIGNALS = ["prefer", "preference", "偏好", "習慣"];
/**
 * Decide whether `text` is worth persisting to long-term memory.
 *
 * Returns `{ text, category, importance }` when the trimmed text is at
 * least `minChars` long and contains a positive-outcome signal, otherwise
 * null. The captured text is clipped to 1200 characters; importance is
 * 0.9 for decisions, 0.75 for facts, 0.65 for everything else.
 */
export function extractCaptureCandidate(text, minChars) {
    const trimmed = text.trim();
    if (trimmed.length < minChars) {
        return null;
    }
    const haystack = trimmed.toLowerCase();
    const hasPositiveSignal = POSITIVE_SIGNALS.some((signal) => haystack.includes(signal.toLowerCase()));
    if (!hasPositiveSignal) {
        return null;
    }
    const category = classifyCategory(haystack);
    let importance = 0.65;
    if (category === "decision") {
        importance = 0.9;
    }
    else if (category === "fact") {
        importance = 0.75;
    }
    return {
        text: clipText(trimmed, 1200),
        category,
        importance,
    };
}
// Classify lowercased text into a memory category by checking the signal
// keyword buckets in priority order: decision > fact > preference > other.
function classifyCategory(text) {
    const buckets = [
        ["decision", DECISION_SIGNALS],
        ["fact", FACT_SIGNALS],
        ["preference", PREF_SIGNALS],
    ];
    for (const [category, signals] of buckets) {
        if (signals.some((signal) => text.includes(signal.toLowerCase()))) {
            return category;
        }
    }
    return "other";
}
/**
 * Clip `text` to at most `maxLen` characters, replacing the tail with
 * "..." when truncation happens.
 *
 * Fix: the original computed `text.slice(0, maxLen - 3)`, which for
 * maxLen < 3 produces a negative slice end — trimming from the END of
 * the string and returning a result longer than maxLen. Clamp the keep
 * length at zero so tiny limits degrade to just the ellipsis.
 */
function clipText(text, maxLen) {
    if (text.length <= maxLen) {
        return text;
    }
    const keep = Math.max(0, maxLen - 3);
    return `${text.slice(0, keep)}...`;
}
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
import { tool } from "@opencode-ai/plugin";
|
|
2
|
+
import { resolveMemoryConfig } from "./config.js";
|
|
3
|
+
import { OllamaEmbedder } from "./embedder.js";
|
|
4
|
+
import { extractCaptureCandidate } from "./extract.js";
|
|
5
|
+
import { isTcpPortAvailable, parsePortReservations, planPorts, reservationKey } from "./ports.js";
|
|
6
|
+
import { buildScopeFilter, deriveProjectScope } from "./scope.js";
|
|
7
|
+
import { MemoryStore } from "./store.js";
|
|
8
|
+
import { generateId } from "./utils.js";
|
|
9
|
+
// Version stamped onto every persisted memory record (schemaVersion field);
// bump when the stored record shape changes.
const SCHEMA_VERSION = 1;
// OpenCode plugin entry point. Builds shared runtime state, then returns
// the hook map: config reload, idle/compaction auto-capture, per-session
// text buffering, memory recall injected into the system prompt, and the
// memory_* tools.
const plugin = async (input) => {
    const state = await createRuntimeState(input);
    const hooks = {
        // Re-resolve configuration whenever OpenCode pushes a config update.
        config: async (config) => {
            state.config = resolveMemoryConfig(config, input.worktree);
        },
        // When a session goes idle or is compacted, flush its buffered
        // assistant text through the auto-capture pipeline.
        event: async ({ event }) => {
            if (event.type === "session.idle" || event.type === "session.compacted") {
                const sessionID = event.properties.sessionID;
                await flushAutoCapture(sessionID, state, input.client);
            }
        },
        // Accumulate completed assistant text fragments per session; they are
        // only evaluated for capture when the session flushes (see above).
        "experimental.text.complete": async (eventInput, eventOutput) => {
            const list = state.captureBuffer.get(eventInput.sessionID) ?? [];
            list.push(eventOutput.text);
            state.captureBuffer.set(eventInput.sessionID, list);
        },
        // Inject up to 3 relevant memories into the system prompt, keyed off
        // the session's latest user message. Degrades gracefully: missing
        // session, uninitialized store, or embedding failure all result in
        // no injection rather than an error.
        "experimental.chat.system.transform": async (eventInput, eventOutput) => {
            if (!eventInput.sessionID)
                return;
            await state.ensureInitialized();
            if (!state.initialized)
                return;
            const query = await getLastUserText(eventInput.sessionID, input.client);
            if (!query)
                return;
            const activeScope = deriveProjectScope(input.worktree);
            const scopes = buildScopeFilter(activeScope, state.config.includeGlobalScope);
            let queryVector = [];
            try {
                queryVector = await state.embedder.embed(query);
            }
            catch (error) {
                // Fall back to BM25-only search with an empty vector.
                console.warn(`[lancedb-opencode-pro] embedding unavailable during recall: ${toErrorMessage(error)}`);
                queryVector = [];
            }
            const results = await state.store.search({
                query,
                queryVector,
                scopes,
                limit: 3,
                // In pure-vector mode, force weights to 1/0; otherwise use the
                // configured hybrid weights.
                vectorWeight: state.config.retrieval.mode === "vector" ? 1 : state.config.retrieval.vectorWeight,
                bm25Weight: state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight,
                minScore: state.config.retrieval.minScore,
            });
            if (results.length === 0)
                return;
            const memoryBlock = [
                "[Memory Recall - optional historical context]",
                ...results.map((item, index) => `${index + 1}. (${item.record.scope}) ${item.record.text}`),
                "Use these as optional hints only; prioritize current user intent and current repo state.",
            ].join("\n");
            eventOutput.system.push(memoryBlock);
        },
        tool: {
            // Hybrid (vector + BM25) search over the memory store.
            memory_search: tool({
                description: "Search long-term memory using hybrid retrieval",
                args: {
                    query: tool.schema.string().min(1),
                    limit: tool.schema.number().int().min(1).max(20).default(5),
                    scope: tool.schema.string().optional(),
                },
                execute: async (args, context) => {
                    await state.ensureInitialized();
                    if (!state.initialized)
                        return "Memory store unavailable (Ollama may be offline). Will retry automatically.";
                    const activeScope = args.scope ?? deriveProjectScope(context.worktree);
                    const scopes = buildScopeFilter(activeScope, state.config.includeGlobalScope);
                    let queryVector = [];
                    try {
                        queryVector = await state.embedder.embed(args.query);
                    }
                    catch {
                        // Embedding failure degrades to keyword-only search.
                        queryVector = [];
                    }
                    const results = await state.store.search({
                        query: args.query,
                        queryVector,
                        scopes,
                        limit: args.limit,
                        vectorWeight: state.config.retrieval.mode === "vector" ? 1 : state.config.retrieval.vectorWeight,
                        bm25Weight: state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight,
                        minScore: state.config.retrieval.minScore,
                    });
                    if (results.length === 0)
                        return "No relevant memory found.";
                    // One line per hit: rank, record id, scope, text, score%.
                    return results
                        .map((item, idx) => {
                            const percent = Math.round(item.score * 100);
                            return `${idx + 1}. [${item.record.id}] (${item.record.scope}) ${item.record.text} [${percent}%]`;
                        })
                        .join("\n");
                },
            }),
            // Delete a single memory record by id; guarded by confirm=true.
            memory_delete: tool({
                description: "Delete one memory entry by id",
                args: {
                    id: tool.schema.string().min(6),
                    scope: tool.schema.string().optional(),
                    confirm: tool.schema.boolean().default(false),
                },
                execute: async (args, context) => {
                    await state.ensureInitialized();
                    if (!state.initialized)
                        return "Memory store unavailable (Ollama may be offline). Will retry automatically.";
                    if (!args.confirm) {
                        return "Rejected: memory_delete requires confirm=true.";
                    }
                    const activeScope = args.scope ?? deriveProjectScope(context.worktree);
                    const scopes = buildScopeFilter(activeScope, state.config.includeGlobalScope);
                    const deleted = await state.store.deleteById(args.id, scopes);
                    return deleted ? `Deleted memory ${args.id}.` : `Memory ${args.id} not found in current scope.`;
                },
            }),
            // Destructive scope wipe; guarded by confirm=true.
            memory_clear: tool({
                description: "Clear all memories in a scope (requires confirm=true)",
                args: {
                    scope: tool.schema.string(),
                    confirm: tool.schema.boolean().default(false),
                },
                execute: async (args) => {
                    await state.ensureInitialized();
                    if (!state.initialized)
                        return "Memory store unavailable (Ollama may be offline). Will retry automatically.";
                    if (!args.confirm) {
                        return "Rejected: destructive clear requires confirm=true.";
                    }
                    const count = await state.store.clearScope(args.scope);
                    return `Cleared ${count} memories from scope ${args.scope}.`;
                },
            }),
            // Diagnostic snapshot: provider, paths, counts, index health, and
            // how many stored vectors no longer match the embedder dimension.
            memory_stats: tool({
                description: "Show memory provider status and index health",
                args: {
                    scope: tool.schema.string().optional(),
                },
                execute: async (args, context) => {
                    await state.ensureInitialized();
                    if (!state.initialized)
                        return "Memory store unavailable (Ollama may be offline). Will retry automatically.";
                    const scope = args.scope ?? deriveProjectScope(context.worktree);
                    const entries = await state.store.list(scope, 20);
                    const incompatibleVectors = await state.store.countIncompatibleVectors(buildScopeFilter(scope, state.config.includeGlobalScope), await state.embedder.dim());
                    const health = state.store.getIndexHealth();
                    return JSON.stringify({
                        provider: state.config.provider,
                        dbPath: state.config.dbPath,
                        scope,
                        recentCount: entries.length,
                        incompatibleVectors,
                        index: health,
                        embeddingModel: state.config.embedding.model,
                    }, null, 2);
                },
            }),
            // Plan non-conflicting host ports against both persisted
            // reservations (stored as "global" memory records) and live OS
            // availability, optionally persisting new reservations.
            memory_port_plan: tool({
                description: "Plan non-conflicting host ports for compose services and optionally persist reservations",
                args: {
                    project: tool.schema.string().min(1).optional(),
                    services: tool.schema
                        .array(tool.schema.object({
                            name: tool.schema.string().min(1),
                            containerPort: tool.schema.number().int().min(1).max(65535),
                            preferredHostPort: tool.schema.number().int().min(1).max(65535).optional(),
                        }))
                        .min(1),
                    rangeStart: tool.schema.number().int().min(1).max(65535).default(20000),
                    rangeEnd: tool.schema.number().int().min(1).max(65535).default(39999),
                    persist: tool.schema.boolean().default(true),
                },
                execute: async (args, context) => {
                    await state.ensureInitialized();
                    if (!state.initialized)
                        return "Memory store unavailable (Ollama may be offline). Will retry automatically.";
                    if (args.rangeStart > args.rangeEnd) {
                        return "Invalid range: rangeStart must be <= rangeEnd.";
                    }
                    const project = args.project?.trim() || deriveProjectScope(context.worktree);
                    const globalRecords = await state.store.list("global", 100000);
                    const reservations = parsePortReservations(globalRecords);
                    const assignments = await planPorts({
                        project,
                        services: args.services,
                        rangeStart: args.rangeStart,
                        rangeEnd: args.rangeEnd,
                        reservations,
                    }, isTcpPortAvailable);
                    let persisted = 0;
                    const warnings = [];
                    if (args.persist) {
                        // Collect existing reservation record ids per
                        // (project, service, protocol) key so superseded
                        // records can be removed after the new one is written.
                        const keyToOldIds = new Map();
                        for (const reservation of reservations) {
                            const key = reservationKey(reservation.project, reservation.service, reservation.protocol);
                            if (!keyToOldIds.has(key)) {
                                keyToOldIds.set(key, []);
                            }
                            keyToOldIds.get(key)?.push(reservation.id);
                        }
                        for (const assignment of assignments) {
                            const key = reservationKey(assignment.project, assignment.service, assignment.protocol);
                            const oldIds = keyToOldIds.get(key) ?? [];
                            const text = `PORT_RESERVATION ${assignment.project} ${assignment.service} host=${assignment.hostPort} container=${assignment.containerPort} protocol=${assignment.protocol}`;
                            try {
                                const vector = await state.embedder.embed(text);
                                if (vector.length === 0) {
                                    warnings.push(`Skipped persistence for ${assignment.service}: empty embedding vector.`);
                                    continue;
                                }
                                // Write new reservation first, then delete the
                                // old ones, so a failure never loses the key.
                                await state.store.put({
                                    id: generateId(),
                                    text,
                                    vector,
                                    category: "entity",
                                    scope: "global",
                                    importance: 0.8,
                                    timestamp: Date.now(),
                                    schemaVersion: SCHEMA_VERSION,
                                    embeddingModel: state.config.embedding.model,
                                    vectorDim: vector.length,
                                    metadataJson: JSON.stringify({
                                        source: "port-plan",
                                        type: "port-reservation",
                                        project: assignment.project,
                                        service: assignment.service,
                                        hostPort: assignment.hostPort,
                                        containerPort: assignment.containerPort,
                                        protocol: assignment.protocol,
                                    }),
                                });
                                for (const id of oldIds) {
                                    await state.store.deleteById(id, ["global"]);
                                }
                                persisted += 1;
                            }
                            catch (error) {
                                warnings.push(`Failed to persist ${assignment.service}: ${toErrorMessage(error)}`);
                            }
                        }
                    }
                    return JSON.stringify({
                        project,
                        persistRequested: args.persist,
                        persisted,
                        assignments,
                        warnings,
                    }, null, 2);
                },
            }),
        },
    };
    return hooks;
};
/**
 * Build the mutable runtime state shared by all plugin hooks: the resolved
 * configuration, embedder, store, default scope, per-session capture
 * buffers, and a lazy initializer that retries on every call until the
 * backing services come up.
 */
async function createRuntimeState(input) {
    const config = resolveMemoryConfig(undefined, input.worktree);
    const state = {
        config,
        embedder: new OllamaEmbedder(config.embedding),
        store: new MemoryStore(config.dbPath),
        defaultScope: deriveProjectScope(input.worktree),
        initialized: false,
        captureBuffer: new Map(),
        ensureInitialized: async () => {
            if (state.initialized) {
                return;
            }
            try {
                state.store.init; // closure reads latest state, not a snapshot
                const vectorDim = await state.embedder.dim();
                await state.store.init(vectorDim);
                state.initialized = true;
            }
            catch (error) {
                // Stay uninitialized; the next hook invocation retries.
                console.warn(`[lancedb-opencode-pro] initialization deferred: ${toErrorMessage(error)}`);
            }
        },
    };
    return state;
}
/**
 * Fetch the most recent non-empty user text from a session's message
 * history. Returns "" when no such message exists, the payload has an
 * unexpected shape, or the API call fails.
 */
async function getLastUserText(sessionID, client) {
    try {
        const response = await client.session.messages({ path: { id: sessionID } });
        const messages = unwrapData(response);
        if (!Array.isArray(messages)) {
            return "";
        }
        // Walk newest-to-oldest so the latest user turn wins.
        for (const message of [...messages].reverse()) {
            if (message.info?.role !== "user" || !Array.isArray(message.parts)) {
                continue;
            }
            const combined = message.parts
                .filter((part) => part.type === "text" && typeof part.text === "string")
                .map((part) => part.text)
                .join("\n")
                .trim();
            if (combined.length > 0) {
                return combined;
            }
        }
        return "";
    }
    catch {
        return "";
    }
}
// Drain a session's buffered assistant text and, if it qualifies, persist
// it as a memory record. The buffer is cleared up front so a failed flush
// does not replay the same fragments. Skips silently when the text does
// not qualify, the store is uninitialized, or embedding yields no vector.
async function flushAutoCapture(sessionID, state, client) {
    const fragments = state.captureBuffer.get(sessionID) ?? [];
    if (fragments.length === 0)
        return;
    state.captureBuffer.delete(sessionID);
    const combined = fragments.join("\n").trim();
    const candidate = extractCaptureCandidate(combined, state.config.minCaptureChars);
    if (!candidate)
        return;
    await state.ensureInitialized();
    if (!state.initialized)
        return;
    let vector = [];
    try {
        vector = await state.embedder.embed(candidate.text);
    }
    catch (error) {
        console.warn(`[lancedb-opencode-pro] embedding unavailable during auto-capture: ${toErrorMessage(error)}`);
        vector = [];
    }
    // Without a vector the record could never be recalled by vector search;
    // drop it rather than store a degenerate row.
    if (vector.length === 0) {
        console.warn("[lancedb-opencode-pro] auto-capture skipped because embedding vector is empty");
        return;
    }
    // Prefer the session's own working directory for scoping; falls back to
    // the plugin's default scope when the session lookup fails.
    const activeScope = await resolveSessionScope(sessionID, client, state.defaultScope);
    await state.store.put({
        id: generateId(),
        text: candidate.text,
        vector,
        category: candidate.category,
        scope: activeScope,
        importance: candidate.importance,
        timestamp: Date.now(),
        schemaVersion: SCHEMA_VERSION,
        embeddingModel: state.config.embedding.model,
        vectorDim: vector.length,
        metadataJson: JSON.stringify({
            source: "auto-capture",
            sessionID,
        }),
    });
    // Keep the scope bounded after every insert.
    await state.store.pruneScope(activeScope, state.config.maxEntriesPerScope);
}
/**
 * Resolve the memory scope for a session from its working directory,
 * returning `fallback` when the lookup fails or the directory is blank.
 */
async function resolveSessionScope(sessionID, client, fallback) {
    try {
        const session = unwrapData(await client.session.get({ path: { id: sessionID } }));
        const directory = session?.directory;
        if (directory && directory.trim().length > 0) {
            return deriveProjectScope(directory);
        }
    }
    catch {
        // Best-effort lookup; fall through to the default scope.
    }
    return fallback;
}
// Normalize any thrown value into a printable message string.
function toErrorMessage(error) {
    if (error instanceof Error) {
        return error.message;
    }
    return String(error);
}
// SDK responses may wrap their payload in `{ data }`; unwrap when present,
// otherwise pass the value through unchanged.
function unwrapData(value) {
    const isWrapped = Boolean(value) && typeof value === "object" && "data" in value;
    return isWrapped ? value.data : value;
}
export default plugin;
|
package/dist/ports.d.ts
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import type { MemoryRecord } from "./types.js";
/** A service requesting a host port mapping in a port plan. */
export interface PortServiceRequest {
    name: string;
    containerPort: number;
    /** Tried first if set; falls back to the scan range when unavailable. */
    preferredHostPort?: number;
}
/** A persisted host-port reservation parsed from a memory record. */
export interface PortReservation {
    /** Id of the backing memory record (used to delete superseded rows). */
    id: string;
    project: string;
    service: string;
    hostPort: number;
    containerPort: number;
    protocol: "tcp";
}
/** A host-port assignment produced by planPorts. */
export interface PortAssignment {
    project: string;
    service: string;
    hostPort: number;
    containerPort: number;
    protocol: "tcp";
}
/** Inputs to planPorts: the scan range plus existing reservations. */
export interface PlanPortsInput {
    project: string;
    services: PortServiceRequest[];
    rangeStart: number;
    rangeEnd: number;
    reservations: PortReservation[];
}
/** Async probe: resolves true when the port can be bound. */
type PortChecker = (port: number) => Promise<boolean>;
export declare function parsePortReservations(records: MemoryRecord[]): PortReservation[];
export declare function planPorts(input: PlanPortsInput, checker?: PortChecker): Promise<PortAssignment[]>;
export declare function reservationKey(project: string, service: string, protocol: "tcp"): string;
export declare function isTcpPortAvailable(port: number): Promise<boolean>;
export {};
package/dist/ports.js
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import { createServer } from "node:net";
|
|
2
|
+
/**
 * Extract port reservations from memory records. Records whose metadata is
 * not valid JSON, or does not describe a port reservation, are skipped.
 */
export function parsePortReservations(records) {
    const reservations = [];
    for (const record of records) {
        let metadata;
        try {
            metadata = JSON.parse(record.metadataJson);
        }
        catch {
            // Unparseable metadata: not a reservation record.
            continue;
        }
        if (!isPortReservationMetadata(metadata)) {
            continue;
        }
        const { project, service, hostPort, containerPort } = metadata;
        reservations.push({
            id: record.id,
            project,
            service,
            hostPort,
            containerPort,
            protocol: "tcp",
        });
    }
    return reservations;
}
// Assign a host port to each requested service, avoiding ports reserved by
// OTHER services, ports already taken earlier in this plan, and ports the
// OS reports as busy. Throws when a service cannot be placed in range.
export async function planPorts(input, checker = isTcpPortAvailable) {
    // hostPort -> set of reservation keys that currently own that port.
    const reservedByPort = new Map();
    for (const reservation of input.reservations) {
        const key = reservationKey(reservation.project, reservation.service, reservation.protocol);
        if (!reservedByPort.has(reservation.hostPort)) {
            reservedByPort.set(reservation.hostPort, new Set());
        }
        reservedByPort.get(reservation.hostPort)?.add(key);
    }
    // Mutable scratch state shared across services in this plan:
    // occupied = ports known busy (probe failed or already assigned),
    // planUsed = ports assigned earlier in THIS plan,
    // checked  = memoized probe results so each port is probed once.
    const occupied = new Set();
    const planUsed = new Set();
    const checked = new Map();
    const assignments = [];
    for (const service of input.services) {
        const serviceKey = reservationKey(input.project, service.name, "tcp");
        // Normalize a possibly-absent/invalid preference to a number or undefined.
        const preferred = Number.isInteger(service.preferredHostPort) ? Number(service.preferredHostPort) : undefined;
        const candidate = await pickCandidatePort({
            preferredHostPort: preferred,
            rangeStart: input.rangeStart,
            rangeEnd: input.rangeEnd,
            serviceKey,
            reservedByPort,
            occupied,
            planUsed,
            checked,
            checker,
        });
        if (candidate === null) {
            throw new Error(`No available host port for service '${service.name}' in range ${input.rangeStart}-${input.rangeEnd}.`);
        }
        planUsed.add(candidate);
        occupied.add(candidate);
        assignments.push({
            project: input.project,
            service: service.name,
            hostPort: candidate,
            containerPort: service.containerPort,
            protocol: "tcp",
        });
    }
    return assignments;
}
// Canonical identity of a reservation. Fields are joined with NUL, which
// cannot appear in project/service names, so keys cannot collide.
export function reservationKey(project, service, protocol) {
    return [project, service, protocol].join("\u0000");
}
/**
 * Probe whether a TCP port can be bound on all interfaces. Resolves true
 * when a throwaway server successfully listens, false on any bind error
 * or when the port number itself is invalid.
 */
export async function isTcpPortAvailable(port) {
    if (!isValidPort(port)) {
        return false;
    }
    return new Promise((resolve) => {
        const probe = createServer();
        const settle = (available) => {
            probe.removeAllListeners();
            // Close before resolving so the port is freed for the caller.
            probe.close(() => resolve(available));
        };
        probe.once("listening", () => settle(true));
        probe.once("error", () => settle(false));
        probe.listen({ host: "0.0.0.0", port, exclusive: true });
    });
}
// Runtime type guard for the metadata shape persisted by memory_port_plan.
// Accepts a missing protocol (treated as tcp) but rejects anything else.
function isPortReservationMetadata(value) {
    if (typeof value !== "object" || !value) {
        return false;
    }
    const data = value;
    if (data.type !== "port-reservation") {
        return false;
    }
    const namesOk = typeof data.project === "string" && typeof data.service === "string";
    const portsOk = Number.isInteger(data.hostPort) && Number.isInteger(data.containerPort);
    const protocolOk = data.protocol === undefined || data.protocol === "tcp";
    return namesOk && portsOk && protocolOk;
}
/**
 * Pick the first usable host port for one service: the preferred port (if
 * any) is tried first, then the configured range in ascending order.
 * Returns null when nothing fits.
 */
async function pickCandidatePort(input) {
    const { preferredHostPort, rangeStart, rangeEnd } = input;
    const candidates = preferredHostPort === undefined ? [] : [preferredHostPort];
    for (let port = rangeStart; port <= rangeEnd; port += 1) {
        if (port !== preferredHostPort) {
            candidates.push(port);
        }
    }
    for (const port of candidates) {
        if (!isValidPort(port)) {
            continue;
        }
        // Skip ports already taken by this plan or known busy.
        if (input.planUsed.has(port) || input.occupied.has(port)) {
            continue;
        }
        // A persisted reservation blocks the port unless this exact service
        // is its sole owner (re-planning a service keeps its own port).
        const owners = input.reservedByPort.get(port);
        if (owners !== undefined && (owners.size > 1 || !owners.has(input.serviceKey))) {
            continue;
        }
        // Probe OS-level availability at most once per port.
        let available = input.checked.get(port);
        if (available === undefined) {
            available = await input.checker(port);
            input.checked.set(port, available);
        }
        if (available) {
            return port;
        }
        input.occupied.add(port);
    }
    return null;
}
// True when `port` is an integer within the valid TCP range [1, 65535].
function isValidPort(port) {
    if (!Number.isInteger(port)) {
        return false;
    }
    return port >= 1 && port <= 65535;
}
package/dist/scope.d.ts
ADDED
package/dist/scope.js
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { execFileSync } from "node:child_process";
|
|
2
|
+
import { stableHash } from "./utils.js";
|
|
3
|
+
/**
 * Derive a stable memory scope id for a worktree: keyed by the git origin
 * URL when one is configured, otherwise by the local path (prefixed with
 * "local:" so the two namespaces never collide).
 */
export function deriveProjectScope(worktree) {
    const remote = tryGetGitRemote(worktree);
    if (!remote) {
        return `project:local:${stableHash(worktree).slice(0, 16)}`;
    }
    return `project:${stableHash(remote).slice(0, 16)}`;
}
// Scopes to query: always the active scope, plus "global" when enabled.
export function buildScopeFilter(activeScope, includeGlobal) {
    const scopes = [activeScope];
    if (includeGlobal) {
        scopes.push("global");
    }
    return scopes;
}
// Read remote.origin.url for a worktree via git. Returns null when git is
// missing, the directory is not a repo, or the remote is not configured.
function tryGetGitRemote(worktree) {
    const gitArgs = ["-C", worktree, "config", "--get", "remote.origin.url"];
    try {
        const raw = execFileSync("git", gitArgs, {
            encoding: "utf8",
            stdio: ["ignore", "pipe", "ignore"],
        });
        const url = raw.trim();
        if (url.length === 0) {
            return null;
        }
        return url;
    }
    catch {
        return null;
    }
}
package/dist/store.d.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { MemoryRecord, SearchResult } from "./types.js";
/**
 * LanceDB-backed memory store: persistence, hybrid (vector + BM25) search,
 * scope-bounded listing/pruning, and index-health reporting.
 */
export declare class MemoryStore {
    private readonly dbPath;
    private lancedb;
    private connection;
    private table;
    private indexState;
    private scopeCache;
    constructor(dbPath: string);
    /** Open/create the table for vectors of the given dimension. */
    init(vectorDim: number): Promise<void>;
    /** Insert one record. */
    put(record: MemoryRecord): Promise<void>;
    /**
     * Hybrid search over the given scopes; results below minScore are
     * filtered out.
     */
    search(params: {
        query: string;
        queryVector: number[];
        scopes: string[];
        limit: number;
        vectorWeight: number;
        bm25Weight: number;
        minScore: number;
    }): Promise<SearchResult[]>;
    /** Delete one record by id; true when a row was removed. */
    deleteById(id: string, scopes: string[]): Promise<boolean>;
    /** Remove every record in a scope; returns the count removed. */
    clearScope(scope: string): Promise<number>;
    /** List up to `limit` records in a scope. */
    list(scope: string, limit: number): Promise<MemoryRecord[]>;
    /** Trim a scope down to `maxEntries`; returns the count removed. */
    pruneScope(scope: string, maxEntries: number): Promise<number>;
    /** Count records whose stored vector dimension differs from expectedDim. */
    countIncompatibleVectors(scopes: string[], expectedDim: number): Promise<number>;
    /** Snapshot of index availability (vector / full-text). */
    getIndexHealth(): {
        vector: boolean;
        fts: boolean;
        ftsError?: string;
    };
    private invalidateScope;
    private getCachedScopes;
    private requireTable;
    private readByScopes;
    private ensureIndexes;
}
|