lancedb-opencode-pro 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +501 -0
- package/dist/config.d.ts +3 -0
- package/dist/config.js +93 -0
- package/dist/embedder.d.ts +14 -0
- package/dist/embedder.js +72 -0
- package/dist/extract.d.ts +2 -0
- package/dist/extract.js +44 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +372 -0
- package/dist/ports.d.ts +34 -0
- package/dist/ports.js +129 -0
- package/dist/scope.d.ts +2 -0
- package/dist/scope.js +24 -0
- package/dist/store.d.ts +36 -0
- package/dist/store.js +283 -0
- package/dist/types.d.ts +48 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +9 -0
- package/dist/utils.js +73 -0
- package/package.json +66 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Jonathan Tsai <tryweb@ichiayi.com>
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,501 @@
|
|
|
1
|
+
# lancedb-opencode-pro
|
|
2
|
+
|
|
3
|
+
LanceDB-backed long-term memory provider for OpenCode.
|
|
4
|
+
|
|
5
|
+
## Supported OpenCode Versions
|
|
6
|
+
|
|
7
|
+
- Supported: OpenCode `1.2.27+`
|
|
8
|
+
- Configuration model: sidecar config file at `~/.config/opencode/lancedb-opencode-pro.json`
|
|
9
|
+
- Not recommended: top-level `memory` in `opencode.json`, because current OpenCode versions reject that key during config validation
|
|
10
|
+
|
|
11
|
+
## Install
|
|
12
|
+
|
|
13
|
+
For normal use, install from npm into a fixed local plugin directory:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
mkdir -p ~/.config/opencode/plugins/lancedb-opencode-pro
|
|
17
|
+
npm install --prefix ~/.config/opencode/plugins/lancedb-opencode-pro lancedb-opencode-pro
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
Register the plugin as a `file://` path in `~/.config/opencode/opencode.json`:
|
|
21
|
+
|
|
22
|
+
```json
|
|
23
|
+
{
|
|
24
|
+
"$schema": "https://opencode.ai/config.json",
|
|
25
|
+
"plugin": [
|
|
26
|
+
"oh-my-opencode",
|
|
27
|
+
"file:///home/<user>/.config/opencode/plugins/lancedb-opencode-pro/node_modules/lancedb-opencode-pro/dist/index.js"
|
|
28
|
+
]
|
|
29
|
+
}
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
If you already use other plugins, keep them and append this `file://` entry.
|
|
33
|
+
|
|
34
|
+
## Fallback Install From Local `.tgz`
|
|
35
|
+
|
|
36
|
+
Use this flow when you want to install an unpublished build or test a release candidate on another machine.
|
|
37
|
+
|
|
38
|
+
1. On the build host, build and pack:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
npm ci
|
|
42
|
+
npm run typecheck
|
|
43
|
+
npm run build
|
|
44
|
+
npm pack
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
This generates a file like:
|
|
48
|
+
|
|
49
|
+
```text
|
|
50
|
+
lancedb-opencode-pro-0.1.1.tgz
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
2. Copy the `.tgz` to the target host (example):
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
scp lancedb-opencode-pro-0.1.1.tgz <user>@<target-host>:/tmp/
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
3. On the target host, install into a fixed local plugin directory:
|
|
60
|
+
|
|
61
|
+
```bash
|
|
62
|
+
mkdir -p ~/.config/opencode/plugins/lancedb-opencode-pro
|
|
63
|
+
npm install --prefix ~/.config/opencode/plugins/lancedb-opencode-pro /tmp/lancedb-opencode-pro-0.1.1.tgz
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
4. For OpenCode `1.2.27+`, create the sidecar config file `~/.config/opencode/lancedb-opencode-pro.json`:
|
|
67
|
+
|
|
68
|
+
```json
|
|
69
|
+
{
|
|
70
|
+
"provider": "lancedb-opencode-pro",
|
|
71
|
+
"dbPath": "~/.opencode/memory/lancedb",
|
|
72
|
+
"embedding": {
|
|
73
|
+
"provider": "ollama",
|
|
74
|
+
"model": "nomic-embed-text",
|
|
75
|
+
"baseUrl": "http://127.0.0.1:11434"
|
|
76
|
+
},
|
|
77
|
+
"retrieval": {
|
|
78
|
+
"mode": "hybrid",
|
|
79
|
+
"vectorWeight": 0.7,
|
|
80
|
+
"bm25Weight": 0.3,
|
|
81
|
+
"minScore": 0.2
|
|
82
|
+
},
|
|
83
|
+
"includeGlobalScope": true,
|
|
84
|
+
"minCaptureChars": 80,
|
|
85
|
+
"maxEntriesPerScope": 3000
|
|
86
|
+
}
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
5. Set `embedding.baseUrl` to the Ollama endpoint that is reachable from that host.
|
|
90
|
+
|
|
91
|
+
- Same machine as OpenCode: `http://127.0.0.1:11434`
|
|
92
|
+
- Another machine on the network: for example `http://192.168.11.206:11434`
|
|
93
|
+
|
|
94
|
+
You do not need `LANCEDB_OPENCODE_PRO_OLLAMA_BASE_URL` if the sidecar file already contains the correct `embedding.baseUrl`. Use the environment variable only when you want to override the file at runtime.
|
|
95
|
+
|
|
96
|
+
6. Make sure Ollama is reachable from that host before starting OpenCode:
|
|
97
|
+
|
|
98
|
+
```bash
|
|
99
|
+
curl http://127.0.0.1:11434/api/tags
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
or, for a remote Ollama server:
|
|
103
|
+
|
|
104
|
+
```bash
|
|
105
|
+
curl http://192.168.11.206:11434/api/tags
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
7. Verify plugin file path and start/restart OpenCode:
|
|
109
|
+
|
|
110
|
+
```bash
|
|
111
|
+
ls -la ~/.config/opencode/plugins/lancedb-opencode-pro/node_modules/lancedb-opencode-pro/dist/index.js
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
Then start or restart OpenCode, and verify memory store initialization.
|
|
115
|
+
|
|
116
|
+
After the first successful memory operation, LanceDB files should appear under:
|
|
117
|
+
|
|
118
|
+
```text
|
|
119
|
+
~/.opencode/memory/lancedb
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
You can also verify that the directory exists:
|
|
123
|
+
|
|
124
|
+
```bash
|
|
125
|
+
ls -la ~/.opencode/memory/lancedb
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
### When to use environment variables
|
|
129
|
+
|
|
130
|
+
Environment variables are optional. The recommended default is:
|
|
131
|
+
|
|
132
|
+
- keep durable settings in `~/.config/opencode/lancedb-opencode-pro.json`
|
|
133
|
+
- avoid setting `LANCEDB_OPENCODE_PRO_OLLAMA_BASE_URL` unless you intentionally want a temporary or host-specific override
|
|
134
|
+
|
|
135
|
+
Example override:
|
|
136
|
+
|
|
137
|
+
```bash
|
|
138
|
+
export LANCEDB_OPENCODE_PRO_OLLAMA_BASE_URL="http://192.168.11.206:11434"
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
This override has higher priority than the sidecar file.
|
|
142
|
+
|
|
143
|
+
## OpenCode Config
|
|
144
|
+
|
|
145
|
+
Use a sidecar config file. This is the supported configuration model for current OpenCode versions.
|
|
146
|
+
|
|
147
|
+
Create `~/.config/opencode/lancedb-opencode-pro.json`:
|
|
148
|
+
|
|
149
|
+
```json
|
|
150
|
+
{
|
|
151
|
+
"provider": "lancedb-opencode-pro",
|
|
152
|
+
"dbPath": "~/.opencode/memory/lancedb",
|
|
153
|
+
"embedding": {
|
|
154
|
+
"provider": "ollama",
|
|
155
|
+
"model": "nomic-embed-text",
|
|
156
|
+
"baseUrl": "http://127.0.0.1:11434"
|
|
157
|
+
},
|
|
158
|
+
"retrieval": {
|
|
159
|
+
"mode": "hybrid",
|
|
160
|
+
"vectorWeight": 0.7,
|
|
161
|
+
"bm25Weight": 0.3,
|
|
162
|
+
"minScore": 0.2
|
|
163
|
+
},
|
|
164
|
+
"includeGlobalScope": true,
|
|
165
|
+
"minCaptureChars": 80,
|
|
166
|
+
"maxEntriesPerScope": 3000
|
|
167
|
+
}
|
|
168
|
+
```
|
|
169
|
+
|
|
170
|
+
Optional project override path:
|
|
171
|
+
|
|
172
|
+
```text
|
|
173
|
+
.opencode/lancedb-opencode-pro.json
|
|
174
|
+
```
|
|
175
|
+
|
|
176
|
+
## Config Precedence
|
|
177
|
+
|
|
178
|
+
Higher priority overrides lower priority:
|
|
179
|
+
|
|
180
|
+
1. Environment variables (`LANCEDB_OPENCODE_PRO_*`)
|
|
181
|
+
2. `LANCEDB_OPENCODE_PRO_CONFIG_PATH`
|
|
182
|
+
3. Project sidecar: `.opencode/lancedb-opencode-pro.json`
|
|
183
|
+
4. Global sidecar: `~/.config/opencode/lancedb-opencode-pro.json`
|
|
184
|
+
5. Legacy sidecar: `~/.opencode/lancedb-opencode-pro.json`
|
|
185
|
+
6. Legacy `config.memory`
|
|
186
|
+
7. Built-in defaults
|
|
187
|
+
|
|
188
|
+
Supported environment variables:
|
|
189
|
+
|
|
190
|
+
- `LANCEDB_OPENCODE_PRO_CONFIG_PATH`
|
|
191
|
+
- `LANCEDB_OPENCODE_PRO_PROVIDER`
|
|
192
|
+
- `LANCEDB_OPENCODE_PRO_DB_PATH`
|
|
193
|
+
- `LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL`
|
|
194
|
+
- `LANCEDB_OPENCODE_PRO_OLLAMA_BASE_URL`
|
|
195
|
+
- `LANCEDB_OPENCODE_PRO_EMBEDDING_TIMEOUT_MS`
|
|
196
|
+
- `LANCEDB_OPENCODE_PRO_RETRIEVAL_MODE`
|
|
197
|
+
- `LANCEDB_OPENCODE_PRO_VECTOR_WEIGHT`
|
|
198
|
+
- `LANCEDB_OPENCODE_PRO_BM25_WEIGHT`
|
|
199
|
+
- `LANCEDB_OPENCODE_PRO_MIN_SCORE`
|
|
200
|
+
- `LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE`
|
|
201
|
+
- `LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS`
|
|
202
|
+
- `LANCEDB_OPENCODE_PRO_MAX_ENTRIES_PER_SCOPE`
|
|
203
|
+
|
|
204
|
+
## What It Provides
|
|
205
|
+
|
|
206
|
+
- Auto-capture of durable outcomes from completed assistant responses.
|
|
207
|
+
- Hybrid retrieval (vector + lexical) for future context injection.
|
|
208
|
+
- Project-scope memory isolation (`project:*` + optional `global`).
|
|
209
|
+
- Memory tools:
|
|
210
|
+
- `memory_search`
|
|
211
|
+
- `memory_delete`
|
|
212
|
+
- `memory_clear`
|
|
213
|
+
- `memory_stats`
|
|
214
|
+
- `memory_port_plan`
|
|
215
|
+
|
|
216
|
+
## Compose Port Planning (Cross-Project)
|
|
217
|
+
|
|
218
|
+
Use `memory_port_plan` before writing `docker-compose.yml` to avoid host port collisions across projects on the same machine.
|
|
219
|
+
|
|
220
|
+
- Reads existing reservations from `global` scope
|
|
221
|
+
- Probes live host port availability
|
|
222
|
+
- Returns non-conflicting assignments
|
|
223
|
+
- Optionally persists reservations for future projects (`persist=true`)
|
|
224
|
+
|
|
225
|
+
Example tool input:
|
|
226
|
+
|
|
227
|
+
```json
|
|
228
|
+
{
|
|
229
|
+
"project": "project-alpha",
|
|
230
|
+
"services": [
|
|
231
|
+
{ "name": "web", "containerPort": 3000, "preferredHostPort": 23000 },
|
|
232
|
+
{ "name": "api", "containerPort": 3001 }
|
|
233
|
+
],
|
|
234
|
+
"rangeStart": 23000,
|
|
235
|
+
"rangeEnd": 23999,
|
|
236
|
+
"persist": true
|
|
237
|
+
}
|
|
238
|
+
```
|
|
239
|
+
|
|
240
|
+
Example output (trimmed):
|
|
241
|
+
|
|
242
|
+
```json
|
|
243
|
+
{
|
|
244
|
+
"project": "project-alpha",
|
|
245
|
+
"persistRequested": true,
|
|
246
|
+
"persisted": 2,
|
|
247
|
+
"assignments": [
|
|
248
|
+
{
|
|
249
|
+
"project": "project-alpha",
|
|
250
|
+
"service": "web",
|
|
251
|
+
"hostPort": 23000,
|
|
252
|
+
"containerPort": 3000,
|
|
253
|
+
"protocol": "tcp"
|
|
254
|
+
},
|
|
255
|
+
{
|
|
256
|
+
"project": "project-alpha",
|
|
257
|
+
"service": "api",
|
|
258
|
+
"hostPort": 23001,
|
|
259
|
+
"containerPort": 3001,
|
|
260
|
+
"protocol": "tcp"
|
|
261
|
+
}
|
|
262
|
+
],
|
|
263
|
+
"warnings": []
|
|
264
|
+
}
|
|
265
|
+
```
|
|
266
|
+
|
|
267
|
+
Map assignments into `docker-compose.yml`:
|
|
268
|
+
|
|
269
|
+
```yaml
|
|
270
|
+
services:
|
|
271
|
+
web:
|
|
272
|
+
ports:
|
|
273
|
+
- "23000:3000"
|
|
274
|
+
api:
|
|
275
|
+
ports:
|
|
276
|
+
- "23001:3001"
|
|
277
|
+
```
|
|
278
|
+
|
|
279
|
+
Notes:
|
|
280
|
+
|
|
281
|
+
- This is best-effort conflict avoidance, not a hard distributed lock.
|
|
282
|
+
- For safer operation in automation, run planning immediately before `docker compose up`.
|
|
283
|
+
- Reservations are upserted by `project + service + protocol` when `persist=true`.
|
|
284
|
+
|
|
285
|
+
## Local Development
|
|
286
|
+
|
|
287
|
+
```bash
|
|
288
|
+
npm install
|
|
289
|
+
npm run typecheck
|
|
290
|
+
npm run build
|
|
291
|
+
```
|
|
292
|
+
|
|
293
|
+
## Validation Commands
|
|
294
|
+
|
|
295
|
+
The project provides layered validation workflows that can run locally or inside the Docker environment.
|
|
296
|
+
|
|
297
|
+
| Command | What it covers |
|
|
298
|
+
|---|---|
|
|
299
|
+
| `npm run test:foundation` | Write-read persistence, scope isolation, vector compatibility, timestamp ordering |
|
|
300
|
+
| `npm run test:regression` | Auto-capture extraction, search output shape, delete/clear safety, pruning |
|
|
301
|
+
| `npm run test:retrieval` | Recall@K and Robustness-δ@K against synthetic fixtures |
|
|
302
|
+
| `npm run benchmark:latency` | Search p50/p99, insert avg, list avg with hard-gate enforcement |
|
|
303
|
+
| `npm run verify` | Typecheck + build + foundation + regression + retrieval (quick release check) |
|
|
304
|
+
| `npm run verify:full` | All of the above + benchmark + `npm pack` (full release gate) |
|
|
305
|
+
|
|
306
|
+
Threshold policy and benchmark profiles are documented in `docs/benchmark-thresholds.md`.
|
|
307
|
+
Acceptance evidence mapping and archive/ship gate policy are documented in `docs/release-readiness.md`.
|
|
308
|
+
|
|
309
|
+
## Maintainer Release SOP
|
|
310
|
+
|
|
311
|
+
Use this flow when publishing a new version to npm.
|
|
312
|
+
|
|
313
|
+
1. Update `package.json` version and `CHANGELOG.md`.
|
|
314
|
+
2. Run the canonical release gate in Docker:
|
|
315
|
+
|
|
316
|
+
```bash
|
|
317
|
+
docker compose build --no-cache && docker compose up -d
|
|
318
|
+
docker compose exec app npm run release:check
|
|
319
|
+
```
|
|
320
|
+
|
|
321
|
+
3. Confirm npm authentication:
|
|
322
|
+
|
|
323
|
+
```bash
|
|
324
|
+
npm whoami
|
|
325
|
+
```
|
|
326
|
+
|
|
327
|
+
If not logged in yet:
|
|
328
|
+
|
|
329
|
+
```bash
|
|
330
|
+
npm login
|
|
331
|
+
```
|
|
332
|
+
|
|
333
|
+
4. Publish from the host:
|
|
334
|
+
|
|
335
|
+
```bash
|
|
336
|
+
npm publish
|
|
337
|
+
```
|
|
338
|
+
|
|
339
|
+
5. Verify the package is live:
|
|
340
|
+
|
|
341
|
+
```bash
|
|
342
|
+
npm view lancedb-opencode-pro name version
|
|
343
|
+
```
|
|
344
|
+
|
|
345
|
+
Notes:
|
|
346
|
+
|
|
347
|
+
- `prepublishOnly` runs `npm run verify:full`, so `npm publish` is blocked if the release gate fails.
|
|
348
|
+
- `publishConfig.access=public` keeps first publish public.
|
|
349
|
+
- For CI provenance attestation, publish from a supported CI provider with `npm publish --provenance`.
|
|
350
|
+
- If your npm account enforces 2FA, complete the browser or OTP challenge during publish.
|
|
351
|
+
|
|
352
|
+
### Troubleshooting: EACCES on `dist` or `dist-test`
|
|
353
|
+
|
|
354
|
+
If `npm publish` fails with errors like `TS5033 ... EACCES: permission denied` for files under `dist/` or `dist-test/`, some build artifacts were likely created by `root` inside Docker.
|
|
355
|
+
|
|
356
|
+
Fix ownership from the container, then re-run publish:
|
|
357
|
+
|
|
358
|
+
```bash
|
|
359
|
+
docker compose up -d
|
|
360
|
+
docker compose exec -T -u root app sh -lc 'chown -R 1000:1000 /workspace/dist /workspace/dist-test 2>/dev/null || true'
|
|
361
|
+
npm publish
|
|
362
|
+
```
|
|
363
|
+
|
|
364
|
+
You can validate ownership first:
|
|
365
|
+
|
|
366
|
+
```bash
|
|
367
|
+
ls -l dist dist-test/src 2>/dev/null
|
|
368
|
+
```
|
|
369
|
+
|
|
370
|
+
## Docker Test Environment
|
|
371
|
+
|
|
372
|
+
```bash
|
|
373
|
+
docker compose build --no-cache && docker compose up -d
|
|
374
|
+
docker compose exec app npm run typecheck
|
|
375
|
+
docker compose exec app npm run build
|
|
376
|
+
```
|
|
377
|
+
|
|
378
|
+
### Running validation inside Docker
|
|
379
|
+
|
|
380
|
+
```bash
|
|
381
|
+
docker compose build --no-cache && docker compose up -d
|
|
382
|
+
|
|
383
|
+
# Quick release check
|
|
384
|
+
docker compose exec app npm run verify
|
|
385
|
+
|
|
386
|
+
# Full release gate (includes benchmark + pack)
|
|
387
|
+
docker compose exec app npm run verify:full
|
|
388
|
+
|
|
389
|
+
# Individual workflows
|
|
390
|
+
docker compose exec app npm run test:foundation
|
|
391
|
+
docker compose exec app npm run test:regression
|
|
392
|
+
docker compose exec app npm run test:retrieval
|
|
393
|
+
docker compose exec app npm run benchmark:latency
|
|
394
|
+
```
|
|
395
|
+
|
|
396
|
+
### Operator verification
|
|
397
|
+
|
|
398
|
+
After running `npm run verify:full`, operators can inspect the following:
|
|
399
|
+
|
|
400
|
+
```bash
|
|
401
|
+
# Confirm the packaged build is installable
|
|
402
|
+
docker compose exec app ls -la lancedb-opencode-pro-*.tgz
|
|
403
|
+
|
|
404
|
+
# Confirm typecheck and build succeeded
|
|
405
|
+
docker compose exec app npm run typecheck
|
|
406
|
+
docker compose exec app npm run build
|
|
407
|
+
|
|
408
|
+
# Check resolved default storage path
|
|
409
|
+
docker compose exec app node -e "import('./dist/index.js').then(() => console.log('plugin loaded'))"
|
|
410
|
+
docker compose exec app sh -lc 'ls -la ~/.opencode/memory/lancedb 2>/dev/null || echo "No data yet (expected before first use)"'
|
|
411
|
+
```
|
|
412
|
+
|
|
413
|
+
## Long Memory Verification
|
|
414
|
+
|
|
415
|
+
Use this checklist when you want to verify that `lancedb-opencode-pro` provides durable long-term memory instead of in-process temporary state.
|
|
416
|
+
|
|
417
|
+
### 1. Start the Docker test environment
|
|
418
|
+
|
|
419
|
+
```bash
|
|
420
|
+
docker compose build --no-cache && docker compose up -d
|
|
421
|
+
```
|
|
422
|
+
|
|
423
|
+
### 2. Install dependencies and build inside the container
|
|
424
|
+
|
|
425
|
+
The E2E script loads `dist/index.js`, so build artifacts must exist first.
|
|
426
|
+
|
|
427
|
+
```bash
|
|
428
|
+
docker compose exec app npm install
|
|
429
|
+
docker compose exec app npm run build
|
|
430
|
+
```
|
|
431
|
+
|
|
432
|
+
### 3. Run the built-in end-to-end memory test
|
|
433
|
+
|
|
434
|
+
```bash
|
|
435
|
+
docker compose exec app npm run test:e2e
|
|
436
|
+
```
|
|
437
|
+
|
|
438
|
+
Expected success output:
|
|
439
|
+
|
|
440
|
+
```text
|
|
441
|
+
E2E PASS: auto-capture, search, delete safety, clear safety, and clear execution verified.
|
|
442
|
+
```
|
|
443
|
+
|
|
444
|
+
This verifies all of the following in one run:
|
|
445
|
+
|
|
446
|
+
- assistant output is buffered and auto-captured
|
|
447
|
+
- `session.idle` triggers durable persistence
|
|
448
|
+
- `memory_search` can retrieve the stored memory
|
|
449
|
+
- `memory_delete` requires `confirm=true`
|
|
450
|
+
- `memory_clear` requires `confirm=true`
|
|
451
|
+
|
|
452
|
+
### 4. Verify that LanceDB files were written to disk
|
|
453
|
+
|
|
454
|
+
The E2E script uses `/tmp/opencode-memory-e2e` as its test database path.
|
|
455
|
+
|
|
456
|
+
```bash
|
|
457
|
+
docker compose exec app ls -la /tmp/opencode-memory-e2e
|
|
458
|
+
```
|
|
459
|
+
|
|
460
|
+
If files appear in that directory after the E2E run, memory was written to disk instead of only being kept in process memory.
|
|
461
|
+
|
|
462
|
+
### 5. Verify the real default storage path used by OpenCode integration
|
|
463
|
+
|
|
464
|
+
When running through the normal plugin config, the default durable storage path is:
|
|
465
|
+
|
|
466
|
+
```text
|
|
467
|
+
~/.opencode/memory/lancedb
|
|
468
|
+
```
|
|
469
|
+
|
|
470
|
+
Check it inside the container with:
|
|
471
|
+
|
|
472
|
+
```bash
|
|
473
|
+
docker compose exec app sh -lc 'ls -la ~/.opencode/memory/lancedb'
|
|
474
|
+
```
|
|
475
|
+
|
|
476
|
+
### 6. Stronger proof: verify retrieval still works after restart
|
|
477
|
+
|
|
478
|
+
Long memory is only convincing if retrieval still works after the runtime is restarted.
|
|
479
|
+
|
|
480
|
+
```bash
|
|
481
|
+
docker compose restart app
|
|
482
|
+
docker compose exec app npm run test:e2e
|
|
483
|
+
docker compose exec app ls -la /tmp/opencode-memory-e2e
|
|
484
|
+
```
|
|
485
|
+
|
|
486
|
+
If the search step still succeeds after restart and the database files remain present, that is strong evidence that the memory is durable.
|
|
487
|
+
|
|
488
|
+
### Pass criteria
|
|
489
|
+
|
|
490
|
+
Treat the feature as verified only when all of these are true:
|
|
491
|
+
|
|
492
|
+
- `docker compose exec app npm run test:e2e` passes
|
|
493
|
+
- `/tmp/opencode-memory-e2e` contains LanceDB files after the run
|
|
494
|
+
- the memory retrieval step still succeeds after container restart
|
|
495
|
+
- the configured OpenCode storage path exists when running real plugin integration
|
|
496
|
+
|
|
497
|
+
## Notes
|
|
498
|
+
|
|
499
|
+
- Default storage path: `~/.opencode/memory/lancedb`
|
|
500
|
+
- Embedding backend in v1: `ollama`
|
|
501
|
+
- The provider keeps schema metadata (`schemaVersion`, `embeddingModel`, `vectorDim`) to guard against unsafe vector mixing.
|
package/dist/config.d.ts
ADDED
package/dist/config.js
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { clamp, expandHomePath, parseJsonObject, toBoolean, toNumber } from "./utils.js";
|
|
4
|
+
const DEFAULT_DB_PATH = "~/.opencode/memory/lancedb";
|
|
5
|
+
const DEFAULT_OLLAMA_BASE_URL = "http://127.0.0.1:11434";
|
|
6
|
+
const SIDECAR_FILE = "lancedb-opencode-pro.json";
|
|
7
|
+
/**
 * Resolve the effective memory configuration for the plugin.
 *
 * Sources, lowest to highest priority:
 *   built-in defaults -> legacy `config.memory` -> sidecar files
 *   (via loadSidecarConfig) -> `LANCEDB_OPENCODE_PRO_*` environment variables.
 *
 * @param {object|undefined} config - OpenCode config object; only its legacy
 *   `memory` key is read here.
 * @param {string|undefined} worktree - Project root used to locate the
 *   project-level sidecar file.
 * @returns {object} Fully-populated config: { provider, dbPath, embedding,
 *   retrieval, includeGlobalScope, minCaptureChars, maxEntriesPerScope }.
 */
export function resolveMemoryConfig(config, worktree) {
    // Legacy inline config (config.memory) merged under the sidecar files.
    const legacyRaw = (config?.memory ?? {});
    const sidecarRaw = loadSidecarConfig(worktree);
    const raw = mergeMemoryConfig(legacyRaw, sidecarRaw);
    const embeddingRaw = (raw.embedding ?? {});
    const retrievalRaw = (raw.retrieval ?? {});
    // Retrieval mode is coerced to the two supported values; anything that
    // is not exactly "vector" becomes "hybrid".
    const modeRaw = firstString(process.env.LANCEDB_OPENCODE_PRO_RETRIEVAL_MODE, retrievalRaw.mode) ?? "hybrid";
    const mode = modeRaw === "vector" ? "vector" : "hybrid";
    const provider = firstString(process.env.LANCEDB_OPENCODE_PRO_PROVIDER, raw.provider) ?? "lancedb-opencode-pro";
    const dbPath = expandHomePath(firstString(process.env.LANCEDB_OPENCODE_PRO_DB_PATH, raw.dbPath) ?? DEFAULT_DB_PATH);
    // Both weights are clamped to [0, 1] and then renormalized so they sum
    // to 1; if both are 0 the defaults (0.7 / 0.3) are used instead.
    const vectorWeight = clamp(toNumber(process.env.LANCEDB_OPENCODE_PRO_VECTOR_WEIGHT ?? retrievalRaw.vectorWeight, 0.7), 0, 1);
    const bm25Weight = clamp(toNumber(process.env.LANCEDB_OPENCODE_PRO_BM25_WEIGHT ?? retrievalRaw.bm25Weight, 0.3), 0, 1);
    const weightSum = vectorWeight + bm25Weight;
    const normalizedVectorWeight = weightSum > 0 ? vectorWeight / weightSum : 0.7;
    const normalizedBm25Weight = weightSum > 0 ? bm25Weight / weightSum : 0.3;
    return {
        provider,
        dbPath,
        embedding: {
            // Only the Ollama backend is supported in this version; any
            // configured embedding.provider value is ignored.
            provider: "ollama",
            model: firstString(process.env.LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL, embeddingRaw.model) ?? "nomic-embed-text",
            baseUrl: firstString(process.env.LANCEDB_OPENCODE_PRO_OLLAMA_BASE_URL, embeddingRaw.baseUrl) ?? DEFAULT_OLLAMA_BASE_URL,
            // Floor of 500 ms guards against unusably small timeouts.
            timeoutMs: Math.max(500, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_EMBEDDING_TIMEOUT_MS ?? embeddingRaw.timeoutMs, 6000))),
        },
        retrieval: {
            mode,
            vectorWeight: normalizedVectorWeight,
            bm25Weight: normalizedBm25Weight,
            minScore: clamp(toNumber(process.env.LANCEDB_OPENCODE_PRO_MIN_SCORE ?? retrievalRaw.minScore, 0.2), 0, 1),
        },
        includeGlobalScope: toBoolean(process.env.LANCEDB_OPENCODE_PRO_INCLUDE_GLOBAL_SCOPE ?? raw.includeGlobalScope, true),
        // Lower bounds (30 chars / 50 entries) keep capture and pruning sane
        // even when misconfigured.
        minCaptureChars: Math.max(30, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_MIN_CAPTURE_CHARS ?? raw.minCaptureChars, 80))),
        maxEntriesPerScope: Math.max(50, Math.floor(toNumber(process.env.LANCEDB_OPENCODE_PRO_MAX_ENTRIES_PER_SCOPE ?? raw.maxEntriesPerScope, 3000))),
    };
}
|
|
42
|
+
/**
 * Load and merge all sidecar config files that exist on this machine.
 *
 * Candidates are ordered lowest to highest priority; each later file is
 * merged over the accumulated result, so higher-priority files win:
 * legacy `~/.opencode`, then global `~/.config/opencode`, then the
 * project's `.opencode` directory, then an explicit
 * `LANCEDB_OPENCODE_PRO_CONFIG_PATH` override.
 *
 * @param {string|undefined} worktree - Project root, if known.
 * @returns {object} Merged raw config; `{}` when no sidecar file exists.
 */
function loadSidecarConfig(worktree) {
    const explicitPath = firstString(process.env.LANCEDB_OPENCODE_PRO_CONFIG_PATH);
    const searchOrder = [
        join(expandHomePath("~/.opencode"), SIDECAR_FILE),
        join(expandHomePath("~/.config/opencode"), SIDECAR_FILE),
        worktree ? join(worktree, ".opencode", SIDECAR_FILE) : undefined,
        explicitPath,
    ];
    return searchOrder
        .filter((candidate) => Boolean(candidate))
        .reduce((accumulated, candidate) => {
            const loaded = readConfigFile(candidate);
            return loaded ? mergeMemoryConfig(accumulated, loaded) : accumulated;
        }, {});
}
|
|
61
|
+
/**
 * Read one sidecar config file as a JSON object.
 *
 * @param {string} filePath - Path that may start with "~".
 * @returns {object|null} Parsed object, or null when the file is absent,
 *   unreadable, or not parseable.
 */
function readConfigFile(filePath) {
    // Resolve a leading "~" before touching the filesystem.
    const resolved = expandHomePath(filePath);
    if (existsSync(resolved)) {
        try {
            const text = readFileSync(resolved, "utf8");
            return parseJsonObject(text, {});
        }
        catch {
            // Treat an unreadable or malformed file the same as a missing one.
            return null;
        }
    }
    return null;
}
|
|
72
|
+
/**
 * Merge two raw memory-config objects, with `override` winning.
 *
 * Top-level keys are shallow-merged, but the `embedding` and `retrieval`
 * sections are merged one level deeper so a partial override keeps the
 * base's remaining fields in those sections.
 *
 * @param {object} base - Lower-priority config.
 * @param {object} override - Higher-priority config.
 * @returns {object} New merged object; neither input is mutated.
 */
function mergeMemoryConfig(base, override) {
    const embedding = Object.assign({}, base.embedding ?? {}, override.embedding ?? {});
    const retrieval = Object.assign({}, base.retrieval ?? {}, override.retrieval ?? {});
    return Object.assign({}, base, override, { embedding, retrieval });
}
|
|
86
|
+
/**
 * Return the first argument that is a non-blank string, trimmed.
 *
 * @param {...unknown} candidates - Values of any type, checked in order.
 * @returns {string|undefined} Trimmed first non-blank string, or undefined
 *   when no candidate qualifies.
 */
function firstString(...candidates) {
    const hit = candidates.find(
        (candidate) => typeof candidate === "string" && candidate.trim().length > 0,
    );
    // `find` can only return a matching string or undefined, because
    // `undefined` itself never satisfies the string predicate.
    return hit === undefined ? undefined : hit.trim();
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { EmbeddingConfig } from "./types.js";
/**
 * Contract for text-embedding backends.
 */
export interface Embedder {
    /** Model identifier used to produce the vectors. */
    readonly model: string;
    /** Embed a single text into a numeric vector. */
    embed(text: string): Promise<number[]>;
    /** Dimension of vectors produced by this embedder. */
    dim(): Promise<number>;
}
/**
 * Embedder backed by an Ollama HTTP endpoint.
 */
export declare class OllamaEmbedder implements Embedder {
    private readonly config;
    readonly model: string;
    // Caches the vector length after the first successful embedding.
    private cachedDim;
    constructor(config: EmbeddingConfig);
    embed(text: string): Promise<number[]>;
    dim(): Promise<number>;
}
|
package/dist/embedder.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
 * Embedding dimensions for well-known Ollama models, keyed by the base
 * model name (lowercase, no ":tag" suffix). Used only when the live
 * server cannot be probed for the real dimension.
 */
const KNOWN_MODEL_DIMS = {
    "nomic-embed-text": 768,
    "mxbai-embed-large": 1024,
    "all-minilm": 384,
    "snowflake-arctic-embed": 1024,
};
/**
 * Best-effort embedding dimension for `model` when Ollama is unreachable.
 *
 * @param {string} model - Ollama model name, optionally with a ":tag"
 *   suffix (e.g. "nomic-embed-text:latest").
 * @returns {number|null} Known dimension, or null for unknown models.
 */
function fallbackDim(model) {
    // Normalize: lowercase and strip any ":tag" suffix so tagged variants
    // map to the same base entry.
    const normalized = model.toLowerCase().replace(/:.*$/, "");
    // Fix: the original also tested `normalized.startsWith(prefix + ":")`,
    // which could never match because the colon suffix was already
    // stripped above. A direct own-key lookup is the live behavior;
    // Object.hasOwn avoids false hits on prototype keys ("constructor").
    return Object.hasOwn(KNOWN_MODEL_DIMS, normalized) ? KNOWN_MODEL_DIMS[normalized] : null;
}
|
|
15
|
+
/**
 * Embedder backed by the Ollama HTTP API (`/api/embeddings`).
 *
 * The first successful embedding caches the vector length so dim() can
 * answer without another network round-trip.
 */
export class OllamaEmbedder {
    config;
    model;
    cachedDim = null;
    /**
     * @param {object} config - Embedding config ({ model, baseUrl?, timeoutMs? }).
     */
    constructor(config) {
        this.config = config;
        this.model = config.model;
    }
    /**
     * Request an embedding vector for `text`.
     *
     * Aborts the HTTP request after `config.timeoutMs` (default 6000 ms).
     *
     * @param {string} text - Text to embed.
     * @returns {Promise<number[]>} Non-empty embedding vector.
     * @throws {Error} On non-2xx responses, a missing/empty vector, or abort.
     */
    async embed(text) {
        const baseUrl = this.config.baseUrl ?? "http://127.0.0.1:11434";
        const aborter = new AbortController();
        const timerId = setTimeout(() => aborter.abort(), this.config.timeoutMs ?? 6000);
        try {
            const payload = JSON.stringify({
                model: this.config.model,
                prompt: text,
            });
            const res = await fetch(`${baseUrl}/api/embeddings`, {
                method: "POST",
                headers: {
                    "content-type": "application/json",
                },
                body: payload,
                signal: aborter.signal,
            });
            if (!res.ok) {
                throw new Error(`Ollama embedding request failed: HTTP ${res.status}`);
            }
            const parsed = await res.json();
            const vector = parsed.embedding;
            if (!Array.isArray(vector) || vector.length === 0) {
                throw new Error("Ollama embedding response missing embedding vector");
            }
            // Remember the dimension the first time we see a real vector.
            this.cachedDim ??= vector.length;
            return vector;
        }
        finally {
            // Always cancel the abort timer, success or failure.
            clearTimeout(timerId);
        }
    }
    /**
     * Embedding dimension for the configured model.
     *
     * Probes the live server once; if unreachable, falls back to a known
     * per-model dimension, and throws when none is known.
     *
     * @returns {Promise<number>} Vector dimension.
     * @throws {Error} When the server is unreachable and the model has no
     *   known fallback dimension.
     */
    async dim() {
        if (this.cachedDim !== null) {
            return this.cachedDim;
        }
        try {
            const probe = await this.embed("dimension probe");
            this.cachedDim = probe.length;
            return this.cachedDim;
        }
        catch {
            // Best-effort: the probe failing (e.g. Ollama down) is expected;
            // fall back to the static table when possible.
            const known = fallbackDim(this.model);
            if (known === null) {
                throw new Error(`Ollama unreachable and no known fallback dimension for model "${this.model}"`);
            }
            console.warn(`[lancedb-opencode-pro] Ollama unreachable, using fallback dim ${known} for model "${this.model}"`);
            return known;
        }
    }
}
|