openrecall 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +145 -0
- package/package.json +51 -0
- package/src/agent.ts +268 -0
- package/src/client.ts +16 -0
- package/src/config.ts +79 -0
- package/src/db.ts +93 -0
- package/src/extract.ts +142 -0
- package/src/index.ts +262 -0
- package/src/maintenance.ts +134 -0
- package/src/memory.ts +604 -0
- package/src/migrations/001_initial.ts +73 -0
- package/src/migrations/002_tags.ts +20 -0
- package/src/migrations/003_decay.ts +15 -0
- package/src/migrations/004_links.ts +23 -0
- package/src/migrations/005_metadata.ts +17 -0
- package/src/migrations/index.ts +16 -0
- package/src/tools.ts +658 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Artur Sidorenko

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,145 @@
# OpenRecall

> Cross-session memory plugin for [OpenCode](https://github.com/opencode-ai/opencode) with full-text search, tagging, and auto-recall

OpenRecall gives your OpenCode sessions persistent memory. It stores important findings, decisions, and code patterns and makes them searchable across sessions using SQLite FTS5 with BM25 ranking.

## Features

- **12 LLM tools** — Store, search, update, delete, list, tag, link, refresh, export, import, cleanup, and stats
- **Auto-recall** — Automatically injects relevant memories into the system prompt when a session starts
- **Auto-extraction** — Detects preferences, bug fixes, and conventions from tool outputs
- **Full-text search** — FTS5 with BM25 ranking, stop word removal, and query sanitization
- **Relevance decay** — Older, unaccessed memories naturally rank lower (90-day half-life)
- **Deduplication** — Prevents storing duplicate or near-duplicate memories
- **Tagging** — Organize memories with tags, search by tag
- **Memory linking** — Create relationships between memories (related, supersedes, contradicts, extends)
- **Global memories** — Store knowledge that applies across all projects
- **Import/export** — Back up and migrate memories as JSON
- **Database maintenance** — Auto-optimization, cleanup tools, memory limits

## Installation

```bash
bun add openrecall
```

Add to your OpenCode config (`opencode.json`):

```json
{
  "plugins": ["openrecall"]
}
```

## Configuration

Configure via `opencode.json` under `plugins.openrecall`:

```json
{
  "plugins": {
    "openrecall": {
      "autoRecall": true,
      "autoExtract": true,
      "searchLimit": 10,
      "maxMemories": 0,
      "globalMemories": false
    }
  }
}
```

| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `dbPath` | string | `~/.local/share/opencode/openrecall.db` | Database file path |
| `autoRecall` | boolean | `true` | Auto-inject relevant memories on session start |
| `autoExtract` | boolean | `true` | Auto-extract patterns from tool outputs |
| `searchLimit` | number | `10` | Default max search results |
| `maxMemories` | number | `0` | Memory limit (0 = unlimited) |
| `globalMemories` | boolean | `false` | Enable cross-project global memories |

## Tools

### Core

| Tool | Description |
|------|-------------|
| `memory_store` | Store a memory with content, category, tags, and global flag |
| `memory_search` | Full-text search with BM25 ranking and relevance decay |
| `memory_update` | Update content, category, or source of an existing memory |
| `memory_delete` | Delete a memory by ID |
| `memory_list` | List recent memories with category/scope filters |
| `memory_stats` | Show total count, category breakdown, and DB size |

### Organization

| Tool | Description |
|------|-------------|
| `memory_tag` | Add, remove, list tags; list all tags; search by tag |
| `memory_link` | Create/remove relationships between memories |
| `memory_refresh` | Boost a memory's relevance to prevent decay |

### Infrastructure

| Tool | Description |
|------|-------------|
| `memory_export` | Export memories to versioned JSON |
| `memory_import` | Import memories from JSON with conflict handling |
| `memory_cleanup` | FTS optimization, purge old memories, vacuum DB |
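As a rough illustration of how these tools might be invoked, the argument shapes below are assumptions inferred from the descriptions above; the authoritative parameter schemas are defined in `src/tools.ts`.

```ts
// Hypothetical tool arguments, for illustration only (see src/tools.ts for the real schemas).
const storeArgs = {
  content: "API handlers live under src/routes and are registered in src/index.ts",
  category: "convention",
  tags: ["routing", "project-structure"],
  global: false, // assumed name for the "global flag" mentioned above
}

const searchArgs = {
  query: "where are API routes registered",
  limit: 5, // presumably defaults to the configured searchLimit when omitted
}
```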
## How It Works

### Search

Memories are indexed in a SQLite FTS5 virtual table. Queries are sanitized (special characters removed, stop words stripped) and matched using BM25 ranking. Results are then re-ranked with a time decay factor:

```
final_score = bm25_score * (1 / (1 + age_days * 0.0077))
```

Frequently accessed memories get an additional boost based on `access_count`.
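A minimal sketch of this re-ranking step, using the decay formula above; the access-count boost shape is an assumption, since the actual weighting lives in `src/memory.ts`:

```ts
// Re-rank a BM25 hit with time decay and an (assumed) access-count boost.
interface ScoredMemory {
  bm25Score: number   // relevance from the FTS5 MATCH query
  ageDays: number     // days since the memory was created or last refreshed
  accessCount: number // how many times the memory has been recalled
}

function finalScore(m: ScoredMemory): number {
  const decay = 1 / (1 + m.ageDays * 0.0077)              // formula from the section above
  const accessBoost = 1 + 0.1 * Math.log1p(m.accessCount) // assumed boost shape
  return m.bm25Score * decay * accessBoost
}
```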
### Auto-recall

On the first message in a new session, OpenRecall searches for memories relevant to the user's message and injects the top results into the system prompt. Falls back to showing recent memories if no search matches.
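The exact prompt formatting is internal to the plugin, but the idea can be sketched as follows (the field names and wording are assumptions, not the plugin's actual output):

```ts
// Hypothetical formatting of recalled memories into a system-prompt block.
interface RecalledMemory {
  content: string
  category: string
}

function buildRecallBlock(memories: RecalledMemory[]): string {
  if (memories.length === 0) return ""
  const lines = memories.map((m) => `- [${m.category}] ${m.content}`)
  return `Relevant memories from previous sessions:\n${lines.join("\n")}`
}
```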
### Auto-extraction

The `tool.execute.after` hook monitors tool outputs for patterns:
- **Preferences**: "always use...", "never do...", "I prefer..."
- **Bug fixes**: "fixed by...", "the issue was..."
- **Conventions**: "project uses...", "convention is..."

Extracted memories are rate-limited (max 20 per session) and deduplicated.
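A sketch of what this pattern matching could look like; the regexes below are illustrative assumptions, while the real patterns live in `src/extract.ts`:

```ts
// Illustrative extraction patterns keyed to the plugin's default categories.
const EXTRACTION_PATTERNS: { category: string; pattern: RegExp }[] = [
  { category: "preference", pattern: /\b(always use|never do|i prefer)\b[^.\n]{5,200}/i },
  { category: "debugging",  pattern: /\b(fixed by|the issue was)\b[^.\n]{5,200}/i },
  { category: "convention", pattern: /\b(project uses|convention is)\b[^.\n]{5,200}/i },
]

function extractCandidates(output: string): { category: string; text: string }[] {
  const hits: { category: string; text: string }[] = []
  for (const { category, pattern } of EXTRACTION_PATTERNS) {
    const match = output.match(pattern)
    if (match) hits.push({ category, text: match[0].trim() })
  }
  return hits
}
```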
### Deduplication

Before storing, content is checked against existing memories:
- **Exact match** (case-insensitive, whitespace-normalized): returns existing memory
- **Near-duplicate** (Jaccard similarity > 0.8): updates existing memory with newer content
- Use `force: true` to bypass deduplication
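The near-duplicate check can be pictured as word-level Jaccard similarity against the 0.8 threshold above; the tokenization details are assumptions, since the actual implementation sits in `src/memory.ts`:

```ts
// Word-level Jaccard similarity between two memory contents.
function jaccardSimilarity(a: string, b: string): number {
  const tokens = (s: string) => new Set(s.toLowerCase().split(/\s+/).filter(Boolean))
  const setA = tokens(a)
  const setB = tokens(b)
  const intersection = [...setA].filter((t) => setB.has(t)).length
  const union = new Set([...setA, ...setB]).size
  return union === 0 ? 0 : intersection / union
}

// Treat anything above the documented threshold as a near-duplicate.
const isNearDuplicate = (a: string, b: string) => jaccardSimilarity(a, b) > 0.8
```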
## Data Storage

- Database: `~/.local/share/opencode/openrecall.db` (configurable)
- All data is stored locally — nothing is sent externally
- Uses SQLite WAL mode for concurrent read performance
- Auto-maintenance runs every 7 days (FTS optimization, limit enforcement)
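The 7-day maintenance gate can be sketched as a simple timestamp check; persisting the last-run timestamp is an assumption, with the actual scheduling handled in `src/maintenance.ts`:

```ts
// Run maintenance only when the last run is at least 7 days old.
const MAINTENANCE_INTERVAL_MS = 7 * 24 * 60 * 60 * 1000

function shouldRunMaintenance(lastRunMs: number, nowMs: number = Date.now()): boolean {
  return nowMs - lastRunMs >= MAINTENANCE_INTERVAL_MS
}
```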
## Development

```bash
# Install dependencies
bun install

# Run tests
bun test

# Type check
bun run typecheck
```

## License

MIT
package/package.json
ADDED
@@ -0,0 +1,51 @@
{
  "name": "openrecall",
  "version": "0.1.0",
  "description": "Cross-session memory plugin for OpenCode with full-text search, tagging, and auto-recall",
  "module": "src/index.ts",
  "main": "src/index.ts",
  "type": "module",
  "exports": {
    ".": "./src/index.ts"
  },
  "files": [
    "src/**/*.ts",
    "README.md",
    "LICENSE"
  ],
  "scripts": {
    "test": "bun test",
    "typecheck": "tsc --noEmit",
    "prepublishOnly": "bun test && bun run typecheck"
  },
  "keywords": [
    "opencode",
    "plugin",
    "memory",
    "cross-session",
    "semantic-search",
    "fts5",
    "sqlite",
    "ai",
    "llm"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/ASidorenkoCode/openrecall.git"
  },
  "homepage": "https://github.com/ASidorenkoCode/openrecall",
  "bugs": {
    "url": "https://github.com/ASidorenkoCode/openrecall/issues"
  },
  "license": "MIT",
  "author": "ASidorenkoCode",
  "dependencies": {
    "@opencode-ai/plugin": "^1.2.2"
  },
  "devDependencies": {
    "@types/bun": "latest"
  },
  "peerDependencies": {
    "typescript": "^5"
  }
}
package/src/agent.ts
ADDED
@@ -0,0 +1,268 @@
import { storeMemory } from "./memory"
import { isDbAvailable } from "./db"
import * as fs from "fs"
import * as path from "path"

// Key project files to scan on startup
const KEY_FILES = [
  "README.md",
  "CONTRIBUTING.md",
  "CHANGELOG.md",
  "LICENSE",
  "package.json",
  "tsconfig.json",
  "opencode.json",
  ".claude/CLAUDE.md",
  "CLAUDE.md",
  ".cursorrules",
  ".github/CODEOWNERS",
]

// Max content length per memory chunk
const MAX_CHUNK = 400

// Track which files have been scanned this process lifetime
const scannedFiles = new Set<string>()

interface ExtractionCounter {
  count: number
  extracting: boolean
}

const sessionCounters = new Map<string, ExtractionCounter>()

export function getCounter(sessionID: string): ExtractionCounter {
  let counter = sessionCounters.get(sessionID)
  if (!counter) {
    counter = { count: 0, extracting: false }
    sessionCounters.set(sessionID, counter)
  }
  return counter
}

export function incrementCounter(sessionID: string): number {
  const counter = getCounter(sessionID)
  counter.count++
  return counter.count
}

export function shouldTrigger(sessionID: string, interval: number): boolean {
  const counter = getCounter(sessionID)
  return counter.count > 0 && counter.count % interval === 0 && !counter.extracting
}

export function clearCounter(sessionID: string): void {
  sessionCounters.delete(sessionID)
}

/**
 * Scan key project files on startup and store their content as memories.
 * Only stores files that haven't been scanned yet or have been modified since last scan.
 */
export function scanProjectFiles(directory: string, projectId: string): void {
  if (!isDbAvailable()) return

  let stored = 0

  for (const relPath of KEY_FILES) {
    try {
      const fullPath = path.join(directory, relPath)
      if (!fs.existsSync(fullPath)) continue

      const stat = fs.statSync(fullPath)
      if (!stat.isFile()) continue
      // Skip large files (> 50KB)
      if (stat.size > 50 * 1024) continue

      // Use mtime as cache key to avoid re-scanning unchanged files
      const cacheKey = `${fullPath}:${stat.mtimeMs}`
      if (scannedFiles.has(cacheKey)) continue
      scannedFiles.add(cacheKey)

      const content = fs.readFileSync(fullPath, "utf-8")
      if (!content.trim()) continue

      // For package.json, extract key info
      if (relPath === "package.json") {
        storePackageJsonMemory(content, projectId, relPath)
        stored++
        continue
      }

      // Store file content in chunks
      stored += storeFileChunks(content, projectId, relPath)
    } catch (e) {
      // Silent fail per file — don't break startup
    }
  }

  // Also scan for recently modified files via git
  try {
    stored += scanRecentGitFiles(directory, projectId)
  } catch {
    // Git might not be available
  }

  if (stored > 0) {
    console.error(`[OpenRecall] Startup scan: stored ${stored} file memories`)
  }
}

function storePackageJsonMemory(content: string, projectId: string, filePath: string): void {
  try {
    const pkg = JSON.parse(content)
    const parts: string[] = []

    if (pkg.name) parts.push(`name: ${pkg.name}`)
    if (pkg.description) parts.push(`description: ${pkg.description}`)
    if (pkg.scripts) {
      const scripts = Object.keys(pkg.scripts).join(", ")
      parts.push(`scripts: ${scripts}`)
    }
    if (pkg.dependencies) {
      const deps = Object.keys(pkg.dependencies).slice(0, 15).join(", ")
      parts.push(`dependencies: ${deps}`)
    }
    if (pkg.devDependencies) {
      const devDeps = Object.keys(pkg.devDependencies).slice(0, 10).join(", ")
      parts.push(`devDependencies: ${devDeps}`)
    }
    if (pkg.type) parts.push(`type: ${pkg.type}`)
    if (pkg.main || pkg.module) parts.push(`entry: ${pkg.main || pkg.module}`)

    const summary = parts.join(" | ")
    if (summary.length > 10) {
      storeMemory({
        content: summary.slice(0, MAX_CHUNK),
        category: "discovery",
        projectId,
        source: `file-scan: ${filePath}`,
        tags: ["project-config", "package.json"],
      })
    }
  } catch {
    // Invalid JSON, skip
  }
}

function storeFileChunks(content: string, projectId: string, filePath: string): number {
  // Split by sections (headers in markdown, or double newlines)
  const sections = content.split(/\n#{1,3}\s+|\n\n/).filter((s) => s.trim().length > 20)
  let stored = 0

  // Store up to 5 chunks per file to avoid flooding
  const maxChunks = 5
  for (let i = 0; i < Math.min(sections.length, maxChunks); i++) {
    const chunk = sections[i]!.trim()
    if (chunk.length < 20) continue

    const truncated = chunk.length > MAX_CHUNK ? chunk.slice(0, MAX_CHUNK) + "..." : chunk

    try {
      storeMemory({
        content: truncated,
        category: "discovery",
        projectId,
        source: `file-scan: ${filePath} (section ${i + 1})`,
        tags: ["file-content", path.basename(filePath).toLowerCase()],
      })
      stored++
    } catch {
      // Dedup or other error, skip
    }
  }

  return stored
}

function scanRecentGitFiles(directory: string, projectId: string): number {
  // Use git to find recently modified tracked files (last 7 days)
  const { execSync } = require("child_process")
  let stored = 0

  try {
    const result = execSync("git log --diff-filter=M --name-only --pretty=format: --since=7.days.ago HEAD", {
      cwd: directory,
      encoding: "utf-8",
      timeout: 5000,
    })

    const files = [...new Set(
      result.split("\n").filter((f: string) => f.trim().length > 0),
    )].slice(0, 20)

    if (files.length > 0) {
      const summary = `Recently modified files (last 7 days): ${files.join(", ")}`
      storeMemory({
        content: summary.slice(0, MAX_CHUNK),
        category: "discovery",
        projectId,
        source: "file-scan: git-recent",
        tags: ["git", "recent-changes"],
      })
      stored++
    }
  } catch {
    // Git not available or failed
  }

  return stored
}

/**
 * Extract file knowledge from tool execution.
 * Called from tool.execute.after to track what files the LLM reads/edits.
 */
export function extractFileKnowledge(
  toolName: string,
  args: any,
  output: string,
  sessionId: string,
  projectId: string,
): void {
  if (!isDbAvailable()) return

  // Only track file-related tools
  const fileTools = new Set(["read", "edit", "write", "glob", "grep"])
  if (!fileTools.has(toolName)) return

  try {
    const filePath = args?.filePath || args?.file_path || args?.path
    if (!filePath || typeof filePath !== "string") return

    if (toolName === "read") {
      // Store file content summary from read output
      if (!output || output.length < 30) return

      // Take the first ~400 chars as a content preview
      const preview = output.slice(0, MAX_CHUNK)
      const basename = path.basename(filePath)

      storeMemory({
        content: `File ${basename}: ${preview}`,
        category: "discovery",
        projectId,
        sessionId,
        source: `tool-read: ${filePath}`,
        tags: ["file-content", basename.toLowerCase()],
      })
    } else if (toolName === "edit" || toolName === "write") {
      // Store what was edited/written
      const basename = path.basename(filePath)
      const editInfo = args?.old_string
        ? `Edited ${basename}: replaced content in ${filePath}`
        : `Wrote to ${basename}: ${filePath}`

      storeMemory({
        content: editInfo.slice(0, MAX_CHUNK),
        category: "discovery",
        projectId,
        sessionId,
        source: `tool-${toolName}: ${filePath}`,
        tags: ["file-edit", basename.toLowerCase()],
      })
    }
  } catch {
    // Silent fail
  }
}
package/src/client.ts
ADDED
@@ -0,0 +1,16 @@
import type { PluginInput } from "@opencode-ai/plugin"

type SdkClient = PluginInput["client"]

let sdkClient: SdkClient | null = null

export function initClient(client: SdkClient) {
  sdkClient = client
}

export function getClient(): SdkClient {
  if (!sdkClient) {
    throw new Error("OpenRecall: SDK client not initialized")
  }
  return sdkClient
}
package/src/config.ts
ADDED
@@ -0,0 +1,79 @@
import { join } from "path"
import { homedir } from "os"

export const DEFAULT_CATEGORIES = [
  "decision",
  "pattern",
  "debugging",
  "preference",
  "convention",
  "discovery",
  "general",
] as const

export type Category = (typeof DEFAULT_CATEGORIES)[number]

export interface OpenRecallConfig {
  dbPath: string
  categories: string[]
  maxMemories: number
  autoRecall: boolean
  autoExtract: boolean
  searchLimit: number
  globalMemories: boolean
  agentModel: string
}

function getDefaultDbPath(): string {
  const home = homedir()
  const xdgData = process.env.XDG_DATA_HOME || join(home, ".local", "share")
  return join(xdgData, "opencode", "openrecall.db")
}

const DEFAULTS: OpenRecallConfig = {
  dbPath: getDefaultDbPath(),
  categories: [...DEFAULT_CATEGORIES],
  maxMemories: 0, // 0 = unlimited
  autoRecall: true,
  autoExtract: true,
  searchLimit: 10,
  globalMemories: false,
  agentModel: "",
}

let currentConfig: OpenRecallConfig = { ...DEFAULTS }

export function getConfig(): OpenRecallConfig {
  return currentConfig
}

export function initConfig(userConfig?: Partial<OpenRecallConfig>): OpenRecallConfig {
  currentConfig = { ...DEFAULTS }

  if (userConfig) {
    if (userConfig.dbPath) currentConfig.dbPath = userConfig.dbPath
    if (userConfig.categories && userConfig.categories.length > 0) {
      currentConfig.categories = userConfig.categories
    }
    if (typeof userConfig.maxMemories === "number") {
      currentConfig.maxMemories = userConfig.maxMemories
    }
    if (typeof userConfig.autoRecall === "boolean") {
      currentConfig.autoRecall = userConfig.autoRecall
    }
    if (typeof userConfig.autoExtract === "boolean") {
      currentConfig.autoExtract = userConfig.autoExtract
    }
    if (typeof userConfig.searchLimit === "number" && userConfig.searchLimit > 0) {
      currentConfig.searchLimit = userConfig.searchLimit
    }
    if (typeof userConfig.globalMemories === "boolean") {
      currentConfig.globalMemories = userConfig.globalMemories
    }
    if (typeof userConfig.agentModel === "string" && userConfig.agentModel.trim()) {
      currentConfig.agentModel = userConfig.agentModel.trim()
    }
  }

  return currentConfig
}
package/src/db.ts
ADDED
@@ -0,0 +1,93 @@
import { Database } from "bun:sqlite"
import { mkdirSync, existsSync } from "fs"
import { dirname } from "path"
import { getConfig } from "./config"
import { migrations } from "./migrations"

let db: Database | null = null
let initFailed = false

export function isDbAvailable(): boolean {
  return db !== null && !initFailed
}

export function getDb(): Database {
  if (db) return db

  if (initFailed) {
    throw new Error("OpenRecall: database initialization previously failed")
  }

  try {
    const dbPath = getConfig().dbPath
    const dir = dirname(dbPath)
    if (!existsSync(dir)) {
      mkdirSync(dir, { recursive: true })
    }

    db = new Database(dbPath)

    db.run("PRAGMA journal_mode = WAL")
    db.run("PRAGMA synchronous = NORMAL")
    db.run("PRAGMA busy_timeout = 5000")
    db.run("PRAGMA foreign_keys = ON")

    runMigrations(db)

    return db
  } catch (e) {
    initFailed = true
    db = null
    console.error("[OpenRecall] Database initialization failed:", e)
    throw e
  }
}

function runMigrations(db: Database) {
  db.run(`
    CREATE TABLE IF NOT EXISTS _migrations (
      version INTEGER PRIMARY KEY,
      description TEXT NOT NULL,
      applied_at INTEGER NOT NULL DEFAULT (unixepoch())
    )
  `)

  const applied = new Set(
    (
      db.prepare("SELECT version FROM _migrations").all() as {
        version: number
      }[]
    ).map((r) => r.version),
  )

  for (const migration of migrations) {
    if (applied.has(migration.version)) continue

    db.run("BEGIN")
    try {
      migration.up(db)
      db.run(
        "INSERT INTO _migrations (version, description) VALUES (?, ?)",
        [migration.version, migration.description],
      )
      db.run("COMMIT")
    } catch (e) {
      db.run("ROLLBACK")
      throw new Error(
        `Migration ${migration.version} (${migration.description}) failed: ${e}`,
      )
    }
  }
}

export function closeDb() {
  if (db) {
    try {
      db.run("PRAGMA optimize")
    } catch {
      // ignore optimization errors on close
    }
    db.close()
    db = null
  }
}