@comfanion/workflow 4.6.0 → 4.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +53 -4
- package/package.json +1 -1
- package/src/build-info.json +1 -1
- package/src/opencode/tools/codeindex.ts +95 -139
package/bin/cli.js
CHANGED
@@ -402,8 +402,9 @@ program

 program
   .command('update')
-  .description('Update .opencode/ to latest version (preserves config.yaml)')
+  .description('Update .opencode/ to latest version (preserves config.yaml and vectorizer)')
   .option('--no-backup', 'Skip creating backup')
+  .option('--vectorizer', 'Update/install vectorizer too')
   .action(async (options) => {
     const spinner = ora('Updating OpenCode Workflow...').start();

@@ -416,6 +417,8 @@ program
       }

       const configPath = path.join(targetDir, 'config.yaml');
+      const vectorizerDir = path.join(targetDir, 'vectorizer');
+      const vectorsDir = path.join(targetDir, 'vectors');
       const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
       const backupDir = path.join(process.cwd(), `.opencode.backup-${timestamp}`);

@@ -423,13 +426,32 @@ program
       spinner.text = 'Reading config.yaml...';
       const configBackup = await fs.readFile(configPath, 'utf8');

+      // Check if vectorizer exists
+      const hasVectorizer = await fs.pathExists(path.join(vectorizerDir, 'node_modules'));
+      const hasVectors = await fs.pathExists(vectorsDir);
+
       // Create full backup (unless --no-backup)
       if (options.backup !== false) {
         spinner.text = 'Creating backup...';
-        await fs.copy(targetDir, backupDir);
+        await fs.copy(targetDir, backupDir, {
+          filter: (src) => !src.includes('node_modules') && !src.includes('vectors')
+        });
       }

-      // Remove old .opencode directory
+      // Preserve vectorizer and vectors by moving them temporarily
+      const tempVectorizer = path.join(process.cwd(), '.vectorizer-temp');
+      const tempVectors = path.join(process.cwd(), '.vectors-temp');
+
+      if (hasVectorizer) {
+        spinner.text = 'Preserving vectorizer...';
+        await fs.move(vectorizerDir, tempVectorizer, { overwrite: true });
+      }
+      if (hasVectors) {
+        spinner.text = 'Preserving vector indexes...';
+        await fs.move(vectorsDir, tempVectors, { overwrite: true });
+      }
+
+      // Remove old .opencode directory
       spinner.text = 'Removing old files...';
       await fs.remove(targetDir);

@@ -437,6 +459,16 @@ program
       spinner.text = 'Installing new version...';
       await fs.copy(OPENCODE_SRC, targetDir);

+      // Restore vectorizer and vectors
+      if (hasVectorizer) {
+        spinner.text = 'Restoring vectorizer...';
+        await fs.move(tempVectorizer, vectorizerDir, { overwrite: true });
+      }
+      if (hasVectors) {
+        spinner.text = 'Restoring vector indexes...';
+        await fs.move(tempVectors, vectorsDir, { overwrite: true });
+      }
+
       // Restore user's config.yaml
       spinner.text = 'Restoring config.yaml...';
       await fs.writeFile(configPath, configBackup);

@@ -448,7 +480,24 @@ program
         console.log(chalk.gray(' You can delete it after verifying the update works.\n'));
       }

-      console.log(chalk.green('✅ Your config.yaml was preserved
+      console.log(chalk.green('✅ Your config.yaml was preserved.'));
+      if (hasVectorizer) {
+        console.log(chalk.green('✅ Vectorizer was preserved.'));
+      }
+      if (hasVectors) {
+        console.log(chalk.green('✅ Vector indexes were preserved.'));
+      }
+
+      // Option to install/update vectorizer
+      if (options.vectorizer) {
+        console.log('');
+        await installVectorizer(targetDir);
+      } else if (!hasVectorizer) {
+        console.log(chalk.yellow('\n💡 Vectorizer not installed. Install with:'));
+        console.log(chalk.cyan(' npx opencode-workflow vectorizer install\n'));
+      }
+
+      console.log('');

     } catch (error) {
       spinner.fail(chalk.red('Failed to update'));
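The net effect of the cli.js changes: `update` now treats `.opencode/vectorizer/` and `.opencode/vectors/` as user state, the same way it already treated `config.yaml`. A minimal sketch of that preserve/restore pattern, using the same fs-extra calls the diff shows (`pathExists`, `move`, `remove`, `copy`); the directory names come from the diff, while the standalone function wrapper and its parameters are illustrative only:

```ts
import path from "path";
import fs from "fs-extra";

// Sketch of the 4.7.0 update flow: park vectorizer/ and vectors/ outside
// .opencode/ before the old directory is wiped, then move them back once the
// packaged files have been copied in. Wrapper name/params are illustrative.
async function updatePreservingVectorizer(targetDir: string, packagedSrc: string): Promise<void> {
  const vectorizerDir = path.join(targetDir, "vectorizer");
  const vectorsDir = path.join(targetDir, "vectors");
  const tempVectorizer = path.join(process.cwd(), ".vectorizer-temp");
  const tempVectors = path.join(process.cwd(), ".vectors-temp");

  const hasVectorizer = await fs.pathExists(path.join(vectorizerDir, "node_modules"));
  const hasVectors = await fs.pathExists(vectorsDir);

  // Move the heavy directories out of the way so the wipe below cannot touch them.
  if (hasVectorizer) await fs.move(vectorizerDir, tempVectorizer, { overwrite: true });
  if (hasVectors) await fs.move(vectorsDir, tempVectors, { overwrite: true });

  await fs.remove(targetDir);            // drop the old .opencode/
  await fs.copy(packagedSrc, targetDir); // install the packaged version

  // Put the preserved directories back.
  if (hasVectorizer) await fs.move(tempVectorizer, vectorizerDir, { overwrite: true });
  if (hasVectors) await fs.move(tempVectors, vectorsDir, { overwrite: true });
}
```

The backup copy also gets lighter, since `node_modules` and `vectors` are filtered out of it, and the new `--vectorizer` flag triggers an install/update of the vectorizer after the restore.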
package/package.json
CHANGED
package/src/build-info.json
CHANGED
package/src/opencode/tools/codeindex.ts
CHANGED
@@ -14,27 +14,13 @@
 import { tool } from "@opencode-ai/plugin"
 import path from "path"
 import fs from "fs/promises"
-import {
-import ignore from "ignore"
+import { execSync } from "child_process"

-// Index presets
+// Index presets for documentation
 const INDEX_PRESETS: Record<string, { pattern: string; description: string }> = {
-  code: {
-
-
-  },
-  docs: {
-    pattern: '**/*.{md,mdx,txt,rst,adoc}',
-    description: 'Documentation files'
-  },
-  config: {
-    pattern: '**/*.{yaml,yml,json,toml,ini,env,xml}',
-    description: 'Configuration files'
-  },
-  all: {
-    pattern: '**/*.{js,ts,jsx,tsx,mjs,cjs,py,go,rs,java,kt,swift,c,cpp,h,hpp,cs,rb,php,scala,clj,md,mdx,txt,rst,adoc,yaml,yml,json,toml}',
-    description: 'All supported files'
-  }
+  code: { pattern: '**/*.{js,ts,go,py,...}', description: 'Source code files' },
+  docs: { pattern: '**/*.{md,txt,...}', description: 'Documentation files' },
+  config: { pattern: '**/*.{yaml,json,...}', description: 'Configuration files' },
 }

 export default tool({
@@ -54,7 +40,7 @@ Note: Initial indexing takes ~30s to load the embedding model.`,

   args: {
     action: tool.schema.enum(["status", "list", "reindex"]).describe("Action to perform"),
-    index: tool.schema.string().optional().default("code").describe("Index name
+    index: tool.schema.string().optional().default("code").describe("Index name: code, docs, config"),
   },

   async execute(args, context) {
@@ -72,83 +58,93 @@ Note: Initial indexing takes ~30s to load the embedding model.`,

 To install:
 \`\`\`bash
-npx
+npx @comfanion/workflow vectorizer install
 \`\`\`

 This will download the embedding model (~100MB) and set up the vector database.`
     }

-
-    const vectorizerModule = path.join(vectorizerDir, "index.js")
-    const { CodebaseIndexer, INDEX_PRESETS: PRESETS } = await import(`file://${vectorizerModule}`)
-
-    // LIST: Show all indexes
-    if (args.action === "list") {
-      const tempIndexer = await new CodebaseIndexer(projectRoot, "code").init()
-      const allStats = await tempIndexer.getAllStats()
+    const indexName = args.index || "code"

+    // LIST: Show all indexes
+    if (args.action === "list") {
+      try {
+        const result = execSync("node .opencode/vectorizer/list-indexes.js 2>/dev/null || echo '{}'", {
+          cwd: projectRoot,
+          encoding: "utf8",
+          timeout: 30000
+        })
+
+        // Fallback: read hashes files directly
         let output = `## Codebase Index Overview\n\n`
         output += `✅ **Vectorizer installed**\n\n`
-
-
+
+        const indexes: string[] = []
+        try {
+          const entries = await fs.readdir(vectorsDir, { withFileTypes: true })
+          for (const entry of entries) {
+            if (entry.isDirectory()) {
+              indexes.push(entry.name)
+            }
+          }
+        } catch {}
+
+        if (indexes.length === 0) {
           output += `⚠️ **No indexes created yet**\n\n`
           output += `Create indexes with:\n`
           output += `\`\`\`bash\n`
-          output += `npx opencode-workflow index --index code
-          output += `npx opencode-workflow index --index docs
-          output +=
-          output += `\`\`\`\n\n`
+          output += `npx opencode-workflow index --index code\n`
+          output += `npx opencode-workflow index --index docs --dir docs/\n`
+          output += `\`\`\`\n`
         } else {
           output += `### Active Indexes\n\n`
-          for (const
-
-
+          for (const idx of indexes) {
+            try {
+              const hashesPath = path.join(vectorsDir, idx, "hashes.json")
+              const hashes = JSON.parse(await fs.readFile(hashesPath, "utf8"))
+              const fileCount = Object.keys(hashes).length
+              const desc = INDEX_PRESETS[idx]?.description || "Custom index"
+              output += `**📁 ${idx}** - ${desc}\n`
+              output += ` Files: ${fileCount}\n\n`
+            } catch {}
           }
         }
-
-        output += `###
-        for (const [name, preset] of Object.entries(PRESETS || INDEX_PRESETS) as [string, any][]) {
-          const exists = allStats.find((s: any) => s.indexName === name)
-          const status = exists ? "✅" : "⬜"
-          output += `${status} **${name}**: ${preset.description}\n`
-        }
-
-        output += `\n### Usage\n`
+
+        output += `### Usage\n`
         output += `\`\`\`\n`
         output += `codesearch({ query: "your query", index: "code" })\n`
-        output += `codesearch({ query: "
-        output += `codesearch({ query: "api keys", searchAll: true })\n`
+        output += `codesearch({ query: "how to deploy", index: "docs" })\n`
        output += `\`\`\``
-
+
        return output
+
+      } catch (error: any) {
+        return `❌ Error listing indexes: ${error.message}`
      }
+    }

-
-
-
-
-
-
-
-
-
-
-        const hashesContent = await fs.readFile(hashesFile, "utf8")
-        const hashes = JSON.parse(hashesContent)
-        const sampleFiles = Object.keys(hashes).slice(0, 5)
+    // STATUS: Show specific index status
+    if (args.action === "status") {
+      const hashesFile = path.join(vectorsDir, indexName, "hashes.json")
+
+      try {
+        const hashesContent = await fs.readFile(hashesFile, "utf8")
+        const hashes = JSON.parse(hashesContent)
+        const fileCount = Object.keys(hashes).length
+        const sampleFiles = Object.keys(hashes).slice(0, 5)
+        const desc = INDEX_PRESETS[indexName]?.description || "Custom index"

-
+        return `## Index Status: "${indexName}"

 ✅ **Vectorizer installed**
 ✅ **Index active**

-**Description:** ${
-**Files indexed:** ${
-**Chunks:** ${stats.chunkCount}
+**Description:** ${desc}
+**Files indexed:** ${fileCount}

 **Sample indexed files:**
 ${sampleFiles.map(f => `- ${f}`).join("\n")}
-${
+${fileCount > 5 ? `- ... and ${fileCount - 5} more` : ""}

 **Usage:**
 \`\`\`
@@ -156,12 +152,12 @@ codesearch({ query: "your search query", index: "${indexName}" })
 \`\`\`

 To re-index:
-\`\`\`
-
+\`\`\`bash
+npx opencode-workflow index --index ${indexName}
 \`\`\`

-
-
+      } catch {
+        return `## Index Status: "${indexName}"

 ✅ **Vectorizer installed**
 ⚠️ **Index "${indexName}" not created yet**
@@ -169,87 +165,47 @@ codeindex({ action: "reindex", index: "${indexName}" })

 To create this index:
 \`\`\`bash
 npx opencode-workflow index --index ${indexName}
-
-
-Or use:
-\`\`\`
-codeindex({ action: "reindex", index: "${indexName}" })
+# Or with specific directory:
+npx opencode-workflow index --index ${indexName} --dir src/
 \`\`\``
-      }
       }
+    }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        const gitignore = await fs.readFile(path.join(projectRoot, '.gitignore'), 'utf8')
-        ig = ig.add(gitignore)
-      } catch {}
-      ig.add(['node_modules', '.git', 'dist', 'build', '.opencode/vectors', '.opencode/vectorizer'])
-
-      // Find files
-      const files = await glob(pattern, { cwd: projectRoot, nodir: true })
-      const filtered = files.filter((f: string) => !ig.ignores(f))
-
-      let indexed = 0
-      let skipped = 0
-
-      for (const file of filtered) {
-        const filePath = path.join(projectRoot, file)
-        try {
-          const wasIndexed = await indexer.indexFile(filePath)
-          if (wasIndexed) {
-            indexed++
-          } else {
-            skipped++
-          }
-        } catch {
-          // Skip files that can't be read
-        }
-      }
-
-      // Unload model to free memory
-      await indexer.unloadModel()
-
-      const stats = await indexer.getStats()
+    // REINDEX: Re-index using CLI
+    if (args.action === "reindex") {
+      try {
+        execSync(`npx opencode-workflow index --index ${indexName}`, {
+          cwd: projectRoot,
+          encoding: "utf8",
+          timeout: 300000, // 5 min
+          stdio: "pipe"
+        })
+
+        // Get stats after indexing
+        const hashesFile = path.join(vectorsDir, indexName, "hashes.json")
+        const hashes = JSON.parse(await fs.readFile(hashesFile, "utf8"))
+        const fileCount = Object.keys(hashes).length

-
+        return `## Re-indexing Complete ✅

 **Index:** ${indexName}
-**
-**Files found:** ${filtered.length}
-**Files indexed:** ${indexed}
-**Files unchanged:** ${skipped}
-**Total chunks:** ${stats.chunkCount}
+**Files indexed:** ${fileCount}

 You can now use semantic search:
 \`\`\`
 codesearch({ query: "your search query", index: "${indexName}" })
 \`\`\``

-
-
+      } catch (error: any) {
+        return `❌ Re-indexing failed: ${error.message}

-Try:
-
-
-
+Try manually:
+\`\`\`bash
+npx opencode-workflow index --index ${indexName} --force
+\`\`\``
       }
-
-      return `Unknown action: ${args.action}. Use: status, list, or reindex`
-
-    } catch (error: any) {
-      return `❌ Error: ${error.message}`
     }
+
+    return `Unknown action: ${args.action}. Use: status, list, or reindex`
   },
 })
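In the rewritten codeindex tool, none of the actions import the vectorizer module anymore: `list` and `status` read the `hashes.json` manifests under the vectors directory, and `reindex` shells out to the CLI via `execSync`. A compact sketch of that reindex path, assuming the vectors directory resolves to `.opencode/vectors` as the cli.js diff suggests; the helper name, parameters, and return value are illustrative, not part of the package API:

```ts
import path from "path";
import fs from "fs/promises";
import { execSync } from "child_process";

// Re-index through the CLI, then report how many files the index covers by
// reading the hashes.json manifest written for that index.
// Helper name, parameters, and return shape are illustrative only.
async function reindexAndCount(projectRoot: string, indexName: string): Promise<number> {
  execSync(`npx opencode-workflow index --index ${indexName}`, {
    cwd: projectRoot,
    encoding: "utf8",
    timeout: 300_000, // 5 minutes, matching the tool's timeout
    stdio: "pipe",
  });

  // Assumed layout: .opencode/vectors/<index>/hashes.json maps file paths to content hashes.
  const hashesFile = path.join(projectRoot, ".opencode", "vectors", indexName, "hashes.json");
  const hashes = JSON.parse(await fs.readFile(hashesFile, "utf8"));
  return Object.keys(hashes).length;
}
```

From the agent's side the three actions keep the same shape: `codeindex({ action: "list" })`, `codeindex({ action: "status", index: "docs" })`, and `codeindex({ action: "reindex", index: "code" })`.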