@bodhi-ventures/aiocs 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +488 -0
- package/dist/chunk-ID3PUSMY.js +4535 -0
- package/dist/cli.js +624 -0
- package/dist/mcp-server.js +720 -0
- package/docs/2026-03-26-agent-json-and-daemon-design.md +157 -0
- package/docs/2026-03-28-hybrid-search-design.md +423 -0
- package/docs/README.md +12 -0
- package/docs/codex-integration.md +125 -0
- package/docs/examples/codex-agents/aiocs-docs-specialist.example.toml +21 -0
- package/docs/json-contract.md +524 -0
- package/docs/superpowers/specs/2026-03-29-tag-driven-release-pipeline-design.md +135 -0
- package/package.json +74 -0
- package/skills/aiocs/SKILL.md +174 -0
- package/sources/ethereal.yaml +20 -0
- package/sources/hyperliquid.yaml +20 -0
- package/sources/lighter.yaml +24 -0
- package/sources/nado.yaml +22 -0
- package/sources/synthetix.yaml +24 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,624 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
AIOCS_ERROR_CODES,
|
|
4
|
+
AiocsError,
|
|
5
|
+
backfillEmbeddings,
|
|
6
|
+
clearEmbeddings,
|
|
7
|
+
diffSnapshotsForSource,
|
|
8
|
+
exportCatalogBackup,
|
|
9
|
+
fetchSources,
|
|
10
|
+
getAiocsConfigDir,
|
|
11
|
+
getAiocsDataDir,
|
|
12
|
+
getDoctorReport,
|
|
13
|
+
getEmbeddingStatus,
|
|
14
|
+
getManagedSourceSpecDirectories,
|
|
15
|
+
importCatalogBackup,
|
|
16
|
+
initBuiltInSources,
|
|
17
|
+
linkProjectSources,
|
|
18
|
+
listSnapshotsForSource,
|
|
19
|
+
listSources,
|
|
20
|
+
openCatalog,
|
|
21
|
+
packageName,
|
|
22
|
+
packageVersion,
|
|
23
|
+
parseDaemonConfig,
|
|
24
|
+
refreshDueSources,
|
|
25
|
+
runEmbeddingWorker,
|
|
26
|
+
runSourceCanaries,
|
|
27
|
+
searchCatalog,
|
|
28
|
+
showChunk,
|
|
29
|
+
startDaemon,
|
|
30
|
+
toAiocsError,
|
|
31
|
+
unlinkProjectSources,
|
|
32
|
+
upsertSourceFromSpecFile,
|
|
33
|
+
verifyCoverage
|
|
34
|
+
} from "./chunk-ID3PUSMY.js";
|
|
35
|
+
|
|
36
|
+
// src/cli.ts
|
|
37
|
+
import { Command, CommanderError as CommanderError2 } from "commander";
|
|
38
|
+
|
|
39
|
+
// src/cli-output.ts
|
|
40
|
+
import { CommanderError } from "commander";
|
|
41
|
+
// Normalize an optional human-readable output value into a list of lines.
// Accepts undefined/null/"" (yields []), one string (yields [s]), or an array
// of strings (returned as-is).
function toLines(output) {
  if (!output) return [];
  if (Array.isArray(output)) {
    return output;
  }
  return [output];
}
|
|
47
|
+
// Serialize a CLI result envelope to single-line JSON for machine consumers.
function serializeEnvelope(envelope) {
  const serialized = JSON.stringify(envelope);
  return serialized;
}
|
|
50
|
+
// True when the raw argv contains the global --json flag (checked before
// Commander has parsed anything, e.g. for help/error output suppression).
function argvWantsJson(argv) {
  return argv.some((token) => token === "--json");
}
|
|
53
|
+
// True when the parsed command (including inherited global options) has the
// --json flag set.
function commandWantsJson(command) {
  const options = command.optsWithGlobals();
  return !!options.json;
}
|
|
56
|
+
// Print a successful command result: a one-line JSON envelope in --json mode,
// otherwise the human-readable lines (if any) one per console.log call.
function emitSuccess({ json, commandName, data, human }) {
  if (json) {
    const envelope = { ok: true, command: commandName, data };
    console.log(serializeEnvelope(envelope));
    return;
  }
  for (const line of toLines(human)) {
    console.log(line);
  }
}
|
|
69
|
+
// Map any thrown value to the { code, message, details? } shape used in JSON
// error envelopes. Commander parse errors become invalid-argument errors;
// everything else goes through toAiocsError. The details key is attached
// only when the normalized error actually carries one.
function normalizeError(error) {
  if (error instanceof CommanderError) {
    return {
      code: AIOCS_ERROR_CODES.invalidArgument,
      message: error.message
    };
  }
  const { code, message, details } = toAiocsError(error);
  const shaped = { code, message };
  if (typeof details !== "undefined") {
    shaped.details = details;
  }
  return shaped;
}
|
|
83
|
+
// Print a failed command result. In --json mode the envelope goes to stdout
// (so machine consumers always read one line from stdout); otherwise only
// the message is written to stderr.
function emitError({ json, commandName, error }) {
  const normalized = normalizeError(error);
  if (!json) {
    console.error(normalized.message);
    return;
  }
  console.log(serializeEnvelope({
    ok: false,
    command: commandName,
    error: normalized
  }));
}
|
|
95
|
+
// Best-effort guess of the command name for error envelopes emitted before
// Commander resolved a command. Strips global flags, then joins known
// two-level group commands (e.g. "source list" -> "source.list").
function inferRequestedCommand(argv) {
  const globalFlags = new Set(["--json", "--help", "-h"]);
  const [first, second] = argv.filter((token) => !globalFlags.has(token));
  if (!first) {
    return "cli";
  }
  const groupCommands = new Set(["source", "snapshot", "project", "refresh", "verify", "backup"]);
  if (groupCommands.has(first) && second && !second.startsWith("-")) {
    return `${first}.${second}`;
  }
  return first;
}
|
|
107
|
+
|
|
108
|
+
// src/cli.ts
|
|
109
|
+
// Render one search result (or shown chunk) as a human-readable text block:
// metadata header lines, a blank line, the chunk markdown, and a trailing
// blank line. Score (4 decimal places) and Signals lines appear only when
// those fields are present.
function renderSearchResult(result) {
  const lines = [];
  lines.push(`Chunk ID: ${result.chunkId}`);
  lines.push(`Source: ${result.sourceId}`);
  lines.push(`Snapshot: ${result.snapshotId}`);
  if (typeof result.score === "number") {
    lines.push(`Score: ${result.score.toFixed(4)}`);
  }
  if (result.signals) {
    lines.push(`Signals: ${result.signals.join(", ")}`);
  }
  lines.push(`Page: ${result.pageTitle}`);
  lines.push(`Section: ${result.sectionTitle}`);
  lines.push(`URL: ${result.pageUrl}`);
  lines.push("");
  lines.push(result.markdown);
  lines.push("");
  return lines.join("\n");
}
|
|
124
|
+
// Parse a numeric CLI option. Returns undefined when the option was omitted.
// Rejects non-integers and negatives; additionally rejects 0 for the "limit"
// field (an offset of 0 is valid, a limit of 0 is not).
function parsePositiveIntegerOption(value, field) {
  if (value === undefined) {
    return undefined;
  }
  const parsed = Number(value);
  const isNonNegativeInteger = Number.isInteger(parsed) && parsed >= 0;
  if (!isNonNegativeInteger) {
    throw new AiocsError(AIOCS_ERROR_CODES.invalidArgument, `${field} must be a non-negative integer`);
  }
  if (parsed === 0 && field === "limit") {
    throw new AiocsError(AIOCS_ERROR_CODES.invalidArgument, "limit must be greater than zero");
  }
  return parsed;
}
|
|
143
|
+
// Validate the --mode option. Returns undefined when omitted; otherwise the
// value must be one of the four supported search modes.
function parseSearchModeOption(value) {
  if (value === undefined) {
    return undefined;
  }
  const supportedModes = ["auto", "lexical", "hybrid", "semantic"];
  if (supportedModes.includes(value)) {
    return value;
  }
  throw new AiocsError(AIOCS_ERROR_CODES.invalidArgument, "mode must be one of: auto, lexical, hybrid, semantic");
}
|
|
155
|
+
// Shared command wrapper: run the handler, then emit its { data, human }
// result in the caller's requested format. On failure, emit the error and
// mark the process as failed without throwing further.
async function executeCommand(command, commandName, run) {
  try {
    const { data, human } = await run();
    emitSuccess({
      json: commandWantsJson(command),
      commandName,
      data,
      human
    });
  } catch (error) {
    emitError({
      json: commandWantsJson(command),
      commandName,
      error
    });
    process.exitCode = 1;
  }
}
|
|
173
|
+
// Build the event sink used by the daemon loop. In --json mode every event is
// emitted verbatim as one JSON line; in human mode a subset of event types is
// formatted into pipe-delimited summary lines (unknown event types are
// silently ignored by the switch).
function createDaemonLogger(json) {
  if (json) {
    return {
      emit(event) {
        // Machine mode: the raw event object is the contract.
        console.log(JSON.stringify(event));
      }
    };
  }
  return {
    emit(event) {
      switch (event.type) {
        case "daemon.started":
          console.log([
            "Daemon started",
            `interval=${event.intervalMinutes}m`,
            `fetchOnStart=${String(event.fetchOnStart)}`,
            // "(none)" when no spec directories are configured.
            `sourceSpecDirs=${event.sourceSpecDirs.join(", ") || "(none)"}`
          ].join(" | "));
          break;
        case "daemon.stopped":
          console.log("Daemon stopped");
          break;
        case "daemon.cycle.started":
          console.log(`Cycle started (${event.reason}) at ${event.startedAt}`);
          break;
        case "daemon.cycle.completed":
          // One summary line with aggregate counts, then one detail line per
          // canary, canary failure, refresh, failure, embedding, and
          // embedding failure.
          console.log([
            `Cycle completed (${event.reason})`,
            `bootstrapped=${event.result.bootstrapped.processedSpecCount}`,
            `removed=${event.result.bootstrapped.removedSourceIds.length}`,
            `canaries=${event.result.canaried.length}`,
            `canaryFailed=${event.result.canaryFailed.length}`,
            `due=${event.result.dueSourceIds.length}`,
            `refreshed=${event.result.refreshed.length}`,
            `failed=${event.result.failed.length}`,
            `embedded=${event.result.embedded.length}`,
            `embeddingFailed=${event.result.embeddingFailed.length}`
          ].join(" | "));
          for (const canaried of event.result.canaried) {
            console.log([
              "Canary",
              canaried.sourceId,
              `status=${canaried.status}`,
              `checks=${canaried.summary.checkCount}`
            ].join(" | "));
          }
          for (const failedCanary of event.result.canaryFailed) {
            console.log(`Canary failed | ${failedCanary.sourceId} | ${failedCanary.errorMessage}`);
          }
          for (const refreshed of event.result.refreshed) {
            console.log([
              // "Reused" means a content-identical snapshot already existed.
              refreshed.reused ? "Reused" : "Fetched",
              refreshed.sourceId,
              `snapshot=${refreshed.snapshotId}`,
              `pages=${refreshed.pageCount}`
            ].join(" | "));
          }
          for (const failed of event.result.failed) {
            console.log(`Failed | ${failed.sourceId} | ${failed.errorMessage}`);
          }
          for (const embedded of event.result.embedded) {
            console.log(`Embedded | ${embedded.sourceId} | ${embedded.snapshotId} | chunks=${embedded.chunkCount}`);
          }
          for (const failedEmbedding of event.result.embeddingFailed) {
            console.log(`Embedding failed | ${failedEmbedding.sourceId} | ${failedEmbedding.snapshotId} | ${failedEmbedding.errorMessage}`);
          }
          break;
      }
    }
  };
}
|
|
244
|
+
// Root Commander program. Global options (--json, -V) are inherited by every
// subcommand via optsWithGlobals().
var program = new Command();
program.name("docs").description("Local-only docs fetch and search CLI for AI agents.").option("-V, --version", "emit the current aiocs version").option("--json", "emit machine-readable JSON output").showHelpAfterError();
// Suppress Commander's own help/error text when --json was requested, so that
// stdout carries exactly one JSON envelope (emitted elsewhere) and nothing
// else. Raw process.argv is checked because this runs before parsing.
program.configureOutput({
  writeOut(output) {
    if (!argvWantsJson(process.argv)) {
      process.stdout.write(output);
    }
  },
  writeErr(output) {
    if (!argvWantsJson(process.argv)) {
      process.stderr.write(output);
    }
  }
});
// Throw CommanderError instead of calling process.exit(), so the parseAsync
// catch handler at the bottom of the file controls the exit code.
program.exitOverride();
|
|
259
|
+
// Handle a bare `docs --version` / `docs -V` (optionally with --json) before
// Commander parses, since -V is declared as a plain option rather than via
// program.version(). Returns true when the request was handled and the
// process should exit.
function maybeHandleRootVersionRequest(argv) {
  const filtered = argv.slice(2).filter((token) => token !== "--json");
  const [onlyToken] = filtered;
  const isRootVersionRequest = filtered.length === 1 && (onlyToken === "--version" || onlyToken === "-V");
  if (!isRootVersionRequest) {
    return false;
  }
  if (!argvWantsJson(argv)) {
    console.log(packageVersion);
    return true;
  }
  emitSuccess({
    json: true,
    commandName: "version",
    data: {
      name: packageName,
      version: packageVersion
    }
  });
  return true;
}
|
|
280
|
+
// Short-circuit before Commander parsing when the invocation was a bare
// root-level version request.
if (maybeHandleRootVersionRequest(process.argv)) {
  process.exit(0);
}
|
|
283
|
+
// `docs version` — same payload as the root -V/--version path.
program.command("version").description("Show the current aiocs version.").action(async (_options, command) => {
  await executeCommand(command, "version", async () => ({
    data: {
      name: packageName,
      version: packageVersion
    },
    human: packageVersion
  }));
});
|
|
292
|
+
// `docs init` — register the bundled built-in source specs. --fetch also
// fetches them immediately; fetching defaults to off (options.fetch ?? false).
program.command("init").description("Register bundled built-in source specs and optionally fetch them.").option("--fetch", "fetch built-in sources immediately").option("--no-fetch", "skip immediate fetching after bootstrapping").action(async (options, command) => {
  await executeCommand(command, "init", async () => {
    const result = await initBuiltInSources({
      fetch: options.fetch ?? false
    });
    return {
      data: result,
      human: [
        `Initialized ${result.initializedSources.length} built-in sources from ${result.sourceSpecDir}`,
        `User-managed source specs live under ${getManagedSourceSpecDirectories().userSourceDir}`,
        // Only mention removals when the bootstrap actually pruned sources.
        ...result.removedSourceIds.length > 0 ? [`Removed managed sources: ${result.removedSourceIds.join(", ")}`] : [],
        // One line per fetch result, or a single status line when nothing
        // was fetched (distinguishing "tried, nothing to do" from "skipped").
        ...result.fetchResults.length > 0 ? result.fetchResults.map((entry) => {
          const verb = entry.reused ? "Reused" : "Fetched";
          return `${verb} ${entry.sourceId} -> ${entry.snapshotId} (${entry.pageCount} pages)`;
        }) : [result.fetched ? "No built-in sources were fetched." : "Skipped fetching built-in sources."]
      ]
    };
  });
});
|
|
311
|
+
// `docs doctor` (alias `health`) — run environment checks and print one line
// per check plus an overall status.
program.command("doctor").alias("health").description("Validate the local aiocs runtime and optional Docker daemon path.").action(async (_options, command) => {
  await executeCommand(command, "doctor", async () => {
    const report = await getDoctorReport();
    return {
      data: report,
      human: [
        `Overall status: ${report.summary.status}`,
        ...report.checks.map((check) => `${check.status.toUpperCase()} | ${check.id} | ${check.summary}`)
      ]
    };
  });
});
|
|
323
|
+
// `docs source ...` — manage registered documentation sources.
var source = program.command("source");
// `source upsert <spec-file>` — create or update a source from a YAML spec.
source.command("upsert").argument("<spec-file>").action(async (specFile, _options, command) => {
  await executeCommand(command, "source.upsert", async () => {
    const result = await upsertSourceFromSpecFile(specFile);
    return {
      data: result,
      human: `Upserted source ${result.sourceId}`
    };
  });
});
// `source list` — one pipe-delimited summary line per registered source.
source.command("list").action(async (_options, command) => {
  await executeCommand(command, "source.list", async () => {
    const result = await listSources();
    const sources = result.sources;
    return {
      data: result,
      human: sources.length === 0 ? "No sources registered." : sources.map((item) => [
        item.id,
        item.label,
        item.isDue ? "due now" : `next due ${item.nextDueAt}`,
        // specPath is absent for inline or unknown spec origins.
        `spec ${item.specPath ?? "(inline/unknown)"}`,
        item.lastSuccessfulSnapshotId ? `latest ${item.lastSuccessfulSnapshotId}` : "no snapshots"
      ].join(" | "))
    };
  });
});
|
|
349
|
+
// `docs fetch <source-id-or-all>` — fetch one source, or every source when
// the literal "all" is given; prints one line per fetch result.
program.command("fetch").argument("<source-id-or-all>").action(async (sourceIdOrAll, _options, command) => {
  await executeCommand(command, "fetch", async () => {
    const result = await fetchSources(sourceIdOrAll);
    const results = result.results;
    return {
      data: result,
      human: results.length === 0 ? "No sources registered." : results.map((result2) => {
        // "Reused" indicates no new snapshot was created for this source.
        const verb = result2.reused ? "Reused" : "Fetched";
        return `${verb} ${result2.sourceId} -> ${result2.snapshotId} (${result2.pageCount} pages)`;
      })
    };
  });
});
|
|
362
|
+
// `docs canary <source-id-or-all>` — run extraction canary checks without
// persisting snapshots; one summary line per source.
program.command("canary").argument("<source-id-or-all>").description("Run lightweight extraction canaries without creating snapshots.").action(async (sourceIdOrAll, _options, command) => {
  await executeCommand(command, "canary", async () => {
    const result = await runSourceCanaries(sourceIdOrAll);
    return {
      data: result,
      human: result.results.length === 0 ? "No sources registered." : result.results.map(
        (entry) => `Canary ${entry.sourceId} | status=${entry.status} | checks=${entry.summary.checkCount} | pass=${entry.summary.passCount} | fail=${entry.summary.failCount}`
      )
    };
  });
});
|
|
373
|
+
// `docs refresh due [source-id-or-all]` — refresh sources whose schedule says
// they are due. Omitting the argument behaves like "all".
var refresh = program.command("refresh");
refresh.command("due").argument("[source-id-or-all]").description("Refresh all due sources, or refresh one specific source only if it is currently due.").action(async (sourceIdOrAll, _options, command) => {
  await executeCommand(command, "refresh.due", async () => {
    const result = await refreshDueSources(sourceIdOrAll ?? "all");
    const results = result.results;
    return {
      data: result,
      // Empty result set: distinguish "that one source is not due" from
      // "nothing at all is due".
      human: results.length === 0 ? sourceIdOrAll && sourceIdOrAll !== "all" ? `Source ${sourceIdOrAll} is not due for refresh.` : "No sources due for refresh." : results.map((result2) => {
        const verb = result2.reused ? "Reused" : "Fetched";
        return `${verb} ${result2.sourceId} -> ${result2.snapshotId} (${result2.pageCount} pages)`;
      })
    };
  });
});
|
|
387
|
+
// `docs snapshot list <source-id>` — list stored snapshots for one source.
var snapshot = program.command("snapshot");
snapshot.command("list").argument("<source-id>").action(async (sourceId, _options, command) => {
  await executeCommand(command, "snapshot.list", async () => {
    const result = await listSnapshotsForSource(sourceId);
    const snapshots = result.snapshots;
    return {
      data: result,
      human: snapshots.length === 0 ? `No snapshots for ${sourceId}` : [
        `Snapshots for ${sourceId}:`,
        ...snapshots.map((item) => `${item.snapshotId} | pages=${item.pageCount} | created=${item.createdAt}`)
      ]
    };
  });
});
|
|
401
|
+
// `docs diff <source-id>` (alias `changes`) — compare two snapshots. --from
// and --to are optional; when omitted the backend picks defaults (the
// resolved ids are echoed back in the result).
program.command("diff").alias("changes").argument("<source-id>").option("--from <snapshot-id>", "base snapshot id").option("--to <snapshot-id>", "target snapshot id").description("Compare two snapshots for a source.").action(
  async (sourceId, options, command) => {
    await executeCommand(command, "diff", async () => {
      const result = await diffSnapshotsForSource({
        sourceId,
        // Only forward snapshot ids that were actually provided.
        ...options.from ? { fromSnapshotId: options.from } : {},
        ...options.to ? { toSnapshotId: options.to } : {}
      });
      return {
        data: result,
        human: [
          `Diff ${result.sourceId} | from=${result.fromSnapshotId} | to=${result.toSnapshotId}`,
          `Added=${result.summary.addedPageCount} | Removed=${result.summary.removedPageCount} | Changed=${result.summary.changedPageCount} | Unchanged=${result.summary.unchangedPageCount}`
        ]
      };
    });
  }
);
|
|
419
|
+
// `docs project ...` — associate sources with a project directory so that
// scoped searches resolve automatically.
var project = program.command("project");
// `project link <project-path> <source-ids...>` — requires at least one id.
project.command("link").argument("<project-path>").argument("<source-ids...>").action(async (projectPath, sourceIds, _options, command) => {
  await executeCommand(command, "project.link", async () => {
    const result = await linkProjectSources(projectPath, sourceIds);
    return {
      data: result,
      human: `Linked ${result.projectPath} -> ${sourceIds.join(", ")}`
    };
  });
});
// `project unlink <project-path> [source-ids...]` — ids optional; omitting
// them is passed through to unlinkProjectSources as an empty list.
project.command("unlink").argument("<project-path>").argument("[source-ids...]").action(async (projectPath, sourceIds, _options, command) => {
  await executeCommand(command, "project.unlink", async () => {
    const result = await unlinkProjectSources(projectPath, sourceIds);
    return {
      data: result,
      human: `Unlinked ${result.projectPath}`
    };
  });
});
|
|
438
|
+
// `docs backup ...` — export/import the whole local catalog.
var backup = program.command("backup");
// `backup export <output-dir>` — --replace-existing permits overwriting a
// non-empty target directory; the flag is forwarded only when it is an
// explicit boolean so the backend default applies otherwise.
backup.command("export").argument("<output-dir>").option("--replace-existing", "replace an existing non-empty export directory").action(
  async (outputDir, options, command) => {
    await executeCommand(command, "backup.export", async () => {
      const result = await exportCatalogBackup({
        outputDir,
        ...typeof options.replaceExisting === "boolean" ? { replaceExisting: options.replaceExisting } : {}
      });
      return {
        data: result,
        human: `Exported backup to ${result.outputDir}`
      };
    });
  }
);
// `backup import <input-dir>` — mirror of export; --replace-existing permits
// clobbering the local data/config directories.
backup.command("import").argument("<input-dir>").option("--replace-existing", "replace an existing local aiocs data/config directory").action(
  async (inputDir, options, command) => {
    await executeCommand(command, "backup.import", async () => {
      const result = await importCatalogBackup({
        inputDir,
        ...typeof options.replaceExisting === "boolean" ? { replaceExisting: options.replaceExisting } : {}
      });
      return {
        data: result,
        human: `Imported backup from ${result.inputDir}`
      };
    });
  }
);
|
|
467
|
+
// `docs embeddings ...` — inspect and drive the semantic-embedding pipeline.
var embeddings = program.command("embeddings");
// `embeddings status` — queue counters plus one coverage line per source.
embeddings.command("status").description("Show embedding backlog and coverage for latest snapshots.").action(async (_options, command) => {
  await executeCommand(command, "embeddings.status", async () => {
    const result = await getEmbeddingStatus();
    return {
      data: result,
      human: [
        `Queue | pending=${result.queue.pendingJobs} running=${result.queue.runningJobs} failed=${result.queue.failedJobs}`,
        ...result.sources.map((source2) => [
          source2.sourceId,
          // A source with no successful snapshot yet has no snapshot id.
          source2.snapshotId ?? "(none)",
          `coverage=${Math.round(source2.coverageRatio * 100)}%`,
          `indexed=${source2.indexedChunks}/${source2.totalChunks}`,
          `pending=${source2.pendingChunks}`,
          `failed=${source2.failedChunks}`,
          `stale=${source2.staleChunks}`
        ].join(" | "))
      ]
    };
  });
});
// `embeddings backfill <source-id-or-all>` — enqueue rebuild jobs.
embeddings.command("backfill").argument("<source-id-or-all>").description("Queue latest snapshots for embedding rebuild.").action(async (sourceIdOrAll, _options, command) => {
  await executeCommand(command, "embeddings.backfill", async () => {
    const result = await backfillEmbeddings(sourceIdOrAll);
    return {
      data: result,
      human: `Queued ${result.queuedJobs} embedding job(s)`
    };
  });
});
// `embeddings clear <source-id-or-all>` — drop derived embedding state.
embeddings.command("clear").argument("<source-id-or-all>").description("Clear derived embedding state for latest snapshots.").action(async (sourceIdOrAll, _options, command) => {
  await executeCommand(command, "embeddings.clear", async () => {
    const result = await clearEmbeddings(sourceIdOrAll);
    return {
      data: result,
      human: result.clearedSources.length === 0 ? "No embedding state cleared." : `Cleared embedding state for ${result.clearedSources.join(", ")}`
    };
  });
});
// `embeddings run` — drain the job queue synchronously in this process.
embeddings.command("run").description("Process queued embedding jobs immediately.").action(async (_options, command) => {
  await executeCommand(command, "embeddings.run", async () => {
    const result = await runEmbeddingWorker();
    return {
      data: result,
      human: [
        `Processed ${result.processedJobs} embedding job(s)`,
        ...result.succeededJobs.map((job) => `Embedded ${job.sourceId} -> ${job.snapshotId} (${job.chunkCount} chunks)`),
        ...result.failedJobs.map((job) => `Embedding failed ${job.sourceId} -> ${job.snapshotId}: ${job.errorMessage}`)
      ]
    };
  });
});
|
|
519
|
+
// `docs search <query>` — lexical/semantic/hybrid search over the catalog.
// --source is repeatable: the custom parser accumulates values into the
// default [] array. NOTE(review): Commander reuses that default array as the
// accumulator; fine for a one-shot CLI process, but would leak state if the
// program were ever parsed twice — confirm before reusing this pattern.
program.command("search").argument("<query>").option("--source <source-id>", "restrict search to a source", (value, current) => {
  current.push(value);
  return current;
}, []).option("--snapshot <snapshot-id>", "search a specific snapshot").option("--all", "search across all latest snapshots").option("--project <path>", "resolve search scope as if running from this path").option("--mode <mode>", "search mode: auto, lexical, hybrid, semantic").option("--limit <count>", "maximum number of results to return").option("--offset <count>", "number of results to skip before returning matches").action(async (query, options, command) => {
  await executeCommand(command, "search", async () => {
    // Validate numeric/enum options up front so bad values surface as
    // invalid-argument errors before any catalog work happens.
    const limit = parsePositiveIntegerOption(options.limit, "limit");
    const offset = parsePositiveIntegerOption(options.offset, "offset");
    const mode = parseSearchModeOption(options.mode);
    const result = await searchCatalog(query, {
      source: options.source,
      // Only forward options the user actually supplied.
      ...options.snapshot ? { snapshot: options.snapshot } : {},
      ...typeof options.all !== "undefined" ? { all: options.all } : {},
      ...options.project ? { project: options.project } : {},
      ...mode ? { mode } : {},
      ...typeof limit === "number" ? { limit } : {},
      ...typeof offset === "number" ? { offset } : {}
    });
    const results = result.results;
    return {
      data: result,
      human: results.length === 0 ? `No results for "${query}" (mode=${result.modeUsed})` : [
        // 1-based display range of the returned page within the total.
        `Showing ${result.offset + 1}-${result.offset + results.length} of ${result.total} result(s) for "${query}" | mode=${result.modeUsed}`,
        ...results.map((entry) => renderSearchResult(entry))
      ]
    };
  });
});
|
|
546
|
+
// `docs verify coverage <source-id> <reference-files...>` — check that every
// heading in the given reference files is present in a snapshot.
var verify = program.command("verify");
verify.command("coverage").argument("<source-id>").argument("<reference-files...>").option("--snapshot <snapshot-id>", "verify a specific snapshot instead of the latest successful snapshot").action(
  async (sourceId, referenceFiles, options, command) => {
    await executeCommand(command, "verify.coverage", async () => {
      const result = await verifyCoverage({
        sourceId,
        referenceFiles,
        // Default (no --snapshot): latest successful snapshot.
        ...options.snapshot ? { snapshotId: options.snapshot } : {}
      });
      return {
        data: result,
        human: [
          `Coverage ${result.complete ? "complete" : "incomplete"} for ${result.sourceId} @ ${result.snapshotId}`,
          `Files=${result.summary.fileCount} | headings=${result.summary.headingCount} | matched=${result.summary.matchedHeadingCount} | missing=${result.summary.missingHeadingCount}`,
          // Per-file breakdown: either a bullet list of missing headings or a
          // single all-clear line.
          ...result.files.flatMap((file) => file.missingHeadings.length > 0 ? [
            `Missing in ${file.referenceFile}:`,
            ...file.missingHeadings.map((heading) => `- ${heading}`)
          ] : [`No missing headings in ${file.referenceFile}`])
        ]
      };
    });
  }
);
|
|
569
|
+
// `docs show <chunk-id>` — print one catalog chunk by its numeric id.
program.command("show").argument("<chunk-id>").action(async (chunkId, _options, command) => {
  await executeCommand(command, "show", async () => {
    // Fix: the previous code passed Number(chunkId) straight through, so a
    // non-numeric argument became NaN and "" became 0 before reaching
    // showChunk. Validate via the shared option parser so bad ids surface as
    // the same invalid-argument error the other commands use.
    const parsedChunkId = parsePositiveIntegerOption(chunkId, "chunk-id");
    const result = await showChunk(parsedChunkId);
    return {
      data: result,
      human: renderSearchResult(result.chunk)
    };
  });
});
|
|
578
|
+
// `docs daemon` — long-running scheduled refresh loop. Does not use
// executeCommand because success never "completes" normally; errors are
// emitted directly. SIGINT/SIGTERM abort the loop via AbortController, and
// the handlers are removed again in the outer finally.
program.command("daemon").description("Run scheduled local refreshes in a long-lived process.").action(async (_options, command) => {
  const json = commandWantsJson(command);
  const logger = createDaemonLogger(json);
  const abortController = new AbortController();
  const stop = () => abortController.abort();
  process.on("SIGINT", stop);
  process.on("SIGTERM", stop);
  try {
    const config = parseDaemonConfig(process.env);
    const dataDir = getAiocsDataDir();
    // Called for its side effect only (presumably ensures the config
    // directory exists — the return value is unused).
    getAiocsConfigDir();
    const catalog = openCatalog({ dataDir });
    try {
      await startDaemon({
        catalog,
        dataDir,
        config,
        logger,
        signal: abortController.signal
      });
    } finally {
      // Always release the catalog handle, even when startDaemon throws or
      // the abort signal ends the loop.
      catalog.close();
    }
  } catch (error) {
    emitError({
      json,
      commandName: "daemon",
      error
    });
    process.exitCode = 1;
  } finally {
    process.off("SIGINT", stop);
    process.off("SIGTERM", stop);
  }
});
|
|
613
|
+
// Entry point: parse and dispatch. Because exitOverride() is set, Commander
// rejects instead of exiting, and this catch decides the final exit code.
program.parseAsync(process.argv).catch((error) => {
  // Help display is a success, not an error.
  if (error instanceof CommanderError2 && error.code === "commander.helpDisplayed") {
    process.exitCode = 0;
    return;
  }
  emitError({
    json: argvWantsJson(process.argv),
    // The failing command may never have been resolved; infer it from argv.
    commandName: inferRequestedCommand(process.argv.slice(2)),
    error
  });
  // Preserve Commander's suggested exit code for parse errors.
  process.exitCode = error instanceof CommanderError2 ? error.exitCode : 1;
});
|