@swarmvaultai/cli 0.1.19 → 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -4
- package/dist/index.js +127 -12
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -26,15 +26,21 @@ swarmvault init --obsidian
|
|
|
26
26
|
sed -n '1,120p' swarmvault.schema.md
|
|
27
27
|
swarmvault ingest ./notes.md
|
|
28
28
|
swarmvault ingest ./repo
|
|
29
|
+
swarmvault add https://arxiv.org/abs/2401.12345
|
|
29
30
|
swarmvault compile
|
|
31
|
+
swarmvault benchmark
|
|
30
32
|
swarmvault query "What keeps recurring?"
|
|
31
33
|
swarmvault query "Turn this into slides" --format slides
|
|
32
34
|
swarmvault explore "What should I research next?" --steps 3
|
|
33
35
|
swarmvault lint --deep
|
|
34
36
|
swarmvault graph query "Which nodes bridge the biggest clusters?"
|
|
35
37
|
swarmvault graph explain "concept:drift"
|
|
38
|
+
swarmvault watch status
|
|
39
|
+
swarmvault watch --repo --once
|
|
40
|
+
swarmvault hook install
|
|
36
41
|
swarmvault graph serve
|
|
37
42
|
swarmvault graph export --html ./exports/graph.html
|
|
43
|
+
swarmvault graph export --cypher ./exports/graph.cypher
|
|
38
44
|
```
|
|
39
45
|
|
|
40
46
|
## Commands
|
|
@@ -64,6 +70,7 @@ Ingest a local file path, directory path, or URL into immutable source storage a
|
|
|
64
70
|
- directory ingest respects `.gitignore` unless you pass `--no-gitignore`
|
|
65
71
|
- repo-aware directory ingest records `repoRelativePath` and later compile writes `state/code-index.json`
|
|
66
72
|
- URL ingest still localizes remote image references by default
|
|
73
|
+
- code-aware directory ingest currently covers JavaScript, TypeScript, Python, Go, Rust, Java, C#, C, C++, PHP, Ruby, and PowerShell
|
|
67
74
|
|
|
68
75
|
Useful flags:
|
|
69
76
|
|
|
@@ -75,6 +82,15 @@ Useful flags:
|
|
|
75
82
|
- `--no-include-assets`
|
|
76
83
|
- `--max-asset-size <bytes>`
|
|
77
84
|
|
|
85
|
+
### `swarmvault add <url>`
|
|
86
|
+
|
|
87
|
+
Capture supported URLs through a normalized markdown layer before ingesting them into the vault.
|
|
88
|
+
|
|
89
|
+
- arXiv abstract URLs and bare arXiv ids become durable markdown captures
|
|
90
|
+
- X/Twitter URLs use a graceful public capture path
|
|
91
|
+
- unsupported URLs fall back to generic URL ingest instead of failing
|
|
92
|
+
- optional metadata: `--author <name>` and `--contributor <name>`
|
|
93
|
+
|
|
78
94
|
### `swarmvault inbox import [dir]`
|
|
79
95
|
|
|
80
96
|
Import supported files from the configured inbox directory. This is meant for browser-clipper style markdown bundles and other capture workflows. Local image and asset references are preserved and copied into canonical storage under `raw/assets/`.
|
|
@@ -95,6 +111,14 @@ New concept and entity pages are staged into `wiki/candidates/` first. A later m
|
|
|
95
111
|
|
|
96
112
|
With `--approve`, compile writes a staged review bundle into `state/approvals/` without applying active wiki changes.
|
|
97
113
|
|
|
114
|
+
### `swarmvault benchmark [--question "<text>" ...]`
|
|
115
|
+
|
|
116
|
+
Measure graph-guided context reduction against a naive full-corpus read.
|
|
117
|
+
|
|
118
|
+
- writes the latest result to `state/benchmark.json`
|
|
119
|
+
- updates `wiki/graph/report.md` with the current benchmark summary
|
|
120
|
+
- accepts repeatable `--question` inputs for vault-specific benchmarks
|
|
121
|
+
|
|
98
122
|
### `swarmvault review list|show|accept|reject`
|
|
99
123
|
|
|
100
124
|
Inspect and resolve staged approval bundles created by `swarmvault compile --approve`.
|
|
@@ -158,9 +182,25 @@ Run anti-drift and vault health checks such as stale pages, missing graph artifa
|
|
|
158
182
|
|
|
159
183
|
`--web` can only be used with `--deep`. It enriches deep-lint findings with external evidence snippets and URLs from a configured web-search provider.
|
|
160
184
|
|
|
161
|
-
### `swarmvault watch [--lint] [--debounce <ms>]`
|
|
185
|
+
### `swarmvault watch [--lint] [--repo] [--once] [--debounce <ms>]`
|
|
186
|
+
|
|
187
|
+
Watch the inbox directory and trigger import and compile cycles when files change. With `--repo`, each cycle also refreshes tracked repo roots that were previously ingested through directory ingest. With `--once`, SwarmVault runs one refresh cycle immediately instead of starting a long-running watcher. With `--lint`, each cycle also runs linting. Each cycle writes a canonical session artifact to `state/sessions/`, and compatibility run metadata is still appended to `state/jobs.ndjson`.
|
|
188
|
+
|
|
189
|
+
When `--repo` sees non-code changes under tracked repo roots, SwarmVault records those files under `state/watch/pending-semantic-refresh.json`, marks affected compiled pages stale, and exposes the pending set through `watch status` and the local graph workspace instead of silently re-ingesting them.
|
|
190
|
+
|
|
191
|
+
### `swarmvault watch status`
|
|
162
192
|
|
|
163
|
-
|
|
193
|
+
Show watched repo roots, the latest watch run, and any pending semantic refresh entries for tracked non-code repo changes.
|
|
194
|
+
|
|
195
|
+
### `swarmvault hook install|uninstall|status`
|
|
196
|
+
|
|
197
|
+
Manage SwarmVault's local git hook blocks for the nearest git repository.
|
|
198
|
+
|
|
199
|
+
- `hook install` writes marker-based `post-commit` and `post-checkout` hooks
|
|
200
|
+
- `hook uninstall` removes only the SwarmVault-managed hook block
|
|
201
|
+
- `hook status` reports whether those managed hook blocks are installed
|
|
202
|
+
|
|
203
|
+
The installed hooks run `swarmvault watch --repo --once` from the vault root so repo-aware source changes are re-ingested and recompiled after commit and checkout.
|
|
164
204
|
|
|
165
205
|
### `swarmvault mcp`
|
|
166
206
|
|
|
@@ -197,14 +237,25 @@ Inspect graph metadata, community membership, neighbors, and provenance for a no
|
|
|
197
237
|
|
|
198
238
|
List the most connected bridge-heavy nodes in the current graph.
|
|
199
239
|
|
|
200
|
-
### `swarmvault graph export --html <output>`
|
|
240
|
+
### `swarmvault graph export --html|--svg|--graphml|--cypher <output>`
|
|
241
|
+
|
|
242
|
+
Export the current graph as one of four formats:
|
|
201
243
|
|
|
202
|
-
|
|
244
|
+
- `--html` for the standalone read-only graph workspace
|
|
245
|
+
- `--svg` for a static shareable diagram
|
|
246
|
+
- `--graphml` for graph-tool interoperability
|
|
247
|
+
- `--cypher` for Neo4j-style import scripts
|
|
203
248
|
|
|
204
249
|
### `swarmvault install --agent <codex|claude|cursor|goose|pi|gemini|opencode>`
|
|
205
250
|
|
|
206
251
|
Install agent-specific rules into the current project so an agent understands the SwarmVault workspace contract and workflow.
|
|
207
252
|
|
|
253
|
+
For Claude Code, you can also install the recommended graph-first pre-search hook:
|
|
254
|
+
|
|
255
|
+
```bash
|
|
256
|
+
swarmvault install --agent claude --hook
|
|
257
|
+
```
|
|
258
|
+
|
|
208
259
|
## Provider Configuration
|
|
209
260
|
|
|
210
261
|
SwarmVault defaults to a local `heuristic` provider so the CLI works without API keys, but real vaults will usually point at an actual model provider.
|
package/dist/index.js
CHANGED
|
@@ -5,16 +5,22 @@ import { readFileSync } from "fs";
|
|
|
5
5
|
import process from "process";
|
|
6
6
|
import {
|
|
7
7
|
acceptApproval,
|
|
8
|
+
addInput,
|
|
8
9
|
archiveCandidate,
|
|
10
|
+
benchmarkVault,
|
|
9
11
|
compileVault,
|
|
10
12
|
explainGraphVault,
|
|
11
13
|
exploreVault,
|
|
14
|
+
exportGraphFormat,
|
|
12
15
|
exportGraphHtml,
|
|
16
|
+
getGitHookStatus,
|
|
17
|
+
getWatchStatus,
|
|
13
18
|
importInbox,
|
|
14
19
|
ingestDirectory,
|
|
15
20
|
ingestInput,
|
|
16
21
|
initVault,
|
|
17
22
|
installAgent,
|
|
23
|
+
installGitHooks,
|
|
18
24
|
lintVault,
|
|
19
25
|
listApprovals,
|
|
20
26
|
listCandidates,
|
|
@@ -28,9 +34,11 @@ import {
|
|
|
28
34
|
readApproval,
|
|
29
35
|
rejectApproval,
|
|
30
36
|
runSchedule,
|
|
37
|
+
runWatchCycle,
|
|
31
38
|
serveSchedules,
|
|
32
39
|
startGraphServer,
|
|
33
40
|
startMcpServer,
|
|
41
|
+
uninstallGitHooks,
|
|
34
42
|
watchVault
|
|
35
43
|
} from "@swarmvaultai/engine";
|
|
36
44
|
import { Command, Option } from "commander";
|
|
@@ -40,9 +48,9 @@ program.name("swarmvault").description("SwarmVault is a local-first LLM wiki com
|
|
|
40
48
|
function readCliVersion() {
|
|
41
49
|
try {
|
|
42
50
|
const packageJson = JSON.parse(readFileSync(new URL("../package.json", import.meta.url), "utf8"));
|
|
43
|
-
return typeof packageJson.version === "string" && packageJson.version.trim() ? packageJson.version : "0.1.19";
|
|
51
|
+
return typeof packageJson.version === "string" && packageJson.version.trim() ? packageJson.version : "0.1.21";
|
|
44
52
|
} catch {
|
|
45
|
-
return "0.1.19";
|
|
53
|
+
return "0.1.21";
|
|
46
54
|
}
|
|
47
55
|
}
|
|
48
56
|
function isJson() {
|
|
@@ -103,6 +111,17 @@ program.command("ingest").description("Ingest a local file path, directory path,
|
|
|
103
111
|
}
|
|
104
112
|
}
|
|
105
113
|
);
|
|
114
|
+
program.command("add").description("Capture supported URLs into normalized markdown before ingesting them.").argument("<input>", "Supported URL or bare arXiv id").option("--author <name>", "Human author or curator for this capture").option("--contributor <name>", "Additional contributor metadata for this capture").action(async (input, options) => {
|
|
115
|
+
const result = await addInput(process.cwd(), input, {
|
|
116
|
+
author: options.author,
|
|
117
|
+
contributor: options.contributor
|
|
118
|
+
});
|
|
119
|
+
if (isJson()) {
|
|
120
|
+
emitJson(result);
|
|
121
|
+
} else {
|
|
122
|
+
log(`${result.captureType}${result.fallback ? " (fallback)" : ""}: ${result.manifest.sourceId}`);
|
|
123
|
+
}
|
|
124
|
+
});
|
|
106
125
|
var inbox = program.command("inbox").description("Inbox and capture workflows.");
|
|
107
126
|
inbox.command("import").description("Import supported files from the configured inbox directory.").argument("[dir]", "Optional inbox directory override").action(async (dir) => {
|
|
108
127
|
const result = await importInbox(process.cwd(), dir);
|
|
@@ -159,6 +178,18 @@ program.command("explore").description("Run a save-first multi-step exploration
|
|
|
159
178
|
log(`Completed ${result.stepCount} step(s).`);
|
|
160
179
|
}
|
|
161
180
|
});
|
|
181
|
+
program.command("benchmark").description("Measure graph-guided context reduction against a naive full-corpus read.").option("--question <text...>", "Optional custom benchmark question(s)").action(async (options) => {
|
|
182
|
+
const result = await benchmarkVault(process.cwd(), {
|
|
183
|
+
questions: options.question
|
|
184
|
+
});
|
|
185
|
+
if (isJson()) {
|
|
186
|
+
emitJson(result);
|
|
187
|
+
} else {
|
|
188
|
+
log(`Corpus tokens: ${result.corpusTokens}`);
|
|
189
|
+
log(`Average query tokens: ${result.avgQueryTokens}`);
|
|
190
|
+
log(`Reduction ratio: ${(result.reductionRatio * 100).toFixed(1)}%`);
|
|
191
|
+
}
|
|
192
|
+
});
|
|
162
193
|
program.command("lint").description("Run anti-drift and wiki-health checks.").option("--deep", "Run LLM-powered advisory lint", false).option("--web", "Augment deep lint with configured web search", false).action(async (options) => {
|
|
163
194
|
const findings = await lintVault(process.cwd(), {
|
|
164
195
|
deep: options.deep ?? false,
|
|
@@ -190,12 +221,22 @@ graph.command("serve").description("Serve the local graph viewer.").option("--po
|
|
|
190
221
|
process.exit(0);
|
|
191
222
|
});
|
|
192
223
|
});
|
|
193
|
-
graph.command("export").description("Export the graph
|
|
194
|
-
const
|
|
224
|
+
graph.command("export").description("Export the graph as HTML, SVG, GraphML, or Cypher.").option("--html <output>", "Output HTML file path").option("--svg <output>", "Output SVG file path").option("--graphml <output>", "Output GraphML file path").option("--cypher <output>", "Output Cypher file path").action(async (options) => {
|
|
225
|
+
const targets = [
|
|
226
|
+
options.html ? { format: "html", outputPath: options.html } : null,
|
|
227
|
+
options.svg ? { format: "svg", outputPath: options.svg } : null,
|
|
228
|
+
options.graphml ? { format: "graphml", outputPath: options.graphml } : null,
|
|
229
|
+
options.cypher ? { format: "cypher", outputPath: options.cypher } : null
|
|
230
|
+
].filter((target2) => Boolean(target2));
|
|
231
|
+
if (targets.length !== 1) {
|
|
232
|
+
throw new Error("Pass exactly one of --html, --svg, --graphml, or --cypher.");
|
|
233
|
+
}
|
|
234
|
+
const target = targets[0];
|
|
235
|
+
const outputPath = target.format === "html" ? await exportGraphHtml(process.cwd(), target.outputPath) : (await exportGraphFormat(process.cwd(), target.format, target.outputPath)).outputPath;
|
|
195
236
|
if (isJson()) {
|
|
196
|
-
emitJson({ outputPath });
|
|
237
|
+
emitJson({ format: target.format, outputPath });
|
|
197
238
|
} else {
|
|
198
|
-
log(`Exported graph
|
|
239
|
+
log(`Exported graph ${target.format} to ${outputPath}`);
|
|
199
240
|
}
|
|
200
241
|
});
|
|
201
242
|
graph.command("query").description("Traverse the compiled graph deterministically from local search seeds.").argument("<question>", "Question or graph search seed").option("--dfs", "Prefer a depth-first traversal instead of breadth-first", false).option("--budget <n>", "Maximum number of graph nodes to summarize").action(async (question, options) => {
|
|
@@ -312,23 +353,94 @@ candidate.command("archive").description("Archive a candidate by removing it fro
|
|
|
312
353
|
log(`Archived ${result.pageId}`);
|
|
313
354
|
}
|
|
314
355
|
});
|
|
315
|
-
program.command("watch").description("Watch the inbox directory and run
|
|
356
|
+
var watch = program.command("watch").description("Watch the inbox directory and optionally tracked repos, or run one refresh cycle immediately.").option("--lint", "Run lint after each compile cycle", false).option("--repo", "Also refresh tracked repo sources and watch their repo roots", false).option("--once", "Run one import/refresh cycle immediately instead of starting a watcher", false).option("--debounce <ms>", "Debounce window in milliseconds", "900").action(async (options) => {
|
|
316
357
|
const debounceMs = Number.parseInt(options.debounce ?? "900", 10);
|
|
358
|
+
if (options.once) {
|
|
359
|
+
const result = await runWatchCycle(process.cwd(), {
|
|
360
|
+
lint: options.lint ?? false,
|
|
361
|
+
repo: options.repo ?? false,
|
|
362
|
+
debounceMs: Number.isFinite(debounceMs) ? debounceMs : 900
|
|
363
|
+
});
|
|
364
|
+
if (isJson()) {
|
|
365
|
+
emitJson(result);
|
|
366
|
+
} else {
|
|
367
|
+
log(
|
|
368
|
+
`Refreshed inbox${options.repo ? " and tracked repos" : ""}. Imported ${result.importedCount}, repo imported ${result.repoImportedCount}, repo updated ${result.repoUpdatedCount}, repo removed ${result.repoRemovedCount}.`
|
|
369
|
+
);
|
|
370
|
+
}
|
|
371
|
+
return;
|
|
372
|
+
}
|
|
317
373
|
const { paths } = await loadVaultConfig(process.cwd());
|
|
318
374
|
const controller = await watchVault(process.cwd(), {
|
|
319
375
|
lint: options.lint ?? false,
|
|
376
|
+
repo: options.repo ?? false,
|
|
320
377
|
debounceMs: Number.isFinite(debounceMs) ? debounceMs : 900
|
|
321
378
|
});
|
|
322
379
|
if (isJson()) {
|
|
323
|
-
emitJson({ status: "watching", inboxDir: paths.inboxDir });
|
|
380
|
+
emitJson({ status: "watching", inboxDir: paths.inboxDir, repo: options.repo ?? false });
|
|
324
381
|
} else {
|
|
325
|
-
log(
|
|
382
|
+
log(`Watching inbox${options.repo ? " and tracked repos" : ""} for changes. Press Ctrl+C to stop.`);
|
|
326
383
|
}
|
|
327
384
|
process.on("SIGINT", async () => {
|
|
328
385
|
await controller.close();
|
|
329
386
|
process.exit(0);
|
|
330
387
|
});
|
|
331
388
|
});
|
|
389
|
+
watch.command("status").description("Show the latest watch run plus pending semantic refresh entries.").action(async () => {
|
|
390
|
+
const result = await getWatchStatus(process.cwd());
|
|
391
|
+
if (isJson()) {
|
|
392
|
+
emitJson(result);
|
|
393
|
+
return;
|
|
394
|
+
}
|
|
395
|
+
log(`Watched repo roots: ${result.watchedRepoRoots.length}`);
|
|
396
|
+
log(`Pending semantic refresh: ${result.pendingSemanticRefresh.length}`);
|
|
397
|
+
for (const entry of result.pendingSemanticRefresh.slice(0, 8)) {
|
|
398
|
+
log(`- ${entry.changeType} ${entry.path}`);
|
|
399
|
+
}
|
|
400
|
+
});
|
|
401
|
+
program.command("watch-status").description("Show the latest watch run plus pending semantic refresh entries.").action(async () => {
|
|
402
|
+
const result = await getWatchStatus(process.cwd());
|
|
403
|
+
if (isJson()) {
|
|
404
|
+
emitJson(result);
|
|
405
|
+
return;
|
|
406
|
+
}
|
|
407
|
+
log(`Watched repo roots: ${result.watchedRepoRoots.length}`);
|
|
408
|
+
log(`Pending semantic refresh: ${result.pendingSemanticRefresh.length}`);
|
|
409
|
+
for (const entry of result.pendingSemanticRefresh.slice(0, 8)) {
|
|
410
|
+
log(`- ${entry.changeType} ${entry.path}`);
|
|
411
|
+
}
|
|
412
|
+
});
|
|
413
|
+
var hook = program.command("hook").description("Install local git hooks that keep tracked repos and the vault in sync.");
|
|
414
|
+
hook.command("install").description("Install post-commit and post-checkout hooks for the nearest git repository.").action(async () => {
|
|
415
|
+
const status = await installGitHooks(process.cwd());
|
|
416
|
+
if (isJson()) {
|
|
417
|
+
emitJson(status);
|
|
418
|
+
return;
|
|
419
|
+
}
|
|
420
|
+
log(`Installed hooks in ${status.repoRoot}`);
|
|
421
|
+
});
|
|
422
|
+
hook.command("uninstall").description("Remove the SwarmVault-managed git hook blocks from the nearest git repository.").action(async () => {
|
|
423
|
+
const status = await uninstallGitHooks(process.cwd());
|
|
424
|
+
if (isJson()) {
|
|
425
|
+
emitJson(status);
|
|
426
|
+
return;
|
|
427
|
+
}
|
|
428
|
+
log(`Removed SwarmVault hook blocks from ${status.repoRoot ?? "the current workspace"}`);
|
|
429
|
+
});
|
|
430
|
+
hook.command("status").description("Show whether SwarmVault-managed git hooks are installed.").action(async () => {
|
|
431
|
+
const status = await getGitHookStatus(process.cwd());
|
|
432
|
+
if (isJson()) {
|
|
433
|
+
emitJson(status);
|
|
434
|
+
return;
|
|
435
|
+
}
|
|
436
|
+
if (!status.repoRoot) {
|
|
437
|
+
log("No git repository found.");
|
|
438
|
+
return;
|
|
439
|
+
}
|
|
440
|
+
log(`repo=${status.repoRoot}`);
|
|
441
|
+
log(`post-commit=${status.postCommit}`);
|
|
442
|
+
log(`post-checkout=${status.postCheckout}`);
|
|
443
|
+
});
|
|
332
444
|
var schedule = program.command("schedule").description("Run scheduled vault maintenance jobs.");
|
|
333
445
|
schedule.command("list").description("List configured schedule jobs and their next run state.").action(async () => {
|
|
334
446
|
const schedules = await listSchedules(process.cwd());
|
|
@@ -380,10 +492,13 @@ program.command("mcp").description("Run SwarmVault as a local MCP server over st
|
|
|
380
492
|
process.exit(0);
|
|
381
493
|
});
|
|
382
494
|
});
|
|
383
|
-
program.command("install").description("Install SwarmVault instructions for an agent in the current project.").requiredOption("--agent <agent>", "codex, claude, cursor, goose, pi, gemini, or opencode").action(async (options) => {
|
|
384
|
-
|
|
495
|
+
program.command("install").description("Install SwarmVault instructions for an agent in the current project.").requiredOption("--agent <agent>", "codex, claude, cursor, goose, pi, gemini, or opencode").option("--hook", "Also install the recommended Claude pre-search hook when agent=claude", false).action(async (options) => {
|
|
496
|
+
if (options.hook && options.agent !== "claude") {
|
|
497
|
+
throw new Error("--hook is only supported for --agent claude");
|
|
498
|
+
}
|
|
499
|
+
const target = await installAgent(process.cwd(), options.agent, { claudeHook: options.hook ?? false });
|
|
385
500
|
if (isJson()) {
|
|
386
|
-
emitJson({ agent: options.agent, target });
|
|
501
|
+
emitJson({ agent: options.agent, target, hook: options.hook ?? false });
|
|
387
502
|
} else {
|
|
388
503
|
log(`Installed rules into ${target}`);
|
|
389
504
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@swarmvaultai/cli",
|
|
3
|
-
"version": "0.1.19",
|
|
3
|
+
"version": "0.1.21",
|
|
4
4
|
"description": "Global CLI for SwarmVault.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -39,7 +39,7 @@
|
|
|
39
39
|
},
|
|
40
40
|
"dependencies": {
|
|
41
41
|
"commander": "^14.0.1",
|
|
42
|
-
"@swarmvaultai/engine": "0.1.19"
|
|
42
|
+
"@swarmvaultai/engine": "0.1.21"
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
45
|
"@types/node": "^24.6.0",
|