skilld 1.5.0 → 1.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_chunks/agent.mjs +2 -2
- package/dist/_chunks/assemble.mjs +2 -0
- package/dist/_chunks/assemble.mjs.map +1 -1
- package/dist/_chunks/author.mjs +13 -11
- package/dist/_chunks/author.mjs.map +1 -1
- package/dist/_chunks/cache.mjs +6 -42
- package/dist/_chunks/cache.mjs.map +1 -1
- package/dist/_chunks/cache2.mjs +3 -1
- package/dist/_chunks/cache2.mjs.map +1 -1
- package/dist/_chunks/cli-helpers.mjs +31 -102
- package/dist/_chunks/cli-helpers.mjs.map +1 -1
- package/dist/_chunks/cli-helpers2.mjs +12 -0
- package/dist/_chunks/core.mjs +1 -0
- package/dist/_chunks/embedding-cache.mjs +4 -60
- package/dist/_chunks/embedding-cache2.mjs +61 -0
- package/dist/_chunks/embedding-cache2.mjs.map +1 -0
- package/dist/_chunks/index.d.mts +13 -21
- package/dist/_chunks/index.d.mts.map +1 -1
- package/dist/_chunks/index2.d.mts +32 -600
- package/dist/_chunks/index2.d.mts.map +1 -1
- package/dist/_chunks/index3.d.mts +615 -0
- package/dist/_chunks/index3.d.mts.map +1 -0
- package/dist/_chunks/install.mjs +12 -9
- package/dist/_chunks/install.mjs.map +1 -1
- package/dist/_chunks/list.mjs +3 -1
- package/dist/_chunks/list.mjs.map +1 -1
- package/dist/_chunks/lockfile.mjs +14 -1
- package/dist/_chunks/lockfile.mjs.map +1 -1
- package/dist/_chunks/package-json.mjs +107 -0
- package/dist/_chunks/package-json.mjs.map +1 -0
- package/dist/_chunks/pool.mjs +2 -123
- package/dist/_chunks/pool2.mjs +118 -0
- package/dist/_chunks/pool2.mjs.map +1 -0
- package/dist/_chunks/prepare.mjs +34 -78
- package/dist/_chunks/prepare.mjs.map +1 -1
- package/dist/_chunks/prepare2.mjs +94 -0
- package/dist/_chunks/prepare2.mjs.map +1 -0
- package/dist/_chunks/retriv.mjs +172 -0
- package/dist/_chunks/retriv.mjs.map +1 -0
- package/dist/_chunks/search-interactive.mjs +5 -3
- package/dist/_chunks/search-interactive.mjs.map +1 -1
- package/dist/_chunks/search.mjs +13 -320
- package/dist/_chunks/search2.mjs +319 -0
- package/dist/_chunks/search2.mjs.map +1 -0
- package/dist/_chunks/setup.mjs +4 -2
- package/dist/_chunks/setup.mjs.map +1 -1
- package/dist/_chunks/skills.mjs +1 -1
- package/dist/_chunks/sources.mjs +15 -18
- package/dist/_chunks/sources.mjs.map +1 -1
- package/dist/_chunks/sync-shared.mjs +3 -0
- package/dist/_chunks/sync-shared2.mjs +8 -6
- package/dist/_chunks/sync-shared2.mjs.map +1 -1
- package/dist/_chunks/sync.mjs +7 -7
- package/dist/_chunks/sync.mjs.map +1 -1
- package/dist/_chunks/sync2.mjs +22 -0
- package/dist/_chunks/uninstall.mjs +6 -2
- package/dist/_chunks/uninstall.mjs.map +1 -1
- package/dist/_chunks/wizard.mjs +186 -0
- package/dist/_chunks/wizard.mjs.map +1 -0
- package/dist/agent/index.mjs +2 -0
- package/dist/cache/index.d.mts +1 -1
- package/dist/cache/index.mjs +3 -1
- package/dist/cli-entry.d.mts +1 -0
- package/dist/cli-entry.mjs +11 -0
- package/dist/cli-entry.mjs.map +1 -0
- package/dist/cli.mjs +27 -192
- package/dist/cli.mjs.map +1 -1
- package/dist/index.d.mts +3 -3
- package/dist/index.mjs +4 -2
- package/dist/prepare.d.mts +1 -0
- package/dist/prepare.mjs +93 -0
- package/dist/prepare.mjs.map +1 -0
- package/dist/retriv/index.d.mts +2 -46
- package/dist/retriv/index.mjs +2 -171
- package/dist/sources/index.d.mts +1 -1
- package/dist/sources/index.mjs +1 -0
- package/dist/types.d.mts +1 -1
- package/package.json +1 -1
- package/dist/_chunks/embedding-cache.mjs.map +0 -1
- package/dist/_chunks/pool.mjs.map +0 -1
- package/dist/_chunks/search.mjs.map +0 -1
- package/dist/retriv/index.d.mts.map +0 -1
- package/dist/retriv/index.mjs.map +0 -1
package/dist/_chunks/pool2.mjs
ADDED
@@ -0,0 +1,118 @@
+ import { dirname, join } from "pathe";
+ import { existsSync } from "node:fs";
+ import { fileURLToPath } from "node:url";
+ import { Worker } from "node:worker_threads";
+ //#region src/retriv/pool.ts
+ let worker = null;
+ let taskId = 0;
+ const pending = /* @__PURE__ */ new Map();
+ const queue = [];
+ let running = false;
+ function resolveWorkerPath() {
+   const dir = dirname(fileURLToPath(import.meta.url));
+   for (const candidate of [join(dir, "worker.mjs"), join(dir, "..", "retriv", "worker.mjs")]) if (existsSync(candidate)) return { path: candidate };
+   return {
+     path: join(dir, "worker.ts"),
+     execArgv: ["--experimental-strip-types"]
+   };
+ }
+ function ensureWorker() {
+   if (worker) return worker;
+   const config = resolveWorkerPath();
+   const w = new Worker(config.path, { execArgv: config.execArgv });
+   w.on("message", (msg) => {
+     const task = pending.get(msg.id);
+     if (!task) return;
+     if (msg.type === "progress") task.onProgress?.({
+       phase: msg.phase,
+       current: msg.current,
+       total: msg.total
+     });
+     else if (msg.type === "done") {
+       pending.delete(msg.id);
+       task.resolve();
+     } else if (msg.type === "error") {
+       pending.delete(msg.id);
+       task.reject(new Error(msg.message));
+     }
+   });
+   w.on("error", (err) => {
+     for (const task of pending.values()) task.reject(err);
+     pending.clear();
+     worker = null;
+   });
+   w.on("exit", (code) => {
+     if (pending.size > 0) {
+       const err = /* @__PURE__ */ new Error(`Worker exited (code ${code}) with ${pending.size} pending tasks`);
+       for (const task of pending.values()) task.reject(err);
+       pending.clear();
+     }
+     worker = null;
+   });
+   worker = w;
+   return w;
+ }
+ function drainQueue() {
+   if (running || queue.length === 0) return;
+   queue.shift()();
+ }
+ async function createIndexInWorker(documents, config) {
+   return new Promise((resolve, reject) => {
+     const run = () => {
+       running = true;
+       const id = ++taskId;
+       let w;
+       try {
+         w = ensureWorker();
+       } catch (err) {
+         running = false;
+         drainQueue();
+         reject(err instanceof Error ? err : new Error(String(err)));
+         return;
+       }
+       pending.set(id, {
+         id,
+         resolve: () => {
+           running = false;
+           drainQueue();
+           resolve();
+         },
+         reject: (err) => {
+           running = false;
+           drainQueue();
+           reject(err);
+         },
+         onProgress: config.onProgress
+       });
+       const msg = {
+         type: "index",
+         id,
+         documents,
+         dbPath: config.dbPath,
+         removeIds: config.removeIds
+       };
+       w.postMessage(msg);
+     };
+     if (running) queue.push(run);
+     else run();
+   });
+ }
+ async function shutdownWorker() {
+   if (!worker) return;
+   const w = worker;
+   worker = null;
+   return new Promise((resolve) => {
+     const timeout = setTimeout(() => {
+       w.terminate().then(() => resolve(), () => resolve());
+     }, 5e3);
+     w.once("exit", () => {
+       clearTimeout(timeout);
+       resolve();
+     });
+     w.postMessage({ type: "shutdown" });
+   });
+ }
+ //#endregion
+ export { shutdownWorker as n, createIndexInWorker as t };
+
+ //# sourceMappingURL=pool2.mjs.map
package/dist/_chunks/pool2.mjs.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"pool2.mjs","names":[],"sources":["../../src/retriv/pool.ts"],"sourcesContent":["…"],"mappings":"…"}
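
The new pool2.mjs chunk is a minimal single-worker task queue: `createIndexInWorker` lazily spawns one worker thread (falling back to the TypeScript source with `--experimental-strip-types` in dev), runs one indexing task at a time, queues the rest, and rejects all pending tasks if the worker errors or exits; `shutdownWorker` asks the worker to exit and force-terminates it after 5 seconds. A minimal usage sketch, assuming the `Document`/`IndexConfig` shapes from `src/retriv/types.ts`, which this diff does not show:

```ts
// Sketch only: the document and config shapes are assumptions, not taken from the diff.
import { createIndexInWorker, shutdownWorker } from './pool.ts'

interface Doc { id: string, content: string, metadata?: Record<string, unknown> }

async function reindex(batches: Doc[][], dbPath: string): Promise<void> {
  try {
    // Overlapping calls are safe: the pool runs one task at a time and queues the rest.
    await Promise.all(batches.map(docs => createIndexInWorker(docs, {
      dbPath,
      onProgress: (p: { phase: string, current: number, total: number }) =>
        console.log(`${p.phase} ${p.current}/${p.total}`),
    })))
  }
  finally {
    // Politely stop the worker; it is force-terminated if it ignores the shutdown message for 5 s.
    await shutdownWorker()
  }
}
```
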
package/dist/_chunks/prepare.mjs
CHANGED
@@ -1,82 +1,17 @@
- import "./
- import "./config.mjs";
- import "./sanitize.mjs";
- import { _ as resolvePkgDir, a as getShippedSkills, f as linkShippedSkill } from "./cache.mjs";
- import "./yaml.mjs";
- import "./markdown.mjs";
- import { n as getSharedSkillsDir } from "./shared.mjs";
- import "./sources.mjs";
- import { a as targets } from "./detect.mjs";
- import { i as linkSkillToAgents } from "./prompts.mjs";
- import { g as resolveAgent } from "./cli-helpers.mjs";
- import { i as readLock, s as writeLock, t as mergeLocks } from "./lockfile.mjs";
- import { t as getProjectState } from "./skills.mjs";
+ import { o as getCacheDir } from "./config.mjs";
import { join } from "pathe";
- import { existsSync, mkdirSync, symlinkSync } from "node:fs";
-
-
-
- const
-
-
-
-
-   args: { agent: {
-     type: "enum",
-     options: Object.keys(targets),
-     alias: "a",
-     description: "Target agent"
-   } },
-   async run({ args }) {
-     const cwd = process.cwd();
-     const agent = resolveAgent(args.agent);
-     if (!agent || agent === "none") return;
-     const agentConfig = targets[agent];
-     const shared = getSharedSkillsDir(cwd);
-     const skillsDir = shared || join(cwd, agentConfig.skillsDir);
-     const allLocks = (shared ? [shared] : Object.values(targets).map((t) => join(cwd, t.skillsDir))).map((dir) => readLock(dir)).filter((l) => !!l && Object.keys(l.skills).length > 0);
-     if (allLocks.length > 0) {
-       const lock = mergeLocks(allLocks);
-       for (const [name, info] of Object.entries(lock.skills)) {
-         if (!info.version) continue;
-         if (info.source === "shipped") {
-           if (!existsSync(join(skillsDir, name))) {
-             const match = getShippedSkills(info.packageName || name, cwd, info.version).find((s) => s.skillName === name);
-             if (match) linkShippedSkill(skillsDir, name, match.skillDir);
-           }
-           continue;
-         }
-         restorePkgSymlink(skillsDir, name, info, cwd);
-       }
-     }
-     const state = await getProjectState(cwd);
-     let shippedCount = 0;
-     if (state.shipped.length > 0) {
-       mkdirSync(skillsDir, { recursive: true });
-       for (const entry of state.shipped) {
-         const version = state.deps.get(entry.packageName)?.replace(/^[\^~>=<]+/, "") || "0.0.0";
-         for (const skill of entry.skills) {
-           linkShippedSkill(skillsDir, skill.skillName, skill.skillDir);
-           writeLock(skillsDir, skill.skillName, {
-             packageName: entry.packageName,
-             version,
-             source: "shipped",
-             syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
-             generator: "skilld"
-           });
-           if (shared) linkSkillToAgents(skill.skillName, shared, cwd, agent);
-           shippedCount++;
-         }
-       }
-       if (shippedCount > 0) p.log.success(`Installed ${shippedCount} shipped skill${shippedCount > 1 ? "s" : ""}`);
-     }
-     const freshState = shippedCount > 0 ? await getProjectState(cwd) : state;
-     if (freshState.outdated.length > 0) {
-       const n = freshState.outdated.length;
-       p.log.info(`${n} package${n > 1 ? "s" : ""} ha${n > 1 ? "ve" : "s"} new features and/or breaking changes. Run \`skilld update\` to sync.`);
-     }
+ import { existsSync, lstatSync, mkdirSync, readdirSync, rmSync, symlinkSync, unlinkSync } from "node:fs";
+ //#region src/core/prepare.ts
+ /** Resolve package directory: node_modules first, then global cache */
+ function resolvePkgDir(name, cwd, version) {
+   const nodeModulesPath = join(cwd, "node_modules", name);
+   if (existsSync(nodeModulesPath)) return nodeModulesPath;
+   if (version) {
+     const cachedPkgDir = join(getCacheDir(name, version), "pkg");
+     if (existsSync(join(cachedPkgDir, "package.json"))) return cachedPkgDir;
}
-
+   return null;
+ }
/** Restore .skilld/pkg symlink to node_modules if broken */
function restorePkgSymlink(skillsDir, name, info, cwd) {
const refsDir = join(skillsDir, name, ".skilld");
@@ -88,7 +23,28 @@ function restorePkgSymlink(skillsDir, name, info, cwd) {
mkdirSync(refsDir, { recursive: true });
symlinkSync(pkgDir, pkgLink);
}
+ /** Check if package ships a skills/ directory with SKILL.md or _SKILL.md subdirs */
+ function getShippedSkills(name, cwd, version) {
+   const pkgPath = resolvePkgDir(name, cwd, version);
+   if (!pkgPath) return [];
+   const skillsPath = join(pkgPath, "skills");
+   if (!existsSync(skillsPath)) return [];
+   return readdirSync(skillsPath, { withFileTypes: true }).filter((d) => d.isDirectory() && (existsSync(join(skillsPath, d.name, "SKILL.md")) || existsSync(join(skillsPath, d.name, "_SKILL.md")))).map((d) => ({
+     skillName: d.name,
+     skillDir: join(skillsPath, d.name)
+   }));
+ }
+ /** Create symlink from skills dir to shipped skill dir */
+ function linkShippedSkill(baseDir, skillName, targetDir) {
+   const linkPath = join(baseDir, skillName);
+   if (existsSync(linkPath)) if (lstatSync(linkPath).isSymbolicLink()) unlinkSync(linkPath);
+   else rmSync(linkPath, {
+     recursive: true,
+     force: true
+   });
+   symlinkSync(targetDir, linkPath);
+ }
//#endregion
- export {
+ export { restorePkgSymlink as i, linkShippedSkill as n, resolvePkgDir as r, getShippedSkills as t };

//# sourceMappingURL=prepare.mjs.map
package/dist/_chunks/prepare.mjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"prepare.mjs","names":[
+ {"version":3,"file":"prepare.mjs","names":[],"sources":["../../src/core/prepare.ts"],"sourcesContent":["…"],"mappings":"…"}
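
prepare.mjs now carries only the dependency-light core helpers from `src/core/prepare.ts` (the old command body moved to prepare2.mjs below): `resolvePkgDir` checks `node_modules` before the global cache, `getShippedSkills` looks for `skills/<name>/SKILL.md` (or `_SKILL.md`) directories inside a package, and `linkShippedSkill` replaces any stale link or directory with a fresh symlink. A sketch of how they compose, with an invented package name and skills directory:

```ts
// Illustrative only: 'some-dep' and the skills directory are made-up values.
import { join } from 'pathe'
import { getShippedSkills, linkShippedSkill, resolvePkgDir } from './core/prepare.ts'

const cwd = process.cwd()
const skillsDir = join(cwd, '.agent', 'skills') // hypothetical target directory

// node_modules first, then the global cache keyed by name@version.
const pkgDir = resolvePkgDir('some-dep', cwd, '1.2.3')
console.log('package resolved at:', pkgDir ?? '<not installed>')

// Only skills/<name>/ directories containing SKILL.md or _SKILL.md are returned.
for (const skill of getShippedSkills('some-dep', cwd, '1.2.3'))
  linkShippedSkill(skillsDir, skill.skillName, skill.skillDir)
```
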
package/dist/_chunks/prepare2.mjs
ADDED
@@ -0,0 +1,94 @@
+ import "./agent.mjs";
+ import "./config.mjs";
+ import "./package-json.mjs";
+ import { i as restorePkgSymlink, n as linkShippedSkill, t as getShippedSkills } from "./prepare.mjs";
+ import "./sanitize.mjs";
+ import "./cache.mjs";
+ import "./yaml.mjs";
+ import "./markdown.mjs";
+ import { n as getSharedSkillsDir } from "./shared.mjs";
+ import "./sources.mjs";
+ import { a as targets } from "./detect.mjs";
+ import { i as linkSkillToAgents } from "./prompts.mjs";
+ import { b as resolveAgent } from "./cli-helpers.mjs";
+ import { i as readLock, s as writeLock } from "./lockfile.mjs";
+ import { t as getProjectState } from "./skills.mjs";
+ import { join } from "pathe";
+ import { existsSync, mkdirSync } from "node:fs";
+ import * as p from "@clack/prompts";
+ import { defineCommand } from "citty";
+ //#region src/commands/prepare.ts
+ /**
+ * Prepare command — lightweight hook for package.json "prepare" script.
+ *
+ * Designed to run on every `pnpm install` / `npm install`. Blocking, fast, no LLM calls.
+ * 1. Restore broken symlinks from lockfile (like `install` but skips doc fetching)
+ * 2. Auto-install shipped skills from deps (just symlinks + lockfile writes)
+ * 3. Report outdated skills count and suggest `skilld update`
+ */
+ const prepareCommandDef = defineCommand({
+   meta: {
+     name: "prepare",
+     description: "Restore references and sync shipped skills (for package.json hooks)"
+   },
+   args: { agent: {
+     type: "enum",
+     options: Object.keys(targets),
+     alias: "a",
+     description: "Target agent"
+   } },
+   async run({ args }) {
+     const cwd = process.cwd();
+     const agent = resolveAgent(args.agent);
+     if (!agent || agent === "none") return;
+     const agentConfig = targets[agent];
+     const shared = getSharedSkillsDir(cwd);
+     const skillsDir = shared || join(cwd, agentConfig.skillsDir);
+     const lock = readLock(skillsDir);
+     if (lock && Object.keys(lock.skills).length > 0) {
+       let allIntact = true;
+       for (const [name, info] of Object.entries(lock.skills)) {
+         if (!info.version) continue;
+         if (existsSync(join(skillsDir, name))) {
+           if (info.source !== "shipped") restorePkgSymlink(skillsDir, name, info, cwd);
+           continue;
+         }
+         allIntact = false;
+         if (info.source === "shipped") {
+           const match = getShippedSkills(info.packageName || name, cwd, info.version).find((s) => s.skillName === name);
+           if (match) linkShippedSkill(skillsDir, name, match.skillDir);
+         }
+       }
+       if (allIntact) return;
+     }
+     const state = await getProjectState(cwd);
+     let shippedCount = 0;
+     if (state.shipped.length > 0) {
+       mkdirSync(skillsDir, { recursive: true });
+       for (const entry of state.shipped) {
+         const version = state.deps.get(entry.packageName)?.replace(/^[\^~>=<]+/, "") || "0.0.0";
+         for (const skill of entry.skills) {
+           linkShippedSkill(skillsDir, skill.skillName, skill.skillDir);
+           writeLock(skillsDir, skill.skillName, {
+             packageName: entry.packageName,
+             version,
+             source: "shipped",
+             syncedAt: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
+             generator: "skilld"
+           });
+           if (shared) linkSkillToAgents(skill.skillName, shared, cwd, agent);
+           shippedCount++;
+         }
+       }
+       if (shippedCount > 0) p.log.success(`Installed ${shippedCount} shipped skill${shippedCount > 1 ? "s" : ""}`);
+     }
+     if (state.outdated.length > 0) {
+       const n = state.outdated.length;
+       p.log.info(`${n} package${n > 1 ? "s" : ""} ha${n > 1 ? "ve" : "s"} new features and/or breaking changes. Run \`skilld update\` to sync.`);
+     }
+   }
+ });
+ //#endregion
+ export { prepareCommandDef };
+
+ //# sourceMappingURL=prepare2.mjs.map
package/dist/_chunks/prepare2.mjs.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"prepare2.mjs","names":["agents"],"sources":["../../src/commands/prepare.ts"],"sourcesContent":["…"],"mappings":"…"}
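
prepare2.mjs is the new home of the `skilld prepare` command itself. Per its doc comment it is meant to be cheap enough to run from a package.json `"prepare"` script (for example `"prepare": "skilld prepare"`): it takes a fast path when every locked skill directory is intact and only falls back to `getProjectState` discovery when something is missing. A sketch of how a CLI entry might mount the command with citty's standard wiring; the actual wiring lives in dist/cli.mjs / dist/cli-entry.mjs, which appear in this diff only as summary counts:

```ts
// Sketch: the top-level meta description and file layout are illustrative, not from the diff.
import { defineCommand, runMain } from 'citty'
import { prepareCommandDef } from './commands/prepare.ts'

const main = defineCommand({
  meta: { name: 'skilld', description: 'manage agent skills' },
  // citty also accepts lazy `() => import(...)` resolvers here, which is how a split
  // chunk like prepare2.mjs can stay unloaded until the subcommand is invoked.
  subCommands: { prepare: prepareCommandDef },
})

runMain(main)
```
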
package/dist/_chunks/retriv.mjs
ADDED
@@ -0,0 +1,172 @@
+ import { a as stripFrontmatter } from "./markdown.mjs";
+ //#region src/retriv/index.ts
+ var SearchDepsUnavailableError = class extends Error {
+   constructor(cause) {
+     super("Search dependencies unavailable (sqlite-vec or retriv not installed). Search indexing skipped.");
+     this.name = "SearchDepsUnavailableError";
+     this.cause = cause;
+   }
+ };
+ async function getDb(config) {
+   let createRetriv, autoChunker, sqliteMod, sqliteVec, transformersJs, cachedEmbeddings;
+   try {
+     [{createRetriv}, {autoChunker}, sqliteMod, sqliteVec, {transformersJs}, {cachedEmbeddings}] = await Promise.all([
+       import("retriv"),
+       import("retriv/chunkers/auto"),
+       import("retriv/db/sqlite"),
+       import("sqlite-vec"),
+       import("retriv/embeddings/transformers-js"),
+       import("./embedding-cache.mjs")
+     ]);
+   } catch (err) {
+     if (err?.code === "ERR_MODULE_NOT_FOUND") throw new SearchDepsUnavailableError(err);
+     throw err;
+   }
+   const embeddings = await cachedEmbeddings(transformersJs());
+   return createRetriv({
+     driver: sqliteMod.default({
+       path: config.dbPath,
+       embeddings,
+       sqliteVec
+     }),
+     chunking: autoChunker()
+   });
+ }
+ /**
+ * Index documents in-process (no worker thread).
+ * Preferred for tests and environments where worker_threads is unreliable.
+ */
+ async function createIndexDirect(documents, config) {
+   const db = await getDb(config);
+   if (config.removeIds?.length) await db.remove?.(config.removeIds);
+   await db.index(documents, { onProgress: config.onProgress });
+   await db.close?.();
+ }
+ /**
+ * Index documents in a background worker thread.
+ * Falls back to direct indexing if worker fails to spawn.
+ */
+ async function createIndex(documents, config) {
+   const { createIndexInWorker } = await import("./pool.mjs");
+   return createIndexInWorker(documents, config);
+ }
+ /**
+ * List all raw document IDs in an existing index.
+ * Returns chunk IDs (e.g. "doc-id#chunk-0") for chunked docs.
+ * Queries sqlite directly to bypass createRetriv's parent-ID deduplication,
+ * so callers can use these IDs for exact removal and parent-ID grouping.
+ */
+ async function listIndexIds(config) {
+   const nodeSqlite = globalThis.process?.getBuiltinModule?.("node:sqlite");
+   if (!nodeSqlite) return [];
+   const db = new nodeSqlite.DatabaseSync(config.dbPath, {
+     open: true,
+     readOnly: true
+   });
+   try {
+     return db.prepare("SELECT id FROM documents_meta").all().map((r) => r.id);
+   } finally {
+     db.close();
+   }
+ }
+ /**
+ * Remove documents by ID from an existing index.
+ */
+ async function removeFromIndex(ids, config) {
+   if (ids.length === 0) return;
+   const db = await getDb(config);
+   await db.remove?.(ids);
+   await db.close?.();
+ }
+ async function search(query, config, options = {}) {
+   const { limit = 10, filter } = options;
+   const db = await getDb(config);
+   const results = await db.search(query, {
+     limit,
+     filter,
+     returnContent: true,
+     returnMetadata: true,
+     returnMeta: true
+   });
+   await db.close?.();
+   return results.map((r) => ({
+     id: r.id,
+     content: r.content ?? "",
+     score: r.score,
+     metadata: r.metadata ?? {},
+     highlights: r._meta?.highlights ?? [],
+     lineRange: r._chunk?.lineRange,
+     entities: r._chunk?.entities,
+     scope: r._chunk?.scope
+   }));
+ }
+ /**
+ * Search and return formatted snippets
+ */
+ async function searchSnippets(query, config, options = {}) {
+   return toSnippets(await search(query, config, options));
+ }
+ function toSnippets(results) {
+   return results.map((r) => {
+     const content = stripFrontmatter(r.content);
+     const source = r.metadata.source || r.id;
+     const lines = content.split("\n").length;
+     return {
+       package: r.metadata.package || "unknown",
+       source,
+       lineStart: r.lineRange?.[0] ?? 1,
+       lineEnd: r.lineRange?.[1] ?? lines,
+       content,
+       score: r.score,
+       highlights: r.highlights,
+       entities: r.entities,
+       scope: r.scope
+     };
+   });
+ }
+ async function openPool(dbPaths) {
+   const pool = /* @__PURE__ */ new Map();
+   await Promise.all(dbPaths.map(async (dbPath) => {
+     const db = await getDb({ dbPath });
+     pool.set(dbPath, db);
+   }));
+   return pool;
+ }
+ async function searchPooled(query, pool, options = {}) {
+   const { limit = 10, filter } = options;
+   const fetchLimit = limit * 2;
+   const allResults = await Promise.all(Array.from(pool.values(), async (db) => {
+     return (await db.search(query, {
+       limit: fetchLimit,
+       filter,
+       returnContent: true,
+       returnMetadata: true,
+       returnMeta: true
+     })).map((r) => ({
+       id: r.id,
+       content: r.content ?? "",
+       score: r.score,
+       metadata: r.metadata ?? {},
+       highlights: r._meta?.highlights ?? [],
+       lineRange: r._chunk?.lineRange,
+       entities: r._chunk?.entities,
+       scope: r._chunk?.scope
+     }));
+   }));
+   const seen = /* @__PURE__ */ new Set();
+   return toSnippets(allResults.flat().sort((a, b) => b.score - a.score).filter((r) => {
+     const lr = r.lineRange;
+     const key = `${r.metadata.source || r.id}:${lr?.[0]}-${lr?.[1]}`;
+     if (seen.has(key)) return false;
+     seen.add(key);
+     return true;
+   }).slice(0, limit));
+ }
+ async function closePool(pool) {
+   await Promise.all(Array.from(pool.values(), (db) => db.close?.()));
+   pool.clear();
+ }
+ //#endregion
+ export { getDb as a, removeFromIndex as c, searchSnippets as d, createIndexDirect as i, search as l, closePool as n, listIndexIds as o, createIndex as r, openPool as s, SearchDepsUnavailableError as t, searchPooled as u };
+
+ //# sourceMappingURL=retriv.mjs.map
package/dist/_chunks/retriv.mjs.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"retriv.mjs","names":[],"sources":["../../src/retriv/index.ts"],"sourcesContent":["…"],"mappings":"…"}
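
retriv.mjs consolidates the search layer: `getDb` imports retriv, sqlite-vec and the transformers.js embedder lazily and raises `SearchDepsUnavailableError` when they are missing (for example under `npx`); `createIndexDirect`/`createIndex` index in-process or via the worker pool; and `openPool`/`searchPooled`/`closePool` keep several package indexes open, over-fetching twice the limit, merging by score and de-duplicating by source plus line range. A usage sketch with a made-up index path and document; field names beyond `dbPath`, `limit` and `filter` follow the compiled code above, not a published API:

```ts
// Sketch: paths, document content and metadata fields are illustrative.
import {
  closePool, createIndexDirect, openPool,
  SearchDepsUnavailableError, searchPooled, searchSnippets,
} from './retriv/index.ts'

const dbPath = '/tmp/skilld-docs.sqlite' // hypothetical index location

try {
  await createIndexDirect(
    [{ id: 'some-dep/readme', content: '# Usage\n...', metadata: { package: 'some-dep', source: 'README.md' } }],
    { dbPath },
  )
}
catch (err) {
  // Optional deps (retriv, sqlite-vec) may be absent; indexing is skipped, nothing else breaks.
  if (!(err instanceof SearchDepsUnavailableError))
    throw err
}

// One-shot query against a single index...
const snippets = await searchSnippets('how do I configure some-dep', { dbPath }, { limit: 5 })

// ...or keep several indexes open for interactive use; results are merged by score
// and de-duplicated by source + line range before being trimmed to the limit.
const pool = await openPool([dbPath])
const merged = await searchPooled('configure some-dep', pool, { limit: 5 })
await closePool(pool)
console.log(snippets.length, merged.length)
```
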
package/dist/_chunks/search-interactive.mjs
CHANGED
@@ -1,10 +1,12 @@
import "./agent.mjs";
import "./config.mjs";
+ import "./package-json.mjs";
+ import "./prepare.mjs";
import { n as sanitizeMarkdown } from "./sanitize.mjs";
import "./cache.mjs";
import "./yaml.mjs";
import "./markdown.mjs";
- import {
+ import { n as closePool, s as openPool, t as SearchDepsUnavailableError, u as searchPooled } from "./retriv.mjs";
import "./shared.mjs";
import "./sources.mjs";
import "./detect.mjs";
@@ -13,8 +15,8 @@ import "./cli-helpers.mjs";
import "./lockfile.mjs";
import "./skills.mjs";
import { a as highlightTerms, o as normalizeScores, s as scoreLabel, t as formatCompactSnippet } from "./formatting.mjs";
- import "
- import {
+ import "./core.mjs";
+ import { a as parseFilterPrefix, i as listLockPackages, r as getPackageVersions, t as findPackageDbs } from "./search2.mjs";
import { createLogUpdate } from "log-update";
//#region src/commands/search-interactive.ts
const FILTER_CYCLE = [