@vpxa/kb 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/package.json +1 -1
- package/packages/analyzers/dist/blast-radius-analyzer.js +13 -114
- package/packages/analyzers/dist/dependency-analyzer.js +11 -425
- package/packages/analyzers/dist/diagram-generator.js +4 -86
- package/packages/analyzers/dist/entry-point-analyzer.js +5 -239
- package/packages/analyzers/dist/index.js +1 -23
- package/packages/analyzers/dist/knowledge-producer.js +24 -113
- package/packages/analyzers/dist/pattern-analyzer.js +5 -359
- package/packages/analyzers/dist/regex-call-graph.js +1 -428
- package/packages/analyzers/dist/structure-analyzer.js +4 -258
- package/packages/analyzers/dist/symbol-analyzer.js +13 -442
- package/packages/analyzers/dist/ts-call-graph.js +1 -160
- package/packages/analyzers/dist/types.js +0 -1
- package/packages/chunker/dist/call-graph-extractor.js +1 -90
- package/packages/chunker/dist/chunker-factory.js +1 -36
- package/packages/chunker/dist/chunker.interface.js +0 -1
- package/packages/chunker/dist/code-chunker.js +14 -134
- package/packages/chunker/dist/generic-chunker.js +5 -72
- package/packages/chunker/dist/index.js +1 -21
- package/packages/chunker/dist/markdown-chunker.js +7 -119
- package/packages/chunker/dist/treesitter-chunker.js +8 -234
- package/packages/cli/dist/commands/analyze.js +3 -112
- package/packages/cli/dist/commands/context-cmds.js +1 -155
- package/packages/cli/dist/commands/environment.js +2 -204
- package/packages/cli/dist/commands/execution.js +1 -137
- package/packages/cli/dist/commands/graph.js +7 -81
- package/packages/cli/dist/commands/init.js +9 -87
- package/packages/cli/dist/commands/knowledge.js +1 -139
- package/packages/cli/dist/commands/search.js +8 -267
- package/packages/cli/dist/commands/system.js +4 -241
- package/packages/cli/dist/commands/workspace.js +2 -388
- package/packages/cli/dist/context.js +1 -14
- package/packages/cli/dist/helpers.js +3 -458
- package/packages/cli/dist/index.d.ts +1 -1
- package/packages/cli/dist/index.js +3 -69
- package/packages/cli/dist/kb-init.js +1 -82
- package/packages/cli/dist/types.js +0 -1
- package/packages/core/dist/constants.js +1 -43
- package/packages/core/dist/content-detector.js +1 -79
- package/packages/core/dist/errors.js +1 -40
- package/packages/core/dist/index.js +1 -9
- package/packages/core/dist/logger.js +1 -34
- package/packages/core/dist/types.js +0 -1
- package/packages/embeddings/dist/embedder.interface.js +0 -1
- package/packages/embeddings/dist/index.js +1 -5
- package/packages/embeddings/dist/onnx-embedder.js +1 -82
- package/packages/indexer/dist/file-hasher.js +1 -13
- package/packages/indexer/dist/filesystem-crawler.js +1 -125
- package/packages/indexer/dist/graph-extractor.js +1 -111
- package/packages/indexer/dist/incremental-indexer.js +1 -278
- package/packages/indexer/dist/index.js +1 -14
- package/packages/server/dist/api.js +1 -9
- package/packages/server/dist/config.js +1 -75
- package/packages/server/dist/curated-manager.js +9 -356
- package/packages/server/dist/index.js +1 -134
- package/packages/server/dist/replay-interceptor.js +1 -38
- package/packages/server/dist/resources/resources.js +2 -40
- package/packages/server/dist/server.js +1 -247
- package/packages/server/dist/tools/analyze.tools.js +1 -288
- package/packages/server/dist/tools/forge.tools.js +11 -499
- package/packages/server/dist/tools/forget.tool.js +3 -39
- package/packages/server/dist/tools/graph.tool.js +5 -110
- package/packages/server/dist/tools/list.tool.js +5 -53
- package/packages/server/dist/tools/lookup.tool.js +8 -51
- package/packages/server/dist/tools/onboard.tool.js +2 -112
- package/packages/server/dist/tools/produce.tool.js +4 -74
- package/packages/server/dist/tools/read.tool.js +4 -47
- package/packages/server/dist/tools/reindex.tool.js +2 -70
- package/packages/server/dist/tools/remember.tool.js +3 -42
- package/packages/server/dist/tools/replay.tool.js +6 -88
- package/packages/server/dist/tools/search.tool.js +17 -327
- package/packages/server/dist/tools/status.tool.js +3 -68
- package/packages/server/dist/tools/toolkit.tools.js +20 -1673
- package/packages/server/dist/tools/update.tool.js +3 -39
- package/packages/server/dist/tools/utility.tools.js +19 -456
- package/packages/store/dist/graph-store.interface.js +0 -1
- package/packages/store/dist/index.js +1 -9
- package/packages/store/dist/lance-store.js +1 -258
- package/packages/store/dist/sqlite-graph-store.js +8 -309
- package/packages/store/dist/store-factory.js +1 -14
- package/packages/store/dist/store.interface.js +0 -1
- package/packages/tools/dist/batch.js +1 -45
- package/packages/tools/dist/changelog.js +2 -112
- package/packages/tools/dist/check.js +2 -59
- package/packages/tools/dist/checkpoint.js +2 -43
- package/packages/tools/dist/codemod.js +2 -69
- package/packages/tools/dist/compact.js +3 -60
- package/packages/tools/dist/data-transform.js +1 -124
- package/packages/tools/dist/dead-symbols.js +2 -71
- package/packages/tools/dist/delegate.js +3 -128
- package/packages/tools/dist/diff-parse.js +3 -153
- package/packages/tools/dist/digest.js +7 -242
- package/packages/tools/dist/encode.js +1 -46
- package/packages/tools/dist/env-info.js +1 -58
- package/packages/tools/dist/eval.js +3 -79
- package/packages/tools/dist/evidence-map.js +3 -203
- package/packages/tools/dist/file-summary.js +2 -106
- package/packages/tools/dist/file-walk.js +1 -75
- package/packages/tools/dist/find-examples.js +3 -48
- package/packages/tools/dist/find.js +1 -120
- package/packages/tools/dist/forge-classify.js +2 -319
- package/packages/tools/dist/forge-ground.js +1 -184
- package/packages/tools/dist/git-context.js +3 -46
- package/packages/tools/dist/graph-query.js +1 -194
- package/packages/tools/dist/health.js +1 -118
- package/packages/tools/dist/http-request.js +1 -58
- package/packages/tools/dist/index.js +1 -273
- package/packages/tools/dist/lane.js +7 -227
- package/packages/tools/dist/measure.js +2 -119
- package/packages/tools/dist/onboard.js +42 -1136
- package/packages/tools/dist/parse-output.js +2 -158
- package/packages/tools/dist/process-manager.js +1 -69
- package/packages/tools/dist/queue.js +2 -126
- package/packages/tools/dist/regex-test.js +1 -39
- package/packages/tools/dist/rename.js +2 -70
- package/packages/tools/dist/replay.js +6 -108
- package/packages/tools/dist/schema-validate.js +1 -141
- package/packages/tools/dist/scope-map.js +1 -72
- package/packages/tools/dist/snippet.js +1 -80
- package/packages/tools/dist/stash.js +2 -60
- package/packages/tools/dist/stratum-card.js +5 -238
- package/packages/tools/dist/symbol.js +3 -87
- package/packages/tools/dist/test-run.js +2 -55
- package/packages/tools/dist/text-utils.js +2 -31
- package/packages/tools/dist/time-utils.js +1 -135
- package/packages/tools/dist/trace.js +2 -114
- package/packages/tools/dist/truncation.js +10 -41
- package/packages/tools/dist/watch.js +1 -61
- package/packages/tools/dist/web-fetch.js +9 -244
- package/packages/tools/dist/web-search.js +1 -46
- package/packages/tools/dist/workset.js +2 -77
- package/packages/tui/dist/App.js +260 -52468
- package/packages/tui/dist/index.js +286 -54551
- package/packages/tui/dist/panels/CuratedPanel.js +211 -34291
- package/packages/tui/dist/panels/LogPanel.js +259 -51703
- package/packages/tui/dist/panels/SearchPanel.js +212 -34824
- package/packages/tui/dist/panels/StatusPanel.js +211 -34304
|
@@ -1,45 +1 @@
|
|
|
1
|
-
async function
|
|
2
|
-
const concurrency = Math.max(1, options.concurrency ?? 4);
|
|
3
|
-
const results = [];
|
|
4
|
-
const pending = [...operations];
|
|
5
|
-
async function runOne(op) {
|
|
6
|
-
const start = Date.now();
|
|
7
|
-
try {
|
|
8
|
-
const result = await executor(op);
|
|
9
|
-
return {
|
|
10
|
-
id: op.id,
|
|
11
|
-
status: "success",
|
|
12
|
-
result,
|
|
13
|
-
durationMs: Date.now() - start
|
|
14
|
-
};
|
|
15
|
-
} catch (error) {
|
|
16
|
-
return {
|
|
17
|
-
id: op.id,
|
|
18
|
-
status: "error",
|
|
19
|
-
error: error instanceof Error ? error.message : String(error),
|
|
20
|
-
durationMs: Date.now() - start
|
|
21
|
-
};
|
|
22
|
-
}
|
|
23
|
-
}
|
|
24
|
-
while (pending.length > 0) {
|
|
25
|
-
const chunk = pending.splice(0, concurrency);
|
|
26
|
-
const chunkResults = await Promise.allSettled(chunk.map((operation) => runOne(operation)));
|
|
27
|
-
for (const result of chunkResults) {
|
|
28
|
-
if (result.status === "fulfilled") {
|
|
29
|
-
results.push(result.value);
|
|
30
|
-
continue;
|
|
31
|
-
}
|
|
32
|
-
results.push({
|
|
33
|
-
id: "unknown",
|
|
34
|
-
status: "error",
|
|
35
|
-
error: result.reason instanceof Error ? result.reason.message : "Promise rejected",
|
|
36
|
-
durationMs: 0
|
|
37
|
-
});
|
|
38
|
-
}
|
|
39
|
-
}
|
|
40
|
-
return results;
|
|
41
|
-
}
|
|
42
|
-
export {
|
|
43
|
-
batch
|
|
44
|
-
};
|
|
45
|
-
//# sourceMappingURL=batch.js.map
|
|
1
|
+
async function h(o,a,c={}){const i=Math.max(1,c.concurrency??4),e=[],s=[...o];async function u(r){const n=Date.now();try{const t=await a(r);return{id:r.id,status:"success",result:t,durationMs:Date.now()-n}}catch(t){return{id:r.id,status:"error",error:t instanceof Error?t.message:String(t),durationMs:Date.now()-n}}}for(;s.length>0;){const r=s.splice(0,i),n=await Promise.allSettled(r.map(t=>u(t)));for(const t of n){if(t.status==="fulfilled"){e.push(t.value);continue}e.push({id:"unknown",status:"error",error:t.reason instanceof Error?t.reason.message:"Promise rejected",durationMs:0})}}return e}export{h as batch};
|
|
@@ -1,112 +1,2 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
function changelog(options) {
|
|
4
|
-
const {
|
|
5
|
-
from,
|
|
6
|
-
to = "HEAD",
|
|
7
|
-
format = "grouped",
|
|
8
|
-
includeBreaking = true,
|
|
9
|
-
cwd = process.cwd()
|
|
10
|
-
} = options;
|
|
11
|
-
if (!SAFE_REF.test(from)) throw new Error(`Invalid git ref: ${from}`);
|
|
12
|
-
if (!SAFE_REF.test(to)) throw new Error(`Invalid git ref: ${to}`);
|
|
13
|
-
const FIELD = "";
|
|
14
|
-
const ENTRY = "";
|
|
15
|
-
const gitFormat = `%H${FIELD}%s${FIELD}%b${FIELD}%an${FIELD}%ai${ENTRY}`;
|
|
16
|
-
let output;
|
|
17
|
-
try {
|
|
18
|
-
output = execSync(`git log "${from}..${to}" --format="${gitFormat}"`, {
|
|
19
|
-
cwd,
|
|
20
|
-
encoding: "utf8",
|
|
21
|
-
maxBuffer: 10 * 1024 * 1024
|
|
22
|
-
});
|
|
23
|
-
} catch {
|
|
24
|
-
throw new Error(`Git log failed. Ensure "${from}" and "${to}" are valid refs.`);
|
|
25
|
-
}
|
|
26
|
-
const entries = output.split(ENTRY).map((b) => b.trim()).filter(Boolean).map((block) => {
|
|
27
|
-
const [hash = "", subject = "", body = "", author = "", date = ""] = block.split(FIELD);
|
|
28
|
-
const m = subject.match(/^(\w+)(?:\(([^)]*)\))?(!)?:\s*(.+)/);
|
|
29
|
-
return {
|
|
30
|
-
hash: hash.slice(0, 8),
|
|
31
|
-
type: m?.[1] ?? "other",
|
|
32
|
-
scope: m?.[2] ?? "",
|
|
33
|
-
subject: m?.[4] ?? subject,
|
|
34
|
-
body: body.trim(),
|
|
35
|
-
author: author.trim(),
|
|
36
|
-
date: date.trim().split(" ")[0],
|
|
37
|
-
breaking: !!(m?.[3] || /BREAKING[\s-]CHANGE/i.test(body))
|
|
38
|
-
};
|
|
39
|
-
});
|
|
40
|
-
const types = {};
|
|
41
|
-
let breakingCount = 0;
|
|
42
|
-
for (const e of entries) {
|
|
43
|
-
types[e.type] = (types[e.type] ?? 0) + 1;
|
|
44
|
-
if (e.breaking) breakingCount++;
|
|
45
|
-
}
|
|
46
|
-
return {
|
|
47
|
-
entries,
|
|
48
|
-
markdown: formatChangelog(entries, format, includeBreaking),
|
|
49
|
-
stats: { total: entries.length, breaking: breakingCount, types }
|
|
50
|
-
};
|
|
51
|
-
}
|
|
52
|
-
function formatChangelog(entries, format, includeBreaking) {
|
|
53
|
-
const lines = ["# Changelog", ""];
|
|
54
|
-
if (includeBreaking) {
|
|
55
|
-
const breaking = entries.filter((e) => e.breaking);
|
|
56
|
-
if (breaking.length > 0) {
|
|
57
|
-
lines.push("## Breaking Changes", "");
|
|
58
|
-
for (const e of breaking) lines.push(`- ${e.subject} (${e.hash})`);
|
|
59
|
-
lines.push("");
|
|
60
|
-
}
|
|
61
|
-
}
|
|
62
|
-
if (format === "grouped") {
|
|
63
|
-
const groups = {};
|
|
64
|
-
for (const e of entries) {
|
|
65
|
-
if (!groups[e.type]) groups[e.type] = [];
|
|
66
|
-
groups[e.type].push(e);
|
|
67
|
-
}
|
|
68
|
-
const order = ["feat", "fix", "refactor", "perf", "test", "docs", "chore"];
|
|
69
|
-
const labels = {
|
|
70
|
-
feat: "Features",
|
|
71
|
-
fix: "Bug Fixes",
|
|
72
|
-
refactor: "Refactoring",
|
|
73
|
-
perf: "Performance",
|
|
74
|
-
test: "Tests",
|
|
75
|
-
docs: "Documentation",
|
|
76
|
-
chore: "Chores",
|
|
77
|
-
other: "Other"
|
|
78
|
-
};
|
|
79
|
-
for (const type of [...order, ...Object.keys(groups).filter((k) => !order.includes(k))]) {
|
|
80
|
-
if (!groups[type]?.length) continue;
|
|
81
|
-
lines.push(`## ${labels[type] ?? type}`, "");
|
|
82
|
-
for (const e of groups[type]) {
|
|
83
|
-
const scope = e.scope ? `**${e.scope}:** ` : "";
|
|
84
|
-
lines.push(`- ${scope}${e.subject} (${e.hash})`);
|
|
85
|
-
}
|
|
86
|
-
lines.push("");
|
|
87
|
-
}
|
|
88
|
-
} else if (format === "chronological") {
|
|
89
|
-
for (const e of entries) {
|
|
90
|
-
const scope = e.scope ? `(${e.scope}) ` : "";
|
|
91
|
-
lines.push(`- \`${e.date}\` ${e.type}: ${scope}${e.subject} (${e.hash})`);
|
|
92
|
-
}
|
|
93
|
-
} else {
|
|
94
|
-
const byScope = {};
|
|
95
|
-
for (const e of entries) {
|
|
96
|
-
const key = e.scope || "general";
|
|
97
|
-
if (!byScope[key]) byScope[key] = [];
|
|
98
|
-
byScope[key].push(e);
|
|
99
|
-
}
|
|
100
|
-
for (const [scope, scopeEntries] of Object.entries(byScope)) {
|
|
101
|
-
lines.push(`## ${scope}`, "");
|
|
102
|
-
for (const e of scopeEntries) lines.push(`- ${e.type}: ${e.subject} (${e.hash})`);
|
|
103
|
-
lines.push("");
|
|
104
|
-
}
|
|
105
|
-
}
|
|
106
|
-
return lines.join("\n");
|
|
107
|
-
}
|
|
108
|
-
export {
|
|
109
|
-
changelog,
|
|
110
|
-
formatChangelog
|
|
111
|
-
};
|
|
112
|
-
//# sourceMappingURL=changelog.js.map
|
|
1
|
+
import { execSync } from "node:child_process";

// Git refs we accept: branch/tag/sha characters plus common rev syntax
// (~, ^, @, {}). Anything else is rejected before the ref reaches the shell.
const SAFE_REF = /^[a-zA-Z0-9_./\-~^@{}]+$/;

// Delimiters embedded in the `git log --format` string.
// FIX(review): the published minified code carried empty strings here (the
// non-printing delimiter characters were evidently lost in transit), which
// makes `split("")` explode the git output into single characters. Restored
// to the ASCII unit separator (0x1f) and record separator (0x1e), which
// cannot appear in normal commit text.
const FIELD = "\u001f";
const ENTRY = "\u001e";

/**
 * Builds a conventional-commit changelog for the range `from..to`.
 *
 * @param {object} options - { from, to = "HEAD", format = "grouped",
 *   includeBreaking = true, cwd = process.cwd() }
 * @returns {{ entries: object[], markdown: string,
 *   stats: { total: number, breaking: number, types: object } }}
 * @throws {Error} on an invalid ref or when `git log` fails.
 */
function changelog(options) {
  const {
    from,
    to = "HEAD",
    format = "grouped",
    includeBreaking = true,
    cwd = process.cwd()
  } = options;
  if (!SAFE_REF.test(from)) throw new Error(`Invalid git ref: ${from}`);
  if (!SAFE_REF.test(to)) throw new Error(`Invalid git ref: ${to}`);
  const gitFormat = `%H${FIELD}%s${FIELD}%b${FIELD}%an${FIELD}%ai${ENTRY}`;
  let output;
  try {
    output = execSync(`git log "${from}..${to}" --format="${gitFormat}"`, {
      cwd,
      encoding: "utf8",
      maxBuffer: 10 * 1024 * 1024
    });
  } catch {
    throw new Error(`Git log failed. Ensure "${from}" and "${to}" are valid refs.`);
  }
  const entries = output
    .split(ENTRY)
    .map((block) => block.trim())
    .filter(Boolean)
    .map((block) => {
      const [hash = "", subject = "", body = "", author = "", date = ""] = block.split(FIELD);
      // conventional-commit header: type(scope)!: subject
      const m = subject.match(/^(\w+)(?:\(([^)]*)\))?(!)?:\s*(.+)/);
      return {
        hash: hash.slice(0, 8),
        type: m?.[1] ?? "other",
        scope: m?.[2] ?? "",
        subject: m?.[4] ?? subject,
        body: body.trim(),
        author: author.trim(),
        date: date.trim().split(" ")[0],
        breaking: !!(m?.[3] || /BREAKING[\s-]CHANGE/i.test(body))
      };
    });
  const types = {};
  let breakingCount = 0;
  for (const entry of entries) {
    types[entry.type] = (types[entry.type] ?? 0) + 1;
    if (entry.breaking) breakingCount++;
  }
  return {
    entries,
    markdown: formatChangelog(entries, format, includeBreaking),
    stats: { total: entries.length, breaking: breakingCount, types }
  };
}

/**
 * Renders parsed entries as markdown.
 *
 * @param {object[]} entries - parsed commits from `changelog`
 * @param {string} format - "grouped" (by commit type), "chronological",
 *   or anything else for scope-grouped output
 * @param {boolean} includeBreaking - prepend a "Breaking Changes" section
 * @returns {string} markdown text
 */
function formatChangelog(entries, format, includeBreaking) {
  const lines = ["# Changelog", ""];
  if (includeBreaking) {
    const breaking = entries.filter((e) => e.breaking);
    if (breaking.length > 0) {
      lines.push("## Breaking Changes", "");
      for (const e of breaking) lines.push(`- ${e.subject} (${e.hash})`);
      lines.push("");
    }
  }
  if (format === "grouped") {
    const groups = {};
    for (const e of entries) {
      (groups[e.type] ??= []).push(e);
    }
    const order = ["feat", "fix", "refactor", "perf", "test", "docs", "chore"];
    const labels = {
      feat: "Features",
      fix: "Bug Fixes",
      refactor: "Refactoring",
      perf: "Performance",
      test: "Tests",
      docs: "Documentation",
      chore: "Chores",
      other: "Other"
    };
    // Known types first in canonical order, then any remaining types.
    for (const type of [...order, ...Object.keys(groups).filter((k) => !order.includes(k))]) {
      if (!groups[type]?.length) continue;
      lines.push(`## ${labels[type] ?? type}`, "");
      for (const e of groups[type]) {
        const scope = e.scope ? `**${e.scope}:** ` : "";
        lines.push(`- ${scope}${e.subject} (${e.hash})`);
      }
      lines.push("");
    }
  } else if (format === "chronological") {
    for (const e of entries) {
      const scope = e.scope ? `(${e.scope}) ` : "";
      lines.push(`- \`${e.date}\` ${e.type}: ${scope}${e.subject} (${e.hash})`);
    }
  } else {
    const byScope = {};
    for (const e of entries) {
      const key = e.scope || "general";
      (byScope[key] ??= []).push(e);
    }
    for (const [scope, scopeEntries] of Object.entries(byScope)) {
      lines.push(`## ${scope}`, "");
      for (const e of scopeEntries) lines.push(`- ${e.type}: ${e.subject} (${e.hash})`);
      lines.push("");
    }
  }
  return lines.join("\n");
}

export { changelog, formatChangelog };
|
|
@@ -1,59 +1,2 @@
|
|
|
1
|
-
import {
|
|
2
|
-
|
|
3
|
-
import { join } from "node:path";
|
|
4
|
-
import { promisify } from "node:util";
|
|
5
|
-
import { parseBiome, parseTsc } from "./parse-output.js";
|
|
6
|
-
const execFileAsync = promisify(execFile);
|
|
7
|
-
function getProcessOutput(error) {
|
|
8
|
-
const candidate = error;
|
|
9
|
-
const stdout = candidate.stdout?.toString() ?? "";
|
|
10
|
-
const stderr = candidate.stderr?.toString() ?? "";
|
|
11
|
-
return [stdout, stderr].filter(Boolean).join("\n").trim() || candidate.message || "Command failed";
|
|
12
|
-
}
|
|
13
|
-
async function check(options = {}) {
|
|
14
|
-
const cwd = options.cwd ?? process.cwd();
|
|
15
|
-
const tscResult = { errors: [], passed: true, raw: "" };
|
|
16
|
-
const biomeResult = { errors: [], passed: true, raw: "" };
|
|
17
|
-
if (!options.skipTypes) {
|
|
18
|
-
try {
|
|
19
|
-
const pkgPath = join(cwd, "package.json");
|
|
20
|
-
let useProjectScript = false;
|
|
21
|
-
try {
|
|
22
|
-
const pkg = JSON.parse(await readFileAsync(pkgPath, "utf-8"));
|
|
23
|
-
useProjectScript = !!pkg.scripts?.typecheck;
|
|
24
|
-
} catch {
|
|
25
|
-
}
|
|
26
|
-
if (useProjectScript && !options.files?.length) {
|
|
27
|
-
await execFileAsync("npx", ["turbo", "run", "typecheck"], { cwd, shell: true });
|
|
28
|
-
} else {
|
|
29
|
-
const args = ["--noEmit"];
|
|
30
|
-
if (options.files?.length) args.push(...options.files);
|
|
31
|
-
await execFileAsync("npx", ["tsc", ...args], { cwd, shell: true });
|
|
32
|
-
}
|
|
33
|
-
} catch (error) {
|
|
34
|
-
tscResult.raw = getProcessOutput(error);
|
|
35
|
-
tscResult.errors = parseTsc(tscResult.raw);
|
|
36
|
-
tscResult.passed = tscResult.errors.length === 0;
|
|
37
|
-
}
|
|
38
|
-
}
|
|
39
|
-
if (!options.skipLint) {
|
|
40
|
-
try {
|
|
41
|
-
const args = ["check"];
|
|
42
|
-
if (options.files?.length) args.push(...options.files);
|
|
43
|
-
await execFileAsync("npx", ["biome", ...args], { cwd, shell: true });
|
|
44
|
-
} catch (error) {
|
|
45
|
-
biomeResult.raw = getProcessOutput(error);
|
|
46
|
-
biomeResult.errors = parseBiome(biomeResult.raw);
|
|
47
|
-
biomeResult.passed = biomeResult.errors.length === 0;
|
|
48
|
-
}
|
|
49
|
-
}
|
|
50
|
-
return {
|
|
51
|
-
tsc: tscResult,
|
|
52
|
-
biome: biomeResult,
|
|
53
|
-
passed: tscResult.passed && biomeResult.passed
|
|
54
|
-
};
|
|
55
|
-
}
|
|
56
|
-
export {
|
|
57
|
-
check
|
|
58
|
-
};
|
|
59
|
-
//# sourceMappingURL=check.js.map
|
|
1
|
+
import { execFile } from "node:child_process";
import { readFile } from "node:fs/promises";
import { join } from "node:path";
import { promisify } from "node:util";
import { parseBiome, parseTsc } from "./parse-output.js";

const execFileAsync = promisify(execFile);

// Collapses a child-process failure into one readable string:
// stdout + stderr when present, otherwise the error's own message.
function getProcessOutput(error) {
  const proc = error;
  const stdout = proc.stdout?.toString() ?? "";
  const stderr = proc.stderr?.toString() ?? "";
  return [stdout, stderr].filter(Boolean).join("\n").trim() || proc.message || "Command failed";
}

/**
 * Runs type checking and linting over a project and reports parsed errors.
 *
 * Type checking uses the project's `typecheck` script (via turbo) when
 * package.json declares one and no explicit file list was given; otherwise it
 * falls back to `tsc --noEmit`. Linting uses `biome check`. A non-zero exit is
 * treated as "has diagnostics" and its output is parsed, not rethrown.
 *
 * NOTE(review): commands run with `shell: true`, so entries in `options.files`
 * containing shell metacharacters would be interpreted by the shell — verify
 * callers only pass trusted paths.
 *
 * @param {object} [options] - { cwd, files, skipTypes, skipLint }
 * @returns {Promise<{ tsc: object, biome: object, passed: boolean }>}
 */
async function check(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const tscResult = { errors: [], passed: true, raw: "" };
  const biomeResult = { errors: [], passed: true, raw: "" };

  if (!options.skipTypes) {
    try {
      const pkgPath = join(cwd, "package.json");
      let useProjectScript = false;
      try {
        const pkg = JSON.parse(await readFile(pkgPath, "utf-8"));
        useProjectScript = !!pkg.scripts?.typecheck;
      } catch {
        // No readable package.json — fall through to plain tsc.
      }
      if (useProjectScript && !options.files?.length) {
        await execFileAsync("npx", ["turbo", "run", "typecheck"], { cwd, shell: true });
      } else {
        const args = ["--noEmit"];
        if (options.files?.length) args.push(...options.files);
        await execFileAsync("npx", ["tsc", ...args], { cwd, shell: true });
      }
    } catch (error) {
      tscResult.raw = getProcessOutput(error);
      tscResult.errors = parseTsc(tscResult.raw);
      tscResult.passed = tscResult.errors.length === 0;
    }
  }

  if (!options.skipLint) {
    try {
      const args = ["check"];
      if (options.files?.length) args.push(...options.files);
      await execFileAsync("npx", ["biome", ...args], { cwd, shell: true });
    } catch (error) {
      biomeResult.raw = getProcessOutput(error);
      biomeResult.errors = parseBiome(biomeResult.raw);
      biomeResult.passed = biomeResult.errors.length === 0;
    }
  }

  return {
    tsc: tscResult,
    biome: biomeResult,
    passed: tscResult.passed && biomeResult.passed
  };
}

export { check };
|
|
@@ -1,43 +1,2 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
const CHECKPOINT_DIR = ".kb-state/checkpoints";
|
|
4
|
-
function checkpointDir(cwd) {
|
|
5
|
-
const root = cwd ?? process.cwd();
|
|
6
|
-
const dir = resolve(root, CHECKPOINT_DIR);
|
|
7
|
-
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
|
8
|
-
return dir;
|
|
9
|
-
}
|
|
10
|
-
function checkpointSave(label, data, options) {
|
|
11
|
-
const slug = label.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "") || "checkpoint";
|
|
12
|
-
const checkpoint = {
|
|
13
|
-
id: `${Date.now()}-${slug}`,
|
|
14
|
-
label,
|
|
15
|
-
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
16
|
-
data,
|
|
17
|
-
files: options?.files,
|
|
18
|
-
notes: options?.notes
|
|
19
|
-
};
|
|
20
|
-
const filePath = resolve(checkpointDir(options?.cwd), `${checkpoint.id}.json`);
|
|
21
|
-
writeFileSync(filePath, `${JSON.stringify(checkpoint, null, 2)}
|
|
22
|
-
`, "utf-8");
|
|
23
|
-
return checkpoint;
|
|
24
|
-
}
|
|
25
|
-
function checkpointLoad(id, cwd) {
|
|
26
|
-
const filePath = resolve(checkpointDir(cwd), `${id}.json`);
|
|
27
|
-
if (!existsSync(filePath)) return void 0;
|
|
28
|
-
return JSON.parse(readFileSync(filePath, "utf-8"));
|
|
29
|
-
}
|
|
30
|
-
function checkpointList(cwd) {
|
|
31
|
-
const dir = checkpointDir(cwd);
|
|
32
|
-
return readdirSync(dir).filter((file) => file.endsWith(".json")).map((file) => JSON.parse(readFileSync(resolve(dir, file), "utf-8"))).sort((left, right) => right.createdAt.localeCompare(left.createdAt));
|
|
33
|
-
}
|
|
34
|
-
function checkpointLatest(cwd) {
|
|
35
|
-
return checkpointList(cwd)[0];
|
|
36
|
-
}
|
|
37
|
-
export {
|
|
38
|
-
checkpointLatest,
|
|
39
|
-
checkpointList,
|
|
40
|
-
checkpointLoad,
|
|
41
|
-
checkpointSave
|
|
42
|
-
};
|
|
43
|
-
//# sourceMappingURL=checkpoint.js.map
|
|
1
|
+
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";

// All checkpoints are JSON files under this directory, relative to the cwd.
const CHECKPOINT_DIR = ".kb-state/checkpoints";

// Resolves (and lazily creates) the checkpoint directory for `cwd`.
function checkpointDir(cwd) {
  const root = cwd ?? process.cwd();
  const dir = resolve(root, CHECKPOINT_DIR);
  if (!existsSync(dir)) {
    mkdirSync(dir, { recursive: true });
  }
  return dir;
}

/**
 * Persists `data` under a timestamped, slugified id and returns the record.
 * The id is `<epoch-ms>-<slug of label>`; a label that slugs to nothing
 * falls back to "checkpoint".
 */
function checkpointSave(label, data, options) {
  const slug =
    label.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "") || "checkpoint";
  const checkpoint = {
    id: `${Date.now()}-${slug}`,
    label,
    createdAt: new Date().toISOString(),
    data,
    files: options?.files,
    notes: options?.notes
  };
  const target = resolve(checkpointDir(options?.cwd), `${checkpoint.id}.json`);
  writeFileSync(target, `${JSON.stringify(checkpoint, null, 2)}\n`, "utf-8");
  return checkpoint;
}

// Loads one checkpoint by id; returns undefined when it does not exist.
function checkpointLoad(id, cwd) {
  const target = resolve(checkpointDir(cwd), `${id}.json`);
  if (!existsSync(target)) return undefined;
  return JSON.parse(readFileSync(target, "utf-8"));
}

// Lists all checkpoints, newest first (ISO `createdAt` sorts lexically).
function checkpointList(cwd) {
  const dir = checkpointDir(cwd);
  return readdirSync(dir)
    .filter((name) => name.endsWith(".json"))
    .map((name) => JSON.parse(readFileSync(resolve(dir, name), "utf-8")))
    .sort((left, right) => right.createdAt.localeCompare(left.createdAt));
}

// Convenience accessor for the most recent checkpoint, if any.
function checkpointLatest(cwd) {
  return checkpointList(cwd)[0];
}

export { checkpointLatest, checkpointList, checkpointLoad, checkpointSave };
|
|
@@ -1,69 +1,2 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
import { DEFAULT_TOOL_EXTENSIONS, matchesGlobPattern, walkFiles } from "./file-walk.js";
|
|
4
|
-
function normalizePath(path) {
|
|
5
|
-
return path.replace(/\\/g, "/");
|
|
6
|
-
}
|
|
7
|
-
async function codemod(options) {
|
|
8
|
-
const {
|
|
9
|
-
rootPath,
|
|
10
|
-
rules,
|
|
11
|
-
extensions = DEFAULT_TOOL_EXTENSIONS,
|
|
12
|
-
exclude = [],
|
|
13
|
-
dryRun = false
|
|
14
|
-
} = options;
|
|
15
|
-
const compiledRules = rules.map((rule) => ({
|
|
16
|
-
...rule,
|
|
17
|
-
regex: new RegExp(rule.pattern, "g")
|
|
18
|
-
}));
|
|
19
|
-
const filePaths = await walkFiles(rootPath, extensions, exclude);
|
|
20
|
-
const changes = [];
|
|
21
|
-
const appliedRules = /* @__PURE__ */ new Set();
|
|
22
|
-
let filesModified = 0;
|
|
23
|
-
for (const filePath of filePaths) {
|
|
24
|
-
const relativePath = normalizePath(relative(rootPath, filePath));
|
|
25
|
-
const originalContent = await readFile(filePath, "utf-8");
|
|
26
|
-
const lines = originalContent.split(/\r?\n/);
|
|
27
|
-
let fileChanged = false;
|
|
28
|
-
for (const [ruleIndex, rule] of compiledRules.entries()) {
|
|
29
|
-
if (rule.fileFilter && !matchesGlobPattern(relativePath, rule.fileFilter)) {
|
|
30
|
-
continue;
|
|
31
|
-
}
|
|
32
|
-
for (let index = 0; index < lines.length; index++) {
|
|
33
|
-
const before = lines[index];
|
|
34
|
-
rule.regex.lastIndex = 0;
|
|
35
|
-
const after = before.replace(rule.regex, rule.replacement);
|
|
36
|
-
if (before === after) {
|
|
37
|
-
continue;
|
|
38
|
-
}
|
|
39
|
-
lines[index] = after;
|
|
40
|
-
fileChanged = true;
|
|
41
|
-
appliedRules.add(ruleIndex);
|
|
42
|
-
changes.push({
|
|
43
|
-
rule: rule.description,
|
|
44
|
-
path: relativePath,
|
|
45
|
-
line: index + 1,
|
|
46
|
-
before,
|
|
47
|
-
after
|
|
48
|
-
});
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
if (!fileChanged) {
|
|
52
|
-
continue;
|
|
53
|
-
}
|
|
54
|
-
filesModified += 1;
|
|
55
|
-
if (!dryRun) {
|
|
56
|
-
await writeFile(filePath, lines.join("\n"), "utf-8");
|
|
57
|
-
}
|
|
58
|
-
}
|
|
59
|
-
return {
|
|
60
|
-
changes,
|
|
61
|
-
rulesApplied: appliedRules.size,
|
|
62
|
-
filesModified,
|
|
63
|
-
dryRun
|
|
64
|
-
};
|
|
65
|
-
}
|
|
66
|
-
export {
|
|
67
|
-
codemod
|
|
68
|
-
};
|
|
69
|
-
//# sourceMappingURL=codemod.js.map
|
|
1
|
+
import { readFile, writeFile } from "node:fs/promises";
import { relative } from "node:path";
import { DEFAULT_TOOL_EXTENSIONS, matchesGlobPattern, walkFiles } from "./file-walk.js";

// Normalizes Windows path separators so glob filters and reports use "/".
function normalizePath(path) {
  return path.replace(/\\/g, "/");
}

/**
 * Applies regex rewrite rules line-by-line to every matching file under
 * `rootPath`.
 *
 * Each rule is `{ pattern, replacement, description, fileFilter? }`; patterns
 * are compiled once with the global flag. In `dryRun` mode changes are
 * reported but files are left untouched.
 *
 * @param {object} options - { rootPath, rules, extensions?, exclude?, dryRun? }
 * @returns {Promise<{ changes: object[], rulesApplied: number,
 *   filesModified: number, dryRun: boolean }>}
 */
async function codemod(options) {
  const {
    rootPath,
    rules,
    extensions = DEFAULT_TOOL_EXTENSIONS,
    exclude = [],
    dryRun = false
  } = options;

  const compiledRules = rules.map((rule) => ({
    ...rule,
    regex: new RegExp(rule.pattern, "g")
  }));
  const filePaths = await walkFiles(rootPath, extensions, exclude);
  const changes = [];
  const appliedRules = new Set();
  let filesModified = 0;

  for (const filePath of filePaths) {
    const relativePath = normalizePath(relative(rootPath, filePath));
    const lines = (await readFile(filePath, "utf-8")).split(/\r?\n/);
    let fileChanged = false;

    for (const [ruleIndex, rule] of compiledRules.entries()) {
      if (rule.fileFilter && !matchesGlobPattern(relativePath, rule.fileFilter)) {
        continue;
      }
      for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
        const before = lines[lineIndex];
        // /g regexes are stateful — reset before reusing on each line.
        rule.regex.lastIndex = 0;
        const after = before.replace(rule.regex, rule.replacement);
        if (before === after) continue;
        lines[lineIndex] = after;
        fileChanged = true;
        appliedRules.add(ruleIndex);
        changes.push({
          rule: rule.description,
          path: relativePath,
          line: lineIndex + 1,
          before,
          after
        });
      }
    }

    if (fileChanged) {
      filesModified += 1;
      if (!dryRun) {
        await writeFile(filePath, lines.join("\n"), "utf-8");
      }
    }
  }

  return { changes, rulesApplied: appliedRules.size, filesModified, dryRun };
}

export { codemod };
|
|
@@ -1,60 +1,3 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
if (text.length <= maxChars) {
|
|
5
|
-
return {
|
|
6
|
-
text,
|
|
7
|
-
originalChars: text.length,
|
|
8
|
-
compressedChars: text.length,
|
|
9
|
-
ratio: 1,
|
|
10
|
-
segmentsKept: 1,
|
|
11
|
-
segmentsTotal: 1
|
|
12
|
-
};
|
|
13
|
-
}
|
|
14
|
-
const segments = segment(text, segmentation);
|
|
15
|
-
if (segments.length === 0) {
|
|
16
|
-
return {
|
|
17
|
-
text: "",
|
|
18
|
-
originalChars: text.length,
|
|
19
|
-
compressedChars: 0,
|
|
20
|
-
ratio: 0,
|
|
21
|
-
segmentsKept: 0,
|
|
22
|
-
segmentsTotal: 0
|
|
23
|
-
};
|
|
24
|
-
}
|
|
25
|
-
const queryVector = await embedder.embed(query);
|
|
26
|
-
const scored = [];
|
|
27
|
-
for (let i = 0; i < segments.length; i++) {
|
|
28
|
-
const segVector = await embedder.embed(segments[i]);
|
|
29
|
-
const score = cosineSimilarity(queryVector, segVector);
|
|
30
|
-
scored.push({ text: segments[i], score, index: i });
|
|
31
|
-
}
|
|
32
|
-
const sorted = scored.filter((s) => s.score >= minScore).sort((a, b) => b.score - a.score);
|
|
33
|
-
const selected = [];
|
|
34
|
-
let totalChars = 0;
|
|
35
|
-
for (const seg of sorted) {
|
|
36
|
-
if (totalChars + seg.text.length > maxChars) {
|
|
37
|
-
if (totalChars === 0) {
|
|
38
|
-
selected.push({ ...seg, text: seg.text.slice(0, maxChars) });
|
|
39
|
-
totalChars = maxChars;
|
|
40
|
-
}
|
|
41
|
-
break;
|
|
42
|
-
}
|
|
43
|
-
selected.push(seg);
|
|
44
|
-
totalChars += seg.text.length + 2;
|
|
45
|
-
}
|
|
46
|
-
selected.sort((a, b) => a.index - b.index);
|
|
47
|
-
const compressedText = selected.map((s) => s.text).join("\n\n");
|
|
48
|
-
return {
|
|
49
|
-
text: compressedText,
|
|
50
|
-
originalChars: text.length,
|
|
51
|
-
compressedChars: compressedText.length,
|
|
52
|
-
ratio: compressedText.length / text.length,
|
|
53
|
-
segmentsKept: selected.length,
|
|
54
|
-
segmentsTotal: segments.length
|
|
55
|
-
};
|
|
56
|
-
}
|
|
57
|
-
export {
|
|
58
|
-
compact
|
|
59
|
-
};
|
|
60
|
-
//# sourceMappingURL=compact.js.map
|
|
1
|
+
import { cosineSimilarity, segment } from "./text-utils.js";

/**
 * Compresses `text` to at most `maxChars` characters by keeping the segments
 * most semantically similar to `query` (scored via `embedder.embed` plus
 * cosine similarity), then re-joining survivors in their original order.
 *
 * @param embedder - object with an async `embed(text) -> vector` method
 * @param options - { text, query, maxChars = 3000, minScore = 0.3,
 *   segmentation = "paragraph" }
 * @returns summary: compressed text plus char counts, ratio, and segment stats
 */
async function compact(embedder, options) {
  const {
    text,
    query,
    maxChars = 3000,
    minScore = 0.3,
    segmentation = "paragraph"
  } = options;

  // Already within budget — return the text untouched.
  if (text.length <= maxChars) {
    return {
      text,
      originalChars: text.length,
      compressedChars: text.length,
      ratio: 1,
      segmentsKept: 1,
      segmentsTotal: 1
    };
  }

  const segments = segment(text, segmentation);
  if (segments.length === 0) {
    return {
      text: "",
      originalChars: text.length,
      compressedChars: 0,
      ratio: 0,
      segmentsKept: 0,
      segmentsTotal: 0
    };
  }

  // Score every segment against the query (sequential embeds, as published).
  const queryVector = await embedder.embed(query);
  const scored = [];
  for (let index = 0; index < segments.length; index++) {
    const segmentVector = await embedder.embed(segments[index]);
    scored.push({
      text: segments[index],
      score: cosineSimilarity(queryVector, segmentVector),
      index
    });
  }

  // Greedily take the best-scoring segments until the char budget is spent.
  const ranked = scored.filter((s) => s.score >= minScore).sort((a, b) => b.score - a.score);
  const selected = [];
  let totalChars = 0;
  for (const candidate of ranked) {
    if (totalChars + candidate.text.length > maxChars) {
      if (totalChars === 0) {
        // Even the single best segment overflows — keep a truncated slice.
        selected.push({ ...candidate, text: candidate.text.slice(0, maxChars) });
        totalChars = maxChars;
      }
      break;
    }
    selected.push(candidate);
    totalChars += candidate.text.length + 2; // +2 accounts for the "\n\n" join
  }

  // Restore document order before joining.
  selected.sort((a, b) => a.index - b.index);
  const compressedText = selected.map((s) => s.text).join("\n\n");
  return {
    text: compressedText,
    originalChars: text.length,
    compressedChars: compressedText.length,
    ratio: compressedText.length / text.length,
    segmentsKept: selected.length,
    segmentsTotal: segments.length
  };
}

export { compact };
|
|
@@ -1,124 +1 @@
|
|
|
1
|
-
function
|
|
2
|
-
const { input, expression } = options;
|
|
3
|
-
const data = typeof input === "string" ? JSON.parse(input) : input;
|
|
4
|
-
let output;
|
|
5
|
-
if (expression === ".") {
|
|
6
|
-
output = data;
|
|
7
|
-
} else if (expression === "keys") {
|
|
8
|
-
output = Object.keys(asRecord(data));
|
|
9
|
-
} else if (expression === "values") {
|
|
10
|
-
output = Object.values(asRecord(data));
|
|
11
|
-
} else if (expression === "length") {
|
|
12
|
-
output = Array.isArray(data) ? data.length : Object.keys(asRecord(data)).length;
|
|
13
|
-
} else if (expression === "flatten") {
|
|
14
|
-
output = asArray(data).flat();
|
|
15
|
-
} else if (expression === "unique") {
|
|
16
|
-
output = [...new Set(asArray(data))];
|
|
17
|
-
} else if (expression === "sort") {
|
|
18
|
-
output = [...asArray(data)].sort();
|
|
19
|
-
} else if (expression === "reverse") {
|
|
20
|
-
output = [...asArray(data)].reverse();
|
|
21
|
-
} else if (expression.startsWith(".")) {
|
|
22
|
-
output = resolvePath(data, expression.slice(1));
|
|
23
|
-
} else if (expression.startsWith("map(") && expression.endsWith(")")) {
|
|
24
|
-
const field = expression.slice(4, -1).trim();
|
|
25
|
-
const path = field.startsWith(".") ? field.slice(1) : field;
|
|
26
|
-
output = asArray(data).map((item) => resolvePath(item, path));
|
|
27
|
-
} else if (expression.startsWith("select(") && expression.endsWith(")")) {
|
|
28
|
-
output = filterByCondition(asArray(data), expression.slice(7, -1).trim());
|
|
29
|
-
} else if (expression.startsWith("group_by(") && expression.endsWith(")")) {
|
|
30
|
-
const field = expression.slice(9, -1).trim().replace(/^\./, "");
|
|
31
|
-
const groups = {};
|
|
32
|
-
for (const item of asArray(data)) {
|
|
33
|
-
const key = String(resolvePath(item, field) ?? "null");
|
|
34
|
-
if (!groups[key]) {
|
|
35
|
-
groups[key] = [];
|
|
36
|
-
}
|
|
37
|
-
groups[key].push(item);
|
|
38
|
-
}
|
|
39
|
-
output = groups;
|
|
40
|
-
} else if (expression.startsWith("pick(") && expression.endsWith(")")) {
|
|
41
|
-
const fields = expression.slice(5, -1).split(",").map((field) => field.trim().replace(/^\./, "")).filter(Boolean);
|
|
42
|
-
output = Array.isArray(data) ? asArray(data).map((item) => pickFields(item, fields)) : pickFields(data, fields);
|
|
43
|
-
} else {
|
|
44
|
-
throw new Error(
|
|
45
|
-
`Unsupported expression: ${expression}. Supported: ., .path.to.field, keys, values, length, flatten, unique, sort, reverse, map(.field), select(.field == | != | > | < | >= | <= "value"), group_by(.field), pick(.a, .b)`
|
|
46
|
-
);
|
|
47
|
-
}
|
|
48
|
-
return {
|
|
49
|
-
output,
|
|
50
|
-
outputString: typeof output === "string" ? output : JSON.stringify(output, null, 2) ?? "null"
|
|
51
|
-
};
|
|
52
|
-
}
|
|
53
|
-
/**
 * Filter items by a jq-style condition such as `.age >= "18"` or `name == "x"`.
 * Equality/inequality compare the stringified resolved value against the raw
 * condition value; ordering operators compare numerically and reject any
 * non-numeric operand (the item is simply excluded).
 * @param {Array} items - candidate items.
 * @param {string} condition - `.field <op> "value"` (quotes optional).
 * @returns {Array} items satisfying the condition.
 * @throws {Error} when the condition does not match the supported grammar.
 */
function filterByCondition(items, condition) {
  const parsed = condition.match(/^\.?([\w.[\]]+)\s*(==|!=|>=|<=|>|<)\s*"?([^"]*)"?$/);
  if (!parsed) {
    throw new Error(`Unsupported select expression: ${condition}`);
  }
  const [, field, operator, value] = parsed;
  // Numeric comparators, keyed by operator token.
  const ordered = {
    ">": (a, b) => a > b,
    "<": (a, b) => a < b,
    ">=": (a, b) => a >= b,
    "<=": (a, b) => a <= b,
  };
  return items.filter((item) => {
    const resolved = resolvePath(item, field);
    if (operator === "==") return String(resolved) === value;
    if (operator === "!=") return String(resolved) !== value;
    const compare = ordered[operator];
    if (!compare) return false; // unreachable: regex restricts operators
    const left = Number(resolved);
    const right = Number(value);
    if (Number.isNaN(left) || Number.isNaN(right)) return false;
    return compare(left, right);
  });
}
|
|
84
|
-
/**
 * Build a new object containing only the requested (possibly nested) fields,
 * keyed by the field path itself; missing paths resolve to undefined.
 * @param {unknown} input - source value.
 * @param {string[]} fields - dotted field paths.
 * @returns {Object} picked key/value pairs in `fields` order.
 */
function pickFields(input, fields) {
  return Object.fromEntries(fields.map((field) => [field, resolvePath(input, field)]));
}
|
|
91
|
-
/**
 * Walk a dotted/bracketed path (e.g. "a.b[0].c") through a nested value.
 * Property segments require an object; `[n]` segments require an array.
 * Returns undefined as soon as any step cannot be taken; an empty path
 * returns the input unchanged.
 * @param {unknown} input - value to traverse.
 * @param {string} path - dotted path with optional numeric `[n]` indices.
 * @returns {unknown} resolved value, or undefined.
 */
function resolvePath(input, path) {
  if (!path) return input;
  // Tokenize into property names and numeric indices.
  const tokens = [];
  for (const match of path.matchAll(/([^.[\]]+)|\[(\d+)\]/g)) {
    tokens.push(match[1] ?? Number.parseInt(match[2], 10));
  }
  return tokens.reduce((current, token) => {
    if (current == null) return undefined;
    if (typeof token === "number") {
      return Array.isArray(current) ? current[token] : undefined;
    }
    return typeof current === "object" ? current[token] : undefined;
  }, input);
}
|
|
109
|
-
/**
 * Assert that a value is an array and return it unchanged.
 * @param {unknown} value - value to check.
 * @returns {Array} the same array.
 * @throws {Error} when the value is not an array.
 */
function asArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  throw new Error("Operation requires an array input.");
}
|
|
115
|
-
/**
 * Assert that a value is a plain (non-array, non-null) object and return it.
 * @param {unknown} value - value to check.
 * @returns {Object} the same object.
 * @throws {Error} when the value is null, an array, or not an object.
 */
function asRecord(value) {
  const isPlainObject = Boolean(value) && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) {
    throw new Error("Operation requires an object input.");
  }
  return value;
}
|
|
121
|
-
export {
|
|
122
|
-
dataTransform
|
|
123
|
-
};
|
|
124
|
-
//# sourceMappingURL=data-transform.js.map
|
|
1
|
+
/**
 * Evaluate a small jq-like expression against JSON input.
 *
 * Supported expressions: ".", ".path.to.field", "keys", "values", "length",
 * "flatten", "unique", "sort", "reverse", "map(.field)",
 * 'select(.field <op> "value")', "group_by(.field)" and "pick(.a, .b)".
 *
 * @param {{ input: unknown, expression: string }} params - `input` may be a
 *   JSON string (parsed first) or an already-parsed value.
 * @returns {{ output: unknown, outputString: string }} transformed value plus
 *   a printable form: strings pass through, everything else is pretty-printed
 *   JSON ("null" when stringification yields undefined).
 * @throws {Error} for an unsupported expression or an input-type mismatch.
 */
function dataTransform(params) {
  const { input, expression } = params;
  const data = typeof input === "string" ? JSON.parse(input) : input;
  let output;
  if (expression === ".") {
    output = data;
  } else if (expression === "keys") {
    output = Object.keys(asRecord(data));
  } else if (expression === "values") {
    output = Object.values(asRecord(data));
  } else if (expression === "length") {
    output = Array.isArray(data) ? data.length : Object.keys(asRecord(data)).length;
  } else if (expression === "flatten") {
    output = asArray(data).flat();
  } else if (expression === "unique") {
    output = [...new Set(asArray(data))];
  } else if (expression === "sort") {
    output = [...asArray(data)].sort(); // default (lexicographic) sort, as shipped
  } else if (expression === "reverse") {
    output = [...asArray(data)].reverse();
  } else if (expression.startsWith(".")) {
    output = resolvePath(data, expression.slice(1));
  } else if (expression.startsWith("map(") && expression.endsWith(")")) {
    const field = expression.slice(4, -1).trim();
    const path = field.startsWith(".") ? field.slice(1) : field;
    output = asArray(data).map((item) => resolvePath(item, path));
  } else if (expression.startsWith("select(") && expression.endsWith(")")) {
    output = filterByCondition(asArray(data), expression.slice(7, -1).trim());
  } else if (expression.startsWith("group_by(") && expression.endsWith(")")) {
    const field = expression.slice(9, -1).trim().replace(/^\./, "");
    const groups = {};
    for (const item of asArray(data)) {
      // Items whose key resolves to null/undefined are grouped under "null".
      const key = String(resolvePath(item, field) ?? "null");
      if (!groups[key]) groups[key] = [];
      groups[key].push(item);
    }
    output = groups;
  } else if (expression.startsWith("pick(") && expression.endsWith(")")) {
    const fields = expression
      .slice(5, -1)
      .split(",")
      .map((field) => field.trim().replace(/^\./, ""))
      .filter(Boolean);
    output = Array.isArray(data)
      ? data.map((item) => pickFields(item, fields))
      : pickFields(data, fields);
  } else {
    throw new Error(
      `Unsupported expression: ${expression}. \nSupported: ., .path.to.field, keys, values, length, flatten, unique, sort, reverse, map(.field), select(.field == | != | > | < | >= | <= "value"), group_by(.field), pick(.a, .b)`
    );
  }
  return {
    output,
    outputString: typeof output === "string" ? output : JSON.stringify(output, null, 2) ?? "null"
  };
}

/**
 * Filter items by a jq-style condition such as `.v > "2"`.
 * Equality compares stringified values; ordering compares numerically and
 * excludes items with non-numeric operands.
 * @throws {Error} when the condition does not match the supported grammar.
 */
function filterByCondition(items, condition) {
  const parsed = condition.match(/^\.?([\w.[\]]+)\s*(==|!=|>=|<=|>|<)\s*"?([^"]*)"?$/);
  if (!parsed) {
    throw new Error(`Unsupported select expression: ${condition}`);
  }
  const [, field, operator, value] = parsed;
  return items.filter((item) => {
    const resolved = resolvePath(item, field);
    if (operator === "==") return String(resolved) === value;
    if (operator === "!=") return String(resolved) !== value;
    const left = Number(resolved);
    const right = Number(value);
    if (Number.isNaN(left) || Number.isNaN(right)) return false;
    if (operator === ">") return left > right;
    if (operator === "<") return left < right;
    if (operator === ">=") return left >= right;
    return left <= right;
  });
}

/** Build an object with only the requested fields, keyed by field path. */
function pickFields(input, fields) {
  return Object.fromEntries(fields.map((field) => [field, resolvePath(input, field)]));
}

/**
 * Walk a dotted/bracketed path ("a.b[0].c") through a nested value;
 * undefined when any step cannot be taken, input itself for an empty path.
 */
function resolvePath(input, path) {
  if (!path) return input;
  const tokens = [];
  for (const match of path.matchAll(/([^.[\]]+)|\[(\d+)\]/g)) {
    tokens.push(match[1] ?? Number.parseInt(match[2], 10));
  }
  return tokens.reduce((current, token) => {
    if (current == null) return undefined;
    if (typeof token === "number") {
      return Array.isArray(current) ? current[token] : undefined;
    }
    return typeof current === "object" ? current[token] : undefined;
  }, input);
}

/** @throws {Error} unless the value is an array. */
function asArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  throw new Error("Operation requires an array input.");
}

/** @throws {Error} unless the value is a plain (non-array, non-null) object. */
function asRecord(value) {
  const isPlainObject = Boolean(value) && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) {
    throw new Error("Operation requires an object input.");
  }
  return value;
}

export { dataTransform };
|
|
@@ -1,71 +1,2 @@
|
|
|
1
|
-
async function
|
|
2
|
-
|
|
3
|
-
const exportVector = await embedder.embed("export function class const type interface enum");
|
|
4
|
-
const exportChunks = await store.search(exportVector, { limit: limit * 3 });
|
|
5
|
-
const exportPattern = /^export\s+(?:async\s+)?(?:function|class|const|let|interface|type|enum)\s+(\w+)/gm;
|
|
6
|
-
const exports = [];
|
|
7
|
-
for (const chunk of exportChunks) {
|
|
8
|
-
if (!matchesRootPath(chunk.record.sourcePath, rootPath)) continue;
|
|
9
|
-
const content = chunk.record.content;
|
|
10
|
-
exportPattern.lastIndex = 0;
|
|
11
|
-
for (const match of content.matchAll(exportPattern)) {
|
|
12
|
-
const matchIndex = match.index ?? 0;
|
|
13
|
-
const linesBefore = content.slice(0, matchIndex).split("\n").length - 1;
|
|
14
|
-
const kindMatch = content.slice(matchIndex).match(/export\s+(?:async\s+)?(\w+)/);
|
|
15
|
-
exports.push({
|
|
16
|
-
name: match[1],
|
|
17
|
-
path: chunk.record.sourcePath,
|
|
18
|
-
line: chunk.record.startLine + linesBefore,
|
|
19
|
-
kind: kindMatch?.[1] ?? "unknown"
|
|
20
|
-
});
|
|
21
|
-
}
|
|
22
|
-
}
|
|
23
|
-
const uniqueExports = /* @__PURE__ */ new Map();
|
|
24
|
-
for (const exportedSymbol of exports) {
|
|
25
|
-
const key = `${exportedSymbol.path}:${exportedSymbol.name}`;
|
|
26
|
-
if (!uniqueExports.has(key)) uniqueExports.set(key, exportedSymbol);
|
|
27
|
-
}
|
|
28
|
-
const deadSymbols = [];
|
|
29
|
-
for (const exportedSymbol of uniqueExports.values()) {
|
|
30
|
-
const escapedName = escapeRegExp(exportedSymbol.name);
|
|
31
|
-
const importPattern = new RegExp(`import\\s+.*\\b${escapedName}\\b.*from`, "m");
|
|
32
|
-
const reexportPattern = new RegExp(`export\\s+\\{[^}]*\\b${escapedName}\\b`, "m");
|
|
33
|
-
const importResults = await store.ftsSearch(`import ${exportedSymbol.name}`, { limit: 10 });
|
|
34
|
-
const isImported = importResults.some(
|
|
35
|
-
(result) => result.record.sourcePath !== exportedSymbol.path && importPattern.test(result.record.content)
|
|
36
|
-
);
|
|
37
|
-
const isReexported = importResults.some(
|
|
38
|
-
(result) => result.record.sourcePath !== exportedSymbol.path && reexportPattern.test(result.record.content)
|
|
39
|
-
);
|
|
40
|
-
if (!isImported && !isReexported) {
|
|
41
|
-
deadSymbols.push(exportedSymbol);
|
|
42
|
-
}
|
|
43
|
-
}
|
|
44
|
-
deadSymbols.sort(
|
|
45
|
-
(left, right) => left.path === right.path ? left.line - right.line : left.path.localeCompare(right.path)
|
|
46
|
-
);
|
|
47
|
-
return {
|
|
48
|
-
deadSymbols,
|
|
49
|
-
totalExports: uniqueExports.size,
|
|
50
|
-
totalDead: deadSymbols.length
|
|
51
|
-
};
|
|
52
|
-
}
|
|
53
|
-
/**
 * Escape regex metacharacters so a literal string can be embedded in a RegExp.
 * @param {string} value - raw text.
 * @returns {string} regex-safe text.
 */
function escapeRegExp(value) {
  const special = new Set([".", "*", "+", "?", "^", "$", "{", "}", "(", ")", "|", "[", "]", "\\"]);
  let escaped = "";
  for (const ch of value) {
    escaped += special.has(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
|
|
56
|
-
/**
 * Decide whether a chunk's source path falls under rootPath.
 * No rootPath matches everything. Absolute roots (leading slash or a
 * drive-letter prefix) are treated as always matching — NOTE(review): this
 * mirrors the shipped best-effort behavior; confirm it is intentional.
 * @param {string} sourcePath - path recorded on the chunk.
 * @param {string|undefined} rootPath - filter root, possibly relative.
 * @returns {boolean} true when the source path is inside the root.
 */
function matchesRootPath(sourcePath, rootPath) {
  if (!rootPath) {
    return true;
  }
  const root = normalizePath(rootPath).replace(/\/+$/, "");
  const isAbsolute = root.startsWith("/") || /^[A-Za-z]:\//.test(root);
  if (isAbsolute) {
    return true;
  }
  const source = normalizePath(sourcePath);
  return source === root || source.startsWith(`${root}/`);
}
|
|
65
|
-
/**
 * Normalize a path for comparison: backslashes become forward slashes and a
 * single leading "./" is stripped.
 * @param {string} value - raw path.
 * @returns {string} normalized path.
 */
function normalizePath(value) {
  const slashed = value.split("\\").join("/");
  return slashed.startsWith("./") ? slashed.slice(2) : slashed;
}
|
|
68
|
-
export {
|
|
69
|
-
findDeadSymbols
|
|
70
|
-
};
|
|
71
|
-
//# sourceMappingURL=dead-symbols.js.map
|
|
1
|
+
/**
 * Find exported symbols that are never imported or re-exported by any other
 * indexed chunk. Export declarations are discovered via a vector search seeded
 * with export-related keywords, then scanned with a regex; usage is checked
 * with a full-text search per symbol.
 *
 * @param {{ embed(text: string): Promise<unknown> }} embedder - embedding backend.
 * @param {{ search(vector: unknown, opts: object): Promise<Array>,
 *           ftsSearch(query: string, opts: object): Promise<Array> }} store
 * @param {{ rootPath?: string, limit?: number }} [options] - optional path
 *   filter and result budget (default 100; the vector search fetches 3x).
 * @returns {Promise<{ deadSymbols: Array<{name: string, path: string, line: number, kind: string}>,
 *                     totalExports: number, totalDead: number }>}
 */
async function findDeadSymbols(embedder, store, options = {}) {
  const { rootPath, limit = 100 } = options;
  const exportVector = await embedder.embed("export function class const type interface enum");
  const exportChunks = await store.search(exportVector, { limit: limit * 3 });
  const exportPattern = /^export\s+(?:async\s+)?(?:function|class|const|let|interface|type|enum)\s+(\w+)/gm;
  const exports = [];
  for (const chunk of exportChunks) {
    if (!matchesRootPath(chunk.record.sourcePath, rootPath)) continue;
    const content = chunk.record.content;
    exportPattern.lastIndex = 0; // shared /g regex is stateful: reset per chunk
    for (const match of content.matchAll(exportPattern)) {
      const matchIndex = match.index ?? 0;
      // Line of the declaration = chunk start + newlines before the match.
      const linesBefore = content.slice(0, matchIndex).split("\n").length - 1;
      const kindMatch = content.slice(matchIndex).match(/export\s+(?:async\s+)?(\w+)/);
      exports.push({
        name: match[1],
        path: chunk.record.sourcePath,
        line: chunk.record.startLine + linesBefore,
        kind: kindMatch?.[1] ?? "unknown"
      });
    }
  }
  // Deduplicate by (path, name), keeping the first occurrence.
  const uniqueExports = new Map();
  for (const symbol of exports) {
    const key = `${symbol.path}:${symbol.name}`;
    if (!uniqueExports.has(key)) uniqueExports.set(key, symbol);
  }
  const deadSymbols = [];
  for (const symbol of uniqueExports.values()) {
    const escaped = escapeRegExp(symbol.name);
    const importPattern = new RegExp(`import\\s+.*\\b${escaped}\\b.*from`, "m");
    const reexportPattern = new RegExp(`export\\s+\\{[^}]*\\b${escaped}\\b`, "m");
    const candidates = await store.ftsSearch(`import ${symbol.name}`, { limit: 10 });
    // A symbol is alive if any OTHER file imports or re-exports it.
    const referenced = candidates.some(
      (result) =>
        result.record.sourcePath !== symbol.path &&
        (importPattern.test(result.record.content) || reexportPattern.test(result.record.content))
    );
    if (!referenced) {
      deadSymbols.push(symbol);
    }
  }
  deadSymbols.sort((left, right) =>
    left.path === right.path ? left.line - right.line : left.path.localeCompare(right.path)
  );
  return {
    deadSymbols,
    totalExports: uniqueExports.size,
    totalDead: deadSymbols.length
  };
}

/** Escape regex metacharacters so a symbol name can be embedded in a RegExp. */
function escapeRegExp(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

/**
 * Decide whether a chunk's source path falls under rootPath; absolute roots
 * (leading slash or drive letter) always match, as shipped.
 */
function matchesRootPath(sourcePath, rootPath) {
  if (!rootPath) {
    return true;
  }
  const root = normalizePath(rootPath).replace(/\/+$/, "");
  if (root.startsWith("/") || /^[A-Za-z]:\//.test(root)) {
    return true;
  }
  const source = normalizePath(sourcePath);
  return source === root || source.startsWith(`${root}/`);
}

/** Forward-slash a path and strip a single leading "./". */
function normalizePath(value) {
  return value.replace(/\\/g, "/").replace(/^\.\//, "");
}

export { findDeadSymbols };
|