@vpxa/kb 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/package.json +1 -1
- package/packages/analyzers/dist/blast-radius-analyzer.js +13 -114
- package/packages/analyzers/dist/dependency-analyzer.js +11 -425
- package/packages/analyzers/dist/diagram-generator.js +4 -86
- package/packages/analyzers/dist/entry-point-analyzer.js +5 -239
- package/packages/analyzers/dist/index.js +1 -23
- package/packages/analyzers/dist/knowledge-producer.js +24 -113
- package/packages/analyzers/dist/pattern-analyzer.js +5 -359
- package/packages/analyzers/dist/regex-call-graph.js +1 -428
- package/packages/analyzers/dist/structure-analyzer.js +4 -258
- package/packages/analyzers/dist/symbol-analyzer.js +13 -442
- package/packages/analyzers/dist/ts-call-graph.js +1 -160
- package/packages/analyzers/dist/types.js +0 -1
- package/packages/chunker/dist/call-graph-extractor.js +1 -90
- package/packages/chunker/dist/chunker-factory.js +1 -36
- package/packages/chunker/dist/chunker.interface.js +0 -1
- package/packages/chunker/dist/code-chunker.js +14 -134
- package/packages/chunker/dist/generic-chunker.js +5 -72
- package/packages/chunker/dist/index.js +1 -21
- package/packages/chunker/dist/markdown-chunker.js +7 -119
- package/packages/chunker/dist/treesitter-chunker.js +8 -234
- package/packages/cli/dist/commands/analyze.js +3 -112
- package/packages/cli/dist/commands/context-cmds.js +1 -155
- package/packages/cli/dist/commands/environment.js +2 -204
- package/packages/cli/dist/commands/execution.js +1 -137
- package/packages/cli/dist/commands/graph.js +7 -81
- package/packages/cli/dist/commands/init.js +9 -87
- package/packages/cli/dist/commands/knowledge.js +1 -139
- package/packages/cli/dist/commands/search.js +8 -267
- package/packages/cli/dist/commands/system.js +4 -241
- package/packages/cli/dist/commands/workspace.js +2 -388
- package/packages/cli/dist/context.js +1 -14
- package/packages/cli/dist/helpers.js +3 -458
- package/packages/cli/dist/index.d.ts +1 -1
- package/packages/cli/dist/index.js +3 -69
- package/packages/cli/dist/kb-init.js +1 -82
- package/packages/cli/dist/types.js +0 -1
- package/packages/core/dist/constants.js +1 -43
- package/packages/core/dist/content-detector.js +1 -79
- package/packages/core/dist/errors.js +1 -40
- package/packages/core/dist/index.js +1 -9
- package/packages/core/dist/logger.js +1 -34
- package/packages/core/dist/types.js +0 -1
- package/packages/embeddings/dist/embedder.interface.js +0 -1
- package/packages/embeddings/dist/index.js +1 -5
- package/packages/embeddings/dist/onnx-embedder.js +1 -82
- package/packages/indexer/dist/file-hasher.js +1 -13
- package/packages/indexer/dist/filesystem-crawler.js +1 -125
- package/packages/indexer/dist/graph-extractor.js +1 -111
- package/packages/indexer/dist/incremental-indexer.js +1 -278
- package/packages/indexer/dist/index.js +1 -14
- package/packages/server/dist/api.js +1 -9
- package/packages/server/dist/config.js +1 -75
- package/packages/server/dist/curated-manager.js +9 -356
- package/packages/server/dist/index.js +1 -134
- package/packages/server/dist/replay-interceptor.js +1 -38
- package/packages/server/dist/resources/resources.js +2 -40
- package/packages/server/dist/server.js +1 -247
- package/packages/server/dist/tools/analyze.tools.js +1 -288
- package/packages/server/dist/tools/forge.tools.js +11 -499
- package/packages/server/dist/tools/forget.tool.js +3 -39
- package/packages/server/dist/tools/graph.tool.js +5 -110
- package/packages/server/dist/tools/list.tool.js +5 -53
- package/packages/server/dist/tools/lookup.tool.js +8 -51
- package/packages/server/dist/tools/onboard.tool.js +2 -112
- package/packages/server/dist/tools/produce.tool.js +4 -74
- package/packages/server/dist/tools/read.tool.js +4 -47
- package/packages/server/dist/tools/reindex.tool.js +2 -70
- package/packages/server/dist/tools/remember.tool.js +3 -42
- package/packages/server/dist/tools/replay.tool.js +6 -88
- package/packages/server/dist/tools/search.tool.js +17 -327
- package/packages/server/dist/tools/status.tool.js +3 -68
- package/packages/server/dist/tools/toolkit.tools.js +20 -1673
- package/packages/server/dist/tools/update.tool.js +3 -39
- package/packages/server/dist/tools/utility.tools.js +19 -456
- package/packages/store/dist/graph-store.interface.js +0 -1
- package/packages/store/dist/index.js +1 -9
- package/packages/store/dist/lance-store.js +1 -258
- package/packages/store/dist/sqlite-graph-store.js +8 -309
- package/packages/store/dist/store-factory.js +1 -14
- package/packages/store/dist/store.interface.js +0 -1
- package/packages/tools/dist/batch.js +1 -45
- package/packages/tools/dist/changelog.js +2 -112
- package/packages/tools/dist/check.js +2 -59
- package/packages/tools/dist/checkpoint.js +2 -43
- package/packages/tools/dist/codemod.js +2 -69
- package/packages/tools/dist/compact.js +3 -60
- package/packages/tools/dist/data-transform.js +1 -124
- package/packages/tools/dist/dead-symbols.js +2 -71
- package/packages/tools/dist/delegate.js +3 -128
- package/packages/tools/dist/diff-parse.js +3 -153
- package/packages/tools/dist/digest.js +7 -242
- package/packages/tools/dist/encode.js +1 -46
- package/packages/tools/dist/env-info.js +1 -58
- package/packages/tools/dist/eval.js +3 -79
- package/packages/tools/dist/evidence-map.js +3 -203
- package/packages/tools/dist/file-summary.js +2 -106
- package/packages/tools/dist/file-walk.js +1 -75
- package/packages/tools/dist/find-examples.js +3 -48
- package/packages/tools/dist/find.js +1 -120
- package/packages/tools/dist/forge-classify.js +2 -319
- package/packages/tools/dist/forge-ground.js +1 -184
- package/packages/tools/dist/git-context.js +3 -46
- package/packages/tools/dist/graph-query.js +1 -194
- package/packages/tools/dist/health.js +1 -118
- package/packages/tools/dist/http-request.js +1 -58
- package/packages/tools/dist/index.js +1 -273
- package/packages/tools/dist/lane.js +7 -227
- package/packages/tools/dist/measure.js +2 -119
- package/packages/tools/dist/onboard.js +42 -1136
- package/packages/tools/dist/parse-output.js +2 -158
- package/packages/tools/dist/process-manager.js +1 -69
- package/packages/tools/dist/queue.js +2 -126
- package/packages/tools/dist/regex-test.js +1 -39
- package/packages/tools/dist/rename.js +2 -70
- package/packages/tools/dist/replay.js +6 -108
- package/packages/tools/dist/schema-validate.js +1 -141
- package/packages/tools/dist/scope-map.js +1 -72
- package/packages/tools/dist/snippet.js +1 -80
- package/packages/tools/dist/stash.js +2 -60
- package/packages/tools/dist/stratum-card.js +5 -238
- package/packages/tools/dist/symbol.js +3 -87
- package/packages/tools/dist/test-run.js +2 -55
- package/packages/tools/dist/text-utils.js +2 -31
- package/packages/tools/dist/time-utils.js +1 -135
- package/packages/tools/dist/trace.js +2 -114
- package/packages/tools/dist/truncation.js +10 -41
- package/packages/tools/dist/watch.js +1 -61
- package/packages/tools/dist/web-fetch.js +9 -244
- package/packages/tools/dist/web-search.js +1 -46
- package/packages/tools/dist/workset.js +2 -77
- package/packages/tui/dist/App.js +260 -52468
- package/packages/tui/dist/index.js +286 -54551
- package/packages/tui/dist/panels/CuratedPanel.js +211 -34291
- package/packages/tui/dist/panels/LogPanel.js +259 -51703
- package/packages/tui/dist/panels/SearchPanel.js +212 -34824
- package/packages/tui/dist/panels/StatusPanel.js +211 -34304
|
@@ -1,319 +1,2 @@
|
|
|
1
|
-
import {
|
|
2
|
-
|
|
3
|
-
import { fileSummary } from "./file-summary.js";
|
|
4
|
-
const MAX_SCAN_BYTES = 1e5;
|
|
5
|
-
const MAX_SCAN_LINES = 200;
|
|
6
|
-
const SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx", ".mts", ".cts", ".mjs", ".cjs"]);
|
|
7
|
-
const EXCLUDED_DIRECTORIES = /* @__PURE__ */ new Set([
|
|
8
|
-
".git",
|
|
9
|
-
".kb-data",
|
|
10
|
-
".kb-state",
|
|
11
|
-
".turbo",
|
|
12
|
-
".yarn",
|
|
13
|
-
"build",
|
|
14
|
-
"coverage",
|
|
15
|
-
"dist",
|
|
16
|
-
"node_modules"
|
|
17
|
-
]);
|
|
18
|
-
const SECURITY_PATH_PATTERN = /auth|token|permission|acl|encrypt|secret|credential|jwt|oauth|password/i;
|
|
19
|
-
const SECURITY_CONTENT_PATTERN = /\b(hash|sign|verify|bcrypt|jwt|decrypt|secret|password)\b/i;
|
|
20
|
-
const SECURITY_TASK_PATTERN = /auth|security|permission|encrypt|secret|credential/i;
|
|
21
|
-
const SCHEMA_PATH_PATTERN = /types\.ts$|schema\.ts$|contract\.ts$|\.proto$|openapi|swagger|\.graphql$/i;
|
|
22
|
-
const SHARED_PATH_PATTERN = /(?:^|\/)(events|contracts|shared)(?:\/|$)/i;
|
|
23
|
-
const SCHEMA_CONTENT_PATTERN = /export\s+interface\b|export\s+type\b|export\s+const\s+\w*Schema\w*\s*=\s*z\./i;
|
|
24
|
-
const SCHEMA_TASK_PATTERN = /schema|contract|migration|breaking.change|api.change/i;
|
|
25
|
-
const TASK_CRITICAL_PATTERN = /migration|data.?model|multi.?service|breaking|backward.?compat/i;
|
|
26
|
-
const CEREMONIES = {
|
|
27
|
-
floor: {
|
|
28
|
-
ground: "Parasitic \u2014 read target file only",
|
|
29
|
-
build: "Implement directly",
|
|
30
|
-
break: "Skip",
|
|
31
|
-
evidenceMap: "Not required",
|
|
32
|
-
gate: "Self-certify"
|
|
33
|
-
},
|
|
34
|
-
standard: {
|
|
35
|
-
ground: "Scope map + blast radius + constraint seed",
|
|
36
|
-
build: "TDD \u2014 test first, then implement",
|
|
37
|
-
break: "Error paths + edge cases",
|
|
38
|
-
evidenceMap: "3-8 critical-path entries",
|
|
39
|
-
gate: "YIELD/HOLD evaluation"
|
|
40
|
-
},
|
|
41
|
-
critical: {
|
|
42
|
-
ground: "Full scope map + blast radius + trace + patterns + constraint pack",
|
|
43
|
-
build: "TDD + contract verification + cross-service validation",
|
|
44
|
-
break: "Error paths + edge cases + security dimensions + data-flow verification",
|
|
45
|
-
evidenceMap: "Comprehensive \u2014 all critical-path claims with receipts",
|
|
46
|
-
gate: "Strict YIELD/HOLD/HARD_BLOCK evaluation"
|
|
47
|
-
}
|
|
48
|
-
};
|
|
49
|
-
async function forgeClassify(options) {
|
|
50
|
-
const rootPath = resolve(options.rootPath);
|
|
51
|
-
const task = options.task.trim();
|
|
52
|
-
const files = options.files.map((filePath) => resolve(rootPath, filePath));
|
|
53
|
-
const triggers = [];
|
|
54
|
-
let hasSecurityPath = false;
|
|
55
|
-
let hasSchemaChange = false;
|
|
56
|
-
for (const filePath of files) {
|
|
57
|
-
const normalizedPath = normalizeForMatch(filePath, rootPath);
|
|
58
|
-
const scanText = readScanText(filePath);
|
|
59
|
-
if (SECURITY_PATH_PATTERN.test(normalizedPath) || SECURITY_CONTENT_PATTERN.test(scanText) || SECURITY_TASK_PATTERN.test(task)) {
|
|
60
|
-
hasSecurityPath = true;
|
|
61
|
-
}
|
|
62
|
-
if (SCHEMA_PATH_PATTERN.test(normalizedPath) || SHARED_PATH_PATTERN.test(normalizedPath) || SCHEMA_TASK_PATTERN.test(task) || SCHEMA_CONTENT_PATTERN.test(scanText) || await hasExportedContractShape(filePath)) {
|
|
63
|
-
hasSchemaChange = true;
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
if (hasSecurityPath) {
|
|
67
|
-
addTrigger(triggers, {
|
|
68
|
-
rule: "security-path",
|
|
69
|
-
detail: "Security/auth path, task, or content matched security heuristics",
|
|
70
|
-
source: "security_auth"
|
|
71
|
-
});
|
|
72
|
-
}
|
|
73
|
-
if (hasSchemaChange) {
|
|
74
|
-
addTrigger(triggers, {
|
|
75
|
-
rule: "schema-contract",
|
|
76
|
-
detail: "Schema or contract path, task, or export shape matched contract heuristics",
|
|
77
|
-
source: "schema_contract"
|
|
78
|
-
});
|
|
79
|
-
}
|
|
80
|
-
const blastRadius = countDirectImportBlastRadius(files, rootPath);
|
|
81
|
-
if (blastRadius.affectedFiles > 5) {
|
|
82
|
-
addTrigger(triggers, {
|
|
83
|
-
rule: "blast-radius-importers",
|
|
84
|
-
detail: `${blastRadius.affectedFiles} affected files via direct import scanning`,
|
|
85
|
-
source: "blast_radius"
|
|
86
|
-
});
|
|
87
|
-
}
|
|
88
|
-
const packagesCrossed = [
|
|
89
|
-
...new Set(
|
|
90
|
-
files.map((filePath) => findPackageName(filePath, rootPath)).filter((packageName) => Boolean(packageName))
|
|
91
|
-
)
|
|
92
|
-
].sort();
|
|
93
|
-
if (packagesCrossed.length >= 2) {
|
|
94
|
-
addTrigger(triggers, {
|
|
95
|
-
rule: "cross-package-boundary",
|
|
96
|
-
detail: `Files span ${packagesCrossed.length} packages: ${packagesCrossed.join(", ")}`,
|
|
97
|
-
source: "cross_package"
|
|
98
|
-
});
|
|
99
|
-
}
|
|
100
|
-
if (TASK_CRITICAL_PATTERN.test(task)) {
|
|
101
|
-
addTrigger(triggers, {
|
|
102
|
-
rule: "task-hint-critical",
|
|
103
|
-
detail: "Task description matched migration or compatibility criticality hints",
|
|
104
|
-
source: "task_hint"
|
|
105
|
-
});
|
|
106
|
-
}
|
|
107
|
-
const tier = triggers.length > 0 ? "critical" : files.length === 1 ? "floor" : "standard";
|
|
108
|
-
return {
|
|
109
|
-
tier,
|
|
110
|
-
triggers,
|
|
111
|
-
packagesCrossed,
|
|
112
|
-
hasSchemaChange,
|
|
113
|
-
hasSecurityPath,
|
|
114
|
-
typedUnknownSeeds: buildTypedUnknownSeeds(triggers),
|
|
115
|
-
ceremony: CEREMONIES[tier]
|
|
116
|
-
};
|
|
117
|
-
}
|
|
118
|
-
function addTrigger(triggers, trigger) {
|
|
119
|
-
if (triggers.some(
|
|
120
|
-
(existing) => existing.rule === trigger.rule && existing.source === trigger.source
|
|
121
|
-
)) {
|
|
122
|
-
return;
|
|
123
|
-
}
|
|
124
|
-
triggers.push(trigger);
|
|
125
|
-
}
|
|
126
|
-
function normalizeForMatch(filePath, rootPath) {
|
|
127
|
-
const absolutePath = resolve(rootPath, filePath);
|
|
128
|
-
const displayPath = relative(rootPath, absolutePath);
|
|
129
|
-
const safePath = displayPath && !displayPath.startsWith("..") ? displayPath : absolutePath;
|
|
130
|
-
return safePath.replace(/\\/g, "/");
|
|
131
|
-
}
|
|
132
|
-
function canScanFile(filePath) {
|
|
133
|
-
if (!existsSync(filePath)) {
|
|
134
|
-
return false;
|
|
135
|
-
}
|
|
136
|
-
try {
|
|
137
|
-
return statSync(filePath).size <= MAX_SCAN_BYTES;
|
|
138
|
-
} catch {
|
|
139
|
-
return false;
|
|
140
|
-
}
|
|
141
|
-
}
|
|
142
|
-
function readScanText(filePath) {
|
|
143
|
-
if (!canScanFile(filePath)) {
|
|
144
|
-
return "";
|
|
145
|
-
}
|
|
146
|
-
try {
|
|
147
|
-
return readFileSync(filePath, "utf-8").split(/\r?\n/).slice(0, MAX_SCAN_LINES).join("\n");
|
|
148
|
-
} catch {
|
|
149
|
-
return "";
|
|
150
|
-
}
|
|
151
|
-
}
|
|
152
|
-
async function hasExportedContractShape(filePath) {
|
|
153
|
-
if (!canScanFile(filePath)) {
|
|
154
|
-
return false;
|
|
155
|
-
}
|
|
156
|
-
try {
|
|
157
|
-
const summary = await fileSummary({ path: filePath });
|
|
158
|
-
const exportedNames = new Set(summary.exports);
|
|
159
|
-
return summary.interfaces.some((entry) => exportedNames.has(entry.name)) || summary.types.some((entry) => exportedNames.has(entry.name));
|
|
160
|
-
} catch {
|
|
161
|
-
return false;
|
|
162
|
-
}
|
|
163
|
-
}
|
|
164
|
-
function countDirectImportBlastRadius(targetFiles, rootPath) {
|
|
165
|
-
const existingTargets = new Set(targetFiles.filter((filePath) => existsSync(filePath)));
|
|
166
|
-
if (existingTargets.size === 0) {
|
|
167
|
-
return { affectedFiles: targetFiles.length, importers: [] };
|
|
168
|
-
}
|
|
169
|
-
const importers = /* @__PURE__ */ new Set();
|
|
170
|
-
for (const candidateFile of collectSourceFiles(rootPath)) {
|
|
171
|
-
if (existingTargets.has(candidateFile) || !canScanFile(candidateFile)) {
|
|
172
|
-
continue;
|
|
173
|
-
}
|
|
174
|
-
const scanText = readScanText(candidateFile);
|
|
175
|
-
if (!scanText) {
|
|
176
|
-
continue;
|
|
177
|
-
}
|
|
178
|
-
const specifiers = extractImportSpecifiers(scanText);
|
|
179
|
-
if (specifiers.some((specifier) => resolvesToTarget(specifier, candidateFile, existingTargets))) {
|
|
180
|
-
importers.add(candidateFile);
|
|
181
|
-
}
|
|
182
|
-
}
|
|
183
|
-
return {
|
|
184
|
-
affectedFiles: targetFiles.length + importers.size,
|
|
185
|
-
importers: [...importers].map((filePath) => relative(rootPath, filePath).replace(/\\/g, "/"))
|
|
186
|
-
};
|
|
187
|
-
}
|
|
188
|
-
function collectSourceFiles(rootPath) {
|
|
189
|
-
const files = [];
|
|
190
|
-
function walk(currentPath) {
|
|
191
|
-
let entries = [];
|
|
192
|
-
try {
|
|
193
|
-
entries = readdirSync(currentPath);
|
|
194
|
-
} catch {
|
|
195
|
-
return;
|
|
196
|
-
}
|
|
197
|
-
for (const entry of entries) {
|
|
198
|
-
if (EXCLUDED_DIRECTORIES.has(entry)) {
|
|
199
|
-
continue;
|
|
200
|
-
}
|
|
201
|
-
const fullPath = resolve(currentPath, entry);
|
|
202
|
-
let entryStat;
|
|
203
|
-
try {
|
|
204
|
-
entryStat = statSync(fullPath);
|
|
205
|
-
} catch {
|
|
206
|
-
continue;
|
|
207
|
-
}
|
|
208
|
-
if (entryStat.isDirectory()) {
|
|
209
|
-
walk(fullPath);
|
|
210
|
-
continue;
|
|
211
|
-
}
|
|
212
|
-
if (SOURCE_EXTENSIONS.has(extname(entry).toLowerCase())) {
|
|
213
|
-
files.push(fullPath);
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
}
|
|
217
|
-
walk(rootPath);
|
|
218
|
-
return files;
|
|
219
|
-
}
|
|
220
|
-
function extractImportSpecifiers(content) {
|
|
221
|
-
const specifiers = /* @__PURE__ */ new Set();
|
|
222
|
-
const pattern = /(?:from\s+['"]([^'"]+)['"]|import\s+['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g;
|
|
223
|
-
for (const match of content.matchAll(pattern)) {
|
|
224
|
-
const specifier = match[1] ?? match[2] ?? match[3];
|
|
225
|
-
if (specifier) {
|
|
226
|
-
specifiers.add(specifier);
|
|
227
|
-
}
|
|
228
|
-
}
|
|
229
|
-
return [...specifiers];
|
|
230
|
-
}
|
|
231
|
-
function resolvesToTarget(specifier, importerFile, targets) {
|
|
232
|
-
if (!specifier.startsWith(".")) {
|
|
233
|
-
return false;
|
|
234
|
-
}
|
|
235
|
-
const basePath = resolve(dirname(importerFile), specifier);
|
|
236
|
-
const candidates = [
|
|
237
|
-
basePath,
|
|
238
|
-
`${basePath}.ts`,
|
|
239
|
-
`${basePath}.tsx`,
|
|
240
|
-
`${basePath}.js`,
|
|
241
|
-
`${basePath}.jsx`,
|
|
242
|
-
`${basePath}.mts`,
|
|
243
|
-
`${basePath}.cts`,
|
|
244
|
-
`${basePath}.mjs`,
|
|
245
|
-
`${basePath}.cjs`,
|
|
246
|
-
resolve(basePath, "index.ts"),
|
|
247
|
-
resolve(basePath, "index.tsx"),
|
|
248
|
-
resolve(basePath, "index.js"),
|
|
249
|
-
resolve(basePath, "index.jsx")
|
|
250
|
-
];
|
|
251
|
-
return candidates.some((candidate) => targets.has(candidate));
|
|
252
|
-
}
|
|
253
|
-
function buildTypedUnknownSeeds(triggers) {
|
|
254
|
-
return triggers.map((trigger) => {
|
|
255
|
-
switch (trigger.source) {
|
|
256
|
-
case "security_auth":
|
|
257
|
-
return {
|
|
258
|
-
description: "Verify auth and security assumptions before yielding",
|
|
259
|
-
type: "contract",
|
|
260
|
-
suggestedTool: "kb_search"
|
|
261
|
-
};
|
|
262
|
-
case "schema_contract":
|
|
263
|
-
return {
|
|
264
|
-
description: "Confirm schema and contract compatibility",
|
|
265
|
-
type: "contract",
|
|
266
|
-
suggestedTool: "kb_schema_validate"
|
|
267
|
-
};
|
|
268
|
-
case "blast_radius":
|
|
269
|
-
return {
|
|
270
|
-
description: "Inspect affected importers before delivery",
|
|
271
|
-
type: "impact",
|
|
272
|
-
suggestedTool: "kb_blast_radius"
|
|
273
|
-
};
|
|
274
|
-
case "cross_package":
|
|
275
|
-
return {
|
|
276
|
-
description: "Assess downstream package impact across boundaries",
|
|
277
|
-
type: "impact",
|
|
278
|
-
suggestedTool: "kb_blast_radius"
|
|
279
|
-
};
|
|
280
|
-
case "task_hint":
|
|
281
|
-
return {
|
|
282
|
-
description: "Check established conventions for migrations or compatibility work",
|
|
283
|
-
type: "convention",
|
|
284
|
-
suggestedTool: "kb_find_examples"
|
|
285
|
-
};
|
|
286
|
-
default:
|
|
287
|
-
return {
|
|
288
|
-
description: "No explicit unknown routing required",
|
|
289
|
-
type: "freshness",
|
|
290
|
-
suggestedTool: "kb_lookup"
|
|
291
|
-
};
|
|
292
|
-
}
|
|
293
|
-
});
|
|
294
|
-
}
|
|
295
|
-
function findPackageName(filePath, rootPath) {
|
|
296
|
-
let dir = dirname(resolve(rootPath, filePath));
|
|
297
|
-
const root = resolve(rootPath);
|
|
298
|
-
while (dir.length >= root.length) {
|
|
299
|
-
const packageJsonPath = resolve(dir, "package.json");
|
|
300
|
-
if (existsSync(packageJsonPath)) {
|
|
301
|
-
try {
|
|
302
|
-
const pkg = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
|
|
303
|
-
return pkg.name ?? relative(root, dir).replace(/\\/g, "/");
|
|
304
|
-
} catch {
|
|
305
|
-
return relative(root, dir).replace(/\\/g, "/");
|
|
306
|
-
}
|
|
307
|
-
}
|
|
308
|
-
const parent = dirname(dir);
|
|
309
|
-
if (parent === dir) {
|
|
310
|
-
break;
|
|
311
|
-
}
|
|
312
|
-
dir = parent;
|
|
313
|
-
}
|
|
314
|
-
return void 0;
|
|
315
|
-
}
|
|
316
|
-
export {
|
|
317
|
-
forgeClassify
|
|
318
|
-
};
|
|
319
|
-
//# sourceMappingURL=forge-classify.js.map
|
|
1
|
+
import{existsSync as g,readdirSync as _,readFileSync as S,statSync as k}from"node:fs";import{dirname as m,extname as C,relative as p,resolve as a}from"node:path";import{fileSummary as w}from"./file-summary.js";const E=1e5,x=200,v=new Set([".ts",".tsx",".js",".jsx",".mts",".cts",".mjs",".cjs"]),A=new Set([".git",".kb-data",".kb-state",".turbo",".yarn","build","coverage","dist","node_modules"]),N=/auth|token|permission|acl|encrypt|secret|credential|jwt|oauth|password/i,R=/\b(hash|sign|verify|bcrypt|jwt|decrypt|secret|password)\b/i,P=/auth|security|permission|encrypt|secret|credential/i,F=/types\.ts$|schema\.ts$|contract\.ts$|\.proto$|openapi|swagger|\.graphql$/i,U=/(?:^|\/)(events|contracts|shared)(?:\/|$)/i,$=/export\s+interface\b|export\s+type\b|export\s+const\s+\w*Schema\w*\s*=\s*z\./i,j=/schema|contract|migration|breaking.change|api.change/i,I=/migration|data.?model|multi.?service|breaking|backward.?compat/i,D={floor:{ground:"Parasitic \u2014 read target file only",build:"Implement directly",break:"Skip",evidenceMap:"Not required",gate:"Self-certify"},standard:{ground:"Scope map + blast radius + constraint seed",build:"TDD \u2014 test first, then implement",break:"Error paths + edge cases",evidenceMap:"3-8 critical-path entries",gate:"YIELD/HOLD evaluation"},critical:{ground:"Full scope map + blast radius + trace + patterns + constraint pack",build:"TDD + contract verification + cross-service validation",break:"Error paths + edge cases + security dimensions + data-flow verification",evidenceMap:"Comprehensive \u2014 all critical-path claims with receipts",gate:"Strict YIELD/HOLD/HARD_BLOCK evaluation"}};async function W(t){const s=a(t.rootPath),r=t.task.trim(),e=t.files.map(c=>a(s,c)),n=[];let i=!1,o=!1;for(const c of e){const f=O(c,s),T=b(c);(N.test(f)||R.test(T)||P.test(r))&&(i=!0),(F.test(f)||U.test(f)||j.test(r)||$.test(T)||await M(c))&&(o=!0)}i&&l(n,{rule:"security-path",detail:"Security/auth path, task, or content matched security 
heuristics",source:"security_auth"}),o&&l(n,{rule:"schema-contract",detail:"Schema or contract path, task, or export shape matched contract heuristics",source:"schema_contract"});const d=H(e,s);d.affectedFiles>5&&l(n,{rule:"blast-radius-importers",detail:`${d.affectedFiles} affected files via direct import scanning`,source:"blast_radius"});const u=[...new Set(e.map(c=>B(c,s)).filter(c=>!!c))].sort();u.length>=2&&l(n,{rule:"cross-package-boundary",detail:`Files span ${u.length} packages: ${u.join(", ")}`,source:"cross_package"}),I.test(r)&&l(n,{rule:"task-hint-critical",detail:"Task description matched migration or compatibility criticality hints",source:"task_hint"});const y=n.length>0?"critical":e.length===1?"floor":"standard";return{tier:y,triggers:n,packagesCrossed:u,hasSchemaChange:o,hasSecurityPath:i,typedUnknownSeeds:q(n),ceremony:D[y]}}function l(t,s){t.some(r=>r.rule===s.rule&&r.source===s.source)||t.push(s)}function O(t,s){const r=a(s,t),e=p(s,r);return(e&&!e.startsWith("..")?e:r).replace(/\\/g,"/")}function h(t){if(!g(t))return!1;try{return k(t).size<=E}catch{return!1}}function b(t){if(!h(t))return"";try{return S(t,"utf-8").split(/\r?\n/).slice(0,x).join(`
|
|
2
|
+
`)}catch{return""}}async function M(t){if(!h(t))return!1;try{const s=await w({path:t}),r=new Set(s.exports);return s.interfaces.some(e=>r.has(e.name))||s.types.some(e=>r.has(e.name))}catch{return!1}}function H(t,s){const r=new Set(t.filter(n=>g(n)));if(r.size===0)return{affectedFiles:t.length,importers:[]};const e=new Set;for(const n of L(s)){if(r.has(n)||!h(n))continue;const i=b(n);if(!i)continue;z(i).some(d=>Y(d,n,r))&&e.add(n)}return{affectedFiles:t.length+e.size,importers:[...e].map(n=>p(s,n).replace(/\\/g,"/"))}}function L(t){const s=[];function r(e){let n=[];try{n=_(e)}catch{return}for(const i of n){if(A.has(i))continue;const o=a(e,i);let d;try{d=k(o)}catch{continue}if(d.isDirectory()){r(o);continue}v.has(C(i).toLowerCase())&&s.push(o)}}return r(t),s}function z(t){const s=new Set,r=/(?:from\s+['"]([^'"]+)['"]|import\s+['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g;for(const e of t.matchAll(r)){const n=e[1]??e[2]??e[3];n&&s.add(n)}return[...s]}function Y(t,s,r){if(!t.startsWith("."))return!1;const e=a(m(s),t);return[e,`${e}.ts`,`${e}.tsx`,`${e}.js`,`${e}.jsx`,`${e}.mts`,`${e}.cts`,`${e}.mjs`,`${e}.cjs`,a(e,"index.ts"),a(e,"index.tsx"),a(e,"index.js"),a(e,"index.jsx")].some(i=>r.has(i))}function q(t){return t.map(s=>{switch(s.source){case"security_auth":return{description:"Verify auth and security assumptions before yielding",type:"contract",suggestedTool:"kb_search"};case"schema_contract":return{description:"Confirm schema and contract compatibility",type:"contract",suggestedTool:"kb_schema_validate"};case"blast_radius":return{description:"Inspect affected importers before delivery",type:"impact",suggestedTool:"kb_blast_radius"};case"cross_package":return{description:"Assess downstream package impact across boundaries",type:"impact",suggestedTool:"kb_blast_radius"};case"task_hint":return{description:"Check established conventions for migrations or compatibility work",type:"convention",suggestedTool:"kb_find_examples"};default:return{description:"No 
explicit unknown routing required",type:"freshness",suggestedTool:"kb_lookup"}}})}function B(t,s){let r=m(a(s,t));const e=a(s);for(;r.length>=e.length;){const n=a(r,"package.json");if(g(n))try{return JSON.parse(S(n,"utf-8")).name??p(e,r).replace(/\\/g,"/")}catch{return p(e,r).replace(/\\/g,"/")}const i=m(r);if(i===r)break;r=i}}export{W as forgeClassify};
|
|
@@ -1,184 +1 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { fileSummary } from "./file-summary.js";
|
|
3
|
-
import { forgeClassify } from "./forge-classify.js";
|
|
4
|
-
import { scopeMap } from "./scope-map.js";
|
|
5
|
-
import { estimateTokens } from "./text-utils.js";
|
|
6
|
-
const DEFAULT_MAX_CONSTRAINTS = 3;
|
|
7
|
-
const CONSTRAINT_SNIPPET_CHARS = 200;
|
|
8
|
-
const CEREMONY_BY_TIER = {
|
|
9
|
-
floor: {
|
|
10
|
-
ground: "Parasitic \u2014 read target file only",
|
|
11
|
-
build: "Implement directly",
|
|
12
|
-
break: "Skip",
|
|
13
|
-
evidenceMap: "Not required",
|
|
14
|
-
gate: "Self-certify"
|
|
15
|
-
},
|
|
16
|
-
standard: {
|
|
17
|
-
ground: "Scope map + blast radius + constraint seed",
|
|
18
|
-
build: "TDD \u2014 test first, then implement",
|
|
19
|
-
break: "Error paths + edge cases",
|
|
20
|
-
evidenceMap: "3-8 critical-path entries",
|
|
21
|
-
gate: "YIELD/HOLD evaluation"
|
|
22
|
-
},
|
|
23
|
-
critical: {
|
|
24
|
-
ground: "Full scope map + blast radius + trace + patterns + constraint pack",
|
|
25
|
-
build: "TDD + contract verification + cross-service validation",
|
|
26
|
-
break: "Error paths + edge cases + security dimensions + data-flow verification",
|
|
27
|
-
evidenceMap: "Comprehensive \u2014 all critical-path claims with receipts",
|
|
28
|
-
gate: "Strict YIELD/HOLD/HARD_BLOCK evaluation"
|
|
29
|
-
}
|
|
30
|
-
};
|
|
31
|
-
async function forgeGround(embedder, store, options) {
|
|
32
|
-
const maxConstraints = options.maxConstraints ?? DEFAULT_MAX_CONSTRAINTS;
|
|
33
|
-
const classifyResult = await classifyTier(options);
|
|
34
|
-
const fileSummaries = await summarizeFiles(options.files);
|
|
35
|
-
if (classifyResult.tier === "floor") {
|
|
36
|
-
return finalizeResult({
|
|
37
|
-
tier: classifyResult.tier,
|
|
38
|
-
classifyTriggers: classifyResult.classifyTriggers,
|
|
39
|
-
scopeMap: null,
|
|
40
|
-
typedUnknownSeeds: classifyResult.typedUnknownSeeds,
|
|
41
|
-
constraints: [],
|
|
42
|
-
fileSummaries,
|
|
43
|
-
evidenceMapTaskId: null,
|
|
44
|
-
ceremony: classifyResult.ceremony
|
|
45
|
-
});
|
|
46
|
-
}
|
|
47
|
-
const [scopeMapResult, constraints, evidenceMapTaskId] = await Promise.all([
|
|
48
|
-
loadScopeMap(embedder, store, options.task, classifyResult.tier),
|
|
49
|
-
loadConstraints(embedder, store, options.task, maxConstraints),
|
|
50
|
-
createEvidenceMap(
|
|
51
|
-
options.rootPath,
|
|
52
|
-
options.taskId ?? generateTaskId(options.task),
|
|
53
|
-
classifyResult.tier
|
|
54
|
-
)
|
|
55
|
-
]);
|
|
56
|
-
return finalizeResult({
|
|
57
|
-
tier: classifyResult.tier,
|
|
58
|
-
classifyTriggers: classifyResult.classifyTriggers,
|
|
59
|
-
scopeMap: scopeMapResult,
|
|
60
|
-
typedUnknownSeeds: classifyResult.typedUnknownSeeds,
|
|
61
|
-
constraints,
|
|
62
|
-
fileSummaries,
|
|
63
|
-
evidenceMapTaskId,
|
|
64
|
-
ceremony: classifyResult.ceremony
|
|
65
|
-
});
|
|
66
|
-
}
|
|
67
|
-
async function classifyTier(options) {
|
|
68
|
-
if (options.forceTier) {
|
|
69
|
-
return {
|
|
70
|
-
tier: options.forceTier,
|
|
71
|
-
classifyTriggers: [],
|
|
72
|
-
typedUnknownSeeds: [],
|
|
73
|
-
ceremony: getCeremony(options.forceTier)
|
|
74
|
-
};
|
|
75
|
-
}
|
|
76
|
-
try {
|
|
77
|
-
const result = await forgeClassify({
|
|
78
|
-
files: options.files,
|
|
79
|
-
task: options.task,
|
|
80
|
-
rootPath: options.rootPath
|
|
81
|
-
});
|
|
82
|
-
return {
|
|
83
|
-
tier: result.tier,
|
|
84
|
-
classifyTriggers: result.triggers,
|
|
85
|
-
typedUnknownSeeds: result.typedUnknownSeeds,
|
|
86
|
-
ceremony: result.ceremony
|
|
87
|
-
};
|
|
88
|
-
} catch {
|
|
89
|
-
return {
|
|
90
|
-
tier: "standard",
|
|
91
|
-
classifyTriggers: [],
|
|
92
|
-
typedUnknownSeeds: [],
|
|
93
|
-
ceremony: getCeremony("standard")
|
|
94
|
-
};
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
async function loadScopeMap(embedder, store, task, tier) {
|
|
98
|
-
try {
|
|
99
|
-
return await scopeMap(embedder, store, {
|
|
100
|
-
task,
|
|
101
|
-
maxFiles: tier === "critical" ? 20 : 10
|
|
102
|
-
});
|
|
103
|
-
} catch {
|
|
104
|
-
return null;
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
async function loadConstraints(embedder, store, task, maxConstraints) {
|
|
108
|
-
try {
|
|
109
|
-
const query = `decision pattern convention ${task}`;
|
|
110
|
-
const vector = typeof embedder.embedQuery === "function" ? await embedder.embedQuery(query) : await embedder.embed(query);
|
|
111
|
-
const results = await store.search(vector, {
|
|
112
|
-
limit: maxConstraints,
|
|
113
|
-
origin: "curated"
|
|
114
|
-
});
|
|
115
|
-
return results.slice(0, maxConstraints).map((result) => toConstraintRef(result));
|
|
116
|
-
} catch {
|
|
117
|
-
return [];
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
async function summarizeFiles(files) {
|
|
121
|
-
return Promise.all(files.map(async (path) => summarizeFile(path)));
|
|
122
|
-
}
|
|
123
|
-
async function summarizeFile(path) {
|
|
124
|
-
try {
|
|
125
|
-
const summary = await fileSummary({ path });
|
|
126
|
-
return toGroundFileSummary(summary);
|
|
127
|
-
} catch (error) {
|
|
128
|
-
return {
|
|
129
|
-
path,
|
|
130
|
-
exports: [],
|
|
131
|
-
functions: [],
|
|
132
|
-
lines: 0,
|
|
133
|
-
error: error instanceof Error ? error.message : "Unable to summarize file"
|
|
134
|
-
};
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
async function createEvidenceMap(rootPath, taskId, tier) {
|
|
138
|
-
try {
|
|
139
|
-
evidenceMap({ action: "create", taskId, tier }, rootPath);
|
|
140
|
-
return taskId;
|
|
141
|
-
} catch {
|
|
142
|
-
return null;
|
|
143
|
-
}
|
|
144
|
-
}
|
|
145
|
-
/**
 * Convert a vector-store search hit into a constraint reference:
 * source path, compacted content snippet, and the hit's relevance score.
 */
function toConstraintRef(result) {
  const { record, score } = result;
  return {
    source: record.sourcePath,
    snippet: compactSnippet(record.content),
    relevance: score
  };
}
|
|
152
|
-
/**
 * Collapse all whitespace runs to single spaces and cap the result at
 * CONSTRAINT_SNIPPET_CHARS, appending "..." when truncated.
 */
function compactSnippet(content) {
  const flattened = content.replace(/\s+/g, " ").trim();
  if (flattened.length > CONSTRAINT_SNIPPET_CHARS) {
    const head = flattened.slice(0, CONSTRAINT_SNIPPET_CHARS - 3).trimEnd();
    return `${head}...`;
  }
  return flattened;
}
|
|
159
|
-
/**
 * Project a full file summary down to the ground-report shape:
 * keeps path/exports/lines and flattens function records to their names.
 */
function toGroundFileSummary(summary) {
  const { path, exports: exportedNames, lines } = summary;
  const functionNames = summary.functions.map(({ name }) => name);
  return {
    path,
    exports: exportedNames,
    functions: functionNames,
    lines
  };
}
|
|
167
|
-
/** Return a defensive shallow copy of the ceremony description for a tier. */
function getCeremony(tier) {
  return Object.assign({}, CEREMONY_BY_TIER[tier]);
}
|
|
170
|
-
/**
 * Attach an estimated token count (computed from the JSON serialization of
 * the result itself, before the estimate is added) to the ground result.
 */
function finalizeResult(result) {
  const estimatedTokens = estimateTokens(JSON.stringify(result));
  return { ...result, estimatedTokens };
}
|
|
176
|
-
/**
 * Build a human-readable task id: up to five slug words from the task text,
 * followed by a base-36 timestamp. Falls back to "task" when no usable
 * word survives sanitization.
 */
function generateTaskId(task) {
  const words = task
    .toLowerCase()
    .replace(/[^a-z0-9\s]/g, " ")
    .split(/\s+/)
    .filter(Boolean);
  const slug = words.slice(0, 5).join("-") || "task";
  const timestamp = Date.now().toString(36);
  return `${slug}-${timestamp}`;
}
|
|
181
|
-
export {
|
|
182
|
-
forgeGround
|
|
183
|
-
};
|
|
184
|
-
//# sourceMappingURL=forge-ground.js.map
|
|
1
|
+
// Minified build of forge-ground.js (kept byte-identical; comments only).
// Identifier map, as visible in this code: D = forgeGround (exported),
// k = tier classification (forgeClassify wrapper with "standard" fallback),
// R = scope-map loader, w = constraint loader, F/b = file summarizers,
// h = evidence-map creator, v = constraint-ref builder, I = snippet compactor,
// M = file-summary projector, l = ceremony copier, u = result finalizer
// (adds estimateTokens of the JSON), C = task-id generator.
// Constants: S = 3 (default max constraints), c = 200 (snippet char cap),
// T = ceremony descriptions keyed by tier (floor/standard/critical).
import{evidenceMap as g}from"./evidence-map.js";import{fileSummary as m}from"./file-summary.js";import{forgeClassify as p}from"./forge-classify.js";import{scopeMap as f}from"./scope-map.js";import{estimateTokens as y}from"./text-utils.js";const S=3,c=200,T={floor:{ground:"Parasitic \u2014 read target file only",build:"Implement directly",break:"Skip",evidenceMap:"Not required",gate:"Self-certify"},standard:{ground:"Scope map + blast radius + constraint seed",build:"TDD \u2014 test first, then implement",break:"Error paths + edge cases",evidenceMap:"3-8 critical-path entries",gate:"YIELD/HOLD evaluation"},critical:{ground:"Full scope map + blast radius + trace + patterns + constraint pack",build:"TDD + contract verification + cross-service validation",break:"Error paths + edge cases + security dimensions + data-flow verification",evidenceMap:"Comprehensive \u2014 all critical-path claims with receipts",gate:"Strict YIELD/HOLD/HARD_BLOCK evaluation"}};async function D(e,r,t){const i=t.maxConstraints??S,n=await k(t),s=await F(t.files);if(n.tier==="floor")return u({tier:n.tier,classifyTriggers:n.classifyTriggers,scopeMap:null,typedUnknownSeeds:n.typedUnknownSeeds,constraints:[],fileSummaries:s,evidenceMapTaskId:null,ceremony:n.ceremony});const[a,o,d]=await Promise.all([R(e,r,t.task,n.tier),w(e,r,t.task,i),h(t.rootPath,t.taskId??C(t.task),n.tier)]);return u({tier:n.tier,classifyTriggers:n.classifyTriggers,scopeMap:a,typedUnknownSeeds:n.typedUnknownSeeds,constraints:o,fileSummaries:s,evidenceMapTaskId:d,ceremony:n.ceremony})}async function k(e){if(e.forceTier)return{tier:e.forceTier,classifyTriggers:[],typedUnknownSeeds:[],ceremony:l(e.forceTier)};try{const r=await p({files:e.files,task:e.task,rootPath:e.rootPath});return{tier:r.tier,classifyTriggers:r.triggers,typedUnknownSeeds:r.typedUnknownSeeds,ceremony:r.ceremony}}catch{return{tier:"standard",classifyTriggers:[],typedUnknownSeeds:[],ceremony:l("standard")}}}async function R(e,r,t,i){try{return await 
f(e,r,{task:t,maxFiles:i==="critical"?20:10})}catch{return null}}async function w(e,r,t,i){try{const n=`decision pattern convention ${t}`,s=typeof e.embedQuery=="function"?await e.embedQuery(n):await e.embed(n);return(await r.search(s,{limit:i,origin:"curated"})).slice(0,i).map(o=>v(o))}catch{return[]}}async function F(e){return Promise.all(e.map(async r=>b(r)))}async function b(e){try{const r=await m({path:e});return M(r)}catch(r){return{path:e,exports:[],functions:[],lines:0,error:r instanceof Error?r.message:"Unable to summarize file"}}}async function h(e,r,t){try{return g({action:"create",taskId:r,tier:t},e),r}catch{return null}}function v(e){return{source:e.record.sourcePath,snippet:I(e.record.content),relevance:e.score}}function I(e){const r=e.replace(/\s+/g," ").trim();return r.length<=c?r:`${r.slice(0,c-3).trimEnd()}...`}function M(e){return{path:e.path,exports:e.exports,functions:e.functions.map(r=>r.name),lines:e.lines}}function l(e){return{...T[e]}}function u(e){return{...e,estimatedTokens:y(JSON.stringify(e))}}function C(e){const r=e.toLowerCase().replace(/[^a-z0-9\s]/g," ").split(/\s+/).filter(Boolean).slice(0,5).join("-"),t=Date.now().toString(36);return`${r||"task"}-${t}`}export{D as forgeGround};
|
|
@@ -1,46 +1,3 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
const
|
|
4
|
-
/**
 * Run a git subcommand in `cwd` and return its trimmed stdout.
 * Any failure (git missing, not a repo, non-zero exit) yields "" so callers
 * can treat git data as best-effort.
 */
async function git(args, cwd) {
  try {
    const result = await execFileAsync("git", args, { cwd });
    return result.stdout.toString().trim();
  } catch {
    return "";
  }
}
|
|
12
|
-
/**
 * Gather a snapshot of the git state for a working directory:
 * current branch, staged/modified/untracked files, recent commits,
 * and (optionally) a diff stat.
 * @param {{cwd?: string, commitCount?: number, includeDiff?: boolean}} options
 */
async function gitContext(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const commitCount = options.commitCount ?? 5;

  // All four git invocations are independent; run them in parallel.
  const [branchRaw, statusRaw, logRaw, diffRaw] = await Promise.all([
    git(["rev-parse", "--abbrev-ref", "HEAD"], cwd),
    git(["status", "--porcelain"], cwd),
    git(["log", `--max-count=${commitCount}`, "--format=%h|%s|%an|%ai"], cwd),
    options.includeDiff ? git(["diff", "--stat", "--no-color"], cwd) : Promise.resolve("")
  ]);

  // Porcelain lines are "XY <path>": X = index state, Y = worktree state.
  const status = { staged: [], modified: [], untracked: [] };
  for (const entry of statusRaw.split("\n").filter(Boolean)) {
    const indexState = entry[0];
    const worktreeState = entry[1];
    const file = entry.slice(3).trim();
    if (indexState !== " " && indexState !== "?") status.staged.push(file);
    if (worktreeState === "M" || worktreeState === "D") status.modified.push(file);
    if (indexState === "?") status.untracked.push(file);
  }

  // NOTE: a literal "|" inside a commit subject would split the message early,
  // since the log format uses "|" as the field separator.
  const recentCommits = logRaw
    .split("\n")
    .filter(Boolean)
    .map((entry) => {
      const [hash, message, author, date] = entry.split("|");
      return { hash, message, author, date };
    });

  return {
    branch: branchRaw || "unknown",
    status,
    recentCommits,
    diff: diffRaw || void 0
  };
}
|
|
43
|
-
export {
|
|
44
|
-
gitContext
|
|
45
|
-
};
|
|
46
|
-
//# sourceMappingURL=git-context.js.map
|
|
1
|
+
// De-minified git-context.js — behavior identical to the minified build.
import { execFile } from "node:child_process";
import { promisify } from "node:util";

const execFileAsync = promisify(execFile);

/**
 * Run a git subcommand in `cwd` and return its trimmed stdout.
 * Any failure (git missing, not a repo, non-zero exit) yields "".
 */
async function git(args, cwd) {
  try {
    const { stdout } = await execFileAsync("git", args, { cwd });
    return stdout.toString().trim();
  } catch {
    return "";
  }
}

/**
 * Gather a snapshot of the git state for a working directory:
 * current branch, staged/modified/untracked files, recent commits,
 * and (optionally) a diff stat.
 * @param {{cwd?: string, commitCount?: number, includeDiff?: boolean}} options
 */
async function gitContext(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const commitCount = options.commitCount ?? 5;
  const [branchRaw, statusRaw, logRaw, diffRaw] = await Promise.all([
    git(["rev-parse", "--abbrev-ref", "HEAD"], cwd),
    git(["status", "--porcelain"], cwd),
    git(["log", `--max-count=${commitCount}`, "--format=%h|%s|%an|%ai"], cwd),
    options.includeDiff ? git(["diff", "--stat", "--no-color"], cwd) : Promise.resolve("")
  ]);

  // Porcelain lines are "XY <path>": X = index state, Y = worktree state.
  const staged = [];
  const modified = [];
  const untracked = [];
  for (const line of statusRaw.split("\n").filter(Boolean)) {
    const x = line[0];
    const y = line[1];
    const file = line.slice(3).trim();
    if (x !== " " && x !== "?") staged.push(file);
    if (y === "M" || y === "D") modified.push(file);
    if (x === "?") untracked.push(file);
  }

  const recentCommits = logRaw.split("\n").filter(Boolean).map((line) => {
    const [hash, message, author, date] = line.split("|");
    return { hash, message, author, date };
  });

  return {
    branch: branchRaw || "unknown",
    status: { staged, modified, untracked },
    recentCommits,
    diff: diffRaw || void 0
  };
}

export { gitContext };
|