kibi-cli 0.1.5 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +5 -0
- package/dist/commands/aggregated-checks.d.ts +9 -0
- package/dist/commands/aggregated-checks.d.ts.map +1 -0
- package/dist/commands/aggregated-checks.js +80 -0
- package/dist/commands/check.d.ts +5 -0
- package/dist/commands/check.d.ts.map +1 -1
- package/dist/commands/check.js +147 -10
- package/dist/commands/doctor.js +23 -2
- package/dist/commands/init-helpers.d.ts.map +1 -1
- package/dist/commands/init-helpers.js +4 -2
- package/dist/commands/query.d.ts.map +1 -1
- package/dist/commands/query.js +20 -7
- package/dist/commands/sync.js +1 -1
- package/dist/prolog.d.ts +1 -0
- package/dist/prolog.d.ts.map +1 -1
- package/dist/prolog.js +55 -6
- package/dist/traceability/git-staged.d.ts +29 -0
- package/dist/traceability/git-staged.d.ts.map +1 -0
- package/dist/traceability/git-staged.js +141 -0
- package/dist/traceability/symbol-extract.d.ts +15 -0
- package/dist/traceability/symbol-extract.d.ts.map +1 -0
- package/dist/traceability/symbol-extract.js +228 -0
- package/dist/traceability/temp-kb.d.ts +14 -0
- package/dist/traceability/temp-kb.d.ts.map +1 -0
- package/dist/traceability/temp-kb.js +121 -0
- package/dist/traceability/validate.d.ts +17 -0
- package/dist/traceability/validate.d.ts.map +1 -0
- package/dist/traceability/validate.js +150 -0
- package/package.json +3 -4
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
/**
 * Run a git command synchronously and return its stdout as a UTF-8 string.
 * Any failure is re-thrown as an Error whose message names the command.
 */
function runGit(cmd) {
    let output;
    try {
        output = execSync(cmd, { encoding: "utf8" });
    }
    catch (failure) {
        const detail = failure && failure.message ? String(failure.message) : String(failure);
        throw new Error(`git command failed: ${cmd} -> ${detail}`);
    }
    return output;
}
|
|
13
|
+
/**
|
|
14
|
+
* Parse null-separated name-status output from git
|
|
15
|
+
*/
|
|
16
|
+
/**
 * Parse `git diff --name-status` output into { status, parts } records.
 *
 * Handles both separator conventions:
 *  - tab-separated entries ("M\tpath"), as produced without `-z`;
 *  - fully NUL-separated tokens, as produced with `-z`, where the status
 *    token is followed by one path (two paths for renames/copies R* / C*).
 *
 * Fix: the previous implementation split every NUL-delimited chunk on "\t",
 * which misparses real `-z` output (git emits "M\0path\0", not "M\tpath\0"),
 * yielding entries with empty `parts`.
 *
 * @param input raw git output (may be empty)
 * @returns one record per changed file; `parts` holds the path(s)
 */
export function parseNameStatusNull(input) {
    if (!input)
        return [];
    const tokens = input.split("\0").filter(Boolean);
    // Legacy/tab form: each NUL-delimited chunk is a complete "STATUS\tpath" entry.
    if (tokens.some((token) => token.includes("\t"))) {
        return tokens.map((entry) => {
            const cols = entry.split("\t");
            return { status: cols[0], parts: cols.slice(1) };
        });
    }
    // `-z` form: status and path(s) arrive as separate NUL-terminated tokens.
    const records = [];
    let i = 0;
    while (i < tokens.length) {
        const status = tokens[i];
        i += 1;
        // Renames (Rnnn) and copies (Cnnn) carry old + new path; everything else one path.
        const pathCount = status.startsWith("R") || status.startsWith("C") ? 2 : 1;
        records.push({ status, parts: tokens.slice(i, i + pathCount) });
        i += pathCount;
    }
    return records;
}
|
|
27
|
+
// File extensions the extractor understands (TypeScript + JavaScript variants).
const SUPPORTED_EXT = new Set([
    ".ts",
    ".tsx",
    ".js",
    ".jsx",
    ".mts",
    ".cts",
    ".mjs",
    ".cjs",
]);
/** True when the path ends with one of the supported source extensions. */
function hasSupportedExt(p) {
    return [...SUPPORTED_EXT].some((ext) => p.endsWith(ext));
}
|
|
44
|
+
/**
|
|
45
|
+
* Parse unified diff hunks (new-file coordinates) from git diff output
|
|
46
|
+
*/
|
|
47
|
+
/**
 * Extract changed-line ranges (new-file coordinates) from unified diff text.
 *
 * Each `@@ -a,b +c,d @@` header contributes the closed range [c, c + d - 1]
 * when d > 0 (a missing d means a single line). When the diff contains no
 * hunks but the file is new, a sentinel range ending at
 * Number.MAX_SAFE_INTEGER marks "whole file"; the caller clamps it later.
 *
 * @param diffText output of `git diff` (may be empty)
 * @param isNewFile whether the file is newly added
 * @returns array of { start, end } line ranges
 */
export function parseHunksFromDiff(diffText, isNewFile = false) {
    if (!diffText)
        return [];
    const hunkHeader = /^@@\s+-\d+(?:,\d+)?\s+\+(\d+)(?:,(\d+))?\s+@@/gm;
    const ranges = [];
    for (const match of diffText.matchAll(hunkHeader)) {
        const start = Number.parseInt(match[1], 10);
        const count = match[2] ? Number.parseInt(match[2], 10) : 1;
        // A zero count marks a pure deletion on the new side; skip it.
        if (count > 0) {
            ranges.push({ start, end: start + count - 1 });
        }
    }
    if (ranges.length === 0 && isNewFile) {
        // Whole-file sentinel; real end is filled in once content is known.
        ranges.push({ start: 1, end: Number.MAX_SAFE_INTEGER });
    }
    return ranges;
}
|
|
70
|
+
/**
|
|
71
|
+
* Get staged files with statuses, hunks and content.
|
|
72
|
+
*/
|
|
73
|
+
/**
 * Collect the files currently staged in git, returning for each one its
 * status letter, path (and old path for renames), the staged diff hunk
 * line-ranges in new-file coordinates, and the staged blob content.
 *
 * Deletions, unsupported extensions and unreadable (e.g. binary) index
 * entries are skipped with a console.debug message only.
 *
 * @returns array of { path, status, oldPath, hunkRanges, content }
 * @throws Error when the initial staged-file listing itself fails
 */
export function getStagedFiles() {
    // 1. get staged name-status -z
    let nameStatus;
    try {
        nameStatus = runGit("git diff --cached --name-status -z --diff-filter=ACMRD");
    }
    catch (err) {
        throw new Error(`failed to list staged files: ${String(err)}`);
    }
    const parsed = parseNameStatusNull(nameStatus);
    const results = [];
    for (const entry of parsed) {
        const statusRaw = entry.status;
        // Keep only the first letter: renames/copies arrive as e.g. "R100".
        const status = statusRaw[0] || "M";
        if (status === "D") {
            // deleted files: skip but log via console.debug
            console.debug(`Skipping deleted file (staged): ${entry.parts.join(" -> ")}`);
            continue;
        }
        // handle renames: parts = [old, new]
        let path = entry.parts[0] ?? "";
        let oldPath;
        if (status === "R") {
            if (entry.parts.length >= 2) {
                oldPath = entry.parts[0];
                path = entry.parts[1];
            }
        }
        if (!hasSupportedExt(path)) {
            console.debug(`Skipping unsupported extension: ${path}`);
            continue;
        }
        // 4. compute hunks using git diff --cached -U0 -- <path>
        let diffText = "";
        try {
            // use new path for diff; quote the path to handle spaces
            // NOTE(review): double-quote shell quoting does not neutralize `$` or
            // backticks in a path — confirm inputs are trusted repo paths.
            diffText = runGit(`git diff --cached -U0 -- "${path.replace(/"/g, '\\"')}"`);
        }
        catch (err) {
            // Best-effort: an unreadable diff just means "no hunks known".
            console.debug(`Failed to get diff for ${path}: ${String(err)}`);
            diffText = "";
        }
        // determine if new file: status 'A' or diff contains /dev/null in old file path
        const isNewFile = status === "A" || /\bdev\/null\b/.test(diffText);
        const hunkRanges = parseHunksFromDiff(diffText, isNewFile);
        // 5. read staged content using git show :<path>
        let content;
        try {
            content = runGit(`git show :"${path.replace(/"/g, '\\"')}"`);
        }
        catch (err) {
            // binary or deleted in index
            const e = err;
            const em = e && e.message ? String(e.message) : String(err);
            console.debug(`Skipping binary/deleted or unreadable staged file ${path}: ${em}`);
            continue;
        }
        // If we had a new-file sentinel (end = MAX_SAFE_INTEGER) set a realistic end as content lines
        const lines = content.split(/\r?\n/);
        for (const r of hunkRanges) {
            if (r.end === Number.MAX_SAFE_INTEGER) {
                r.end = Math.max(1, lines.length);
            }
        }
        results.push({ path, status, oldPath, hunkRanges, content });
    }
    return results;
}
export default getStagedFiles;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { HunkRange, StagedFile } from "./git-staged.js";
/**
 * A single exported symbol found in a staged file, together with the staged
 * hunks it overlaps and any requirement IDs linked via comment directives.
 */
export interface ExtractedSymbol {
    /** Stable identifier (manifest id when available, else a content hash). */
    id: string;
    /** Declared symbol name ("<anonymous>" when unnamed). */
    name: string;
    /** Syntactic category of the declaration. */
    kind: "function" | "class" | "variable" | "enum" | "unknown";
    /** 1-based line span of the declaration within its file. */
    location: {
        file: string;
        startLine: number;
        endLine: number;
    };
    /** Staged hunk ranges that intersect this symbol's line span. */
    hunkRanges: HunkRange[];
    /** Requirement IDs parsed from "implements ..." comment directives. */
    reqLinks: string[];
}
/** Extract exported symbols (with hunk/requirement info) from a staged file. */
export declare function extractSymbolsFromStagedFile(stagedFile: StagedFile): ExtractedSymbol[];
//# sourceMappingURL=symbol-extract.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"symbol-extract.d.ts","sourceRoot":"","sources":["../../src/traceability/symbol-extract.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAE7D,MAAM,WAAW,eAAe;IAC9B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,UAAU,GAAG,OAAO,GAAG,UAAU,GAAG,MAAM,GAAG,SAAS,CAAC;IAC7D,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;QAClB,OAAO,EAAE,MAAM,CAAC;KACjB,CAAC;IACF,UAAU,EAAE,SAAS,EAAE,CAAC;IACxB,QAAQ,EAAE,MAAM,EAAE,CAAC;CACpB;AA0DD,wBAAgB,4BAA4B,CAC1C,UAAU,EAAE,UAAU,GACrB,eAAe,EAAE,CAuLnB"}
|
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { Project, ScriptKind } from "ts-morph";
|
|
3
|
+
import { extractFromManifest } from "../extractors/manifest.js";
|
|
4
|
+
// Simple in-memory cache keyed by blob sha with 30s TTL; entries hold the
// parsed ts-morph SourceFile (or null after a parse failure).
const sourceFileCache = new Map();
const CACHE_TTL_MS = 30 * 1000;
// Single shared ts-morph project; staged files are created in-memory only.
const project = new Project({ skipAddingFilesFromTsConfig: true });
|
|
8
|
+
/** SHA-256 hex digest of the given string content. */
function computeContentSha(content) {
    return createHash("sha256").update(content).digest("hex");
}
|
|
13
|
+
/** Map a file path (by extension, case-insensitively) to its ts-morph ScriptKind. */
function chooseScriptKind(path) {
    const lower = path.toLowerCase();
    if (lower.endsWith(".tsx")) {
        return ScriptKind.TSX;
    }
    const tsSuffixes = [".ts", ".mts", ".cts"];
    if (tsSuffixes.some((suffix) => lower.endsWith(suffix))) {
        return ScriptKind.TS;
    }
    // Everything else is JavaScript; .jsx gets the JSX variant.
    return lower.endsWith(".jsx") ? ScriptKind.JSX : ScriptKind.JS;
}
|
|
23
|
+
/**
 * Extract requirement IDs from "implements REQ-1" / "implements: REQ-1, REQ-2"
 * comment directives. A directive must run to end-of-line and IDs must start
 * with an uppercase letter so ordinary tokens (`export`, `function`, ...)
 * are never captured.
 *
 * Fix: the previous regex used the `i` flag, which made the `[A-Z]` ID guard
 * also match lowercase words (e.g. "implements foo" captured "foo"),
 * defeating the uppercase-only rule stated above. The keyword now matches
 * either capitalization explicitly while IDs stay case-sensitive.
 *
 * @param text source text (symbol text plus JSDoc)
 * @returns de-duplicated requirement IDs in first-seen order
 */
function parseReqDirectives(text) {
    const REQ_ID = "[A-Z][A-Z0-9\\-_]*";
    const regex = new RegExp(`[Ii]mplements\\s*:?\\s*(${REQ_ID}(?:\\s*,\\s*${REQ_ID})*)\\s*$`, "gm");
    const reqs = new Set();
    let m;
    while ((m = regex.exec(text))) {
        const list = m[1];
        for (const part of list.split(/[,\s]+/)) {
            const p = part.trim();
            if (!p)
                continue;
            reqs.add(p);
        }
    }
    return Array.from(reqs);
}
|
|
42
|
+
/** True when the closed ranges [aStart, aEnd] and [bStart, bEnd] overlap. */
function rangesIntersect(aStart, aEnd, bStart, bEnd) {
    const disjoint = aEnd < bStart || bEnd < aStart;
    return !disjoint;
}
|
|
45
|
+
/**
 * Parse a staged file with ts-morph and return its exported symbols —
 * functions, classes, enums and exported variable declarations — each with
 * its 1-based line span, the staged hunks it intersects, and any
 * "implements REQ-..." directive links found in its text/JSDoc.
 *
 * Parsed SourceFiles are cached for CACHE_TTL_MS keyed by
 * sha(content + "|" + path); a parse failure caches null briefly to avoid
 * retry storms. For modified files ("M") only symbols overlapping a staged
 * hunk are returned; new/renamed files return every exported symbol.
 *
 * NOTE(review): the cache is keyed per content sha, but every entry is
 * created under the same project path (path + "::staged") with
 * overwrite: true — a cached SourceFile for older content may be forgotten
 * by the project once newer content overwrites it; confirm reuse is safe.
 */
export function extractSymbolsFromStagedFile(stagedFile) {
    const content = stagedFile.content ?? "";
    const sha = computeContentSha(content + "|" + stagedFile.path);
    // TTL cache lookup
    const now = Date.now();
    let cached = sourceFileCache.get(sha);
    if (!cached || now - cached.ts > CACHE_TTL_MS) {
        // create or recreate SourceFile in project (in-memory)
        try {
            const scriptKind = chooseScriptKind(stagedFile.path);
            const sf = project.createSourceFile(stagedFile.path + "::staged", content, {
                overwrite: true,
                scriptKind,
            });
            cached = { tsf: sf, ts: now };
            sourceFileCache.set(sha, cached);
        }
        catch (err) {
            // on parse error, cache null to avoid retry storms briefly
            cached = { tsf: null, ts: now };
            sourceFileCache.set(sha, cached);
        }
    }
    const sf = cached.tsf;
    if (!sf)
        return [];
    const results = [];
    // helpers to compute line spans (ts-morph positions -> 1-based lines)
    const getSpan = (startPos, endPos) => {
        const start = sf.getLineAndColumnAtPos(startPos);
        const end = sf.getLineAndColumnAtPos(endPos);
        return { startLine: start.line, endLine: end.line };
    };
    // Functions
    for (const fn of sf.getFunctions()) {
        if (!fn.isExported())
            continue;
        try {
            const name = fn.getName() ?? "<anonymous>";
            const nameNode = fn.getNameNode();
            // Start at the name node when present; fall back to the declaration start.
            const start = nameNode ? nameNode.getStart() : fn.getStart();
            const end = fn.getEnd();
            const span = getSpan(start, end);
            // Directives may live in the body text or in attached JSDoc blocks.
            const reqLinks = parseReqDirectives(fn.getFullText() +
                "\n" +
                fn
                    .getJsDocs()
                    .map((d) => d.getFullText())
                    .join("\n"));
            const id = resolveSymbolId(stagedFile.path, name);
            results.push({
                id,
                name,
                kind: "function",
                location: {
                    file: stagedFile.path,
                    startLine: span.startLine,
                    endLine: span.endLine,
                },
                hunkRanges: intersectingHunks(span.startLine, span.endLine, stagedFile.hunkRanges),
                reqLinks,
            });
        }
        catch { } // per-symbol extraction is best-effort; skip on any ts-morph error
    }
    // Classes
    for (const cls of sf.getClasses()) {
        if (!cls.isExported())
            continue;
        try {
            const name = cls.getName() ?? "<anonymous>";
            const start = cls.getNameNode()?.getStart() ?? cls.getStart();
            const end = cls.getEnd();
            const span = getSpan(start, end);
            const reqLinks = parseReqDirectives(cls.getText() +
                "\n" +
                cls
                    .getJsDocs()
                    .map((d) => d.getFullText())
                    .join("\n"));
            const id = resolveSymbolId(stagedFile.path, name);
            results.push({
                id,
                name,
                kind: "class",
                location: {
                    file: stagedFile.path,
                    startLine: span.startLine,
                    endLine: span.endLine,
                },
                hunkRanges: intersectingHunks(span.startLine, span.endLine, stagedFile.hunkRanges),
                reqLinks,
            });
        }
        catch { } // best-effort; skip symbols that fail to resolve
    }
    // Enums
    for (const en of sf.getEnums()) {
        if (!en.isExported())
            continue;
        try {
            const name = en.getName() ?? "<anonymous>";
            const start = en.getNameNode()?.getStart() ?? en.getStart();
            const end = en.getEnd();
            const span = getSpan(start, end);
            const reqLinks = parseReqDirectives(en.getText());
            const id = resolveSymbolId(stagedFile.path, name);
            results.push({
                id,
                name,
                kind: "enum",
                location: {
                    file: stagedFile.path,
                    startLine: span.startLine,
                    endLine: span.endLine,
                },
                hunkRanges: intersectingHunks(span.startLine, span.endLine, stagedFile.hunkRanges),
                reqLinks,
            });
        }
        catch { } // best-effort; skip symbols that fail to resolve
    }
    // Variable statements (exported); one result per declaration in the statement
    for (const vs of sf.getVariableStatements()) {
        if (!vs.isExported())
            continue;
        for (const decl of vs.getDeclarations()) {
            try {
                const name = decl.getName();
                const start = decl.getNameNode()?.getStart() ?? decl.getStart();
                const end = decl.getEnd();
                const span = getSpan(start, end);
                const reqLinks = parseReqDirectives(decl.getText());
                const id = resolveSymbolId(stagedFile.path, name);
                results.push({
                    id,
                    name,
                    kind: "variable",
                    location: {
                        file: stagedFile.path,
                        startLine: span.startLine,
                        endLine: span.endLine,
                    },
                    hunkRanges: intersectingHunks(span.startLine, span.endLine, stagedFile.hunkRanges),
                    reqLinks,
                });
            }
            catch { } // best-effort; skip symbols that fail to resolve
        }
    }
    // Filter to only include symbols that intersect with at least one hunk
    // (unless it's a new file or rename, in which case we include all)
    const shouldFilterByHunks = stagedFile.status === "M" && stagedFile.hunkRanges.length > 0;
    if (shouldFilterByHunks) {
        return results.filter((r) => r.hunkRanges.length > 0);
    }
    return results;
}
|
|
203
|
+
/** Return the hunks whose closed [start, end] range overlaps [startLine, endLine]. */
function intersectingHunks(startLine, endLine, hunks) {
    return hunks.filter((hunk) => startLine <= hunk.end && hunk.start <= endLine);
}
|
|
211
|
+
/**
 * Resolve a stable id for a symbol: prefer an explicit id from the manifest
 * (matched by entity title), otherwise derive a deterministic 16-character
 * hex prefix of sha256("<file>:<name>").
 */
function resolveSymbolId(filePath, name) {
    try {
        // Best-effort manifest lookup; extractFromManifest throws when no
        // manifest is present, which we deliberately swallow.
        for (const e of extractFromManifest(filePath)) {
            if (e.entity.title === name) {
                return e.entity.id;
            }
        }
    }
    catch {
        // ignore — fall through to the deterministic hash below
    }
    return createHash("sha256").update(`${filePath}:${name}`).digest("hex").slice(0, 16);
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { PrologProcess } from "../prolog.js";
import type { ExtractedSymbol } from "./symbol-extract";
/**
 * Handle to a temporary knowledge-base working directory and the Prolog
 * session attached to it.
 */
export interface TempKbContext {
    /** Root temp directory that owns all of the paths below. */
    tempDir: string;
    /** Copy of the base KB inside tempDir. */
    kbPath: string;
    /** Path of the overlay facts file (changed_symbols.pl). */
    overlayPath: string;
    /** Live Prolog session already attached to kbPath. */
    prolog: PrologProcess;
}
/** Consult the overlay facts file into the context's Prolog session. */
declare function consultOverlay(ctx: TempKbContext): Promise<void>;
export { consultOverlay };
/** Create a temp KB (copy of baseKbPath) with an attached Prolog session. */
export declare function createTempKb(baseKbPath: string): Promise<TempKbContext>;
/** Render changed_symbol* Prolog facts for the given symbols. */
export declare function createOverlayFacts(symbols: ExtractedSymbol[]): string;
/** Detach/terminate the session and remove the temp directory (idempotent). */
export declare function cleanupTempKb(tempDir: string): Promise<void>;
//# sourceMappingURL=temp-kb.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"temp-kb.d.ts","sourceRoot":"","sources":["../../src/traceability/temp-kb.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAExD,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,EAAE,MAAM,CAAC;IACpB,MAAM,EAAE,aAAa,CAAC;CACvB;AAmDD,iBAAe,cAAc,CAAC,GAAG,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC,CAe/D;AAED,OAAO,EAAE,cAAc,EAAE,CAAC;AAE1B,wBAAsB,YAAY,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC,CA2C7E;AAED,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,GAAG,MAAM,CAkBrE;AAED,wBAAsB,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CA2BlE"}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import { existsSync } from "node:fs";
|
|
2
|
+
import { cp, mkdir, rm, writeFile } from "node:fs/promises";
|
|
3
|
+
import { tmpdir } from "node:os";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import { PrologProcess } from "../prolog.js";
|
|
6
|
+
// Live Prolog session per temp directory, so cleanup can find it later.
const prologByTempDir = new Map();
// Per-directory functions that unregister the process exit/signal handlers.
const cleanupByTempDir = new Map();
// Directories already cleaned, making cleanupTempKb idempotent.
const cleanedTempDirs = new Set();
|
|
9
|
+
/** Tracing is on when either KIBI_TRACE or KIBI_DEBUG is set in the environment. */
function isTraceEnabled() {
    const { KIBI_TRACE, KIBI_DEBUG } = process.env;
    return Boolean(KIBI_TRACE || KIBI_DEBUG);
}
|
|
12
|
+
/** Emit a trace line to stdout; a no-op unless tracing is enabled via env vars. */
function trace(message) {
    if (!isTraceEnabled()) {
        return;
    }
    // eslint-disable-next-line no-console
    console.log(`[kibi-trace] ${message}`);
}
|
|
18
|
+
/** Quote a string as a Prolog atom, doubling any embedded single quotes. */
function escapePrologAtom(value) {
    const doubled = value.replace(/'/g, "''");
    return `'${doubled}'`;
}
|
|
21
|
+
/**
 * Build an exit/signal handler that removes the given temp KB at most once.
 * Failures are only traced — the process is already shutting down.
 */
function createCleanupHandler(tempDir) {
    let running = false;
    return () => {
        const alreadyDone = running || cleanedTempDirs.has(tempDir);
        if (alreadyDone) {
            return;
        }
        running = true;
        void cleanupTempKb(tempDir).catch((error) => {
            const message = error instanceof Error ? error.message : String(error);
            trace(`cleanup on signal/exit failed for ${tempDir}: ${message}`);
        });
    };
}
|
|
34
|
+
/**
 * Install exit/SIGINT/SIGTERM hooks that clean up tempDir, and record an
 * unregister function in cleanupByTempDir so they can be removed later.
 */
function registerCleanupHandlers(tempDir) {
    const handler = createCleanupHandler(tempDir);
    const events = ["exit", "SIGINT", "SIGTERM"];
    for (const event of events) {
        process.once(event, handler);
    }
    cleanupByTempDir.set(tempDir, () => {
        for (const event of events) {
            process.off(event, handler);
        }
    });
}
|
|
45
|
+
/**
 * Load the overlay facts file into the Prolog session registered for this
 * context's temp directory.
 * @throws Error when no session is registered or the consult goal fails
 */
async function consultOverlay(ctx) {
    const session = prologByTempDir.get(ctx.tempDir);
    if (!session) {
        throw new Error(`No Prolog session found for temp dir: ${ctx.tempDir}`);
    }
    const goal = `consult(${escapePrologAtom(ctx.overlayPath)})`;
    const consultResult = await session.query([goal]);
    if (consultResult.success) {
        return;
    }
    throw new Error(`Failed to consult overlay facts ${ctx.overlayPath}: ${consultResult.error || "unknown error"}`);
}
export { consultOverlay };
|
|
58
|
+
/**
 * Create a temporary knowledge base: copy the base KB into a fresh temp
 * directory, start a Prolog session, attach it to the copy and register
 * exit/signal cleanup handlers.
 *
 * The overlay facts file is created empty; the caller writes facts and then
 * invokes consultOverlay(ctx).
 *
 * @param baseKbPath existing KB directory/file to copy
 * @returns context with tempDir/kbPath/overlayPath and the live session
 * @throws Error when baseKbPath is missing, or when kb_attach fails (the
 *         temp dir is cleaned up before rethrowing in that case)
 */
export async function createTempKb(baseKbPath) {
    if (!existsSync(baseKbPath)) {
        throw new Error(`Base KB path does not exist: ${baseKbPath}`);
    }
    // pid + timestamp keeps concurrent invocations from colliding
    const tempDir = path.join(tmpdir(), `kibi-precommit-${process.pid}-${Date.now()}`);
    const kbPath = path.join(tempDir, "kb");
    const overlayPath = path.join(tempDir, "changed_symbols.pl");
    trace(`creating temp KB directory ${tempDir}`);
    await mkdir(tempDir, { recursive: true });
    trace(`copying base KB ${baseKbPath} -> ${kbPath}`);
    await cp(baseKbPath, kbPath, { recursive: true });
    // Empty overlay so a consult before any facts are written still succeeds.
    await writeFile(overlayPath, "", "utf8");
    const prolog = new PrologProcess({ timeout: 120000 });
    await prolog.start();
    // Register the session before attaching so cleanup can always find it.
    prologByTempDir.set(tempDir, prolog);
    // ctx includes prolog so callers can use it directly
    const ctx = { tempDir, kbPath, overlayPath, prolog };
    registerCleanupHandlers(tempDir);
    const attachResult = await prolog.query(`kb_attach(${escapePrologAtom(kbPath)})`);
    if (!attachResult.success) {
        await cleanupTempKb(tempDir);
        throw new Error(`Failed to attach temporary KB at ${kbPath}: ${attachResult.error || "unknown error"}`);
    }
    // Caller is expected to write overlay facts and then call consultOverlay(ctx).
    trace(`temporary KB ready at ${kbPath}`);
    return ctx;
}
|
|
85
|
+
/**
 * Render Prolog facts describing changed symbols: changed_symbol/1,
 * changed_symbol_loc/5 and changed_symbol_req/2, newline-joined.
 */
export function createOverlayFacts(symbols) {
    const facts = [];
    for (const symbol of symbols) {
        const idAtom = escapePrologAtom(symbol.id);
        facts.push(`changed_symbol(${idAtom}).`);
        facts.push(`changed_symbol_loc(${idAtom}, ${escapePrologAtom(symbol.location.file)}, ${symbol.location.startLine}, 0, ${escapePrologAtom(symbol.name)}).`);
        // Emit overlay facts for requirement links from code-comment directives.
        for (const reqId of symbol.reqLinks) {
            facts.push(`changed_symbol_req(${idAtom}, ${escapePrologAtom(reqId)}).`);
        }
    }
    return facts.join("\n");
}
|
|
97
|
+
/**
 * Tear down a temp KB created by createTempKb: unregister the exit/signal
 * hooks, detach and terminate the Prolog session, and remove the directory.
 * Idempotent — repeat calls for the same tempDir return immediately.
 */
export async function cleanupTempKb(tempDir) {
    if (cleanedTempDirs.has(tempDir)) {
        return;
    }
    // Mark first so concurrent/exit-handler invocations bail out immediately.
    cleanedTempDirs.add(tempDir);
    const unregister = cleanupByTempDir.get(tempDir);
    if (unregister) {
        unregister();
        cleanupByTempDir.delete(tempDir);
    }
    const prolog = prologByTempDir.get(tempDir);
    if (prolog) {
        try {
            await prolog.query("kb_detach");
        }
        catch (error) {
            // Best-effort: still terminate the session and remove files below.
            const message = error instanceof Error ? error.message : String(error);
            trace(`kb_detach failed during cleanup for ${tempDir}: ${message}`);
        }
        await prolog.terminate();
        prologByTempDir.delete(tempDir);
    }
    await rm(tempDir, { recursive: true, force: true });
    trace(`removed temporary KB directory ${tempDir}`);
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { PrologProcess } from "../prolog.js";
/** Inputs for staged-symbol traceability validation. */
export interface ValidationOptions {
    /** Minimum number of requirement links each changed symbol must have. */
    minLinks: number;
    /** Prolog session holding the (temporary) knowledge base to query. */
    prolog: PrologProcess;
}
/** One changed symbol that fails the minimum-links requirement. */
export interface Violation {
    symbolId: string;
    name: string;
    file: string;
    /** Source position of the symbol within `file`. */
    line: number;
    column: number;
    /** Links currently recorded for the symbol. */
    currentLinks: number;
    /** Links required by the validation options. */
    requiredLinks: number;
}
/** Query the KB for staged symbols that violate the link requirement. */
export declare function validateStagedSymbols(options: ValidationOptions): Promise<Violation[]>;
/** Human-readable report of the given violations. */
export declare function formatViolations(violations: Violation[]): string;
//# sourceMappingURL=validate.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"validate.d.ts","sourceRoot":"","sources":["../../src/traceability/validate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAElD,MAAM,WAAW,iBAAiB;IAChC,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,aAAa,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;CACvB;AAgHD,wBAAsB,qBAAqB,CACzC,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,SAAS,EAAE,CAAC,CAqCtB;AAED,wBAAgB,gBAAgB,CAAC,UAAU,EAAE,SAAS,EAAE,GAAG,MAAM,CAiBhE"}
|