kibi-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/kibi +19 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +117 -0
- package/dist/commands/branch.d.ts +3 -0
- package/dist/commands/branch.d.ts.map +1 -0
- package/dist/commands/branch.js +66 -0
- package/dist/commands/check.d.ts +12 -0
- package/dist/commands/check.d.ts.map +1 -0
- package/dist/commands/check.js +439 -0
- package/dist/commands/doctor.d.ts +2 -0
- package/dist/commands/doctor.d.ts.map +1 -0
- package/dist/commands/doctor.js +268 -0
- package/dist/commands/gc.d.ts +6 -0
- package/dist/commands/gc.d.ts.map +1 -0
- package/dist/commands/gc.js +117 -0
- package/dist/commands/init-helpers.d.ts +8 -0
- package/dist/commands/init-helpers.d.ts.map +1 -0
- package/dist/commands/init-helpers.js +150 -0
- package/dist/commands/init.d.ts +6 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +85 -0
- package/dist/commands/query.d.ts +12 -0
- package/dist/commands/query.d.ts.map +1 -0
- package/dist/commands/query.js +469 -0
- package/dist/commands/sync.d.ts +7 -0
- package/dist/commands/sync.d.ts.map +1 -0
- package/dist/commands/sync.js +587 -0
- package/dist/extractors/manifest.d.ts +30 -0
- package/dist/extractors/manifest.d.ts.map +1 -0
- package/dist/extractors/manifest.js +122 -0
- package/dist/extractors/markdown.d.ts +39 -0
- package/dist/extractors/markdown.d.ts.map +1 -0
- package/dist/extractors/markdown.js +203 -0
- package/dist/extractors/symbols-coordinator.d.ts +4 -0
- package/dist/extractors/symbols-coordinator.d.ts.map +1 -0
- package/dist/extractors/symbols-coordinator.js +131 -0
- package/dist/extractors/symbols-ts.d.ts +21 -0
- package/dist/extractors/symbols-ts.d.ts.map +1 -0
- package/dist/extractors/symbols-ts.js +197 -0
- package/dist/prolog.d.ts +35 -0
- package/dist/prolog.d.ts.map +1 -0
- package/dist/prolog.js +328 -0
- package/dist/public/extractors/symbols-coordinator.d.ts +2 -0
- package/dist/public/extractors/symbols-coordinator.d.ts.map +1 -0
- package/dist/public/extractors/symbols-coordinator.js +46 -0
- package/dist/public/prolog/index.d.ts +2 -0
- package/dist/public/prolog/index.d.ts.map +1 -0
- package/dist/public/prolog/index.js +46 -0
- package/dist/public/schemas/entity.d.ts +58 -0
- package/dist/public/schemas/entity.d.ts.map +1 -0
- package/dist/public/schemas/entity.js +102 -0
- package/dist/public/schemas/relationship.d.ts +35 -0
- package/dist/public/schemas/relationship.d.ts.map +1 -0
- package/dist/public/schemas/relationship.js +81 -0
- package/dist/types/changeset.d.ts +22 -0
- package/dist/types/changeset.d.ts.map +1 -0
- package/dist/types/changeset.js +18 -0
- package/dist/types/entities.d.ts +40 -0
- package/dist/types/entities.d.ts.map +1 -0
- package/dist/types/entities.js +18 -0
- package/dist/types/relationships.d.ts +11 -0
- package/dist/types/relationships.d.ts.map +1 -0
- package/dist/types/relationships.js +18 -0
- package/package.json +57 -0
- package/schema/entities.pl +50 -0
- package/schema/relationships.pl +47 -0
- package/schema/validation.pl +49 -0
- package/src/public/extractors/symbols-coordinator.ts +50 -0
- package/src/public/prolog/index.ts +47 -0
- package/src/public/schemas/entity.ts +104 -0
- package/src/public/schemas/relationship.ts +83 -0
- package/src/schemas/changeset.schema.json +48 -0
- package/src/schemas/entity.schema.json +55 -0
- package/src/schemas/relationship.schema.json +34 -0
package/bin/kibi
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/*
|
|
3
|
+
Kibi — repo-local, per-branch, queryable long-term memory for software projects
|
|
4
|
+
Copyright (C) 2026 Piotr Franczyk
|
|
5
|
+
|
|
6
|
+
This program is free software: you can redistribute it and/or modify
|
|
7
|
+
it under the terms of the GNU Affero General Public License as published by
|
|
8
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
9
|
+
(at your option) any later version.
|
|
10
|
+
|
|
11
|
+
This program is distributed in the hope that it will be useful,
|
|
12
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
13
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
14
|
+
GNU Affero General Public License for more details.
|
|
15
|
+
|
|
16
|
+
You should have received a copy of the GNU Affero General Public License
|
|
17
|
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
18
|
+
*/
|
|
19
|
+
import "../dist/cli.js";
|
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Kibi — repo-local, per-branch, queryable long-term memory for software projects
|
|
3
|
+
Copyright (C) 2026 Piotr Franczyk
|
|
4
|
+
|
|
5
|
+
This program is free software: you can redistribute it and/or modify
|
|
6
|
+
it under the terms of the GNU Affero General Public License as published by
|
|
7
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
8
|
+
(at your option) any later version.
|
|
9
|
+
|
|
10
|
+
This program is distributed in the hope that it will be useful,
|
|
11
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
13
|
+
GNU Affero General Public License for more details.
|
|
14
|
+
|
|
15
|
+
You should have received a copy of the GNU Affero General Public License
|
|
16
|
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
17
|
+
*/
|
|
18
|
+
/*
|
|
19
|
+
How to apply this header to source files (examples)
|
|
20
|
+
|
|
21
|
+
1) Prepend header to a single file (POSIX shells):
|
|
22
|
+
|
|
23
|
+
cat LICENSE_HEADER.txt "$FILE" > "$FILE".with-header && mv "$FILE".with-header "$FILE"
|
|
24
|
+
|
|
25
|
+
2) Apply to multiple files (example: the project's main entry files):
|
|
26
|
+
|
|
27
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp packages/cli/src/*.ts packages/mcp/src/*.ts; do
|
|
28
|
+
if [ -f "$f" ]; then
|
|
29
|
+
cp "$f" "$f".bak
|
|
30
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
31
|
+
fi
|
|
32
|
+
done
|
|
33
|
+
|
|
34
|
+
3) Avoid duplicating the header: run a quick guard to only add if missing
|
|
35
|
+
|
|
36
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp; do
|
|
37
|
+
if [ -f "$f" ]; then
|
|
38
|
+
if ! head -n 5 "$f" | grep -q "Copyright (C) 2026 Piotr Franczyk"; then
|
|
39
|
+
cp "$f" "$f".bak
|
|
40
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
41
|
+
fi
|
|
42
|
+
fi
|
|
43
|
+
done
|
|
44
|
+
*/
|
|
45
|
+
import { Command } from "commander";
|
|
46
|
+
import { branchEnsureCommand } from "./commands/branch.js";
|
|
47
|
+
import { checkCommand } from "./commands/check.js";
|
|
48
|
+
import { doctorCommand } from "./commands/doctor.js";
|
|
49
|
+
import { gcCommand } from "./commands/gc.js";
|
|
50
|
+
import { initCommand } from "./commands/init.js";
|
|
51
|
+
import { queryCommand } from "./commands/query.js";
|
|
52
|
+
import { syncCommand } from "./commands/sync.js";
|
|
53
|
+
// CLI version reported by `kibi --version`.
const VERSION = "0.1.0";
// Root commander program; each subcommand below delegates to its
// implementation module in ./commands/.
const program = new Command();
program
    .name("kibi")
    .description("Prolog-based project knowledge base")
    .version(VERSION);
// kibi init — scaffold the .kb/ directory (hooks installed unless --no-hooks).
program
    .command("init")
    .description("Initialize .kb/ directory")
    .option("--no-hooks", "Do not install git hooks (hooks installed by default)")
    .action(async (options) => {
    await initCommand(options);
});
// kibi sync — extract entities from documents into the KB.
program
    .command("sync")
    .description("Sync entities from documents")
    .option("--validate-only", "Perform validation without mutations")
    .action(async (options) => {
    await syncCommand(options);
});
// kibi query [type] — query the KB with optional filters and paging.
program
    .command("query [type]")
    .description("Query knowledge base")
    .option("--id <id>", "Query specific entity by ID")
    .option("--tag <tag>", "Filter by tag")
    .option("--source <path>", "Filter by source file path (substring match)")
    .option("--relationships <id>", "Get relationships from entity")
    .option("--format <format>", "Output format: json|table", "json")
    .option("--limit <n>", "Limit results", "100")
    .option("--offset <n>", "Skip results", "0")
    .action(async (type, options) => {
    await queryCommand(type, options);
});
// kibi check — run consistency rules; --fix adds remediation hints.
program
    .command("check")
    .description("Check KB consistency and integrity")
    .option("--fix", "Suggest fixes for violations")
    .action(async (options) => {
    await checkCommand(options);
});
// kibi gc — remove stale per-branch KBs. Deletion only happens with --force.
program
    .command("gc")
    .description("Garbage collect stale branch KBs")
    .option("--dry-run", "Preview without deleting (default)", true)
    .option("--force", "Actually delete stale branches")
    .action(async (options) => {
    // NOTE(review): dryRun is derived solely from --force; the --dry-run
    // flag's parsed value is never read (its default of true documents the
    // behavior). Confirm this is intentional.
    const dryRun = !options.force;
    await gcCommand({ dryRun, force: options.force });
});
// kibi doctor — diagnose the local KB setup.
program
    .command("doctor")
    .description("Diagnose KB setup and configuration")
    .action(async () => {
    await doctorCommand();
});
// kibi branch <action> — branch KB management; only "ensure" is handled,
// any other action is silently a no-op.
program
    .command("branch")
    .description("Manage branch KBs")
    .argument("<action>", "Action: ensure")
    .action(async (action) => {
    if (action === "ensure") {
        await branchEnsureCommand();
    }
});
program.parse(process.argv);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"branch.d.ts","sourceRoot":"","sources":["../../src/commands/branch.ts"],"names":[],"mappings":"AAiDA,wBAAsB,mBAAmB,IAAI,OAAO,CAAC,IAAI,CAAC,CAoBzD;AAED,eAAe,mBAAmB,CAAC"}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Kibi — repo-local, per-branch, queryable long-term memory for software projects
|
|
3
|
+
Copyright (C) 2026 Piotr Franczyk
|
|
4
|
+
|
|
5
|
+
This program is free software: you can redistribute it and/or modify
|
|
6
|
+
it under the terms of the GNU Affero General Public License as published by
|
|
7
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
8
|
+
(at your option) any later version.
|
|
9
|
+
|
|
10
|
+
This program is distributed in the hope that it will be useful,
|
|
11
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
13
|
+
GNU Affero General Public License for more details.
|
|
14
|
+
|
|
15
|
+
You should have received a copy of the GNU Affero General Public License
|
|
16
|
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
17
|
+
*/
|
|
18
|
+
/*
|
|
19
|
+
How to apply this header to source files (examples)
|
|
20
|
+
|
|
21
|
+
1) Prepend header to a single file (POSIX shells):
|
|
22
|
+
|
|
23
|
+
cat LICENSE_HEADER.txt "$FILE" > "$FILE".with-header && mv "$FILE".with-header "$FILE"
|
|
24
|
+
|
|
25
|
+
2) Apply to multiple files (example: the project's main entry files):
|
|
26
|
+
|
|
27
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp packages/cli/src/*.ts packages/mcp/src/*.ts; do
|
|
28
|
+
if [ -f "$f" ]; then
|
|
29
|
+
cp "$f" "$f".bak
|
|
30
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
31
|
+
fi
|
|
32
|
+
done
|
|
33
|
+
|
|
34
|
+
3) Avoid duplicating the header: run a quick guard to only add if missing
|
|
35
|
+
|
|
36
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp; do
|
|
37
|
+
if [ -f "$f" ]; then
|
|
38
|
+
if ! head -n 5 "$f" | grep -q "Copyright (C) 2026 Piotr Franczyk"; then
|
|
39
|
+
cp "$f" "$f".bak
|
|
40
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
41
|
+
fi
|
|
42
|
+
fi
|
|
43
|
+
done
|
|
44
|
+
*/
|
|
45
|
+
import { execSync } from "node:child_process";
|
|
46
|
+
import * as fs from "node:fs";
|
|
47
|
+
import * as path from "node:path";
|
|
48
|
+
/**
 * Ensure a per-branch KB directory exists for the current git branch.
 *
 * Reads the branch name via `git branch --show-current`, then seeds
 * `.kb/branches/<branch>` from `.kb/branches/main` when it is missing.
 * Warns and returns early when the main KB itself does not exist.
 *
 * @returns {Promise<void>}
 */
export async function branchEnsureCommand() {
    const branch = execSync("git branch --show-current", {
        encoding: "utf-8",
    }).trim();
    const repoRoot = process.cwd();
    const branchKbDir = path.join(repoRoot, ".kb/branches", branch);
    const mainKbDir = path.join(repoRoot, ".kb/branches/main");
    // Nothing to copy from — bail out with a warning rather than failing.
    if (!fs.existsSync(mainKbDir)) {
        console.warn("Warning: main branch KB does not exist, skipping branch ensure");
        return;
    }
    if (fs.existsSync(branchKbDir)) {
        console.log(`Branch KB already exists: ${branch}`);
    }
    else {
        // First time on this branch: clone the main KB wholesale.
        fs.cpSync(mainKbDir, branchKbDir, { recursive: true });
        console.log(`Created branch KB: ${branch}`);
    }
}
|
|
66
|
+
export default branchEnsureCommand;
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** Options accepted by the `kibi check` command. */
export interface CheckOptions {
    /** When true, print a suggested fix alongside each violation. */
    fix?: boolean;
}
/** One consistency-rule violation discovered in the knowledge base. */
export interface Violation {
    /** Name of the violated rule, e.g. "no-cycles" or "required-fields". */
    rule: string;
    /** ID of the offending entity (or a composite "a/b" ID for pairs). */
    entityId: string;
    /** Human-readable explanation of the violation. */
    description: string;
    /** Optional remediation hint, shown when --fix is passed. */
    suggestion?: string;
    /** Optional source document path of the offending entity. */
    source?: string;
}
/** Runs all KB consistency checks; exits the process non-zero when violations exist. */
export declare function checkCommand(options: CheckOptions): Promise<void>;
|
|
12
|
+
//# sourceMappingURL=check.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"check.d.ts","sourceRoot":"","sources":["../../src/commands/check.ts"],"names":[],"mappings":"AA+CA,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,EAAE,OAAO,CAAC;CACf;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,wBAAsB,YAAY,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,CAkDvE"}
|
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Kibi — repo-local, per-branch, queryable long-term memory for software projects
|
|
3
|
+
Copyright (C) 2026 Piotr Franczyk
|
|
4
|
+
|
|
5
|
+
This program is free software: you can redistribute it and/or modify
|
|
6
|
+
it under the terms of the GNU Affero General Public License as published by
|
|
7
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
8
|
+
(at your option) any later version.
|
|
9
|
+
|
|
10
|
+
This program is distributed in the hope that it will be useful,
|
|
11
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
13
|
+
GNU Affero General Public License for more details.
|
|
14
|
+
|
|
15
|
+
You should have received a copy of the GNU Affero General Public License
|
|
16
|
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
17
|
+
*/
|
|
18
|
+
/*
|
|
19
|
+
How to apply this header to source files (examples)
|
|
20
|
+
|
|
21
|
+
1) Prepend header to a single file (POSIX shells):
|
|
22
|
+
|
|
23
|
+
cat LICENSE_HEADER.txt "$FILE" > "$FILE".with-header && mv "$FILE".with-header "$FILE"
|
|
24
|
+
|
|
25
|
+
2) Apply to multiple files (example: the project's main entry files):
|
|
26
|
+
|
|
27
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp packages/cli/src/*.ts packages/mcp/src/*.ts; do
|
|
28
|
+
if [ -f "$f" ]; then
|
|
29
|
+
cp "$f" "$f".bak
|
|
30
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
31
|
+
fi
|
|
32
|
+
done
|
|
33
|
+
|
|
34
|
+
3) Avoid duplicating the header: run a quick guard to only add if missing
|
|
35
|
+
|
|
36
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp; do
|
|
37
|
+
if [ -f "$f" ]; then
|
|
38
|
+
if ! head -n 5 "$f" | grep -q "Copyright (C) 2026 Piotr Franczyk"; then
|
|
39
|
+
cp "$f" "$f".bak
|
|
40
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
41
|
+
fi
|
|
42
|
+
fi
|
|
43
|
+
done
|
|
44
|
+
*/
|
|
45
|
+
import * as path from "node:path";
|
|
46
|
+
import { PrologProcess } from "../prolog.js";
|
|
47
|
+
/**
 * Run every consistency rule against the main-branch KB and report results.
 *
 * Starts a Prolog process, attaches `.kb/branches/main`, runs all rule
 * checkers, then exits the process: 0 when clean, 1 on violations or error.
 *
 * @param {{ fix?: boolean }} options - `fix` adds remediation hints to output.
 * @returns {Promise<void>} Never resolves normally — always calls process.exit.
 */
export async function checkCommand(options) {
    try {
        const prolog = new PrologProcess();
        await prolog.start();
        const kbPath = path.join(process.cwd(), ".kb/branches/main");
        const attachResult = await prolog.query(`kb_attach('${kbPath}')`);
        if (!attachResult.success) {
            await prolog.terminate();
            console.error(`Error: Failed to attach KB: ${attachResult.error}`);
            process.exit(1);
        }
        // Run every rule; each checker returns a (possibly empty) violation list.
        const violations = [];
        violations.push(...(await checkMustPriorityCoverage(prolog)));
        violations.push(...(await checkSymbolCoverage(prolog)));
        violations.push(...(await checkNoDanglingRefs(prolog)));
        violations.push(...(await checkNoCycles(prolog)));
        const allEntityIds = await getAllEntityIds(prolog);
        violations.push(...(await checkRequiredFields(prolog, allEntityIds)));
        violations.push(...(await checkDeprecatedAdrs(prolog)));
        violations.push(...(await checkDomainContradictions(prolog)));
        await prolog.query("kb_detach");
        await prolog.terminate();
        if (violations.length === 0) {
            console.log("✓ No violations found. KB is valid.");
            process.exit(0);
        }
        console.log(`Found ${violations.length} violation(s):`);
        console.log();
        for (const v of violations) {
            // Prefer the human-friendly source document name over the raw ID.
            const filename = v.source ? path.basename(v.source, ".md") : v.entityId;
            // BUG FIX: this previously printed the literal text "$(unknown)"
            // instead of interpolating the computed filename, leaving the
            // `filename` variable unused.
            console.log(`[${v.rule}] ${filename}`);
            console.log(`  ${v.description}`);
            if (options.fix && v.suggestion) {
                console.log(`  Suggestion: ${v.suggestion}`);
            }
            console.log();
        }
        process.exit(1);
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        console.error(`Error: ${message}`);
        process.exit(1);
    }
}
|
|
92
|
+
// Rule: every must-priority requirement needs both a scenario
// (specified_by edge from the requirement) and a test (validates edge
// pointing at the requirement). Missing either yields one violation.
async function checkMustPriorityCoverage(prolog) {
    const violations = [];
    // Find all must-priority requirements
    const mustReqs = await findMustPriorityReqs(prolog);
    for (const reqId of mustReqs) {
        const entityResult = await prolog.query(`kb_entity('${reqId}', req, Props)`);
        let source = "";
        if (entityResult.success && entityResult.bindings.Props) {
            const propsStr = entityResult.bindings.Props;
            // Props is raw Prolog term text; pull the path out of a
            // `source = ^^("path"...)` (or single-caret) property term.
            const sourceMatch = propsStr.match(/source\s*=\s*\^\^?\("([^"]+)"/);
            if (sourceMatch) {
                source = sourceMatch[1];
            }
        }
        // Coverage is judged purely by query success (any binding counts).
        const scenarioResult = await prolog.query(`kb_relationship(specified_by, '${reqId}', ScenarioId)`);
        const hasScenario = scenarioResult.success;
        const testResult = await prolog.query(`kb_relationship(validates, TestId, '${reqId}')`);
        const hasTest = testResult.success;
        if (!hasScenario || !hasTest) {
            let desc = "Must-priority requirement lacks ";
            const missing = [];
            if (!hasScenario)
                missing.push("scenario");
            if (!hasTest)
                missing.push("test");
            desc = `${desc}${missing.join(" and ")} coverage`;
            violations.push({
                rule: "must-priority-coverage",
                entityId: reqId,
                description: desc,
                source,
                suggestion: missing
                    .map((m) => `Create ${m} that covers this requirement`)
                    .join("; "),
            });
        }
    }
    return violations;
}
|
|
131
|
+
/**
 * Collect the IDs of all requirement entities whose `priority` property
 * is "must" (matched against string, atom, and annotated-term encodings).
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @returns {Promise<string[]>} Requirement IDs; [] when none or on failure.
 */
async function findMustPriorityReqs(prolog) {
    const query = `findall(Id, (kb_entity(Id, req, Props), memberchk(priority=P, Props), (P = ^^("must", _) ; P = "must" ; P = 'must' ; (atom(P), atom_string(P, PS), sub_string(PS, _, 4, 0, "must")))), Ids)`;
    const reply = await prolog.query(query);
    if (!reply.success || !reply.bindings.Ids) {
        return [];
    }
    // The binding is raw Prolog list text; grab everything inside the brackets.
    const listMatch = reply.bindings.Ids.match(/\[(.*)\]/);
    const inner = listMatch ? listMatch[1].trim() : "";
    if (inner === "") {
        return [];
    }
    // Split on commas and strip the single quotes around quoted atoms.
    return inner.split(",").map((raw) => raw.trim().replace(/^'|'$/g, ""));
}
|
|
148
|
+
/**
 * List every entity ID in the attached KB, optionally restricted to one type.
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @param {string} [type] - Entity type atom to filter on; omit for all types.
 * @returns {Promise<string[]>} Entity IDs; [] when none or on query failure.
 */
async function getAllEntityIds(prolog, type) {
    // When a type is given, constrain the Type variable inside the goal.
    const typeFilter = type ? `, Type = ${type}` : "";
    const query = `findall(Id, (kb_entity(Id, Type, _)${typeFilter}), Ids)`;
    const reply = await prolog.query(query);
    if (!reply.success || !reply.bindings.Ids) {
        return [];
    }
    // The binding is raw Prolog list text; take everything inside brackets.
    const listMatch = reply.bindings.Ids.match(/\[(.*)\]/);
    const inner = listMatch ? listMatch[1].trim() : "";
    return inner === ""
        ? []
        : inner.split(",").map((raw) => raw.trim().replace(/^'|'$/g, ""));
}
|
|
166
|
+
// Rule: both endpoints of every relationship must reference an entity
// that actually exists in the KB. Collects edges for every known
// relationship type, then checks each endpoint against the entity set.
async function checkNoDanglingRefs(prolog) {
    const violations = [];
    // Get all entity IDs once
    const allEntityIds = new Set(await getAllEntityIds(prolog));
    // Get all relationships by querying all known relationship types
    const relTypes = [
        "depends_on",
        "verified_by",
        "validates",
        "specified_by",
        "constrains",
        "requires_property",
        "supersedes",
        "relates_to",
    ];
    const allRels = [];
    for (const relType of relTypes) {
        const relsResult = await prolog.query(`findall([From,To], kb_relationship(${relType}, From, To), Rels)`);
        if (relsResult.success && relsResult.bindings.Rels) {
            const relsStr = relsResult.bindings.Rels;
            // Strip outer list brackets; inner pairs look like [from,to],[from,to].
            const match = relsStr.match(/\[(.*)\]/);
            if (match) {
                const content = match[1].trim();
                if (content) {
                    const relMatches = content.matchAll(/\[([^,]+),([^\]]+)\]/g);
                    for (const relMatch of relMatches) {
                        // Endpoint atoms may be quoted; strip surrounding quotes.
                        const fromId = relMatch[1].trim().replace(/^'|'$/g, "");
                        const toId = relMatch[2].trim().replace(/^'|'$/g, "");
                        allRels.push({ from: fromId, to: toId });
                    }
                }
            }
        }
    }
    // Check all collected relationships for dangling refs
    for (const rel of allRels) {
        if (!allEntityIds.has(rel.from)) {
            violations.push({
                rule: "no-dangling-refs",
                entityId: rel.from,
                description: `Relationship references non-existent entity: ${rel.from}`,
                suggestion: "Remove relationship or create missing entity",
            });
        }
        if (!allEntityIds.has(rel.to)) {
            violations.push({
                rule: "no-dangling-refs",
                entityId: rel.to,
                description: `Relationship references non-existent entity: ${rel.to}`,
                suggestion: "Remove relationship or create missing entity",
            });
        }
    }
    return violations;
}
|
|
221
|
+
// Rule: the depends_on graph must be acyclic. Pulls every depends_on
// edge from Prolog, builds an adjacency map, then runs a DFS with a
// recursion stack. Reports the first cycle found and stops.
async function checkNoCycles(prolog) {
    const violations = [];
    // Get all depends_on relationships
    const depsResult = await prolog.query("findall([From,To], kb_relationship(depends_on, From, To), Deps)");
    if (!depsResult.success || !depsResult.bindings.Deps) {
        return violations;
    }
    const depsStr = depsResult.bindings.Deps;
    // Strip the outer list brackets from the raw Prolog term text.
    const match = depsStr.match(/\[(.*)\]/);
    if (!match) {
        return violations;
    }
    const content = match[1].trim();
    if (!content) {
        return violations;
    }
    // Build adjacency map
    const graph = new Map();
    const depMatches = content.matchAll(/\[([^,]+),([^\]]+)\]/g);
    for (const depMatch of depMatches) {
        // IDs may be quoted atoms; strip surrounding single quotes.
        const from = depMatch[1].trim().replace(/^'|'$/g, "");
        const to = depMatch[2].trim().replace(/^'|'$/g, "");
        if (!graph.has(from)) {
            graph.set(from, []);
        }
        const fromList = graph.get(from);
        if (fromList) {
            fromList.push(to);
        }
    }
    // DFS to detect cycles
    const visited = new Set();
    const recStack = new Set();
    // Returns the node path ending in the repeated node when a cycle is
    // found, or null when the subtree rooted at `node` is acyclic.
    function hasCycleDFS(node, path) {
        visited.add(node);
        recStack.add(node);
        path.push(node);
        const neighbors = graph.get(node) || [];
        for (const neighbor of neighbors) {
            if (!visited.has(neighbor)) {
                const cyclePath = hasCycleDFS(neighbor, [...path]);
                if (cyclePath)
                    return cyclePath;
            }
            else if (recStack.has(neighbor)) {
                // Cycle detected
                return [...path, neighbor];
            }
        }
        recStack.delete(node);
        return null;
    }
    // Check each node for cycles
    for (const node of graph.keys()) {
        if (!visited.has(node)) {
            const cyclePath = hasCycleDFS(node, []);
            if (cyclePath) {
                // Resolve each entity on the cycle to its source document
                // basename (when available) for a friendlier message.
                const cycleWithSources = [];
                for (const entityId of cyclePath) {
                    const entityResult = await prolog.query(`kb_entity('${entityId}', _, Props)`);
                    let sourceName = entityId;
                    if (entityResult.success && entityResult.bindings.Props) {
                        const propsStr = entityResult.bindings.Props;
                        const sourceMatch = propsStr.match(/source\s*=\s*\^\^?\("([^"]+)"/);
                        if (sourceMatch) {
                            sourceName = path.basename(sourceMatch[1], ".md");
                        }
                    }
                    cycleWithSources.push(sourceName);
                }
                violations.push({
                    rule: "no-cycles",
                    entityId: cyclePath[0],
                    description: `Circular dependency detected: ${cycleWithSources.join(" → ")}`,
                    suggestion: "Break cycle by removing one of the depends_on relationships",
                });
                break; // Report only first cycle found
            }
        }
    }
    return violations;
}
|
|
303
|
+
/**
 * Rule: every entity must carry the minimal metadata set
 * (id, title, status, created_at, updated_at, source).
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @param {string[]} allEntityIds - IDs of every entity to inspect.
 * @returns {Promise<object[]>} One violation per missing field per entity.
 */
async function checkRequiredFields(prolog, allEntityIds) {
    const violations = [];
    const required = [
        "id",
        "title",
        "status",
        "created_at",
        "updated_at",
        "source",
    ];
    for (const entityId of allEntityIds) {
        const result = await prolog.query(`kb_entity('${entityId}', Type, Props)`);
        if (!result.success || !result.bindings.Props) {
            continue;
        }
        // Props looks like "[key1=value1, key2=value2, ...]" — harvest keys.
        const presentKeys = new Set(
            [...result.bindings.Props.matchAll(/(\w+)\s*=/g)].map((m) => m[1]),
        );
        for (const field of required) {
            if (presentKeys.has(field)) {
                continue;
            }
            violations.push({
                rule: "required-fields",
                entityId: entityId,
                description: `Missing required field: ${field}`,
                suggestion: `Add ${field} to entity definition`,
            });
        }
    }
    return violations;
}
|
|
339
|
+
/**
 * Rule: an archived/deprecated ADR must be superseded by a newer ADR.
 * Relies on the Prolog predicate deprecated_no_successor/1.
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @returns {Promise<object[]>} One violation per deprecated ADR with no successor.
 */
async function checkDeprecatedAdrs(prolog) {
    const violations = [];
    // Use Prolog predicate to find deprecated ADRs without successors
    const result = await prolog.query("setof(Id, deprecated_no_successor(Id), Ids)");
    if (!result.success || !result.bindings.Ids) {
        return violations;
    }
    // Raw Prolog list text; take the contents of the outer brackets.
    const listMatch = result.bindings.Ids.match(/\[(.*)\]/);
    const inner = listMatch ? listMatch[1].trim() : "";
    if (inner === "") {
        return violations;
    }
    const adrIds = inner
        .split(",")
        .map((raw) => raw.trim().replace(/^'|'$/g, ""));
    for (const adrId of adrIds) {
        // Get source for better error message
        const entityResult = await prolog.query(`kb_entity('${adrId}', adr, Props)`);
        let source = "";
        const props = entityResult.success ? entityResult.bindings.Props : undefined;
        if (props) {
            const sourceMatch = props.match(/source\s*=\s*\^\^?\("([^"]+)"/);
            if (sourceMatch) {
                source = sourceMatch[1];
            }
        }
        violations.push({
            rule: "deprecated-adr-no-successor",
            entityId: adrId,
            description: "Archived/deprecated ADR has no successor — add a supersedes link from the replacement ADR",
            suggestion: `Create a new ADR and add: links: [{type: supersedes, target: ${adrId}}]`,
            source,
        });
    }
    return violations;
}
|
|
379
|
+
/**
 * Rule: no two requirements may demand contradictory properties.
 * Relies on the Prolog predicate contradicting_reqs/3, which yields
 * [ReqA, ReqB, Reason] triples.
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @returns {Promise<object[]>} One violation per contradicting pair.
 */
async function checkDomainContradictions(prolog) {
    const violations = [];
    const result = await prolog.query("setof([A,B,Reason], contradicting_reqs(A, B, Reason), Rows)");
    if (!result.success || !result.bindings.Rows) {
        return violations;
    }
    for (const [reqA, reqB, reason] of parseTripleRows(result.bindings.Rows)) {
        violations.push({
            rule: "domain-contradictions",
            entityId: `${reqA}/${reqB}`,
            description: reason,
            suggestion: "Supersede one requirement or align both to the same required property",
        });
    }
    return violations;
}
|
|
396
|
+
/**
 * Rule: each extracted code symbol must be traceable to at least one
 * functional requirement. Relies on the Prolog predicate
 * symbol_no_req_coverage/2.
 *
 * @param {object} prolog - Running Prolog process with a query() method.
 * @returns {Promise<object[]>} One violation per uncovered symbol.
 */
async function checkSymbolCoverage(prolog) {
    const violations = [];
    const uncoveredResult = await prolog.query("setof(Symbol, symbol_no_req_coverage(Symbol, _), Symbols)");
    if (!uncoveredResult.success || !uncoveredResult.bindings.Symbols) {
        return violations;
    }
    // Raw Prolog list text; take the contents of the outer brackets.
    const listMatch = uncoveredResult.bindings.Symbols.match(/\[(.*)\]/);
    const inner = listMatch ? listMatch[1].trim() : "";
    if (inner === "") {
        return violations;
    }
    // Symbol IDs come back as quoted atoms; strip the quotes as we collect.
    for (const symbolMatch of inner.matchAll(/'([^']+)'/g)) {
        violations.push({
            rule: "symbol-coverage",
            entityId: symbolMatch[1],
            description: "Code symbol is not traceable to any functional requirement.",
            suggestion: "Update symbols.yaml to link this symbol to a related requirement.",
        });
    }
    return violations;
}
|
|
420
|
+
/**
 * Parse Prolog result text for a list of 3-element lists — e.g.
 * "[[a,b,c],[d,e,f]]" — into an array of [a, b, c] string triples with
 * surrounding single quotes stripped from each element.
 *
 * @param {string} raw - Raw Prolog term text for a list of [A,B,Reason] rows.
 * @returns {string[][]} Array of 3-string tuples; [] for an empty list.
 */
function parseTripleRows(raw) {
    const cleaned = raw.trim();
    if (cleaned === "[]" || cleaned.length === 0) {
        return [];
    }
    const rows = [];
    // BUG FIX: the first-field pattern previously allowed '[' characters, so
    // the outer list bracket was swallowed into the first row's first field
    // (e.g. "[[a,b,c]]" parsed as ["[a", "b", "c"]). Excluding brackets from
    // the first field anchors each match at an inner row's own '['.
    const rowRegex = /\[([^,\[\]]+),([^,]+),([^\]]+)\]/g;
    let match;
    while ((match = rowRegex.exec(cleaned)) !== null) {
        rows.push([
            match[1].trim().replace(/^'|'$/g, ""),
            match[2].trim().replace(/^'|'$/g, ""),
            match[3].trim().replace(/^'|'$/g, ""),
        ]);
    }
    return rows;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"doctor.d.ts","sourceRoot":"","sources":["../../src/commands/doctor.ts"],"names":[],"mappings":"AAsDA,wBAAsB,aAAa,IAAI,OAAO,CAAC,IAAI,CAAC,CAsDnD"}
|