kibi-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/kibi +19 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +117 -0
- package/dist/commands/branch.d.ts +3 -0
- package/dist/commands/branch.d.ts.map +1 -0
- package/dist/commands/branch.js +66 -0
- package/dist/commands/check.d.ts +12 -0
- package/dist/commands/check.d.ts.map +1 -0
- package/dist/commands/check.js +439 -0
- package/dist/commands/doctor.d.ts +2 -0
- package/dist/commands/doctor.d.ts.map +1 -0
- package/dist/commands/doctor.js +268 -0
- package/dist/commands/gc.d.ts +6 -0
- package/dist/commands/gc.d.ts.map +1 -0
- package/dist/commands/gc.js +117 -0
- package/dist/commands/init-helpers.d.ts +8 -0
- package/dist/commands/init-helpers.d.ts.map +1 -0
- package/dist/commands/init-helpers.js +150 -0
- package/dist/commands/init.d.ts +6 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +85 -0
- package/dist/commands/query.d.ts +12 -0
- package/dist/commands/query.d.ts.map +1 -0
- package/dist/commands/query.js +469 -0
- package/dist/commands/sync.d.ts +7 -0
- package/dist/commands/sync.d.ts.map +1 -0
- package/dist/commands/sync.js +587 -0
- package/dist/extractors/manifest.d.ts +30 -0
- package/dist/extractors/manifest.d.ts.map +1 -0
- package/dist/extractors/manifest.js +122 -0
- package/dist/extractors/markdown.d.ts +39 -0
- package/dist/extractors/markdown.d.ts.map +1 -0
- package/dist/extractors/markdown.js +203 -0
- package/dist/extractors/symbols-coordinator.d.ts +4 -0
- package/dist/extractors/symbols-coordinator.d.ts.map +1 -0
- package/dist/extractors/symbols-coordinator.js +131 -0
- package/dist/extractors/symbols-ts.d.ts +21 -0
- package/dist/extractors/symbols-ts.d.ts.map +1 -0
- package/dist/extractors/symbols-ts.js +197 -0
- package/dist/prolog.d.ts +35 -0
- package/dist/prolog.d.ts.map +1 -0
- package/dist/prolog.js +328 -0
- package/dist/public/extractors/symbols-coordinator.d.ts +2 -0
- package/dist/public/extractors/symbols-coordinator.d.ts.map +1 -0
- package/dist/public/extractors/symbols-coordinator.js +46 -0
- package/dist/public/prolog/index.d.ts +2 -0
- package/dist/public/prolog/index.d.ts.map +1 -0
- package/dist/public/prolog/index.js +46 -0
- package/dist/public/schemas/entity.d.ts +58 -0
- package/dist/public/schemas/entity.d.ts.map +1 -0
- package/dist/public/schemas/entity.js +102 -0
- package/dist/public/schemas/relationship.d.ts +35 -0
- package/dist/public/schemas/relationship.d.ts.map +1 -0
- package/dist/public/schemas/relationship.js +81 -0
- package/dist/types/changeset.d.ts +22 -0
- package/dist/types/changeset.d.ts.map +1 -0
- package/dist/types/changeset.js +18 -0
- package/dist/types/entities.d.ts +40 -0
- package/dist/types/entities.d.ts.map +1 -0
- package/dist/types/entities.js +18 -0
- package/dist/types/relationships.d.ts +11 -0
- package/dist/types/relationships.d.ts.map +1 -0
- package/dist/types/relationships.js +18 -0
- package/package.json +57 -0
- package/schema/entities.pl +50 -0
- package/schema/relationships.pl +47 -0
- package/schema/validation.pl +49 -0
- package/src/public/extractors/symbols-coordinator.ts +50 -0
- package/src/public/prolog/index.ts +47 -0
- package/src/public/schemas/entity.ts +104 -0
- package/src/public/schemas/relationship.ts +83 -0
- package/src/schemas/changeset.schema.json +48 -0
- package/src/schemas/entity.schema.json +55 -0
- package/src/schemas/relationship.schema.json +34 -0
|
@@ -0,0 +1,587 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Kibi — repo-local, per-branch, queryable long-term memory for software projects
|
|
3
|
+
Copyright (C) 2026 Piotr Franczyk
|
|
4
|
+
|
|
5
|
+
This program is free software: you can redistribute it and/or modify
|
|
6
|
+
it under the terms of the GNU Affero General Public License as published by
|
|
7
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
8
|
+
(at your option) any later version.
|
|
9
|
+
|
|
10
|
+
This program is distributed in the hope that it will be useful,
|
|
11
|
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12
|
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
13
|
+
GNU Affero General Public License for more details.
|
|
14
|
+
|
|
15
|
+
You should have received a copy of the GNU Affero General Public License
|
|
16
|
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
17
|
+
*/
|
|
18
|
+
/*
|
|
19
|
+
How to apply this header to source files (examples)
|
|
20
|
+
|
|
21
|
+
1) Prepend header to a single file (POSIX shells):
|
|
22
|
+
|
|
23
|
+
cat LICENSE_HEADER.txt "$FILE" > "$FILE".with-header && mv "$FILE".with-header "$FILE"
|
|
24
|
+
|
|
25
|
+
2) Apply to multiple files (example: the project's main entry files):
|
|
26
|
+
|
|
27
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp packages/cli/src/*.ts packages/mcp/src/*.ts; do
|
|
28
|
+
if [ -f "$f" ]; then
|
|
29
|
+
cp "$f" "$f".bak
|
|
30
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
31
|
+
fi
|
|
32
|
+
done
|
|
33
|
+
|
|
34
|
+
3) Avoid duplicating the header: run a quick guard to only add if missing
|
|
35
|
+
|
|
36
|
+
for f in packages/cli/bin/kibi packages/mcp/bin/kibi-mcp; do
|
|
37
|
+
if [ -f "$f" ]; then
|
|
38
|
+
if ! head -n 5 "$f" | grep -q "Copyright (C) 2026 Piotr Franczyk"; then
|
|
39
|
+
cp "$f" "$f".bak
|
|
40
|
+
(cat LICENSE_HEADER.txt; echo; cat "$f" ) > "$f".new && mv "$f".new "$f"
|
|
41
|
+
fi
|
|
42
|
+
fi
|
|
43
|
+
done
|
|
44
|
+
*/
|
|
45
|
+
import { createHash } from "node:crypto";
|
|
46
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
|
47
|
+
import * as fs from "node:fs";
|
|
48
|
+
import * as path from "node:path";
|
|
49
|
+
import fg from "fast-glob";
|
|
50
|
+
import { dump as dumpYAML, load as parseYAML } from "js-yaml";
|
|
51
|
+
import { extractFromManifest } from "../extractors/manifest.js";
|
|
52
|
+
import { extractFromMarkdown, } from "../extractors/markdown.js";
|
|
53
|
+
import { enrichSymbolCoordinates, } from "../extractors/symbols-coordinator.js";
|
|
54
|
+
import { PrologProcess } from "../prolog.js";
|
|
55
|
+
/**
 * Error type used for sync-specific failures (e.g. failing to attach the KB).
 * Callers can distinguish it from generic errors via `instanceof SyncError`.
 */
export class SyncError extends Error {
    constructor(message) {
        super(message);
        // Tag the error so logs and `instanceof`-free checks can identify it.
        this.name = SyncError.name;
    }
}
|
|
61
|
+
// Schema version of sync-cache.json; readSyncCache discards caches written
// under a different version.
const SYNC_CACHE_VERSION = 1;
// Cache entries older than 30 days are re-processed even when their content
// hash is unchanged (see the `expired` check in syncCommand).
const SYNC_CACHE_TTL_MS = 30 * 24 * 60 * 60 * 1000;
// Header re-emitted verbatim at the top of symbols.yaml on every rewrite by
// refreshManifestCoordinates. Do not edit the string casually: the rewrite is
// skipped only when header + dumped YAML byte-match the existing file.
const SYMBOLS_MANIFEST_COMMENT_BLOCK = `# symbols.yaml
# AUTHORED fields (edit freely):
# id, title, sourceFile, links, status, tags, owner, priority
# GENERATED fields (never edit manually — overwritten by kibi sync and kb.symbols.refresh):
# sourceLine, sourceColumn, sourceEndLine, sourceEndColumn, coordinatesGeneratedAt
# Run \`kibi sync\` or call the \`kb.symbols.refresh\` MCP tool to refresh coordinates.
`;
// File extensions (lowercase, compared against path.extname().toLowerCase())
// for which symbol-coordinate refresh is attempted.
const SYMBOL_COORD_EXTENSIONS = new Set([
    ".ts",
    ".tsx",
    ".js",
    ".jsx",
    ".mts",
    ".cts",
    ".mjs",
    ".cjs",
]);
// Manifest fields owned by the coordinate generator; a symbol counts as
// "refreshed" when any of these differ before/after enrichment.
const GENERATED_COORD_FIELDS = [
    "sourceLine",
    "sourceColumn",
    "sourceEndLine",
    "sourceEndColumn",
    "coordinatesGeneratedAt",
];
|
|
87
|
+
/**
 * Converts an absolute file path into a cache key: the path relative to the
 * current working directory, normalized to forward slashes so keys are stable
 * across platforms.
 */
function toCacheKey(filePath) {
    const relativePath = path.relative(process.cwd(), filePath);
    return relativePath.split(path.sep).join("/");
}
|
|
90
|
+
/**
 * Computes the hex-encoded SHA-256 digest of a file's raw bytes.
 * Throws if the file cannot be read (callers catch and warn).
 */
function hashFile(filePath) {
    const hasher = createHash("sha256");
    hasher.update(readFileSync(filePath));
    return hasher.digest("hex");
}
|
|
94
|
+
/** Returns a fresh, empty cache document at the current schema version. */
function emptySyncCache() {
    return {
        version: SYNC_CACHE_VERSION,
        hashes: {},
        seenAt: {},
    };
}
/**
 * Loads the per-branch sync cache from disk.
 *
 * Returns an empty cache when the file is missing, unparsable, or was written
 * under a different SYNC_CACHE_VERSION, so callers never observe a stale
 * schema. (Previously the empty-cache literal was repeated three times; it is
 * now a single helper.)
 *
 * @param {string} cachePath absolute path to sync-cache.json
 * @returns {{version: number, hashes: object, seenAt: object}}
 */
function readSyncCache(cachePath) {
    if (!existsSync(cachePath)) {
        return emptySyncCache();
    }
    try {
        const parsed = JSON.parse(readFileSync(cachePath, "utf8"));
        if (parsed.version !== SYNC_CACHE_VERSION) {
            return emptySyncCache();
        }
        // Defensive defaults: tolerate hand-edited or truncated cache files.
        return {
            version: SYNC_CACHE_VERSION,
            hashes: parsed.hashes ?? {},
            seenAt: parsed.seenAt ?? {},
        };
    }
    catch {
        // Corrupt JSON — treat as a cold cache rather than failing the sync.
        return emptySyncCache();
    }
}
|
|
125
|
+
/**
 * Persists the sync cache as pretty-printed JSON followed by a trailing
 * newline, creating parent directories as needed.
 *
 * The previous `existsSync` guard before `mkdirSync` was redundant (and a
 * TOCTOU race): `mkdirSync` with `recursive: true` already succeeds silently
 * when the directory exists.
 *
 * @param {string} cachePath absolute path to sync-cache.json
 * @param {{version: number, hashes: object, seenAt: object}} cache
 */
function writeSyncCache(cachePath, cache) {
    mkdirSync(path.dirname(cachePath), { recursive: true });
    writeFileSync(cachePath, `${JSON.stringify(cache, null, 2)}
`, "utf8");
}
|
|
133
|
+
/**
 * `kibi sync` — scan the workspace for markdown and symbols-manifest sources,
 * extract entities/relationships, refresh generated symbol coordinates, and
 * import everything into the branch-local Prolog knowledge base.
 *
 * @param {{validateOnly?: boolean}} [options] when `validateOnly` is true,
 *   every file is re-checked (cache ignored for selection), extraction errors
 *   are collected and reported, and no KB import takes place.
 *
 * NOTE: this function never returns normally — it always terminates the
 * process via `process.exit()` (0 on success, 1 on failure).
 */
export async function syncCommand(options = {}) {
    const validateOnly = options.validateOnly ?? false;
    try {
        // Detect current branch early (needed for cache and KB paths).
        // "master" deliberately maps to "main" so both share one KB.
        let currentBranch = "main";
        try {
            const { execSync } = await import("node:child_process");
            const branch = execSync("git branch --show-current", {
                cwd: process.cwd(),
                encoding: "utf8",
            }).trim();
            if (branch && branch !== "master") {
                currentBranch = branch;
            }
        }
        catch {
            // Not a git repo (or git unavailable) — use the default branch.
            currentBranch = "main";
        }
        if (process.env.KIBI_DEBUG) {
            try {
                // eslint-disable-next-line no-console
                console.log("[kibi-debug] currentBranch:", currentBranch);
            }
            catch { }
        }
        // Load config (fall back to defaults if missing or unparsable).
        const DEFAULT_CONFIG = {
            paths: {
                requirements: "requirements/**/*.md",
                scenarios: "scenarios/**/*.md",
                tests: "tests/**/*.md",
                adr: "adr/**/*.md",
                flags: "flags/**/*.md",
                events: "events/**/*.md",
                facts: "facts/**/*.md",
                symbols: "symbols.yaml",
            },
        };
        const configPath = path.join(process.cwd(), ".kb/config.json");
        let config;
        try {
            const parsed = JSON.parse(readFileSync(configPath, "utf8"));
            config = {
                paths: {
                    ...DEFAULT_CONFIG.paths,
                    ...(parsed.paths ?? {}),
                },
            };
        }
        catch {
            config = DEFAULT_CONFIG;
        }
        const paths = config.paths;
        // Discover files — bare directory values become "<dir>/**/*.md" globs;
        // values that already contain a wildcard are used as-is.
        const normalizeMarkdownPath = (pattern) => {
            if (!pattern)
                return null;
            if (pattern.includes("*"))
                return pattern;
            return `${pattern}/**/*.md`;
        };
        const markdownPatterns = [
            normalizeMarkdownPath(paths.requirements),
            normalizeMarkdownPath(paths.scenarios),
            normalizeMarkdownPath(paths.tests),
            normalizeMarkdownPath(paths.adr),
            normalizeMarkdownPath(paths.flags),
            normalizeMarkdownPath(paths.events),
            normalizeMarkdownPath(paths.facts),
        ].filter((p) => Boolean(p));
        const markdownFiles = await fg(markdownPatterns, {
            cwd: process.cwd(),
            absolute: true,
        });
        if (process.env.KIBI_DEBUG) {
            try {
                // eslint-disable-next-line no-console
                console.log("[kibi-debug] markdownPatterns:", markdownPatterns);
                // eslint-disable-next-line no-console
                console.log("[kibi-debug] markdownFiles:", markdownFiles);
            }
            catch { }
        }
        const manifestFiles = await fg(paths.symbols, {
            cwd: process.cwd(),
            absolute: true,
        });
        const sourceFiles = [...markdownFiles, ...manifestFiles].sort();
        // Branch-specific cache keeps branch KBs isolated from each other.
        const cachePath = path.join(process.cwd(), `.kb/branches/${currentBranch}/sync-cache.json`);
        const syncCache = readSyncCache(cachePath);
        const nowIso = new Date().toISOString();
        const nowMs = Date.now();
        const nextHashes = {};
        const nextSeenAt = {};
        const changedMarkdownFiles = [];
        const changedManifestFiles = [];
        for (const file of sourceFiles) {
            try {
                const key = toCacheKey(file);
                const hash = hashFile(file);
                const lastSeen = syncCache.seenAt[key];
                const lastSeenMs = lastSeen ? Date.parse(lastSeen) : Number.NaN;
                const expired = Number.isNaN(lastSeenMs)
                    ? false
                    : nowMs - lastSeenMs > SYNC_CACHE_TTL_MS;
                nextHashes[key] = hash;
                nextSeenAt[key] = nowIso;
                // Re-process when content changed, the entry aged past the TTL,
                // or in validate-only mode (which always checks everything).
                if (expired || syncCache.hashes[key] !== hash || validateOnly) {
                    if (markdownFiles.includes(file)) {
                        changedMarkdownFiles.push(file);
                    }
                    else {
                        changedManifestFiles.push(file);
                    }
                }
            }
            catch (error) {
                const message = error instanceof Error ? error.message : String(error);
                console.warn(`Warning: Failed to hash ${file}: ${message}`);
            }
        }
        const results = [];
        // Files that failed extraction are kept out of the written cache so
        // they get retried on the next run.
        const failedCacheKeys = new Set();
        const errors = [];
        for (const file of changedMarkdownFiles) {
            try {
                results.push(extractFromMarkdown(file));
            }
            catch (error) {
                const message = error instanceof Error ? error.message : String(error);
                if (validateOnly) {
                    errors.push({ file, message });
                }
                else {
                    console.warn(`Warning: Failed to extract from ${file}: ${message}`);
                }
                failedCacheKeys.add(toCacheKey(file));
            }
        }
        for (const file of changedManifestFiles) {
            try {
                const manifestResults = extractFromManifest(file);
                results.push(...manifestResults);
            }
            catch (error) {
                const message = error instanceof Error ? error.message : String(error);
                if (validateOnly) {
                    errors.push({ file, message });
                }
                else {
                    console.warn(`Warning: Failed to extract from ${file}: ${message}`);
                }
                failedCacheKeys.add(toCacheKey(file));
            }
        }
        if (validateOnly) {
            if (errors.length > 0) {
                for (const err of errors) {
                    console.error(`${err.file}: ${err.message}`);
                }
                console.error(`FAILED: ${errors.length} errors found`);
                process.exit(1);
            }
            else {
                console.log(`OK: Validation passed (${results.length} entities)`);
                process.exit(0);
            }
        }
        // Refresh generated coordinate fields in every symbols manifest.
        for (const file of manifestFiles) {
            try {
                await refreshManifestCoordinates(file, process.cwd());
            }
            catch (error) {
                const message = error instanceof Error ? error.message : String(error);
                console.warn(`Warning: Failed to refresh symbol coordinates in ${file}: ${message}`);
            }
        }
        if (results.length === 0) {
            // Nothing to import — still persist the cache (minus failed files).
            const evictedHashes = {};
            const evictedSeenAt = {};
            for (const [key, hash] of Object.entries(nextHashes)) {
                if (failedCacheKeys.has(key)) {
                    continue;
                }
                evictedHashes[key] = hash;
                evictedSeenAt[key] = nextSeenAt[key] ?? nowIso;
            }
            writeSyncCache(cachePath, {
                version: SYNC_CACHE_VERSION,
                hashes: evictedHashes,
                seenAt: evictedSeenAt,
            });
            console.log("✓ Imported 0 entities, 0 relationships");
            process.exit(0);
        }
        // Connect to KB
        const prolog = new PrologProcess();
        await prolog.start();
        const kbPath = path.join(process.cwd(), `.kb/branches/${currentBranch}`);
        const mainPath = path.join(process.cwd(), ".kb/branches/main");
        // If branch KB doesn't exist but main does, copy from main
        // (copy-on-write). Skip for orphan branches (no commits yet).
        if (!existsSync(kbPath) && existsSync(mainPath)) {
            // BUG FIX: this file is an ES module, so `require` is not defined;
            // the previous `require("node:child_process")` always threw and made
            // hasCommits permanently false, silently disabling copy-on-write.
            // Use dynamic import, matching the branch detection code above.
            let hasCommits = false;
            try {
                const { execSync } = await import("node:child_process");
                execSync("git rev-parse HEAD", { cwd: process.cwd(), stdio: "pipe" });
                hasCommits = true;
            }
            catch {
                hasCommits = false;
            }
            if (hasCommits) {
                fs.cpSync(mainPath, kbPath, { recursive: true });
                // Remove copied sync cache to avoid cross-branch cache pollution.
                try {
                    const copiedCache = path.join(kbPath, "sync-cache.json");
                    if (existsSync(copiedCache)) {
                        fs.rmSync(copiedCache);
                    }
                }
                catch {
                    // ignore errors cleaning up cache
                }
            }
        }
        const attachResult = await prolog.query(`kb_attach('${kbPath}')`);
        if (!attachResult.success) {
            await prolog.terminate();
            throw new SyncError(`Failed to attach KB: ${attachResult.error || "Unknown error"}`);
        }
        // Upsert entities
        let entityCount = 0;
        let kbModified = false;
        const simplePrologAtom = /^[a-z][a-zA-Z0-9_]*$/;
        // NOTE(review): entity fields are interpolated into Prolog goals with
        // only quote doubling/escaping; ids containing a single quote would
        // break the `id='...'` term below — verify upstream validation.
        const prologAtom = (value) => simplePrologAtom.test(value) ? value : `'${value.replace(/'/g, "''")}'`;
        for (const { entity } of results) {
            try {
                const props = [
                    `id='${entity.id}'`,
                    `title="${entity.title.replace(/"/g, '\\"')}"`,
                    `status=${prologAtom(entity.status)}`,
                    `created_at="${entity.created_at}"`,
                    `updated_at="${entity.updated_at}"`,
                    `source="${entity.source.replace(/"/g, '\\"')}"`,
                ];
                if (entity.tags && entity.tags.length > 0) {
                    const tagsList = entity.tags.map(prologAtom).join(",");
                    props.push(`tags=[${tagsList}]`);
                }
                if (entity.owner)
                    props.push(`owner=${prologAtom(entity.owner)}`);
                if (entity.priority)
                    props.push(`priority=${prologAtom(entity.priority)}`);
                if (entity.severity)
                    props.push(`severity=${prologAtom(entity.severity)}`);
                if (entity.text_ref)
                    props.push(`text_ref="${entity.text_ref}"`);
                const propsList = `[${props.join(", ")}]`;
                const goal = `kb_assert_entity(${entity.type}, ${propsList})`;
                const result = await prolog.query(goal);
                if (result.success) {
                    entityCount++;
                    kbModified = true;
                }
            }
            catch (error) {
                const message = error instanceof Error ? error.message : String(error);
                console.warn(`Warning: Failed to upsert entity ${entity.id}: ${message}`);
            }
        }
        // Build ID lookup map: source filename (sans .md) -> entity ID, plus
        // identity entries so relationships may reference either form.
        const idLookup = new Map();
        for (const { entity } of results) {
            const filename = path.basename(entity.source, ".md");
            idLookup.set(filename, entity.id);
            idLookup.set(entity.id, entity.id);
        }
        // Assert relationships — multi-pass so a relationship whose target is
        // created later in the same sync eventually succeeds.
        let relCount = 0;
        const failedRelationships = [];
        // First pass: try all relationships
        for (const { relationships } of results) {
            for (const rel of relationships) {
                try {
                    const fromId = idLookup.get(rel.from) || rel.from;
                    const toId = idLookup.get(rel.to) || rel.to;
                    const goal = `kb_assert_relationship(${rel.type}, '${fromId}', '${toId}', [])`;
                    const result = await prolog.query(goal);
                    if (result.success) {
                        relCount++;
                        kbModified = true;
                    }
                    else {
                        failedRelationships.push({ rel, fromId, toId, error: result.error || "Unknown error" });
                    }
                }
                catch (error) {
                    const message = error instanceof Error ? error.message : String(error);
                    const fromId = idLookup.get(rel.from) || rel.from;
                    const toId = idLookup.get(rel.to) || rel.to;
                    failedRelationships.push({ rel, fromId, toId, error: message });
                }
            }
        }
        // Retry passes: targets may have been created by earlier successes.
        const retryCount = 3;
        for (let pass = 0; pass < retryCount && failedRelationships.length > 0; pass++) {
            const remainingFailed = [];
            for (const { rel, fromId, toId, error } of failedRelationships) {
                try {
                    const goal = `kb_assert_relationship(${rel.type}, '${fromId}', '${toId}', [])`;
                    const result = await prolog.query(goal);
                    if (result.success) {
                        relCount++;
                        kbModified = true;
                    }
                    else {
                        remainingFailed.push({ rel, fromId, toId, error: result.error || "Unknown error" });
                    }
                }
                catch (err) {
                    const message = err instanceof Error ? err.message : String(err);
                    remainingFailed.push({ rel, fromId, toId, error: message });
                }
            }
            failedRelationships.length = 0;
            failedRelationships.push(...remainingFailed);
        }
        // Report remaining failed relationships after all passes (deduplicated).
        if (failedRelationships.length > 0) {
            console.warn(`\nWarning: ${failedRelationships.length} relationship(s) failed to sync:`);
            const seen = new Set();
            for (const { rel, fromId, toId, error } of failedRelationships) {
                const key = `${rel.type}:${fromId}->${toId}`;
                if (!seen.has(key)) {
                    seen.add(key);
                    console.warn(` - ${rel.type}: ${fromId} -> ${toId}`);
                    console.warn(` Error: ${error}`);
                }
            }
            console.warn("\nTip: Ensure target entities exist before creating relationships.");
        }
        if (kbModified) {
            prolog.invalidateCache();
        }
        // Save KB and detach
        await prolog.query("kb_save");
        await prolog.query("kb_detach");
        await prolog.terminate();
        const evictedHashes = {};
        const evictedSeenAt = {};
        for (const [key, hash] of Object.entries(nextHashes)) {
            if (failedCacheKeys.has(key)) {
                continue;
            }
            evictedHashes[key] = hash;
            evictedSeenAt[key] = nextSeenAt[key] ?? nowIso;
        }
        writeSyncCache(cachePath, {
            version: SYNC_CACHE_VERSION,
            hashes: evictedHashes,
            seenAt: evictedSeenAt,
        });
        console.log(`✓ Imported ${entityCount} entities, ${relCount} relationships`);
        process.exit(0);
    }
    catch (error) {
        if (error instanceof SyncError) {
            console.error(`Error: ${error.message}`);
        }
        else if (error instanceof Error) {
            console.error(`Error: ${error.message}`);
        }
        else {
            console.error(`Error: ${String(error)}`);
        }
        process.exit(1);
    }
}
|
|
515
|
+
/**
 * Re-generates the GENERATED coordinate fields for every symbol in a
 * symbols.yaml manifest, rewriting the file (with its standard header
 * comment) only when the serialized content actually changed.
 *
 * @param {string} manifestPath absolute path to the symbols manifest
 * @param {string} workspaceRoot root used to resolve relative sourceFile paths
 */
async function refreshManifestCoordinates(manifestPath, workspaceRoot) {
    const rawContent = readFileSync(manifestPath, "utf8");
    const parsed = parseYAML(rawContent);
    // Non-object YAML (scalar, array, empty) — nothing refreshable.
    if (!isRecord(parsed)) {
        console.warn(`Warning: symbols manifest ${manifestPath} is not a YAML object; skipping coordinate refresh`);
        return;
    }
    const rawSymbols = parsed.symbols;
    if (!Array.isArray(rawSymbols)) {
        console.warn(`Warning: symbols manifest ${manifestPath} has no symbols array; skipping coordinate refresh`);
        return;
    }
    // Shallow-copy each entry so `before` preserves the pre-enrichment state
    // for the change comparison below; non-object entries become {}.
    const before = rawSymbols.map((entry) => isRecord(entry)
        ? { ...entry }
        : {});
    const enriched = await enrichSymbolCoordinates(before, workspaceRoot);
    parsed.symbols = enriched;
    // Classify each symbol: refreshed (any generated field changed),
    // failed (eligible source file but coordinates incomplete), or unchanged.
    let refreshed = 0;
    let failed = 0;
    let unchanged = 0;
    for (let i = 0; i < before.length; i++) {
        const previous = before[i] ?? {};
        const current = enriched[i] ?? previous;
        const changed = GENERATED_COORD_FIELDS.some((field) => previous[field] !== current[field]);
        if (changed) {
            refreshed++;
            continue;
        }
        // Prefer the enriched entry's sourceFile; fall back to the original's.
        const eligible = isEligibleForCoordinateRefresh(typeof current.sourceFile === "string"
            ? current.sourceFile
            : typeof previous.sourceFile === "string"
                ? previous.sourceFile
                : undefined, workspaceRoot);
        if (eligible && !hasAllGeneratedCoordinates(current)) {
            failed++;
        }
        else {
            unchanged++;
        }
    }
    // lineWidth -1 disables wrapping; noRefs avoids YAML anchors; key order
    // is preserved so authored manifests stay diff-friendly.
    const dumped = dumpYAML(parsed, {
        lineWidth: -1,
        noRefs: true,
        sortKeys: false,
    });
    const nextContent = `${SYMBOLS_MANIFEST_COMMENT_BLOCK}${dumped}`;
    // Only touch the file when the bytes actually differ (keeps mtimes stable).
    if (rawContent !== nextContent) {
        writeFileSync(manifestPath, nextContent, "utf8");
    }
    console.log(`✓ Refreshed symbol coordinates in ${path.relative(workspaceRoot, manifestPath)} (refreshed=${refreshed}, unchanged=${unchanged}, failed=${failed})`);
}
|
|
566
|
+
/**
 * True when a manifest entry carries a complete set of generated coordinate
 * fields: all four numeric positions plus a non-empty generation timestamp.
 */
function hasAllGeneratedCoordinates(entry) {
    const numericFields = ["sourceLine", "sourceColumn", "sourceEndLine", "sourceEndColumn"];
    if (!numericFields.every((field) => typeof entry[field] === "number")) {
        return false;
    }
    return typeof entry.coordinatesGeneratedAt === "string" &&
        entry.coordinatesGeneratedAt.length > 0;
}
|
|
574
|
+
/**
 * Decides whether a symbol's source file can have coordinates generated:
 * the path must be present, exist on disk (resolved against workspaceRoot
 * when relative), and carry a recognized JS/TS extension.
 */
function isEligibleForCoordinateRefresh(sourceFile, workspaceRoot) {
    if (!sourceFile) {
        return false;
    }
    const resolved = path.isAbsolute(sourceFile)
        ? sourceFile
        : path.resolve(workspaceRoot, sourceFile);
    if (!existsSync(resolved)) {
        return false;
    }
    return SYMBOL_COORD_EXTENSIONS.has(path.extname(resolved).toLowerCase());
}
|
|
585
|
+
/**
 * True for plain-object-like values: non-null, typeof "object", not an array.
 */
function isRecord(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === "object";
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
 * A knowledge-base entity extracted from a markdown source file or a
 * symbols manifest by one of the extractors.
 */
export interface ExtractedEntity {
    /** Unique identifier used as the KB key. */
    id: string;
    /** Entity kind (e.g. the section it came from — requirement, adr, …). */
    type: string;
    /** Human-readable title. */
    title: string;
    /** Lifecycle status value. */
    status: string;
    /** Creation timestamp (string form; format set by the extractor). */
    created_at: string;
    /** Last-update timestamp (string form; format set by the extractor). */
    updated_at: string;
    /** Path of the source file the entity was extracted from. */
    source: string;
    /** Optional free-form tags. */
    tags?: string[];
    /** Optional owner identifier. */
    owner?: string;
    /** Optional priority value. */
    priority?: string;
    /** Optional severity value. */
    severity?: string;
    /** Optional related links; element shape is extractor-defined. */
    links?: unknown[];
    /** Optional reference to the entity's body text. */
    text_ref?: string;
}
|
|
16
|
+
/**
 * A directed, typed relationship between two extracted entities,
 * referenced by id (or source-file basename, resolved at sync time).
 */
export interface ExtractedRelationship {
    /** Relationship kind. */
    type: string;
    /** Source entity reference. */
    from: string;
    /** Target entity reference. */
    to: string;
}
|
|
21
|
+
/**
 * Result of extracting one entity: the entity itself plus the
 * relationships declared alongside it.
 */
export interface ExtractionResult {
    /** The extracted entity. */
    entity: ExtractedEntity;
    /** Relationships originating from this entity's source. */
    relationships: ExtractedRelationship[];
}
|
|
25
|
+
/** Error thrown when a symbols manifest cannot be read or parsed. */
export declare class ManifestError extends Error {
    /** Path of the manifest file the error refers to. */
    filePath: string;
    constructor(message: string, filePath: string);
}
/**
 * Extracts entities and their relationships from a symbols manifest file.
 * @param filePath path to the manifest (e.g. symbols.yaml)
 */
export declare function extractFromManifest(filePath: string): ExtractionResult[];
//# sourceMappingURL=manifest.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"manifest.d.ts","sourceRoot":"","sources":["../../src/extractors/manifest.ts"],"names":[],"mappings":"AAiDA,MAAM,WAAW,eAAe;IAC9B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,OAAO,EAAE,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,EAAE,EAAE,MAAM,CAAC;CACZ;AAED,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,eAAe,CAAC;IACxB,aAAa,EAAE,qBAAqB,EAAE,CAAC;CACxC;AAED,qBAAa,aAAc,SAAQ,KAAK;IAG7B,QAAQ,EAAE,MAAM;gBADvB,OAAO,EAAE,MAAM,EACR,QAAQ,EAAE,MAAM;CAK1B;AA2BD,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,MAAM,GAAG,gBAAgB,EAAE,CAqDxE"}
|