@khiem_enhance/ai-doc-agent 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache/docCache.js +28 -0
- package/dist/cli.js +5 -0
- package/dist/commands/generate.js +55 -18
- package/dist/scanner/fileRanker.js +48 -0
- package/package.json +1 -1
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCacheKey = getCacheKey;
exports.readCache = readCache;
exports.writeCache = writeCache;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const crypto_1 = __importDefault(require("crypto"));
// On-disk cache directory, relative to the process working directory.
const CACHE_DIR = ".ai-doc-cache";
/**
 * Hex-encoded SHA-1 digest of `input`. Used only for cache addressing,
 * not for any security purpose.
 * @param {string} input
 * @returns {string} 40-char hex digest
 */
function sha1(input) {
    return crypto_1.default.createHash("sha1").update(input).digest("hex");
}
/**
 * Derive a deterministic cache key from the request parts.
 * NOTE(review): parts are joined with ":" without escaping, so values that
 * themselves contain ":" could in theory collide; acceptable for a doc cache.
 * @param {{kind: string, name: string, fileList: string, payload: string}} parts
 * @returns {string} hex SHA-1 cache key
 */
function getCacheKey(parts) {
    return sha1(`${parts.kind}:${parts.name}:${parts.fileList}:${parts.payload}`);
}
/**
 * Read a cached document, or return null when no entry exists.
 *
 * Fix: the original did `existsSync` followed by `readFileSync`, which races
 * with concurrent deletion between the check and the read (TOCTOU). Reading
 * directly and treating ENOENT as a miss closes that window.
 * @param {string} key - key produced by getCacheKey
 * @returns {string|null} cached markdown, or null on a cache miss
 */
function readCache(key) {
    const p = path_1.default.join(CACHE_DIR, `${key}.md`);
    try {
        return fs_1.default.readFileSync(p, "utf-8");
    }
    catch (err) {
        if (err && err.code === "ENOENT")
            return null; // a missing entry is a normal cache miss
        throw err; // permission/IO errors should surface to the caller
    }
}
/**
 * Persist a document under `key`, creating the cache directory if needed.
 * @param {string} key - key produced by getCacheKey
 * @param {string} content - markdown document to store
 */
function writeCache(key, content) {
    fs_1.default.mkdirSync(CACHE_DIR, { recursive: true });
    fs_1.default.writeFileSync(path_1.default.join(CACHE_DIR, `${key}.md`), content);
}
|
package/dist/cli.js
CHANGED
|
@@ -16,17 +16,22 @@ program
|
|
|
16
16
|
.option("--only <part>", "architecture|modules|all", "all")
|
|
17
17
|
.option("--max-files <n>", "Max files included per LLM request", "8")
|
|
18
18
|
.option("--max-chars <n>", "Max characters included per LLM request", "60000")
|
|
19
|
+
.option("--module <name>", "Only generate docs for a specific module")
|
|
20
|
+
.option("--max-modules <n>", "Max modules to generate in one run", "3")
|
|
19
21
|
.action(async (opts) => {
|
|
20
22
|
// normalize options
|
|
21
23
|
const only = String(opts.only ?? "all");
|
|
22
24
|
const maxFiles = Number(opts.maxFiles ?? 8);
|
|
23
25
|
const maxChars = Number(opts.maxChars ?? 60000);
|
|
26
|
+
const maxModules = Number(opts.maxModules ?? 3);
|
|
24
27
|
await (0, generate_1.generateDocs)({
|
|
25
28
|
since: opts.since,
|
|
26
29
|
output: opts.output,
|
|
27
30
|
only: only ?? "all",
|
|
28
31
|
maxFiles: Number.isFinite(maxFiles) ? maxFiles : 8,
|
|
29
32
|
maxChars: Number.isFinite(maxChars) ? maxChars : 60000,
|
|
33
|
+
module: opts.module ? String(opts.module) : undefined,
|
|
34
|
+
maxModules: Number.isFinite(maxModules) ? maxModules : 3,
|
|
30
35
|
});
|
|
31
36
|
});
|
|
32
37
|
program.parse();
|
|
@@ -12,43 +12,80 @@ const modules_1 = require("../analyzers/modules");
|
|
|
12
12
|
const markdownWriter_1 = require("../writers/markdownWriter");
|
|
13
13
|
const gitUtils_1 = require("../git/gitUtils");
|
|
14
14
|
const moduleDetector_1 = require("../scanner/moduleDetector");
|
|
15
|
+
const fileRanker_1 = require("../scanner/fileRanker");
|
|
16
|
+
const docCache_1 = require("../cache/docCache");
|
|
15
17
|
const truncate = (s, maxChars) => s.length > maxChars ? s.slice(0, maxChars) + "\n\n...<truncated>" : s;
|
|
18
|
+
/**
 * Concatenate up to `maxFiles` file bodies into one "FILE: <path>\n<body>"
 * payload string for an LLM request, truncated to `maxChars` characters.
 * @param {string[]} files - absolute file paths, highest-priority first
 * @param {number} maxFiles - max number of files to include
 * @param {number} maxChars - hard cap on payload length
 * @returns {string} truncated payload
 */
function buildPayload(files, maxFiles, maxChars) {
    const sections = [];
    for (const file of files.slice(0, maxFiles)) {
        sections.push(`FILE: ${file}\n${(0, contentReader_1.readFile)(file)}`);
    }
    return truncate(sections.join("\n\n"), maxChars);
}
|
|
16
25
|
/**
 * Generate architecture and/or per-module documentation for the project in
 * the current working directory, caching LLM output on disk by content hash.
 *
 * @param {{
 *   since?: string,      // git ref: only include files changed since this ref
 *   output: string,      // output directory for generated markdown
 *   only?: string,       // "architecture" | "modules" | "all" (default "all")
 *   maxFiles?: number,   // max files per LLM request (clamped to >= 1)
 *   maxChars?: number,   // max chars per LLM request (clamped to >= 5000)
 *   module?: string,     // restrict generation to one named module
 *   maxModules?: number, // max modules generated per run (clamped to >= 1)
 * }} options
 */
async function generateDocs(options) {
    const root = process.cwd();
    // Either the files changed since a git ref, or a full project scan.
    const allFiles = options.since
        ? (0, gitUtils_1.getChangedFiles)(options.since).map((f) => path_1.default.join(root, f))
        : await (0, fileScanner_1.scanProject)(root);
    const only = options.only ?? "all";
    const maxFiles = Math.max(1, options.maxFiles ?? 8);
    const maxChars = Math.max(5000, options.maxChars ?? 60000);
    const maxModules = Math.max(1, options.maxModules ?? 3);
    // Rank files globally for architecture (better signal, lower token)
    const rankedAll = (0, fileRanker_1.rankFiles)(allFiles, root);
    // ---------- Architecture ----------
    if (only === "architecture" || only === "all") {
        const tree = allFiles.map((f) => path_1.default.relative(root, f)).join("\n");
        const architecturePayload = buildPayload(rankedAll, maxFiles, maxChars);
        // Key covers the tree and the payload, so any file change invalidates.
        const archCacheKey = (0, docCache_1.getCacheKey)({
            kind: "architecture",
            name: "architecture",
            fileList: tree,
            payload: architecturePayload,
        });
        const cachedArch = (0, docCache_1.readCache)(archCacheKey);
        const architecture = cachedArch ?? (await (0, architecture_1.generateArchitectureDoc)(tree, architecturePayload));
        if (cachedArch) {
            console.log("đ§ Cache hit: architecture");
        }
        else {
            (0, docCache_1.writeCache)(archCacheKey, architecture);
            console.log("đ§ Cache miss: architecture (generated)");
        }
        (0, markdownWriter_1.writeDoc)(options.output, "architecture.md", architecture);
        console.log("đ Architecture doc generated");
    }
    // ---------- Modules ----------
    if (only === "modules" || only === "all") {
        const modules = (0, moduleDetector_1.detectModules)(allFiles, root);
        const entries = Object.entries(modules).filter(([name]) => options.module ? name === options.module : true);
        // Fix: previously a --module value that matched no detected module was
        // silently ignored and the run produced no module docs; tell the user.
        if (options.module && entries.length === 0) {
            console.log(`No module named "${options.module}" was detected. ` +
                `Available modules: ${Object.keys(modules).join(", ") || "(none)"}`);
        }
        const limitedEntries = entries.slice(0, maxModules);
        for (const [moduleName, moduleFiles] of limitedEntries) {
            const fileList = moduleFiles.map((f) => path_1.default.relative(root, f)).join("\n");
            // Rank within the module so the payload leads with high-signal files.
            const rankedModuleFiles = (0, fileRanker_1.rankFiles)(moduleFiles, root);
            const modulePayload = buildPayload(rankedModuleFiles, maxFiles, maxChars);
            const moduleCacheKey = (0, docCache_1.getCacheKey)({
                kind: "module",
                name: moduleName,
                fileList,
                payload: modulePayload,
            });
            const cachedModule = (0, docCache_1.readCache)(moduleCacheKey);
            const doc = cachedModule ?? (await (0, modules_1.generateModuleDocs)(moduleName, fileList, modulePayload));
            if (cachedModule) {
                console.log(`đ§ Cache hit: module ${moduleName}`);
            }
            else {
                (0, docCache_1.writeCache)(moduleCacheKey, doc);
                console.log(`đ§ Cache miss: module ${moduleName} (generated)`);
            }
            (0, markdownWriter_1.writeDoc)(path_1.default.join(options.output, "modules"), `${moduleName}.md`, doc);
            console.log(`đ Module doc generated: ${moduleName}`);
        }
        if (entries.length > limitedEntries.length) {
            console.log(`âšī¸ Skipped ${entries.length - limitedEntries.length} modules due to --max-modules=${maxModules}. ` +
                `Re-run with higher limit or specify --module <name>.`);
        }
    }
    console.log("â Docs generation completed");
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.rankFiles = rankFiles;
const path_1 = __importDefault(require("path"));
// Path substrings that usually indicate high-signal source files.
// NOTE(review): /route/i already matches "router", so /router/i double-scores
// router paths — presumably intentional extra weighting; confirm with author.
const IMPORTANT_PATTERNS = [
    /route/i,
    /router/i,
    /controller/i,
    /service/i,
    /api/i,
    /hook/i,
    /store/i,
    /slice/i,
    /middleware/i,
    /schema/i,
    /model/i,
    /dto/i,
    /validator/i,
    /utils/i,
];
/**
 * Return a new array of `files` ordered from most to least "important"
 * (the input array is not mutated).
 *
 * Fix: the original comparator called score() on BOTH operands for every
 * comparison, re-running every regex O(n log n) times. Scores are now computed
 * once per file and the sort compares cached values; since the comparator sees
 * identical values and Array.prototype.sort is stable, the ordering is the
 * same as before.
 * @param {string[]} files - absolute file paths
 * @param {string} root - project root used to relativize paths
 * @returns {string[]} files sorted by descending heuristic score
 */
function rankFiles(files, root) {
    const scored = files.map((file) => ({ file, score: score(file, root) }));
    scored.sort((a, b) => b.score - a.score);
    return scored.map((entry) => entry.file);
}
/**
 * Heuristic importance score for one file based on its path relative to root.
 * @param {string} filePath - absolute file path
 * @param {string} root - project root
 * @returns {number} non-negative score; higher means more important
 */
function score(filePath, root) {
    const rel = path_1.default.relative(root, filePath);
    let s = 0;
    // Prefer shorter paths (usually closer to feature root)
    s += Math.max(0, 30 - rel.split(path_1.default.sep).length * 3);
    // Prefer key filenames/patterns (each matching pattern adds 25, stacking)
    for (const re of IMPORTANT_PATTERNS) {
        if (re.test(rel))
            s += 25;
    }
    // Prefer index/entrypoints
    if (/index\.(ts|tsx|js|jsx)$/.test(rel))
        s += 20;
    if (/main\.(ts|js)$/.test(rel))
        s += 15;
    if (/app\.(ts|tsx|js|jsx)$/.test(rel))
        s += 15;
    // Prefer config
    if (/config/i.test(rel))
        s += 8;
    return s;
}
|