@aigne/doc-smith 0.2.6 → 0.2.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -0
- package/agents/check-detail-result.mjs +2 -7
- package/agents/check-detail.mjs +4 -6
- package/agents/check-structure-plan.mjs +5 -10
- package/agents/find-item-by-path.mjs +13 -31
- package/agents/input-generator.mjs +15 -35
- package/agents/language-selector.mjs +6 -18
- package/agents/load-config.mjs +2 -2
- package/agents/load-sources.mjs +29 -117
- package/agents/publish-docs.mjs +15 -28
- package/agents/save-docs.mjs +8 -20
- package/agents/save-output.mjs +2 -9
- package/agents/save-single-doc.mjs +2 -2
- package/agents/schema/structure-plan.yaml +1 -1
- package/agents/transform-detail-datasources.mjs +2 -5
- package/biome.json +13 -3
- package/docs-mcp/get-docs-structure.mjs +1 -1
- package/docs-mcp/read-doc-content.mjs +1 -4
- package/package.json +10 -6
- package/tests/check-detail-result.test.mjs +8 -19
- package/tests/load-sources.test.mjs +65 -161
- package/tests/test-all-validation-cases.mjs +71 -37
- package/tests/test-save-docs.mjs +6 -17
- package/utils/constants.mjs +1 -2
- package/utils/file-utils.mjs +205 -0
- package/utils/markdown-checker.mjs +124 -57
- package/utils/mermaid-validator.mjs +5 -10
- package/utils/mermaid-worker-pool.mjs +7 -11
- package/utils/mermaid-worker.mjs +8 -17
- package/utils/utils.mjs +52 -104
package/utils/mermaid-worker.mjs
CHANGED
@@ -5,7 +5,7 @@
  * Runs in isolated Worker thread to avoid global state conflicts
  */

-import { parentPort } from "worker_threads";
+import { parentPort } from "node:worker_threads";

 /**
  * Validate mermaid syntax using official parser in isolated environment
@@ -54,9 +54,7 @@ async function validateMermaidWithOfficialParser(content) {

   // Verify DOMPurify is working before proceeding
   if (typeof dompurify.sanitize !== "function") {
-    throw new Error(
-      "DOMPurify initialization failed - sanitize method not available"
-    );
+    throw new Error("DOMPurify initialization failed - sanitize method not available");
   }

   // Test DOMPurify functionality
@@ -82,10 +80,7 @@ async function validateMermaidWithOfficialParser(content) {
   const originalDOMPurifyFactory = DOMPurifyModule.default;
   try {
     // This might work: intercept the factory function itself
-    if (
-      typeof originalDOMPurifyFactory === "function" &&
-      !originalDOMPurifyFactory.sanitize
-    ) {
+    if (typeof originalDOMPurifyFactory === "function" && !originalDOMPurifyFactory.sanitize) {
       // This means DOMPurify.default is a factory function, not an instance
       // We need to make sure when mermaid calls DOMPurify.sanitize, it works
       const factoryResult = originalDOMPurifyFactory(window);
@@ -93,7 +88,7 @@ async function validateMermaidWithOfficialParser(content) {
       // Copy methods from our working instance to the factory result
       Object.assign(originalDOMPurifyFactory, factoryResult);
     }
-  } catch (
+  } catch (_factoryError) {
     // If factory modification fails, that's OK - we have other fallbacks
   }

@@ -120,9 +115,7 @@ async function validateMermaidWithOfficialParser(content) {
   }

   if (errorMessage.includes("Expecting ")) {
-    throw new Error(
-      "Syntax error: " + errorMessage.replace(/^.*Expecting /, "Expected ")
-    );
+    throw new Error(`Syntax error: ${errorMessage.replace(/^.*Expecting /, "Expected ")}`);
   }

   if (errorMessage.includes("Lexical error")) {
@@ -166,17 +159,15 @@ function validateBasicMermaidSyntax(content) {
   ];

   const firstLine = trimmedContent.split("\n")[0].trim();
-  const hasValidType = validDiagramTypes.some((type) =>
-    firstLine.includes(type)
-  );
+  const hasValidType = validDiagramTypes.some((type) => firstLine.includes(type));

   if (!hasValidType) {
     throw new Error("Invalid or missing diagram type");
   }

   // Basic bracket matching
-  const openBrackets = (content.match(/[
-  const closeBrackets = (content.match(/[\]
+  const openBrackets = (content.match(/[[{(]/g) || []).length;
+  const closeBrackets = (content.match(/[\]})]/g) || []).length;

   if (openBrackets !== closeBrackets) {
     throw new Error("Unmatched brackets in diagram");
package/utils/utils.mjs
CHANGED
@@ -1,20 +1,10 @@
+import { execSync } from "node:child_process";
+import { accessSync, constants, existsSync, mkdirSync, readdirSync, statSync } from "node:fs";
 import fs from "node:fs/promises";
 import path from "node:path";
-import { execSync } from "node:child_process";
-import {
-  existsSync,
-  mkdirSync,
-  readdirSync,
-  accessSync,
-  constants,
-  statSync,
-} from "node:fs";
-import { parse } from "yaml";
 import chalk from "chalk";
-import {
-  …
-  DEFAULT_EXCLUDE_PATTERNS,
-} from "./constants.mjs";
+import { parse } from "yaml";
+import { DEFAULT_EXCLUDE_PATTERNS, DEFAULT_INCLUDE_PATTERNS } from "./constants.mjs";

 /**
  * Normalize path to absolute path for consistent comparison
@@ -22,9 +12,7 @@ import {
  * @returns {string} - Absolute path
  */
 export function normalizePath(filePath) {
-  return path.isAbsolute(filePath)
-    ? filePath
-    : path.resolve(process.cwd(), filePath);
+  return path.isAbsolute(filePath) ? filePath : path.resolve(process.cwd(), filePath);
 }

 /**
@@ -33,36 +21,31 @@ export function normalizePath(filePath) {
  * @returns {string} - Relative path
  */
 export function toRelativePath(filePath) {
-  return path.isAbsolute(filePath)
-    ? path.relative(process.cwd(), filePath)
-    : filePath;
+  return path.isAbsolute(filePath) ? path.relative(process.cwd(), filePath) : filePath;
 }

 export function processContent({ content }) {
   // Match markdown regular links [text](link), exclude images ![text](link)
-  return content.replace(
-    …
-  if (path.startsWith(".")) {
-    finalPath = path.replace(/^\./, "");
-  }
-  let flatPath = finalPath.replace(/^\//, "").replace(/\//g, "-");
-  flatPath = `./${flatPath}.md`;
-  const newLink = hash ? `${flatPath}#${hash}` : flatPath;
-  return `[${text}](${newLink})`;
-  }
-
+  return content.replace(/(?<!!)\[([^\]]+)\]\(([^)]+)\)/g, (match, text, link) => {
+    const trimLink = link.trim();
+    // Exclude external links and mailto
+    if (/^(https?:\/\/|mailto:)/.test(trimLink)) return match;
+    // Preserve anchors
+    const [path, hash] = trimLink.split("#");
+    // Skip if already has extension
+    if (/\.[a-zA-Z0-9]+$/.test(path)) return match;
+    // Only process relative paths or paths starting with /
+    if (!path) return match;
+    // Flatten to ./xxx-yyy.md
+    let finalPath = path;
+    if (path.startsWith(".")) {
+      finalPath = path.replace(/^\./, "");
+    }
+    let flatPath = finalPath.replace(/^\//, "").replace(/\//g, "-");
+    flatPath = `./${flatPath}.md`;
+    const newLink = hash ? `${flatPath}#${hash}` : flatPath;
+    return `[${text}](${newLink})`;
+  });
 }

 /**
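
For context, a minimal sketch of what the rewritten processContent does with a document link, based only on the new lines above; the input string and the import path are illustrative.

import { processContent } from "./utils/utils.mjs";

const content = "See [Getting Started](/overview/getting-started#install) and [Docs](https://example.com/docs).";
console.log(processContent({ content }));
// -> "See [Getting Started](./overview-getting-started.md#install) and [Docs](https://example.com/docs)."
// Internal links are flattened to ./xxx-yyy.md (anchors kept); external and mailto links are left untouched.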
@@ -190,7 +173,7 @@ export async function saveGitHeadToConfig(gitHead) {
     if (fileContent && !fileContent.endsWith("\n")) {
       fileContent += "\n";
     }
-    fileContent += newLastGitHeadLine
+    fileContent += `${newLastGitHeadLine}\n`;
   }

   await fs.writeFile(inputFilePath, fileContent);
@@ -206,20 +189,13 @@ export function getModifiedFilesBetweenCommits(
  * @param {Array<string>} filePaths - Array of file paths to check
  * @returns {Array<string>} - Array of modified file paths
  */
-export function getModifiedFilesBetweenCommits(
-  fromCommit,
-  toCommit = "HEAD",
-  filePaths = []
-) {
+export function getModifiedFilesBetweenCommits(fromCommit, toCommit = "HEAD", filePaths = []) {
   try {
     // Get all modified files between commits
-    const modifiedFiles = execSync(
-      …
-      stdio: ["pipe", "pipe", "ignore"],
-      }
-    )
+    const modifiedFiles = execSync(`git diff --name-only ${fromCommit}..${toCommit}`, {
+      encoding: "utf8",
+      stdio: ["pipe", "pipe", "ignore"],
+    })
       .trim()
       .split("\n")
       .filter(Boolean);
@@ -234,12 +210,12 @@ export function getModifiedFilesBetweenCommits(
         const absoluteFile = normalizePath(file);
         const absoluteTarget = normalizePath(targetPath);
         return absoluteFile === absoluteTarget;
-      })
+      }),
     );
   } catch (error) {
     console.warn(
       `Failed to get modified files between ${fromCommit} and ${toCommit}:`,
-      error.message
+      error.message,
     );
     return [];
   }
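
A small usage sketch of the reflowed helper, matching the signature and git command visible in the two hunks above; the commit SHA and file paths are hypothetical.

import { getModifiedFilesBetweenCommits } from "./utils/utils.mjs";

// Runs `git diff --name-only <from>..<to>` and keeps only the modified files that match
// the paths we asked about, comparing both sides as absolute paths via normalizePath().
const changed = getModifiedFilesBetweenCommits("a1b2c3d", "HEAD", [
  "README.md",
  "agents/load-sources.mjs",
]);
console.log(changed); // e.g. ["agents/load-sources.mjs"] if only that file changed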
@@ -261,7 +237,7 @@ export function hasSourceFilesChanged(sourceIds, modifiedFiles) {
       const absoluteModifiedFile = normalizePath(modifiedFile);
       const absoluteSourceId = normalizePath(sourceId);
       return absoluteModifiedFile === absoluteSourceId;
-    })
+    }),
   );
 }

@@ -277,17 +253,14 @@ export function hasFileChangesBetweenCommits(
   fromCommit,
   toCommit = "HEAD",
   includePatterns = DEFAULT_INCLUDE_PATTERNS,
-  excludePatterns = DEFAULT_EXCLUDE_PATTERNS
+  excludePatterns = DEFAULT_EXCLUDE_PATTERNS,
 ) {
   try {
     // Get file changes with status (A=added, D=deleted, M=modified)
-    const changes = execSync(
-      …
-      stdio: ["pipe", "pipe", "ignore"],
-      }
-    )
+    const changes = execSync(`git diff --name-status ${fromCommit}..${toCommit}`, {
+      encoding: "utf8",
+      stdio: ["pipe", "pipe", "ignore"],
+    })
       .trim()
       .split("\n")
       .filter(Boolean);
@@ -309,10 +282,7 @@ export function hasFileChangesBetweenCommits(
     // Check if file matches any include pattern
     const matchesInclude = includePatterns.some((pattern) => {
       // Convert glob pattern to regex for matching
-      const regexPattern = pattern
-        .replace(/\./g, "\\.")
-        .replace(/\*/g, ".*")
-        .replace(/\?/g, ".");
+      const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, ".*").replace(/\?/g, ".");
       const regex = new RegExp(regexPattern);
       return regex.test(filePath);
     });
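
The include/exclude matching above relies on a very small glob-to-regex conversion; here is the same one-liner isolated with hypothetical patterns, to show what it accepts (note that ".*" also crosses path separators and the regex is not anchored).

const toRegex = (pattern) =>
  new RegExp(pattern.replace(/\./g, "\\.").replace(/\*/g, ".*").replace(/\?/g, "."));

console.log(toRegex("*.md").test("docs/overview.md"));  // true - "*" becomes ".*"
console.log(toRegex("*.md").test("docs/overview.mdx")); // true - ".md" matches as a substring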
@@ -324,10 +294,7 @@ export function hasFileChangesBetweenCommits(
     // Check if file matches any exclude pattern
     const matchesExclude = excludePatterns.some((pattern) => {
       // Convert glob pattern to regex for matching
-      const regexPattern = pattern
-        .replace(/\./g, "\\.")
-        .replace(/\*/g, ".*")
-        .replace(/\?/g, ".");
+      const regexPattern = pattern.replace(/\./g, "\\.").replace(/\*/g, ".*").replace(/\?/g, ".");
       const regex = new RegExp(regexPattern);
       return regex.test(filePath);
     });
@@ -337,7 +304,7 @@ export function hasFileChangesBetweenCommits(
   } catch (error) {
     console.warn(
       `Failed to check file changes between ${fromCommit} and ${toCommit}:`,
-      error.message
+      error.message,
     );
     return false;
   }
@@ -348,11 +315,7 @@ export function hasFileChangesBetweenCommits(
  * @returns {Promise<Object|null>} - The config object or null if file doesn't exist
  */
 export async function loadConfigFromFile() {
-  const configPath = path.join(
-    process.cwd(),
-    "./.aigne/doc-smith",
-    "config.yaml"
-  );
+  const configPath = path.join(process.cwd(), "./.aigne/doc-smith", "config.yaml");

   try {
     if (!existsSync(configPath)) {
@@ -405,11 +368,7 @@ export async function saveValueToConfig(key, value, comment) {
     fileContent = lines.join("\n");

     // Add comment if provided and not already present
-    if (
-      comment &&
-      keyIndex > 0 &&
-      !lines[keyIndex - 1].trim().startsWith("# ")
-    ) {
+    if (comment && keyIndex > 0 && !lines[keyIndex - 1].trim().startsWith("# ")) {
       // Add comment above the key if it doesn't already have one
       lines.splice(keyIndex, 0, `# ${comment}`);
       fileContent = lines.join("\n");
@@ -425,7 +384,7 @@ export async function saveValueToConfig(key, value, comment) {
       fileContent += `# ${comment}\n`;
     }

-    fileContent += newKeyLine
+    fileContent += `${newKeyLine}\n`;
   }

   await fs.writeFile(configPath, fileContent);
@@ -454,7 +413,7 @@ export function validatePath(filePath) {
   // Check if path is accessible (readable)
   try {
     accessSync(absolutePath, constants.R_OK);
-  } catch (
+  } catch (_accessError) {
     return {
       isValid: false,
       error: `Path is not accessible: ${filePath}`,
@@ -465,7 +424,7 @@ export function validatePath(filePath) {
       isValid: true,
       error: null,
     };
-  } catch (
+  } catch (_error) {
     return {
       isValid: false,
       error: `Invalid path format: ${filePath}`,
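
The two hunks above only rename unused catch parameters (_accessError, _error); behavior is unchanged. A hypothetical call, using the return shape visible in these lines:

import { validatePath } from "./utils/utils.mjs";

const { isValid, error } = validatePath("./docs");
if (!isValid) {
  console.warn(error); // e.g. "Path is not accessible: ./docs" or "Invalid path format: ./docs"
}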
@@ -588,10 +547,7 @@ export function getAvailablePaths(userInput = "") {

     return uniqueResults;
   } catch (error) {
-    console.warn(
-      `Failed to get available paths for "${userInput}":`,
-      error.message
-    );
+    console.warn(`Failed to get available paths for "${userInput}":`, error.message);
     return [];
   }
 }
@@ -632,10 +588,7 @@ function getDirectoryContents(dirPath, searchTerm = "") {
       }

       // Filter by search term if provided
-      if (
-        searchTerm &&
-        !entryName.toLowerCase().includes(searchTerm.toLowerCase())
-      ) {
+      if (searchTerm && !entryName.toLowerCase().includes(searchTerm.toLowerCase())) {
         continue;
       }

@@ -673,10 +626,7 @@ function getDirectoryContents(dirPath, searchTerm = "") {

     return items;
   } catch (error) {
-    console.warn(
-      `Failed to get directory contents from ${dirPath}:`,
-      error.message
-    );
+    console.warn(`Failed to get directory contents from ${dirPath}:`, error.message);
     return [];
   }
 }
@@ -689,9 +639,7 @@ function getDirectoryContents(dirPath, searchTerm = "") {
 export async function getGitHubRepoInfo(repoUrl) {
   try {
     // Extract owner and repo from GitHub URL
-    const match = repoUrl.match(
-      /github\.com[\/:]([^\/]+)\/([^\/]+?)(?:\.git)?$/
-    );
+    const match = repoUrl.match(/github\.com[/:]([^/]+)\/([^/]+?)(?:\.git)?$/);
     if (!match) return null;

     const [, owner, repo] = match;
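
The rewritten regex above only drops unnecessary escapes inside the character classes; it still pulls owner and repo out of both SSH and HTTPS remotes. A quick check with a made-up URL:

const match = "git@github.com:some-owner/some-repo.git".match(
  /github\.com[/:]([^/]+)\/([^/]+?)(?:\.git)?$/,
);
console.log(match && { owner: match[1], repo: match[2] }); // { owner: "some-owner", repo: "some-repo" }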
@@ -743,7 +691,7 @@ export async function getProjectInfo() {
         fromGitHub = true;
       }
     }
-  } catch (
+  } catch (_error) {
     // Not in git repository or no origin remote, use current directory name
     console.warn("No git repository found, using current directory name");
   }