@aigne/doc-smith 0.2.8 → 0.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/agents/check-detail.mjs +0 -6
- package/agents/check-structure-plan.mjs +24 -3
- package/agents/find-item-by-path.mjs +11 -29
- package/agents/input-generator.mjs +9 -30
- package/agents/load-config.mjs +10 -1
- package/agents/load-sources.mjs +18 -79
- package/agents/publish-docs.mjs +3 -96
- package/agents/team-publish-docs.yaml +6 -7
- package/package.json +1 -1
- package/utils/auth-utils.mjs +105 -0
- package/utils/blocklet.mjs +25 -0
- package/utils/constants.mjs +78 -11
- package/utils/file-utils.mjs +205 -0
- package/utils/markdown-checker.mjs +15 -21
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,19 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## [0.2.10](https://github.com/AIGNE-io/aigne-doc-smith/compare/v0.2.9...v0.2.10) (2025-08-14)
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
### Bug Fixes
|
|
7
|
+
|
|
8
|
+
* fix bug bush feedback ([#41](https://github.com/AIGNE-io/aigne-doc-smith/issues/41)) ([2740d1a](https://github.com/AIGNE-io/aigne-doc-smith/commit/2740d1abef70ea36780b030917a6d54f74df4327))
|
|
9
|
+
|
|
10
|
+
## [0.2.9](https://github.com/AIGNE-io/aigne-doc-smith/compare/v0.2.8...v0.2.9) (2025-08-13)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
### Bug Fixes
|
|
14
|
+
|
|
15
|
+
* polish ignore check ([#25](https://github.com/AIGNE-io/aigne-doc-smith/issues/25)) ([90bc866](https://github.com/AIGNE-io/aigne-doc-smith/commit/90bc866513fef7b47047b1016e07bf38881c101c))
|
|
16
|
+
|
|
3
17
|
## [0.2.8](https://github.com/AIGNE-io/aigne-doc-smith/compare/v0.2.7...v0.2.8) (2025-08-13)
|
|
4
18
|
|
|
5
19
|
|
package/agents/check-detail.mjs
CHANGED
|
@@ -16,7 +16,6 @@ export default async function checkDetail(
|
|
|
16
16
|
originalStructurePlan,
|
|
17
17
|
structurePlan,
|
|
18
18
|
modifiedFiles,
|
|
19
|
-
lastGitHead,
|
|
20
19
|
forceRegenerate,
|
|
21
20
|
...rest
|
|
22
21
|
},
|
|
@@ -80,11 +79,6 @@ export default async function checkDetail(
|
|
|
80
79
|
}
|
|
81
80
|
}
|
|
82
81
|
|
|
83
|
-
// If lastGitHead is not set, regenerate
|
|
84
|
-
if (!lastGitHead) {
|
|
85
|
-
sourceFilesChanged = true;
|
|
86
|
-
}
|
|
87
|
-
|
|
88
82
|
// If file exists, check content validation
|
|
89
83
|
let contentValidationFailed = false;
|
|
90
84
|
if (detailGenerated && fileContent && structurePlan) {
|
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
import { access } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
1
3
|
import {
|
|
2
4
|
getCurrentGitHead,
|
|
3
5
|
getProjectInfo,
|
|
@@ -7,7 +9,7 @@ import {
|
|
|
7
9
|
} from "../utils/utils.mjs";
|
|
8
10
|
|
|
9
11
|
export default async function checkStructurePlan(
|
|
10
|
-
{ originalStructurePlan, feedback, lastGitHead, ...rest },
|
|
12
|
+
{ originalStructurePlan, feedback, lastGitHead, docsDir, forceRegenerate, ...rest },
|
|
11
13
|
options,
|
|
12
14
|
) {
|
|
13
15
|
// Check if we need to regenerate structure plan
|
|
@@ -16,9 +18,18 @@ export default async function checkStructurePlan(
|
|
|
16
18
|
|
|
17
19
|
// If no feedback and originalStructurePlan exists, check for git changes
|
|
18
20
|
if (originalStructurePlan) {
|
|
19
|
-
// If no lastGitHead,
|
|
21
|
+
// If no lastGitHead, check if _sidebar.md exists to determine if we should regenerate
|
|
20
22
|
if (!lastGitHead) {
|
|
21
|
-
|
|
23
|
+
try {
|
|
24
|
+
// Check if _sidebar.md exists in docsDir
|
|
25
|
+
const sidebarPath = join(docsDir, "_sidebar.md");
|
|
26
|
+
await access(sidebarPath);
|
|
27
|
+
// If _sidebar.md exists, it means last execution was completed, need to regenerate
|
|
28
|
+
shouldRegenerate = true;
|
|
29
|
+
} catch {
|
|
30
|
+
// If _sidebar.md doesn't exist, it means last execution was interrupted, no need to regenerate
|
|
31
|
+
shouldRegenerate = false;
|
|
32
|
+
}
|
|
22
33
|
} else {
|
|
23
34
|
// Check if there are relevant file changes since last generation
|
|
24
35
|
const currentGitHead = getCurrentGitHead();
|
|
@@ -43,6 +54,16 @@ export default async function checkStructurePlan(
|
|
|
43
54
|
}
|
|
44
55
|
}
|
|
45
56
|
|
|
57
|
+
// user requested regeneration
|
|
58
|
+
if (forceRegenerate) {
|
|
59
|
+
shouldRegenerate = true;
|
|
60
|
+
finalFeedback = `
|
|
61
|
+
${finalFeedback || ""}
|
|
62
|
+
|
|
63
|
+
用户请求强制重新生成结构规划,请根据最新的 Data Sources 和用户要求重生生成,**允许任何修改**。
|
|
64
|
+
`;
|
|
65
|
+
}
|
|
66
|
+
|
|
46
67
|
// If no regeneration needed, return original structure plan
|
|
47
68
|
if (originalStructurePlan && !feedback && !shouldRegenerate) {
|
|
48
69
|
return {
|
|
@@ -8,15 +8,8 @@ function getActionText(isTranslate, baseText) {
|
|
|
8
8
|
}
|
|
9
9
|
|
|
10
10
|
export default async function findItemByPath(
|
|
11
|
-
{
|
|
12
|
-
|
|
13
|
-
structurePlanResult,
|
|
14
|
-
boardId,
|
|
15
|
-
docsDir,
|
|
16
|
-
isTranslate,
|
|
17
|
-
feedback,
|
|
18
|
-
},
|
|
19
|
-
options
|
|
11
|
+
{ "doc-path": docPath, structurePlanResult, boardId, docsDir, isTranslate, feedback },
|
|
12
|
+
options,
|
|
20
13
|
) {
|
|
21
14
|
let foundItem = null;
|
|
22
15
|
let selectedFileContent = null;
|
|
@@ -30,9 +23,7 @@ export default async function findItemByPath(
|
|
|
30
23
|
// Filter for main language .md files (exclude _sidebar.md and language-specific files)
|
|
31
24
|
const mainLanguageFiles = files.filter(
|
|
32
25
|
(file) =>
|
|
33
|
-
file.endsWith(".md") &&
|
|
34
|
-
file !== "_sidebar.md" &&
|
|
35
|
-
!file.match(/\.\w+(-\w+)?\.md$/) // Exclude language-specific files like .en.md, .zh-CN.md, etc.
|
|
26
|
+
file.endsWith(".md") && file !== "_sidebar.md" && !file.match(/\.\w+(-\w+)?\.md$/), // Exclude language-specific files like .en.md, .zh-CN.md, etc.
|
|
36
27
|
);
|
|
37
28
|
|
|
38
29
|
if (mainLanguageFiles.length === 0) {
|
|
@@ -52,7 +43,7 @@ export default async function findItemByPath(
|
|
|
52
43
|
|
|
53
44
|
const searchTerm = input.trim().toLowerCase();
|
|
54
45
|
const filteredFiles = mainLanguageFiles.filter((file) =>
|
|
55
|
-
file.toLowerCase().includes(searchTerm)
|
|
46
|
+
file.toLowerCase().includes(searchTerm),
|
|
56
47
|
);
|
|
57
48
|
|
|
58
49
|
return filteredFiles.map((file) => ({
|
|
@@ -71,10 +62,7 @@ export default async function findItemByPath(
|
|
|
71
62
|
const selectedFilePath = join(docsDir, selectedFile);
|
|
72
63
|
selectedFileContent = await readFile(selectedFilePath, "utf-8");
|
|
73
64
|
} catch (readError) {
|
|
74
|
-
console.warn(
|
|
75
|
-
`⚠️ Could not read content from ${selectedFile}:`,
|
|
76
|
-
readError.message
|
|
77
|
-
);
|
|
65
|
+
console.warn(`⚠️ Could not read content from ${selectedFile}:`, readError.message);
|
|
78
66
|
selectedFileContent = null;
|
|
79
67
|
}
|
|
80
68
|
|
|
@@ -87,9 +75,7 @@ export default async function findItemByPath(
|
|
|
87
75
|
|
|
88
76
|
// First try without boardId prefix
|
|
89
77
|
foundItemByFile = structurePlanResult.find((item) => {
|
|
90
|
-
const itemFlattenedPath = item.path
|
|
91
|
-
.replace(/^\//, "")
|
|
92
|
-
.replace(/\//g, "-");
|
|
78
|
+
const itemFlattenedPath = item.path.replace(/^\//, "").replace(/\//g, "-");
|
|
93
79
|
return itemFlattenedPath === flatName;
|
|
94
80
|
});
|
|
95
81
|
if (!foundItemByFile) {
|
|
@@ -102,8 +88,8 @@ export default async function findItemByPath(
|
|
|
102
88
|
throw new Error(
|
|
103
89
|
getActionText(
|
|
104
90
|
isTranslate,
|
|
105
|
-
"Please provide a doc-path parameter to specify which document to {action}"
|
|
106
|
-
)
|
|
91
|
+
"Please provide a doc-path parameter to specify which document to {action}",
|
|
92
|
+
),
|
|
107
93
|
);
|
|
108
94
|
}
|
|
109
95
|
}
|
|
@@ -121,18 +107,14 @@ export default async function findItemByPath(
|
|
|
121
107
|
// Find item by comparing flattened paths
|
|
122
108
|
foundItem = structurePlanResult.find((item) => {
|
|
123
109
|
// Convert item.path to flattened format (replace / with -)
|
|
124
|
-
const itemFlattenedPath = item.path
|
|
125
|
-
.replace(/^\//, "")
|
|
126
|
-
.replace(/\//g, "-");
|
|
110
|
+
const itemFlattenedPath = item.path.replace(/^\//, "").replace(/\//g, "-");
|
|
127
111
|
return itemFlattenedPath === flattenedPath;
|
|
128
112
|
});
|
|
129
113
|
}
|
|
130
114
|
}
|
|
131
115
|
|
|
132
116
|
if (!foundItem) {
|
|
133
|
-
throw new Error(
|
|
134
|
-
`Item with path "${docPath}" not found in structurePlanResult`
|
|
135
|
-
);
|
|
117
|
+
throw new Error(`Item with path "${docPath}" not found in structurePlanResult`);
|
|
136
118
|
}
|
|
137
119
|
|
|
138
120
|
// Prompt for feedback if not provided
|
|
@@ -140,7 +122,7 @@ export default async function findItemByPath(
|
|
|
140
122
|
if (!userFeedback) {
|
|
141
123
|
const feedbackMessage = getActionText(
|
|
142
124
|
isTranslate,
|
|
143
|
-
"Please provide feedback for the {action} (press Enter to skip):"
|
|
125
|
+
"Please provide feedback for the {action} (press Enter to skip):",
|
|
144
126
|
);
|
|
145
127
|
|
|
146
128
|
userFeedback = await options.prompts.input({
|
|
@@ -1,16 +1,8 @@
|
|
|
1
1
|
import { mkdir, readFile, writeFile } from "node:fs/promises";
|
|
2
2
|
import { dirname, join } from "node:path";
|
|
3
3
|
import chalk from "chalk";
|
|
4
|
-
import {
|
|
5
|
-
|
|
6
|
-
SUPPORTED_LANGUAGES,
|
|
7
|
-
TARGET_AUDIENCES,
|
|
8
|
-
} from "../utils/constants.mjs";
|
|
9
|
-
import {
|
|
10
|
-
getAvailablePaths,
|
|
11
|
-
getProjectInfo,
|
|
12
|
-
validatePath,
|
|
13
|
-
} from "../utils/utils.mjs";
|
|
4
|
+
import { DOCUMENT_STYLES, SUPPORTED_LANGUAGES, TARGET_AUDIENCES } from "../utils/constants.mjs";
|
|
5
|
+
import { getAvailablePaths, getProjectInfo, validatePath } from "../utils/utils.mjs";
|
|
14
6
|
|
|
15
7
|
// UI constants
|
|
16
8
|
const _PRESS_ENTER_TO_FINISH = "Press Enter to finish";
|
|
@@ -23,12 +15,8 @@ const _PRESS_ENTER_TO_FINISH = "Press Enter to finish";
|
|
|
23
15
|
* @returns {Promise<Object>}
|
|
24
16
|
*/
|
|
25
17
|
export default async function init(
|
|
26
|
-
{
|
|
27
|
-
|
|
28
|
-
fileName = "config.yaml",
|
|
29
|
-
skipIfExists = false,
|
|
30
|
-
},
|
|
31
|
-
options
|
|
18
|
+
{ outputPath = ".aigne/doc-smith", fileName = "config.yaml", skipIfExists = false },
|
|
19
|
+
options,
|
|
32
20
|
) {
|
|
33
21
|
if (skipIfExists) {
|
|
34
22
|
const filePath = join(outputPath, fileName);
|
|
@@ -104,7 +92,7 @@ export default async function init(
|
|
|
104
92
|
// 4. Translation languages
|
|
105
93
|
// Filter out the primary language from available choices
|
|
106
94
|
const availableTranslationLanguages = SUPPORTED_LANGUAGES.filter(
|
|
107
|
-
(lang) => lang.code !== primaryLanguageChoice
|
|
95
|
+
(lang) => lang.code !== primaryLanguageChoice,
|
|
108
96
|
);
|
|
109
97
|
|
|
110
98
|
const translateLanguageChoices = await options.prompts.checkbox({
|
|
@@ -154,11 +142,7 @@ export default async function init(
|
|
|
154
142
|
});
|
|
155
143
|
|
|
156
144
|
// Check if user chose to exit
|
|
157
|
-
if (
|
|
158
|
-
!selectedPath ||
|
|
159
|
-
selectedPath.trim() === "" ||
|
|
160
|
-
selectedPath === "Press Enter to finish"
|
|
161
|
-
) {
|
|
145
|
+
if (!selectedPath || selectedPath.trim() === "" || selectedPath === "Press Enter to finish") {
|
|
162
146
|
break;
|
|
163
147
|
}
|
|
164
148
|
|
|
@@ -207,13 +191,9 @@ export default async function init(
|
|
|
207
191
|
console.log(chalk.cyan("---"));
|
|
208
192
|
console.log(chalk.cyan(yamlContent));
|
|
209
193
|
console.log(chalk.cyan("---"));
|
|
194
|
+
console.log("💡 You can edit the configuration file anytime to modify settings.\n");
|
|
210
195
|
console.log(
|
|
211
|
-
|
|
212
|
-
);
|
|
213
|
-
console.log(
|
|
214
|
-
`🚀 Run ${chalk.cyan(
|
|
215
|
-
"'aigne doc generate'"
|
|
216
|
-
)} to start documentation generation!\n`
|
|
196
|
+
`🚀 Run ${chalk.cyan("'aigne doc generate'")} to start documentation generation!\n`,
|
|
217
197
|
);
|
|
218
198
|
|
|
219
199
|
return {};
|
|
@@ -284,5 +264,4 @@ function generateYAML(input) {
|
|
|
284
264
|
return yaml;
|
|
285
265
|
}
|
|
286
266
|
|
|
287
|
-
init.description =
|
|
288
|
-
"Generate a configuration file for the documentation generation process";
|
|
267
|
+
init.description = "Generate a configuration file for the documentation generation process";
|
package/agents/load-config.mjs
CHANGED
|
@@ -2,7 +2,7 @@ import fs from "node:fs/promises";
|
|
|
2
2
|
import path from "node:path";
|
|
3
3
|
import { parse } from "yaml";
|
|
4
4
|
|
|
5
|
-
export default async function loadConfig({ config }) {
|
|
5
|
+
export default async function loadConfig({ config, appUrl }) {
|
|
6
6
|
const configPath = path.join(process.cwd(), config);
|
|
7
7
|
|
|
8
8
|
try {
|
|
@@ -18,6 +18,11 @@ export default async function loadConfig({ config }) {
|
|
|
18
18
|
// Read and parse YAML file
|
|
19
19
|
const configContent = await fs.readFile(configPath, "utf-8");
|
|
20
20
|
const parsedConfig = parse(configContent);
|
|
21
|
+
|
|
22
|
+
if (appUrl) {
|
|
23
|
+
parsedConfig.appUrl = appUrl;
|
|
24
|
+
}
|
|
25
|
+
|
|
21
26
|
return {
|
|
22
27
|
nodeName: "Section",
|
|
23
28
|
locale: "en",
|
|
@@ -40,5 +45,9 @@ loadConfig.input_schema = {
|
|
|
40
45
|
type: "string",
|
|
41
46
|
default: "./.aigne/doc-smith/config.yaml",
|
|
42
47
|
},
|
|
48
|
+
appUrl: {
|
|
49
|
+
type: "string",
|
|
50
|
+
description: "Application URL to override config",
|
|
51
|
+
},
|
|
43
52
|
},
|
|
44
53
|
};
|
package/agents/load-sources.mjs
CHANGED
|
@@ -1,87 +1,9 @@
|
|
|
1
1
|
import { access, readFile, stat } from "node:fs/promises";
|
|
2
2
|
import path from "node:path";
|
|
3
|
-
import { glob } from "glob";
|
|
4
3
|
import { DEFAULT_EXCLUDE_PATTERNS, DEFAULT_INCLUDE_PATTERNS } from "../utils/constants.mjs";
|
|
4
|
+
import { getFilesWithGlob, loadGitignore } from "../utils/file-utils.mjs";
|
|
5
5
|
import { getCurrentGitHead, getModifiedFilesBetweenCommits } from "../utils/utils.mjs";
|
|
6
6
|
|
|
7
|
-
/**
|
|
8
|
-
* Load .gitignore patterns from a directory
|
|
9
|
-
* @param {string} dir - Directory path
|
|
10
|
-
* @returns {object|null} Ignore instance or null if no .gitignore found
|
|
11
|
-
*/
|
|
12
|
-
async function loadGitignore(dir) {
|
|
13
|
-
const gitignorePath = path.join(dir, ".gitignore");
|
|
14
|
-
try {
|
|
15
|
-
await access(gitignorePath);
|
|
16
|
-
const gitignoreContent = await readFile(gitignorePath, "utf8");
|
|
17
|
-
// Create ignore patterns from .gitignore content
|
|
18
|
-
const ignorePatterns = gitignoreContent
|
|
19
|
-
.split("\n")
|
|
20
|
-
.map((line) => line.trim())
|
|
21
|
-
.filter((line) => line && !line.startsWith("#"))
|
|
22
|
-
.map((line) => line.replace(/^\//, "")); // Remove leading slash
|
|
23
|
-
|
|
24
|
-
return ignorePatterns.length > 0 ? ignorePatterns : null;
|
|
25
|
-
} catch {
|
|
26
|
-
// .gitignore file doesn't exist
|
|
27
|
-
return null;
|
|
28
|
-
}
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
/**
|
|
32
|
-
* Get files using glob patterns
|
|
33
|
-
* @param {string} dir - Directory to search
|
|
34
|
-
* @param {string[]} includePatterns - Include patterns
|
|
35
|
-
* @param {string[]} excludePatterns - Exclude patterns
|
|
36
|
-
* @param {string[]} gitignorePatterns - .gitignore patterns
|
|
37
|
-
* @returns {Promise<string[]>} Array of file paths
|
|
38
|
-
*/
|
|
39
|
-
async function getFilesWithGlob(dir, includePatterns, excludePatterns, gitignorePatterns) {
|
|
40
|
-
// Prepare all ignore patterns
|
|
41
|
-
const allIgnorePatterns = [];
|
|
42
|
-
|
|
43
|
-
if (excludePatterns) {
|
|
44
|
-
allIgnorePatterns.push(...excludePatterns);
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
if (gitignorePatterns) {
|
|
48
|
-
allIgnorePatterns.push(...gitignorePatterns);
|
|
49
|
-
}
|
|
50
|
-
|
|
51
|
-
// Add default exclusions if not already present
|
|
52
|
-
const defaultExclusions = ["node_modules/**", "test/**", "temp/**"];
|
|
53
|
-
for (const exclusion of defaultExclusions) {
|
|
54
|
-
if (!allIgnorePatterns.includes(exclusion)) {
|
|
55
|
-
allIgnorePatterns.push(exclusion);
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
// Convert patterns to be relative to the directory
|
|
60
|
-
const patterns = includePatterns.map((pattern) => {
|
|
61
|
-
// If pattern doesn't start with / or **, make it relative to dir
|
|
62
|
-
if (!pattern.startsWith("/") && !pattern.startsWith("**")) {
|
|
63
|
-
return `**/${pattern}`; // Use ** to search recursively
|
|
64
|
-
}
|
|
65
|
-
return pattern;
|
|
66
|
-
});
|
|
67
|
-
|
|
68
|
-
try {
|
|
69
|
-
const files = await glob(patterns, {
|
|
70
|
-
cwd: dir,
|
|
71
|
-
ignore: allIgnorePatterns.length > 0 ? allIgnorePatterns : undefined,
|
|
72
|
-
absolute: true,
|
|
73
|
-
nodir: true, // Only return files, not directories
|
|
74
|
-
dot: false, // Don't include dot files by default
|
|
75
|
-
gitignore: true, // Enable .gitignore support
|
|
76
|
-
});
|
|
77
|
-
|
|
78
|
-
return files;
|
|
79
|
-
} catch (error) {
|
|
80
|
-
console.warn(`Warning: Error during glob search in ${dir}: ${error.message}`);
|
|
81
|
-
return [];
|
|
82
|
-
}
|
|
83
|
-
}
|
|
84
|
-
|
|
85
7
|
export default async function loadSources({
|
|
86
8
|
sources = [],
|
|
87
9
|
sourcesPath = [],
|
|
@@ -242,6 +164,21 @@ export default async function loadSources({
|
|
|
242
164
|
}
|
|
243
165
|
}
|
|
244
166
|
|
|
167
|
+
// Count words and lines in allSources
|
|
168
|
+
let totalWords = 0;
|
|
169
|
+
let totalLines = 0;
|
|
170
|
+
|
|
171
|
+
for (const source of Object.values(allSources)) {
|
|
172
|
+
if (typeof source === "string") {
|
|
173
|
+
// Count English words (simple regex for words containing a-zA-Z)
|
|
174
|
+
const words = source.match(/[a-zA-Z]+/g) || [];
|
|
175
|
+
totalWords += words.length;
|
|
176
|
+
|
|
177
|
+
// Count lines (excluding empty lines)
|
|
178
|
+
totalLines += source.split("\n").filter((line) => line.trim() !== "").length;
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
|
|
245
182
|
return {
|
|
246
183
|
datasourcesList: sourceFiles,
|
|
247
184
|
datasources: allSources,
|
|
@@ -249,6 +186,8 @@ export default async function loadSources({
|
|
|
249
186
|
originalStructurePlan,
|
|
250
187
|
files,
|
|
251
188
|
modifiedFiles,
|
|
189
|
+
totalWords,
|
|
190
|
+
totalLines,
|
|
252
191
|
};
|
|
253
192
|
}
|
|
254
193
|
|
package/agents/publish-docs.mjs
CHANGED
|
@@ -1,106 +1,13 @@
|
|
|
1
|
-
import { existsSync, mkdirSync } from "node:fs";
|
|
2
|
-
import { readFile, writeFile } from "node:fs/promises";
|
|
3
|
-
import { homedir } from "node:os";
|
|
4
1
|
import { basename, join } from "node:path";
|
|
5
|
-
import { createConnect } from "@aigne/aigne-hub";
|
|
6
2
|
import { publishDocs as publishDocsFn } from "@aigne/publish-docs";
|
|
7
|
-
import
|
|
8
|
-
import { joinURL } from "ufo";
|
|
9
|
-
import { parse, stringify } from "yaml";
|
|
3
|
+
import { getAccessToken } from "../utils/auth-utils.mjs";
|
|
10
4
|
import { loadConfigFromFile, saveValueToConfig } from "../utils/utils.mjs";
|
|
11
5
|
|
|
12
|
-
const WELLKNOWN_SERVICE_PATH_PREFIX = "/.well-known/service";
|
|
13
6
|
const DEFAULT_APP_URL = "https://docsmith.aigne.io";
|
|
14
7
|
|
|
15
|
-
/**
|
|
16
|
-
* Get access token from environment, config file, or prompt user for authorization
|
|
17
|
-
* @param {string} appUrl - The application URL
|
|
18
|
-
* @returns {Promise<string>} - The access token
|
|
19
|
-
*/
|
|
20
|
-
async function getAccessToken(appUrl) {
|
|
21
|
-
const DOC_SMITH_ENV_FILE = join(
|
|
22
|
-
homedir(),
|
|
23
|
-
".aigne",
|
|
24
|
-
"doc-smith-connected.yaml"
|
|
25
|
-
);
|
|
26
|
-
const { hostname } = new URL(appUrl);
|
|
27
|
-
|
|
28
|
-
let accessToken = process.env.DOC_DISCUSS_KIT_ACCESS_TOKEN;
|
|
29
|
-
|
|
30
|
-
// Check if access token exists in environment or config file
|
|
31
|
-
if (!accessToken) {
|
|
32
|
-
try {
|
|
33
|
-
if (existsSync(DOC_SMITH_ENV_FILE)) {
|
|
34
|
-
const data = await readFile(DOC_SMITH_ENV_FILE, "utf8");
|
|
35
|
-
if (data.includes("DOC_DISCUSS_KIT_ACCESS_TOKEN")) {
|
|
36
|
-
const envs = parse(data);
|
|
37
|
-
if (envs[hostname]?.DOC_DISCUSS_KIT_ACCESS_TOKEN) {
|
|
38
|
-
accessToken = envs[hostname].DOC_DISCUSS_KIT_ACCESS_TOKEN;
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
}
|
|
42
|
-
} catch (error) {
|
|
43
|
-
console.warn("Failed to read config file:", error.message);
|
|
44
|
-
}
|
|
45
|
-
}
|
|
46
|
-
|
|
47
|
-
// If still no access token, prompt user to authorize
|
|
48
|
-
if (!accessToken) {
|
|
49
|
-
const DISCUSS_KIT_URL = appUrl;
|
|
50
|
-
const connectUrl = joinURL(
|
|
51
|
-
new URL(DISCUSS_KIT_URL).origin,
|
|
52
|
-
WELLKNOWN_SERVICE_PATH_PREFIX
|
|
53
|
-
);
|
|
54
|
-
|
|
55
|
-
try {
|
|
56
|
-
const result = await createConnect({
|
|
57
|
-
connectUrl: connectUrl,
|
|
58
|
-
connectAction: "gen-simple-access-key",
|
|
59
|
-
source: `AIGNE DocSmith connect to Discuss Kit`,
|
|
60
|
-
closeOnSuccess: true,
|
|
61
|
-
appName: "AIGNE DocSmith",
|
|
62
|
-
appLogo:
|
|
63
|
-
"https://www.aigne.io/image-bin/uploads/a7910a71364ee15a27e86f869ad59009.svg",
|
|
64
|
-
openPage: (pageUrl) => open(pageUrl),
|
|
65
|
-
});
|
|
66
|
-
|
|
67
|
-
accessToken = result.accessKeySecret;
|
|
68
|
-
process.env.DOC_DISCUSS_KIT_ACCESS_TOKEN = accessToken;
|
|
69
|
-
|
|
70
|
-
// Save the access token to config file
|
|
71
|
-
const aigneDir = join(homedir(), ".aigne");
|
|
72
|
-
if (!existsSync(aigneDir)) {
|
|
73
|
-
mkdirSync(aigneDir, { recursive: true });
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
const existingConfig = existsSync(DOC_SMITH_ENV_FILE)
|
|
77
|
-
? parse(await readFile(DOC_SMITH_ENV_FILE, "utf8"))
|
|
78
|
-
: {};
|
|
79
|
-
|
|
80
|
-
await writeFile(
|
|
81
|
-
DOC_SMITH_ENV_FILE,
|
|
82
|
-
stringify({
|
|
83
|
-
...existingConfig,
|
|
84
|
-
[hostname]: {
|
|
85
|
-
DOC_DISCUSS_KIT_ACCESS_TOKEN: accessToken,
|
|
86
|
-
DOC_DISCUSS_KIT_URL: DISCUSS_KIT_URL,
|
|
87
|
-
},
|
|
88
|
-
})
|
|
89
|
-
);
|
|
90
|
-
} catch (error) {
|
|
91
|
-
console.error("Failed to get access token:", error);
|
|
92
|
-
throw new Error(
|
|
93
|
-
"Failed to obtain access token. Please check your network connection and try again later."
|
|
94
|
-
);
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
|
|
98
|
-
return accessToken;
|
|
99
|
-
}
|
|
100
|
-
|
|
101
8
|
export default async function publishDocs(
|
|
102
9
|
{ docsDir, appUrl, boardId, projectName, projectDesc, projectLogo },
|
|
103
|
-
options
|
|
10
|
+
options,
|
|
104
11
|
) {
|
|
105
12
|
// Check if DOC_DISCUSS_KIT_URL is set in environment variables
|
|
106
13
|
const envAppUrl = process.env.DOC_DISCUSS_KIT_URL;
|
|
@@ -184,7 +91,7 @@ export default async function publishDocs(
|
|
|
184
91
|
await saveValueToConfig(
|
|
185
92
|
"boardId",
|
|
186
93
|
newBoardId,
|
|
187
|
-
"⚠️ Warning: boardId is auto-generated by system, please do not edit manually"
|
|
94
|
+
"⚠️ Warning: boardId is auto-generated by system, please do not edit manually",
|
|
188
95
|
);
|
|
189
96
|
}
|
|
190
97
|
}
|
|
@@ -10,10 +10,9 @@ skills:
|
|
|
10
10
|
skipIfExists: true
|
|
11
11
|
- load-config.mjs
|
|
12
12
|
- publish-docs.mjs
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
# default: ./.aigne/doc-smith/config.yaml
|
|
13
|
+
input_schema:
|
|
14
|
+
type: object
|
|
15
|
+
properties:
|
|
16
|
+
appUrl:
|
|
17
|
+
type: string
|
|
18
|
+
description: target website URL where the documentation will be published
|
package/package.json
CHANGED
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { existsSync, mkdirSync } from "node:fs";
|
|
2
|
+
import { readFile, writeFile } from "node:fs/promises";
|
|
3
|
+
import { homedir } from "node:os";
|
|
4
|
+
import { join } from "node:path";
|
|
5
|
+
import { createConnect } from "@aigne/aigne-hub";
|
|
6
|
+
import open from "open";
|
|
7
|
+
import { joinURL } from "ufo";
|
|
8
|
+
import { parse, stringify } from "yaml";
|
|
9
|
+
import { getComponentMountPoint } from "./blocklet.mjs";
|
|
10
|
+
import { DISCUSS_KIT_DID } from "./constants.mjs";
|
|
11
|
+
|
|
12
|
+
const WELLKNOWN_SERVICE_PATH_PREFIX = "/.well-known/service";
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Get access token from environment, config file, or prompt user for authorization
|
|
16
|
+
* @param {string} appUrl - The application URL
|
|
17
|
+
* @returns {Promise<string>} - The access token
|
|
18
|
+
*/
|
|
19
|
+
export async function getAccessToken(appUrl) {
|
|
20
|
+
const DOC_SMITH_ENV_FILE = join(homedir(), ".aigne", "doc-smith-connected.yaml");
|
|
21
|
+
const { hostname } = new URL(appUrl);
|
|
22
|
+
|
|
23
|
+
let accessToken = process.env.DOC_DISCUSS_KIT_ACCESS_TOKEN;
|
|
24
|
+
|
|
25
|
+
// Check if access token exists in environment or config file
|
|
26
|
+
if (!accessToken) {
|
|
27
|
+
try {
|
|
28
|
+
if (existsSync(DOC_SMITH_ENV_FILE)) {
|
|
29
|
+
const data = await readFile(DOC_SMITH_ENV_FILE, "utf8");
|
|
30
|
+
if (data.includes("DOC_DISCUSS_KIT_ACCESS_TOKEN")) {
|
|
31
|
+
const envs = parse(data);
|
|
32
|
+
if (envs[hostname]?.DOC_DISCUSS_KIT_ACCESS_TOKEN) {
|
|
33
|
+
accessToken = envs[hostname].DOC_DISCUSS_KIT_ACCESS_TOKEN;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
} catch (error) {
|
|
38
|
+
console.warn("Failed to read config file:", error.message);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// If still no access token, prompt user to authorize
|
|
43
|
+
if (accessToken) {
|
|
44
|
+
return accessToken;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// Check if Discuss Kit is running at the provided URL
|
|
48
|
+
try {
|
|
49
|
+
await getComponentMountPoint(appUrl, DISCUSS_KIT_DID);
|
|
50
|
+
} catch {
|
|
51
|
+
throw new Error(
|
|
52
|
+
`Unable to find Discuss Kit running at the provided URL: ${appUrl}\n\n` +
|
|
53
|
+
"Please ensure that:\n" +
|
|
54
|
+
"• The URL is correct and accessible\n" +
|
|
55
|
+
"• Discuss Kit is properly installed and running\n" +
|
|
56
|
+
"If you continue to experience issues, please verify your Discuss Kit installation.",
|
|
57
|
+
);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const DISCUSS_KIT_URL = appUrl;
|
|
61
|
+
const connectUrl = joinURL(new URL(DISCUSS_KIT_URL).origin, WELLKNOWN_SERVICE_PATH_PREFIX);
|
|
62
|
+
|
|
63
|
+
try {
|
|
64
|
+
const result = await createConnect({
|
|
65
|
+
connectUrl: connectUrl,
|
|
66
|
+
connectAction: "gen-simple-access-key",
|
|
67
|
+
source: `AIGNE DocSmith connect to Discuss Kit`,
|
|
68
|
+
closeOnSuccess: true,
|
|
69
|
+
appName: "AIGNE DocSmith",
|
|
70
|
+
appLogo: "https://www.aigne.io/image-bin/uploads/a7910a71364ee15a27e86f869ad59009.svg",
|
|
71
|
+
openPage: (pageUrl) => open(pageUrl),
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
accessToken = result.accessKeySecret;
|
|
75
|
+
process.env.DOC_DISCUSS_KIT_ACCESS_TOKEN = accessToken;
|
|
76
|
+
|
|
77
|
+
// Save the access token to config file
|
|
78
|
+
const aigneDir = join(homedir(), ".aigne");
|
|
79
|
+
if (!existsSync(aigneDir)) {
|
|
80
|
+
mkdirSync(aigneDir, { recursive: true });
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
const existingConfig = existsSync(DOC_SMITH_ENV_FILE)
|
|
84
|
+
? parse(await readFile(DOC_SMITH_ENV_FILE, "utf8"))
|
|
85
|
+
: {};
|
|
86
|
+
|
|
87
|
+
await writeFile(
|
|
88
|
+
DOC_SMITH_ENV_FILE,
|
|
89
|
+
stringify({
|
|
90
|
+
...existingConfig,
|
|
91
|
+
[hostname]: {
|
|
92
|
+
DOC_DISCUSS_KIT_ACCESS_TOKEN: accessToken,
|
|
93
|
+
DOC_DISCUSS_KIT_URL: DISCUSS_KIT_URL,
|
|
94
|
+
},
|
|
95
|
+
}),
|
|
96
|
+
);
|
|
97
|
+
} catch (error) {
|
|
98
|
+
console.debug(error);
|
|
99
|
+
throw new Error(
|
|
100
|
+
"Failed to obtain access token. Please check your network connection and try again later.",
|
|
101
|
+
);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
return accessToken;
|
|
105
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
export async function getComponentMountPoint(appUrl, did) {
|
|
2
|
+
const url = new URL(appUrl);
|
|
3
|
+
const blockletJsUrl = `${url.origin}/__blocklet__.js?type=json`;
|
|
4
|
+
|
|
5
|
+
const blockletJs = await fetch(blockletJsUrl, {
|
|
6
|
+
method: "GET",
|
|
7
|
+
headers: {
|
|
8
|
+
Accept: "application/json",
|
|
9
|
+
},
|
|
10
|
+
});
|
|
11
|
+
|
|
12
|
+
if (!blockletJs.ok) {
|
|
13
|
+
throw new Error(
|
|
14
|
+
`Failed to fetch blocklet json: ${blockletJs.status} ${blockletJs.statusText}, ${blockletJsUrl}`,
|
|
15
|
+
);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
const config = await blockletJs.json();
|
|
19
|
+
const component = config.componentMountPoints.find((component) => component.did === did);
|
|
20
|
+
if (!component) {
|
|
21
|
+
throw new Error(`Component ${did} not found in blocklet: ${appUrl}`);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
return component.mountPoint;
|
|
25
|
+
}
|
package/utils/constants.mjs
CHANGED
|
@@ -1,25 +1,89 @@
|
|
|
1
1
|
// Default file patterns for inclusion and exclusion
|
|
2
2
|
export const DEFAULT_INCLUDE_PATTERNS = [
|
|
3
|
+
// Python
|
|
3
4
|
"*.py",
|
|
5
|
+
"*.pyi",
|
|
6
|
+
"*.pyx",
|
|
7
|
+
// JavaScript/TypeScript
|
|
4
8
|
"*.js",
|
|
5
9
|
"*.jsx",
|
|
6
10
|
"*.ts",
|
|
7
11
|
"*.tsx",
|
|
8
|
-
|
|
9
|
-
"*.java",
|
|
10
|
-
"*.pyi",
|
|
11
|
-
"*.pyx",
|
|
12
|
+
// C/C++
|
|
12
13
|
"*.c",
|
|
13
14
|
"*.cc",
|
|
14
15
|
"*.cpp",
|
|
16
|
+
"*.cxx",
|
|
17
|
+
"*.c++",
|
|
15
18
|
"*.h",
|
|
19
|
+
"*.hpp",
|
|
20
|
+
"*.hxx",
|
|
21
|
+
"*.h++",
|
|
22
|
+
// JVM Languages
|
|
23
|
+
"*.java",
|
|
24
|
+
"*.kt",
|
|
25
|
+
"*.scala",
|
|
26
|
+
"*.groovy",
|
|
27
|
+
"*.gvy",
|
|
28
|
+
"*.gy",
|
|
29
|
+
"*.gsh",
|
|
30
|
+
"*.clj",
|
|
31
|
+
"*.cljs",
|
|
32
|
+
"*.cljx",
|
|
33
|
+
// .NET Languages
|
|
34
|
+
"*.cs",
|
|
35
|
+
"*.vb",
|
|
36
|
+
"*.fs",
|
|
37
|
+
// Functional Languages
|
|
38
|
+
"*.f",
|
|
39
|
+
"*.ml",
|
|
40
|
+
"*.sml",
|
|
41
|
+
"*.lisp",
|
|
42
|
+
"*.lsp",
|
|
43
|
+
"*.cl",
|
|
44
|
+
// Systems Programming
|
|
45
|
+
"*.rs",
|
|
46
|
+
"*.go",
|
|
47
|
+
"*.nim",
|
|
48
|
+
"*.asm",
|
|
49
|
+
"*.s",
|
|
50
|
+
// Web Technologies
|
|
51
|
+
"*.html",
|
|
52
|
+
"*.htm",
|
|
53
|
+
"*.css",
|
|
54
|
+
"*.php",
|
|
55
|
+
// Scripting Languages
|
|
56
|
+
"*.rb",
|
|
57
|
+
"*.pl",
|
|
58
|
+
"*.ps1",
|
|
59
|
+
"*.lua",
|
|
60
|
+
"*.tcl",
|
|
61
|
+
// Mobile/Modern Languages
|
|
62
|
+
"*.swift",
|
|
63
|
+
"*.dart",
|
|
64
|
+
"*.ex",
|
|
65
|
+
"*.exs",
|
|
66
|
+
"*.erl",
|
|
67
|
+
"*.jl",
|
|
68
|
+
// Data Science
|
|
69
|
+
"*.r",
|
|
70
|
+
"*.R",
|
|
71
|
+
"*.m",
|
|
72
|
+
// Other Languages
|
|
73
|
+
"*.pas",
|
|
74
|
+
"*.cob",
|
|
75
|
+
"*.cbl",
|
|
76
|
+
"*.pro",
|
|
77
|
+
"*.prolog",
|
|
78
|
+
"*.sql",
|
|
79
|
+
// Documentation & Config
|
|
16
80
|
"*.md",
|
|
17
81
|
"*.rst",
|
|
18
82
|
"*.json",
|
|
19
|
-
"*Dockerfile",
|
|
20
|
-
"*Makefile",
|
|
21
83
|
"*.yaml",
|
|
22
84
|
"*.yml",
|
|
85
|
+
"*Dockerfile",
|
|
86
|
+
"*Makefile",
|
|
23
87
|
];
|
|
24
88
|
|
|
25
89
|
export const DEFAULT_EXCLUDE_PATTERNS = [
|
|
@@ -78,14 +142,14 @@ export const SUPPORTED_LANGUAGES = [
|
|
|
78
142
|
|
|
79
143
|
// Predefined document generation styles
|
|
80
144
|
export const DOCUMENT_STYLES = {
|
|
81
|
-
developerDocs: {
|
|
82
|
-
name: "Developer Docs",
|
|
83
|
-
rules: "Steps-first; copy-paste examples; minimal context; active 'you'.",
|
|
84
|
-
},
|
|
85
145
|
userGuide: {
|
|
86
146
|
name: "User Guide",
|
|
87
147
|
rules: "Scenario-based; step-by-step; plain language; outcomes & cautions.",
|
|
88
148
|
},
|
|
149
|
+
developerDocs: {
|
|
150
|
+
name: "Developer Docs",
|
|
151
|
+
rules: "Steps-first; copy-paste examples; minimal context; active 'you'.",
|
|
152
|
+
},
|
|
89
153
|
apiReference: {
|
|
90
154
|
name: "API Reference",
|
|
91
155
|
rules: "Exact & skimmable; schema-first; clear params/errors/examples.",
|
|
@@ -98,8 +162,11 @@ export const DOCUMENT_STYLES = {
|
|
|
98
162
|
|
|
99
163
|
// Predefined target audiences
|
|
100
164
|
export const TARGET_AUDIENCES = {
|
|
165
|
+
generalUsers: "General Users",
|
|
101
166
|
actionFirst: "Developers, Implementation Engineers, DevOps",
|
|
102
167
|
conceptFirst: "Architects, Technical Leads, Developers interested in principles",
|
|
103
|
-
generalUsers: "General Users",
|
|
104
168
|
custom: "Enter your own target audience",
|
|
105
169
|
};
|
|
170
|
+
|
|
171
|
+
// Component mount point ID for Discuss Kit
|
|
172
|
+
export const DISCUSS_KIT_DID = "z8ia1WEiBZ7hxURf6LwH21Wpg99vophFwSJdu";
|
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
import { access, readFile } from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { glob } from "glob";
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Check if a directory is inside a git repository using git command
|
|
8
|
+
* @param {string} dir - Directory path to check
|
|
9
|
+
* @returns {boolean} True if inside a git repository
|
|
10
|
+
*/
|
|
11
|
+
function isInGitRepository(dir) {
  // Run git from the target directory; pipe output so nothing leaks to the console.
  const gitOptions = { cwd: dir, stdio: "pipe", encoding: "utf8" };
  try {
    execSync("git rev-parse --is-inside-work-tree", gitOptions);
  } catch {
    // git missing, or `dir` is not inside a work tree — either way: not a repo.
    return false;
  }
  return true;
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Find git repository root directory using git command
|
|
26
|
+
* @param {string} startDir - Starting directory path
|
|
27
|
+
* @returns {string|null} Git repository root path or null if not found
|
|
28
|
+
*/
|
|
29
|
+
function findGitRoot(startDir) {
  // Run git from the starting directory; pipe output so nothing leaks to the console.
  const gitOptions = { cwd: startDir, stdio: "pipe", encoding: "utf8" };
  try {
    const output = execSync("git rev-parse --show-toplevel", gitOptions);
    // git prints the toplevel path followed by a newline; strip it.
    return output.trim();
  } catch {
    // git missing, or startDir is not inside a repository.
    return null;
  }
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Convert gitignore patterns to glob-compatible patterns
|
|
44
|
+
* @param {string} pattern - A single gitignore pattern
|
|
45
|
+
* @returns {string[]} Array of glob patterns that match gitignore behavior
|
|
46
|
+
*/
|
|
47
|
+
function gitignoreToGlobPatterns(pattern) {
  // Leading slashes were already stripped by the caller, but guard here as well.
  const cleanPattern = pattern.replace(/^\//, "");
  const hasWildcard = cleanPattern.includes("*") || cleanPattern.includes("?");

  if (cleanPattern.endsWith("/")) {
    // Directory-only pattern: match everything beneath it, at any depth.
    const dirPattern = cleanPattern.slice(0, -1);
    return [`${dirPattern}/**`, `**/${dirPattern}/**`];
  }

  if (!hasWildcard) {
    // Bare name: could be a file or a directory, top-level or nested.
    return [
      cleanPattern, // Exact match
      `${cleanPattern}/**`, // Directory contents
      `**/${cleanPattern}`, // Nested exact match
      `**/${cleanPattern}/**`, // Nested directory contents
    ];
  }

  // Wildcard pattern or explicit file glob: also match it at any depth,
  // unless it is already rooted with a globstar.
  return cleanPattern.startsWith("**/")
    ? [cleanPattern]
    : [cleanPattern, `**/${cleanPattern}`];
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Parse .gitignore content into patterns
|
|
79
|
+
* @param {string} content - .gitignore file content
|
|
80
|
+
* @returns {string[]} Array of ignore patterns converted to glob format
|
|
81
|
+
*/
|
|
82
|
+
function parseGitignoreContent(content) {
  const lines = content
    .split("\n")
    .map((line) => line.trim()) // trim also removes a trailing \r from CRLF files
    // Skip blanks, comments, and negation patterns: `!pattern` means
    // "re-include" in git semantics, and a leading `!` also negates
    // minimatch-style glob patterns, so passing one through as an *ignore*
    // pattern would invert the author's intent.
    .filter((line) => line && !line.startsWith("#") && !line.startsWith("!"))
    .map((line) => line.replace(/^\//, "")); // Remove leading slash

  // Convert each gitignore pattern to glob patterns
  const allPatterns = [];
  for (const line of lines) {
    allPatterns.push(...gitignoreToGlobPatterns(line));
  }

  return [...new Set(allPatterns)]; // Remove duplicates
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Load .gitignore patterns from multiple directories (current + all parent directories up to git root)
|
|
100
|
+
* @param {string} dir - Directory path (will search up to find all .gitignore files)
|
|
101
|
+
* @returns {string[]|null} Array of merged ignore patterns or null if no .gitignore found
|
|
102
|
+
*/
|
|
103
|
+
export async function loadGitignore(dir) {
  // First, check if we're in a git repository
  const inGitRepo = isInGitRepository(dir);
  if (!inGitRepo) {
    // Not in a git repository, just check the current directory
    const gitignorePath = path.join(dir, ".gitignore");
    try {
      await access(gitignorePath);
      const gitignoreContent = await readFile(gitignorePath, "utf8");
      const ignorePatterns = parseGitignoreContent(gitignoreContent);
      return ignorePatterns.length > 0 ? ignorePatterns : null;
    } catch {
      // No readable .gitignore here
      return null;
    }
  }

  // We're in a git repository, collect all .gitignore files from current dir to git root
  const rawGitRoot = findGitRoot(dir);
  if (!rawGitRoot) {
    return null;
  }
  // Normalize the git-reported root: `git rev-parse --show-toplevel` prints
  // forward-slash paths (notably on Windows), which would never prefix-match
  // the platform-native result of path.resolve(dir) in the loop below.
  const gitRoot = path.resolve(rawGitRoot);

  const allPatterns = [];
  let currentDir = path.resolve(dir);

  // Walk from the starting directory up to (and including) the git root.
  while (currentDir.startsWith(gitRoot)) {
    const gitignorePath = path.join(currentDir, ".gitignore");
    try {
      await access(gitignorePath);
      const gitignoreContent = await readFile(gitignorePath, "utf8");
      const patterns = parseGitignoreContent(gitignoreContent);

      // Patterns from deeper directories take precedence, so prepend them.
      allPatterns.unshift(...patterns);
    } catch {
      // .gitignore doesn't exist in this directory, continue
    }

    // Stop once the repository root itself has been processed.
    if (currentDir === gitRoot) {
      break;
    }
    currentDir = path.dirname(currentDir);
  }

  return allPatterns.length > 0 ? [...new Set(allPatterns)] : null;
}
|
|
152
|
+
|
|
153
|
+
/**
|
|
154
|
+
* Get files using glob patterns
|
|
155
|
+
* @param {string} dir - Directory to search
|
|
156
|
+
* @param {string[]} includePatterns - Include patterns
|
|
157
|
+
* @param {string[]} excludePatterns - Exclude patterns
|
|
158
|
+
* @param {string[]} gitignorePatterns - .gitignore patterns
|
|
159
|
+
* @returns {Promise<string[]>} Array of file paths
|
|
160
|
+
*/
|
|
161
|
+
export async function getFilesWithGlob(dir, includePatterns, excludePatterns, gitignorePatterns) {
  // Merge user excludes, gitignore-derived patterns, and baked-in defaults.
  const allIgnorePatterns = [];

  if (excludePatterns) {
    allIgnorePatterns.push(...excludePatterns);
  }

  if (gitignorePatterns) {
    allIgnorePatterns.push(...gitignorePatterns);
  }

  // Add default exclusions if not already present
  const defaultExclusions = ["node_modules/**", "test/**", "temp/**"];
  for (const exclusion of defaultExclusions) {
    if (!allIgnorePatterns.includes(exclusion)) {
      allIgnorePatterns.push(exclusion);
    }
  }

  // Convert patterns to be relative to the directory
  const patterns = includePatterns.map((pattern) => {
    // If pattern doesn't start with / or **, make it relative to dir
    if (!pattern.startsWith("/") && !pattern.startsWith("**")) {
      return `**/${pattern}`; // Use ** to search recursively
    }
    return pattern;
  });

  try {
    // NOTE: the `glob` package has no `gitignore` option (that belongs to
    // globby), so the previous `gitignore: true` was silently ignored.
    // .gitignore support comes from the gitignorePatterns argument instead.
    const files = await glob(patterns, {
      cwd: dir,
      ignore: allIgnorePatterns.length > 0 ? allIgnorePatterns : undefined,
      absolute: true,
      nodir: true, // Only return files, not directories
      dot: false, // Don't include dot files by default
    });

    return files;
  } catch (error) {
    // Best-effort: warn and degrade to an empty result rather than throwing.
    console.warn(`Warning: Error during glob search in ${dir}: ${error.message}`);
    return [];
  }
}
|
|
@@ -91,45 +91,34 @@ function checkDeadLinks(markdown, source, allowedLinks, errorMessages) {
|
|
|
91
91
|
/**
|
|
92
92
|
* Check code block content for indentation consistency issues
|
|
93
93
|
* @param {Array} codeBlockContent - Array of {line, lineNumber} objects from the code block
|
|
94
|
+
* @param {number} codeBlockIndent - The indentation of the code block start marker (```)
|
|
94
95
|
* @param {string} source - Source description for error reporting
|
|
95
96
|
* @param {Array} errorMessages - Array to push error messages to
|
|
96
97
|
*/
|
|
97
|
-
function checkCodeBlockIndentation(codeBlockContent, source, errorMessages) {
|
|
98
|
+
function checkCodeBlockIndentation(codeBlockContent, codeBlockIndent, source, errorMessages) {
|
|
98
99
|
if (codeBlockContent.length === 0) return;
|
|
99
100
|
|
|
100
|
-
// Filter out empty lines for
|
|
101
|
+
// Filter out empty lines for analysis
|
|
101
102
|
const nonEmptyLines = codeBlockContent.filter((item) => item.line.trim().length > 0);
|
|
102
103
|
if (nonEmptyLines.length === 0) return;
|
|
103
104
|
|
|
104
|
-
//
|
|
105
|
-
|
|
105
|
+
// The expected base indentation for code block content should match the code block marker
|
|
106
|
+
const expectedBaseIndent = codeBlockIndent;
|
|
106
107
|
const problematicLines = [];
|
|
107
108
|
|
|
108
109
|
for (const item of nonEmptyLines) {
|
|
109
110
|
const { line, lineNumber } = item;
|
|
110
111
|
const match = line.match(/^(\s*)/);
|
|
111
112
|
const currentIndent = match ? match[1].length : 0;
|
|
112
|
-
const trimmedLine = line.trim();
|
|
113
113
|
|
|
114
|
-
//
|
|
115
|
-
if (trimmedLine.startsWith("#") && !trimmedLine.includes("=") && !trimmedLine.includes("{")) {
|
|
116
|
-
continue;
|
|
117
|
-
}
|
|
118
|
-
|
|
119
|
-
// Establish base indentation from the first meaningful line
|
|
120
|
-
if (baseCodeIndent === null) {
|
|
121
|
-
baseCodeIndent = currentIndent;
|
|
122
|
-
continue;
|
|
123
|
-
}
|
|
124
|
-
|
|
125
|
-
// Check if current line has less indentation than the base
|
|
114
|
+
// Check if current line has less indentation than expected
|
|
126
115
|
// This indicates inconsistent indentation that may cause rendering issues
|
|
127
|
-
if (currentIndent <
|
|
116
|
+
if (currentIndent < expectedBaseIndent && expectedBaseIndent > 0) {
|
|
128
117
|
problematicLines.push({
|
|
129
118
|
lineNumber,
|
|
130
119
|
line: line.trimEnd(),
|
|
131
120
|
currentIndent,
|
|
132
|
-
|
|
121
|
+
expectedIndent: expectedBaseIndent,
|
|
133
122
|
});
|
|
134
123
|
}
|
|
135
124
|
}
|
|
@@ -162,7 +151,7 @@ function checkCodeBlockIndentation(codeBlockContent, source, errorMessages) {
|
|
|
162
151
|
? `lines ${group[0].lineNumber}-${group[group.length - 1].lineNumber}`
|
|
163
152
|
: `line ${firstIssue.lineNumber}`;
|
|
164
153
|
|
|
165
|
-
const issue = `
|
|
154
|
+
const issue = `insufficient indentation: ${firstIssue.currentIndent} spaces (expected: ${firstIssue.expectedIndent} spaces)`;
|
|
166
155
|
errorMessages.push(
|
|
167
156
|
`Found code block with inconsistent indentation in ${source} at ${lineNumbers}: ${issue}. This may cause rendering issues`,
|
|
168
157
|
);
|
|
@@ -185,6 +174,7 @@ function checkContentStructure(markdown, source, errorMessages) {
|
|
|
185
174
|
let inAnyCodeBlock = false;
|
|
186
175
|
let anyCodeBlockStartLine = 0;
|
|
187
176
|
let codeBlockContent = [];
|
|
177
|
+
let codeBlockIndent = 0;
|
|
188
178
|
|
|
189
179
|
for (let i = 0; i < lines.length; i++) {
|
|
190
180
|
const line = lines[i];
|
|
@@ -198,13 +188,17 @@ function checkContentStructure(markdown, source, errorMessages) {
|
|
|
198
188
|
anyCodeBlockStartLine = lineNumber;
|
|
199
189
|
inCodeBlock = true;
|
|
200
190
|
codeBlockContent = [];
|
|
191
|
+
|
|
192
|
+
// Capture the indentation of the code block start marker
|
|
193
|
+
const match = line.match(/^(\s*)/);
|
|
194
|
+
codeBlockIndent = match ? match[1].length : 0;
|
|
201
195
|
} else {
|
|
202
196
|
// Ending the code block
|
|
203
197
|
inAnyCodeBlock = false;
|
|
204
198
|
|
|
205
199
|
if (inCodeBlock) {
|
|
206
200
|
// Check code block content for indentation issues
|
|
207
|
-
checkCodeBlockIndentation(codeBlockContent, source, errorMessages);
|
|
201
|
+
checkCodeBlockIndentation(codeBlockContent, codeBlockIndent, source, errorMessages);
|
|
208
202
|
inCodeBlock = false;
|
|
209
203
|
}
|
|
210
204
|
}
|