@aigne/doc-smith 0.1.4 → 0.2.1
This diff shows the changes between two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
- package/.github/PULL_REQUEST_TEMPLATE.md +28 -0
- package/CHANGELOG.md +15 -0
- package/README.md +3 -1
- package/agents/batch-docs-detail-generator.yaml +7 -2
- package/agents/batch-translate.yaml +1 -1
- package/agents/check-detail-result.mjs +52 -41
- package/agents/{check-detail-generated.mjs → check-detail.mjs} +38 -6
- package/agents/check-structure-plan.mjs +72 -0
- package/agents/check-structure-planning-result.yaml +1 -1
- package/agents/content-detail-generator.yaml +1 -1
- package/agents/detail-generator-and-translate.yaml +1 -1
- package/agents/detail-regenerator.yaml +3 -0
- package/agents/docs-generator.yaml +14 -9
- package/agents/find-item-by-path.mjs +84 -5
- package/agents/input-generator.mjs +181 -38
- package/agents/load-config.mjs +1 -0
- package/agents/load-sources.mjs +44 -63
- package/agents/publish-docs.mjs +169 -58
- package/agents/reflective-structure-planner.yaml +1 -1
- package/agents/save-docs.mjs +12 -6
- package/agents/save-single-doc.mjs +1 -1
- package/agents/structure-planning.yaml +1 -1
- package/agents/team-publish-docs.yaml +3 -0
- package/agents/transform-detail-datasources.mjs +19 -5
- package/aigne.yaml +2 -2
- package/package.json +8 -6
- package/prompts/check-structure-planning-result.md +1 -1
- package/prompts/structure-planning.md +1 -1
- package/utils/constants.mjs +105 -0
- package/utils/utils.mjs +544 -0
- package/agents/check-structure-planning.mjs +0 -33
package/utils/utils.mjs
CHANGED
```diff
@@ -1,5 +1,42 @@
 import fs from "node:fs/promises";
 import path from "node:path";
+import { execSync } from "node:child_process";
+import {
+  existsSync,
+  mkdirSync,
+  readdirSync,
+  accessSync,
+  constants,
+  statSync,
+} from "node:fs";
+import { parse } from "yaml";
+import chalk from "chalk";
+import {
+  DEFAULT_INCLUDE_PATTERNS,
+  DEFAULT_EXCLUDE_PATTERNS,
+} from "./constants.mjs";
+
+/**
+ * Normalize path to absolute path for consistent comparison
+ * @param {string} filePath - The path to normalize
+ * @returns {string} - Absolute path
+ */
+export function normalizePath(filePath) {
+  return path.isAbsolute(filePath)
+    ? filePath
+    : path.resolve(process.cwd(), filePath);
+}
+
+/**
+ * Convert path to relative path from current working directory
+ * @param {string} filePath - The path to convert
+ * @returns {string} - Relative path
+ */
+export function toRelativePath(filePath) {
+  return path.isAbsolute(filePath)
+    ? path.relative(process.cwd(), filePath)
+    : filePath;
+}
 
 export function processContent({ content }) {
   // Match markdown regular links [text](link), exclude images
```
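A minimal usage sketch of the two new path helpers. It is illustrative only and not part of the package; the relative import path and the example file name are assumptions.

```js
// Illustrative sketch, not shipped with the package. The import path assumes
// this snippet runs next to the package's utils/ directory.
import { normalizePath, toRelativePath } from "./utils/utils.mjs";

// A relative path is resolved against process.cwd()...
const abs = normalizePath("docs/overview.md"); // hypothetical file
// ...and an absolute path is mapped back to a cwd-relative one.
const rel = toRelativePath(abs);

console.log(abs); // e.g. /home/user/project/docs/overview.md
console.log(rel); // docs/overview.md
```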
```diff
@@ -72,6 +109,7 @@ export async function saveDocWithTranslations({
 
     await fs.writeFile(mainFilePath, finalContent, "utf8");
     results.push({ path: mainFilePath, success: true });
+    console.log(chalk.green(`Saved: ${chalk.cyan(mainFilePath)}`));
 
     // Process all translations
     for (const translate of translates) {
@@ -89,9 +127,515 @@ export async function saveDocWithTranslations({
 
       await fs.writeFile(translatePath, finalTranslationContent, "utf8");
       results.push({ path: translatePath, success: true });
+      console.log(chalk.green(`Saved: ${chalk.cyan(translatePath)}`));
     }
   } catch (err) {
     results.push({ path: docPath, success: false, error: err.message });
   }
   return results;
 }
+
+/**
+ * Get current git HEAD commit hash
+ * @returns {string} - The current git HEAD commit hash
+ */
+export function getCurrentGitHead() {
+  try {
+    return execSync("git rev-parse HEAD", {
+      encoding: "utf8",
+      stdio: ["pipe", "pipe", "ignore"],
+    }).trim();
+  } catch (error) {
+    // Not in git repository or git command failed
+    console.warn("Failed to get git HEAD:", error.message);
+    return null;
+  }
+}
+
+/**
+ * Save git HEAD to config.yaml file
+ * @param {string} gitHead - The current git HEAD commit hash
+ */
+export async function saveGitHeadToConfig(gitHead) {
+  if (!gitHead) {
+    return; // Skip if no git HEAD available
+  }
+
+  try {
+    const docSmithDir = path.join(process.cwd(), "doc-smith");
+    if (!existsSync(docSmithDir)) {
+      mkdirSync(docSmithDir, { recursive: true });
+    }
+
+    const inputFilePath = path.join(docSmithDir, "config.yaml");
+    let fileContent = "";
+
+    // Read existing file content if it exists
+    if (existsSync(inputFilePath)) {
+      fileContent = await fs.readFile(inputFilePath, "utf8");
+    }
+
+    // Check if lastGitHead already exists in the file
+    const lastGitHeadRegex = /^lastGitHead:\s*.*$/m;
+    const newLastGitHeadLine = `lastGitHead: ${gitHead}`;
+
+    if (lastGitHeadRegex.test(fileContent)) {
+      // Replace existing lastGitHead line
+      fileContent = fileContent.replace(lastGitHeadRegex, newLastGitHeadLine);
+    } else {
+      // Add lastGitHead to the end of file
+      if (fileContent && !fileContent.endsWith("\n")) {
+        fileContent += "\n";
+      }
+      fileContent += newLastGitHeadLine + "\n";
+    }
+
+    await fs.writeFile(inputFilePath, fileContent);
+  } catch (error) {
+    console.warn("Failed to save git HEAD to config.yaml:", error.message);
+  }
+}
+
+/**
+ * Check if files have been modified between two git commits
+ * @param {string} fromCommit - Starting commit hash
+ * @param {string} toCommit - Ending commit hash (defaults to HEAD)
+ * @param {Array<string>} filePaths - Array of file paths to check
+ * @returns {Array<string>} - Array of modified file paths
+ */
+export function getModifiedFilesBetweenCommits(
+  fromCommit,
+  toCommit = "HEAD",
+  filePaths = []
+) {
+  try {
+    // Get all modified files between commits
+    const modifiedFiles = execSync(
+      `git diff --name-only ${fromCommit}..${toCommit}`,
+      {
+        encoding: "utf8",
+        stdio: ["pipe", "pipe", "ignore"],
+      }
+    )
+      .trim()
+      .split("\n")
+      .filter(Boolean);
+
+    // Filter to only include files we care about
+    if (filePaths.length === 0) {
+      return modifiedFiles;
+    }
+
+    return modifiedFiles.filter((file) =>
+      filePaths.some((targetPath) => {
+        const absoluteFile = normalizePath(file);
+        const absoluteTarget = normalizePath(targetPath);
+        return absoluteFile === absoluteTarget;
+      })
+    );
+  } catch (error) {
+    console.warn(
+      `Failed to get modified files between ${fromCommit} and ${toCommit}:`,
+      error.message
+    );
+    return [];
+  }
+}
+
+/**
+ * Check if any source files have changed based on modified files list
+ * @param {Array<string>} sourceIds - Source file paths
+ * @param {Array<string>} modifiedFiles - List of modified files between commits
+ * @returns {boolean} - True if any source files have changed
+ */
+export function hasSourceFilesChanged(sourceIds, modifiedFiles) {
+  if (!sourceIds || sourceIds.length === 0 || !modifiedFiles) {
+    return false; // No source files or no modified files
+  }
+
+  return modifiedFiles.some((modifiedFile) =>
+    sourceIds.some((sourceId) => {
+      const absoluteModifiedFile = normalizePath(modifiedFile);
+      const absoluteSourceId = normalizePath(sourceId);
+      return absoluteModifiedFile === absoluteSourceId;
+    })
+  );
+}
+
+/**
+ * Check if there are any added or deleted files between two git commits that match the include/exclude patterns
+ * @param {string} fromCommit - Starting commit hash
+ * @param {string} toCommit - Ending commit hash (defaults to HEAD)
+ * @param {Array<string>} includePatterns - Include patterns to match files
+ * @param {Array<string>} excludePatterns - Exclude patterns to filter files
+ * @returns {boolean} - True if there are relevant added/deleted files
+ */
+export function hasFileChangesBetweenCommits(
+  fromCommit,
+  toCommit = "HEAD",
+  includePatterns = DEFAULT_INCLUDE_PATTERNS,
+  excludePatterns = DEFAULT_EXCLUDE_PATTERNS
+) {
+  try {
+    // Get file changes with status (A=added, D=deleted, M=modified)
+    const changes = execSync(
+      `git diff --name-status ${fromCommit}..${toCommit}`,
+      {
+        encoding: "utf8",
+        stdio: ["pipe", "pipe", "ignore"],
+      }
+    )
+      .trim()
+      .split("\n")
+      .filter(Boolean);
+
+    // Only check for added (A) and deleted (D) files
+    const addedOrDeletedFiles = changes
+      .filter((line) => {
+        const [status, filePath] = line.split(/\s+/);
+        return (status === "A" || status === "D") && filePath;
+      })
+      .map((line) => line.split(/\s+/)[1]);
+
+    if (addedOrDeletedFiles.length === 0) {
+      return false;
+    }
+
+    // Check if any of the added/deleted files match the include patterns and don't match exclude patterns
+    return addedOrDeletedFiles.some((filePath) => {
+      // Check if file matches any include pattern
+      const matchesInclude = includePatterns.some((pattern) => {
+        // Convert glob pattern to regex for matching
+        const regexPattern = pattern
+          .replace(/\./g, "\\.")
+          .replace(/\*/g, ".*")
+          .replace(/\?/g, ".");
+        const regex = new RegExp(regexPattern);
+        return regex.test(filePath);
+      });
+
+      if (!matchesInclude) {
+        return false;
+      }
+
+      // Check if file matches any exclude pattern
+      const matchesExclude = excludePatterns.some((pattern) => {
+        // Convert glob pattern to regex for matching
+        const regexPattern = pattern
+          .replace(/\./g, "\\.")
+          .replace(/\*/g, ".*")
+          .replace(/\?/g, ".");
+        const regex = new RegExp(regexPattern);
+        return regex.test(filePath);
+      });
+
+      return !matchesExclude;
+    });
+  } catch (error) {
+    console.warn(
+      `Failed to check file changes between ${fromCommit} and ${toCommit}:`,
+      error.message
+    );
+    return false;
+  }
+}
+
+/**
+ * Load config from config.yaml file
+ * @returns {Promise<Object|null>} - The config object or null if file doesn't exist
+ */
+export async function loadConfigFromFile() {
+  const configPath = path.join(process.cwd(), "doc-smith", "config.yaml");
+
+  try {
+    if (!existsSync(configPath)) {
+      return null;
+    }
+
+    const configContent = await fs.readFile(configPath, "utf8");
+    return parse(configContent);
+  } catch (error) {
+    console.warn("Failed to read config file:", error.message);
+    return null;
+  }
+}
+
+/**
+ * Save value to config.yaml file
+ * @param {string} key - The config key to save
+ * @param {string} value - The value to save
+ */
+export async function saveValueToConfig(key, value) {
+  if (!value) {
+    return; // Skip if no value provided
+  }
+
+  try {
+    const docSmithDir = path.join(process.cwd(), "doc-smith");
+    if (!existsSync(docSmithDir)) {
+      mkdirSync(docSmithDir, { recursive: true });
+    }
+
+    const configPath = path.join(docSmithDir, "config.yaml");
+    let fileContent = "";
+
+    // Read existing file content if it exists
+    if (existsSync(configPath)) {
+      fileContent = await fs.readFile(configPath, "utf8");
+    }
+
+    // Check if key already exists in the file
+    const keyRegex = new RegExp(`^${key}:\\s*.*$`, "m");
+    const newKeyLine = `${key}: ${value}`;
+
+    if (keyRegex.test(fileContent)) {
+      // Replace existing key line
+      fileContent = fileContent.replace(keyRegex, newKeyLine);
+    } else {
+      // Add key to the end of file
+      if (fileContent && !fileContent.endsWith("\n")) {
+        fileContent += "\n";
+      }
+      fileContent += newKeyLine + "\n";
+    }
+
+    await fs.writeFile(configPath, fileContent);
+  } catch (error) {
+    console.warn(`Failed to save ${key} to config.yaml:`, error.message);
+  }
+}
+
+/**
+ * Validate if a path exists and is accessible
+ * @param {string} filePath - The path to validate (can be absolute or relative)
+ * @returns {Object} - Validation result with isValid boolean and error message
+ */
+export function validatePath(filePath) {
+  try {
+    const absolutePath = normalizePath(filePath);
+
+    // Check if path exists
+    if (!existsSync(absolutePath)) {
+      return {
+        isValid: false,
+        error: `Path does not exist: ${filePath}`,
+      };
+    }
+
+    // Check if path is accessible (readable)
+    try {
+      accessSync(absolutePath, constants.R_OK);
+    } catch (accessError) {
+      return {
+        isValid: false,
+        error: `Path is not accessible: ${filePath}`,
+      };
+    }
+
+    return {
+      isValid: true,
+      error: null,
+    };
+  } catch (error) {
+    return {
+      isValid: false,
+      error: `Invalid path format: ${filePath}`,
+    };
+  }
+}
+
+/**
+ * Validate multiple paths and return validation results
+ * @param {Array<string>} paths - Array of paths to validate
+ * @returns {Object} - Validation results with validPaths array and errors array
+ */
+export function validatePaths(paths) {
+  const validPaths = [];
+  const errors = [];
+
+  for (const path of paths) {
+    const validation = validatePath(path);
+    if (validation.isValid) {
+      validPaths.push(path);
+    } else {
+      errors.push({
+        path: path,
+        error: validation.error,
+      });
+    }
+  }
+
+  return {
+    validPaths,
+    errors,
+  };
+}
+
+/**
+ * Check if input is a valid directory and add it to results if so
+ * @param {string} searchTerm - The search term to check
+ * @param {Array} results - The results array to modify
+ */
+function addExactDirectoryMatch(searchTerm, results) {
+  const inputValidation = validatePath(searchTerm);
+  if (inputValidation.isValid) {
+    const stats = statSync(normalizePath(searchTerm));
+    if (stats.isDirectory()) {
+      results.unshift({
+        name: searchTerm,
+        value: searchTerm,
+      });
+    }
+  }
+}
+
+/**
+ * Get available paths for search suggestions based on user input
+ * @param {string} userInput - User's input string
+ * @returns {Array<Object>} - Array of path objects with name, value, and description
+ */
+export function getAvailablePaths(userInput = "") {
+  try {
+    const searchTerm = userInput.trim();
+
+    // If no input, return current directory contents
+    if (!searchTerm) {
+      return getDirectoryContents("./");
+    }
+
+    let results = [];
+
+    // Handle absolute paths
+    if (searchTerm.startsWith("/")) {
+      const dirPath = path.dirname(searchTerm);
+      const fileName = path.basename(searchTerm);
+      results = getDirectoryContents(dirPath, fileName);
+      addExactDirectoryMatch(searchTerm, results);
+    }
+    // Handle relative paths
+    else if (searchTerm.startsWith("./") || searchTerm.startsWith("../")) {
+      // Extract directory path and search term
+      const lastSlashIndex = searchTerm.lastIndexOf("/");
+      if (lastSlashIndex === -1) {
+        // No slash found, treat as current directory search
+        results = getDirectoryContents("./", searchTerm);
+        addExactDirectoryMatch(searchTerm, results);
+      } else {
+        const dirPath = searchTerm.substring(0, lastSlashIndex + 1);
+        const fileName = searchTerm.substring(lastSlashIndex + 1);
+
+        // Validate directory path
+        const validation = validatePath(dirPath);
+        if (!validation.isValid) {
+          return [
+            {
+              name: dirPath,
+              value: dirPath,
+              description: validation.error,
+            },
+          ];
+        }
+
+        results = getDirectoryContents(dirPath, fileName);
+        addExactDirectoryMatch(searchTerm, results);
+      }
+    }
+    // Handle simple file/directory names (search in current directory)
+    else {
+      results = getDirectoryContents("./", searchTerm);
+      addExactDirectoryMatch(searchTerm, results);
+    }
+
+    // Remove duplicates based on value (path)
+    const uniqueResults = [];
+    const seenPaths = new Set();
+
+    for (const item of results) {
+      if (!seenPaths.has(item.value)) {
+        seenPaths.add(item.value);
+        uniqueResults.push(item);
+      }
+    }
+
+    return uniqueResults;
+  } catch (error) {
+    console.warn(
+      `Failed to get available paths for "${userInput}":`,
+      error.message
+    );
+    return [];
+  }
+}
+
+/**
+ * Get directory contents for a specific path
+ * @param {string} dirPath - Directory path to search in
+ * @param {string} searchTerm - Optional search term to filter results
+ * @returns {Array<Object>} - Array of path objects
+ */
+function getDirectoryContents(dirPath, searchTerm = "") {
+  try {
+    const absoluteDirPath = normalizePath(dirPath);
+
+    // Check if directory exists
+    if (!existsSync(absoluteDirPath)) {
+      return [
+        {
+          name: dirPath,
+          value: dirPath,
+          description: "Directory does not exist",
+        },
+      ];
+    }
+
+    const items = [];
+
+    // Read directory contents
+    const entries = readdirSync(absoluteDirPath, { withFileTypes: true });
+
+    for (const entry of entries) {
+      const entryName = entry.name;
+      const relativePath = path.join(dirPath, entryName);
+
+      // Filter by search term if provided
+      if (
+        searchTerm &&
+        !entryName.toLowerCase().includes(searchTerm.toLowerCase())
+      ) {
+        continue;
+      }
+
+      // Skip hidden files and common ignore patterns
+      if (
+        entryName.startsWith(".") ||
+        entryName === "node_modules" ||
+        entryName === ".git" ||
+        entryName === "dist" ||
+        entryName === "build"
+      ) {
+        continue;
+      }
+
+      const isDirectory = entry.isDirectory();
+
+      // Only include directories, skip files
+      if (isDirectory) {
+        items.push({
+          name: relativePath,
+          value: relativePath,
+        });
+      }
+    }
+
+    // Sort alphabetically (all items are directories now)
+    items.sort((a, b) => a.name.localeCompare(b.name));
+
+    return items;
+  } catch (error) {
+    console.warn(
+      `Failed to get directory contents from ${dirPath}:`,
+      error.message
+    );
+    return [];
+  }
+}
```
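Taken together, these helpers appear to support an incremental flow: record the git HEAD as `lastGitHead` in `doc-smith/config.yaml`, then on the next run diff against it to decide whether the docs are stale. The sketch below is illustrative only and not part of the package; the import path and the `sources` list are assumptions.

```js
// Illustrative sketch, not shipped with the package.
import {
  getCurrentGitHead,
  loadConfigFromFile,
  getModifiedFilesBetweenCommits,
  hasSourceFilesChanged,
  saveValueToConfig,
} from "./utils/utils.mjs";

const sources = ["README.md", "src/index.mjs"]; // hypothetical source files

const config = await loadConfigFromFile(); // null when doc-smith/config.yaml is absent
const head = getCurrentGitHead(); // null outside a git repository

if (config?.lastGitHead && head) {
  // Limit the git diff to the tracked sources and check whether any changed.
  const modified = getModifiedFilesBetweenCommits(config.lastGitHead, head, sources);
  if (hasSourceFilesChanged(sources, modified)) {
    console.log("Source files changed since the last run; docs may be stale.");
  }
}

// Record the current HEAD so the next run diffs against it.
await saveValueToConfig("lastGitHead", head);
```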
package/agents/check-structure-planning.mjs
DELETED

```diff
@@ -1,33 +0,0 @@
-import { dirname } from "node:path";
-import { fileURLToPath } from "node:url";
-
-// Get current script directory
-const __dirname = dirname(fileURLToPath(import.meta.url));
-
-export default async function checkStructurePlanning(
-  { originalStructurePlan, feedback, ...rest },
-  options
-) {
-  // If originalStructurePlan exists, return directly
-  if (originalStructurePlan && !feedback) {
-    return {
-      structurePlan: originalStructurePlan,
-    };
-  }
-
-  const panningAgent = options.context.agents["reflective-structure-planner"];
-
-  const result = await options.context.invoke(panningAgent, {
-    feedback: feedback || "",
-    originalStructurePlan,
-    ...rest,
-  });
-
-  return {
-    ...result,
-    feedback: "", // clear feedback
-    originalStructurePlan: originalStructurePlan
-      ? originalStructurePlan
-      : JSON.parse(JSON.stringify(result.structurePlan || [])),
-  };
-}
```
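As a closing illustration, here is a sketch of how the path validation and suggestion helpers added in `utils/utils.mjs` above might back an interactive directory prompt. It is not part of the package; the paths and the printed results are hypothetical and depend on the local tree.

```js
// Illustrative sketch, not shipped with the package.
import { validatePaths, getAvailablePaths } from "./utils/utils.mjs";

// Validate user-supplied paths; invalid ones come back with an error message.
const { validPaths, errors } = validatePaths(["./src", "./does-not-exist"]);
console.log(validPaths); // e.g. [ './src' ]
console.log(errors); // e.g. [ { path: './does-not-exist', error: 'Path does not exist: ./does-not-exist' } ]

// Offer directory suggestions as the user types; only directories are returned.
const suggestions = getAvailablePaths("./sr");
console.log(suggestions); // e.g. [ { name: 'src', value: 'src' } ]
```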