@aramassa/ai-rules 0.3.5 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +405 -172
- package/dist/optionValidator.d.ts +1 -0
- package/dist/optionValidator.d.ts.map +1 -1
- package/package.json +1 -1
package/dist/cli.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";
+{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AA+qBA;;GAEG;AACH,wBAAsB,wBAAwB,CAC5C,GAAG,EAAE,MAAM,EACX,OAAO,GAAE,MAAY,GACpB,OAAO,CAAC,MAAM,EAAE,CAAC,CA4BnB"}
package/dist/cli.js
CHANGED
@@ -17904,7 +17904,7 @@ class ContentTracker {
 * Generates a hash for the given content
 */
 generateContentHash(content) {
-return createHash(
+return createHash("sha256").update(content.trim()).digest("hex");
 }
 /**
 * Checks if the same content has already been written to the file
@@ -17949,7 +17949,7 @@ function findPackageRoot() {
 let dir = path.dirname(currentFile);
 // Walk up directory tree looking for package.json
 while (dir !== path.dirname(dir)) {
-const packageJsonPath = path.join(dir,
+const packageJsonPath = path.join(dir, "package.json");
 try {
 // Use synchronous check since this is initialization
 fsSync.accessSync(packageJsonPath);
@@ -17973,17 +17973,49 @@ function resolveDefaultSrcDir(providedSrc) {
 }
 // Find package root and return artifact/ directory
 const packageRoot = findPackageRoot();
-return path.join(packageRoot,
+return path.join(packageRoot, "artifact");
 }
 /**
- * Resolves source directory for a recipe item following priority order:
+ * Resolves source directory for a recipe item following priority order (Issue #197):
 * 1. CLI --src option (highest priority)
- * 2.
- * 3.
+ * 2. Import-level src field (overrides item-level src)
+ * 3. Recipe item src field (lower priority - resolved relative to recipe file or import src)
 * 4. Default package artifact/ directory (lowest priority)
 */
-function resolveItemSrcDir(cliSrc, itemSrc, importSrc) {
-
+function resolveItemSrcDir(cliSrc, itemSrc, importSrc, recipePath) {
+if (cliSrc) {
+return cliSrc;
+}
+// Per Issue #197: import-level src should override item-level src
+if (importSrc) {
+// If itemSrc is specified and relative, resolve it relative to importSrc
+if (itemSrc && !path.isAbsolute(itemSrc)) {
+// If importSrc is absolute, resolve itemSrc relative to it
+if (path.isAbsolute(importSrc)) {
+return path.resolve(importSrc, itemSrc);
+}
+// If importSrc is also relative, we need to resolve it first
+if (recipePath) {
+const resolvedImportSrc = path.resolve(path.dirname(recipePath), importSrc);
+return path.resolve(resolvedImportSrc, itemSrc);
+}
+}
+// Use importSrc as-is (either no itemSrc, or itemSrc is absolute and ignored)
+return importSrc;
+}
+if (itemSrc) {
+// If itemSrc is absolute, use it as-is
+if (path.isAbsolute(itemSrc)) {
+return itemSrc;
+}
+// If itemSrc is relative and we have a recipe path, resolve relative to recipe file directory
+if (recipePath) {
+return path.resolve(path.dirname(recipePath), itemSrc);
+}
+// Fallback to current working directory (existing behavior)
+return itemSrc;
+}
+return resolveDefaultSrcDir();
 }
 /**
 * Resolves output path with baseDir support following priority order:
@@ -17995,7 +18027,7 @@ function resolveItemSrcDir(cliSrc, itemSrc, importSrc) {
 function resolveOutputPath(itemOut, cliBaseDir, importBaseDir, recipeBaseDir, recipePath) {
 // Check if the itemOut contains environment variables or tilde - if so, try to expand
 let processedItemOut = itemOut;
-if (itemOut.includes(
+if (itemOut.includes("$") || itemOut.startsWith("~")) {
 try {
 processedItemOut = resolvePath(itemOut);
 // If expansion resulted in an absolute path, use it as-is (ignore baseDir)
@@ -18021,10 +18053,10 @@ function resolveOutputPath(itemOut, cliBaseDir, importBaseDir, recipeBaseDir, re
 }
 // Existing behavior: resolve relative to recipe file directory or current directory
 // For preset files (located in presets/ directory), resolve relative to project root instead
-let baseDirectory =
+let baseDirectory = ".";
 if (recipePath) {
 const packageRoot = findPackageRoot();
-const presetsDir = path.join(packageRoot,
+const presetsDir = path.join(packageRoot, "presets");
 // If the recipe is in the presets directory, use project root as base
 if (path.dirname(recipePath) === presetsDir) {
 baseDirectory = process.cwd();
@@ -18038,10 +18070,10 @@ function resolveOutputPath(itemOut, cliBaseDir, importBaseDir, recipeBaseDir, re
 }
 function resolveRecipePath(recipePath) {
 // If recipe starts with ':', resolve to package preset
-if (recipePath.startsWith(
+if (recipePath.startsWith(":")) {
 const presetName = recipePath.slice(1); // Remove the ':'
 const packageRoot = findPackageRoot();
-return path.join(packageRoot,
+return path.join(packageRoot, "presets", `${presetName}.yaml`);
 }
 // Otherwise, use the path as-is
 return recipePath;
@@ -18061,8 +18093,8 @@ async function validateRecipeFilesExist(recipePaths) {
 }
 }
 if (missingFiles.length > 0) {
-const errorMessages = missingFiles.map(file => {
-if (file.startsWith(
+const errorMessages = missingFiles.map((file) => {
+if (file.startsWith(":")) {
 const presetName = file.slice(1);
 return `Preset ':${presetName}' not found. Use 'ai-rules presets' to list available presets.`;
 }
@@ -18074,7 +18106,7 @@ async function validateRecipeFilesExist(recipePaths) {
 throw new Error(errorMessages[0]);
 }
 else {
-throw new Error(`Multiple recipe files not found:\n${errorMessages.join(
+throw new Error(`Multiple recipe files not found:\n${errorMessages.join("\n")}`);
 }
 }
 }
@@ -18083,7 +18115,7 @@ async function validateRecipeFilesExist(recipePaths) {
 */
 function resolveImportPath(importPath, currentRecipePath) {
 // If import starts with ':', resolve to package preset
-if (importPath.startsWith(
+if (importPath.startsWith(":")) {
 return resolveRecipePath(importPath);
 }
 // Otherwise, resolve relative to current recipe file directory
@@ -18109,8 +18141,8 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 // Check for circular imports
 if (visited.has(resolvedImportPath)) {
 const visitedArray = Array.from(visited);
-debugLogger?.log(`Circular import detected in path: ${visitedArray.join(
-throw new Error(`Circular import detected: ${visitedArray.join(
+debugLogger?.log(`Circular import detected in path: ${visitedArray.join(" -> ")} -> ${resolvedImportPath}`);
+throw new Error(`Circular import detected: ${visitedArray.join(" -> ")} -> ${resolvedImportPath}`);
 }
 // Check for duplicate imports and skip if already imported
 if (imported.has(resolvedImportPath)) {
@@ -18152,18 +18184,25 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 const expandedImported = await expandRecipeImports(importData.recipe, resolvedImportPath, debugLogger, newVisited, imported, depth + 1);
 debugLogger?.timeEnd(`Expanding nested imports in: ${resolvedImportPath}`);
 debugLogger?.log(`Nested expansion yielded ${expandedImported.length} items`);
-// Apply import-level configurations to all expanded items
+// Apply import-level configurations to all expanded items
+expandedImported.forEach((expandedItem) => {
+// Ensure _recipePath is set for proper path resolution
+if (!expandedItem._recipePath) {
+expandedItem._recipePath = resolvedImportPath;
+debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out || "untitled"}' with recipe path: ${resolvedImportPath}`);
+}
+});
 if (importLevelBaseDir || importLevelSrc || importLevelVariables) {
-expandedImported.forEach(expandedItem => {
+expandedImported.forEach((expandedItem) => {
 // Apply baseDir if specified (only if not already set to preserve nested import priority)
 if (importLevelBaseDir && !expandedItem._importBaseDir) {
 expandedItem._importBaseDir = importLevelBaseDir;
-debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out ||
+debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out || "untitled"}' with import baseDir: ${importLevelBaseDir}`);
 }
 // Apply src if specified (only if not already set to preserve nested import priority)
 if (importLevelSrc && !expandedItem._importSrc) {
 expandedItem._importSrc = importLevelSrc;
-debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out ||
+debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out || "untitled"}' with import src: ${importLevelSrc}`);
 }
 // Apply variables if specified (merge with existing variables, with import-level taking priority)
 if (importLevelVariables) {
@@ -18174,10 +18213,10 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 // Merge variables, with import-level taking priority over nested imports
 expandedItem._importVariables = {
 ...expandedItem._importVariables,
-...importLevelVariables
+...importLevelVariables,
 };
 }
-debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out ||
+debugLogger?.log(`Tagged item '${expandedItem.title || expandedItem.out || "untitled"}' with import variables:`, expandedItem._importVariables);
 }
 });
 }
@@ -18188,10 +18227,10 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 // Remove from imported set if there was an error to allow retry
 imported.delete(resolvedImportPath);
 debugLogger?.log(`Error processing import ${resolvedImportPath}:`, error);
-if (item.import.startsWith(
+if (item.import.startsWith(":")) {
 const presetName = item.import.slice(1);
 // Only treat ENOENT errors as "preset not found"
-if (error instanceof Error && error.message.includes(
+if (error instanceof Error && error.message.includes("ENOENT")) {
 throw new Error(`Preset ':${presetName}' not found. Use 'ai-rules presets' to list available presets.`);
 }
 // For other errors, add context but preserve the original error message
@@ -18200,7 +18239,7 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 }
 throw error;
 }
-if (error instanceof Error && error.message.includes(
+if (error instanceof Error && error.message.includes("ENOENT")) {
 throw new Error(`Import file '${item.import}' not found (resolved to: ${resolvedImportPath})`);
 }
 throw error;
@@ -18208,8 +18247,13 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 }
 else {
 // This is a regular recipe item
-debugLogger?.log(`Adding regular recipe item: ${item.title ||
-
+debugLogger?.log(`Adding regular recipe item: ${item.title || "untitled"}`);
+// Tag the item with the recipe path it originated from
+const taggedItem = { ...item };
+if (!taggedItem._recipePath) {
+taggedItem._recipePath = currentPath;
+}
+expanded.push(taggedItem);
 }
 }
 debugLogger?.log(`Import expansion complete: ${expanded.length} total items after processing`);
@@ -18221,42 +18265,43 @@ async function expandRecipeImports(items, currentPath, debugLogger, visited = ne
 function setupProgram() {
 const program = new Command();
 program
-.name(
-.description(
-.version(
+.name("ai-rules")
+.description("CLI for extracting and analyzing markdown files")
+.version("0.0.1");
 // Extract command
 program
-.command(
-.description(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
-.option(
+.command("extract")
+.description("Extract and merge markdown files")
+.option("--src <path>", "Source directory")
+.option("--out <path>", "Output file")
+.option("--type <types>", "Filter by type (comma-separated)")
+.option("--language <languages>", "Filter by language (comma-separated)")
+.option("--attr <attributes>", "Filter by attributes (comma-separated)")
+.option("--title <title>", "Title for the output")
+.option("--mode <mode>", "Write mode: append, prepend, overwrite", "overwrite")
+.option("--recipe <path>", "Recipe file path or package preset (e.g., :typescript). Can be specified multiple times or comma-separated.", collectRecipeOptions, [])
+.option("--base-dir <path>", "Base directory for output files (supports ~ and environment variable expansion)")
+.option("--vars <variables>", "Template variables in key=value format (comma-separated)")
+.option("--env-file <path>", "Path to .env file for template variables")
+.option("--debug", "Enable debug logging")
+.option("--verbose", "Enable verbose logging (alias for --debug)")
+.option("--dry-run", "Preview input/output files without writing")
 .action(async (options) => {
 await handleExtractCommand(options);
 });
-// Stats command
+// Stats command
 program
-.command(
-.description(
-.option(
-.option(
+.command("stats")
+.description("Generate statistics from markdown files")
+.option("--src <path>", "Source directory")
+.option("--variables", "Show available template variables")
 .action(async (options) => {
 await handleStatsCommand(options);
 });
 // Presets command
 program
-.command(
-.description(
+.command("presets")
+.description("List available package presets")
 .action(async () => {
 await listPresets();
 });
@@ -18276,7 +18321,8 @@ async function findYamlFilesRecursively(dir, baseDir = dir) {
 const subFiles = await findYamlFilesRecursively(fullPath, baseDir);
 yamlFiles.push(...subFiles);
 }
-else if (item.isFile() &&
+else if (item.isFile() &&
+(item.name.endsWith(".yaml") || item.name.endsWith(".yml"))) {
 // Get relative path from base directory
 const relativePath = path.relative(baseDir, fullPath);
 yamlFiles.push(relativePath);
@@ -18294,25 +18340,25 @@ async function findYamlFilesRecursively(dir, baseDir = dir) {
 */
 async function listPresets() {
 const packageRoot = findPackageRoot();
-const presetsDir = path.join(packageRoot,
+const presetsDir = path.join(packageRoot, "presets");
 try {
 const yamlFiles = await findYamlFilesRecursively(presetsDir);
 if (yamlFiles.length === 0) {
 // eslint-disable-next-line no-console
-console.log(
+console.log("No presets found in package.");
 return;
 }
 // eslint-disable-next-line no-console
-console.log(
-yamlFiles.forEach(file => {
-const presetName = file.replace(/\.(yaml|yml)$/,
+console.log("Available presets:");
+yamlFiles.forEach((file) => {
+const presetName = file.replace(/\.(yaml|yml)$/, "");
 // eslint-disable-next-line no-console
 console.log(` :${presetName}`);
 });
 }
 catch (error) {
 // eslint-disable-next-line no-console
-console.log(
+console.log("No presets directory found in package.");
 }
 }
 /**
@@ -18320,23 +18366,26 @@ async function listPresets() {
 */
 function collectRecipeOptions(value, previous) {
 // Split comma-separated values and add to the accumulated array
-const newValues = value
+const newValues = value
+.split(",")
+.map((v) => v.trim())
+.filter((v) => v.length > 0);
 return previous.concat(newValues);
 }
 /**
 * Parses comma-separated values into array
 */
 function parseCommaSeparated(value) {
-return value ? value.split(
+return value ? value.split(",") : undefined;
 }
 /**
 * Validates and converts mode string to WriteMode enum
 */
 function parseWriteMode(mode) {
 const modeMap = {
-
-
-
+append: WriteMode.APPEND,
+prepend: WriteMode.PREPEND,
+overwrite: WriteMode.OVERWRITE,
 };
 return modeMap[mode] || WriteMode.OVERWRITE;
 }
@@ -18351,10 +18400,10 @@ function convertFiltersToAttrFilters(filters) {
 if (Array.isArray(value)) {
 // For array values, join with pipe (|) to indicate OR logic
 // This needs to be handled specially in the filter logic
-const joinedValues = value.join(
+const joinedValues = value.join("|");
 attrFilters.push(`${key}=${joinedValues}`);
 }
-else if (typeof value ===
+else if (typeof value === "string") {
 // For string values, create single filter entry
 attrFilters.push(`${key}=${value}`);
 }
@@ -18364,54 +18413,204 @@ function convertFiltersToAttrFilters(filters) {
 /**
 * Common logic for loading and filtering markdown files
 */
+/**
+* Displays dry-run preview for recipe processing
+*/
+async function displayRecipeDryRunPreview(recipePath, recipeData, expandedRecipe, baseOptions, debugLogger, cliBaseDir, cliOutFile, cliSrc) {
+console.log("=== Recipe Dry-Run Preview ===\n");
+const presetName = recipePath.startsWith(":") ? recipePath : undefined;
+if (presetName) {
+console.log(`📦 Recipe: ${presetName} (${expandedRecipe.length} steps)`);
+}
+else {
+console.log(`📦 Recipe: ${recipePath} (${expandedRecipe.length} steps)`);
+}
+const recipeBaseDir = recipeData.config?.baseDir;
+let totalInputFiles = 0;
+let totalIncludedFiles = 0;
+let totalExcludedFiles = 0;
+const outputFiles = [];
+for (const [index, item] of expandedRecipe.entries()) {
+console.log(`\nStep ${index + 1}: ${item.title || `Untitled Step ${index + 1}`}`);
+// Resolve paths and options for this item
+const itemOut = cliOutFile || item.out || baseOptions.outFile;
+const outputFile = resolveOutputPath(itemOut, cliBaseDir, item._importBaseDir, recipeBaseDir, resolveRecipePath(recipePath));
+const itemTypes = item.type
+? parseCommaSeparated(item.type)
+: baseOptions.types;
+const itemLanguages = item.language
+? parseCommaSeparated(item.language)
+: baseOptions.languages;
+let combinedAttrFilters = [...baseOptions.attrFilters];
+if (item.filters) {
+const itemAttrFilters = convertFiltersToAttrFilters(item.filters);
+combinedAttrFilters = combinedAttrFilters.concat(itemAttrFilters);
+}
+const itemSrcDir = resolveItemSrcDir(cliSrc, item.src, item._importSrc, item._recipePath);
+console.log(` 📂 Source: ${itemSrcDir}`);
+console.log(` 🔍 Filters: type=${itemTypes?.join(",") || "(none)"}, language=${itemLanguages?.join(",") || "(none)"}`);
+if (combinedAttrFilters.length > 0) {
+console.log(` attributes=${combinedAttrFilters.join(", ")}`);
+}
+try {
+const { allFiles, filteredFiles } = await loadAndFilterFilesWithDetails(itemSrcDir, itemTypes, itemLanguages, combinedAttrFilters, debugLogger);
+totalInputFiles += allFiles.length;
+totalIncludedFiles += filteredFiles.length;
+totalExcludedFiles += allFiles.length - filteredFiles.length;
+console.log(`\n 📄 Input Files Analysis:`);
+if (filteredFiles.length > 0) {
+console.log(` ✓ INCLUDED (${filteredFiles.length} files):`);
+filteredFiles.forEach((file) => {
+console.log(` - ${file.path}`);
+});
+}
+const excludedFiles = allFiles.filter((f) => !filteredFiles.includes(f));
+// Note: Individual excluded files are not displayed to keep output concise
+// Only the count is shown in the summary below
+console.log(`\n 📝 Output: ${outputFile}`);
+if (!outputFiles.includes(outputFile)) {
+outputFiles.push(outputFile);
+}
+}
+catch (error) {
+console.log(` ❌ Error scanning files: ${error instanceof Error ? error.message : String(error)}`);
+}
+}
+console.log(`\n📈 Overall Summary:`);
+console.log(` - Total steps: ${expandedRecipe.length}`);
+console.log(` - Total input files: ${totalInputFiles} (${totalIncludedFiles} included, ${totalExcludedFiles} excluded)`);
+console.log(` - Total output files: ${outputFiles.length}`);
+console.log(`\n⚠️ Dry-run mode: No files will be written`);
+}
+/**
+* Formats file size in a human-readable format
+*/
+function formatFileSize(sizeInBytes) {
+if (sizeInBytes < 1024)
+return `${sizeInBytes}B`;
+if (sizeInBytes < 1024 * 1024)
+return `${(sizeInBytes / 1024).toFixed(1)}KB`;
+return `${(sizeInBytes / (1024 * 1024)).toFixed(1)}MB`;
+}
+/**
+* Displays dry-run preview for single extraction
+*/
+function displayDryRunPreview(srcDir, outFile, allFiles, filteredFiles, types, languages, attrFilters = [], verbose = false) {
+console.log("=== Extract Command Dry-Run Preview ===\n");
+console.log(`📂 Source Directory: ${srcDir}`);
+console.log("🔍 Filters Applied:");
+console.log(` - Type: ${types?.length ? types.join(", ") : "(none)"}`);
+console.log(` - Language: ${languages?.length ? languages.join(", ") : "(none)"}`);
+console.log(` - Attributes: ${attrFilters.length ? attrFilters.join(", ") : "(none)"}\n`);
+console.log("📄 Input Files Analysis:");
+if (filteredFiles.length > 0) {
+console.log(` ✓ INCLUDED (${filteredFiles.length} files):`);
+filteredFiles.forEach((file) => {
+const attrs = Object.entries(file.attrs)
+.filter(([key]) => key !== "content")
+.map(([key, value]) => `${key}: ${Array.isArray(value) ? value.join(",") : value}`)
+.join(", ");
+console.log(` - ${file.path}`);
+if (verbose && attrs) {
+console.log(` → ${attrs}`);
+}
+else if (attrs) {
+console.log(` → ${attrs}`);
+}
+});
+}
+const excludedFiles = allFiles.filter((f) => !filteredFiles.includes(f));
+// Note: Individual excluded files are not displayed to keep output concise
+// Only the count is shown in the summary below
+const totalSize = filteredFiles.reduce((sum, f) => sum + f.content.length, 0);
+console.log(`\n📈 Processing Summary:`);
+console.log(` - Total files scanned: ${allFiles.length}`);
+console.log(` - Files matching filters: ${filteredFiles.length}`);
+console.log(` - Files excluded: ${excludedFiles.length}`);
+console.log(`\n📝 Output Files:`);
+console.log(` → ${outFile}`);
+console.log(` └─ Will merge ${filteredFiles.length} input files (estimated size: ~${formatFileSize(totalSize)})`);
+console.log(`\n⚠️ Dry-run mode: No files will be written`);
+}
 async function loadAndFilterFiles(srcDir, types, languages, attrFilters = [], debugLogger) {
 debugLogger?.log(`Starting file scan in directory: ${srcDir}`);
-debugLogger?.time(
+debugLogger?.time("MarkdownFileScanner.parseMarkdownFiles");
 const files = await MarkdownFileScanner.parseMarkdownFiles(srcDir);
-debugLogger?.timeEnd(
+debugLogger?.timeEnd("MarkdownFileScanner.parseMarkdownFiles");
 debugLogger?.log(`Total markdown files found: ${files.length}`);
 if (debugLogger?.isEnabled) {
-debugLogger.log(
+debugLogger.log("Files discovered:");
 files.forEach((file, index) => {
 debugLogger.log(` ${index + 1}. ${file.path}`);
 debugLogger.log(` Frontmatter attributes:`, Object.keys(file.attrs));
 });
 }
-debugLogger?.time(
+debugLogger?.time("File filtering");
 const filtered = filterFiles(files, { types, languages, attrFilters });
-debugLogger?.timeEnd(
+debugLogger?.timeEnd("File filtering");
 debugLogger?.log(`Files after filtering: ${filtered.length}`);
 if (debugLogger?.isEnabled && filtered.length !== files.length) {
 const excludedCount = files.length - filtered.length;
 debugLogger.log(`Excluded ${excludedCount} files due to filtering`);
-const excludedFiles = files.filter(f => !filtered.includes(f));
-excludedFiles.forEach(file => {
+const excludedFiles = files.filter((f) => !filtered.includes(f));
+excludedFiles.forEach((file) => {
 debugLogger.log(` Excluded: ${file.path} (attributes: ${JSON.stringify(file.attrs)})`);
 });
 }
 return filtered;
 }
+/**
+* Extended version of loadAndFilterFiles that returns both all files and filtered files
+* Used for dry-run mode to show exclusion details
+*/
+async function loadAndFilterFilesWithDetails(srcDir, types, languages, attrFilters = [], debugLogger) {
+debugLogger?.log(`Starting file scan in directory: ${srcDir}`);
+debugLogger?.time("MarkdownFileScanner.parseMarkdownFiles");
+const files = await MarkdownFileScanner.parseMarkdownFiles(srcDir);
+debugLogger?.timeEnd("MarkdownFileScanner.parseMarkdownFiles");
+debugLogger?.log(`Total markdown files found: ${files.length}`);
+if (debugLogger?.isEnabled) {
+debugLogger.log("Files discovered:");
+files.forEach((file, index) => {
+debugLogger.log(` ${index + 1}. ${file.path}`);
+debugLogger.log(` Frontmatter attributes:`, Object.keys(file.attrs));
+});
+}
+debugLogger?.time("File filtering");
+const filtered = filterFiles(files, { types, languages, attrFilters });
+debugLogger?.timeEnd("File filtering");
+debugLogger?.log(`Files after filtering: ${filtered.length}`);
+if (debugLogger?.isEnabled && filtered.length !== files.length) {
+const excludedCount = files.length - filtered.length;
+debugLogger.log(`Excluded ${excludedCount} files due to filtering`);
+const excludedFiles = files.filter((f) => !filtered.includes(f));
+excludedFiles.forEach((file) => {
+debugLogger.log(` Excluded: ${file.path} (attributes: ${JSON.stringify(file.attrs)})`);
+});
+}
+return { allFiles: files, filteredFiles: filtered };
+}
 /**
 * Handles content mode processing (append/prepend/overwrite)
 */
 async function processContentWithMode(outFile, newContent, mode, debugLogger) {
 debugLogger?.log(`Processing content with mode: ${mode} for file: ${outFile}`);
 if (mode === WriteMode.OVERWRITE) {
-debugLogger?.log(
+debugLogger?.log("Using overwrite mode - returning new content as-is");
 return newContent;
 }
 try {
 const existing = await fs.readFile(resolvePath(outFile), "utf-8");
 debugLogger?.log(`Existing file found with ${existing.length} characters`);
 if (mode === WriteMode.APPEND) {
-debugLogger?.log(
+debugLogger?.log("Appending new content to existing content");
 const needsNewline = !/\n\s*$/.test(existing);
 return needsNewline
 ? `${existing}\n\n${newContent}`
 : `${existing}\n${newContent}`;
 }
 else if (mode === WriteMode.PREPEND) {
-debugLogger?.log(
+debugLogger?.log("Prepending new content to existing content");
 const needsNewline = !/\n\s*$/.test(newContent);
 return needsNewline
 ? `${newContent}\n\n${existing}`
@@ -18420,7 +18619,7 @@ async function processContentWithMode(outFile, newContent, mode, debugLogger) {
 }
 catch (error) {
 debugLogger?.log(`No existing file found or error reading file:`, error);
-debugLogger?.log(
+debugLogger?.log("Proceeding with new content only");
 }
 return newContent;
 }
@@ -18430,7 +18629,7 @@ async function processContentWithMode(outFile, newContent, mode, debugLogger) {
 function applyTemplateToObject(obj, templateOptions, templateEngine) {
 const result = {};
 for (const [key, value] of Object.entries(obj)) {
-if (typeof value ===
+if (typeof value === "string") {
 // Apply template processing to string values
 if (templateEngine.hasTemplateVariables(value)) {
 result[key] = templateEngine.renderTemplate(value, templateOptions);
@@ -18441,11 +18640,11 @@ function applyTemplateToObject(obj, templateOptions, templateEngine) {
 }
 else if (Array.isArray(value)) {
 // Process array elements
-result[key] = value.map(item => typeof item ===
+result[key] = value.map((item) => typeof item === "string" && templateEngine.hasTemplateVariables(item)
 ? templateEngine.renderTemplate(item, templateOptions)
 : item);
 }
-else if (value && typeof value ===
+else if (value && typeof value === "object") {
 // Recursively process nested objects
 result[key] = applyTemplateToObject(value, templateOptions, templateEngine);
 }
@@ -18458,68 +18657,81 @@ function applyTemplateToObject(obj, templateOptions, templateEngine) {
 }
 async function processSingle(options, debugLogger) {
 CliOptionValidator.validateExtractOptions(options);
-const { srcDir, outFile, types, languages, attrFilters, title, mode, attr, vars, envFile } = options;
+const { srcDir, outFile, types, languages, attrFilters, title, mode, attr, vars, envFile, dryRun, } = options;
 // Resolve template variables first
 const templateEngine = new TemplateEngine();
 const variableResolver = new VariableResolver();
 let resolvedVariables = {};
 // Always resolve variables - even if no CLI variables are provided,
 // we still need to check for required variables and use environment variables
-debugLogger.time(
-debugLogger.log(
+debugLogger.time("Variable resolution");
+debugLogger.log("Resolving template variables", { vars, envFile });
 try {
-const cliVariables = vars
+const cliVariables = vars
+? VariableResolver.parseCliVariables(vars)
+: undefined;
 resolvedVariables = await variableResolver.resolveVariables({
 cliVariables,
 envFile,
-environmentVariables: true
+environmentVariables: true,
 });
-debugLogger.log(
+debugLogger.log("Resolved variables:", Object.keys(resolvedVariables));
 }
 catch (error) {
 throw new Error(`Template variable resolution failed: ${error instanceof Error ? error.message : String(error)}`);
 }
-debugLogger.timeEnd(
-debugLogger.time(
+debugLogger.timeEnd("Variable resolution");
+debugLogger.time("File scanning and filtering");
 debugLogger.log(`Scanning directory: ${srcDir}`);
-debugLogger.log(
+debugLogger.log("Applied filters:", { types, languages, attrFilters });
+// In dry-run mode, we need both all files and filtered files to show exclusion details
+if (dryRun) {
+const { allFiles, filteredFiles } = await loadAndFilterFilesWithDetails(srcDir, types, languages, attrFilters, debugLogger);
+debugLogger.timeEnd("File scanning and filtering");
+debugLogger.log(`Found ${filteredFiles.length} files after filtering`);
+// Check if verbose mode is enabled (via debug flag)
+const verbose = debugLogger.isEnabled;
+// Display dry-run preview and exit
+displayDryRunPreview(srcDir, outFile, allFiles, filteredFiles, types, languages, attrFilters, verbose);
+return;
+}
 const filtered = await loadAndFilterFiles(srcDir, types, languages, attrFilters, debugLogger);
-debugLogger.timeEnd(
+debugLogger.timeEnd("File scanning and filtering");
 debugLogger.log(`Found ${filtered.length} files after filtering`);
-debugLogger.time(
+debugLogger.time("Content merging");
 let merged = filtered.map((f) => f.content.trim()).join("\n\n");
 // Always create templateOptions with strict mode enabled
 const templateOptions = {
 variables: resolvedVariables,
-strictMode: true
+strictMode: true,
 };
 // Pre-validate required variables before processing
 if (templateEngine.hasTemplateVariables(merged)) {
-debugLogger.time(
+debugLogger.time("Required variable validation");
 const requiredVars = templateEngine.extractRequiredVariables(merged);
-debugLogger.log(
+debugLogger.log("Found required variables:", requiredVars);
 if (requiredVars.length > 0) {
 try {
 templateEngine.validateRequiredVariables(merged, resolvedVariables);
-debugLogger.log(
+debugLogger.log("All required variables are satisfied");
 }
 catch (error) {
-debugLogger.log(
+debugLogger.log("Required variable validation failed");
 throw error;
 }
 }
-debugLogger.timeEnd(
-debugLogger.time(
-debugLogger.log(
+debugLogger.timeEnd("Required variable validation");
+debugLogger.time("Template processing");
+debugLogger.log("Applying template processing to merged content");
 merged = templateEngine.renderTemplate(merged, templateOptions);
-debugLogger.timeEnd(
+debugLogger.timeEnd("Template processing");
 }
 const contentWithTitle = title ? `# ${title}\n\n${merged}` : merged;
-debugLogger.timeEnd(
+debugLogger.timeEnd("Content merging");
 debugLogger.log(`Generated content size: ${contentWithTitle.length} characters`);
-debugLogger.time(
+debugLogger.time("Content mode processing");
 const finalContent = await processContentWithMode(outFile, contentWithTitle, mode, debugLogger);
-debugLogger.timeEnd(
+debugLogger.timeEnd("Content mode processing");
 const resolved = resolvePath(outFile);
 const writer = new MarkdownWriter();
 debugLogger.log(`Writing to output file: ${resolved}`);
@@ -18527,21 +18739,21 @@ async function processSingle(options, debugLogger) {
 let frontMatter = attr ? { ...attr } : {};
 // Apply template processing to front matter if needed
 if (Object.keys(frontMatter).length > 0) {
-debugLogger.log(
+debugLogger.log("Applying template processing to front matter");
 frontMatter = applyTemplateToObject(frontMatter, templateOptions, templateEngine);
 }
-debugLogger.log(
-debugLogger.time(
+debugLogger.log("Front matter to add:", frontMatter);
+debugLogger.time("File writing");
 // Write file with or without front matter
 if (Object.keys(frontMatter).length > 0) {
 await writer.writeMarkdownFileWithFrontMatter(resolved, finalContent, frontMatter);
-debugLogger.log(
+debugLogger.log("File written with front matter");
 }
 else {
 await writer.writeMarkdownFile(resolved, finalContent);
-debugLogger.log(
+debugLogger.log("File written without front matter");
 }
-debugLogger.timeEnd(
+debugLogger.timeEnd("File writing");
 // eslint-disable-next-line no-console
 console.log(`Extracted ${filtered.length} files to ${outFile}`);
 }
@@ -18599,7 +18811,7 @@ async function displayVariables(srcDir) {
 variableMap.set(variable.name, {
 required: variable.required,
 defaultValue: variable.defaultValue,
-sources: new Set([file.path])
+sources: new Set([file.path]),
 });
 }
 }
@@ -18608,32 +18820,32 @@ async function displayVariables(srcDir) {
 for (const [name, info] of variableMap.entries()) {
 allVariables.push({
 variable: name,
-required: info.required ?
-defaultValue: info.defaultValue ??
-sources: Array.from(info.sources)
+required: info.required ? "Yes" : "No",
+defaultValue: info.defaultValue ?? "(none)",
+sources: Array.from(info.sources),
 });
 }
 if (allVariables.length === 0) {
 // eslint-disable-next-line no-console
-console.log(
+console.log("No template variables found in source files.");
 // eslint-disable-next-line no-console
-console.log(
+console.log("\nTemplate variables use the syntax ${VAR:default} for optional variables and !{VAR} for required variables.");
 return;
 }
 // eslint-disable-next-line no-console
-console.log(
+console.log("Template variables found in source files:\n");
 // Calculate column widths with reasonable limits
-const maxVariableLength = Math.max(8, Math.min(30, ...allVariables.map(v => v.variable.length)));
+const maxVariableLength = Math.max(8, Math.min(30, ...allVariables.map((v) => v.variable.length)));
 const maxRequiredLength = 8; // "Required" header length
-const maxDefaultLength = Math.max(12, Math.min(30, ...allVariables.map(v => v.defaultValue.length)));
+const maxDefaultLength = Math.max(12, Math.min(30, ...allVariables.map((v) => v.defaultValue.length)));
 // Header
-const variableHeader =
-const requiredHeader =
-const defaultHeader =
+const variableHeader = "Variable".padEnd(maxVariableLength);
+const requiredHeader = "Required".padEnd(maxRequiredLength);
+const defaultHeader = "Default".padEnd(maxDefaultLength);
 // eslint-disable-next-line no-console
 console.log(`${variableHeader} | ${requiredHeader} | ${defaultHeader}`);
 // eslint-disable-next-line no-console
-console.log(
+console.log("-".repeat(maxVariableLength + maxRequiredLength + maxDefaultLength + 6));
 // Sort by variable name for consistent output
 allVariables.sort((a, b) => a.variable.localeCompare(b.variable));
 // Data rows
@@ -18645,7 +18857,7 @@ async function displayVariables(srcDir) {
 console.log(`${variableCol} | ${requiredCol} | ${defaultCol}`);
 });
 // eslint-disable-next-line no-console
-console.log(
+console.log("\nNote: Template variables use ${VAR:default} syntax for optional variables and !{VAR} for required variables.");
 }
 /**
 * Handles the extract command with options parsing
@@ -18653,7 +18865,7 @@ async function displayVariables(srcDir) {
 async function handleExtractCommand(options) {
 const isDebugMode = Boolean(options.debug || options.verbose);
 const debugLogger = new DebugLogger(isDebugMode);
-debugLogger.log(
+debugLogger.log("Starting extract command with options:", {
 src: options.src,
 out: options.out,
 type: options.type,
@@ -18662,21 +18874,22 @@ async function handleExtractCommand(options) {
 title: options.title,
 mode: options.mode,
 recipeCount: options.recipe?.length || 0,
-debug: isDebugMode
+debug: isDebugMode,
 });
 const extractOptions = {
 srcDir: resolveDefaultSrcDir(options.src),
-outFile: options.out ||
+outFile: options.out || "./out/instruction.md",
 types: parseCommaSeparated(options.type),
 languages: parseCommaSeparated(options.language),
 attrFilters: parseCommaSeparated(options.attr) || [],
 title: options.title,
-mode: parseWriteMode(options.mode ||
+mode: parseWriteMode(options.mode || "overwrite"),
 debug: isDebugMode,
 vars: options.vars,
 envFile: options.envFile,
+dryRun: options.dryRun,
 };
-debugLogger.log(
+debugLogger.log("Resolved extract options:", extractOptions);
 if (options.recipe && options.recipe.length > 0) {
 debugLogger.log(`Processing ${options.recipe.length} recipe(s):`, options.recipe);
 // Validate all recipe files exist before processing starts
@@ -18691,7 +18904,7 @@ async function handleExtractCommand(options) {
 }
 }
 else {
-debugLogger.log(
+debugLogger.log("Processing single extraction without recipe");
 // For single extraction, apply baseDir to outFile if provided
 if (options.baseDir) {
 extractOptions.outFile = resolveOutputPath(extractOptions.outFile, options.baseDir, undefined, undefined, undefined);
@@ -18736,10 +18949,10 @@ async function processMultipleRecipes(recipePaths, baseOptions, debugLogger, cli
 * Reads template files and extracts frontmatter for inheritance
 */
 async function loadTemplateFrontmatter(srcDir, types, languages, attrFilters, debugLogger) {
-debugLogger?.log(
+debugLogger?.log("Loading template frontmatter for inheritance");
 const templateFiles = await loadAndFilterFiles(srcDir, types, languages, attrFilters, debugLogger);
 debugLogger?.log(`Found ${templateFiles.length} template files for frontmatter inheritance`);
-return templateFiles.map(file => file.attrs);
+return templateFiles.map((file) => file.attrs);
 }
 /**
 * Merges frontmatter values from multiple templates according to merge rules:
@@ -18749,8 +18962,8 @@ async function loadTemplateFrontmatter(srcDir, types, languages, attrFilters, de
 */
 function mergeTemplateFrontmatterValues(templateFrontmatters, fieldName) {
 const values = templateFrontmatters
-.map(fm => fm[fieldName])
-.filter(val => val !== undefined && val !== null);
+.map((fm) => fm[fieldName])
+.filter((val) => val !== undefined && val !== null);
 if (values.length === 0) {
 return undefined;
 }
@@ -18758,12 +18971,15 @@ function mergeTemplateFrontmatterValues(templateFrontmatters, fieldName) {
 return values[0];
 }
 // Check if all values are strings
-if (values.every(val => typeof val ===
+if (values.every((val) => typeof val === "string")) {
 // Split each string by newlines, flatten, remove duplicates while preserving order
 const allLines = [];
 const seen = new Set();
 for (const value of values) {
-const lines = value
+const lines = value
+.split("\n")
+.map((line) => line.trim())
+.filter((line) => line.length > 0);
 for (const line of lines) {
 if (!seen.has(line)) {
 seen.add(line);
@@ -18771,10 +18987,10 @@ function mergeTemplateFrontmatterValues(templateFrontmatters, fieldName) {
 }
 }
 }
-return allLines.join(
+return allLines.join("\n");
 }
 // Check if all values are arrays
-if (values.every(val => Array.isArray(val))) {
+if (values.every((val) => Array.isArray(val))) {
 return values.flat();
 }
 // For objects and mixed types, use the last value
@@ -18784,14 +19000,14 @@ function mergeTemplateFrontmatterValues(templateFrontmatters, fieldName) {
 * Processes @ syntax in frontmatter to inherit from template files
 */
 async function processFrontmatterInheritance(frontmatter, srcDir, types, languages, attrFilters, debugLogger) {
-if (!frontmatter || typeof frontmatter !==
+if (!frontmatter || typeof frontmatter !== "object") {
 return {};
 }
 const result = {};
 const inheritanceFields = [];
 // Separate inheritance fields (@ syntax) from regular fields
 for (const [key, value] of Object.entries(frontmatter)) {
-if (key.startsWith(
+if (key.startsWith("@") && value === true) {
 inheritanceFields.push(key.slice(1)); // Remove @ prefix
 debugLogger?.log(`Found inheritance field: ${key} -> ${key.slice(1)}`);
 }
@@ -18801,14 +19017,14 @@ async function processFrontmatterInheritance(frontmatter, srcDir, types, languag
 }
 // If no inheritance fields, return as-is
 if (inheritanceFields.length === 0) {
-debugLogger?.log(
+debugLogger?.log("No frontmatter inheritance fields found");
 return result;
 }
 debugLogger?.log(`Processing ${inheritanceFields.length} inheritance fields:`, inheritanceFields);
 // Load template frontmatters
 const templateFrontmatters = await loadTemplateFrontmatter(srcDir, types, languages, attrFilters, debugLogger);
 if (templateFrontmatters.length === 0) {
-debugLogger?.log(
+debugLogger?.log("No template files found for inheritance");
 return result;
 }
 // Process each inheritance field
@@ -18822,7 +19038,7 @@ async function processFrontmatterInheritance(frontmatter, srcDir, types, languag
 debugLogger?.log(`No value found for inherited field ${fieldName} in templates`);
 }
 }
-debugLogger?.log(
+debugLogger?.log("Frontmatter after inheritance processing:", Object.keys(result));
 return result;
 }
 /**
@@ -18832,45 +19048,50 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18832 19048 |   const resolvedPath = resolveRecipePath(recipePath);
18833 19049 |   debugLogger?.log(`Processing recipe at path: ${resolvedPath}`);
18834 19050 |   try {
18835       | -     debugLogger?.time(
      19051 | +     debugLogger?.time("Recipe file reading and parsing");
18836 19052 |       const content = await fs.readFile(resolvedPath, "utf-8");
18837 19053 |       const data = YAML.parse(content);
18838       | -     debugLogger?.timeEnd(
      19054 | +     debugLogger?.timeEnd("Recipe file reading and parsing");
18839 19055 |       if (!Array.isArray(data?.recipe)) {
18840 19056 |           throw new Error("Invalid recipe file: 'recipe' array not found");
18841 19057 |       }
18842 19058 |       // Validate recipe structure and show warnings
18843       | -     debugLogger?.time(
      19059 | +     debugLogger?.time("Recipe validation");
18844 19060 |       try {
18845 19061 |           const validator = await getRecipeValidator();
18846 19062 |           const validationResult = validator.validateRecipe(data);
18847 19063 |           // Always log validation warnings to help users improve their recipes
18848 19064 |           if (validationResult.warnings.length > 0) {
18849 19065 |               console.warn(`\nRecipe validation warnings for '${recipePath}':`);
18850       | -             validationResult.warnings.forEach(warning => {
      19066 | +             validationResult.warnings.forEach((warning) => {
18851 19067 |                   console.warn(` ${warning}`);
18852 19068 |               });
18853       | -             console.warn(
      19069 | +             console.warn(" These warnings do not prevent recipe execution but may indicate configuration issues.\n");
18854 19070 |           }
18855 19071 |           // Log errors but don't fail (maintain backward compatibility)
18856 19072 |           if (validationResult.errors.length > 0) {
18857       | -             debugLogger?.log(
      19073 | +             debugLogger?.log("Recipe validation errors (non-blocking):", validationResult.errors);
18858 19074 |           }
18859 19075 |       }
18860 19076 |       catch (validationError) {
18861 19077 |           // Don't fail recipe processing if validation fails
18862       | -         debugLogger?.log(
      19078 | +         debugLogger?.log("Recipe validation failed:", validationError);
18863 19079 |       }
18864       | -     debugLogger?.timeEnd(
      19080 | +     debugLogger?.timeEnd("Recipe validation");
18865 19081 |       debugLogger?.log(`Recipe contains ${data.recipe.length} items`);
18866 19082 |       // Read recipe config for baseDir
18867 19083 |       const recipeBaseDir = data.config?.baseDir;
18868       | -     debugLogger?.log(
      19084 | +     debugLogger?.log("Recipe config:", { baseDir: recipeBaseDir });
18869 19085 |       // Expand any imports in the recipe
18870       | -     debugLogger?.time(
      19086 | +     debugLogger?.time("Recipe import expansion");
18871 19087 |       const expandedRecipe = await expandRecipeImports(data.recipe, resolvedPath, debugLogger);
18872       | -     debugLogger?.timeEnd(
      19088 | +     debugLogger?.timeEnd("Recipe import expansion");
18873 19089 |       debugLogger?.log(`After import expansion: ${expandedRecipe.length} items`);
      19090 | +     // If dry-run mode, display preview and exit
      19091 | +     if (baseOptions.dryRun) {
      19092 | +         await displayRecipeDryRunPreview(recipePath, data, expandedRecipe, baseOptions, debugLogger, cliBaseDir, cliOutFile, cliSrc);
      19093 | +         return;
      19094 | +     }
18874 19095 |       // Initialize local tracker if not provided (for single recipe mode)
18875 19096 |       const localTracker = contentTracker || new ContentTracker();
18876 19097 |       for (const [index, item] of expandedRecipe.entries()) {
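The lines added at 19090–19094 introduce a dry-run gate: when `baseOptions.dryRun` is set, the recipe is only previewed via `displayRecipeDryRunPreview` and processing returns before any output is written. A minimal TypeScript sketch of that early-return pattern, with simplified types that are not the package's actual interfaces:

    interface RecipeItem {
      out?: string;
      type?: string;
    }

    interface BaseOptions {
      dryRun?: boolean;
    }

    async function runRecipe(
      items: RecipeItem[],
      options: BaseOptions,
      preview: (items: RecipeItem[]) => Promise<void>,
      execute: (item: RecipeItem) => Promise<void>,
    ): Promise<void> {
      // Dry run: show what would happen, then return before any file is touched.
      if (options.dryRun) {
        await preview(items);
        return;
      }
      for (const item of items) {
        await execute(item);
      }
    }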
@@ -18879,7 +19100,7 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18879 19100 |               out: item.out,
18880 19101 |               type: item.type,
18881 19102 |               language: item.language,
18882       | -             mode: item.mode
      19103 | +             mode: item.mode,
18883 19104 |           });
18884 19105 |           // Priority: CLI --out option > recipe item.out > baseOptions.outFile default
18885 19106 |           const itemOut = cliOutFile || item.out || baseOptions.outFile;
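The priority comment above is implemented with plain `||` chaining, so the CLI `--out` value wins over the recipe item's `out`, which wins over the base default. A small illustration (the helper name below is hypothetical, not part of the package):

    // Mirrors the `cliOutFile || item.out || baseOptions.outFile` chain.
    function resolveOut(
      cliOut: string | undefined,
      itemOut: string | undefined,
      fallback: string,
    ): string {
      return cliOut || itemOut || fallback;
    }

    // resolveOut("cli.md", "item.md", "default.md")  -> "cli.md"
    // resolveOut(undefined, "item.md", "default.md") -> "item.md"
    // resolveOut(undefined, undefined, "default.md") -> "default.md"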
@@ -18890,12 +19111,14 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18890 19111 |               fallback: baseOptions.outFile,
18891 19112 |               cliBaseDir,
18892 19113 |               importBaseDir: item._importBaseDir,
18893       | -             recipeBaseDir
      19114 | +             recipeBaseDir,
18894 19115 |           });
18895 19116 |           // Generate the content that would be written to check for duplicates
18896       | -         const { srcDir, types, languages, attrFilters, title, attr, vars, envFile } = baseOptions;
      19117 | +         const { srcDir, types, languages, attrFilters, title, attr, vars, envFile, } = baseOptions;
18897 19118 |           const itemTypes = item.type ? parseCommaSeparated(item.type) : types;
18898       | -         const itemLanguages = item.language
      19119 | +         const itemLanguages = item.language
      19120 | +             ? parseCommaSeparated(item.language)
      19121 | +             : languages;
18899 19122 |           const itemTitle = item.title || title;
18900 19123 |           // Combine base attrFilters with item-specific filters
18901 19124 |           let combinedAttrFilters = [...attrFilters];
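Item-level `type` and `language` are comma-separated strings parsed per item, falling back to the base options when absent. The real `parseCommaSeparated` implementation is not shown in this diff; the sketch below assumes a simple trim-and-split behaviour for illustration only:

    // Assumed behaviour; the package's parseCommaSeparated may differ.
    function parseCommaSeparated(value: string): string[] {
      return value
        .split(",")
        .map((part) => part.trim())
        .filter((part) => part.length > 0);
    }

    const baseLanguages = ["en", "ja"];
    const itemLanguage: string | undefined = "en, fr";

    // Item-level value wins when present; otherwise the base option applies.
    const itemLanguages = itemLanguage
      ? parseCommaSeparated(itemLanguage)
      : baseLanguages;
    // -> ["en", "fr"]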
@@ -18909,43 +19132,52 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18909 19132 |               itemLanguages,
18910 19133 |               combinedAttrFilters,
18911 19134 |               baseAttrFilters: attrFilters,
18912       | -             itemFilters: item.filters ||
      19135 | +             itemFilters: item.filters || "none",
18913 19136 |           });
18914 19137 |           // Resolve template variables for this item (merge import variables, recipe item variables, and CLI variables)
18915 19138 |           let itemVars = vars;
18916 19139 |           if (item.variables || item._importVariables) {
18917       | -             const cliVariables = vars
      19140 | +             const cliVariables = vars
      19141 | +                 ? VariableResolver.parseCliVariables(vars)
      19142 | +                 : {};
18918 19143 |               const importVariables = item._importVariables || {};
18919 19144 |               const itemVariables = item.variables || {};
18920 19145 |               // Merge with priority: CLI > item > import
18921       | -             const mergedVariables = {
      19146 | +             const mergedVariables = {
      19147 | +                 ...importVariables,
      19148 | +                 ...itemVariables,
      19149 | +                 ...cliVariables,
      19150 | +             };
18922 19151 |               itemVars = Object.entries(mergedVariables)
18923 19152 |                   .map(([key, value]) => `${key}=${value}`)
18924       | -                 .join(
      19153 | +                 .join(",");
18925 19154 |               debugLogger?.log(`Merged variables for item ${index + 1}:`, {
18926 19155 |                   importVariables,
18927 19156 |                   itemVariables,
18928 19157 |                   cliVariables,
18929       | -                 merged: mergedVariables
      19158 | +                 merged: mergedVariables,
18930 19159 |               });
18931 19160 |           }
18932 19161 |           // Resolve source directory for this item with priority: CLI --src > item.src > import.src > default
18933       | -         const itemSrcDir = resolveItemSrcDir(cliSrc, item.src, item._importSrc);
      19162 | +         const itemSrcDir = resolveItemSrcDir(cliSrc, item.src, item._importSrc, item._recipePath);
18934 19163 |           debugLogger?.log(`Resolved source directory for item ${index + 1}:`, {
18935 19164 |               cliSrc,
18936 19165 |               itemSrc: item.src,
18937 19166 |               importSrc: item._importSrc,
18938       | -
      19167 | +             recipePath: item._recipePath,
      19168 | +             resolved: itemSrcDir,
18939 19169 |           });
18940 19170 |           debugLogger?.time(`Content generation for item ${index + 1}`);
18941 19171 |           const filtered = await loadAndFilterFiles(itemSrcDir, itemTypes, itemLanguages, combinedAttrFilters, debugLogger);
18942 19172 |           const merged = filtered.map((f) => f.content.trim()).join("\n\n");
18943       | -         const contentWithTitle = itemTitle
      19173 | +         const contentWithTitle = itemTitle
      19174 | +             ? `# ${itemTitle}\n\n${merged}`
      19175 | +             : merged;
18944 19176 |           debugLogger?.timeEnd(`Content generation for item ${index + 1}`);
18945 19177 |           // Check if this exact content has already been written to this file
18946       | -         debugLogger?.time(
      19178 | +         debugLogger?.time("Duplicate content check");
18947 19179 |           const isDuplicate = localTracker.hasContent(outputFile, contentWithTitle);
18948       | -         debugLogger?.timeEnd(
      19180 | +         debugLogger?.timeEnd("Duplicate content check");
18949 19181 |           if (isDuplicate) {
18950 19182 |               debugLogger?.log(`Skipping duplicate content for ${outputFile}`);
18951 19183 |               // eslint-disable-next-line no-console
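The variable merge above relies on object-spread ordering: later spreads overwrite earlier ones, which yields the documented CLI > item > import precedence before the result is flattened back into a comma-joined `key=value` list. A standalone TypeScript illustration (the sample values are invented, not from the package):

    // Later spreads overwrite earlier ones: CLI > item > import.
    const importVariables = { owner: "import", region: "eu" };
    const itemVariables = { owner: "item", name: "docs" };
    const cliVariables = { owner: "cli" };

    const merged = { ...importVariables, ...itemVariables, ...cliVariables };
    // -> { owner: "cli", region: "eu", name: "docs" }

    // Flattened back into the "k=v,k=v" form the downstream options expect.
    const itemVars = Object.entries(merged)
      .map(([key, value]) => `${key}=${value}`)
      .join(",");
    // -> "owner=cli,region=eu,name=docs"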
@@ -18983,6 +19215,7 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18983 19215 |               debug: baseOptions.debug,
18984 19216 |               vars: itemVars,
18985 19217 |               envFile: envFile,
      19218 | +             dryRun: baseOptions.dryRun,
18986 19219 |           };
18987 19220 |           debugLogger?.time(`Processing single extraction for item ${index + 1}`);
18988 19221 |           await processSingle(options, debugLogger || new DebugLogger(false));
@@ -18993,11 +19226,11 @@ async function processRecipe(recipePath, baseOptions, contentTracker, debugLogge
18993 19226 |       }
18994 19227 |   }
18995 19228 |   catch (error) {
18996       | -     if (recipePath.startsWith(
      19229 | +     if (recipePath.startsWith(":")) {
18997 19230 |           const presetName = recipePath.slice(1);
18998 19231 |           debugLogger?.log(`Error processing preset ${presetName}:`, error);
18999 19232 |           // Only treat ENOENT errors as "preset not found"
19000       | -         if (error instanceof Error && error.message.includes(
      19233 | +         if (error instanceof Error && error.message.includes("ENOENT")) {
19001 19234 |               throw new Error(`Preset ':${presetName}' not found. Use 'ai-rules presets' to list available presets.`);
19002 19235 |           }
19003 19236 |           // For other errors (like variable validation), add context but preserve the original error
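In this error path, a recipe path beginning with `:` is treated as a preset name, and only errors whose message contains `ENOENT` are reported as a missing preset; other errors keep their original context. A rough TypeScript sketch of that mapping, with simplified error handling that is not the package's exact logic:

    // Only the ":preset" prefix and the ENOENT mapping are modelled here.
    function explainRecipeError(recipePath: string, error: unknown): Error {
      if (recipePath.startsWith(":")) {
        const presetName = recipePath.slice(1);
        if (error instanceof Error && error.message.includes("ENOENT")) {
          return new Error(`Preset ':${presetName}' not found.`);
        }
      }
      // Other errors are passed through with their original message.
      return error instanceof Error ? error : new Error(String(error));
    }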
package/dist/optionValidator.d.ts.map
CHANGED

@@ -1 +1 @@
1           | - {"version":3,"file":"optionValidator.d.ts","sourceRoot":"","sources":["../src/optionValidator.ts"],"names":[],"mappings":"AAAA,oBAAY,SAAS;IACnB,MAAM,WAAW;IACjB,OAAO,YAAY;IACnB,SAAS,cAAc;CACxB;AAED,MAAM,WAAW,cAAc;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,EAAE,CAAC;IACrB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,SAAS,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC/B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB;AAED,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAIpC;WAEY,sBAAsB,CAAC,OAAO,EAAE,cAAc,GAAG,IAAI;WAYrD,oBAAoB,CAAC,OAAO,EAAE,YAAY,GAAG,IAAI;CAKhE"}
          1 | + {"version":3,"file":"optionValidator.d.ts","sourceRoot":"","sources":["../src/optionValidator.ts"],"names":[],"mappings":"AAAA,oBAAY,SAAS;IACnB,MAAM,WAAW;IACjB,OAAO,YAAY;IACnB,SAAS,cAAc;CACxB;AAED,MAAM,WAAW,cAAc;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,EAAE,CAAC;IACrB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,SAAS,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC/B,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB;AAED,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAIpC;WAEY,sBAAsB,CAAC,OAAO,EAAE,cAAc,GAAG,IAAI;WAYrD,oBAAoB,CAAC,OAAO,EAAE,YAAY,GAAG,IAAI;CAKhE"}

package/package.json
CHANGED
@@ -1,6 +1,6 @@
1     1     |   {
2     2     |     "name": "@aramassa/ai-rules",
3           | -   "version": "0.
      3     | +   "version": "0.4.2",
4     4     |     "description": "This repository collects guidelines and instructions for developing AI agents. It contains documents covering communication rules, coding standards, testing strategies, and general operational practices.",
5     5     |     "workspaces": [
6     6     |       "packages/extract",