@mastra/agent-builder 0.0.5-alpha.1 → 0.0.5-alpha.2
This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/CHANGELOG.md +10 -0
- package/dist/defaults.d.ts +15 -15
- package/dist/index.js +109 -115
- package/dist/index.js.map +1 -1
- package/dist/workflows/workflow-builder/tools.d.ts +1 -1
- package/package.json +5 -5
package/dist/index.js
CHANGED
@@ -224,7 +224,7 @@ function isInWorkspaceSubfolder(cwd) {
 if (currentDir === cwd) {
 continue;
 }
-console.
+console.info(`Checking for workspace indicators in: ${currentDir}`);
 if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
 return true;
 }
@@ -244,7 +244,7 @@ function isInWorkspaceSubfolder(cwd) {
 }
 return false;
 } catch (error) {
-console.
+console.warn(`Error in workspace detection: ${error}`);
 return false;
 }
 }
@@ -315,12 +315,12 @@ function spawnWithOutput(command, args, options) {
 }
 async function spawnSWPM(cwd, command, packageNames) {
 try {
-console.
+console.info("Running install command with swpm");
 const swpmPath = createRequire(import.meta.filename).resolve("swpm");
 await spawn(swpmPath, [command, ...packageNames], { cwd });
 return;
 } catch (e) {
-console.
+console.warn("Failed to run install command with swpm", e);
 }
 try {
 let packageManager;
@@ -348,11 +348,11 @@ async function spawnSWPM(cwd, command, packageNames) {
 }
 }
 args.push(...packageNames);
-console.
+console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
 await spawn(packageManager, args, { cwd });
 return;
 } catch (e) {
-console.
+console.warn(`Failed to run install command with native package manager: ${e}`);
 }
 throw new Error(`Failed to run install command with swpm and native package managers`);
 }
@@ -383,10 +383,10 @@ async function logGitState(targetPath, label) {
 const gitStatusResult = await git(targetPath, "status", "--porcelain");
 const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
 const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
-console.
-console.
-console.
-console.
+console.info(`\u{1F4CA} Git state ${label}:`);
+console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
+console.info("Recent commits:", gitLogResult.stdout.trim());
+console.info("Total commits:", gitCountResult.stdout.trim());
 } catch (gitError) {
 console.warn(`Could not get git state ${label}:`, gitError);
 }
@@ -458,18 +458,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
 try {
 if (!await isInsideGitRepo(targetPath)) return;
 await git(targetPath, "checkout", "-b", branchName);
-console.
+console.info(`Created new branch: ${branchName}`);
 } catch (error) {
 const errorStr = error instanceof Error ? error.message : String(error);
 if (errorStr.includes("already exists")) {
 try {
 await git(targetPath, "checkout", branchName);
-console.
+console.info(`Switched to existing branch: ${branchName}`);
 } catch {
 const timestamp = Date.now().toString().slice(-6);
 const uniqueBranchName = `${branchName}-${timestamp}`;
 await git(targetPath, "checkout", "-b", uniqueBranchName);
-console.
+console.info(`Created unique branch: ${uniqueBranchName}`);
 }
 } else {
 throw error;
@@ -479,9 +479,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
 async function backupAndReplaceFile(sourceFile, targetFile) {
 const backupFile = `${targetFile}.backup-${Date.now()}`;
 await copyFile(targetFile, backupFile);
-console.
+console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
 await copyFile(sourceFile, targetFile);
-console.
+console.info(`\u{1F504} Replaced file with template version (backup created)`);
 }
 async function renameAndCopyFile(sourceFile, targetFile) {
 let counter = 1;
@@ -495,7 +495,7 @@ async function renameAndCopyFile(sourceFile, targetFile) {
 counter++;
 }
 await copyFile(sourceFile, uniqueTargetFile);
-console.
+console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
 return uniqueTargetFile;
 }
 var isValidMastraLanguageModel = (model) => {
@@ -544,7 +544,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
 if (!hasConflict) {
 newEntries.push(trimmed);
 } else {
-console.
+console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
 }
 }
 }
@@ -579,7 +579,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
 if (!existingVars.has(key)) {
 newVars.push({ key, value });
 } else {
-console.
+console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
 }
 }
 if (newVars.length === 0) {
@@ -600,7 +600,7 @@ var detectAISDKVersion = async (projectPath) => {
 try {
 const packageJsonPath = join(projectPath, "package.json");
 if (!existsSync(packageJsonPath)) {
-console.
+console.info("No package.json found, defaulting to v2");
 return "v2";
 }
 const packageContent = await readFile(packageJsonPath, "utf-8");
@@ -618,16 +618,16 @@ var detectAISDKVersion = async (projectPath) => {
 if (versionMatch) {
 const majorVersion = parseInt(versionMatch[1]);
 if (majorVersion >= 2) {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
 return "v2";
 } else {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
 return "v1";
 }
 }
 }
 }
-console.
+console.info("No AI SDK version detected, defaulting to v2");
 return "v2";
 } catch (error) {
 console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
@@ -688,7 +688,7 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 return null;
 }
 const modelInstance = await providerFn();
-console.
+console.info(`Created ${provider} model instance (${version}): ${modelId}`);
 return modelInstance;
 } catch (error) {
 console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
@@ -702,7 +702,7 @@ var resolveModel = async ({
 }) => {
 const modelFromContext = runtimeContext.get("model");
 if (modelFromContext) {
-console.
+console.info("Using model from runtime context");
 if (isValidMastraLanguageModel(modelFromContext)) {
 return modelFromContext;
 }
@@ -712,7 +712,7 @@ var resolveModel = async ({
 }
 const selectedModel = runtimeContext.get("selectedModel");
 if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
-console.
+console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
 const version = await detectAISDKVersion(projectPath);
 const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
 if (modelInstance) {
@@ -720,7 +720,7 @@ var resolveModel = async ({
 return modelInstance;
 }
 }
-console.
+console.info("Using default model");
 return defaultModel;
 };

@@ -1756,7 +1756,7 @@ export const mastra = new Mastra({
 error: stderr
 };
 } catch (error) {
-console.
+console.error(error);
 return {
 success: false,
 message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1771,7 +1771,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Installing packages:", JSON.stringify(packages, null, 2));
 const packageStrings = packages.map((p) => `${p.name}`);
 await spawnSWPM(projectPath || "", "add", packageStrings);
 return {
@@ -1795,7 +1795,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
 let packageNames = [];
 if (packages && packages.length > 0) {
 packageNames = packages.map((p) => `${p.name}`);
@@ -3388,13 +3388,13 @@ var analyzePackageStep = createStep({
 inputSchema: CloneTemplateResultSchema,
 outputSchema: PackageAnalysisSchema,
 execute: async ({ inputData }) => {
-console.
+console.info("Analyzing template package.json...");
 const { templateDir } = inputData;
 const packageJsonPath = join(templateDir, "package.json");
 try {
 const packageJsonContent = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(packageJsonContent);
-console.
+console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
 return {
 dependencies: packageJson.dependencies || {},
 devDependencies: packageJson.devDependencies || {},
@@ -3430,7 +3430,7 @@ var discoverUnitsStep = createStep({
 const { templateDir } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
-console.
+console.info("targetPath", targetPath);
 const model = await resolveModel({ runtimeContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 try {
 const agent = new Agent({
@@ -3518,7 +3518,7 @@ Return the actual exported names of the units, as well as the file names.`,
 template.other?.forEach((otherId) => {
 units.push({ kind: "other", id: otherId.name, file: otherId.file });
 });
-console.
+console.info("Discovered units:", JSON.stringify(units, null, 2));
 if (units.length === 0) {
 throw new Error(`No Mastra units (agents, workflows, tools) found in template.
 Possible causes:
@@ -3594,7 +3594,7 @@ var packageMergeStep = createStep({
 inputSchema: PackageMergeInputSchema,
 outputSchema: PackageMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Package merge step starting...");
 const { slug, packageInfo } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3671,7 +3671,7 @@ var installStep = createStep({
 inputSchema: InstallInputSchema,
 outputSchema: InstallResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Running install step...");
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
 await spawnSWPM(targetPath, "install", []);
@@ -3699,7 +3699,7 @@ var programmaticFileCopyStep = createStep({
 inputSchema: FileCopyInputSchema,
 outputSchema: FileCopyResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Programmatic file copy step starting...");
 const { orderedUnits, templateDir, commitSha, slug } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3742,7 +3742,7 @@ var programmaticFileCopyStep = createStep({
 }
 };
 for (const unit of orderedUnits) {
-console.
+console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
 let sourceFile;
 let resolvedUnitFile;
 if (unit.file.includes("/")) {
@@ -3773,7 +3773,7 @@ var programmaticFileCopyStep = createStep({
 }
 const targetDir = dirname(resolvedUnitFile);
 const namingConvention = await analyzeNamingConvention(targetDir);
-console.
+console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
 const hasExtension = extname(unit.id) !== "";
 const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
 const fileExtension = extname(unit.file);
@@ -3781,7 +3781,7 @@ var programmaticFileCopyStep = createStep({
 const targetFile = resolve(targetPath, targetDir, convertedFileName);
 if (existsSync(targetFile)) {
 const strategy = determineConflictStrategy(unit, targetFile);
-console.
+console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
 switch (strategy) {
 case "skip":
 conflicts.push({
@@ -3790,7 +3790,7 @@ var programmaticFileCopyStep = createStep({
 sourceFile: unit.file,
 targetFile: `${targetDir}/${convertedFileName}`
 });
-console.
+console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
 continue;
 case "backup-and-replace":
 try {
@@ -3800,7 +3800,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(
 `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
 );
 continue;
@@ -3821,7 +3821,7 @@ var programmaticFileCopyStep = createStep({
 destination: uniqueTargetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
 continue;
 } catch (renameError) {
 conflicts.push({
@@ -3850,7 +3850,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
 } catch (copyError) {
 conflicts.push({
 unit: { kind: unit.kind, id: unit.id },
@@ -3871,7 +3871,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Copied tsconfig.json from template to target");
 } else {
 const minimalTsconfig = {
 compilerOptions: {
@@ -3893,7 +3893,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Generated minimal tsconfig.json in target");
 }
 }
 } catch (e) {
@@ -3918,7 +3918,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetMastraIndex,
 unit: { kind: "other", id: "mastra-index" }
 });
-console.
+console.info("\u2713 Copied Mastra index file from template");
 }
 }
 } catch (e) {
@@ -3942,7 +3942,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore" }
 });
-console.
+console.info("\u2713 Copied .gitignore from template to target");
 } else {
 const targetContent = await readFile(targetGitignore, "utf-8");
 const templateContent = await readFile(templateGitignore, "utf-8");
@@ -3955,9 +3955,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore-merge" }
 });
-console.
+console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new .gitignore entries to add from template");
 }
 }
 }
@@ -3985,7 +3985,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env" }
 });
-console.
+console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
 } else {
 const targetContent = await readFile(targetEnv, "utf-8");
 const mergedContent = mergeEnvFiles(targetContent, variables, slug);
@@ -3997,9 +3997,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env-merge" }
 });
-console.
+console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new environment variables to add (all already exist in .env)");
 }
 }
 }
@@ -4020,13 +4020,13 @@ var programmaticFileCopyStep = createStep({
 fileList,
 { skipIfNoStaged: true }
 );
-console.
+console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
 } catch (commitError) {
 console.warn("Failed to commit copied files:", commitError);
 }
 }
 const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
-console.
+console.info(message);
 return {
 success: true,
 copiedFiles,
@@ -4051,7 +4051,7 @@ var intelligentMergeStep = createStep({
 inputSchema: IntelligentMergeInputSchema,
 outputSchema: IntelligentMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Intelligent merge step starting...");
 const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -4178,8 +4178,8 @@ Template information:
 const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
 const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
 const mastraIndexExists = existsSync(targetMastraIndex);
-console.
-console.
+console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
+console.info(
 "Registrable components:",
 registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
 );
@@ -4193,7 +4193,7 @@ Template information:
 notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
 });
 }
-console.
+console.info(`Creating task list with ${tasks.length} tasks...`);
 await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
 await logGitState(targetPath, "before intelligent merge");
 const prompt = `
@@ -4245,12 +4245,12 @@ Start by listing your tasks and work through them systematically!
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
 if (chunkData.toolName === "manageTaskList") {
@@ -4264,7 +4264,7 @@ Start by listing your tasks and work through them systematically!
 content: toolResult.content || "",
 notes: toolResult.notes
 });
-console.
+console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
 }
 } catch (parseError) {
 console.warn("Failed to parse task management result:", parseError);
@@ -4319,12 +4319,12 @@ var validationAndFixStep = createStep({
 inputSchema: ValidationFixInputSchema,
 outputSchema: ValidationFixResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Validation and fix step starting...");
 const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
 if (!hasChanges) {
-console.
+console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
 return {
 success: true,
 applied: false,
@@ -4336,7 +4336,7 @@ var validationAndFixStep = createStep({
 }
 };
 }
-console.
+console.info(
 `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
 );
 let currentIteration = 1;
@@ -4462,7 +4462,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 executeCommand: allTools.executeCommand
 }
 });
-console.
+console.info("Starting validation and fix agent with internal loop...");
 let validationResults = {
 valid: false,
 errorsFixed: 0,
@@ -4473,7 +4473,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 // Store the actual error details
 };
 while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
-console.
+console.info(`
 === Validation Iteration ${currentIteration} ===`);
 const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.

@@ -4493,13 +4493,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId,
 iteration: currentIteration
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 }
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
@@ -4508,7 +4508,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 lastValidationResult = toolResult;
 if (toolResult?.summary) {
 iterationErrors = toolResult.summary.totalErrors || 0;
-console.
+console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
 }
 }
 }
@@ -4520,12 +4520,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 if (iterationErrors > 0 && lastValidationResult?.errors) {
 validationResults.lastValidationErrors = lastValidationResult.errors;
 }
-console.
+console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
 if (iterationErrors === 0) {
-console.
+console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
 break;
 } else if (currentIteration >= maxIterations) {
-console.
+console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
 break;
 }
 currentIteration++;
@@ -4570,7 +4570,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 } finally {
 try {
 await rm(templateDir, { recursive: true, force: true });
-console.
+console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
 } catch (cleanupError) {
 console.warn("Failed to cleanup template directory:", cleanupError);
 }
@@ -5063,12 +5063,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
 let storedQAPairs = runtimeContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5082,10 +5080,6 @@ var planningIterationStep = createStep({
 });
 runtimeContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
 const model = await resolveModel({ runtimeContext });
 const planningAgent = new Agent({
@@ -5133,8 +5127,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5143,7 +5137,7 @@ var planningIterationStep = createStep({
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5149,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5194,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
-console.
+console.info(`Requesting user approval for ${tasks.length} tasks`);
 const summary = `Task List for Approval:

 ${tasks.length} tasks planned:
@@ -5213,14 +5207,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
 });
 }
 if (resumeData.approved) {
-console.
+console.info("Task list approved by user");
 return {
 approved: true,
 tasks,
 message: "Task list approved, ready for execution"
 };
 } else {
-console.
+console.info("Task list rejected by user");
 return {
 approved: false,
 tasks,
@@ -5237,7 +5231,7 @@ var planningAndApprovalWorkflow = createWorkflow({
 outputSchema: TaskApprovalOutputSchema,
 steps: [planningIterationStep, taskApprovalStep]
 }).dountil(planningIterationStep, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
 return inputData.planComplete === true;
 }).map(async ({ inputData }) => {
 return {
@@ -5692,12 +5686,12 @@ var workflowDiscoveryStep = createStep({
 inputSchema: WorkflowBuilderInputSchema,
 outputSchema: WorkflowDiscoveryResultSchema,
 execute: async ({ inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting workflow discovery...");
 const { projectPath = process.cwd() } = inputData;
 try {
 const workflowsPath = join(projectPath, "src/mastra/workflows");
 if (!existsSync(workflowsPath)) {
-console.
+console.info("No workflows directory found");
 return {
 success: true,
 workflows: [],
@@ -5726,7 +5720,7 @@ var workflowDiscoveryStep = createStep({
 }
 }
 }
-console.
+console.info(`Discovered ${workflows.length} existing workflows`);
 return {
 success: true,
 workflows,
@@ -5751,7 +5745,7 @@ var projectDiscoveryStep = createStep({
 inputSchema: WorkflowDiscoveryResultSchema,
 outputSchema: ProjectDiscoveryResultSchema,
 execute: async ({ inputData: _inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting project discovery...");
 try {
 const projectPath = process.cwd();
 const projectStructure = {
@@ -5772,7 +5766,7 @@ var projectDiscoveryStep = createStep({
 console.warn("Failed to read package.json:", error);
 }
 }
-console.
+console.info("Project discovery completed");
 return {
 success: true,
 structure: {
@@ -5813,7 +5807,7 @@ var workflowResearchStep = createStep({
 inputSchema: ProjectDiscoveryResultSchema,
 outputSchema: WorkflowResearchResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Starting workflow research...");
 try {
 const model = await resolveModel({ runtimeContext });
 const researchAgent = new Agent({
@@ -5845,7 +5839,7 @@ var workflowResearchStep = createStep({
 error: "Research agent failed to generate valid response"
 };
 }
-console.
+console.info("Research completed successfully");
 return {
 success: true,
 documentation: {
@@ -5891,12 +5885,12 @@ var taskExecutionStep = createStep({
 research,
 projectPath
 } = inputData;
-console.
-console.
+console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
+console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
 try {
 const model = await resolveModel({ runtimeContext });
 const currentProjectPath = projectPath || process.cwd();
-console.
+console.info("Pre-populating taskManager with planned tasks...");
 const taskManagerContext = {
 action: "create",
 tasks: tasks.map((task) => ({
@@ -5909,7 +5903,7 @@ var taskExecutionStep = createStep({
 }))
 };
 const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
-console.
+console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
 if (!taskManagerResult.success) {
 throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
 }
@@ -5962,13 +5956,13 @@ ${additionalInstructions}`;
 const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
 const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
 const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
-console.
+console.info(`
 === EXECUTION ITERATION ${iterationCount} ===`);
-console.
-console.
+console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
+console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
 allTasksCompleted = pendingTasks.length === 0;
 if (allTasksCompleted) {
-console.
+console.info("All tasks completed! Breaking execution loop.");
 break;
 }
 const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
@@ -5992,19 +5986,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
 finalMessage += chunk.payload.text;
 }
 if (chunk.type === "step-finish") {
-console.
+console.info(finalMessage);
 finalMessage = "";
 }
 if (chunk.type === "tool-result") {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "finish") {
-console.
+console.info(chunk);
 }
 }
 await stream.consumeStream();
 finalResult = await stream.object;
-console.
+console.info(`Iteration ${iterationCount} result:`, { finalResult });
 if (!finalResult) {
 throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
 }
@@ -6012,17 +6006,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
 const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
 allTasksCompleted = postPendingTasks.length === 0;
-console.
+console.info(
 `After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
 );
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
+console.info(
 `Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
 );
 break;
 }
 if (finalResult.status === "completed" && !allTasksCompleted) {
-console.
+console.info(
 `Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
 );
 }
@@ -6035,8 +6029,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
 throw new Error("No result received from agent execution");
 }
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
-console.
+console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
+console.info("finalResult", JSON.stringify(finalResult, null, 2));
 return suspend({
 questions: finalResult.questions,
 currentProgress: finalResult.progress,
@@ -6052,7 +6046,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const finalAllTasksCompleted = finalPendingTasks.length === 0;
 const success = finalAllTasksCompleted && !finalResult.error;
 const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
-console.
+console.info(message);
 const missingTasks = finalPendingTasks.map((task) => task.id);
 const validationErrors = [];
 if (finalResult.error) {
@@ -6120,7 +6114,7 @@ var workflowBuilderWorkflow = createWorkflow({
 userAnswers: void 0
 };
 }).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow check: approved=${inputData.approved}`);
 return inputData.approved === true;
 }).map(async ({ getStepResult, getInitData }) => {
 const initData = getInitData();