@mastra/agent-builder 0.0.0-toolOptionTypes-20250917085558 → 0.0.0-usechat-duplicate-20251016110554
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +200 -4
- package/dist/agent/index.d.ts +4 -6
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/defaults.d.ts +45 -45
- package/dist/index.js +136 -160
- package/dist/index.js.map +1 -1
- package/dist/utils.d.ts +1 -1
- package/dist/utils.d.ts.map +1 -1
- package/dist/workflows/task-planning/schema.d.ts +4 -4
- package/dist/workflows/task-planning/task-planning.d.ts +23 -11
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -1
- package/dist/workflows/template-builder/template-builder.d.ts +118 -20
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/schema.d.ts +12 -12
- package/dist/workflows/workflow-builder/tools.d.ts +3 -3
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +68 -36
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-map.d.ts +2 -3767
- package/dist/workflows/workflow-map.d.ts.map +1 -1
- package/package.json +9 -9
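Two API shifts dominate the `dist/index.js` changes shown below: v2 model resolution now goes through `ModelRouterLanguageModel` with a single `"provider/modelId"` string instead of per-provider `@ai-sdk/*-v5` imports, and structured generation uses the newly imported `tryGenerateWithJsonFallback` / `tryStreamWithJsonFallback` helpers while the earlier generate/stream overrides move to `generateLegacy` / `streamLegacy`. A minimal sketch of the new call shapes, with import paths and option fields taken from the bundled output below; the agent name, instructions, and schema here are illustrative only, not part of the package:

```ts
// Sketch only: mirrors the call shapes visible in the diff below.
import { Agent, tryGenerateWithJsonFallback } from '@mastra/core/agent';
import { ModelRouterLanguageModel } from '@mastra/core/llm';
import { z } from 'zod';

async function discoverUnits(prompt: string) {
  // v2 models are routed through a single "provider/modelId" string
  // rather than dynamic @ai-sdk/*-v5 imports.
  const model = new ModelRouterLanguageModel('openai/gpt-4.1');

  const agent = new Agent({
    name: 'unit-discovery',                       // hypothetical name for this sketch
    instructions: 'List the exported Mastra units.',
    model,
  });

  // Example schema; the real workflow builds a richer one.
  const output = z.object({
    agents: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
  });

  // Structured output with a JSON-parsing fallback, as used by the discovery step.
  return tryGenerateWithJsonFallback(agent, prompt, {
    structuredOutput: { schema: output },
    maxSteps: 100,
  });
}
```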
package/dist/index.js (CHANGED)

```diff
@@ -1,4 +1,4 @@
-import { Agent } from '@mastra/core/agent';
+import { Agent, tryGenerateWithJsonFallback, tryStreamWithJsonFallback } from '@mastra/core/agent';
 import { Memory } from '@mastra/memory';
 import { TokenLimiter } from '@mastra/memory/processors';
 import { exec as exec$1, execFile as execFile$1, spawn as spawn$1 } from 'child_process';
@@ -10,7 +10,7 @@ import { z } from 'zod';
 import { existsSync, readFileSync } from 'fs';
 import { createRequire } from 'module';
 import { promisify } from 'util';
-import {
+import { ModelRouterLanguageModel } from '@mastra/core/llm';
 import { MemoryProcessor } from '@mastra/core/memory';
 import { tmpdir } from 'os';
 import { openai } from '@ai-sdk/openai';
@@ -224,7 +224,7 @@ function isInWorkspaceSubfolder(cwd) {
 if (currentDir === cwd) {
 continue;
 }
-console.
+console.info(`Checking for workspace indicators in: ${currentDir}`);
 if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
 return true;
 }
@@ -244,7 +244,7 @@ function isInWorkspaceSubfolder(cwd) {
 }
 return false;
 } catch (error) {
-console.
+console.warn(`Error in workspace detection: ${error}`);
 return false;
 }
 }
@@ -315,12 +315,12 @@ function spawnWithOutput(command, args, options) {
 }
 async function spawnSWPM(cwd, command, packageNames) {
 try {
-console.
+console.info("Running install command with swpm");
 const swpmPath = createRequire(import.meta.filename).resolve("swpm");
 await spawn(swpmPath, [command, ...packageNames], { cwd });
 return;
 } catch (e) {
-console.
+console.warn("Failed to run install command with swpm", e);
 }
 try {
 let packageManager;
@@ -348,11 +348,11 @@ async function spawnSWPM(cwd, command, packageNames) {
 }
 }
 args.push(...packageNames);
-console.
+console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
 await spawn(packageManager, args, { cwd });
 return;
 } catch (e) {
-console.
+console.warn(`Failed to run install command with native package manager: ${e}`);
 }
 throw new Error(`Failed to run install command with swpm and native package managers`);
 }
@@ -383,10 +383,10 @@ async function logGitState(targetPath, label) {
 const gitStatusResult = await git(targetPath, "status", "--porcelain");
 const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
 const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
-console.
-console.
-console.
-console.
+console.info(`\u{1F4CA} Git state ${label}:`);
+console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
+console.info("Recent commits:", gitLogResult.stdout.trim());
+console.info("Total commits:", gitCountResult.stdout.trim());
 } catch (gitError) {
 console.warn(`Could not get git state ${label}:`, gitError);
 }
@@ -458,18 +458,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
 try {
 if (!await isInsideGitRepo(targetPath)) return;
 await git(targetPath, "checkout", "-b", branchName);
-console.
+console.info(`Created new branch: ${branchName}`);
 } catch (error) {
 const errorStr = error instanceof Error ? error.message : String(error);
 if (errorStr.includes("already exists")) {
 try {
 await git(targetPath, "checkout", branchName);
-console.
+console.info(`Switched to existing branch: ${branchName}`);
 } catch {
 const timestamp = Date.now().toString().slice(-6);
 const uniqueBranchName = `${branchName}-${timestamp}`;
 await git(targetPath, "checkout", "-b", uniqueBranchName);
-console.
+console.info(`Created unique branch: ${uniqueBranchName}`);
 }
 } else {
 throw error;
@@ -479,9 +479,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
 async function backupAndReplaceFile(sourceFile, targetFile) {
 const backupFile = `${targetFile}.backup-${Date.now()}`;
 await copyFile(targetFile, backupFile);
-console.
+console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
 await copyFile(sourceFile, targetFile);
-console.
+console.info(`\u{1F504} Replaced file with template version (backup created)`);
 }
 async function renameAndCopyFile(sourceFile, targetFile) {
 let counter = 1;
@@ -495,7 +495,7 @@ async function renameAndCopyFile(sourceFile, targetFile) {
 counter++;
 }
 await copyFile(sourceFile, uniqueTargetFile);
-console.
+console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
 return uniqueTargetFile;
 }
 var isValidMastraLanguageModel = (model) => {
@@ -544,7 +544,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
 if (!hasConflict) {
 newEntries.push(trimmed);
 } else {
-console.
+console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
 }
 }
 }
@@ -579,7 +579,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
 if (!existingVars.has(key)) {
 newVars.push({ key, value });
 } else {
-console.
+console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
 }
 }
 if (newVars.length === 0) {
@@ -600,7 +600,7 @@ var detectAISDKVersion = async (projectPath) => {
 try {
 const packageJsonPath = join(projectPath, "package.json");
 if (!existsSync(packageJsonPath)) {
-console.
+console.info("No package.json found, defaulting to v2");
 return "v2";
 }
 const packageContent = await readFile(packageJsonPath, "utf-8");
@@ -618,16 +618,16 @@ var detectAISDKVersion = async (projectPath) => {
 if (versionMatch) {
 const majorVersion = parseInt(versionMatch[1]);
 if (majorVersion >= 2) {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
 return "v2";
 } else {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
 return "v1";
 }
 }
 }
 }
-console.
+console.info("No AI SDK version detected, defaulting to v2");
 return "v2";
 } catch (error) {
 console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
@@ -658,37 +658,15 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 const { google } = await import('@ai-sdk/google');
 return google(modelId);
 }
-},
-v2: {
-openai: async () => {
-const { openai: openai2 } = await import('@ai-sdk/openai-v5');
-return openai2(modelId);
-},
-anthropic: async () => {
-const { anthropic } = await import('@ai-sdk/anthropic-v5');
-return anthropic(modelId);
-},
-groq: async () => {
-const { groq } = await import('@ai-sdk/groq-v5');
-return groq(modelId);
-},
-xai: async () => {
-const { xai } = await import('@ai-sdk/xai-v5');
-return xai(modelId);
-},
-google: async () => {
-const { google } = await import('@ai-sdk/google-v5');
-return google(modelId);
-}
 }
 };
-const providerFn = providerMap[version][provider];
+const providerFn = version === `v1` ? providerMap[version][provider] : () => new ModelRouterLanguageModel(`${provider}/${modelId}`);
 if (!providerFn) {
 console.error(`Unsupported provider: ${provider}`);
 return null;
 }
 const modelInstance = await providerFn();
-console.
+console.info(`Created ${provider} model instance (${version}): ${modelId}`);
 return modelInstance;
 } catch (error) {
 console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
@@ -697,12 +675,12 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 };
 var resolveModel = async ({
 runtimeContext,
-defaultModel = openai
+defaultModel = "openai/gpt-4.1",
 projectPath
 }) => {
 const modelFromContext = runtimeContext.get("model");
 if (modelFromContext) {
-console.
+console.info("Using model from runtime context");
 if (isValidMastraLanguageModel(modelFromContext)) {
 return modelFromContext;
 }
@@ -712,7 +690,7 @@ var resolveModel = async ({
 }
 const selectedModel = runtimeContext.get("selectedModel");
 if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
-console.
+console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
 const version = await detectAISDKVersion(projectPath);
 const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
 if (modelInstance) {
@@ -720,8 +698,8 @@ var resolveModel = async ({
 return modelInstance;
 }
 }
-console.
-return defaultModel;
+console.info("Using default model");
+return typeof defaultModel === `string` ? new ModelRouterLanguageModel(defaultModel) : defaultModel;
 };
 
 // src/defaults.ts
@@ -1756,7 +1734,7 @@ export const mastra = new Mastra({
 error: stderr
 };
 } catch (error) {
-console.
+console.error(error);
 return {
 success: false,
 message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1771,7 +1749,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Installing packages:", JSON.stringify(packages, null, 2));
 const packageStrings = packages.map((p) => `${p.name}`);
 await spawnSWPM(projectPath || "", "add", packageStrings);
 return {
@@ -1795,7 +1773,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
 let packageNames = [];
 if (packages && packages.length > 0) {
 packageNames = packages.map((p) => `${p.name}`);
@@ -3234,7 +3212,7 @@ ${config.instructions}` : "";
 * Enhanced generate method with AgentBuilder-specific configuration
 * Overrides the base Agent generate method to provide additional project context
 */
-
+generateLegacy = async (messages, generateOptions = {}) => {
 const { maxSteps, ...baseOptions } = generateOptions;
 const originalInstructions = await this.getInstructions({ runtimeContext: generateOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3257,13 +3235,13 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generateLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-
+streamLegacy = async (messages, streamOptions = {}) => {
 const { maxSteps, ...baseOptions } = streamOptions;
 const originalInstructions = await this.getInstructions({ runtimeContext: streamOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3286,13 +3264,13 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.streamLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-async
+async stream(messages, streamOptions) {
 const { ...baseOptions } = streamOptions || {};
 const originalInstructions = await this.getInstructions({ runtimeContext: streamOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3314,9 +3292,9 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.stream(messages, enhancedOptions);
 }
-async
+async generate(messages, options) {
 const { ...baseOptions } = options || {};
 const originalInstructions = await this.getInstructions({ runtimeContext: options?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3338,7 +3316,7 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generate(messages, enhancedOptions);
 }
 };
 var cloneTemplateStep = createStep({
@@ -3388,13 +3366,13 @@ var analyzePackageStep = createStep({
 inputSchema: CloneTemplateResultSchema,
 outputSchema: PackageAnalysisSchema,
 execute: async ({ inputData }) => {
-console.
+console.info("Analyzing template package.json...");
 const { templateDir } = inputData;
 const packageJsonPath = join(templateDir, "package.json");
 try {
 const packageJsonContent = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(packageJsonContent);
-console.
+console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
 return {
 dependencies: packageJson.dependencies || {},
 devDependencies: packageJson.devDependencies || {},
@@ -3430,7 +3408,7 @@ var discoverUnitsStep = createStep({
 const { templateDir } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
-console.
+console.info("targetPath", targetPath);
 const model = await resolveModel({ runtimeContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 try {
 const agent = new Agent({
@@ -3491,10 +3469,12 @@ Return the actual exported names of the units, as well as the file names.`,
 networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
 other: z.array(z.object({ name: z.string(), file: z.string() })).optional()
 });
-const result = isV2 ? await agent
-
+const result = isV2 ? await tryGenerateWithJsonFallback(agent, prompt, {
+structuredOutput: {
+schema: output
+},
 maxSteps: 100
-}) : await agent.
+}) : await agent.generateLegacy(prompt, {
 experimental_output: output,
 maxSteps: 100
 });
@@ -3518,7 +3498,7 @@ Return the actual exported names of the units, as well as the file names.`,
 template.other?.forEach((otherId) => {
 units.push({ kind: "other", id: otherId.name, file: otherId.file });
 });
-console.
+console.info("Discovered units:", JSON.stringify(units, null, 2));
 if (units.length === 0) {
 throw new Error(`No Mastra units (agents, workflows, tools) found in template.
 Possible causes:
@@ -3594,7 +3574,7 @@ var packageMergeStep = createStep({
 inputSchema: PackageMergeInputSchema,
 outputSchema: PackageMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Package merge step starting...");
 const { slug, packageInfo } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3671,7 +3651,7 @@ var installStep = createStep({
 inputSchema: InstallInputSchema,
 outputSchema: InstallResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Running install step...");
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
 await spawnSWPM(targetPath, "install", []);
@@ -3699,7 +3679,7 @@ var programmaticFileCopyStep = createStep({
 inputSchema: FileCopyInputSchema,
 outputSchema: FileCopyResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Programmatic file copy step starting...");
 const { orderedUnits, templateDir, commitSha, slug } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3742,7 +3722,7 @@ var programmaticFileCopyStep = createStep({
 }
 };
 for (const unit of orderedUnits) {
-console.
+console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
 let sourceFile;
 let resolvedUnitFile;
 if (unit.file.includes("/")) {
@@ -3773,7 +3753,7 @@ var programmaticFileCopyStep = createStep({
 }
 const targetDir = dirname(resolvedUnitFile);
 const namingConvention = await analyzeNamingConvention(targetDir);
-console.
+console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
 const hasExtension = extname(unit.id) !== "";
 const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
 const fileExtension = extname(unit.file);
@@ -3781,7 +3761,7 @@ var programmaticFileCopyStep = createStep({
 const targetFile = resolve(targetPath, targetDir, convertedFileName);
 if (existsSync(targetFile)) {
 const strategy = determineConflictStrategy(unit, targetFile);
-console.
+console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
 switch (strategy) {
 case "skip":
 conflicts.push({
@@ -3790,7 +3770,7 @@ var programmaticFileCopyStep = createStep({
 sourceFile: unit.file,
 targetFile: `${targetDir}/${convertedFileName}`
 });
-console.
+console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
 continue;
 case "backup-and-replace":
 try {
@@ -3800,7 +3780,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(
 `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
 );
 continue;
@@ -3821,7 +3801,7 @@ var programmaticFileCopyStep = createStep({
 destination: uniqueTargetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
 continue;
 } catch (renameError) {
 conflicts.push({
@@ -3850,7 +3830,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
 } catch (copyError) {
 conflicts.push({
 unit: { kind: unit.kind, id: unit.id },
@@ -3871,7 +3851,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Copied tsconfig.json from template to target");
 } else {
 const minimalTsconfig = {
 compilerOptions: {
@@ -3893,7 +3873,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Generated minimal tsconfig.json in target");
 }
 }
 } catch (e) {
@@ -3918,7 +3898,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetMastraIndex,
 unit: { kind: "other", id: "mastra-index" }
 });
-console.
+console.info("\u2713 Copied Mastra index file from template");
 }
 }
 } catch (e) {
@@ -3942,7 +3922,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore" }
 });
-console.
+console.info("\u2713 Copied .gitignore from template to target");
 } else {
 const targetContent = await readFile(targetGitignore, "utf-8");
 const templateContent = await readFile(templateGitignore, "utf-8");
@@ -3955,9 +3935,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore-merge" }
 });
-console.
+console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new .gitignore entries to add from template");
 }
 }
 }
@@ -3985,7 +3965,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env" }
 });
-console.
+console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
 } else {
 const targetContent = await readFile(targetEnv, "utf-8");
 const mergedContent = mergeEnvFiles(targetContent, variables, slug);
@@ -3997,9 +3977,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env-merge" }
 });
-console.
+console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new environment variables to add (all already exist in .env)");
 }
 }
 }
@@ -4020,13 +4000,13 @@ var programmaticFileCopyStep = createStep({
 fileList,
 { skipIfNoStaged: true }
 );
-console.
+console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
 } catch (commitError) {
 console.warn("Failed to commit copied files:", commitError);
 }
 }
 const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
-console.
+console.info(message);
 return {
 success: true,
 copiedFiles,
@@ -4051,7 +4031,7 @@ var intelligentMergeStep = createStep({
 inputSchema: IntelligentMergeInputSchema,
 outputSchema: IntelligentMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Intelligent merge step starting...");
 const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -4178,8 +4158,8 @@ Template information:
 const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
 const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
 const mastraIndexExists = existsSync(targetMastraIndex);
-console.
-console.
+console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
+console.info(
 "Registrable components:",
 registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
 );
@@ -4193,7 +4173,7 @@ Template information:
 notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
 });
 }
-console.
+console.info(`Creating task list with ${tasks.length} tasks...`);
 await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
 await logGitState(targetPath, "before intelligent merge");
 const prompt = `
@@ -4240,17 +4220,17 @@ For each task:
 Start by listing your tasks and work through them systematically!
 `;
 const isV2 = model.specificationVersion === "v2";
-const result = isV2 ? await agentBuilder.
+const result = isV2 ? await agentBuilder.stream(prompt) : await agentBuilder.streamLegacy(prompt);
 const actualResolutions = [];
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
 if (chunkData.toolName === "manageTaskList") {
@@ -4264,7 +4244,7 @@ Start by listing your tasks and work through them systematically!
 content: toolResult.content || "",
 notes: toolResult.notes
 });
-console.
+console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
 }
 } catch (parseError) {
 console.warn("Failed to parse task management result:", parseError);
@@ -4319,12 +4299,12 @@ var validationAndFixStep = createStep({
 inputSchema: ValidationFixInputSchema,
 outputSchema: ValidationFixResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Validation and fix step starting...");
 const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
 if (!hasChanges) {
-console.
+console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
 return {
 success: true,
 applied: false,
@@ -4336,7 +4316,7 @@ var validationAndFixStep = createStep({
 }
 };
 }
-console.
+console.info(
 `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
 );
 let currentIteration = 1;
@@ -4462,7 +4442,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 executeCommand: allTools.executeCommand
 }
 });
-console.
+console.info("Starting validation and fix agent with internal loop...");
 let validationResults = {
 valid: false,
 errorsFixed: 0,
@@ -4473,7 +4453,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 // Store the actual error details
 };
 while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
-console.
+console.info(`
 === Validation Iteration ${currentIteration} ===`);
 const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.
 
@@ -4482,9 +4462,11 @@ Start by running validateCode with all validation types to get a complete pictur
 Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
 const isV2 = model.specificationVersion === "v2";
 const output = z.object({ success: z.boolean() });
-const result = isV2 ? await validationAgent
-
-
+const result = isV2 ? await tryStreamWithJsonFallback(validationAgent, iterationPrompt, {
+structuredOutput: {
+schema: output
+}
+}) : await validationAgent.streamLegacy(iterationPrompt, {
 experimental_output: output
 });
 let iterationErrors = 0;
@@ -4493,13 +4475,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId,
 iteration: currentIteration
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
@@ -4508,7 +4490,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 lastValidationResult = toolResult;
 if (toolResult?.summary) {
 iterationErrors = toolResult.summary.totalErrors || 0;
-console.
+console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
 }
 }
 }
@@ -4520,12 +4502,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 if (iterationErrors > 0 && lastValidationResult?.errors) {
 validationResults.lastValidationErrors = lastValidationResult.errors;
 }
-console.
+console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
 if (iterationErrors === 0) {
-console.
+console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
 break;
 } else if (currentIteration >= maxIterations) {
-console.
+console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
 break;
 }
 currentIteration++;
@@ -4570,7 +4552,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 } finally {
 try {
 await rm(templateDir, { recursive: true, force: true });
-console.
+console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
 } catch (cleanupError) {
 console.warn("Failed to cleanup template directory:", cleanupError);
 }
@@ -5063,12 +5045,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
 let storedQAPairs = runtimeContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5082,10 +5062,6 @@ var planningIterationStep = createStep({
 });
 runtimeContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
 const model = await resolveModel({ runtimeContext });
 const planningAgent = new Agent({
@@ -5117,7 +5093,7 @@ var planningIterationStep = createStep({
 projectStructure,
 research
 });
-const result = await planningAgent.
+const result = await planningAgent.generate(planningPrompt, {
 output: PlanningAgentOutputSchema
 // maxSteps: 15,
 });
@@ -5133,8 +5109,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5143,7 +5119,7 @@ var planningIterationStep = createStep({
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5131,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5176,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
-console.
+console.info(`Requesting user approval for ${tasks.length} tasks`);
 const summary = `Task List for Approval:
 
 ${tasks.length} tasks planned:
@@ -5213,14 +5189,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
 });
 }
 if (resumeData.approved) {
-console.
+console.info("Task list approved by user");
 return {
 approved: true,
 tasks,
 message: "Task list approved, ready for execution"
 };
 } else {
-console.
+console.info("Task list rejected by user");
 return {
 approved: false,
 tasks,
@@ -5237,7 +5213,7 @@ var planningAndApprovalWorkflow = createWorkflow({
 outputSchema: TaskApprovalOutputSchema,
 steps: [planningIterationStep, taskApprovalStep]
 }).dountil(planningIterationStep, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
 return inputData.planComplete === true;
 }).map(async ({ inputData }) => {
 return {
@@ -5692,12 +5668,12 @@ var workflowDiscoveryStep = createStep({
 inputSchema: WorkflowBuilderInputSchema,
 outputSchema: WorkflowDiscoveryResultSchema,
 execute: async ({ inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting workflow discovery...");
 const { projectPath = process.cwd() } = inputData;
 try {
 const workflowsPath = join(projectPath, "src/mastra/workflows");
 if (!existsSync(workflowsPath)) {
-console.
+console.info("No workflows directory found");
 return {
 success: true,
 workflows: [],
@@ -5726,7 +5702,7 @@ var workflowDiscoveryStep = createStep({
 }
 }
 }
-console.
+console.info(`Discovered ${workflows.length} existing workflows`);
 return {
 success: true,
 workflows,
@@ -5751,7 +5727,7 @@ var projectDiscoveryStep = createStep({
 inputSchema: WorkflowDiscoveryResultSchema,
 outputSchema: ProjectDiscoveryResultSchema,
 execute: async ({ inputData: _inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting project discovery...");
 try {
 const projectPath = process.cwd();
 const projectStructure = {
@@ -5772,7 +5748,7 @@ var projectDiscoveryStep = createStep({
 console.warn("Failed to read package.json:", error);
 }
 }
-console.
+console.info("Project discovery completed");
 return {
 success: true,
 structure: {
@@ -5813,7 +5789,7 @@ var workflowResearchStep = createStep({
 inputSchema: ProjectDiscoveryResultSchema,
 outputSchema: WorkflowResearchResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Starting workflow research...");
 try {
 const model = await resolveModel({ runtimeContext });
 const researchAgent = new Agent({
@@ -5827,7 +5803,7 @@ var workflowResearchStep = createStep({
 dependencies: inputData.dependencies,
 hasWorkflowsDir: inputData.structure.hasWorkflowsDir
 });
-const result = await researchAgent.
+const result = await researchAgent.generate(researchPrompt, {
 output: WorkflowResearchResultSchema
 // stopWhen: stepCountIs(10),
 });
@@ -5845,7 +5821,7 @@ var workflowResearchStep = createStep({
 error: "Research agent failed to generate valid response"
 };
 }
-console.
+console.info("Research completed successfully");
 return {
 success: true,
 documentation: {
@@ -5891,12 +5867,12 @@ var taskExecutionStep = createStep({
 research,
 projectPath
 } = inputData;
-console.
-console.
+console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
+console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
 try {
 const model = await resolveModel({ runtimeContext });
 const currentProjectPath = projectPath || process.cwd();
-console.
+console.info("Pre-populating taskManager with planned tasks...");
 const taskManagerContext = {
 action: "create",
 tasks: tasks.map((task) => ({
@@ -5909,7 +5885,7 @@ var taskExecutionStep = createStep({
 }))
 };
 const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
-console.
+console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
 if (!taskManagerResult.success) {
 throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
 }
@@ -5962,13 +5938,13 @@ ${additionalInstructions}`;
 const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
 const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
 const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
-console.
+console.info(`
 === EXECUTION ITERATION ${iterationCount} ===`);
-console.
-console.
+console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
+console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
 allTasksCompleted = pendingTasks.length === 0;
 if (allTasksCompleted) {
-console.
+console.info("All tasks completed! Breaking execution loop.");
 break;
 }
 const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
@@ -5979,7 +5955,7 @@ ${additionalInstructions}`;
 })}
 
 ${workflowBuilderPrompts.validation.instructions}`;
-const stream = await executionAgent.
+const stream = await executionAgent.stream(iterationPrompt, {
 structuredOutput: {
 schema: TaskExecutionIterationInputSchema(tasks.length),
 model
@@ -5992,19 +5968,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
 finalMessage += chunk.payload.text;
 }
 if (chunk.type === "step-finish") {
-console.
+console.info(finalMessage);
 finalMessage = "";
 }
 if (chunk.type === "tool-result") {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "finish") {
-console.
+console.info(chunk);
 }
 }
 await stream.consumeStream();
 finalResult = await stream.object;
-console.
+console.info(`Iteration ${iterationCount} result:`, { finalResult });
 if (!finalResult) {
 throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
 }
@@ -6012,17 +5988,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
 const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
 allTasksCompleted = postPendingTasks.length === 0;
-console.
+console.info(
 `After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
 );
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
+console.info(
 `Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
 );
 break;
 }
 if (finalResult.status === "completed" && !allTasksCompleted) {
-console.
+console.info(
 `Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
 );
 }
@@ -6035,8 +6011,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
 throw new Error("No result received from agent execution");
 }
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
-console.
+console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
+console.info("finalResult", JSON.stringify(finalResult, null, 2));
 return suspend({
 questions: finalResult.questions,
 currentProgress: finalResult.progress,
@@ -6052,7 +6028,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const finalAllTasksCompleted = finalPendingTasks.length === 0;
 const success = finalAllTasksCompleted && !finalResult.error;
 const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
-console.
+console.info(message);
 const missingTasks = finalPendingTasks.map((task) => task.id);
 const validationErrors = [];
 if (finalResult.error) {
@@ -6120,7 +6096,7 @@ var workflowBuilderWorkflow = createWorkflow({
 userAnswers: void 0
 };
 }).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow check: approved=${inputData.approved}`);
 return inputData.approved === true;
 }).map(async ({ getStepResult, getInitData }) => {
 const initData = getInitData();
```
|