@mastra/agent-builder 0.0.0-pgvector-index-fix-20250905222058 → 0.0.0-playground-studio-cloud-20251031080052
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +305 -12
- package/README.md +0 -4
- package/dist/agent/index.d.ts +4 -6
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/defaults.d.ts +45 -45
- package/dist/index.js +145 -172
- package/dist/index.js.map +1 -1
- package/dist/utils.d.ts +1 -1
- package/dist/utils.d.ts.map +1 -1
- package/dist/workflows/task-planning/schema.d.ts +4 -4
- package/dist/workflows/task-planning/task-planning.d.ts +23 -11
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -1
- package/dist/workflows/template-builder/template-builder.d.ts +118 -20
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/schema.d.ts +12 -12
- package/dist/workflows/workflow-builder/tools.d.ts +3 -3
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +68 -36
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-map.d.ts +2 -3767
- package/dist/workflows/workflow-map.d.ts.map +1 -1
- package/package.json +12 -11
package/dist/index.js
CHANGED
@@ -1,4 +1,4 @@
-import { Agent } from '@mastra/core/agent';
+import { Agent, tryGenerateWithJsonFallback, tryStreamWithJsonFallback } from '@mastra/core/agent';
 import { Memory } from '@mastra/memory';
 import { TokenLimiter } from '@mastra/memory/processors';
 import { exec as exec$1, execFile as execFile$1, spawn as spawn$1 } from 'child_process';
@@ -10,7 +10,7 @@ import { z } from 'zod';
 import { existsSync, readFileSync } from 'fs';
 import { createRequire } from 'module';
 import { promisify } from 'util';
-import {
+import { ModelRouterLanguageModel } from '@mastra/core/llm';
 import { MemoryProcessor } from '@mastra/core/memory';
 import { tmpdir } from 'os';
 import { openai } from '@ai-sdk/openai';
@@ -224,7 +224,7 @@ function isInWorkspaceSubfolder(cwd) {
 if (currentDir === cwd) {
 continue;
 }
-console.
+console.info(`Checking for workspace indicators in: ${currentDir}`);
 if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
 return true;
 }
@@ -244,7 +244,7 @@ function isInWorkspaceSubfolder(cwd) {
 }
 return false;
 } catch (error) {
-console.
+console.warn(`Error in workspace detection: ${error}`);
 return false;
 }
 }
@@ -315,12 +315,12 @@ function spawnWithOutput(command, args, options) {
 }
 async function spawnSWPM(cwd, command, packageNames) {
 try {
-console.
+console.info("Running install command with swpm");
 const swpmPath = createRequire(import.meta.filename).resolve("swpm");
 await spawn(swpmPath, [command, ...packageNames], { cwd });
 return;
 } catch (e) {
-console.
+console.warn("Failed to run install command with swpm", e);
 }
 try {
 let packageManager;
@@ -348,11 +348,11 @@ async function spawnSWPM(cwd, command, packageNames) {
 }
 }
 args.push(...packageNames);
-console.
+console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
 await spawn(packageManager, args, { cwd });
 return;
 } catch (e) {
-console.
+console.warn(`Failed to run install command with native package manager: ${e}`);
 }
 throw new Error(`Failed to run install command with swpm and native package managers`);
 }
@@ -383,10 +383,10 @@ async function logGitState(targetPath, label) {
 const gitStatusResult = await git(targetPath, "status", "--porcelain");
 const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
 const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
-console.
-console.
-console.
-console.
+console.info(`\u{1F4CA} Git state ${label}:`);
+console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
+console.info("Recent commits:", gitLogResult.stdout.trim());
+console.info("Total commits:", gitCountResult.stdout.trim());
 } catch (gitError) {
 console.warn(`Could not get git state ${label}:`, gitError);
 }
@@ -458,18 +458,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
 try {
 if (!await isInsideGitRepo(targetPath)) return;
 await git(targetPath, "checkout", "-b", branchName);
-console.
+console.info(`Created new branch: ${branchName}`);
 } catch (error) {
 const errorStr = error instanceof Error ? error.message : String(error);
 if (errorStr.includes("already exists")) {
 try {
 await git(targetPath, "checkout", branchName);
-console.
+console.info(`Switched to existing branch: ${branchName}`);
 } catch {
 const timestamp = Date.now().toString().slice(-6);
 const uniqueBranchName = `${branchName}-${timestamp}`;
 await git(targetPath, "checkout", "-b", uniqueBranchName);
-console.
+console.info(`Created unique branch: ${uniqueBranchName}`);
 }
 } else {
 throw error;
@@ -479,9 +479,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
 async function backupAndReplaceFile(sourceFile, targetFile) {
 const backupFile = `${targetFile}.backup-${Date.now()}`;
 await copyFile(targetFile, backupFile);
-console.
+console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
 await copyFile(sourceFile, targetFile);
-console.
+console.info(`\u{1F504} Replaced file with template version (backup created)`);
 }
 async function renameAndCopyFile(sourceFile, targetFile) {
 let counter = 1;
@@ -495,7 +495,7 @@ async function renameAndCopyFile(sourceFile, targetFile) {
 counter++;
 }
 await copyFile(sourceFile, uniqueTargetFile);
-console.
+console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
 return uniqueTargetFile;
 }
 var isValidMastraLanguageModel = (model) => {
@@ -544,7 +544,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
 if (!hasConflict) {
 newEntries.push(trimmed);
 } else {
-console.
+console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
 }
 }
 }
@@ -579,7 +579,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
 if (!existingVars.has(key)) {
 newVars.push({ key, value });
 } else {
-console.
+console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
 }
 }
 if (newVars.length === 0) {
@@ -600,7 +600,7 @@ var detectAISDKVersion = async (projectPath) => {
 try {
 const packageJsonPath = join(projectPath, "package.json");
 if (!existsSync(packageJsonPath)) {
-console.
+console.info("No package.json found, defaulting to v2");
 return "v2";
 }
 const packageContent = await readFile(packageJsonPath, "utf-8");
@@ -618,16 +618,16 @@ var detectAISDKVersion = async (projectPath) => {
 if (versionMatch) {
 const majorVersion = parseInt(versionMatch[1]);
 if (majorVersion >= 2) {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
 return "v2";
 } else {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
 return "v1";
 }
 }
 }
 }
-console.
+console.info("No AI SDK version detected, defaulting to v2");
 return "v2";
 } catch (error) {
 console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
@@ -658,37 +658,15 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 const { google } = await import('@ai-sdk/google');
 return google(modelId);
 }
-},
-v2: {
-openai: async () => {
-const { openai: openai2 } = await import('@ai-sdk/openai-v5');
-return openai2(modelId);
-},
-anthropic: async () => {
-const { anthropic } = await import('@ai-sdk/anthropic-v5');
-return anthropic(modelId);
-},
-groq: async () => {
-const { groq } = await import('@ai-sdk/groq-v5');
-return groq(modelId);
-},
-xai: async () => {
-const { xai } = await import('@ai-sdk/xai-v5');
-return xai(modelId);
-},
-google: async () => {
-const { google } = await import('@ai-sdk/google-v5');
-return google(modelId);
-}
 }
 };
-const providerFn = providerMap[version][provider];
+const providerFn = version === `v1` ? providerMap[version][provider] : () => new ModelRouterLanguageModel(`${provider}/${modelId}`);
 if (!providerFn) {
 console.error(`Unsupported provider: ${provider}`);
 return null;
 }
 const modelInstance = await providerFn();
-console.
+console.info(`Created ${provider} model instance (${version}): ${modelId}`);
 return modelInstance;
 } catch (error) {
 console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
@@ -697,12 +675,12 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 };
 var resolveModel = async ({
 runtimeContext,
-defaultModel = openai
+defaultModel = "openai/gpt-4.1",
 projectPath
 }) => {
 const modelFromContext = runtimeContext.get("model");
 if (modelFromContext) {
-console.
+console.info("Using model from runtime context");
 if (isValidMastraLanguageModel(modelFromContext)) {
 return modelFromContext;
 }
@@ -712,7 +690,7 @@ var resolveModel = async ({
 }
 const selectedModel = runtimeContext.get("selectedModel");
 if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
-console.
+console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
 const version = await detectAISDKVersion(projectPath);
 const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
 if (modelInstance) {
@@ -720,8 +698,8 @@ var resolveModel = async ({
 return modelInstance;
 }
 }
-console.
-return defaultModel;
+console.info("Using default model");
+return typeof defaultModel === `string` ? new ModelRouterLanguageModel(defaultModel) : defaultModel;
 };

 // src/defaults.ts
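The hunks above change how v2 models are resolved: the per-provider `@ai-sdk/*-v5` dynamic-import map is removed, and any v2 `provider/modelId` pair (including the new string default `"openai/gpt-4.1"`) is routed through `ModelRouterLanguageModel` from `@mastra/core/llm`. A minimal sketch of that resolution shape, reconstructed from the diff rather than copied from the package (the helper name and the `providerMap` wiring here are assumptions):

```js
import { ModelRouterLanguageModel } from '@mastra/core/llm';

// Hypothetical helper mirroring the diff: v1 keeps the dynamic-import provider map,
// while any v2 request collapses to a single router keyed by "provider/modelId".
async function pickModel({ provider, modelId, version = 'v2', providerMap }) {
  if (version === 'v1') {
    const providerFn = providerMap.v1[provider]; // e.g. () => import('@ai-sdk/openai')
    return providerFn ? providerFn() : null;
  }
  return new ModelRouterLanguageModel(`${provider}/${modelId}`);
}

// The default model is now a plain string and only wrapped when actually used,
// matching the new `return typeof defaultModel === 'string' ? ... : defaultModel` branch.
const defaultModel = 'openai/gpt-4.1';
const fallback = typeof defaultModel === 'string'
  ? new ModelRouterLanguageModel(defaultModel)
  : defaultModel;
```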
@@ -890,7 +868,7 @@ You have access to an enhanced set of tools based on production coding agent pat
 ### Task Management
 - **taskManager**: Create and track multi-step coding tasks with states (pending, in_progress, completed, blocked). Use this for complex projects that require systematic progress tracking.

-### Code Discovery & Analysis
+### Code Discovery & Analysis
 - **codeAnalyzer**: Analyze codebase structure, discover definitions (functions, classes, interfaces), map dependencies, and understand architectural patterns.
 - **smartSearch**: Intelligent search with context awareness, pattern matching, and relevance scoring.

@@ -1100,7 +1078,7 @@ export const mastra = new Mastra({
 workflows: { weatherWorkflow },
 agents: { weatherAgent },
 storage: new LibSQLStore({
-// stores
+// stores observability, evals, ... into memory storage, if it needs to persist, change to file:../mastra.db
 url: ":memory:",
 }),
 logger: new PinoLogger({
@@ -1756,7 +1734,7 @@ export const mastra = new Mastra({
 error: stderr
 };
 } catch (error) {
-console.
+console.error(error);
 return {
 success: false,
 message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1771,7 +1749,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Installing packages:", JSON.stringify(packages, null, 2));
 const packageStrings = packages.map((p) => `${p.name}`);
 await spawnSWPM(projectPath || "", "add", packageStrings);
 return {
@@ -1795,7 +1773,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
 let packageNames = [];
 if (packages && packages.length > 0) {
 packageNames = packages.map((p) => `${p.name}`);
@@ -3234,7 +3212,7 @@ ${config.instructions}` : "";
 * Enhanced generate method with AgentBuilder-specific configuration
 * Overrides the base Agent generate method to provide additional project context
 */
-
+generateLegacy = async (messages, generateOptions = {}) => {
 const { maxSteps, ...baseOptions } = generateOptions;
 const originalInstructions = await this.getInstructions({ runtimeContext: generateOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3257,13 +3235,13 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generateLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-
+streamLegacy = async (messages, streamOptions = {}) => {
 const { maxSteps, ...baseOptions } = streamOptions;
 const originalInstructions = await this.getInstructions({ runtimeContext: streamOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3286,13 +3264,13 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.streamLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-async
+async stream(messages, streamOptions) {
 const { ...baseOptions } = streamOptions || {};
 const originalInstructions = await this.getInstructions({ runtimeContext: streamOptions?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3314,9 +3292,9 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.stream(messages, enhancedOptions);
 }
-async
+async generate(messages, options) {
 const { ...baseOptions } = options || {};
 const originalInstructions = await this.getInstructions({ runtimeContext: options?.runtimeContext });
 const additionalInstructions = baseOptions.instructions;
@@ -3338,7 +3316,7 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generate(messages, enhancedOptions);
 }
 };
 var cloneTemplateStep = createStep({
@@ -3388,13 +3366,13 @@ var analyzePackageStep = createStep({
 inputSchema: CloneTemplateResultSchema,
 outputSchema: PackageAnalysisSchema,
 execute: async ({ inputData }) => {
-console.
+console.info("Analyzing template package.json...");
 const { templateDir } = inputData;
 const packageJsonPath = join(templateDir, "package.json");
 try {
 const packageJsonContent = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(packageJsonContent);
-console.
+console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
 return {
 dependencies: packageJson.dependencies || {},
 devDependencies: packageJson.devDependencies || {},
@@ -3430,7 +3408,7 @@ var discoverUnitsStep = createStep({
 const { templateDir } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
-console.
+console.info("targetPath", targetPath);
 const model = await resolveModel({ runtimeContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 try {
 const agent = new Agent({
@@ -3491,10 +3469,12 @@ Return the actual exported names of the units, as well as the file names.`,
 networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
 other: z.array(z.object({ name: z.string(), file: z.string() })).optional()
 });
-const result = isV2 ? await agent
-
+const result = isV2 ? await tryGenerateWithJsonFallback(agent, prompt, {
+structuredOutput: {
+schema: output
+},
 maxSteps: 100
-}) : await agent.
+}) : await agent.generateLegacy(prompt, {
 experimental_output: output,
 maxSteps: 100
 });
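The unit-discovery hunk above replaces the direct v2 structured-output call with the `tryGenerateWithJsonFallback` helper now imported from `@mastra/core/agent`, while the v1 branch moves to `agent.generateLegacy` with `experimental_output`. A rough sketch of the two call shapes as they appear in the diff; `agent`, `prompt`, and `isV2` stand in for values built earlier in the step, and the schema below is a placeholder:

```js
import { z } from 'zod';
import { tryGenerateWithJsonFallback } from '@mastra/core/agent';

// Placeholder schema; the real step builds a richer one covering agents, workflows, tools, etc.
const output = z.object({
  agents: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
});

const result = isV2
  ? await tryGenerateWithJsonFallback(agent, prompt, {
      // v2 spec: structured output with (judging by the helper's name) a plain-JSON fallback
      structuredOutput: { schema: output },
      maxSteps: 100,
    })
  : await agent.generateLegacy(prompt, {
      // v1 spec: the legacy generate path keeps the experimental_output option
      experimental_output: output,
      maxSteps: 100,
    });
```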
@@ -3518,7 +3498,7 @@ Return the actual exported names of the units, as well as the file names.`,
 template.other?.forEach((otherId) => {
 units.push({ kind: "other", id: otherId.name, file: otherId.file });
 });
-console.
+console.info("Discovered units:", JSON.stringify(units, null, 2));
 if (units.length === 0) {
 throw new Error(`No Mastra units (agents, workflows, tools) found in template.
 Possible causes:
@@ -3594,7 +3574,7 @@ var packageMergeStep = createStep({
 inputSchema: PackageMergeInputSchema,
 outputSchema: PackageMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Package merge step starting...");
 const { slug, packageInfo } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3671,7 +3651,7 @@ var installStep = createStep({
 inputSchema: InstallInputSchema,
 outputSchema: InstallResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Running install step...");
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
 await spawnSWPM(targetPath, "install", []);
@@ -3699,7 +3679,7 @@ var programmaticFileCopyStep = createStep({
 inputSchema: FileCopyInputSchema,
 outputSchema: FileCopyResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Programmatic file copy step starting...");
 const { orderedUnits, templateDir, commitSha, slug } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -3742,7 +3722,7 @@ var programmaticFileCopyStep = createStep({
 }
 };
 for (const unit of orderedUnits) {
-console.
+console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
 let sourceFile;
 let resolvedUnitFile;
 if (unit.file.includes("/")) {
@@ -3773,7 +3753,7 @@ var programmaticFileCopyStep = createStep({
 }
 const targetDir = dirname(resolvedUnitFile);
 const namingConvention = await analyzeNamingConvention(targetDir);
-console.
+console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
 const hasExtension = extname(unit.id) !== "";
 const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
 const fileExtension = extname(unit.file);
@@ -3781,7 +3761,7 @@ var programmaticFileCopyStep = createStep({
 const targetFile = resolve(targetPath, targetDir, convertedFileName);
 if (existsSync(targetFile)) {
 const strategy = determineConflictStrategy(unit, targetFile);
-console.
+console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
 switch (strategy) {
 case "skip":
 conflicts.push({
@@ -3790,7 +3770,7 @@ var programmaticFileCopyStep = createStep({
 sourceFile: unit.file,
 targetFile: `${targetDir}/${convertedFileName}`
 });
-console.
+console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
 continue;
 case "backup-and-replace":
 try {
@@ -3800,7 +3780,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(
 `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
 );
 continue;
@@ -3821,7 +3801,7 @@ var programmaticFileCopyStep = createStep({
 destination: uniqueTargetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
 continue;
 } catch (renameError) {
 conflicts.push({
@@ -3850,7 +3830,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
 } catch (copyError) {
 conflicts.push({
 unit: { kind: unit.kind, id: unit.id },
@@ -3871,7 +3851,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Copied tsconfig.json from template to target");
 } else {
 const minimalTsconfig = {
 compilerOptions: {
@@ -3893,7 +3873,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Generated minimal tsconfig.json in target");
 }
 }
 } catch (e) {
@@ -3918,7 +3898,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetMastraIndex,
 unit: { kind: "other", id: "mastra-index" }
 });
-console.
+console.info("\u2713 Copied Mastra index file from template");
 }
 }
 } catch (e) {
@@ -3942,7 +3922,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore" }
 });
-console.
+console.info("\u2713 Copied .gitignore from template to target");
 } else {
 const targetContent = await readFile(targetGitignore, "utf-8");
 const templateContent = await readFile(templateGitignore, "utf-8");
@@ -3955,9 +3935,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore-merge" }
 });
-console.
+console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new .gitignore entries to add from template");
 }
 }
 }
@@ -3985,7 +3965,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env" }
 });
-console.
+console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
 } else {
 const targetContent = await readFile(targetEnv, "utf-8");
 const mergedContent = mergeEnvFiles(targetContent, variables, slug);
@@ -3997,9 +3977,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env-merge" }
 });
-console.
+console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new environment variables to add (all already exist in .env)");
 }
 }
 }
@@ -4020,13 +4000,13 @@ var programmaticFileCopyStep = createStep({
 fileList,
 { skipIfNoStaged: true }
 );
-console.
+console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
 } catch (commitError) {
 console.warn("Failed to commit copied files:", commitError);
 }
 }
 const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
-console.
+console.info(message);
 return {
 success: true,
 copiedFiles,
@@ -4051,7 +4031,7 @@ var intelligentMergeStep = createStep({
 inputSchema: IntelligentMergeInputSchema,
 outputSchema: IntelligentMergeResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Intelligent merge step starting...");
 const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 try {
@@ -4178,8 +4158,8 @@ Template information:
 const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
 const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
 const mastraIndexExists = existsSync(targetMastraIndex);
-console.
-console.
+console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
+console.info(
 "Registrable components:",
 registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
 );
@@ -4193,7 +4173,7 @@ Template information:
 notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
 });
 }
-console.
+console.info(`Creating task list with ${tasks.length} tasks...`);
 await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
 await logGitState(targetPath, "before intelligent merge");
 const prompt = `
@@ -4240,17 +4220,17 @@ For each task:
 Start by listing your tasks and work through them systematically!
 `;
 const isV2 = model.specificationVersion === "v2";
-const result = isV2 ? await agentBuilder.
+const result = isV2 ? await agentBuilder.stream(prompt) : await agentBuilder.streamLegacy(prompt);
 const actualResolutions = [];
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
 if (chunkData.toolName === "manageTaskList") {
@@ -4264,7 +4244,7 @@ Start by listing your tasks and work through them systematically!
 content: toolResult.content || "",
 notes: toolResult.notes
 });
-console.
+console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
 }
 } catch (parseError) {
 console.warn("Failed to parse task management result:", parseError);
@@ -4319,12 +4299,12 @@ var validationAndFixStep = createStep({
 inputSchema: ValidationFixInputSchema,
 outputSchema: ValidationFixResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Validation and fix step starting...");
 const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
 const targetPath = resolveTargetPath(inputData, runtimeContext);
 const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
 if (!hasChanges) {
-console.
+console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
 return {
 success: true,
 applied: false,
@@ -4336,7 +4316,7 @@ var validationAndFixStep = createStep({
 }
 };
 }
-console.
+console.info(
 `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
 );
 let currentIteration = 1;
@@ -4462,7 +4442,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 executeCommand: allTools.executeCommand
 }
 });
-console.
+console.info("Starting validation and fix agent with internal loop...");
 let validationResults = {
 valid: false,
 errorsFixed: 0,
@@ -4473,7 +4453,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 // Store the actual error details
 };
 while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
-console.
+console.info(`
 === Validation Iteration ${currentIteration} ===`);
 const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.

@@ -4482,9 +4462,11 @@ Start by running validateCode with all validation types to get a complete pictur
 Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
 const isV2 = model.specificationVersion === "v2";
 const output = z.object({ success: z.boolean() });
-const result = isV2 ? await validationAgent
-
-
+const result = isV2 ? await tryStreamWithJsonFallback(validationAgent, iterationPrompt, {
+structuredOutput: {
+schema: output
+}
+}) : await validationAgent.streamLegacy(iterationPrompt, {
 experimental_output: output
 });
 let iterationErrors = 0;
@@ -4493,13 +4475,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId,
 iteration: currentIteration
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
@@ -4508,7 +4490,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 lastValidationResult = toolResult;
 if (toolResult?.summary) {
 iterationErrors = toolResult.summary.totalErrors || 0;
-console.
+console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
 }
 }
 }
@@ -4520,12 +4502,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 if (iterationErrors > 0 && lastValidationResult?.errors) {
 validationResults.lastValidationErrors = lastValidationResult.errors;
 }
-console.
+console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
 if (iterationErrors === 0) {
-console.
+console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
 break;
 } else if (currentIteration >= maxIterations) {
-console.
+console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
 break;
 }
 currentIteration++;
@@ -4570,7 +4552,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 } finally {
 try {
 await rm(templateDir, { recursive: true, force: true });
-console.
+console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
 } catch (cleanupError) {
 console.warn("Failed to cleanup template directory:", cleanupError);
 }
@@ -5063,12 +5045,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
 let storedQAPairs = runtimeContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5082,10 +5062,6 @@ var planningIterationStep = createStep({
 });
 runtimeContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
 const model = await resolveModel({ runtimeContext });
 const planningAgent = new Agent({
@@ -5117,8 +5093,10 @@ var planningIterationStep = createStep({
 projectStructure,
 research
 });
-const result = await planningAgent.
-
+const result = await planningAgent.generate(planningPrompt, {
+structuredOutput: {
+schema: PlanningAgentOutputSchema
+}
 // maxSteps: 15,
 });
 const planResult = await result.object;
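From this point on, several steps (planning, research, task execution) call `generate`/`stream` with a `structuredOutput: { schema }` option and read the typed result from `result.object`. An illustrative sketch of that pattern; `planningAgent` and `planningPrompt` come from the surrounding step, and the schema below is only a stand-in for the real `PlanningAgentOutputSchema`:

```js
import { z } from 'zod';

// Stand-in for PlanningAgentOutputSchema; the real schema lives elsewhere in the package.
const PlanSchema = z.object({
  planComplete: z.boolean(),
  tasks: z.array(z.object({ id: z.string(), content: z.string() })),
  questions: z.array(z.object({ id: z.string(), text: z.string() })).optional(),
});

const result = await planningAgent.generate(planningPrompt, {
  structuredOutput: { schema: PlanSchema },
});
const planResult = await result.object; // parsed against the schema
```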
@@ -5133,8 +5111,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5143,7 +5121,7 @@ var planningIterationStep = createStep({
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5133,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
 runtimeContext.set(qaKey, storedQAPairs);
-console.
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5178,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
-console.
+console.info(`Requesting user approval for ${tasks.length} tasks`);
 const summary = `Task List for Approval:

 ${tasks.length} tasks planned:
@@ -5213,14 +5191,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
 });
 }
 if (resumeData.approved) {
-console.
+console.info("Task list approved by user");
 return {
 approved: true,
 tasks,
 message: "Task list approved, ready for execution"
 };
 } else {
-console.
+console.info("Task list rejected by user");
 return {
 approved: false,
 tasks,
@@ -5237,7 +5215,7 @@ var planningAndApprovalWorkflow = createWorkflow({
 outputSchema: TaskApprovalOutputSchema,
 steps: [planningIterationStep, taskApprovalStep]
 }).dountil(planningIterationStep, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
 return inputData.planComplete === true;
 }).map(async ({ inputData }) => {
 return {
@@ -5692,12 +5670,12 @@ var workflowDiscoveryStep = createStep({
 inputSchema: WorkflowBuilderInputSchema,
 outputSchema: WorkflowDiscoveryResultSchema,
 execute: async ({ inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting workflow discovery...");
 const { projectPath = process.cwd() } = inputData;
 try {
 const workflowsPath = join(projectPath, "src/mastra/workflows");
 if (!existsSync(workflowsPath)) {
-console.
+console.info("No workflows directory found");
 return {
 success: true,
 workflows: [],
@@ -5726,7 +5704,7 @@ var workflowDiscoveryStep = createStep({
 }
 }
 }
-console.
+console.info(`Discovered ${workflows.length} existing workflows`);
 return {
 success: true,
 workflows,
@@ -5751,7 +5729,7 @@ var projectDiscoveryStep = createStep({
 inputSchema: WorkflowDiscoveryResultSchema,
 outputSchema: ProjectDiscoveryResultSchema,
 execute: async ({ inputData: _inputData, runtimeContext: _runtimeContext }) => {
-console.
+console.info("Starting project discovery...");
 try {
 const projectPath = process.cwd();
 const projectStructure = {
@@ -5772,7 +5750,7 @@ var projectDiscoveryStep = createStep({
 console.warn("Failed to read package.json:", error);
 }
 }
-console.
+console.info("Project discovery completed");
 return {
 success: true,
 structure: {
@@ -5813,7 +5791,7 @@ var workflowResearchStep = createStep({
 inputSchema: ProjectDiscoveryResultSchema,
 outputSchema: WorkflowResearchResultSchema,
 execute: async ({ inputData, runtimeContext }) => {
-console.
+console.info("Starting workflow research...");
 try {
 const model = await resolveModel({ runtimeContext });
 const researchAgent = new Agent({
@@ -5827,8 +5805,10 @@ var workflowResearchStep = createStep({
 dependencies: inputData.dependencies,
 hasWorkflowsDir: inputData.structure.hasWorkflowsDir
 });
-const result = await researchAgent.
-
+const result = await researchAgent.generate(researchPrompt, {
+structuredOutput: {
+schema: WorkflowResearchResultSchema
+}
 // stopWhen: stepCountIs(10),
 });
 const researchResult = await result.object;
@@ -5845,7 +5825,7 @@ var workflowResearchStep = createStep({
 error: "Research agent failed to generate valid response"
 };
 }
-console.
+console.info("Research completed successfully");
 return {
 success: true,
 documentation: {
@@ -5891,12 +5871,12 @@ var taskExecutionStep = createStep({
 research,
 projectPath
 } = inputData;
-console.
-console.
+console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
+console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
 try {
 const model = await resolveModel({ runtimeContext });
 const currentProjectPath = projectPath || process.cwd();
-console.
+console.info("Pre-populating taskManager with planned tasks...");
 const taskManagerContext = {
 action: "create",
 tasks: tasks.map((task) => ({
@@ -5909,7 +5889,7 @@ var taskExecutionStep = createStep({
 }))
 };
 const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
-console.
+console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
 if (!taskManagerResult.success) {
 throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
 }
@@ -5940,17 +5920,10 @@ ${workflowBuilderPrompts.validation.instructions}`
 resumeData
 });
 const originalInstructions = await executionAgent.getInstructions({ runtimeContext });
-const additionalInstructions = executionAgent.instructions;
-let enhancedInstructions = originalInstructions;
-if (additionalInstructions) {
-enhancedInstructions = `${originalInstructions}
-
-${additionalInstructions}`;
-}
 const enhancedOptions = {
 stopWhen: stepCountIs(100),
 temperature: 0.3,
-instructions:
+instructions: originalInstructions
 };
 let finalResult = null;
 let allTasksCompleted = false;
@@ -5962,13 +5935,13 @@ ${additionalInstructions}`;
 const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
 const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
 const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
-console.
+console.info(`
 === EXECUTION ITERATION ${iterationCount} ===`);
-console.
-console.
+console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
+console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
 allTasksCompleted = pendingTasks.length === 0;
 if (allTasksCompleted) {
-console.
+console.info("All tasks completed! Breaking execution loop.");
 break;
 }
 const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
@@ -5979,7 +5952,7 @@ ${additionalInstructions}`;
 })}

 ${workflowBuilderPrompts.validation.instructions}`;
-const stream = await executionAgent.
+const stream = await executionAgent.stream(iterationPrompt, {
 structuredOutput: {
 schema: TaskExecutionIterationInputSchema(tasks.length),
 model
@@ -5992,19 +5965,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
 finalMessage += chunk.payload.text;
 }
 if (chunk.type === "step-finish") {
-console.
+console.info(finalMessage);
 finalMessage = "";
 }
 if (chunk.type === "tool-result") {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "finish") {
-console.
+console.info(chunk);
 }
 }
 await stream.consumeStream();
 finalResult = await stream.object;
-console.
+console.info(`Iteration ${iterationCount} result:`, { finalResult });
 if (!finalResult) {
 throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
 }
@@ -6012,17 +5985,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
 const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
 allTasksCompleted = postPendingTasks.length === 0;
-console.
+console.info(
 `After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
 );
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
+console.info(
 `Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
 );
 break;
 }
 if (finalResult.status === "completed" && !allTasksCompleted) {
-console.
+console.info(
 `Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
 );
 }
@@ -6035,8 +6008,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
 throw new Error("No result received from agent execution");
 }
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
-console.
+console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
+console.info("finalResult", JSON.stringify(finalResult, null, 2));
 return suspend({
 questions: finalResult.questions,
 currentProgress: finalResult.progress,
@@ -6052,7 +6025,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const finalAllTasksCompleted = finalPendingTasks.length === 0;
 const success = finalAllTasksCompleted && !finalResult.error;
 const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
-console.
+console.info(message);
 const missingTasks = finalPendingTasks.map((task) => task.id);
 const validationErrors = [];
 if (finalResult.error) {
@@ -6120,7 +6093,7 @@ var workflowBuilderWorkflow = createWorkflow({
 userAnswers: void 0
 };
 }).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow check: approved=${inputData.approved}`);
 return inputData.approved === true;
 }).map(async ({ getStepResult, getInitData }) => {
 const initData = getInitData();
|