@mastra/agent-builder 0.0.0-iterate-traces-ui-again-20250912091900 → 0.0.0-main-test-20251105183450
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +318 -4
- package/README.md +0 -4
- package/dist/agent/index.d.ts +5 -7
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/defaults.d.ts +87 -1685
- package/dist/defaults.d.ts.map +1 -1
- package/dist/index.js +242 -264
- package/dist/index.js.map +1 -1
- package/dist/processors/tool-summary.d.ts.map +1 -1
- package/dist/types.d.ts +2 -2
- package/dist/utils.d.ts +5 -5
- package/dist/utils.d.ts.map +1 -1
- package/dist/workflows/shared/schema.d.ts +2 -2
- package/dist/workflows/task-planning/schema.d.ts +10 -10
- package/dist/workflows/task-planning/task-planning.d.ts +31 -19
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -1
- package/dist/workflows/template-builder/template-builder.d.ts +119 -21
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/schema.d.ts +16 -16
- package/dist/workflows/workflow-builder/tools.d.ts +3 -179
- package/dist/workflows/workflow-builder/tools.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +84 -52
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-map.d.ts +2 -3767
- package/dist/workflows/workflow-map.d.ts.map +1 -1
- package/package.json +10 -10
package/dist/index.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { Agent } from '@mastra/core/agent';
|
|
1
|
+
import { Agent, tryGenerateWithJsonFallback, tryStreamWithJsonFallback } from '@mastra/core/agent';
|
|
2
2
|
import { Memory } from '@mastra/memory';
|
|
3
3
|
import { TokenLimiter } from '@mastra/memory/processors';
|
|
4
4
|
import { exec as exec$1, execFile as execFile$1, spawn as spawn$1 } from 'child_process';
|
|
@@ -10,7 +10,7 @@ import { z } from 'zod';
|
|
|
10
10
|
import { existsSync, readFileSync } from 'fs';
|
|
11
11
|
import { createRequire } from 'module';
|
|
12
12
|
import { promisify } from 'util';
|
|
13
|
-
import {
|
|
13
|
+
import { ModelRouterLanguageModel } from '@mastra/core/llm';
|
|
14
14
|
import { MemoryProcessor } from '@mastra/core/memory';
|
|
15
15
|
import { tmpdir } from 'os';
|
|
16
16
|
import { openai } from '@ai-sdk/openai';
|
|
@@ -224,7 +224,7 @@ function isInWorkspaceSubfolder(cwd) {
|
|
|
224
224
|
if (currentDir === cwd) {
|
|
225
225
|
continue;
|
|
226
226
|
}
|
|
227
|
-
console.
|
|
227
|
+
console.info(`Checking for workspace indicators in: ${currentDir}`);
|
|
228
228
|
if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
|
|
229
229
|
return true;
|
|
230
230
|
}
|
|
@@ -244,7 +244,7 @@ function isInWorkspaceSubfolder(cwd) {
|
|
|
244
244
|
}
|
|
245
245
|
return false;
|
|
246
246
|
} catch (error) {
|
|
247
|
-
console.
|
|
247
|
+
console.warn(`Error in workspace detection: ${error}`);
|
|
248
248
|
return false;
|
|
249
249
|
}
|
|
250
250
|
}
|
|
@@ -315,12 +315,12 @@ function spawnWithOutput(command, args, options) {
|
|
|
315
315
|
}
|
|
316
316
|
async function spawnSWPM(cwd, command, packageNames) {
|
|
317
317
|
try {
|
|
318
|
-
console.
|
|
318
|
+
console.info("Running install command with swpm");
|
|
319
319
|
const swpmPath = createRequire(import.meta.filename).resolve("swpm");
|
|
320
320
|
await spawn(swpmPath, [command, ...packageNames], { cwd });
|
|
321
321
|
return;
|
|
322
322
|
} catch (e) {
|
|
323
|
-
console.
|
|
323
|
+
console.warn("Failed to run install command with swpm", e);
|
|
324
324
|
}
|
|
325
325
|
try {
|
|
326
326
|
let packageManager;
|
|
@@ -348,11 +348,11 @@ async function spawnSWPM(cwd, command, packageNames) {
|
|
|
348
348
|
}
|
|
349
349
|
}
|
|
350
350
|
args.push(...packageNames);
|
|
351
|
-
console.
|
|
351
|
+
console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
|
|
352
352
|
await spawn(packageManager, args, { cwd });
|
|
353
353
|
return;
|
|
354
354
|
} catch (e) {
|
|
355
|
-
console.
|
|
355
|
+
console.warn(`Failed to run install command with native package manager: ${e}`);
|
|
356
356
|
}
|
|
357
357
|
throw new Error(`Failed to run install command with swpm and native package managers`);
|
|
358
358
|
}
|
|
@@ -383,10 +383,10 @@ async function logGitState(targetPath, label) {
|
|
|
383
383
|
const gitStatusResult = await git(targetPath, "status", "--porcelain");
|
|
384
384
|
const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
|
|
385
385
|
const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
|
|
386
|
-
console.
|
|
387
|
-
console.
|
|
388
|
-
console.
|
|
389
|
-
console.
|
|
386
|
+
console.info(`\u{1F4CA} Git state ${label}:`);
|
|
387
|
+
console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
|
|
388
|
+
console.info("Recent commits:", gitLogResult.stdout.trim());
|
|
389
|
+
console.info("Total commits:", gitCountResult.stdout.trim());
|
|
390
390
|
} catch (gitError) {
|
|
391
391
|
console.warn(`Could not get git state ${label}:`, gitError);
|
|
392
392
|
}
|
|
@@ -458,18 +458,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
|
|
|
458
458
|
try {
|
|
459
459
|
if (!await isInsideGitRepo(targetPath)) return;
|
|
460
460
|
await git(targetPath, "checkout", "-b", branchName);
|
|
461
|
-
console.
|
|
461
|
+
console.info(`Created new branch: ${branchName}`);
|
|
462
462
|
} catch (error) {
|
|
463
463
|
const errorStr = error instanceof Error ? error.message : String(error);
|
|
464
464
|
if (errorStr.includes("already exists")) {
|
|
465
465
|
try {
|
|
466
466
|
await git(targetPath, "checkout", branchName);
|
|
467
|
-
console.
|
|
467
|
+
console.info(`Switched to existing branch: ${branchName}`);
|
|
468
468
|
} catch {
|
|
469
469
|
const timestamp = Date.now().toString().slice(-6);
|
|
470
470
|
const uniqueBranchName = `${branchName}-${timestamp}`;
|
|
471
471
|
await git(targetPath, "checkout", "-b", uniqueBranchName);
|
|
472
|
-
console.
|
|
472
|
+
console.info(`Created unique branch: ${uniqueBranchName}`);
|
|
473
473
|
}
|
|
474
474
|
} else {
|
|
475
475
|
throw error;
|
|
@@ -479,9 +479,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
|
|
|
479
479
|
async function backupAndReplaceFile(sourceFile, targetFile) {
|
|
480
480
|
const backupFile = `${targetFile}.backup-${Date.now()}`;
|
|
481
481
|
await copyFile(targetFile, backupFile);
|
|
482
|
-
console.
|
|
482
|
+
console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
|
|
483
483
|
await copyFile(sourceFile, targetFile);
|
|
484
|
-
console.
|
|
484
|
+
console.info(`\u{1F504} Replaced file with template version (backup created)`);
|
|
485
485
|
}
|
|
486
486
|
async function renameAndCopyFile(sourceFile, targetFile) {
|
|
487
487
|
let counter = 1;
|
|
@@ -495,17 +495,17 @@ async function renameAndCopyFile(sourceFile, targetFile) {
|
|
|
495
495
|
counter++;
|
|
496
496
|
}
|
|
497
497
|
await copyFile(sourceFile, uniqueTargetFile);
|
|
498
|
-
console.
|
|
498
|
+
console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
|
|
499
499
|
return uniqueTargetFile;
|
|
500
500
|
}
|
|
501
501
|
var isValidMastraLanguageModel = (model) => {
|
|
502
502
|
return model && typeof model === "object" && typeof model.modelId === "string";
|
|
503
503
|
};
|
|
504
|
-
var resolveTargetPath = (inputData,
|
|
504
|
+
var resolveTargetPath = (inputData, requestContext) => {
|
|
505
505
|
if (inputData.targetPath) {
|
|
506
506
|
return inputData.targetPath;
|
|
507
507
|
}
|
|
508
|
-
const contextPath =
|
|
508
|
+
const contextPath = requestContext.get("targetPath");
|
|
509
509
|
if (contextPath) {
|
|
510
510
|
return contextPath;
|
|
511
511
|
}
|
|
@@ -544,7 +544,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
|
|
|
544
544
|
if (!hasConflict) {
|
|
545
545
|
newEntries.push(trimmed);
|
|
546
546
|
} else {
|
|
547
|
-
console.
|
|
547
|
+
console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
|
|
548
548
|
}
|
|
549
549
|
}
|
|
550
550
|
}
|
|
@@ -579,7 +579,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
|
|
|
579
579
|
if (!existingVars.has(key)) {
|
|
580
580
|
newVars.push({ key, value });
|
|
581
581
|
} else {
|
|
582
|
-
console.
|
|
582
|
+
console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
|
|
583
583
|
}
|
|
584
584
|
}
|
|
585
585
|
if (newVars.length === 0) {
|
|
@@ -600,7 +600,7 @@ var detectAISDKVersion = async (projectPath) => {
|
|
|
600
600
|
try {
|
|
601
601
|
const packageJsonPath = join(projectPath, "package.json");
|
|
602
602
|
if (!existsSync(packageJsonPath)) {
|
|
603
|
-
console.
|
|
603
|
+
console.info("No package.json found, defaulting to v2");
|
|
604
604
|
return "v2";
|
|
605
605
|
}
|
|
606
606
|
const packageContent = await readFile(packageJsonPath, "utf-8");
|
|
@@ -618,16 +618,16 @@ var detectAISDKVersion = async (projectPath) => {
|
|
|
618
618
|
if (versionMatch) {
|
|
619
619
|
const majorVersion = parseInt(versionMatch[1]);
|
|
620
620
|
if (majorVersion >= 2) {
|
|
621
|
-
console.
|
|
621
|
+
console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
|
|
622
622
|
return "v2";
|
|
623
623
|
} else {
|
|
624
|
-
console.
|
|
624
|
+
console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
|
|
625
625
|
return "v1";
|
|
626
626
|
}
|
|
627
627
|
}
|
|
628
628
|
}
|
|
629
629
|
}
|
|
630
|
-
console.
|
|
630
|
+
console.info("No AI SDK version detected, defaulting to v2");
|
|
631
631
|
return "v2";
|
|
632
632
|
} catch (error) {
|
|
633
633
|
console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
|
|
@@ -658,37 +658,15 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
|
|
|
658
658
|
const { google } = await import('@ai-sdk/google');
|
|
659
659
|
return google(modelId);
|
|
660
660
|
}
|
|
661
|
-
},
|
|
662
|
-
v2: {
|
|
663
|
-
openai: async () => {
|
|
664
|
-
const { openai: openai2 } = await import('@ai-sdk/openai-v5');
|
|
665
|
-
return openai2(modelId);
|
|
666
|
-
},
|
|
667
|
-
anthropic: async () => {
|
|
668
|
-
const { anthropic } = await import('@ai-sdk/anthropic-v5');
|
|
669
|
-
return anthropic(modelId);
|
|
670
|
-
},
|
|
671
|
-
groq: async () => {
|
|
672
|
-
const { groq } = await import('@ai-sdk/groq-v5');
|
|
673
|
-
return groq(modelId);
|
|
674
|
-
},
|
|
675
|
-
xai: async () => {
|
|
676
|
-
const { xai } = await import('@ai-sdk/xai-v5');
|
|
677
|
-
return xai(modelId);
|
|
678
|
-
},
|
|
679
|
-
google: async () => {
|
|
680
|
-
const { google } = await import('@ai-sdk/google-v5');
|
|
681
|
-
return google(modelId);
|
|
682
|
-
}
|
|
683
661
|
}
|
|
684
662
|
};
|
|
685
|
-
const providerFn = providerMap[version][provider];
|
|
663
|
+
const providerFn = version === `v1` ? providerMap[version][provider] : () => new ModelRouterLanguageModel(`${provider}/${modelId}`);
|
|
686
664
|
if (!providerFn) {
|
|
687
665
|
console.error(`Unsupported provider: ${provider}`);
|
|
688
666
|
return null;
|
|
689
667
|
}
|
|
690
668
|
const modelInstance = await providerFn();
|
|
691
|
-
console.
|
|
669
|
+
console.info(`Created ${provider} model instance (${version}): ${modelId}`);
|
|
692
670
|
return modelInstance;
|
|
693
671
|
} catch (error) {
|
|
694
672
|
console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
|
|
@@ -696,13 +674,13 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
|
|
|
696
674
|
}
|
|
697
675
|
};
|
|
698
676
|
var resolveModel = async ({
|
|
699
|
-
|
|
700
|
-
defaultModel = openai
|
|
677
|
+
requestContext,
|
|
678
|
+
defaultModel = "openai/gpt-4.1",
|
|
701
679
|
projectPath
|
|
702
680
|
}) => {
|
|
703
|
-
const modelFromContext =
|
|
681
|
+
const modelFromContext = requestContext.get("model");
|
|
704
682
|
if (modelFromContext) {
|
|
705
|
-
console.
|
|
683
|
+
console.info("Using model from request context");
|
|
706
684
|
if (isValidMastraLanguageModel(modelFromContext)) {
|
|
707
685
|
return modelFromContext;
|
|
708
686
|
}
|
|
@@ -710,18 +688,18 @@ var resolveModel = async ({
|
|
|
710
688
|
'Invalid model provided. Model must be a MastraLanguageModel instance (e.g., openai("gpt-4"), anthropic("claude-3-5-sonnet"), etc.)'
|
|
711
689
|
);
|
|
712
690
|
}
|
|
713
|
-
const selectedModel =
|
|
691
|
+
const selectedModel = requestContext.get("selectedModel");
|
|
714
692
|
if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
|
|
715
|
-
console.
|
|
693
|
+
console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
|
|
716
694
|
const version = await detectAISDKVersion(projectPath);
|
|
717
695
|
const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
|
|
718
696
|
if (modelInstance) {
|
|
719
|
-
|
|
697
|
+
requestContext.set("model", modelInstance);
|
|
720
698
|
return modelInstance;
|
|
721
699
|
}
|
|
722
700
|
}
|
|
723
|
-
console.
|
|
724
|
-
return defaultModel;
|
|
701
|
+
console.info("Using default model");
|
|
702
|
+
return typeof defaultModel === `string` ? new ModelRouterLanguageModel(defaultModel) : defaultModel;
|
|
725
703
|
};
|
|
726
704
|
|
|
727
705
|
// src/defaults.ts
|
|
@@ -890,7 +868,7 @@ You have access to an enhanced set of tools based on production coding agent pat
|
|
|
890
868
|
### Task Management
|
|
891
869
|
- **taskManager**: Create and track multi-step coding tasks with states (pending, in_progress, completed, blocked). Use this for complex projects that require systematic progress tracking.
|
|
892
870
|
|
|
893
|
-
### Code Discovery & Analysis
|
|
871
|
+
### Code Discovery & Analysis
|
|
894
872
|
- **codeAnalyzer**: Analyze codebase structure, discover definitions (functions, classes, interfaces), map dependencies, and understand architectural patterns.
|
|
895
873
|
- **smartSearch**: Intelligent search with context awareness, pattern matching, and relevance scoring.
|
|
896
874
|
|
|
@@ -928,6 +906,7 @@ import { LibSQLStore } from '@mastra/libsql';
|
|
|
928
906
|
import { weatherTool } from '../tools/weather-tool';
|
|
929
907
|
|
|
930
908
|
export const weatherAgent = new Agent({
|
|
909
|
+
id: 'weather-agent',
|
|
931
910
|
name: 'Weather Agent',
|
|
932
911
|
instructions: \${instructions},
|
|
933
912
|
model: openai('gpt-4o-mini'),
|
|
@@ -962,8 +941,8 @@ export const weatherTool = createTool({
|
|
|
962
941
|
conditions: z.string(),
|
|
963
942
|
location: z.string(),
|
|
964
943
|
}),
|
|
965
|
-
execute: async (
|
|
966
|
-
return await getWeather(
|
|
944
|
+
execute: async (inputData) => {
|
|
945
|
+
return await getWeather(inputData.location);
|
|
967
946
|
},
|
|
968
947
|
});
|
|
969
948
|
\`\`\`
|
|
@@ -981,7 +960,7 @@ const fetchWeather = createStep({
|
|
|
981
960
|
city: z.string().describe('The city to get the weather for'),
|
|
982
961
|
}),
|
|
983
962
|
outputSchema: forecastSchema,
|
|
984
|
-
execute: async (
|
|
963
|
+
execute: async (inputData) => {
|
|
985
964
|
if (!inputData) {
|
|
986
965
|
throw new Error('Input data not found');
|
|
987
966
|
}
|
|
@@ -1035,7 +1014,8 @@ const planActivities = createStep({
|
|
|
1035
1014
|
outputSchema: z.object({
|
|
1036
1015
|
activities: z.string(),
|
|
1037
1016
|
}),
|
|
1038
|
-
execute: async (
|
|
1017
|
+
execute: async (inputData, context) => {
|
|
1018
|
+
const mastra = context?.mastra;
|
|
1039
1019
|
const forecast = inputData;
|
|
1040
1020
|
|
|
1041
1021
|
if (!forecast) {
|
|
@@ -1100,7 +1080,7 @@ export const mastra = new Mastra({
|
|
|
1100
1080
|
workflows: { weatherWorkflow },
|
|
1101
1081
|
agents: { weatherAgent },
|
|
1102
1082
|
storage: new LibSQLStore({
|
|
1103
|
-
// stores
|
|
1083
|
+
// stores observability, evals, ... into memory storage, if it needs to persist, change to file:../mastra.db
|
|
1104
1084
|
url: ":memory:",
|
|
1105
1085
|
}),
|
|
1106
1086
|
logger: new PinoLogger({
|
|
@@ -1144,8 +1124,8 @@ export const mastra = new Mastra({
|
|
|
1144
1124
|
}).optional(),
|
|
1145
1125
|
error: z.string().optional()
|
|
1146
1126
|
}),
|
|
1147
|
-
execute: async (
|
|
1148
|
-
return await _AgentBuilderDefaults.readFile({ ...
|
|
1127
|
+
execute: async (inputData) => {
|
|
1128
|
+
return await _AgentBuilderDefaults.readFile({ ...inputData, projectPath });
|
|
1149
1129
|
}
|
|
1150
1130
|
}),
|
|
1151
1131
|
writeFile: createTool({
|
|
@@ -1164,8 +1144,8 @@ export const mastra = new Mastra({
|
|
|
1164
1144
|
message: z.string(),
|
|
1165
1145
|
error: z.string().optional()
|
|
1166
1146
|
}),
|
|
1167
|
-
execute: async (
|
|
1168
|
-
return await _AgentBuilderDefaults.writeFile({ ...
|
|
1147
|
+
execute: async (inputData) => {
|
|
1148
|
+
return await _AgentBuilderDefaults.writeFile({ ...inputData, projectPath });
|
|
1169
1149
|
}
|
|
1170
1150
|
}),
|
|
1171
1151
|
listDirectory: createTool({
|
|
@@ -1196,8 +1176,8 @@ export const mastra = new Mastra({
|
|
|
1196
1176
|
message: z.string(),
|
|
1197
1177
|
error: z.string().optional()
|
|
1198
1178
|
}),
|
|
1199
|
-
execute: async (
|
|
1200
|
-
return await _AgentBuilderDefaults.listDirectory({ ...
|
|
1179
|
+
execute: async (inputData) => {
|
|
1180
|
+
return await _AgentBuilderDefaults.listDirectory({ ...inputData, projectPath });
|
|
1201
1181
|
}
|
|
1202
1182
|
}),
|
|
1203
1183
|
executeCommand: createTool({
|
|
@@ -1221,10 +1201,10 @@ export const mastra = new Mastra({
|
|
|
1221
1201
|
executionTime: z.number().optional(),
|
|
1222
1202
|
error: z.string().optional()
|
|
1223
1203
|
}),
|
|
1224
|
-
execute: async (
|
|
1204
|
+
execute: async (inputData) => {
|
|
1225
1205
|
return await _AgentBuilderDefaults.executeCommand({
|
|
1226
|
-
...
|
|
1227
|
-
workingDirectory:
|
|
1206
|
+
...inputData,
|
|
1207
|
+
workingDirectory: inputData.workingDirectory || projectPath
|
|
1228
1208
|
});
|
|
1229
1209
|
}
|
|
1230
1210
|
}),
|
|
@@ -1262,8 +1242,8 @@ export const mastra = new Mastra({
|
|
|
1262
1242
|
),
|
|
1263
1243
|
message: z.string()
|
|
1264
1244
|
}),
|
|
1265
|
-
execute: async (
|
|
1266
|
-
return await _AgentBuilderDefaults.manageTaskList(
|
|
1245
|
+
execute: async (inputData) => {
|
|
1246
|
+
return await _AgentBuilderDefaults.manageTaskList(inputData);
|
|
1267
1247
|
}
|
|
1268
1248
|
}),
|
|
1269
1249
|
// Advanced File Operations
|
|
@@ -1297,8 +1277,8 @@ export const mastra = new Mastra({
|
|
|
1297
1277
|
),
|
|
1298
1278
|
message: z.string()
|
|
1299
1279
|
}),
|
|
1300
|
-
execute: async (
|
|
1301
|
-
return await _AgentBuilderDefaults.performMultiEdit({ ...
|
|
1280
|
+
execute: async (inputData) => {
|
|
1281
|
+
return await _AgentBuilderDefaults.performMultiEdit({ ...inputData, projectPath });
|
|
1302
1282
|
}
|
|
1303
1283
|
}),
|
|
1304
1284
|
replaceLines: createTool({
|
|
@@ -1322,8 +1302,8 @@ export const mastra = new Mastra({
|
|
|
1322
1302
|
backup: z.string().optional(),
|
|
1323
1303
|
error: z.string().optional()
|
|
1324
1304
|
}),
|
|
1325
|
-
execute: async (
|
|
1326
|
-
return await _AgentBuilderDefaults.replaceLines({ ...
|
|
1305
|
+
execute: async (inputData) => {
|
|
1306
|
+
return await _AgentBuilderDefaults.replaceLines({ ...inputData, projectPath });
|
|
1327
1307
|
}
|
|
1328
1308
|
}),
|
|
1329
1309
|
// File diagnostics tool to help debug line replacement issues
|
|
@@ -1351,8 +1331,8 @@ export const mastra = new Mastra({
|
|
|
1351
1331
|
message: z.string(),
|
|
1352
1332
|
error: z.string().optional()
|
|
1353
1333
|
}),
|
|
1354
|
-
execute: async (
|
|
1355
|
-
return await _AgentBuilderDefaults.showFileLines({ ...
|
|
1334
|
+
execute: async (inputData) => {
|
|
1335
|
+
return await _AgentBuilderDefaults.showFileLines({ ...inputData, projectPath });
|
|
1356
1336
|
}
|
|
1357
1337
|
}),
|
|
1358
1338
|
// Enhanced Pattern Search
|
|
@@ -1395,8 +1375,8 @@ export const mastra = new Mastra({
|
|
|
1395
1375
|
patterns: z.array(z.string())
|
|
1396
1376
|
})
|
|
1397
1377
|
}),
|
|
1398
|
-
execute: async (
|
|
1399
|
-
return await _AgentBuilderDefaults.performSmartSearch(
|
|
1378
|
+
execute: async (inputData) => {
|
|
1379
|
+
return await _AgentBuilderDefaults.performSmartSearch(inputData, projectPath);
|
|
1400
1380
|
}
|
|
1401
1381
|
}),
|
|
1402
1382
|
validateCode: createTool({
|
|
@@ -1429,8 +1409,8 @@ export const mastra = new Mastra({
|
|
|
1429
1409
|
validationsFailed: z.array(z.string())
|
|
1430
1410
|
})
|
|
1431
1411
|
}),
|
|
1432
|
-
execute: async (
|
|
1433
|
-
const { projectPath: validationProjectPath, validationType, files } =
|
|
1412
|
+
execute: async (inputData) => {
|
|
1413
|
+
const { projectPath: validationProjectPath, validationType, files } = inputData;
|
|
1434
1414
|
const targetPath = validationProjectPath || projectPath;
|
|
1435
1415
|
return await _AgentBuilderDefaults.validateCode({
|
|
1436
1416
|
projectPath: targetPath,
|
|
@@ -1469,8 +1449,8 @@ export const mastra = new Mastra({
|
|
|
1469
1449
|
suggestions: z.array(z.string()).optional(),
|
|
1470
1450
|
error: z.string().optional()
|
|
1471
1451
|
}),
|
|
1472
|
-
execute: async (
|
|
1473
|
-
return await _AgentBuilderDefaults.webSearch(
|
|
1452
|
+
execute: async (inputData) => {
|
|
1453
|
+
return await _AgentBuilderDefaults.webSearch(inputData);
|
|
1474
1454
|
}
|
|
1475
1455
|
}),
|
|
1476
1456
|
// Task Completion Signaling
|
|
@@ -1499,8 +1479,8 @@ export const mastra = new Mastra({
|
|
|
1499
1479
|
summary: z.string(),
|
|
1500
1480
|
confidence: z.number().min(0).max(100)
|
|
1501
1481
|
}),
|
|
1502
|
-
execute: async (
|
|
1503
|
-
return await _AgentBuilderDefaults.signalCompletion(
|
|
1482
|
+
execute: async (inputData) => {
|
|
1483
|
+
return await _AgentBuilderDefaults.signalCompletion(inputData);
|
|
1504
1484
|
}
|
|
1505
1485
|
}),
|
|
1506
1486
|
manageProject: createTool({
|
|
@@ -1525,8 +1505,8 @@ export const mastra = new Mastra({
|
|
|
1525
1505
|
details: z.string().optional(),
|
|
1526
1506
|
error: z.string().optional()
|
|
1527
1507
|
}),
|
|
1528
|
-
execute: async (
|
|
1529
|
-
const { action, features, packages } =
|
|
1508
|
+
execute: async (inputData) => {
|
|
1509
|
+
const { action, features, packages } = inputData;
|
|
1530
1510
|
try {
|
|
1531
1511
|
switch (action) {
|
|
1532
1512
|
case "create":
|
|
@@ -1587,8 +1567,8 @@ export const mastra = new Mastra({
|
|
|
1587
1567
|
stdout: z.array(z.string()).optional().describe("Server output lines captured during startup"),
|
|
1588
1568
|
error: z.string().optional()
|
|
1589
1569
|
}),
|
|
1590
|
-
execute: async (
|
|
1591
|
-
const { action, port } =
|
|
1570
|
+
execute: async (inputData) => {
|
|
1571
|
+
const { action, port } = inputData;
|
|
1592
1572
|
try {
|
|
1593
1573
|
switch (action) {
|
|
1594
1574
|
case "start":
|
|
@@ -1673,8 +1653,8 @@ export const mastra = new Mastra({
|
|
|
1673
1653
|
url: z.string(),
|
|
1674
1654
|
method: z.string()
|
|
1675
1655
|
}),
|
|
1676
|
-
execute: async (
|
|
1677
|
-
const { method, url, baseUrl, headers, body, timeout } =
|
|
1656
|
+
execute: async (inputData) => {
|
|
1657
|
+
const { method, url, baseUrl, headers, body, timeout } = inputData;
|
|
1678
1658
|
try {
|
|
1679
1659
|
return await _AgentBuilderDefaults.makeHttpRequest({
|
|
1680
1660
|
method,
|
|
@@ -1729,7 +1709,7 @@ export const mastra = new Mastra({
|
|
|
1729
1709
|
/**
|
|
1730
1710
|
* Get tools for a specific mode
|
|
1731
1711
|
*/
|
|
1732
|
-
static async
|
|
1712
|
+
static async listToolsForMode(projectPath, mode = "code-editor") {
|
|
1733
1713
|
const allTools = await _AgentBuilderDefaults.DEFAULT_TOOLS(projectPath);
|
|
1734
1714
|
if (mode === "template") {
|
|
1735
1715
|
return _AgentBuilderDefaults.filterToolsForTemplateBuilder(allTools);
|
|
@@ -1756,7 +1736,7 @@ export const mastra = new Mastra({
|
|
|
1756
1736
|
error: stderr
|
|
1757
1737
|
};
|
|
1758
1738
|
} catch (error) {
|
|
1759
|
-
console.
|
|
1739
|
+
console.error(error);
|
|
1760
1740
|
return {
|
|
1761
1741
|
success: false,
|
|
1762
1742
|
message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
|
|
@@ -1771,7 +1751,7 @@ export const mastra = new Mastra({
|
|
|
1771
1751
|
projectPath
|
|
1772
1752
|
}) {
|
|
1773
1753
|
try {
|
|
1774
|
-
console.
|
|
1754
|
+
console.info("Installing packages:", JSON.stringify(packages, null, 2));
|
|
1775
1755
|
const packageStrings = packages.map((p) => `${p.name}`);
|
|
1776
1756
|
await spawnSWPM(projectPath || "", "add", packageStrings);
|
|
1777
1757
|
return {
|
|
@@ -1795,7 +1775,7 @@ export const mastra = new Mastra({
|
|
|
1795
1775
|
projectPath
|
|
1796
1776
|
}) {
|
|
1797
1777
|
try {
|
|
1798
|
-
console.
|
|
1778
|
+
console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
|
|
1799
1779
|
let packageNames = [];
|
|
1800
1780
|
if (packages && packages.length > 0) {
|
|
1801
1781
|
packageNames = packages.map((p) => `${p.name}`);
|
|
@@ -3108,7 +3088,8 @@ var ToolSummaryProcessor = class extends MemoryProcessor {
|
|
|
3108
3088
|
constructor({ summaryModel }) {
|
|
3109
3089
|
super({ name: "ToolSummaryProcessor" });
|
|
3110
3090
|
this.summaryAgent = new Agent({
|
|
3111
|
-
|
|
3091
|
+
id: "tool-summary-agent",
|
|
3092
|
+
name: "Tool Summary Agent",
|
|
3112
3093
|
description: "A summary agent that summarizes tool calls and results",
|
|
3113
3094
|
instructions: "You are a summary agent that summarizes tool calls and results",
|
|
3114
3095
|
model: summaryModel
|
|
@@ -3212,7 +3193,7 @@ ${config.instructions}` : "";
|
|
|
3212
3193
|
model: config.model,
|
|
3213
3194
|
tools: async () => {
|
|
3214
3195
|
return {
|
|
3215
|
-
...await AgentBuilderDefaults.
|
|
3196
|
+
...await AgentBuilderDefaults.listToolsForMode(config.projectPath, config.mode),
|
|
3216
3197
|
...config.tools || {}
|
|
3217
3198
|
};
|
|
3218
3199
|
},
|
|
@@ -3234,9 +3215,9 @@ ${config.instructions}` : "";
|
|
|
3234
3215
|
* Enhanced generate method with AgentBuilder-specific configuration
|
|
3235
3216
|
* Overrides the base Agent generate method to provide additional project context
|
|
3236
3217
|
*/
|
|
3237
|
-
|
|
3218
|
+
generateLegacy = async (messages, generateOptions = {}) => {
|
|
3238
3219
|
const { maxSteps, ...baseOptions } = generateOptions;
|
|
3239
|
-
const originalInstructions = await this.getInstructions({
|
|
3220
|
+
const originalInstructions = await this.getInstructions({ requestContext: generateOptions?.requestContext });
|
|
3240
3221
|
const additionalInstructions = baseOptions.instructions;
|
|
3241
3222
|
let enhancedInstructions = originalInstructions;
|
|
3242
3223
|
if (additionalInstructions) {
|
|
@@ -3257,15 +3238,15 @@ ${additionalInstructions}`;
|
|
|
3257
3238
|
this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
|
|
3258
3239
|
projectPath: this.builderConfig.projectPath
|
|
3259
3240
|
});
|
|
3260
|
-
return super.
|
|
3241
|
+
return super.generateLegacy(messages, enhancedOptions);
|
|
3261
3242
|
};
|
|
3262
3243
|
/**
|
|
3263
3244
|
* Enhanced stream method with AgentBuilder-specific configuration
|
|
3264
3245
|
* Overrides the base Agent stream method to provide additional project context
|
|
3265
3246
|
*/
|
|
3266
|
-
|
|
3247
|
+
streamLegacy = async (messages, streamOptions = {}) => {
|
|
3267
3248
|
const { maxSteps, ...baseOptions } = streamOptions;
|
|
3268
|
-
const originalInstructions = await this.getInstructions({
|
|
3249
|
+
const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
|
|
3269
3250
|
const additionalInstructions = baseOptions.instructions;
|
|
3270
3251
|
let enhancedInstructions = originalInstructions;
|
|
3271
3252
|
if (additionalInstructions) {
|
|
@@ -3286,15 +3267,15 @@ ${additionalInstructions}`;
|
|
|
3286
3267
|
this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
|
|
3287
3268
|
projectPath: this.builderConfig.projectPath
|
|
3288
3269
|
});
|
|
3289
|
-
return super.
|
|
3270
|
+
return super.streamLegacy(messages, enhancedOptions);
|
|
3290
3271
|
};
|
|
3291
3272
|
/**
|
|
3292
3273
|
* Enhanced stream method with AgentBuilder-specific configuration
|
|
3293
3274
|
* Overrides the base Agent stream method to provide additional project context
|
|
3294
3275
|
*/
|
|
3295
|
-
async
|
|
3276
|
+
async stream(messages, streamOptions) {
|
|
3296
3277
|
const { ...baseOptions } = streamOptions || {};
|
|
3297
|
-
const originalInstructions = await this.getInstructions({
|
|
3278
|
+
const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
|
|
3298
3279
|
const additionalInstructions = baseOptions.instructions;
|
|
3299
3280
|
let enhancedInstructions = originalInstructions;
|
|
3300
3281
|
if (additionalInstructions) {
|
|
@@ -3314,11 +3295,11 @@ ${additionalInstructions}`;
|
|
|
3314
3295
|
this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
|
|
3315
3296
|
projectPath: this.builderConfig.projectPath
|
|
3316
3297
|
});
|
|
3317
|
-
return super.
|
|
3298
|
+
return super.stream(messages, enhancedOptions);
|
|
3318
3299
|
}
|
|
3319
|
-
async
|
|
3300
|
+
async generate(messages, options) {
|
|
3320
3301
|
const { ...baseOptions } = options || {};
|
|
3321
|
-
const originalInstructions = await this.getInstructions({
|
|
3302
|
+
const originalInstructions = await this.getInstructions({ requestContext: options?.requestContext });
|
|
3322
3303
|
const additionalInstructions = baseOptions.instructions;
|
|
3323
3304
|
let enhancedInstructions = originalInstructions;
|
|
3324
3305
|
if (additionalInstructions) {
|
|
@@ -3338,7 +3319,7 @@ ${additionalInstructions}`;
|
|
|
3338
3319
|
this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
|
|
3339
3320
|
projectPath: this.builderConfig.projectPath
|
|
3340
3321
|
});
|
|
3341
|
-
return super.
|
|
3322
|
+
return super.generate(messages, enhancedOptions);
|
|
3342
3323
|
}
|
|
3343
3324
|
};
|
|
3344
3325
|
var cloneTemplateStep = createStep({
|
|
@@ -3388,13 +3369,13 @@ var analyzePackageStep = createStep({
|
|
|
3388
3369
|
inputSchema: CloneTemplateResultSchema,
|
|
3389
3370
|
outputSchema: PackageAnalysisSchema,
|
|
3390
3371
|
execute: async ({ inputData }) => {
|
|
3391
|
-
console.
|
|
3372
|
+
console.info("Analyzing template package.json...");
|
|
3392
3373
|
const { templateDir } = inputData;
|
|
3393
3374
|
const packageJsonPath = join(templateDir, "package.json");
|
|
3394
3375
|
try {
|
|
3395
3376
|
const packageJsonContent = await readFile(packageJsonPath, "utf-8");
|
|
3396
3377
|
const packageJson = JSON.parse(packageJsonContent);
|
|
3397
|
-
console.
|
|
3378
|
+
console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
|
|
3398
3379
|
return {
|
|
3399
3380
|
dependencies: packageJson.dependencies || {},
|
|
3400
3381
|
devDependencies: packageJson.devDependencies || {},
|
|
@@ -3426,14 +3407,15 @@ var discoverUnitsStep = createStep({
|
|
|
3426
3407
|
description: "Discover template units by analyzing the templates directory structure",
|
|
3427
3408
|
inputSchema: CloneTemplateResultSchema,
|
|
3428
3409
|
outputSchema: DiscoveryResultSchema,
|
|
3429
|
-
execute: async ({ inputData,
|
|
3410
|
+
execute: async ({ inputData, requestContext }) => {
|
|
3430
3411
|
const { templateDir } = inputData;
|
|
3431
|
-
const targetPath = resolveTargetPath(inputData,
|
|
3412
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
3432
3413
|
const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
|
|
3433
|
-
console.
|
|
3434
|
-
const model = await resolveModel({
|
|
3414
|
+
console.info("targetPath", targetPath);
|
|
3415
|
+
const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
|
|
3435
3416
|
try {
|
|
3436
3417
|
const agent = new Agent({
|
|
3418
|
+
id: "mastra-project-discoverer",
|
|
3437
3419
|
model,
|
|
3438
3420
|
instructions: `You are an expert at analyzing Mastra projects.
|
|
3439
3421
|
|
|
@@ -3491,10 +3473,12 @@ Return the actual exported names of the units, as well as the file names.`,
|
|
|
3491
3473
|
networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
|
|
3492
3474
|
other: z.array(z.object({ name: z.string(), file: z.string() })).optional()
|
|
3493
3475
|
});
|
|
3494
|
-
const result = isV2 ? await agent
|
|
3495
|
-
|
|
3476
|
+
const result = isV2 ? await tryGenerateWithJsonFallback(agent, prompt, {
|
|
3477
|
+
structuredOutput: {
|
|
3478
|
+
schema: output
|
|
3479
|
+
},
|
|
3496
3480
|
maxSteps: 100
|
|
3497
|
-
}) : await agent.
|
|
3481
|
+
}) : await agent.generateLegacy(prompt, {
|
|
3498
3482
|
experimental_output: output,
|
|
3499
3483
|
maxSteps: 100
|
|
3500
3484
|
});
|
|
@@ -3518,7 +3502,7 @@ Return the actual exported names of the units, as well as the file names.`,
|
|
|
3518
3502
|
template.other?.forEach((otherId) => {
|
|
3519
3503
|
units.push({ kind: "other", id: otherId.name, file: otherId.file });
|
|
3520
3504
|
});
|
|
3521
|
-
console.
|
|
3505
|
+
console.info("Discovered units:", JSON.stringify(units, null, 2));
|
|
3522
3506
|
if (units.length === 0) {
|
|
3523
3507
|
throw new Error(`No Mastra units (agents, workflows, tools) found in template.
|
|
3524
3508
|
Possible causes:
|
|
@@ -3568,8 +3552,8 @@ var prepareBranchStep = createStep({
|
|
|
3568
3552
|
description: "Create or switch to integration branch before modifications",
|
|
3569
3553
|
inputSchema: PrepareBranchInputSchema,
|
|
3570
3554
|
outputSchema: PrepareBranchResultSchema,
|
|
3571
|
-
execute: async ({ inputData,
|
|
3572
|
-
const targetPath = resolveTargetPath(inputData,
|
|
3555
|
+
execute: async ({ inputData, requestContext }) => {
|
|
3556
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
3573
3557
|
try {
|
|
3574
3558
|
const branchName = `feat/install-template-${inputData.slug}`;
|
|
3575
3559
|
await gitCheckoutBranch(branchName, targetPath);
|
|
@@ -3593,10 +3577,10 @@ var packageMergeStep = createStep({
|
|
|
3593
3577
|
description: "Merge template package.json dependencies into target project",
|
|
3594
3578
|
inputSchema: PackageMergeInputSchema,
|
|
3595
3579
|
outputSchema: PackageMergeResultSchema,
|
|
3596
|
-
execute: async ({ inputData,
|
|
3597
|
-
console.
|
|
3580
|
+
execute: async ({ inputData, requestContext }) => {
|
|
3581
|
+
console.info("Package merge step starting...");
|
|
3598
3582
|
const { slug, packageInfo } = inputData;
|
|
3599
|
-
const targetPath = resolveTargetPath(inputData,
|
|
3583
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
3600
3584
|
try {
|
|
3601
3585
|
const targetPkgPath = join(targetPath, "package.json");
|
|
3602
3586
|
let targetPkgRaw = "{}";
|
|
@@ -3670,9 +3654,9 @@ var installStep = createStep({
|
|
|
3670
3654
|
description: "Install packages based on merged package.json",
|
|
3671
3655
|
inputSchema: InstallInputSchema,
|
|
3672
3656
|
outputSchema: InstallResultSchema,
|
|
3673
|
-
execute: async ({ inputData,
|
|
3674
|
-
console.
|
|
3675
|
-
const targetPath = resolveTargetPath(inputData,
|
|
3657
|
+
execute: async ({ inputData, requestContext }) => {
|
|
3658
|
+
console.info("Running install step...");
|
|
3659
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
3676
3660
|
try {
|
|
3677
3661
|
await spawnSWPM(targetPath, "install", []);
|
|
3678
3662
|
const lock = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock"].map((f) => join(targetPath, f)).find((f) => existsSync(f));
|
|
@@ -3698,10 +3682,10 @@ var programmaticFileCopyStep = createStep({
|
|
|
3698
3682
|
description: "Programmatically copy template files to target project based on ordered units",
|
|
3699
3683
|
inputSchema: FileCopyInputSchema,
|
|
3700
3684
|
outputSchema: FileCopyResultSchema,
|
|
3701
|
-
execute: async ({ inputData,
|
|
3702
|
-
console.
|
|
3685
|
+
execute: async ({ inputData, requestContext }) => {
|
|
3686
|
+
console.info("Programmatic file copy step starting...");
|
|
3703
3687
|
const { orderedUnits, templateDir, commitSha, slug } = inputData;
|
|
3704
|
-
const targetPath = resolveTargetPath(inputData,
|
|
3688
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
3705
3689
|
try {
|
|
3706
3690
|
const copiedFiles = [];
|
|
3707
3691
|
const conflicts = [];
|
|
@@ -3742,7 +3726,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3742
3726
|
}
|
|
3743
3727
|
};
|
|
3744
3728
|
for (const unit of orderedUnits) {
|
|
3745
|
-
console.
|
|
3729
|
+
console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
|
|
3746
3730
|
let sourceFile;
|
|
3747
3731
|
let resolvedUnitFile;
|
|
3748
3732
|
if (unit.file.includes("/")) {
|
|
@@ -3773,7 +3757,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3773
3757
|
}
|
|
3774
3758
|
const targetDir = dirname(resolvedUnitFile);
|
|
3775
3759
|
const namingConvention = await analyzeNamingConvention(targetDir);
|
|
3776
|
-
console.
|
|
3760
|
+
console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
|
|
3777
3761
|
const hasExtension = extname(unit.id) !== "";
|
|
3778
3762
|
const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
|
|
3779
3763
|
const fileExtension = extname(unit.file);
|
|
@@ -3781,7 +3765,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3781
3765
|
const targetFile = resolve(targetPath, targetDir, convertedFileName);
|
|
3782
3766
|
if (existsSync(targetFile)) {
|
|
3783
3767
|
const strategy = determineConflictStrategy(unit, targetFile);
|
|
3784
|
-
console.
|
|
3768
|
+
console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
|
|
3785
3769
|
switch (strategy) {
|
|
3786
3770
|
case "skip":
|
|
3787
3771
|
conflicts.push({
|
|
@@ -3790,7 +3774,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3790
3774
|
sourceFile: unit.file,
|
|
3791
3775
|
targetFile: `${targetDir}/${convertedFileName}`
|
|
3792
3776
|
});
|
|
3793
|
-
console.
|
|
3777
|
+
console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
|
|
3794
3778
|
continue;
|
|
3795
3779
|
case "backup-and-replace":
|
|
3796
3780
|
try {
|
|
@@ -3800,7 +3784,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3800
3784
|
destination: targetFile,
|
|
3801
3785
|
unit: { kind: unit.kind, id: unit.id }
|
|
3802
3786
|
});
|
|
3803
|
-
console.
|
|
3787
|
+
console.info(
|
|
3804
3788
|
`\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
|
|
3805
3789
|
);
|
|
3806
3790
|
continue;
|
|
@@ -3821,7 +3805,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3821
3805
|
destination: uniqueTargetFile,
|
|
3822
3806
|
unit: { kind: unit.kind, id: unit.id }
|
|
3823
3807
|
});
|
|
3824
|
-
console.
|
|
3808
|
+
console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
|
|
3825
3809
|
continue;
|
|
3826
3810
|
} catch (renameError) {
|
|
3827
3811
|
conflicts.push({
|
|
@@ -3850,7 +3834,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3850
3834
|
destination: targetFile,
|
|
3851
3835
|
unit: { kind: unit.kind, id: unit.id }
|
|
3852
3836
|
});
|
|
3853
|
-
console.
|
|
3837
|
+
console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
|
|
3854
3838
|
} catch (copyError) {
|
|
3855
3839
|
conflicts.push({
|
|
3856
3840
|
unit: { kind: unit.kind, id: unit.id },
|
|
@@ -3871,7 +3855,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3871
3855
|
destination: targetTsconfig,
|
|
3872
3856
|
unit: { kind: "other", id: "tsconfig.json" }
|
|
3873
3857
|
});
|
|
3874
|
-
console.
|
|
3858
|
+
console.info("\u2713 Copied tsconfig.json from template to target");
|
|
3875
3859
|
} else {
|
|
3876
3860
|
const minimalTsconfig = {
|
|
3877
3861
|
compilerOptions: {
|
|
@@ -3893,7 +3877,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3893
3877
|
destination: targetTsconfig,
|
|
3894
3878
|
unit: { kind: "other", id: "tsconfig.json" }
|
|
3895
3879
|
});
|
|
3896
|
-
console.
|
|
3880
|
+
console.info("\u2713 Generated minimal tsconfig.json in target");
|
|
3897
3881
|
}
|
|
3898
3882
|
}
|
|
3899
3883
|
} catch (e) {
|
|
@@ -3918,7 +3902,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3918
3902
|
destination: targetMastraIndex,
|
|
3919
3903
|
unit: { kind: "other", id: "mastra-index" }
|
|
3920
3904
|
});
|
|
3921
|
-
console.
|
|
3905
|
+
console.info("\u2713 Copied Mastra index file from template");
|
|
3922
3906
|
}
|
|
3923
3907
|
}
|
|
3924
3908
|
} catch (e) {
|
|
@@ -3942,7 +3926,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3942
3926
|
destination: targetGitignore,
|
|
3943
3927
|
unit: { kind: "other", id: "gitignore" }
|
|
3944
3928
|
});
|
|
3945
|
-
console.
|
|
3929
|
+
console.info("\u2713 Copied .gitignore from template to target");
|
|
3946
3930
|
} else {
|
|
3947
3931
|
const targetContent = await readFile(targetGitignore, "utf-8");
|
|
3948
3932
|
const templateContent = await readFile(templateGitignore, "utf-8");
|
|
@@ -3955,9 +3939,9 @@ var programmaticFileCopyStep = createStep({
|
|
|
3955
3939
|
destination: targetGitignore,
|
|
3956
3940
|
unit: { kind: "other", id: "gitignore-merge" }
|
|
3957
3941
|
});
|
|
3958
|
-
console.
|
|
3942
|
+
console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
|
|
3959
3943
|
} else {
|
|
3960
|
-
console.
|
|
3944
|
+
console.info("\u2139 No new .gitignore entries to add from template");
|
|
3961
3945
|
}
|
|
3962
3946
|
}
|
|
3963
3947
|
}
|
|
@@ -3985,7 +3969,7 @@ var programmaticFileCopyStep = createStep({
|
|
|
3985
3969
|
destination: targetEnv,
|
|
3986
3970
|
unit: { kind: "other", id: "env" }
|
|
3987
3971
|
});
|
|
3988
|
-
console.
|
|
3972
|
+
console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
|
|
3989
3973
|
} else {
|
|
3990
3974
|
const targetContent = await readFile(targetEnv, "utf-8");
|
|
3991
3975
|
const mergedContent = mergeEnvFiles(targetContent, variables, slug);
|
|
@@ -3997,9 +3981,9 @@ var programmaticFileCopyStep = createStep({
|
|
|
3997
3981
|
destination: targetEnv,
|
|
3998
3982
|
unit: { kind: "other", id: "env-merge" }
|
|
3999
3983
|
});
|
|
4000
|
-
console.
|
|
3984
|
+
console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
|
|
4001
3985
|
} else {
|
|
4002
|
-
console.
|
|
3986
|
+
console.info("\u2139 No new environment variables to add (all already exist in .env)");
|
|
4003
3987
|
}
|
|
4004
3988
|
}
|
|
4005
3989
|
}
|
|
@@ -4020,13 +4004,13 @@ var programmaticFileCopyStep = createStep({
|
|
|
4020
4004
|
fileList,
|
|
4021
4005
|
{ skipIfNoStaged: true }
|
|
4022
4006
|
);
|
|
4023
|
-
console.
|
|
4007
|
+
console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
|
|
4024
4008
|
} catch (commitError) {
|
|
4025
4009
|
console.warn("Failed to commit copied files:", commitError);
|
|
4026
4010
|
}
|
|
4027
4011
|
}
|
|
4028
4012
|
const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
|
|
4029
|
-
console.
|
|
4013
|
+
console.info(message);
|
|
4030
4014
|
return {
|
|
4031
4015
|
success: true,
|
|
4032
4016
|
copiedFiles,
|
|
@@ -4050,12 +4034,12 @@ var intelligentMergeStep = createStep({
|
|
|
4050
4034
|
description: "Use AgentBuilder to intelligently merge template files",
|
|
4051
4035
|
inputSchema: IntelligentMergeInputSchema,
|
|
4052
4036
|
outputSchema: IntelligentMergeResultSchema,
|
|
4053
|
-
execute: async ({ inputData,
|
|
4054
|
-
console.
|
|
4037
|
+
execute: async ({ inputData, requestContext }) => {
|
|
4038
|
+
console.info("Intelligent merge step starting...");
|
|
4055
4039
|
const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
|
|
4056
|
-
const targetPath = resolveTargetPath(inputData,
|
|
4040
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
4057
4041
|
try {
|
|
4058
|
-
const model = await resolveModel({
|
|
4042
|
+
const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
|
|
4059
4043
|
const copyFileTool = createTool({
|
|
4060
4044
|
id: "copy-file",
|
|
4061
4045
|
description: "Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).",
|
|
@@ -4068,9 +4052,9 @@ var intelligentMergeStep = createStep({
|
|
|
4068
4052
|
message: z.string(),
|
|
4069
4053
|
error: z.string().optional()
|
|
4070
4054
|
}),
|
|
4071
|
-
execute: async (
|
|
4055
|
+
execute: async (input) => {
|
|
4072
4056
|
try {
|
|
4073
|
-
const { sourcePath, destinationPath } =
|
|
4057
|
+
const { sourcePath, destinationPath } = input;
|
|
4074
4058
|
const resolvedSourcePath = resolve(templateDir, sourcePath);
|
|
4075
4059
|
const resolvedDestinationPath = resolve(targetPath, destinationPath);
|
|
4076
4060
|
if (existsSync(resolvedSourcePath) && !existsSync(dirname(resolvedDestinationPath))) {
|
|
@@ -4178,8 +4162,8 @@ Template information:
|
|
|
4178
4162
|
const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
|
|
4179
4163
|
const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
|
|
4180
4164
|
const mastraIndexExists = existsSync(targetMastraIndex);
|
|
4181
|
-
console.
|
|
4182
|
-
console.
|
|
4165
|
+
console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
|
|
4166
|
+
console.info(
|
|
4183
4167
|
"Registrable components:",
|
|
4184
4168
|
registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
|
|
4185
4169
|
);
|
|
@@ -4193,7 +4177,7 @@ Template information:
|
|
|
4193
4177
|
notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
|
|
4194
4178
|
});
|
|
4195
4179
|
}
|
|
4196
|
-
console.
|
|
4180
|
+
console.info(`Creating task list with ${tasks.length} tasks...`);
|
|
4197
4181
|
await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
|
|
4198
4182
|
await logGitState(targetPath, "before intelligent merge");
|
|
4199
4183
|
const prompt = `
|
|
@@ -4240,17 +4224,17 @@ For each task:
|
|
|
4240
4224
|
Start by listing your tasks and work through them systematically!
|
|
4241
4225
|
`;
|
|
4242
4226
|
const isV2 = model.specificationVersion === "v2";
|
|
4243
|
-
const result = isV2 ? await agentBuilder.
|
|
4227
|
+
const result = isV2 ? await agentBuilder.stream(prompt) : await agentBuilder.streamLegacy(prompt);
|
|
4244
4228
|
const actualResolutions = [];
|
|
4245
4229
|
for await (const chunk of result.fullStream) {
|
|
4246
4230
|
if (chunk.type === "step-finish" || chunk.type === "step-start") {
|
|
4247
4231
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
4248
|
-
console.
|
|
4232
|
+
console.info({
|
|
4249
4233
|
type: chunk.type,
|
|
4250
4234
|
msgId: chunkData.messageId
|
|
4251
4235
|
});
|
|
4252
4236
|
} else {
|
|
4253
|
-
console.
|
|
4237
|
+
console.info(JSON.stringify(chunk, null, 2));
|
|
4254
4238
|
if (chunk.type === "tool-result") {
|
|
4255
4239
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
4256
4240
|
if (chunkData.toolName === "manageTaskList") {
|
|
@@ -4264,7 +4248,7 @@ Start by listing your tasks and work through them systematically!
|
|
|
4264
4248
|
content: toolResult.content || "",
|
|
4265
4249
|
notes: toolResult.notes
|
|
4266
4250
|
});
|
|
4267
|
-
console.
|
|
4251
|
+
console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
|
|
4268
4252
|
}
|
|
4269
4253
|
} catch (parseError) {
|
|
4270
4254
|
console.warn("Failed to parse task management result:", parseError);
|
|
@@ -4318,13 +4302,13 @@ var validationAndFixStep = createStep({
|
|
|
4318
4302
|
description: "Validate the merged template code and fix any issues using a specialized agent",
|
|
4319
4303
|
inputSchema: ValidationFixInputSchema,
|
|
4320
4304
|
outputSchema: ValidationFixResultSchema,
|
|
4321
|
-
execute: async ({ inputData,
|
|
4322
|
-
console.
|
|
4305
|
+
execute: async ({ inputData, requestContext }) => {
|
|
4306
|
+
console.info("Validation and fix step starting...");
|
|
4323
4307
|
const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
|
|
4324
|
-
const targetPath = resolveTargetPath(inputData,
|
|
4308
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
4325
4309
|
const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
|
|
4326
4310
|
if (!hasChanges) {
|
|
4327
|
-
console.
|
|
4311
|
+
console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
|
|
4328
4312
|
return {
|
|
4329
4313
|
success: true,
|
|
4330
4314
|
applied: false,
|
|
@@ -4336,15 +4320,16 @@ var validationAndFixStep = createStep({
|
|
|
4336
4320
|
}
|
|
4337
4321
|
};
|
|
4338
4322
|
}
|
|
4339
|
-
console.
|
|
4323
|
+
console.info(
|
|
4340
4324
|
`\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
|
|
4341
4325
|
);
|
|
4342
4326
|
let currentIteration = 1;
|
|
4343
4327
|
try {
|
|
4344
|
-
const model = await resolveModel({
|
|
4345
|
-
const allTools = await AgentBuilderDefaults.
|
|
4328
|
+
const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
|
|
4329
|
+
const allTools = await AgentBuilderDefaults.listToolsForMode(targetPath, "template");
|
|
4346
4330
|
const validationAgent = new Agent({
|
|
4347
|
-
|
|
4331
|
+
id: "code-validator-fixer",
|
|
4332
|
+
name: "Code Validator Fixer",
|
|
4348
4333
|
description: "Specialized agent for validating and fixing template integration issues",
|
|
4349
4334
|
instructions: `You are a code validation and fixing specialist. Your job is to:
|
|
4350
4335
|
|
|
@@ -4462,7 +4447,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
|
|
|
4462
4447
|
executeCommand: allTools.executeCommand
|
|
4463
4448
|
}
|
|
4464
4449
|
});
|
|
4465
|
-
console.
|
|
4450
|
+
console.info("Starting validation and fix agent with internal loop...");
|
|
4466
4451
|
let validationResults = {
|
|
4467
4452
|
valid: false,
|
|
4468
4453
|
errorsFixed: 0,
|
|
@@ -4473,7 +4458,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
|
|
|
4473
4458
|
// Store the actual error details
|
|
4474
4459
|
};
|
|
4475
4460
|
while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
|
|
4476
|
-
console.
|
|
4461
|
+
console.info(`
|
|
4477
4462
|
=== Validation Iteration ${currentIteration} ===`);
|
|
4478
4463
|
const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.
|
|
4479
4464
|
|
|
@@ -4482,9 +4467,11 @@ Start by running validateCode with all validation types to get a complete pictur
|
|
|
4482
4467
|
Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
|
|
4483
4468
|
const isV2 = model.specificationVersion === "v2";
|
|
4484
4469
|
const output = z.object({ success: z.boolean() });
|
|
4485
|
-
const result = isV2 ? await validationAgent
|
|
4486
|
-
|
|
4487
|
-
|
|
4470
|
+
const result = isV2 ? await tryStreamWithJsonFallback(validationAgent, iterationPrompt, {
|
|
4471
|
+
structuredOutput: {
|
|
4472
|
+
schema: output
|
|
4473
|
+
}
|
|
4474
|
+
}) : await validationAgent.streamLegacy(iterationPrompt, {
|
|
4488
4475
|
experimental_output: output
|
|
4489
4476
|
});
|
|
4490
4477
|
let iterationErrors = 0;
|
|
@@ -4493,13 +4480,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
|
|
|
4493
4480
|
for await (const chunk of result.fullStream) {
|
|
4494
4481
|
if (chunk.type === "step-finish" || chunk.type === "step-start") {
|
|
4495
4482
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
4496
|
-
console.
|
|
4483
|
+
console.info({
|
|
4497
4484
|
type: chunk.type,
|
|
4498
4485
|
msgId: chunkData.messageId,
|
|
4499
4486
|
iteration: currentIteration
|
|
4500
4487
|
});
|
|
4501
4488
|
} else {
|
|
4502
|
-
console.
|
|
4489
|
+
console.info(JSON.stringify(chunk, null, 2));
|
|
4503
4490
|
}
|
|
4504
4491
|
if (chunk.type === "tool-result") {
|
|
4505
4492
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
@@ -4508,7 +4495,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
|
|
|
4508
4495
|
lastValidationResult = toolResult;
|
|
4509
4496
|
if (toolResult?.summary) {
|
|
4510
4497
|
iterationErrors = toolResult.summary.totalErrors || 0;
|
|
4511
|
-
console.
|
|
4498
|
+
console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
|
|
4512
4499
|
}
|
|
4513
4500
|
}
|
|
4514
4501
|
}
|
|
@@ -4520,12 +4507,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
|
|
|
4520
4507
|
if (iterationErrors > 0 && lastValidationResult?.errors) {
|
|
4521
4508
|
validationResults.lastValidationErrors = lastValidationResult.errors;
|
|
4522
4509
|
}
|
|
4523
|
-
console.
|
|
4510
|
+
console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
|
|
4524
4511
|
if (iterationErrors === 0) {
|
|
4525
|
-
console.
|
|
4512
|
+
console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
|
|
4526
4513
|
break;
|
|
4527
4514
|
} else if (currentIteration >= maxIterations) {
|
|
4528
|
-
console.
|
|
4515
|
+
console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
|
|
4529
4516
|
break;
|
|
4530
4517
|
}
|
|
4531
4518
|
currentIteration++;
|
|
@@ -4570,7 +4557,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
|
|
|
4570
4557
|
} finally {
|
|
4571
4558
|
try {
|
|
4572
4559
|
await rm(templateDir, { recursive: true, force: true });
|
|
4573
|
-
console.
|
|
4560
|
+
console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
|
|
4574
4561
|
} catch (cleanupError) {
|
|
4575
4562
|
console.warn("Failed to cleanup template directory:", cleanupError);
|
|
4576
4563
|
}
|
|
@@ -4745,7 +4732,7 @@ var agentBuilderTemplateWorkflow = createWorkflow({
|
|
|
4745
4732
|
}).commit();
|
|
4746
4733
|
async function mergeTemplateBySlug(slug, targetPath) {
|
|
4747
4734
|
const template = await getMastraTemplate(slug);
|
|
4748
|
-
const run = await agentBuilderTemplateWorkflow.
|
|
4735
|
+
const run = await agentBuilderTemplateWorkflow.createRun();
|
|
4749
4736
|
return await run.start({
|
|
4750
4737
|
inputData: {
|
|
4751
4738
|
repo: template.githubUrl,
|
|
@@ -5052,7 +5039,7 @@ var planningIterationStep = createStep({
 outputSchema: PlanningIterationResultSchema,
 suspendSchema: PlanningIterationSuspendSchema,
 resumeSchema: PlanningIterationResumeSchema,
-execute: async ({ inputData, resumeData, suspend,
+execute: async ({ inputData, resumeData, suspend, requestContext }) => {
 const {
 action,
 workflowName,
@@ -5063,12 +5050,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
-let storedQAPairs =
+let storedQAPairs = requestContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5080,14 +5065,10 @@ var planningIterationStep = createStep({
 }
 return pair;
 });
-
+requestContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const planningAgent = new Agent({
 model,
 instructions: taskPlanningPrompts.planningAgent.instructions({
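Note: the planning step above now keeps its question-and-answer state in `requestContext` under a fixed key. A hedged sketch of that get/set storage pattern; the key name and pair shape follow the hunk, the helper itself is illustrative:

```js
// Sketch of the requestContext-backed Q&A store used by planningIterationStep.
// Assumes requestContext exposes get(key) / set(key, value) as in the diff.
const qaKey = "workflow-builder-qa";

function recordAnswers(requestContext, newAnswers) {
  // Read the current pairs, defaulting to an empty list on the first iteration.
  let storedQAPairs = requestContext.get(qaKey) || [];
  storedQAPairs = storedQAPairs.map((pair) =>
    newAnswers[pair.question.id]
      ? { ...pair, answer: newAnswers[pair.question.id] } // answer shape is illustrative
      : pair
  );
  // Persist the updated pairs so later iterations and resumes can see them.
  requestContext.set(qaKey, storedQAPairs);
  return storedQAPairs;
}
```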
@@ -5117,8 +5098,10 @@ var planningIterationStep = createStep({
 projectStructure,
 research
 });
-const result = await planningAgent.
-
+const result = await planningAgent.generate(planningPrompt, {
+structuredOutput: {
+schema: PlanningAgentOutputSchema
+}
 // maxSteps: 15,
 });
 const planResult = await result.object;
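Note: the planning agent now calls `generate` with a `structuredOutput` schema and awaits the parsed object separately. A minimal sketch of that call shape, assuming the schema and prompt are already in scope:

```js
// Sketch of the structured-output call pattern from the hunk above.
// PlanningAgentOutputSchema is the Zod schema named in the diff; planningPrompt
// is whatever prompt the step assembled beforehand.
const result = await planningAgent.generate(planningPrompt, {
  structuredOutput: {
    schema: PlanningAgentOutputSchema,
  },
});
// The parsed, schema-validated object is awaited separately from the call itself.
const planResult = await result.object;
```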
@@ -5133,8 +5116,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5142,8 +5125,8 @@ var planningIterationStep = createStep({
 answeredAt: null
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
-
-console.
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5138,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
-
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5183,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
-console.
+console.info(`Requesting user approval for ${tasks.length} tasks`);
 const summary = `Task List for Approval:

 ${tasks.length} tasks planned:
@@ -5213,14 +5196,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
 });
 }
 if (resumeData.approved) {
-console.
+console.info("Task list approved by user");
 return {
 approved: true,
 tasks,
 message: "Task list approved, ready for execution"
 };
 } else {
-console.
+console.info("Task list rejected by user");
 return {
 approved: false,
 tasks,
@@ -5237,7 +5220,7 @@ var planningAndApprovalWorkflow = createWorkflow({
 outputSchema: TaskApprovalOutputSchema,
 steps: [planningIterationStep, taskApprovalStep]
 }).dountil(planningIterationStep, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
 return inputData.planComplete === true;
 }).map(async ({ inputData }) => {
 return {
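Note: `planningAndApprovalWorkflow` loops the planning step with `dountil` until the plan is complete, then reshapes data with `map`. A hedged sketch of that composition; the step and schema names taken from the diff are real, while the id, the input schema name, and the trailing chaining to `taskApprovalStep` are assumptions:

```js
// Sketch of the dountil + map composition used by planningAndApprovalWorkflow.
// Identifiers not shown in the diff (id, input schema, final chaining) are assumed.
const planningAndApproval = createWorkflow({
  id: "planning-and-approval",
  inputSchema: PlanningIterationInputSchema,
  outputSchema: TaskApprovalOutputSchema,
  steps: [planningIterationStep, taskApprovalStep],
})
  // Re-run planning until the step reports planComplete === true.
  .dountil(planningIterationStep, async ({ inputData }) => inputData.planComplete === true)
  // Reshape the planning output into the approval step's input.
  .map(async ({ inputData }) => ({ tasks: inputData.tasks }))
  .then(taskApprovalStep)
  .commit();
```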
@@ -5301,7 +5284,7 @@ const myStep = createStep({
 - \`mastra\`: Access to Mastra instance (agents, tools, other workflows)
 - \`getStepResult(stepInstance)\`: Get results from previous steps
 - \`getInitData()\`: Access original workflow input data
-- \`
+- \`requestContext\`: Runtime dependency injection context
 - \`runCount\`: Number of times this step has run (useful for retries)

 ### **\u{1F504} CONTROL FLOW METHODS**
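Note: the prompt text in this hunk lists the arguments a step's `execute` receives, with `requestContext` taking over as the dependency injection context. A small sketch that uses those arguments; the id, schemas, and the context key are placeholders:

```js
// Sketch of a step that touches each execute argument listed in the prompt above.
// The argument names come from the diff; everything else is illustrative.
const myStep = createStep({
  id: "my-step",
  inputSchema: z.object({ value: z.number() }),
  outputSchema: z.object({ doubled: z.number() }),
  execute: async ({ inputData, mastra, getInitData, requestContext, runCount }) => {
    const initData = getInitData();                // original workflow input
    const shared = requestContext.get("some-key"); // dependency-injected value
    console.info(`run #${runCount}`, { initData, shared, hasMastra: Boolean(mastra) });
    return { doubled: inputData.value * 2 };
  },
});
```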
@@ -5380,10 +5363,10 @@ const toolStep = createStep(myTool);

 // Method 2: Call tool in execute function
 const step = createStep({
-execute: async ({ inputData,
+execute: async ({ inputData, requestContext }) => {
 const result = await myTool.execute({
 context: inputData,
-
+requestContext
 });
 return result;
 }
@@ -5475,7 +5458,7 @@ export const mastra = new Mastra({
 **Running Workflows:**
 \`\`\`typescript
 // Create and start run
-const run = await workflow.
+const run = await workflow.createRun();
 const result = await run.start({ inputData: {...} });

 // Stream execution for real-time monitoring
@@ -5499,7 +5482,7 @@ run.watch((event) => console.log(event));
 - Use workflows as steps: \`.then(otherWorkflow)\`
 - Enable complex workflow composition

-**
+**Request Context:**
 - Pass shared data across all steps
 - Enable dependency injection patterns

@@ -5672,11 +5655,11 @@ var restrictedTaskManager = createTool({
 ),
 message: z.string()
 }),
-execute: async (
+execute: async (input) => {
 const adaptedContext = {
-...
-action:
-tasks:
+...input,
+action: input.action,
+tasks: input.tasks?.map((task) => ({
 ...task,
 priority: task.priority || "medium"
 }))
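Note: `restrictedTaskManager`'s `execute` now receives the raw input object directly and adapts it before delegating. A hedged sketch of that adapter; the `priority` default comes from the context lines, while the schema details and the delegation to `AgentBuilderDefaults.manageTaskList` are assumptions based on nearby hunks:

```js
// Sketch of the input-adapting execute used by restrictedTaskManager.
// Schemas and the delegation target are assumptions; the priority default is from the diff.
const restrictedTaskManagerSketch = createTool({
  id: "task-manager-restricted",
  inputSchema: z.object({
    action: z.enum(["create", "list"]),
    tasks: z.array(z.object({ id: z.string(), priority: z.string().optional() })).optional(),
  }),
  outputSchema: z.object({ success: z.boolean(), message: z.string() }),
  execute: async (input) => {
    const adaptedContext = {
      ...input,
      action: input.action,
      // Default every task to "medium" priority, as in the hunk above.
      tasks: input.tasks?.map((task) => ({ ...task, priority: task.priority || "medium" })),
    };
    return AgentBuilderDefaults.manageTaskList(adaptedContext);
  },
});
```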
@@ -5691,13 +5674,13 @@ var workflowDiscoveryStep = createStep({
 description: "Discover existing workflows in the project",
 inputSchema: WorkflowBuilderInputSchema,
 outputSchema: WorkflowDiscoveryResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext: _requestContext }) => {
+console.info("Starting workflow discovery...");
 const { projectPath = process.cwd() } = inputData;
 try {
 const workflowsPath = join(projectPath, "src/mastra/workflows");
 if (!existsSync(workflowsPath)) {
-console.
+console.info("No workflows directory found");
 return {
 success: true,
 workflows: [],
@@ -5726,7 +5709,7 @@ var workflowDiscoveryStep = createStep({
 }
 }
 }
-console.
+console.info(`Discovered ${workflows.length} existing workflows`);
 return {
 success: true,
 workflows,
@@ -5750,8 +5733,8 @@ var projectDiscoveryStep = createStep({
 description: "Analyze the project structure and setup",
 inputSchema: WorkflowDiscoveryResultSchema,
 outputSchema: ProjectDiscoveryResultSchema,
-execute: async ({ inputData: _inputData,
-console.
+execute: async ({ inputData: _inputData, requestContext: _requestContext }) => {
+console.info("Starting project discovery...");
 try {
 const projectPath = process.cwd();
 const projectStructure = {
@@ -5772,7 +5755,7 @@ var projectDiscoveryStep = createStep({
 console.warn("Failed to read package.json:", error);
 }
 }
-console.
+console.info("Project discovery completed");
 return {
 success: true,
 structure: {
@@ -5812,10 +5795,10 @@ var workflowResearchStep = createStep({
 description: "Research Mastra workflows and gather relevant documentation",
 inputSchema: ProjectDiscoveryResultSchema,
 outputSchema: WorkflowResearchResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Starting workflow research...");
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const researchAgent = new Agent({
 model,
 instructions: workflowBuilderPrompts.researchAgent.instructions,
@@ -5827,8 +5810,10 @@ var workflowResearchStep = createStep({
 dependencies: inputData.dependencies,
 hasWorkflowsDir: inputData.structure.hasWorkflowsDir
 });
-const result = await researchAgent.
-
+const result = await researchAgent.generate(researchPrompt, {
+structuredOutput: {
+schema: WorkflowResearchResultSchema
+}
 // stopWhen: stepCountIs(10),
 });
 const researchResult = await result.object;
@@ -5845,7 +5830,7 @@ var workflowResearchStep = createStep({
 error: "Research agent failed to generate valid response"
 };
 }
-console.
+console.info("Research completed successfully");
 return {
 success: true,
 documentation: {
@@ -5879,7 +5864,7 @@ var taskExecutionStep = createStep({
 outputSchema: TaskExecutionResultSchema,
 suspendSchema: TaskExecutionSuspendSchema,
 resumeSchema: TaskExecutionResumeSchema,
-execute: async ({ inputData, resumeData, suspend,
+execute: async ({ inputData, resumeData, suspend, requestContext }) => {
 const {
 action,
 workflowName,
@@ -5891,12 +5876,12 @@ var taskExecutionStep = createStep({
 research,
 projectPath
 } = inputData;
-console.
-console.
+console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
+console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const currentProjectPath = projectPath || process.cwd();
-console.
+console.info("Pre-populating taskManager with planned tasks...");
 const taskManagerContext = {
 action: "create",
 tasks: tasks.map((task) => ({
@@ -5909,7 +5894,7 @@ var taskExecutionStep = createStep({
 }))
 };
 const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
-console.
+console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
 if (!taskManagerResult.success) {
 throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
 }
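Note: before streaming, the execution step seeds the shared task list with a create call and later re-queries it with a list call. A minimal sketch of that pre-population pattern; the create/list actions, the success check, and the status filter follow the diff, while the task fields are illustrative:

```js
// Sketch of pre-populating the shared task list before agent execution.
// plannedTasks stands in for the tasks produced by the planning workflow.
const taskManagerContext = {
  action: "create",
  tasks: plannedTasks.map((task) => ({
    id: task.id,
    content: task.content,
    priority: task.priority || "medium",
  })),
};
const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
if (!taskManagerResult.success) {
  throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
}
// Later iterations re-query the list to decide whether to keep looping.
const status = await AgentBuilderDefaults.manageTaskList({ action: "list" });
const pendingTasks = status.tasks.filter((task) => task.status !== "completed");
console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
```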
@@ -5939,18 +5924,11 @@ ${workflowBuilderPrompts.validation.instructions}`
 tasks,
 resumeData
 });
-const originalInstructions = await executionAgent.getInstructions({
-const additionalInstructions = executionAgent.instructions;
-let enhancedInstructions = originalInstructions;
-if (additionalInstructions) {
-enhancedInstructions = `${originalInstructions}
-
-${additionalInstructions}`;
-}
+const originalInstructions = await executionAgent.getInstructions({ requestContext });
 const enhancedOptions = {
 stopWhen: stepCountIs(100),
 temperature: 0.3,
-instructions:
+instructions: originalInstructions
 };
 let finalResult = null;
 let allTasksCompleted = false;
@@ -5962,13 +5940,13 @@ ${additionalInstructions}`;
 const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
 const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
 const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
-console.
+console.info(`
 === EXECUTION ITERATION ${iterationCount} ===`);
-console.
-console.
+console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
+console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
 allTasksCompleted = pendingTasks.length === 0;
 if (allTasksCompleted) {
-console.
+console.info("All tasks completed! Breaking execution loop.");
 break;
 }
 const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
@@ -5979,7 +5957,7 @@ ${additionalInstructions}`;
 })}

 ${workflowBuilderPrompts.validation.instructions}`;
-const stream = await executionAgent.
+const stream = await executionAgent.stream(iterationPrompt, {
 structuredOutput: {
 schema: TaskExecutionIterationInputSchema(tasks.length),
 model
@@ -5992,19 +5970,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
 finalMessage += chunk.payload.text;
 }
 if (chunk.type === "step-finish") {
-console.
+console.info(finalMessage);
 finalMessage = "";
 }
 if (chunk.type === "tool-result") {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "finish") {
-console.
+console.info(chunk);
 }
 }
 await stream.consumeStream();
 finalResult = await stream.object;
-console.
+console.info(`Iteration ${iterationCount} result:`, { finalResult });
 if (!finalResult) {
 throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
 }
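Note: this hunk switches the execution agent to `stream(...)` with a `structuredOutput` schema, logs selected chunk types, and reads the parsed object after draining the stream. A hedged sketch of that consumption loop; the chunk-type checks and the `consumeStream()`/`object` accessors follow the diff, while the iteration property on the stream is an assumption:

```js
// Sketch of streaming a structured result from the execution agent.
// The chunk-type checks mirror the diff; `fullStream` as the async iterable is assumed.
const stream = await executionAgent.stream(iterationPrompt, {
  structuredOutput: {
    schema: TaskExecutionIterationInputSchema(tasks.length),
    model,
  },
});
for await (const chunk of stream.fullStream) {
  if (chunk.type === "step-finish") console.info("step finished");
  if (chunk.type === "tool-result") console.info(JSON.stringify(chunk, null, 2));
  if (chunk.type === "finish") console.info(chunk);
}
await stream.consumeStream();
const finalResult = await stream.object; // parsed against the structuredOutput schema
if (!finalResult) {
  throw new Error("No result received from agent execution");
}
```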
@@ -6012,17 +5990,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
 const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
 allTasksCompleted = postPendingTasks.length === 0;
-console.
+console.info(
 `After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
 );
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
+console.info(
 `Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
 );
 break;
 }
 if (finalResult.status === "completed" && !allTasksCompleted) {
-console.
+console.info(
 `Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
 );
 }
@@ -6035,8 +6013,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
 throw new Error("No result received from agent execution");
 }
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
-console.
+console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
+console.info("finalResult", JSON.stringify(finalResult, null, 2));
 return suspend({
 questions: finalResult.questions,
 currentProgress: finalResult.progress,
@@ -6052,7 +6030,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const finalAllTasksCompleted = finalPendingTasks.length === 0;
 const success = finalAllTasksCompleted && !finalResult.error;
 const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
-console.
+console.info(message);
 const missingTasks = finalPendingTasks.map((task) => task.id);
 const validationErrors = [];
 if (finalResult.error) {
@@ -6120,7 +6098,7 @@ var workflowBuilderWorkflow = createWorkflow({
 userAnswers: void 0
 };
 }).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow check: approved=${inputData.approved}`);
 return inputData.approved === true;
 }).map(async ({ getStepResult, getInitData }) => {
 const initData = getInitData();