@mastra/agent-builder 0.0.0-toolOptionTypes-20250917085558 → 0.0.0-trace-timeline-update-20251121092347
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +384 -4
- package/dist/agent/index.d.ts +5 -7
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/defaults.d.ts +217 -765
- package/dist/defaults.d.ts.map +1 -1
- package/dist/index.js +245 -265
- package/dist/index.js.map +1 -1
- package/dist/processors/tool-summary.d.ts.map +1 -1
- package/dist/types.d.ts +2 -2
- package/dist/utils.d.ts +5 -5
- package/dist/utils.d.ts.map +1 -1
- package/dist/workflows/shared/schema.d.ts +2 -2
- package/dist/workflows/task-planning/schema.d.ts +16 -16
- package/dist/workflows/task-planning/task-planning.d.ts +43 -31
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -1
- package/dist/workflows/template-builder/template-builder.d.ts +119 -21
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/schema.d.ts +26 -26
- package/dist/workflows/workflow-builder/tools.d.ts +13 -83
- package/dist/workflows/workflow-builder/tools.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +112 -80
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-map.d.ts +2 -3767
- package/dist/workflows/workflow-map.d.ts.map +1 -1
- package/package.json +14 -12
package/dist/index.js
CHANGED
@@ -1,4 +1,4 @@
-import { Agent } from '@mastra/core/agent';
+import { Agent, tryGenerateWithJsonFallback, tryStreamWithJsonFallback } from '@mastra/core/agent';
 import { Memory } from '@mastra/memory';
 import { TokenLimiter } from '@mastra/memory/processors';
 import { exec as exec$1, execFile as execFile$1, spawn as spawn$1 } from 'child_process';
@@ -10,7 +10,7 @@ import { z } from 'zod';
 import { existsSync, readFileSync } from 'fs';
 import { createRequire } from 'module';
 import { promisify } from 'util';
-import {
+import { ModelRouterLanguageModel } from '@mastra/core/llm';
 import { MemoryProcessor } from '@mastra/core/memory';
 import { tmpdir } from 'os';
 import { openai } from '@ai-sdk/openai';
@@ -224,7 +224,7 @@ function isInWorkspaceSubfolder(cwd) {
 if (currentDir === cwd) {
 continue;
 }
-console.
+console.info(`Checking for workspace indicators in: ${currentDir}`);
 if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
 return true;
 }
@@ -244,7 +244,7 @@ function isInWorkspaceSubfolder(cwd) {
 }
 return false;
 } catch (error) {
-console.
+console.warn(`Error in workspace detection: ${error}`);
 return false;
 }
 }
@@ -315,12 +315,12 @@ function spawnWithOutput(command, args, options) {
 }
 async function spawnSWPM(cwd, command, packageNames) {
 try {
-console.
+console.info("Running install command with swpm");
 const swpmPath = createRequire(import.meta.filename).resolve("swpm");
 await spawn(swpmPath, [command, ...packageNames], { cwd });
 return;
 } catch (e) {
-console.
+console.warn("Failed to run install command with swpm", e);
 }
 try {
 let packageManager;
@@ -348,11 +348,11 @@ async function spawnSWPM(cwd, command, packageNames) {
 }
 }
 args.push(...packageNames);
-console.
+console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
 await spawn(packageManager, args, { cwd });
 return;
 } catch (e) {
-console.
+console.warn(`Failed to run install command with native package manager: ${e}`);
 }
 throw new Error(`Failed to run install command with swpm and native package managers`);
 }
@@ -383,10 +383,10 @@ async function logGitState(targetPath, label) {
 const gitStatusResult = await git(targetPath, "status", "--porcelain");
 const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
 const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
-console.
-console.
-console.
-console.
+console.info(`\u{1F4CA} Git state ${label}:`);
+console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
+console.info("Recent commits:", gitLogResult.stdout.trim());
+console.info("Total commits:", gitCountResult.stdout.trim());
 } catch (gitError) {
 console.warn(`Could not get git state ${label}:`, gitError);
 }
@@ -458,18 +458,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
 try {
 if (!await isInsideGitRepo(targetPath)) return;
 await git(targetPath, "checkout", "-b", branchName);
-console.
+console.info(`Created new branch: ${branchName}`);
 } catch (error) {
 const errorStr = error instanceof Error ? error.message : String(error);
 if (errorStr.includes("already exists")) {
 try {
 await git(targetPath, "checkout", branchName);
-console.
+console.info(`Switched to existing branch: ${branchName}`);
 } catch {
 const timestamp = Date.now().toString().slice(-6);
 const uniqueBranchName = `${branchName}-${timestamp}`;
 await git(targetPath, "checkout", "-b", uniqueBranchName);
-console.
+console.info(`Created unique branch: ${uniqueBranchName}`);
 }
 } else {
 throw error;
@@ -479,9 +479,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
 async function backupAndReplaceFile(sourceFile, targetFile) {
 const backupFile = `${targetFile}.backup-${Date.now()}`;
 await copyFile(targetFile, backupFile);
-console.
+console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
 await copyFile(sourceFile, targetFile);
-console.
+console.info(`\u{1F504} Replaced file with template version (backup created)`);
 }
 async function renameAndCopyFile(sourceFile, targetFile) {
 let counter = 1;
@@ -495,17 +495,17 @@ async function renameAndCopyFile(sourceFile, targetFile) {
 counter++;
 }
 await copyFile(sourceFile, uniqueTargetFile);
-console.
+console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
 return uniqueTargetFile;
 }
 var isValidMastraLanguageModel = (model) => {
 return model && typeof model === "object" && typeof model.modelId === "string";
 };
-var resolveTargetPath = (inputData,
+var resolveTargetPath = (inputData, requestContext) => {
 if (inputData.targetPath) {
 return inputData.targetPath;
 }
-const contextPath =
+const contextPath = requestContext.get("targetPath");
 if (contextPath) {
 return contextPath;
 }
@@ -544,7 +544,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
 if (!hasConflict) {
 newEntries.push(trimmed);
 } else {
-console.
+console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
 }
 }
 }
@@ -579,7 +579,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
 if (!existingVars.has(key)) {
 newVars.push({ key, value });
 } else {
-console.
+console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
 }
 }
 if (newVars.length === 0) {
@@ -600,7 +600,7 @@ var detectAISDKVersion = async (projectPath) => {
 try {
 const packageJsonPath = join(projectPath, "package.json");
 if (!existsSync(packageJsonPath)) {
-console.
+console.info("No package.json found, defaulting to v2");
 return "v2";
 }
 const packageContent = await readFile(packageJsonPath, "utf-8");
@@ -618,16 +618,16 @@ var detectAISDKVersion = async (projectPath) => {
 if (versionMatch) {
 const majorVersion = parseInt(versionMatch[1]);
 if (majorVersion >= 2) {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
 return "v2";
 } else {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
 return "v1";
 }
 }
 }
 }
-console.
+console.info("No AI SDK version detected, defaulting to v2");
 return "v2";
 } catch (error) {
 console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
@@ -658,37 +658,15 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 const { google } = await import('@ai-sdk/google');
 return google(modelId);
 }
-},
-v2: {
-openai: async () => {
-const { openai: openai2 } = await import('@ai-sdk/openai-v5');
-return openai2(modelId);
-},
-anthropic: async () => {
-const { anthropic } = await import('@ai-sdk/anthropic-v5');
-return anthropic(modelId);
-},
-groq: async () => {
-const { groq } = await import('@ai-sdk/groq-v5');
-return groq(modelId);
-},
-xai: async () => {
-const { xai } = await import('@ai-sdk/xai-v5');
-return xai(modelId);
-},
-google: async () => {
-const { google } = await import('@ai-sdk/google-v5');
-return google(modelId);
-}
 }
 };
-const providerFn = providerMap[version][provider];
+const providerFn = version === `v1` ? providerMap[version][provider] : () => new ModelRouterLanguageModel(`${provider}/${modelId}`);
 if (!providerFn) {
 console.error(`Unsupported provider: ${provider}`);
 return null;
 }
 const modelInstance = await providerFn();
-console.
+console.info(`Created ${provider} model instance (${version}): ${modelId}`);
 return modelInstance;
 } catch (error) {
 console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
@@ -696,13 +674,13 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 }
 };
 var resolveModel = async ({
-
-defaultModel = openai
+requestContext,
+defaultModel = "openai/gpt-4.1",
 projectPath
 }) => {
-const modelFromContext =
+const modelFromContext = requestContext.get("model");
 if (modelFromContext) {
-console.
+console.info("Using model from request context");
 if (isValidMastraLanguageModel(modelFromContext)) {
 return modelFromContext;
 }
@@ -710,18 +688,18 @@ var resolveModel = async ({
 'Invalid model provided. Model must be a MastraLanguageModel instance (e.g., openai("gpt-4"), anthropic("claude-3-5-sonnet"), etc.)'
 );
 }
-const selectedModel =
+const selectedModel = requestContext.get("selectedModel");
 if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
-console.
+console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
 const version = await detectAISDKVersion(projectPath);
 const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
 if (modelInstance) {
-
+requestContext.set("model", modelInstance);
 return modelInstance;
 }
 }
-console.
-return defaultModel;
+console.info("Using default model");
+return typeof defaultModel === `string` ? new ModelRouterLanguageModel(defaultModel) : defaultModel;
 };
 
 // src/defaults.ts
@@ -890,7 +868,7 @@ You have access to an enhanced set of tools based on production coding agent pat
 ### Task Management
 - **taskManager**: Create and track multi-step coding tasks with states (pending, in_progress, completed, blocked). Use this for complex projects that require systematic progress tracking.
 
-### Code Discovery & Analysis
+### Code Discovery & Analysis
 - **codeAnalyzer**: Analyze codebase structure, discover definitions (functions, classes, interfaces), map dependencies, and understand architectural patterns.
 - **smartSearch**: Intelligent search with context awareness, pattern matching, and relevance scoring.
 
@@ -928,12 +906,14 @@ import { LibSQLStore } from '@mastra/libsql';
 import { weatherTool } from '../tools/weather-tool';
 
 export const weatherAgent = new Agent({
+id: 'weather-agent',
 name: 'Weather Agent',
 instructions: \${instructions},
 model: openai('gpt-4o-mini'),
 tools: { weatherTool },
 memory: new Memory({
 storage: new LibSQLStore({
+id: 'mastra-memory-storage',
 url: 'file:../mastra.db', // ask user what database to use, use this as the default
 }),
 }),
@@ -962,8 +942,8 @@ export const weatherTool = createTool({
 conditions: z.string(),
 location: z.string(),
 }),
-execute: async (
-return await getWeather(
+execute: async (inputData) => {
+return await getWeather(inputData.location);
 },
 });
 \`\`\`
@@ -981,7 +961,7 @@ const fetchWeather = createStep({
 city: z.string().describe('The city to get the weather for'),
 }),
 outputSchema: forecastSchema,
-execute: async (
+execute: async (inputData) => {
 if (!inputData) {
 throw new Error('Input data not found');
 }
@@ -1035,7 +1015,8 @@ const planActivities = createStep({
 outputSchema: z.object({
 activities: z.string(),
 }),
-execute: async (
+execute: async (inputData, context) => {
+const mastra = context?.mastra;
 const forecast = inputData;
 
 if (!forecast) {
@@ -1100,7 +1081,8 @@ export const mastra = new Mastra({
 workflows: { weatherWorkflow },
 agents: { weatherAgent },
 storage: new LibSQLStore({
-
+id: 'mastra-storage',
+// stores observability, evals, ... into memory storage, if it needs to persist, change to file:../mastra.db
 url: ":memory:",
 }),
 logger: new PinoLogger({
@@ -1144,8 +1126,8 @@ export const mastra = new Mastra({
 }).optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.readFile({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.readFile({ ...inputData, projectPath });
 }
 }),
 writeFile: createTool({
@@ -1164,8 +1146,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.writeFile({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.writeFile({ ...inputData, projectPath });
 }
 }),
 listDirectory: createTool({
@@ -1196,8 +1178,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.listDirectory({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.listDirectory({ ...inputData, projectPath });
 }
 }),
 executeCommand: createTool({
@@ -1221,10 +1203,10 @@ export const mastra = new Mastra({
 executionTime: z.number().optional(),
 error: z.string().optional()
 }),
-execute: async (
+execute: async (inputData) => {
 return await _AgentBuilderDefaults.executeCommand({
-...
-workingDirectory:
+...inputData,
+workingDirectory: inputData.workingDirectory || projectPath
 });
 }
 }),
@@ -1262,8 +1244,8 @@ export const mastra = new Mastra({
 ),
 message: z.string()
 }),
-execute: async (
-return await _AgentBuilderDefaults.manageTaskList(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.manageTaskList(inputData);
 }
 }),
 // Advanced File Operations
@@ -1297,8 +1279,8 @@ export const mastra = new Mastra({
 ),
 message: z.string()
 }),
-execute: async (
-return await _AgentBuilderDefaults.performMultiEdit({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.performMultiEdit({ ...inputData, projectPath });
 }
 }),
 replaceLines: createTool({
@@ -1322,8 +1304,8 @@ export const mastra = new Mastra({
 backup: z.string().optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.replaceLines({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.replaceLines({ ...inputData, projectPath });
 }
 }),
 // File diagnostics tool to help debug line replacement issues
@@ -1351,8 +1333,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.showFileLines({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.showFileLines({ ...inputData, projectPath });
 }
 }),
 // Enhanced Pattern Search
@@ -1395,8 +1377,8 @@ export const mastra = new Mastra({
 patterns: z.array(z.string())
 })
 }),
-execute: async (
-return await _AgentBuilderDefaults.performSmartSearch(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.performSmartSearch(inputData, projectPath);
 }
 }),
 validateCode: createTool({
@@ -1429,8 +1411,8 @@ export const mastra = new Mastra({
 validationsFailed: z.array(z.string())
 })
 }),
-execute: async (
-const { projectPath: validationProjectPath, validationType, files } =
+execute: async (inputData) => {
+const { projectPath: validationProjectPath, validationType, files } = inputData;
 const targetPath = validationProjectPath || projectPath;
 return await _AgentBuilderDefaults.validateCode({
 projectPath: targetPath,
@@ -1469,8 +1451,8 @@ export const mastra = new Mastra({
 suggestions: z.array(z.string()).optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.webSearch(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.webSearch(inputData);
 }
 }),
 // Task Completion Signaling
@@ -1499,8 +1481,8 @@ export const mastra = new Mastra({
 summary: z.string(),
 confidence: z.number().min(0).max(100)
 }),
-execute: async (
-return await _AgentBuilderDefaults.signalCompletion(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.signalCompletion(inputData);
 }
 }),
 manageProject: createTool({
@@ -1525,8 +1507,8 @@ export const mastra = new Mastra({
 details: z.string().optional(),
 error: z.string().optional()
 }),
-execute: async (
-const { action, features, packages } =
+execute: async (inputData) => {
+const { action, features, packages } = inputData;
 try {
 switch (action) {
 case "create":
@@ -1587,8 +1569,8 @@ export const mastra = new Mastra({
 stdout: z.array(z.string()).optional().describe("Server output lines captured during startup"),
 error: z.string().optional()
 }),
-execute: async (
-const { action, port } =
+execute: async (inputData) => {
+const { action, port } = inputData;
 try {
 switch (action) {
 case "start":
@@ -1673,8 +1655,8 @@ export const mastra = new Mastra({
 url: z.string(),
 method: z.string()
 }),
-execute: async (
-const { method, url, baseUrl, headers, body, timeout } =
+execute: async (inputData) => {
+const { method, url, baseUrl, headers, body, timeout } = inputData;
 try {
 return await _AgentBuilderDefaults.makeHttpRequest({
 method,
@@ -1729,7 +1711,7 @@ export const mastra = new Mastra({
 /**
 * Get tools for a specific mode
 */
-static async
+static async listToolsForMode(projectPath, mode = "code-editor") {
 const allTools = await _AgentBuilderDefaults.DEFAULT_TOOLS(projectPath);
 if (mode === "template") {
 return _AgentBuilderDefaults.filterToolsForTemplateBuilder(allTools);
@@ -1756,7 +1738,7 @@ export const mastra = new Mastra({
 error: stderr
 };
 } catch (error) {
-console.
+console.error(error);
 return {
 success: false,
 message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1771,7 +1753,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Installing packages:", JSON.stringify(packages, null, 2));
 const packageStrings = packages.map((p) => `${p.name}`);
 await spawnSWPM(projectPath || "", "add", packageStrings);
 return {
@@ -1795,7 +1777,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
 let packageNames = [];
 if (packages && packages.length > 0) {
 packageNames = packages.map((p) => `${p.name}`);
@@ -3108,7 +3090,8 @@ var ToolSummaryProcessor = class extends MemoryProcessor {
 constructor({ summaryModel }) {
 super({ name: "ToolSummaryProcessor" });
 this.summaryAgent = new Agent({
-
+id: "tool-summary-agent",
+name: "Tool Summary Agent",
 description: "A summary agent that summarizes tool calls and results",
 instructions: "You are a summary agent that summarizes tool calls and results",
 model: summaryModel
@@ -3212,7 +3195,7 @@ ${config.instructions}` : "";
 model: config.model,
 tools: async () => {
 return {
-...await AgentBuilderDefaults.
+...await AgentBuilderDefaults.listToolsForMode(config.projectPath, config.mode),
 ...config.tools || {}
 };
 },
@@ -3234,9 +3217,9 @@ ${config.instructions}` : "";
 * Enhanced generate method with AgentBuilder-specific configuration
 * Overrides the base Agent generate method to provide additional project context
 */
-
+generateLegacy = async (messages, generateOptions = {}) => {
 const { maxSteps, ...baseOptions } = generateOptions;
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: generateOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3257,15 +3240,15 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generateLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-
+streamLegacy = async (messages, streamOptions = {}) => {
 const { maxSteps, ...baseOptions } = streamOptions;
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3286,15 +3269,15 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.streamLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-async
+async stream(messages, streamOptions) {
 const { ...baseOptions } = streamOptions || {};
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3314,11 +3297,11 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.stream(messages, enhancedOptions);
 }
-async
+async generate(messages, options) {
 const { ...baseOptions } = options || {};
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: options?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3338,7 +3321,7 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generate(messages, enhancedOptions);
 }
 };
 var cloneTemplateStep = createStep({
@@ -3388,13 +3371,13 @@ var analyzePackageStep = createStep({
 inputSchema: CloneTemplateResultSchema,
 outputSchema: PackageAnalysisSchema,
 execute: async ({ inputData }) => {
-console.
+console.info("Analyzing template package.json...");
 const { templateDir } = inputData;
 const packageJsonPath = join(templateDir, "package.json");
 try {
 const packageJsonContent = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(packageJsonContent);
-console.
+console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
 return {
 dependencies: packageJson.dependencies || {},
 devDependencies: packageJson.devDependencies || {},
@@ -3426,14 +3409,15 @@ var discoverUnitsStep = createStep({
 description: "Discover template units by analyzing the templates directory structure",
 inputSchema: CloneTemplateResultSchema,
 outputSchema: DiscoveryResultSchema,
-execute: async ({ inputData,
+execute: async ({ inputData, requestContext }) => {
 const { templateDir } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
-console.
-const model = await resolveModel({
+console.info("targetPath", targetPath);
+const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 try {
 const agent = new Agent({
+id: "mastra-project-discoverer",
 model,
 instructions: `You are an expert at analyzing Mastra projects.
 
@@ -3491,10 +3475,12 @@ Return the actual exported names of the units, as well as the file names.`,
 networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
 other: z.array(z.object({ name: z.string(), file: z.string() })).optional()
 });
-const result = isV2 ? await agent
-
+const result = isV2 ? await tryGenerateWithJsonFallback(agent, prompt, {
+structuredOutput: {
+schema: output
+},
 maxSteps: 100
-}) : await agent.
+}) : await agent.generateLegacy(prompt, {
 experimental_output: output,
 maxSteps: 100
 });
@@ -3518,7 +3504,7 @@ Return the actual exported names of the units, as well as the file names.`,
 template.other?.forEach((otherId) => {
 units.push({ kind: "other", id: otherId.name, file: otherId.file });
 });
-console.
+console.info("Discovered units:", JSON.stringify(units, null, 2));
 if (units.length === 0) {
 throw new Error(`No Mastra units (agents, workflows, tools) found in template.
 Possible causes:
@@ -3568,8 +3554,8 @@ var prepareBranchStep = createStep({
 description: "Create or switch to integration branch before modifications",
 inputSchema: PrepareBranchInputSchema,
 outputSchema: PrepareBranchResultSchema,
-execute: async ({ inputData,
-const targetPath = resolveTargetPath(inputData,
+execute: async ({ inputData, requestContext }) => {
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const branchName = `feat/install-template-${inputData.slug}`;
 await gitCheckoutBranch(branchName, targetPath);
@@ -3593,10 +3579,10 @@ var packageMergeStep = createStep({
 description: "Merge template package.json dependencies into target project",
 inputSchema: PackageMergeInputSchema,
 outputSchema: PackageMergeResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Package merge step starting...");
 const { slug, packageInfo } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const targetPkgPath = join(targetPath, "package.json");
 let targetPkgRaw = "{}";
@@ -3670,9 +3656,9 @@ var installStep = createStep({
 description: "Install packages based on merged package.json",
 inputSchema: InstallInputSchema,
 outputSchema: InstallResultSchema,
-execute: async ({ inputData,
-console.
-const targetPath = resolveTargetPath(inputData,
+execute: async ({ inputData, requestContext }) => {
+console.info("Running install step...");
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 await spawnSWPM(targetPath, "install", []);
 const lock = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock"].map((f) => join(targetPath, f)).find((f) => existsSync(f));
@@ -3698,10 +3684,10 @@ var programmaticFileCopyStep = createStep({
 description: "Programmatically copy template files to target project based on ordered units",
 inputSchema: FileCopyInputSchema,
 outputSchema: FileCopyResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Programmatic file copy step starting...");
 const { orderedUnits, templateDir, commitSha, slug } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const copiedFiles = [];
 const conflicts = [];
@@ -3742,7 +3728,7 @@ var programmaticFileCopyStep = createStep({
 }
 };
 for (const unit of orderedUnits) {
-console.
+console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
 let sourceFile;
 let resolvedUnitFile;
 if (unit.file.includes("/")) {
@@ -3773,7 +3759,7 @@ var programmaticFileCopyStep = createStep({
 }
 const targetDir = dirname(resolvedUnitFile);
 const namingConvention = await analyzeNamingConvention(targetDir);
-console.
+console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
 const hasExtension = extname(unit.id) !== "";
 const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
 const fileExtension = extname(unit.file);
@@ -3781,7 +3767,7 @@ var programmaticFileCopyStep = createStep({
 const targetFile = resolve(targetPath, targetDir, convertedFileName);
 if (existsSync(targetFile)) {
 const strategy = determineConflictStrategy(unit, targetFile);
-console.
+console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
 switch (strategy) {
 case "skip":
 conflicts.push({
@@ -3790,7 +3776,7 @@ var programmaticFileCopyStep = createStep({
 sourceFile: unit.file,
 targetFile: `${targetDir}/${convertedFileName}`
 });
-console.
+console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
 continue;
 case "backup-and-replace":
 try {
@@ -3800,7 +3786,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(
 `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
 );
 continue;
@@ -3821,7 +3807,7 @@ var programmaticFileCopyStep = createStep({
 destination: uniqueTargetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
 continue;
 } catch (renameError) {
 conflicts.push({
@@ -3850,7 +3836,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
 } catch (copyError) {
 conflicts.push({
 unit: { kind: unit.kind, id: unit.id },
@@ -3871,7 +3857,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Copied tsconfig.json from template to target");
 } else {
 const minimalTsconfig = {
 compilerOptions: {
@@ -3893,7 +3879,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Generated minimal tsconfig.json in target");
 }
 }
 } catch (e) {
@@ -3918,7 +3904,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetMastraIndex,
 unit: { kind: "other", id: "mastra-index" }
 });
-console.
+console.info("\u2713 Copied Mastra index file from template");
 }
 }
 } catch (e) {
@@ -3942,7 +3928,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore" }
 });
-console.
+console.info("\u2713 Copied .gitignore from template to target");
 } else {
 const targetContent = await readFile(targetGitignore, "utf-8");
 const templateContent = await readFile(templateGitignore, "utf-8");
@@ -3955,9 +3941,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore-merge" }
 });
-console.
+console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new .gitignore entries to add from template");
 }
 }
 }
@@ -3985,7 +3971,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env" }
 });
-console.
+console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
 } else {
 const targetContent = await readFile(targetEnv, "utf-8");
 const mergedContent = mergeEnvFiles(targetContent, variables, slug);
@@ -3997,9 +3983,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env-merge" }
 });
-console.
+console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new environment variables to add (all already exist in .env)");
 }
 }
 }
@@ -4020,13 +4006,13 @@ var programmaticFileCopyStep = createStep({
 fileList,
 { skipIfNoStaged: true }
 );
-console.
+console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
 } catch (commitError) {
 console.warn("Failed to commit copied files:", commitError);
 }
 }
 const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
-console.
+console.info(message);
 return {
 success: true,
 copiedFiles,
@@ -4050,12 +4036,12 @@ var intelligentMergeStep = createStep({
 description: "Use AgentBuilder to intelligently merge template files",
 inputSchema: IntelligentMergeInputSchema,
 outputSchema: IntelligentMergeResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Intelligent merge step starting...");
 const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 const copyFileTool = createTool({
 id: "copy-file",
 description: "Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).",
@@ -4068,9 +4054,9 @@ var intelligentMergeStep = createStep({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
+execute: async (input) => {
 try {
-const { sourcePath, destinationPath } =
+const { sourcePath, destinationPath } = input;
 const resolvedSourcePath = resolve(templateDir, sourcePath);
 const resolvedDestinationPath = resolve(targetPath, destinationPath);
 if (existsSync(resolvedSourcePath) && !existsSync(dirname(resolvedDestinationPath))) {
@@ -4178,8 +4164,8 @@ Template information:
 const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
 const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
 const mastraIndexExists = existsSync(targetMastraIndex);
-console.
-console.
+console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
+console.info(
 "Registrable components:",
 registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
 );
@@ -4193,7 +4179,7 @@ Template information:
 notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
 });
 }
-console.
+console.info(`Creating task list with ${tasks.length} tasks...`);
 await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
 await logGitState(targetPath, "before intelligent merge");
 const prompt = `
@@ -4240,17 +4226,17 @@ For each task:
 Start by listing your tasks and work through them systematically!
 `;
 const isV2 = model.specificationVersion === "v2";
-const result = isV2 ? await agentBuilder.
+const result = isV2 ? await agentBuilder.stream(prompt) : await agentBuilder.streamLegacy(prompt);
 const actualResolutions = [];
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
 if (chunkData.toolName === "manageTaskList") {
@@ -4264,7 +4250,7 @@ Start by listing your tasks and work through them systematically!
 content: toolResult.content || "",
 notes: toolResult.notes
 });
-console.
+console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
 }
 } catch (parseError) {
 console.warn("Failed to parse task management result:", parseError);
@@ -4318,13 +4304,13 @@ var validationAndFixStep = createStep({
 description: "Validate the merged template code and fix any issues using a specialized agent",
 inputSchema: ValidationFixInputSchema,
 outputSchema: ValidationFixResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Validation and fix step starting...");
 const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
 if (!hasChanges) {
-console.
+console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
 return {
 success: true,
 applied: false,
@@ -4336,15 +4322,16 @@ var validationAndFixStep = createStep({
 }
 };
 }
-console.
+console.info(
 `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
 );
 let currentIteration = 1;
 try {
-const model = await resolveModel({
-const allTools = await AgentBuilderDefaults.
+const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
+const allTools = await AgentBuilderDefaults.listToolsForMode(targetPath, "template");
 const validationAgent = new Agent({
-
+id: "code-validator-fixer",
+name: "Code Validator Fixer",
 description: "Specialized agent for validating and fixing template integration issues",
 instructions: `You are a code validation and fixing specialist. Your job is to:
 
@@ -4462,7 +4449,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 executeCommand: allTools.executeCommand
 }
 });
-console.
+console.info("Starting validation and fix agent with internal loop...");
 let validationResults = {
 valid: false,
 errorsFixed: 0,
@@ -4473,7 +4460,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 // Store the actual error details
 };
 while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
-console.
+console.info(`
 === Validation Iteration ${currentIteration} ===`);
 const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.
 
@@ -4482,9 +4469,11 @@ Start by running validateCode with all validation types to get a complete pictur
 Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
 const isV2 = model.specificationVersion === "v2";
 const output = z.object({ success: z.boolean() });
-const result = isV2 ? await validationAgent
-
-
+const result = isV2 ? await tryStreamWithJsonFallback(validationAgent, iterationPrompt, {
+structuredOutput: {
+schema: output
+}
+}) : await validationAgent.streamLegacy(iterationPrompt, {
 experimental_output: output
 });
 let iterationErrors = 0;
@@ -4493,13 +4482,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId,
 iteration: currentIteration
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
@@ -4508,7 +4497,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 lastValidationResult = toolResult;
 if (toolResult?.summary) {
 iterationErrors = toolResult.summary.totalErrors || 0;
-console.
+console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
 }
 }
 }
@@ -4520,12 +4509,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 if (iterationErrors > 0 && lastValidationResult?.errors) {
 validationResults.lastValidationErrors = lastValidationResult.errors;
 }
-console.
+console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
 if (iterationErrors === 0) {
-console.
+console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
 break;
 } else if (currentIteration >= maxIterations) {
-console.
+console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
 break;
 }
 currentIteration++;
@@ -4570,7 +4559,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 } finally {
 try {
 await rm(templateDir, { recursive: true, force: true });
-console.
+console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
 } catch (cleanupError) {
 console.warn("Failed to cleanup template directory:", cleanupError);
 }
@@ -4745,7 +4734,7 @@ var agentBuilderTemplateWorkflow = createWorkflow({
 }).commit();
 async function mergeTemplateBySlug(slug, targetPath) {
 const template = await getMastraTemplate(slug);
-const run = await agentBuilderTemplateWorkflow.
+const run = await agentBuilderTemplateWorkflow.createRun();
 return await run.start({
 inputData: {
 repo: template.githubUrl,
@@ -5052,7 +5041,7 @@ var planningIterationStep = createStep({
 outputSchema: PlanningIterationResultSchema,
 suspendSchema: PlanningIterationSuspendSchema,
 resumeSchema: PlanningIterationResumeSchema,
-execute: async ({ inputData, resumeData, suspend,
+execute: async ({ inputData, resumeData, suspend, requestContext }) => {
 const {
 action,
 workflowName,
@@ -5063,12 +5052,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
-let storedQAPairs =
+let storedQAPairs = requestContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5080,14 +5067,10 @@ var planningIterationStep = createStep({
 }
 return pair;
 });
-
+requestContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const planningAgent = new Agent({
 model,
 instructions: taskPlanningPrompts.planningAgent.instructions({
@@ -5117,8 +5100,10 @@ var planningIterationStep = createStep({
 projectStructure,
 research
 });
-const result = await planningAgent.
-
+const result = await planningAgent.generate(planningPrompt, {
+structuredOutput: {
+schema: PlanningAgentOutputSchema
+}
 // maxSteps: 15,
 });
 const planResult = await result.object;
@@ -5133,8 +5118,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5142,8 +5127,8 @@ var planningIterationStep = createStep({
 answeredAt: null
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
-
-console.
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5140,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
-
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5185,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
|
-
console.
|
|
5188
|
+
console.info(`Requesting user approval for ${tasks.length} tasks`);
|
|
5204
5189
|
const summary = `Task List for Approval:
|
|
5205
5190
|
|
|
5206
5191
|
${tasks.length} tasks planned:
|
|
@@ -5213,14 +5198,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
|
|
|
5213
5198
|
});
|
|
5214
5199
|
}
|
|
5215
5200
|
if (resumeData.approved) {
|
|
5216
|
-
console.
|
|
5201
|
+
console.info("Task list approved by user");
|
|
5217
5202
|
return {
|
|
5218
5203
|
approved: true,
|
|
5219
5204
|
tasks,
|
|
5220
5205
|
message: "Task list approved, ready for execution"
|
|
5221
5206
|
};
|
|
5222
5207
|
} else {
|
|
5223
|
-
console.
|
|
5208
|
+
console.info("Task list rejected by user");
|
|
5224
5209
|
return {
|
|
5225
5210
|
approved: false,
|
|
5226
5211
|
tasks,
|
|
@@ -5237,7 +5222,7 @@ var planningAndApprovalWorkflow = createWorkflow({
|
|
|
5237
5222
|
outputSchema: TaskApprovalOutputSchema,
|
|
5238
5223
|
steps: [planningIterationStep, taskApprovalStep]
|
|
5239
5224
|
}).dountil(planningIterationStep, async ({ inputData }) => {
|
|
5240
|
-
console.
|
|
5225
|
+
console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
|
|
5241
5226
|
return inputData.planComplete === true;
|
|
5242
5227
|
}).map(async ({ inputData }) => {
|
|
5243
5228
|
return {
|
|
@@ -5301,7 +5286,7 @@ const myStep = createStep({
|
|
|
5301
5286
|
- \`mastra\`: Access to Mastra instance (agents, tools, other workflows)
|
|
5302
5287
|
- \`getStepResult(stepInstance)\`: Get results from previous steps
|
|
5303
5288
|
- \`getInitData()\`: Access original workflow input data
|
|
5304
|
-
- \`
|
|
5289
|
+
- \`requestContext\`: Runtime dependency injection context
|
|
5305
5290
|
- \`runCount\`: Number of times this step has run (useful for retries)
|
|
5306
5291
|
|
|
5307
5292
|
### **\u{1F504} CONTROL FLOW METHODS**
|
|
@@ -5380,10 +5365,10 @@ const toolStep = createStep(myTool);
|
|
|
5380
5365
|
|
|
5381
5366
|
// Method 2: Call tool in execute function
|
|
5382
5367
|
const step = createStep({
|
|
5383
|
-
execute: async ({ inputData,
|
|
5368
|
+
execute: async ({ inputData, requestContext }) => {
|
|
5384
5369
|
const result = await myTool.execute({
|
|
5385
5370
|
context: inputData,
|
|
5386
|
-
|
|
5371
|
+
requestContext
|
|
5387
5372
|
});
|
|
5388
5373
|
return result;
|
|
5389
5374
|
}
|
|
@@ -5427,7 +5412,7 @@ export const mastra = new Mastra({
|
|
|
5427
5412
|
sendEmailWorkflow, // Use camelCase for keys
|
|
5428
5413
|
dataProcessingWorkflow
|
|
5429
5414
|
},
|
|
5430
|
-
storage: new LibSQLStore({ url: 'file:./mastra.db' }), // Required for suspend/resume
|
|
5415
|
+
storage: new LibSQLStore({ id: 'mastra-storage', url: 'file:./mastra.db' }), // Required for suspend/resume
|
|
5431
5416
|
});
|
|
5432
5417
|
\`\`\`
|
|
5433
5418
|
|
|
@@ -5475,7 +5460,7 @@ export const mastra = new Mastra({
|
|
|
5475
5460
|
**Running Workflows:**
|
|
5476
5461
|
\`\`\`typescript
|
|
5477
5462
|
// Create and start run
|
|
5478
|
-
const run = await workflow.
|
|
5463
|
+
const run = await workflow.createRun();
|
|
5479
5464
|
const result = await run.start({ inputData: {...} });
|
|
5480
5465
|
|
|
5481
5466
|
// Stream execution for real-time monitoring
|
|
@@ -5499,7 +5484,7 @@ run.watch((event) => console.log(event));
|
|
|
5499
5484
|
- Use workflows as steps: \`.then(otherWorkflow)\`
|
|
5500
5485
|
- Enable complex workflow composition
|
|
5501
5486
|
|
|
5502
|
-
**
|
|
5487
|
+
**Request Context:**
|
|
5503
5488
|
- Pass shared data across all steps
|
|
5504
5489
|
- Enable dependency injection patterns
|
|
5505
5490
|
|
|
@@ -5672,11 +5657,11 @@ var restrictedTaskManager = createTool({
|
|
|
5672
5657
|
),
|
|
5673
5658
|
message: z.string()
|
|
5674
5659
|
}),
|
|
5675
|
-
execute: async (
|
|
5660
|
+
execute: async (input) => {
|
|
5676
5661
|
const adaptedContext = {
|
|
5677
|
-
...
|
|
5678
|
-
action:
|
|
5679
|
-
tasks:
|
|
5662
|
+
...input,
|
|
5663
|
+
action: input.action,
|
|
5664
|
+
tasks: input.tasks?.map((task) => ({
|
|
5680
5665
|
...task,
|
|
5681
5666
|
priority: task.priority || "medium"
|
|
5682
5667
|
}))
|
|
@@ -5691,13 +5676,13 @@ var workflowDiscoveryStep = createStep({
|
|
|
5691
5676
|
description: "Discover existing workflows in the project",
|
|
5692
5677
|
inputSchema: WorkflowBuilderInputSchema,
|
|
5693
5678
|
outputSchema: WorkflowDiscoveryResultSchema,
|
|
5694
|
-
execute: async ({ inputData,
|
|
5695
|
-
console.
|
|
5679
|
+
execute: async ({ inputData, requestContext: _requestContext }) => {
|
|
5680
|
+
console.info("Starting workflow discovery...");
|
|
5696
5681
|
const { projectPath = process.cwd() } = inputData;
|
|
5697
5682
|
try {
|
|
5698
5683
|
const workflowsPath = join(projectPath, "src/mastra/workflows");
|
|
5699
5684
|
if (!existsSync(workflowsPath)) {
|
|
5700
|
-
console.
|
|
5685
|
+
console.info("No workflows directory found");
|
|
5701
5686
|
return {
|
|
5702
5687
|
success: true,
|
|
5703
5688
|
workflows: [],
|
|
@@ -5726,7 +5711,7 @@ var workflowDiscoveryStep = createStep({
|
|
|
5726
5711
|
}
|
|
5727
5712
|
}
|
|
5728
5713
|
}
|
|
5729
|
-
console.
|
|
5714
|
+
console.info(`Discovered ${workflows.length} existing workflows`);
|
|
5730
5715
|
return {
|
|
5731
5716
|
success: true,
|
|
5732
5717
|
workflows,
|
|
@@ -5750,8 +5735,8 @@ var projectDiscoveryStep = createStep({
|
|
|
5750
5735
|
description: "Analyze the project structure and setup",
|
|
5751
5736
|
inputSchema: WorkflowDiscoveryResultSchema,
|
|
5752
5737
|
outputSchema: ProjectDiscoveryResultSchema,
|
|
5753
|
-
execute: async ({ inputData: _inputData,
|
|
5754
|
-
console.
|
|
5738
|
+
execute: async ({ inputData: _inputData, requestContext: _requestContext }) => {
|
|
5739
|
+
console.info("Starting project discovery...");
|
|
5755
5740
|
try {
|
|
5756
5741
|
const projectPath = process.cwd();
|
|
5757
5742
|
const projectStructure = {
|
|
@@ -5772,7 +5757,7 @@ var projectDiscoveryStep = createStep({
|
|
|
5772
5757
|
console.warn("Failed to read package.json:", error);
|
|
5773
5758
|
}
|
|
5774
5759
|
}
|
|
5775
|
-
console.
|
|
5760
|
+
console.info("Project discovery completed");
|
|
5776
5761
|
return {
|
|
5777
5762
|
success: true,
|
|
5778
5763
|
structure: {
|
|
@@ -5812,10 +5797,10 @@ var workflowResearchStep = createStep({
|
|
|
5812
5797
|
description: "Research Mastra workflows and gather relevant documentation",
|
|
5813
5798
|
inputSchema: ProjectDiscoveryResultSchema,
|
|
5814
5799
|
outputSchema: WorkflowResearchResultSchema,
|
|
5815
|
-
execute: async ({ inputData,
|
|
5816
|
-
console.
|
|
5800
|
+
execute: async ({ inputData, requestContext }) => {
|
|
5801
|
+
console.info("Starting workflow research...");
|
|
5817
5802
|
try {
|
|
5818
|
-
const model = await resolveModel({
|
|
5803
|
+
const model = await resolveModel({ requestContext });
|
|
5819
5804
|
const researchAgent = new Agent({
|
|
5820
5805
|
model,
|
|
5821
5806
|
instructions: workflowBuilderPrompts.researchAgent.instructions,
|
|
@@ -5827,8 +5812,10 @@ var workflowResearchStep = createStep({
|
|
|
5827
5812
|
dependencies: inputData.dependencies,
|
|
5828
5813
|
hasWorkflowsDir: inputData.structure.hasWorkflowsDir
|
|
5829
5814
|
});
|
|
5830
|
-
const result = await researchAgent.
|
|
5831
|
-
|
|
5815
|
+
const result = await researchAgent.generate(researchPrompt, {
|
|
5816
|
+
structuredOutput: {
|
|
5817
|
+
schema: WorkflowResearchResultSchema
|
|
5818
|
+
}
|
|
5832
5819
|
// stopWhen: stepCountIs(10),
|
|
5833
5820
|
});
|
|
5834
5821
|
const researchResult = await result.object;
|
|
@@ -5845,7 +5832,7 @@ var workflowResearchStep = createStep({
|
|
|
5845
5832
|
error: "Research agent failed to generate valid response"
|
|
5846
5833
|
};
|
|
5847
5834
|
}
|
|
5848
|
-
console.
|
|
5835
|
+
console.info("Research completed successfully");
|
|
5849
5836
|
return {
|
|
5850
5837
|
success: true,
|
|
5851
5838
|
documentation: {
|
|
@@ -5879,7 +5866,7 @@ var taskExecutionStep = createStep({
|
|
|
5879
5866
|
outputSchema: TaskExecutionResultSchema,
|
|
5880
5867
|
suspendSchema: TaskExecutionSuspendSchema,
|
|
5881
5868
|
resumeSchema: TaskExecutionResumeSchema,
|
|
5882
|
-
execute: async ({ inputData, resumeData, suspend,
|
|
5869
|
+
execute: async ({ inputData, resumeData, suspend, requestContext }) => {
|
|
5883
5870
|
const {
|
|
5884
5871
|
action,
|
|
5885
5872
|
workflowName,
|
|
@@ -5891,12 +5878,12 @@ var taskExecutionStep = createStep({
|
|
|
5891
5878
|
research,
|
|
5892
5879
|
projectPath
|
|
5893
5880
|
} = inputData;
|
|
5894
|
-
console.
|
|
5895
|
-
console.
|
|
5881
|
+
console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
|
|
5882
|
+
console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
|
|
5896
5883
|
try {
|
|
5897
|
-
const model = await resolveModel({
|
|
5884
|
+
const model = await resolveModel({ requestContext });
|
|
5898
5885
|
const currentProjectPath = projectPath || process.cwd();
|
|
5899
|
-
console.
|
|
5886
|
+
console.info("Pre-populating taskManager with planned tasks...");
|
|
5900
5887
|
const taskManagerContext = {
|
|
5901
5888
|
action: "create",
|
|
5902
5889
|
tasks: tasks.map((task) => ({
|
|
@@ -5909,7 +5896,7 @@ var taskExecutionStep = createStep({
|
|
|
5909
5896
|
}))
|
|
5910
5897
|
};
|
|
5911
5898
|
const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
|
|
5912
|
-
console.
|
|
5899
|
+
console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
|
|
5913
5900
|
if (!taskManagerResult.success) {
|
|
5914
5901
|
throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
|
|
5915
5902
|
}
|
|
@@ -5939,18 +5926,11 @@ ${workflowBuilderPrompts.validation.instructions}`
|
|
|
5939
5926
|
tasks,
|
|
5940
5927
|
resumeData
|
|
5941
5928
|
});
|
|
5942
|
-
const originalInstructions = await executionAgent.getInstructions({
|
|
5943
|
-
const additionalInstructions = executionAgent.instructions;
|
|
5944
|
-
let enhancedInstructions = originalInstructions;
|
|
5945
|
-
if (additionalInstructions) {
|
|
5946
|
-
enhancedInstructions = `${originalInstructions}
|
|
5947
|
-
|
|
5948
|
-
${additionalInstructions}`;
|
|
5949
|
-
}
|
|
5929
|
+
const originalInstructions = await executionAgent.getInstructions({ requestContext });
|
|
5950
5930
|
const enhancedOptions = {
|
|
5951
5931
|
stopWhen: stepCountIs(100),
|
|
5952
5932
|
temperature: 0.3,
|
|
5953
|
-
instructions:
|
|
5933
|
+
instructions: originalInstructions
|
|
5954
5934
|
};
|
|
5955
5935
|
let finalResult = null;
|
|
5956
5936
|
let allTasksCompleted = false;
|
|
@@ -5962,13 +5942,13 @@ ${additionalInstructions}`;
|
|
|
5962
5942
|
const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
|
|
5963
5943
|
const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
|
|
5964
5944
|
const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
|
|
5965
|
-
console.
|
|
5945
|
+
console.info(`
|
|
5966
5946
|
=== EXECUTION ITERATION ${iterationCount} ===`);
|
|
5967
|
-
console.
|
|
5968
|
-
console.
|
|
5947
|
+
console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
|
|
5948
|
+
console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
|
|
5969
5949
|
allTasksCompleted = pendingTasks.length === 0;
|
|
5970
5950
|
if (allTasksCompleted) {
|
|
5971
|
-
console.
|
|
5951
|
+
console.info("All tasks completed! Breaking execution loop.");
|
|
5972
5952
|
break;
|
|
5973
5953
|
}
|
|
5974
5954
|
const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
|
|
@@ -5979,7 +5959,7 @@ ${additionalInstructions}`;
|
|
|
5979
5959
|
})}
|
|
5980
5960
|
|
|
5981
5961
|
${workflowBuilderPrompts.validation.instructions}`;
|
|
5982
|
-
const stream = await executionAgent.
|
|
5962
|
+
const stream = await executionAgent.stream(iterationPrompt, {
|
|
5983
5963
|
structuredOutput: {
|
|
5984
5964
|
schema: TaskExecutionIterationInputSchema(tasks.length),
|
|
5985
5965
|
model
|
|
@@ -5992,19 +5972,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
|
|
|
5992
5972
|
finalMessage += chunk.payload.text;
|
|
5993
5973
|
}
|
|
5994
5974
|
if (chunk.type === "step-finish") {
|
|
5995
|
-
console.
|
|
5975
|
+
console.info(finalMessage);
|
|
5996
5976
|
finalMessage = "";
|
|
5997
5977
|
}
|
|
5998
5978
|
if (chunk.type === "tool-result") {
|
|
5999
|
-
console.
|
|
5979
|
+
console.info(JSON.stringify(chunk, null, 2));
|
|
6000
5980
|
}
|
|
6001
5981
|
if (chunk.type === "finish") {
|
|
6002
|
-
console.
|
|
5982
|
+
console.info(chunk);
|
|
6003
5983
|
}
|
|
6004
5984
|
}
|
|
6005
5985
|
await stream.consumeStream();
|
|
6006
5986
|
finalResult = await stream.object;
|
|
6007
|
-
console.
|
|
5987
|
+
console.info(`Iteration ${iterationCount} result:`, { finalResult });
|
|
6008
5988
|
if (!finalResult) {
|
|
6009
5989
|
throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
|
|
6010
5990
|
}
|
|
@@ -6012,17 +5992,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
|
|
|
6012
5992
|
const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
|
|
6013
5993
|
const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
|
|
6014
5994
|
allTasksCompleted = postPendingTasks.length === 0;
|
|
6015
|
-
console.
|
|
5995
|
+
console.info(
|
|
6016
5996
|
`After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
|
|
6017
5997
|
);
|
|
6018
5998
|
if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
|
|
6019
|
-
console.
|
|
5999
|
+
console.info(
|
|
6020
6000
|
`Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
|
|
6021
6001
|
);
|
|
6022
6002
|
break;
|
|
6023
6003
|
}
|
|
6024
6004
|
if (finalResult.status === "completed" && !allTasksCompleted) {
|
|
6025
|
-
console.
|
|
6005
|
+
console.info(
|
|
6026
6006
|
`Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
|
|
6027
6007
|
);
|
|
6028
6008
|
}
|
|
@@ -6035,8 +6015,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
|
|
|
6035
6015
|
throw new Error("No result received from agent execution");
|
|
6036
6016
|
}
|
|
6037
6017
|
if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
|
|
6038
|
-
console.
|
|
6039
|
-
console.
|
|
6018
|
+
console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
|
|
6019
|
+
console.info("finalResult", JSON.stringify(finalResult, null, 2));
|
|
6040
6020
|
return suspend({
|
|
6041
6021
|
questions: finalResult.questions,
|
|
6042
6022
|
currentProgress: finalResult.progress,
|
|
@@ -6052,7 +6032,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
|
|
|
6052
6032
|
const finalAllTasksCompleted = finalPendingTasks.length === 0;
|
|
6053
6033
|
const success = finalAllTasksCompleted && !finalResult.error;
|
|
6054
6034
|
const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
|
|
6055
|
-
console.
|
|
6035
|
+
console.info(message);
|
|
6056
6036
|
const missingTasks = finalPendingTasks.map((task) => task.id);
|
|
6057
6037
|
const validationErrors = [];
|
|
6058
6038
|
if (finalResult.error) {
|
|
@@ -6120,7 +6100,7 @@ var workflowBuilderWorkflow = createWorkflow({
|
|
|
6120
6100
|
userAnswers: void 0
|
|
6121
6101
|
};
|
|
6122
6102
|
}).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
|
|
6123
|
-
console.
|
|
6103
|
+
console.info(`Sub-workflow check: approved=${inputData.approved}`);
|
|
6124
6104
|
return inputData.approved === true;
|
|
6125
6105
|
}).map(async ({ getStepResult, getInitData }) => {
|
|
6126
6106
|
const initData = getInitData();
|