@mastra/agent-builder 0.0.0-scorers-ui-refactored-20250916094952 → 0.0.0-scorers-logs-20251208101616
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/CHANGELOG.md +436 -4
- package/README.md +0 -4
- package/dist/agent/index.d.ts +5 -7
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/defaults.d.ts +217 -765
- package/dist/defaults.d.ts.map +1 -1
- package/dist/index.js +299 -297
- package/dist/index.js.map +1 -1
- package/dist/processors/tool-summary.d.ts +10 -5
- package/dist/processors/tool-summary.d.ts.map +1 -1
- package/dist/types.d.ts +5 -5
- package/dist/utils.d.ts +7 -7
- package/dist/utils.d.ts.map +1 -1
- package/dist/workflows/shared/schema.d.ts +2 -2
- package/dist/workflows/task-planning/schema.d.ts +16 -16
- package/dist/workflows/task-planning/task-planning.d.ts +43 -31
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -1
- package/dist/workflows/template-builder/template-builder.d.ts +125 -27
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts +1 -1
- package/dist/workflows/workflow-builder/prompts.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/schema.d.ts +26 -26
- package/dist/workflows/workflow-builder/tools.d.ts +13 -83
- package/dist/workflows/workflow-builder/tools.d.ts.map +1 -1
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +112 -80
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -1
- package/dist/workflows/workflow-map.d.ts +2 -3767
- package/dist/workflows/workflow-map.d.ts.map +1 -1
- package/package.json +14 -12
package/dist/index.js
CHANGED
@@ -1,6 +1,5 @@
-import { Agent } from '@mastra/core/agent';
+import { Agent, tryGenerateWithJsonFallback, tryStreamWithJsonFallback } from '@mastra/core/agent';
 import { Memory } from '@mastra/memory';
-import { TokenLimiter } from '@mastra/memory/processors';
 import { exec as exec$1, execFile as execFile$1, spawn as spawn$1 } from 'child_process';
 import { mkdtemp, rm, readFile, writeFile, readdir, mkdir, copyFile, stat } from 'fs/promises';
 import { join, resolve, basename, extname, dirname, isAbsolute, relative } from 'path';
@@ -10,8 +9,7 @@ import { z } from 'zod';
 import { existsSync, readFileSync } from 'fs';
 import { createRequire } from 'module';
 import { promisify } from 'util';
-import {
-import { MemoryProcessor } from '@mastra/core/memory';
+import { ModelRouterLanguageModel } from '@mastra/core/llm';
 import { tmpdir } from 'os';
 import { openai } from '@ai-sdk/openai';
 import { createStep, createWorkflow } from '@mastra/core/workflows';
@@ -189,7 +187,7 @@ var PackageMergeResultSchema = z.object({
 error: z.string().optional()
 });
 var InstallInputSchema = z.object({
-targetPath: z.string().describe("Path to the project to install packages in")
+targetPath: z.string().optional().describe("Path to the project to install packages in")
 });
 var InstallResultSchema = z.object({
 success: z.boolean(),
@@ -224,7 +222,7 @@ function isInWorkspaceSubfolder(cwd) {
 if (currentDir === cwd) {
 continue;
 }
-console.
+console.info(`Checking for workspace indicators in: ${currentDir}`);
 if (existsSync(resolve(currentDir, "pnpm-workspace.yaml"))) {
 return true;
 }
@@ -244,7 +242,7 @@ function isInWorkspaceSubfolder(cwd) {
 }
 return false;
 } catch (error) {
-console.
+console.warn(`Error in workspace detection: ${error}`);
 return false;
 }
 }
@@ -315,12 +313,12 @@ function spawnWithOutput(command, args, options) {
 }
 async function spawnSWPM(cwd, command, packageNames) {
 try {
-console.
+console.info("Running install command with swpm");
 const swpmPath = createRequire(import.meta.filename).resolve("swpm");
 await spawn(swpmPath, [command, ...packageNames], { cwd });
 return;
 } catch (e) {
-console.
+console.warn("Failed to run install command with swpm", e);
 }
 try {
 let packageManager;
@@ -348,11 +346,11 @@ async function spawnSWPM(cwd, command, packageNames) {
 }
 }
 args.push(...packageNames);
-console.
+console.info(`Falling back to ${packageManager} ${args.join(" ")}`);
 await spawn(packageManager, args, { cwd });
 return;
 } catch (e) {
-console.
+console.warn(`Failed to run install command with native package manager: ${e}`);
 }
 throw new Error(`Failed to run install command with swpm and native package managers`);
 }
@@ -383,10 +381,10 @@ async function logGitState(targetPath, label) {
 const gitStatusResult = await git(targetPath, "status", "--porcelain");
 const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
 const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
-console.
-console.
-console.
-console.
+console.info(`\u{1F4CA} Git state ${label}:`);
+console.info("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
+console.info("Recent commits:", gitLogResult.stdout.trim());
+console.info("Total commits:", gitCountResult.stdout.trim());
 } catch (gitError) {
 console.warn(`Could not get git state ${label}:`, gitError);
 }
@@ -458,18 +456,18 @@ async function gitCheckoutBranch(branchName, targetPath) {
 try {
 if (!await isInsideGitRepo(targetPath)) return;
 await git(targetPath, "checkout", "-b", branchName);
-console.
+console.info(`Created new branch: ${branchName}`);
 } catch (error) {
 const errorStr = error instanceof Error ? error.message : String(error);
 if (errorStr.includes("already exists")) {
 try {
 await git(targetPath, "checkout", branchName);
-console.
+console.info(`Switched to existing branch: ${branchName}`);
 } catch {
 const timestamp = Date.now().toString().slice(-6);
 const uniqueBranchName = `${branchName}-${timestamp}`;
 await git(targetPath, "checkout", "-b", uniqueBranchName);
-console.
+console.info(`Created unique branch: ${uniqueBranchName}`);
 }
 } else {
 throw error;
@@ -479,9 +477,9 @@ async function gitCheckoutBranch(branchName, targetPath) {
 async function backupAndReplaceFile(sourceFile, targetFile) {
 const backupFile = `${targetFile}.backup-${Date.now()}`;
 await copyFile(targetFile, backupFile);
-console.
+console.info(`\u{1F4E6} Created backup: ${basename(backupFile)}`);
 await copyFile(sourceFile, targetFile);
-console.
+console.info(`\u{1F504} Replaced file with template version (backup created)`);
 }
 async function renameAndCopyFile(sourceFile, targetFile) {
 let counter = 1;
@@ -495,17 +493,17 @@ async function renameAndCopyFile(sourceFile, targetFile) {
 counter++;
 }
 await copyFile(sourceFile, uniqueTargetFile);
-console.
+console.info(`\u{1F4DD} Copied with unique name: ${basename(uniqueTargetFile)}`);
 return uniqueTargetFile;
 }
 var isValidMastraLanguageModel = (model) => {
 return model && typeof model === "object" && typeof model.modelId === "string";
 };
-var resolveTargetPath = (inputData,
+var resolveTargetPath = (inputData, requestContext) => {
 if (inputData.targetPath) {
 return inputData.targetPath;
 }
-const contextPath =
+const contextPath = requestContext.get("targetPath");
 if (contextPath) {
 return contextPath;
 }
@@ -544,7 +542,7 @@ var mergeGitignoreFiles = (targetContent, templateContent, templateSlug) => {
 if (!hasConflict) {
 newEntries.push(trimmed);
 } else {
-console.
+console.info(`\u26A0 Skipping conflicting .gitignore rule: ${trimmed} (conflicts with existing rule)`);
 }
 }
 }
@@ -579,7 +577,7 @@ var mergeEnvFiles = (targetContent, templateVariables, templateSlug) => {
 if (!existingVars.has(key)) {
 newVars.push({ key, value });
 } else {
-console.
+console.info(`\u26A0 Skipping existing environment variable: ${key} (already exists in .env)`);
 }
 }
 if (newVars.length === 0) {
@@ -600,7 +598,7 @@ var detectAISDKVersion = async (projectPath) => {
 try {
 const packageJsonPath = join(projectPath, "package.json");
 if (!existsSync(packageJsonPath)) {
-console.
+console.info("No package.json found, defaulting to v2");
 return "v2";
 }
 const packageContent = await readFile(packageJsonPath, "utf-8");
@@ -618,16 +616,16 @@ var detectAISDKVersion = async (projectPath) => {
 if (versionMatch) {
 const majorVersion = parseInt(versionMatch[1]);
 if (majorVersion >= 2) {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v2 specification`);
 return "v2";
 } else {
-console.
+console.info(`Detected ${pkg} v${majorVersion} -> using v1 specification`);
 return "v1";
 }
 }
 }
 }
-console.
+console.info("No AI SDK version detected, defaulting to v2");
 return "v2";
 } catch (error) {
 console.warn(`Failed to detect AI SDK version: ${error instanceof Error ? error.message : String(error)}`);
@@ -658,37 +656,15 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 const { google } = await import('@ai-sdk/google');
 return google(modelId);
 }
-},
-v2: {
-openai: async () => {
-const { openai: openai2 } = await import('@ai-sdk/openai-v5');
-return openai2(modelId);
-},
-anthropic: async () => {
-const { anthropic } = await import('@ai-sdk/anthropic-v5');
-return anthropic(modelId);
-},
-groq: async () => {
-const { groq } = await import('@ai-sdk/groq-v5');
-return groq(modelId);
-},
-xai: async () => {
-const { xai } = await import('@ai-sdk/xai-v5');
-return xai(modelId);
-},
-google: async () => {
-const { google } = await import('@ai-sdk/google-v5');
-return google(modelId);
-}
 }
 };
-const providerFn = providerMap[version][provider];
+const providerFn = version === `v1` ? providerMap[version][provider] : () => new ModelRouterLanguageModel(`${provider}/${modelId}`);
 if (!providerFn) {
 console.error(`Unsupported provider: ${provider}`);
 return null;
 }
 const modelInstance = await providerFn();
-console.
+console.info(`Created ${provider} model instance (${version}): ${modelId}`);
 return modelInstance;
 } catch (error) {
 console.error(`Failed to create model instance: ${error instanceof Error ? error.message : String(error)}`);
@@ -696,13 +672,13 @@ var createModelInstance = async (provider, modelId, version = "v2") => {
 }
 };
 var resolveModel = async ({
-
-defaultModel = openai
+requestContext,
+defaultModel = "openai/gpt-4.1",
 projectPath
 }) => {
-const modelFromContext =
+const modelFromContext = requestContext.get("model");
 if (modelFromContext) {
-console.
+console.info("Using model from request context");
 if (isValidMastraLanguageModel(modelFromContext)) {
 return modelFromContext;
 }
@@ -710,18 +686,18 @@ var resolveModel = async ({
 'Invalid model provided. Model must be a MastraLanguageModel instance (e.g., openai("gpt-4"), anthropic("claude-3-5-sonnet"), etc.)'
 );
 }
-const selectedModel =
+const selectedModel = requestContext.get("selectedModel");
 if (selectedModel?.provider && selectedModel?.modelId && projectPath) {
-console.
+console.info(`Resolving selected model: ${selectedModel.provider}/${selectedModel.modelId}`);
 const version = await detectAISDKVersion(projectPath);
 const modelInstance = await createModelInstance(selectedModel.provider, selectedModel.modelId, version);
 if (modelInstance) {
-
+requestContext.set("model", modelInstance);
 return modelInstance;
 }
 }
-console.
-return defaultModel;
+console.info("Using default model");
+return typeof defaultModel === `string` ? new ModelRouterLanguageModel(defaultModel) : defaultModel;
 };

 // src/defaults.ts
@@ -890,7 +866,7 @@ You have access to an enhanced set of tools based on production coding agent pat
 ### Task Management
 - **taskManager**: Create and track multi-step coding tasks with states (pending, in_progress, completed, blocked). Use this for complex projects that require systematic progress tracking.

-### Code Discovery & Analysis
+### Code Discovery & Analysis
 - **codeAnalyzer**: Analyze codebase structure, discover definitions (functions, classes, interfaces), map dependencies, and understand architectural patterns.
 - **smartSearch**: Intelligent search with context awareness, pattern matching, and relevance scoring.

@@ -928,12 +904,14 @@ import { LibSQLStore } from '@mastra/libsql';
 import { weatherTool } from '../tools/weather-tool';

 export const weatherAgent = new Agent({
+id: 'weather-agent',
 name: 'Weather Agent',
 instructions: \${instructions},
 model: openai('gpt-4o-mini'),
 tools: { weatherTool },
 memory: new Memory({
 storage: new LibSQLStore({
+id: 'mastra-memory-storage',
 url: 'file:../mastra.db', // ask user what database to use, use this as the default
 }),
 }),
@@ -962,8 +940,8 @@ export const weatherTool = createTool({
 conditions: z.string(),
 location: z.string(),
 }),
-execute: async (
-return await getWeather(
+execute: async (inputData) => {
+return await getWeather(inputData.location);
 },
 });
 \`\`\`
@@ -981,7 +959,7 @@ const fetchWeather = createStep({
 city: z.string().describe('The city to get the weather for'),
 }),
 outputSchema: forecastSchema,
-execute: async (
+execute: async (inputData) => {
 if (!inputData) {
 throw new Error('Input data not found');
 }
@@ -1035,7 +1013,8 @@ const planActivities = createStep({
 outputSchema: z.object({
 activities: z.string(),
 }),
-execute: async (
+execute: async (inputData, context) => {
+const mastra = context?.mastra;
 const forecast = inputData;

 if (!forecast) {
@@ -1100,7 +1079,8 @@ export const mastra = new Mastra({
 workflows: { weatherWorkflow },
 agents: { weatherAgent },
 storage: new LibSQLStore({
-
+id: 'mastra-storage',
+// stores observability, evals, ... into memory storage, if it needs to persist, change to file:../mastra.db
 url: ":memory:",
 }),
 logger: new PinoLogger({
@@ -1144,8 +1124,8 @@ export const mastra = new Mastra({
 }).optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.readFile({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.readFile({ ...inputData, projectPath });
 }
 }),
 writeFile: createTool({
@@ -1164,8 +1144,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.writeFile({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.writeFile({ ...inputData, projectPath });
 }
 }),
 listDirectory: createTool({
@@ -1196,8 +1176,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.listDirectory({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.listDirectory({ ...inputData, projectPath });
 }
 }),
 executeCommand: createTool({
@@ -1221,10 +1201,10 @@ export const mastra = new Mastra({
 executionTime: z.number().optional(),
 error: z.string().optional()
 }),
-execute: async (
+execute: async (inputData) => {
 return await _AgentBuilderDefaults.executeCommand({
-...
-workingDirectory:
+...inputData,
+workingDirectory: inputData.workingDirectory || projectPath
 });
 }
 }),
@@ -1262,8 +1242,8 @@ export const mastra = new Mastra({
 ),
 message: z.string()
 }),
-execute: async (
-return await _AgentBuilderDefaults.manageTaskList(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.manageTaskList(inputData);
 }
 }),
 // Advanced File Operations
@@ -1297,8 +1277,8 @@ export const mastra = new Mastra({
 ),
 message: z.string()
 }),
-execute: async (
-return await _AgentBuilderDefaults.performMultiEdit({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.performMultiEdit({ ...inputData, projectPath });
 }
 }),
 replaceLines: createTool({
@@ -1322,8 +1302,8 @@ export const mastra = new Mastra({
 backup: z.string().optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.replaceLines({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.replaceLines({ ...inputData, projectPath });
 }
 }),
 // File diagnostics tool to help debug line replacement issues
@@ -1351,8 +1331,8 @@ export const mastra = new Mastra({
 message: z.string(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.showFileLines({ ...
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.showFileLines({ ...inputData, projectPath });
 }
 }),
 // Enhanced Pattern Search
@@ -1395,8 +1375,8 @@ export const mastra = new Mastra({
 patterns: z.array(z.string())
 })
 }),
-execute: async (
-return await _AgentBuilderDefaults.performSmartSearch(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.performSmartSearch(inputData, projectPath);
 }
 }),
 validateCode: createTool({
@@ -1429,8 +1409,8 @@ export const mastra = new Mastra({
 validationsFailed: z.array(z.string())
 })
 }),
-execute: async (
-const { projectPath: validationProjectPath, validationType, files } =
+execute: async (inputData) => {
+const { projectPath: validationProjectPath, validationType, files } = inputData;
 const targetPath = validationProjectPath || projectPath;
 return await _AgentBuilderDefaults.validateCode({
 projectPath: targetPath,
@@ -1469,8 +1449,8 @@ export const mastra = new Mastra({
 suggestions: z.array(z.string()).optional(),
 error: z.string().optional()
 }),
-execute: async (
-return await _AgentBuilderDefaults.webSearch(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.webSearch(inputData);
 }
 }),
 // Task Completion Signaling
@@ -1499,8 +1479,8 @@ export const mastra = new Mastra({
 summary: z.string(),
 confidence: z.number().min(0).max(100)
 }),
-execute: async (
-return await _AgentBuilderDefaults.signalCompletion(
+execute: async (inputData) => {
+return await _AgentBuilderDefaults.signalCompletion(inputData);
 }
 }),
 manageProject: createTool({
@@ -1525,8 +1505,8 @@ export const mastra = new Mastra({
 details: z.string().optional(),
 error: z.string().optional()
 }),
-execute: async (
-const { action, features, packages } =
+execute: async (inputData) => {
+const { action, features, packages } = inputData;
 try {
 switch (action) {
 case "create":
@@ -1587,8 +1567,8 @@ export const mastra = new Mastra({
 stdout: z.array(z.string()).optional().describe("Server output lines captured during startup"),
 error: z.string().optional()
 }),
-execute: async (
-const { action, port } =
+execute: async (inputData) => {
+const { action, port } = inputData;
 try {
 switch (action) {
 case "start":
@@ -1673,8 +1653,8 @@ export const mastra = new Mastra({
 url: z.string(),
 method: z.string()
 }),
-execute: async (
-const { method, url, baseUrl, headers, body, timeout } =
+execute: async (inputData) => {
+const { method, url, baseUrl, headers, body, timeout } = inputData;
 try {
 return await _AgentBuilderDefaults.makeHttpRequest({
 method,
@@ -1729,7 +1709,7 @@ export const mastra = new Mastra({
 /**
 * Get tools for a specific mode
 */
-static async
+static async listToolsForMode(projectPath, mode = "code-editor") {
 const allTools = await _AgentBuilderDefaults.DEFAULT_TOOLS(projectPath);
 if (mode === "template") {
 return _AgentBuilderDefaults.filterToolsForTemplateBuilder(allTools);
@@ -1756,7 +1736,7 @@ export const mastra = new Mastra({
 error: stderr
 };
 } catch (error) {
-console.
+console.error(error);
 return {
 success: false,
 message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1771,7 +1751,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Installing packages:", JSON.stringify(packages, null, 2));
 const packageStrings = packages.map((p) => `${p.name}`);
 await spawnSWPM(projectPath || "", "add", packageStrings);
 return {
@@ -1795,7 +1775,7 @@ export const mastra = new Mastra({
 projectPath
 }) {
 try {
-console.
+console.info("Upgrading specific packages:", JSON.stringify(packages, null, 2));
 let packageNames = [];
 if (packages && packages.length > 0) {
 packageNames = packages.map((p) => `${p.name}`);
@@ -3102,13 +3082,15 @@ export const mastra = new Mastra({
 }
 }
 };
-var ToolSummaryProcessor = class
+var ToolSummaryProcessor = class {
+id = "tool-summary-processor";
+name = "ToolSummaryProcessor";
 summaryAgent;
 summaryCache = /* @__PURE__ */ new Map();
 constructor({ summaryModel }) {
-super({ name: "ToolSummaryProcessor" });
 this.summaryAgent = new Agent({
-
+id: "tool-summary-agent",
+name: "Tool Summary Agent",
 description: "A summary agent that summarizes tool calls and results",
 instructions: "You are a summary agent that summarizes tool calls and results",
 model: summaryModel
@@ -3142,30 +3124,37 @@ var ToolSummaryProcessor = class extends MemoryProcessor {
 keys: Array.from(this.summaryCache.keys())
 };
 }
-async
+async processInput({
+messages,
+messageList: _messageList
+}) {
 const summaryTasks = [];
 for (const message of messages) {
-if (message.
-for (
-
-
-
-(assistantContent) => assistantContent.type === "tool-call" && assistantContent.toolCallId === content.toolCallId
-)
-);
-const toolCall = Array.isArray(assistantMessageWithToolCall?.content) ? assistantMessageWithToolCall?.content.find(
-(assistantContent) => assistantContent.type === "tool-call" && assistantContent.toolCallId === content.toolCallId
-) : null;
-const cacheKey = this.createCacheKey(toolCall);
+if (message.content.format === 2 && message.content.parts) {
+for (let partIndex = 0; partIndex < message.content.parts.length; partIndex++) {
+const part = message.content.parts[partIndex];
+if (part && part.type === "tool-invocation" && part.toolInvocation?.state === "result") {
+const cacheKey = this.createCacheKey(part.toolInvocation);
 const cachedSummary = this.summaryCache.get(cacheKey);
 if (cachedSummary) {
-content.
+message.content.parts[partIndex] = {
+type: "tool-invocation",
+toolInvocation: {
+state: "result",
+step: part.toolInvocation.step,
+toolCallId: part.toolInvocation.toolCallId,
+toolName: part.toolInvocation.toolName,
+args: part.toolInvocation.args,
+result: `Tool call summary: ${cachedSummary}`
+}
+};
 } else {
 const summaryPromise = this.summaryAgent.generate(
-`Summarize the following tool call: ${JSON.stringify(
+`Summarize the following tool call: ${JSON.stringify(part.toolInvocation)}`
 );
 summaryTasks.push({
-
+message,
+partIndex,
 promise: summaryPromise,
 cacheKey
 });
@@ -3183,10 +3172,24 @@ var ToolSummaryProcessor = class extends MemoryProcessor {
 const summaryResult = result.value;
 const summaryText = summaryResult.text;
 this.summaryCache.set(task.cacheKey, summaryText);
-task.content.
+if (task.message.content.format === 2 && task.message.content.parts) {
+const part = task.message.content.parts[task.partIndex];
+if (part && part.type === "tool-invocation" && part.toolInvocation?.state === "result") {
+task.message.content.parts[task.partIndex] = {
+type: "tool-invocation",
+toolInvocation: {
+state: "result",
+step: part.toolInvocation.step,
+toolCallId: part.toolInvocation.toolCallId,
+toolName: part.toolInvocation.toolName,
+args: part.toolInvocation.args,
+result: `Tool call summary: ${summaryText}`
+}
+};
+}
+}
 } else if (result.status === "rejected") {
 console.warn(`Failed to generate summary for tool call:`, result.reason);
-task.content.result = `Tool call summary: [Summary generation failed]`;
 }
 });
 }
@@ -3206,26 +3209,26 @@ var AgentBuilder = class extends Agent {
 ${config.instructions}` : "";
 const combinedInstructions = additionalInstructions + AgentBuilderDefaults.DEFAULT_INSTRUCTIONS(config.projectPath);
 const agentConfig = {
+id: "agent-builder",
 name: "agent-builder",
 description: "An AI agent specialized in generating Mastra agents, tools, and workflows from natural language requirements.",
 instructions: combinedInstructions,
 model: config.model,
 tools: async () => {
 return {
-...await AgentBuilderDefaults.
+...await AgentBuilderDefaults.listToolsForMode(config.projectPath, config.mode),
 ...config.tools || {}
 };
 },
 memory: new Memory({
-options: AgentBuilderDefaults.DEFAULT_MEMORY_CONFIG
-
-
-
-
-
-
-
-})
+options: AgentBuilderDefaults.DEFAULT_MEMORY_CONFIG
+}),
+inputProcessors: [
+// use the write to disk processor to debug the agent's context
+// new WriteToDiskProcessor({ prefix: 'before-filter' }),
+new ToolSummaryProcessor({ summaryModel: config.summaryModel || config.model })
+// new WriteToDiskProcessor({ prefix: 'after-filter' }),
+]
 };
 super(agentConfig);
 this.builderConfig = config;
@@ -3234,9 +3237,9 @@ ${config.instructions}` : "";
 * Enhanced generate method with AgentBuilder-specific configuration
 * Overrides the base Agent generate method to provide additional project context
 */
-
+generateLegacy = async (messages, generateOptions = {}) => {
 const { maxSteps, ...baseOptions } = generateOptions;
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: generateOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3257,15 +3260,15 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generateLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-
+streamLegacy = async (messages, streamOptions = {}) => {
 const { maxSteps, ...baseOptions } = streamOptions;
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3286,15 +3289,15 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.streamLegacy(messages, enhancedOptions);
 };
 /**
 * Enhanced stream method with AgentBuilder-specific configuration
 * Overrides the base Agent stream method to provide additional project context
 */
-async
+async stream(messages, streamOptions) {
 const { ...baseOptions } = streamOptions || {};
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: streamOptions?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3314,11 +3317,11 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.stream(messages, enhancedOptions);
 }
-async
+async generate(messages, options) {
 const { ...baseOptions } = options || {};
-const originalInstructions = await this.getInstructions({
+const originalInstructions = await this.getInstructions({ requestContext: options?.requestContext });
 const additionalInstructions = baseOptions.instructions;
 let enhancedInstructions = originalInstructions;
 if (additionalInstructions) {
@@ -3338,7 +3341,7 @@ ${additionalInstructions}`;
 this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
 projectPath: this.builderConfig.projectPath
 });
-return super.
+return super.generate(messages, enhancedOptions);
 }
 };
 var cloneTemplateStep = createStep({
@@ -3388,13 +3391,13 @@ var analyzePackageStep = createStep({
 inputSchema: CloneTemplateResultSchema,
 outputSchema: PackageAnalysisSchema,
 execute: async ({ inputData }) => {
-console.
+console.info("Analyzing template package.json...");
 const { templateDir } = inputData;
 const packageJsonPath = join(templateDir, "package.json");
 try {
 const packageJsonContent = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(packageJsonContent);
-console.
+console.info("Template package.json:", JSON.stringify(packageJson, null, 2));
 return {
 dependencies: packageJson.dependencies || {},
 devDependencies: packageJson.devDependencies || {},
@@ -3426,14 +3429,15 @@ var discoverUnitsStep = createStep({
 description: "Discover template units by analyzing the templates directory structure",
 inputSchema: CloneTemplateResultSchema,
 outputSchema: DiscoveryResultSchema,
-execute: async ({ inputData,
+execute: async ({ inputData, requestContext }) => {
 const { templateDir } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
-console.
-const model = await resolveModel({
+console.info("targetPath", targetPath);
+const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
 try {
 const agent = new Agent({
+id: "mastra-project-discoverer",
 model,
 instructions: `You are an expert at analyzing Mastra projects.

@@ -3491,10 +3495,12 @@ Return the actual exported names of the units, as well as the file names.`,
 networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
 other: z.array(z.object({ name: z.string(), file: z.string() })).optional()
 });
-const result = isV2 ? await agent
-
+const result = isV2 ? await tryGenerateWithJsonFallback(agent, prompt, {
+structuredOutput: {
+schema: output
+},
 maxSteps: 100
-}) : await agent.
+}) : await agent.generateLegacy(prompt, {
 experimental_output: output,
 maxSteps: 100
 });
@@ -3518,7 +3524,7 @@ Return the actual exported names of the units, as well as the file names.`,
 template.other?.forEach((otherId) => {
 units.push({ kind: "other", id: otherId.name, file: otherId.file });
 });
-console.
+console.info("Discovered units:", JSON.stringify(units, null, 2));
 if (units.length === 0) {
 throw new Error(`No Mastra units (agents, workflows, tools) found in template.
 Possible causes:
@@ -3568,8 +3574,8 @@ var prepareBranchStep = createStep({
 description: "Create or switch to integration branch before modifications",
 inputSchema: PrepareBranchInputSchema,
 outputSchema: PrepareBranchResultSchema,
-execute: async ({ inputData,
-const targetPath = resolveTargetPath(inputData,
+execute: async ({ inputData, requestContext }) => {
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const branchName = `feat/install-template-${inputData.slug}`;
 await gitCheckoutBranch(branchName, targetPath);
@@ -3593,10 +3599,10 @@ var packageMergeStep = createStep({
 description: "Merge template package.json dependencies into target project",
 inputSchema: PackageMergeInputSchema,
 outputSchema: PackageMergeResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Package merge step starting...");
 const { slug, packageInfo } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const targetPkgPath = join(targetPath, "package.json");
 let targetPkgRaw = "{}";
@@ -3670,9 +3676,9 @@ var installStep = createStep({
 description: "Install packages based on merged package.json",
 inputSchema: InstallInputSchema,
 outputSchema: InstallResultSchema,
-execute: async ({ inputData,
-console.
-const targetPath = resolveTargetPath(inputData,
+execute: async ({ inputData, requestContext }) => {
+console.info("Running install step...");
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 await spawnSWPM(targetPath, "install", []);
 const lock = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock"].map((f) => join(targetPath, f)).find((f) => existsSync(f));
@@ -3698,10 +3704,10 @@ var programmaticFileCopyStep = createStep({
 description: "Programmatically copy template files to target project based on ordered units",
 inputSchema: FileCopyInputSchema,
 outputSchema: FileCopyResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Programmatic file copy step starting...");
 const { orderedUnits, templateDir, commitSha, slug } = inputData;
-const targetPath = resolveTargetPath(inputData,
+const targetPath = resolveTargetPath(inputData, requestContext);
 try {
 const copiedFiles = [];
 const conflicts = [];
@@ -3742,7 +3748,7 @@ var programmaticFileCopyStep = createStep({
 }
 };
 for (const unit of orderedUnits) {
-console.
+console.info(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
 let sourceFile;
 let resolvedUnitFile;
 if (unit.file.includes("/")) {
@@ -3773,7 +3779,7 @@ var programmaticFileCopyStep = createStep({
 }
 const targetDir = dirname(resolvedUnitFile);
 const namingConvention = await analyzeNamingConvention(targetDir);
-console.
+console.info(`Detected naming convention in ${targetDir}: ${namingConvention}`);
 const hasExtension = extname(unit.id) !== "";
 const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
 const fileExtension = extname(unit.file);
@@ -3781,7 +3787,7 @@ var programmaticFileCopyStep = createStep({
 const targetFile = resolve(targetPath, targetDir, convertedFileName);
 if (existsSync(targetFile)) {
 const strategy = determineConflictStrategy(unit, targetFile);
-console.
+console.info(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
 switch (strategy) {
 case "skip":
 conflicts.push({
@@ -3790,7 +3796,7 @@ var programmaticFileCopyStep = createStep({
 sourceFile: unit.file,
 targetFile: `${targetDir}/${convertedFileName}`
 });
-console.
+console.info(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
 continue;
 case "backup-and-replace":
 try {
@@ -3800,7 +3806,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(
 `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
 );
 continue;
@@ -3821,7 +3827,7 @@ var programmaticFileCopyStep = createStep({
 destination: uniqueTargetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${basename(uniqueTargetFile)}`);
 continue;
 } catch (renameError) {
 conflicts.push({
@@ -3850,7 +3856,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetFile,
 unit: { kind: unit.kind, id: unit.id }
 });
-console.
+console.info(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
 } catch (copyError) {
 conflicts.push({
 unit: { kind: unit.kind, id: unit.id },
@@ -3871,7 +3877,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Copied tsconfig.json from template to target");
 } else {
 const minimalTsconfig = {
 compilerOptions: {
@@ -3893,7 +3899,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetTsconfig,
 unit: { kind: "other", id: "tsconfig.json" }
 });
-console.
+console.info("\u2713 Generated minimal tsconfig.json in target");
 }
 }
 } catch (e) {
@@ -3918,7 +3924,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetMastraIndex,
 unit: { kind: "other", id: "mastra-index" }
 });
-console.
+console.info("\u2713 Copied Mastra index file from template");
 }
 }
 } catch (e) {
@@ -3942,7 +3948,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore" }
 });
-console.
+console.info("\u2713 Copied .gitignore from template to target");
 } else {
 const targetContent = await readFile(targetGitignore, "utf-8");
 const templateContent = await readFile(templateGitignore, "utf-8");
@@ -3955,9 +3961,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetGitignore,
 unit: { kind: "other", id: "gitignore-merge" }
 });
-console.
+console.info(`\u2713 Merged template .gitignore entries into existing .gitignore (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new .gitignore entries to add from template");
 }
 }
 }
@@ -3985,7 +3991,7 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env" }
 });
-console.
+console.info(`\u2713 Created .env file with ${Object.keys(variables).length} template variables`);
 } else {
 const targetContent = await readFile(targetEnv, "utf-8");
 const mergedContent = mergeEnvFiles(targetContent, variables, slug);
@@ -3997,9 +4003,9 @@ var programmaticFileCopyStep = createStep({
 destination: targetEnv,
 unit: { kind: "other", id: "env-merge" }
 });
-console.
+console.info(`\u2713 Merged new environment variables into existing .env file (${addedLines} new entries)`);
 } else {
-console.
+console.info("\u2139 No new environment variables to add (all already exist in .env)");
 }
 }
 }
@@ -4020,13 +4026,13 @@ var programmaticFileCopyStep = createStep({
|
|
|
4020
4026
|
fileList,
|
|
4021
4027
|
{ skipIfNoStaged: true }
|
|
4022
4028
|
);
|
|
4023
|
-
console.
|
|
4029
|
+
console.info(`\u2713 Committed ${copiedFiles.length} copied files`);
|
|
4024
4030
|
} catch (commitError) {
|
|
4025
4031
|
console.warn("Failed to commit copied files:", commitError);
|
|
4026
4032
|
}
|
|
4027
4033
|
}
|
|
4028
4034
|
const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
|
|
4029
|
-
console.
|
|
4035
|
+
console.info(message);
|
|
4030
4036
|
return {
|
|
4031
4037
|
success: true,
|
|
4032
4038
|
copiedFiles,
|
|
@@ -4050,12 +4056,12 @@ var intelligentMergeStep = createStep({
|
|
|
4050
4056
|
description: "Use AgentBuilder to intelligently merge template files",
|
|
4051
4057
|
inputSchema: IntelligentMergeInputSchema,
|
|
4052
4058
|
outputSchema: IntelligentMergeResultSchema,
|
|
4053
|
-
execute: async ({ inputData,
|
|
4054
|
-
console.
|
|
4059
|
+
execute: async ({ inputData, requestContext }) => {
|
|
4060
|
+
console.info("Intelligent merge step starting...");
|
|
4055
4061
|
const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
|
|
4056
|
-
const targetPath = resolveTargetPath(inputData,
|
|
4062
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
4057
4063
|
try {
|
|
4058
|
-
const model = await resolveModel({
|
|
4064
|
+
const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
|
|
4059
4065
|
const copyFileTool = createTool({
|
|
4060
4066
|
id: "copy-file",
|
|
4061
4067
|
description: "Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).",
|
|
@@ -4068,9 +4074,9 @@ var intelligentMergeStep = createStep({
|
|
|
4068
4074
|
message: z.string(),
|
|
4069
4075
|
error: z.string().optional()
|
|
4070
4076
|
}),
|
|
4071
|
-
execute: async (
|
|
4077
|
+
execute: async (input) => {
|
|
4072
4078
|
try {
|
|
4073
|
-
const { sourcePath, destinationPath } =
|
|
4079
|
+
const { sourcePath, destinationPath } = input;
|
|
4074
4080
|
const resolvedSourcePath = resolve(templateDir, sourcePath);
|
|
4075
4081
|
const resolvedDestinationPath = resolve(targetPath, destinationPath);
|
|
4076
4082
|
if (existsSync(resolvedSourcePath) && !existsSync(dirname(resolvedDestinationPath))) {
|
|
@@ -4178,8 +4184,8 @@ Template information:
|
|
|
4178
4184
|
const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
|
|
4179
4185
|
const targetMastraIndex = resolve(targetPath, "src/mastra/index.ts");
|
|
4180
4186
|
const mastraIndexExists = existsSync(targetMastraIndex);
|
|
4181
|
-
console.
|
|
4182
|
-
console.
|
|
4187
|
+
console.info(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
|
|
4188
|
+
console.info(
|
|
4183
4189
|
"Registrable components:",
|
|
4184
4190
|
registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
|
|
4185
4191
|
);
|
|
@@ -4193,7 +4199,7 @@ Template information:
|
|
|
4193
4199
|
notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
|
|
4194
4200
|
});
|
|
4195
4201
|
}
|
|
4196
|
-
console.
|
|
4202
|
+
console.info(`Creating task list with ${tasks.length} tasks...`);
|
|
4197
4203
|
await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
|
|
4198
4204
|
await logGitState(targetPath, "before intelligent merge");
|
|
4199
4205
|
const prompt = `
|
|
@@ -4240,17 +4246,17 @@ For each task:
|
|
|
4240
4246
|
Start by listing your tasks and work through them systematically!
|
|
4241
4247
|
`;
|
|
4242
4248
|
const isV2 = model.specificationVersion === "v2";
|
|
4243
|
-
const result = isV2 ? await agentBuilder.
|
|
4249
|
+
const result = isV2 ? await agentBuilder.stream(prompt) : await agentBuilder.streamLegacy(prompt);
|
|
4244
4250
|
const actualResolutions = [];
|
|
4245
4251
|
for await (const chunk of result.fullStream) {
|
|
4246
4252
|
if (chunk.type === "step-finish" || chunk.type === "step-start") {
|
|
4247
4253
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
4248
|
-
console.
|
|
4254
|
+
console.info({
|
|
4249
4255
|
type: chunk.type,
|
|
4250
4256
|
msgId: chunkData.messageId
|
|
4251
4257
|
});
|
|
4252
4258
|
} else {
|
|
4253
|
-
console.
|
|
4259
|
+
console.info(JSON.stringify(chunk, null, 2));
|
|
4254
4260
|
if (chunk.type === "tool-result") {
|
|
4255
4261
|
const chunkData = "payload" in chunk ? chunk.payload : chunk;
|
|
4256
4262
|
if (chunkData.toolName === "manageTaskList") {
|
|
@@ -4264,7 +4270,7 @@ Start by listing your tasks and work through them systematically!
|
|
|
4264
4270
|
content: toolResult.content || "",
|
|
4265
4271
|
notes: toolResult.notes
|
|
4266
4272
|
});
|
|
4267
|
-
console.
|
|
4273
|
+
console.info(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
|
|
4268
4274
|
}
|
|
4269
4275
|
} catch (parseError) {
|
|
4270
4276
|
console.warn("Failed to parse task management result:", parseError);
|
|
@@ -4318,13 +4324,13 @@ var validationAndFixStep = createStep({
|
|
|
4318
4324
|
description: "Validate the merged template code and fix any issues using a specialized agent",
|
|
4319
4325
|
inputSchema: ValidationFixInputSchema,
|
|
4320
4326
|
outputSchema: ValidationFixResultSchema,
|
|
4321
|
-
execute: async ({ inputData,
|
|
4322
|
-
console.
|
|
4327
|
+
execute: async ({ inputData, requestContext }) => {
|
|
4328
|
+
console.info("Validation and fix step starting...");
|
|
4323
4329
|
const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
|
|
4324
|
-
const targetPath = resolveTargetPath(inputData,
|
|
4330
|
+
const targetPath = resolveTargetPath(inputData, requestContext);
|
|
4325
4331
|
const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
|
|
4326
4332
|
if (!hasChanges) {
|
|
4327
|
-
console.
|
|
4333
|
+
console.info("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
|
|
4328
4334
|
return {
|
|
4329
4335
|
success: true,
|
|
4330
4336
|
applied: false,
|
|
@@ -4336,15 +4342,16 @@ var validationAndFixStep = createStep({
 }
 };
 }
-console.
+console.info(
 `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
 );
 let currentIteration = 1;
 try {
-const model = await resolveModel({
-const allTools = await AgentBuilderDefaults.
+const model = await resolveModel({ requestContext, projectPath: targetPath, defaultModel: openai("gpt-4.1") });
+const allTools = await AgentBuilderDefaults.listToolsForMode(targetPath, "template");
 const validationAgent = new Agent({
-
+id: "code-validator-fixer",
+name: "Code Validator Fixer",
 description: "Specialized agent for validating and fixing template integration issues",
 instructions: `You are a code validation and fixing specialist. Your job is to:
 
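Agents built inside the workflow now declare an explicit `id` alongside `name`, and the model is resolved from the request context with a default. A hedged sketch of that construction; the instruction text is illustrative and the tool wiring is omitted, so this is not the package's own code:

```ts
import { Agent } from '@mastra/core/agent';
import { openai } from '@ai-sdk/openai';

// Hedged sketch: a validation agent with a stable id, a human-readable name,
// and a default model. The real code resolves the model via resolveModel()
// and passes a restricted tool set from AgentBuilderDefaults.
function makeValidationAgent() {
  return new Agent({
    id: 'code-validator-fixer',   // stable identifier, new in this version
    name: 'Code Validator Fixer', // human-readable label
    description: 'Validates and fixes template integration issues',
    instructions: 'You are a code validation and fixing specialist.',
    model: openai('gpt-4.1'),
  });
}
```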
@@ -4462,7 +4469,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 executeCommand: allTools.executeCommand
 }
 });
-console.
+console.info("Starting validation and fix agent with internal loop...");
 let validationResults = {
 valid: false,
 errorsFixed: 0,
@@ -4473,7 +4480,7 @@ Be thorough and methodical. Always use listDirectory to verify actual file exist
 // Store the actual error details
 };
 while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
-console.
+console.info(`
 === Validation Iteration ${currentIteration} ===`);
 const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.
 
@@ -4482,9 +4489,11 @@ Start by running validateCode with all validation types to get a complete pictur
 Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
 const isV2 = model.specificationVersion === "v2";
 const output = z.object({ success: z.boolean() });
-const result = isV2 ? await validationAgent
-
-
+const result = isV2 ? await tryStreamWithJsonFallback(validationAgent, iterationPrompt, {
+structuredOutput: {
+schema: output
+}
+}) : await validationAgent.streamLegacy(iterationPrompt, {
 experimental_output: output
 });
 let iterationErrors = 0;
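For v2 models the structured-output path now goes through `tryStreamWithJsonFallback` (imported from `@mastra/core/agent` at the top of this file), while legacy models keep `experimental_output`. A minimal sketch of the same branch; `agent`, `model`, and `prompt` are placeholders:

```ts
import { Agent, tryStreamWithJsonFallback } from '@mastra/core/agent';
import { z } from 'zod';

// Sketch: stream with a zod schema as structured output on v2 models,
// fall back to the legacy experimental_output option otherwise.
async function streamStructured(agent: Agent, model: { specificationVersion?: string }, prompt: string) {
  const output = z.object({ success: z.boolean() });
  const isV2 = model.specificationVersion === 'v2';
  return isV2
    ? await tryStreamWithJsonFallback(agent, prompt, { structuredOutput: { schema: output } })
    : await agent.streamLegacy(prompt, { experimental_output: output });
}
```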
@@ -4493,13 +4502,13 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 for await (const chunk of result.fullStream) {
 if (chunk.type === "step-finish" || chunk.type === "step-start") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
-console.
+console.info({
 type: chunk.type,
 msgId: chunkData.messageId,
 iteration: currentIteration
 });
 } else {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "tool-result") {
 const chunkData = "payload" in chunk ? chunk.payload : chunk;
@@ -4508,7 +4517,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 lastValidationResult = toolResult;
 if (toolResult?.summary) {
 iterationErrors = toolResult.summary.totalErrors || 0;
-console.
+console.info(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
 }
 }
 }
@@ -4520,12 +4529,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 if (iterationErrors > 0 && lastValidationResult?.errors) {
 validationResults.lastValidationErrors = lastValidationResult.errors;
 }
-console.
+console.info(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
 if (iterationErrors === 0) {
-console.
+console.info(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
 break;
 } else if (currentIteration >= maxIterations) {
-console.
+console.info(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
 break;
 }
 currentIteration++;
@@ -4570,7 +4579,7 @@ Previous iterations may have fixed some issues, so start by re-running validateC
 } finally {
 try {
 await rm(templateDir, { recursive: true, force: true });
-console.
+console.info(`\u2713 Cleaned up template directory: ${templateDir}`);
 } catch (cleanupError) {
 console.warn("Failed to cleanup template directory:", cleanupError);
 }
@@ -4745,7 +4754,7 @@ var agentBuilderTemplateWorkflow = createWorkflow({
 }).commit();
 async function mergeTemplateBySlug(slug, targetPath) {
 const template = await getMastraTemplate(slug);
-const run = await agentBuilderTemplateWorkflow.
+const run = await agentBuilderTemplateWorkflow.createRun();
 return await run.start({
 inputData: {
 repo: template.githubUrl,
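Workflow runs are now obtained with `createRun()` before being started. A short sketch; `myWorkflow` stands in for any committed workflow and the input fields are illustrative:

```ts
// Sketch: obtain a run handle with createRun(), then start it.
const run = await myWorkflow.createRun();
const result = await run.start({
  inputData: { repo: 'https://github.com/org/template', targetPath: process.cwd() },
});
console.info(result);
```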
@@ -5052,7 +5061,7 @@ var planningIterationStep = createStep({
 outputSchema: PlanningIterationResultSchema,
 suspendSchema: PlanningIterationSuspendSchema,
 resumeSchema: PlanningIterationResumeSchema,
-execute: async ({ inputData, resumeData, suspend,
+execute: async ({ inputData, resumeData, suspend, requestContext }) => {
 const {
 action,
 workflowName,
@@ -5063,12 +5072,10 @@ var planningIterationStep = createStep({
 research,
 userAnswers
 } = inputData;
-console.
+console.info("Starting planning iteration...");
 const qaKey = "workflow-builder-qa";
-let storedQAPairs =
+let storedQAPairs = requestContext.get(qaKey) || [];
 const newAnswers = { ...userAnswers || {}, ...resumeData?.answers || {} };
-console.log("before", storedQAPairs);
-console.log("newAnswers", newAnswers);
 if (Object.keys(newAnswers).length > 0) {
 storedQAPairs = storedQAPairs.map((pair) => {
 if (newAnswers[pair.question.id]) {
@@ -5080,15 +5087,12 @@ var planningIterationStep = createStep({
 }
 return pair;
 });
-
+requestContext.set(qaKey, storedQAPairs);
 }
-console.log("after", storedQAPairs);
-console.log(
-`Current Q&A state: ${storedQAPairs.length} question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
-);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const planningAgent = new Agent({
+id: "workflow-planning-agent",
 model,
 instructions: taskPlanningPrompts.planningAgent.instructions({
 storedQAPairs
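Question-and-answer state is now parked on the request context (`requestContext.get` / `requestContext.set`) instead of the removed runtime-context store, so it survives suspend and resume within a request. A hedged sketch of that pattern inside a step; the key name and schemas are illustrative:

```ts
import { createStep } from '@mastra/core/workflows';
import { z } from 'zod';

// Sketch: read and write per-request state through requestContext.
const qaStep = createStep({
  id: 'qa-state-demo',
  inputSchema: z.object({ answer: z.string().optional() }),
  outputSchema: z.object({ answers: z.array(z.string()) }),
  execute: async ({ inputData, requestContext }) => {
    const key = 'demo-qa';
    const answers = (requestContext.get(key) as string[] | undefined) ?? [];
    if (inputData.answer) answers.push(inputData.answer);
    requestContext.set(key, answers); // persists across suspend/resume within the request
    return { answers };
  },
});
```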
@@ -5117,8 +5121,10 @@ var planningIterationStep = createStep({
 projectStructure,
 research
 });
-const result = await planningAgent.
-
+const result = await planningAgent.generate(planningPrompt, {
+structuredOutput: {
+schema: PlanningAgentOutputSchema
+}
 // maxSteps: 15,
 });
 const planResult = await result.object;
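`generate` calls now take a `structuredOutput.schema` option, and the typed result is read from `result.object`. A minimal sketch with a hypothetical schema standing in for `PlanningAgentOutputSchema`:

```ts
import { Agent } from '@mastra/core/agent';
import { z } from 'zod';

// Sketch: ask an agent for output conforming to a zod schema and read the
// parsed value from result.object.
const PlanSchema = z.object({
  planComplete: z.boolean(),
  tasks: z.array(z.object({ id: z.string(), content: z.string() })),
});

async function plan(planningAgent: Agent, prompt: string) {
  const result = await planningAgent.generate(prompt, {
    structuredOutput: { schema: PlanSchema },
  });
  return await result.object;
}
```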
@@ -5133,8 +5139,8 @@ var planningIterationStep = createStep({
 };
 }
 if (planResult.questions && planResult.questions.length > 0 && !planResult.planComplete) {
-console.
-console.
+console.info(`Planning needs user clarification: ${planResult.questions.length} questions`);
+console.info(planResult.questions);
 const newQAPairs = planResult.questions.map((question) => ({
 question,
 answer: null,
@@ -5142,8 +5148,8 @@ var planningIterationStep = createStep({
 answeredAt: null
 }));
 storedQAPairs = [...storedQAPairs, ...newQAPairs];
-
-console.
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Updated Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return suspend({
@@ -5155,9 +5161,9 @@ var planningIterationStep = createStep({
 }
 });
 }
-console.
-
-console.
+console.info(`Planning complete with ${planResult.tasks.length} tasks`);
+requestContext.set(qaKey, storedQAPairs);
+console.info(
 `Final Q&A state: ${storedQAPairs.length} total question-answer pairs, ${storedQAPairs.filter((p) => p.answer).length} answered`
 );
 return {
@@ -5200,7 +5206,7 @@ var taskApprovalStep = createStep({
 execute: async ({ inputData, resumeData, suspend }) => {
 const { tasks } = inputData;
 if (!resumeData?.approved && resumeData?.approved !== false) {
-console.
+console.info(`Requesting user approval for ${tasks.length} tasks`);
 const summary = `Task List for Approval:
 
 ${tasks.length} tasks planned:
@@ -5213,14 +5219,14 @@ ${tasks.map((task, i) => `${i + 1}. [${task.priority.toUpperCase()}] ${task.cont
 });
 }
 if (resumeData.approved) {
-console.
+console.info("Task list approved by user");
 return {
 approved: true,
 tasks,
 message: "Task list approved, ready for execution"
 };
 } else {
-console.
+console.info("Task list rejected by user");
 return {
 approved: false,
 tasks,
@@ -5237,7 +5243,7 @@ var planningAndApprovalWorkflow = createWorkflow({
 outputSchema: TaskApprovalOutputSchema,
 steps: [planningIterationStep, taskApprovalStep]
 }).dountil(planningIterationStep, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow planning check: planComplete=${inputData.planComplete}`);
 return inputData.planComplete === true;
 }).map(async ({ inputData }) => {
 return {
@@ -5301,7 +5307,7 @@ const myStep = createStep({
 - `mastra`: Access to Mastra instance (agents, tools, other workflows)
 - `getStepResult(stepInstance)`: Get results from previous steps
 - `getInitData()`: Access original workflow input data
-- `
+- `requestContext`: Runtime dependency injection context
 - `runCount`: Number of times this step has run (useful for retries)
 
 ### **\u{1F504} CONTROL FLOW METHODS**
@@ -5380,10 +5386,10 @@ const toolStep = createStep(myTool);
 
 // Method 2: Call tool in execute function
 const step = createStep({
-execute: async ({ inputData,
+execute: async ({ inputData, requestContext }) => {
 const result = await myTool.execute({
 context: inputData,
-
+requestContext
 });
 return result;
 }
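The embedded guide above now forwards `requestContext` whenever a step calls a tool directly. A hedged sketch of that call shape; `myTool` is a placeholder for any `createTool` instance and the schemas are illustrative:

```ts
import { createStep } from '@mastra/core/workflows';
import { z } from 'zod';
import { myTool } from './tools'; // placeholder module for any createTool(...) tool

// Sketch: invoke a tool from inside a step's execute and forward the
// request context so the tool sees the same per-request state.
const toolCallingStep = createStep({
  id: 'tool-calling-step',
  inputSchema: z.object({ text: z.string() }),
  outputSchema: z.any(),
  execute: async ({ inputData, requestContext }) => {
    return await myTool.execute({ context: inputData, requestContext });
  },
});
```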
@@ -5427,7 +5433,7 @@ export const mastra = new Mastra({
 sendEmailWorkflow, // Use camelCase for keys
 dataProcessingWorkflow
 },
-storage: new LibSQLStore({ url: 'file:./mastra.db' }), // Required for suspend/resume
+storage: new LibSQLStore({ id: 'mastra-storage', url: 'file:./mastra.db' }), // Required for suspend/resume
 });
 ```
 
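The guide's storage example now passes an explicit `id` to `LibSQLStore` next to the database URL. A sketch of the updated registration, assuming the LibSQL adapter from `@mastra/libsql`:

```ts
import { Mastra } from '@mastra/core';
import { LibSQLStore } from '@mastra/libsql';

// Sketch: storage now takes an explicit id in addition to the database URL;
// storage remains required for workflow suspend/resume.
export const mastra = new Mastra({
  storage: new LibSQLStore({ id: 'mastra-storage', url: 'file:./mastra.db' }),
});
```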
@@ -5475,7 +5481,7 @@ export const mastra = new Mastra({
 **Running Workflows:**
 ```typescript
 // Create and start run
-const run = await workflow.
+const run = await workflow.createRun();
 const result = await run.start({ inputData: {...} });
 
 // Stream execution for real-time monitoring
@@ -5499,7 +5505,7 @@ run.watch((event) => console.log(event));
 - Use workflows as steps: `.then(otherWorkflow)`
 - Enable complex workflow composition
 
-**
+**Request Context:**
 - Pass shared data across all steps
 - Enable dependency injection patterns
 
@@ -5672,11 +5678,11 @@ var restrictedTaskManager = createTool({
 ),
 message: z.string()
 }),
-execute: async (
+execute: async (input) => {
 const adaptedContext = {
-...
-action:
-tasks:
+...input,
+action: input.action,
+tasks: input.tasks?.map((task) => ({
 ...task,
 priority: task.priority || "medium"
 }))
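The restricted task manager's executor now receives a single `input` value and normalizes task priorities before delegating. A hedged sketch of that adaptation as a plain function; the field names come from the hunk above, everything else is illustrative:

```ts
// Sketch: normalize the incoming tool input before delegating to the
// underlying task-list manager. `manageTaskList` stands in for the
// AgentBuilderDefaults helper used elsewhere in this file.
type TaskInput = {
  action: string;
  tasks?: Array<{ id: string; content: string; priority?: string }>;
};

async function runRestrictedTaskManager(
  input: TaskInput,
  manageTaskList: (ctx: TaskInput) => Promise<unknown>,
) {
  const adaptedContext = {
    ...input,
    action: input.action,
    tasks: input.tasks?.map((task) => ({
      ...task,
      priority: task.priority || 'medium', // default when the model omits a priority
    })),
  };
  return manageTaskList(adaptedContext);
}
```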
@@ -5691,13 +5697,13 @@ var workflowDiscoveryStep = createStep({
 description: "Discover existing workflows in the project",
 inputSchema: WorkflowBuilderInputSchema,
 outputSchema: WorkflowDiscoveryResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext: _requestContext }) => {
+console.info("Starting workflow discovery...");
 const { projectPath = process.cwd() } = inputData;
 try {
 const workflowsPath = join(projectPath, "src/mastra/workflows");
 if (!existsSync(workflowsPath)) {
-console.
+console.info("No workflows directory found");
 return {
 success: true,
 workflows: [],
@@ -5726,7 +5732,7 @@ var workflowDiscoveryStep = createStep({
 }
 }
 }
-console.
+console.info(`Discovered ${workflows.length} existing workflows`);
 return {
 success: true,
 workflows,
@@ -5750,8 +5756,8 @@ var projectDiscoveryStep = createStep({
 description: "Analyze the project structure and setup",
 inputSchema: WorkflowDiscoveryResultSchema,
 outputSchema: ProjectDiscoveryResultSchema,
-execute: async ({ inputData: _inputData,
-console.
+execute: async ({ inputData: _inputData, requestContext: _requestContext }) => {
+console.info("Starting project discovery...");
 try {
 const projectPath = process.cwd();
 const projectStructure = {
@@ -5772,7 +5778,7 @@ var projectDiscoveryStep = createStep({
 console.warn("Failed to read package.json:", error);
 }
 }
-console.
+console.info("Project discovery completed");
 return {
 success: true,
 structure: {
@@ -5812,11 +5818,12 @@ var workflowResearchStep = createStep({
 description: "Research Mastra workflows and gather relevant documentation",
 inputSchema: ProjectDiscoveryResultSchema,
 outputSchema: WorkflowResearchResultSchema,
-execute: async ({ inputData,
-console.
+execute: async ({ inputData, requestContext }) => {
+console.info("Starting workflow research...");
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const researchAgent = new Agent({
+id: "workflow-research-agent",
 model,
 instructions: workflowBuilderPrompts.researchAgent.instructions,
 name: "Workflow Research Agent"
@@ -5827,8 +5834,10 @@ var workflowResearchStep = createStep({
 dependencies: inputData.dependencies,
 hasWorkflowsDir: inputData.structure.hasWorkflowsDir
 });
-const result = await researchAgent.
-
+const result = await researchAgent.generate(researchPrompt, {
+structuredOutput: {
+schema: WorkflowResearchResultSchema
+}
 // stopWhen: stepCountIs(10),
 });
 const researchResult = await result.object;
@@ -5845,7 +5854,7 @@ var workflowResearchStep = createStep({
 error: "Research agent failed to generate valid response"
 };
 }
-console.
+console.info("Research completed successfully");
 return {
 success: true,
 documentation: {
@@ -5879,7 +5888,7 @@ var taskExecutionStep = createStep({
 outputSchema: TaskExecutionResultSchema,
 suspendSchema: TaskExecutionSuspendSchema,
 resumeSchema: TaskExecutionResumeSchema,
-execute: async ({ inputData, resumeData, suspend,
+execute: async ({ inputData, resumeData, suspend, requestContext }) => {
 const {
 action,
 workflowName,
@@ -5891,12 +5900,12 @@ var taskExecutionStep = createStep({
 research,
 projectPath
 } = inputData;
-console.
-console.
+console.info(`Starting task execution for ${action}ing workflow: ${workflowName}`);
+console.info(`Executing ${tasks.length} tasks using AgentBuilder stream...`);
 try {
-const model = await resolveModel({
+const model = await resolveModel({ requestContext });
 const currentProjectPath = projectPath || process.cwd();
-console.
+console.info("Pre-populating taskManager with planned tasks...");
 const taskManagerContext = {
 action: "create",
 tasks: tasks.map((task) => ({
@@ -5909,7 +5918,7 @@ var taskExecutionStep = createStep({
 }))
 };
 const taskManagerResult = await AgentBuilderDefaults.manageTaskList(taskManagerContext);
-console.
+console.info(`Task manager initialized with ${taskManagerResult.tasks.length} tasks`);
 if (!taskManagerResult.success) {
 throw new Error(`Failed to initialize task manager: ${taskManagerResult.message}`);
 }
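Before streaming the execution agent, the step seeds the shared task manager with the planned tasks and later re-lists them to track progress. A hedged sketch of that bookkeeping, assuming `AgentBuilderDefaults` is importable from this package and simplifying the task shape:

```ts
import { AgentBuilderDefaults } from '@mastra/agent-builder'; // assumed export

// Sketch: seed the task manager with planned tasks, then poll it to see what
// remains. Task fields are simplified relative to the real schema.
const planned = [
  { id: 'task-1', content: 'Scaffold the workflow file', priority: 'high', status: 'pending' },
  { id: 'task-2', content: 'Register the workflow in src/mastra/index.ts', priority: 'medium', status: 'pending' },
];

const created = await AgentBuilderDefaults.manageTaskList({ action: 'create', tasks: planned });
if (!created.success) throw new Error(`Failed to initialize task manager: ${created.message}`);

const status = await AgentBuilderDefaults.manageTaskList({ action: 'list' });
const pending = status.tasks.filter((t) => t.status !== 'completed');
console.info(`Remaining tasks: ${pending.map((t) => t.id).join(', ')}`);
```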
@@ -5939,18 +5948,11 @@ ${workflowBuilderPrompts.validation.instructions}`
 tasks,
 resumeData
 });
-const originalInstructions = await executionAgent.getInstructions({
-const additionalInstructions = executionAgent.instructions;
-let enhancedInstructions = originalInstructions;
-if (additionalInstructions) {
-enhancedInstructions = `${originalInstructions}
-
-${additionalInstructions}`;
-}
+const originalInstructions = await executionAgent.getInstructions({ requestContext });
 const enhancedOptions = {
 stopWhen: stepCountIs(100),
 temperature: 0.3,
-instructions:
+instructions: originalInstructions
 };
 let finalResult = null;
 let allTasksCompleted = false;
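The manual instruction-concatenation block is gone; the step now resolves the agent's instructions once via `getInstructions({ requestContext })` and passes them straight through as an override. A small sketch of the new shape; the option values besides `instructions` are illustrative:

```ts
import { Agent } from '@mastra/core/agent';

// Sketch: resolve the agent's instructions for this request and reuse them as
// an explicit override on the call options. `requestContext` is whatever the
// step's execute handler received.
async function buildCallOptions(executionAgent: Agent, requestContext: any) {
  const originalInstructions = await executionAgent.getInstructions({ requestContext });
  return {
    temperature: 0.3,
    instructions: originalInstructions,
  };
}
```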
@@ -5962,13 +5964,13 @@ ${additionalInstructions}`;
 const currentTaskStatus = await AgentBuilderDefaults.manageTaskList({ action: "list" });
 const completedTasks = currentTaskStatus.tasks.filter((task) => task.status === "completed");
 const pendingTasks = currentTaskStatus.tasks.filter((task) => task.status !== "completed");
-console.
+console.info(`
 === EXECUTION ITERATION ${iterationCount} ===`);
-console.
-console.
+console.info(`Completed tasks: ${completedTasks.length}/${expectedTaskIds.length}`);
+console.info(`Remaining tasks: ${pendingTasks.map((t) => t.id).join(", ")}`);
 allTasksCompleted = pendingTasks.length === 0;
 if (allTasksCompleted) {
-console.
+console.info("All tasks completed! Breaking execution loop.");
 break;
 }
 const iterationPrompt = iterationCount === 1 ? executionPrompt : `${workflowBuilderPrompts.executionAgent.iterationPrompt({
@@ -5979,7 +5981,7 @@ ${additionalInstructions}`;
 })}
 
 ${workflowBuilderPrompts.validation.instructions}`;
-const stream = await executionAgent.
+const stream = await executionAgent.stream(iterationPrompt, {
 structuredOutput: {
 schema: TaskExecutionIterationInputSchema(tasks.length),
 model
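Execution now streams with a `structuredOutput` block that also names the model used to shape the output, and the typed result is read from `stream.object` after the stream is drained. A hedged sketch; the schema is illustrative and the text-chunk handling assumes `text-delta` chunks carry `payload.text`:

```ts
import { Agent } from '@mastra/core/agent';
import { z } from 'zod';

// Sketch: stream an agent run with structured output, log text chunks,
// then read the parsed object once the stream has been consumed.
const IterationResult = z.object({
  status: z.enum(['completed', 'needs_clarification', 'in_progress']),
  message: z.string(),
});

async function runIteration(executionAgent: Agent, model: any, prompt: string) {
  const stream = await executionAgent.stream(prompt, {
    structuredOutput: { schema: IterationResult, model },
  });
  for await (const chunk of stream.fullStream) {
    if (chunk.type === 'text-delta') console.info(chunk.payload.text); // assumption: text chunks carry payload.text
  }
  await stream.consumeStream();
  return await stream.object;
}
```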
@@ -5992,19 +5994,19 @@ ${workflowBuilderPrompts.validation.instructions}`;
 finalMessage += chunk.payload.text;
 }
 if (chunk.type === "step-finish") {
-console.
+console.info(finalMessage);
 finalMessage = "";
 }
 if (chunk.type === "tool-result") {
-console.
+console.info(JSON.stringify(chunk, null, 2));
 }
 if (chunk.type === "finish") {
-console.
+console.info(chunk);
 }
 }
 await stream.consumeStream();
 finalResult = await stream.object;
-console.
+console.info(`Iteration ${iterationCount} result:`, { finalResult });
 if (!finalResult) {
 throw new Error(`No result received from agent execution on iteration ${iterationCount}`);
 }
@@ -6012,17 +6014,17 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const postCompletedTasks = postIterationTaskStatus.tasks.filter((task) => task.status === "completed");
 const postPendingTasks = postIterationTaskStatus.tasks.filter((task) => task.status !== "completed");
 allTasksCompleted = postPendingTasks.length === 0;
-console.
+console.info(
 `After iteration ${iterationCount}: ${postCompletedTasks.length}/${expectedTaskIds.length} tasks completed in taskManager`
 );
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
+console.info(
 `Agent needs clarification on iteration ${iterationCount}: ${finalResult.questions.length} questions`
 );
 break;
 }
 if (finalResult.status === "completed" && !allTasksCompleted) {
-console.
+console.info(
 `Agent claimed completion but taskManager shows pending tasks: ${postPendingTasks.map((t) => t.id).join(", ")}`
 );
 }
@@ -6035,8 +6037,8 @@ ${workflowBuilderPrompts.validation.instructions}`;
 throw new Error("No result received from agent execution");
 }
 if (finalResult.status === "needs_clarification" && finalResult.questions && finalResult.questions.length > 0) {
-console.
-console.
+console.info(`Agent needs clarification: ${finalResult.questions.length} questions`);
+console.info("finalResult", JSON.stringify(finalResult, null, 2));
 return suspend({
 questions: finalResult.questions,
 currentProgress: finalResult.progress,
@@ -6052,7 +6054,7 @@ ${workflowBuilderPrompts.validation.instructions}`;
 const finalAllTasksCompleted = finalPendingTasks.length === 0;
 const success = finalAllTasksCompleted && !finalResult.error;
 const message = success ? `Successfully completed workflow ${action} - all ${tasksExpected} tasks completed after ${iterationCount} iteration(s): ${finalResult.message}` : `Workflow execution finished with issues after ${iterationCount} iteration(s): ${finalResult.message}. Completed: ${tasksCompleted}/${tasksExpected} tasks`;
-console.
+console.info(message);
 const missingTasks = finalPendingTasks.map((task) => task.id);
 const validationErrors = [];
 if (finalResult.error) {
@@ -6120,7 +6122,7 @@ var workflowBuilderWorkflow = createWorkflow({
 userAnswers: void 0
 };
 }).dountil(planningAndApprovalWorkflow, async ({ inputData }) => {
-console.
+console.info(`Sub-workflow check: approved=${inputData.approved}`);
 return inputData.approved === true;
 }).map(async ({ getStepResult, getInitData }) => {
 const initData = getInitData();
|