@mastra/agent-builder 0.0.0-experimental-agent-builder-20250815195917 → 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -7,7 +7,7 @@ var child_process = require('child_process');
  var promises = require('fs/promises');
  var path = require('path');
  var tools = require('@mastra/core/tools');
- var mcp = require('@mastra/mcp');
+ var ignore = require('ignore');
  var zod = require('zod');
  var fs = require('fs');
  var module$1 = require('module');
@@ -17,7 +17,11 @@ var os = require('os');
  var openai = require('@ai-sdk/openai');
  var workflows = require('@mastra/core/workflows');

- // src/index.ts
+ function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
+
+ var ignore__default = /*#__PURE__*/_interopDefault(ignore);
+
+ // src/agent/index.ts
  var UNIT_KINDS = ["mcp-server", "tool", "workflow", "agent", "integration", "network", "other"];
  var TemplateUnitSchema = zod.z.object({
  kind: zod.z.enum(UNIT_KINDS),
@@ -30,7 +34,7 @@ zod.z.object({
  description: zod.z.string().optional(),
  units: zod.z.array(TemplateUnitSchema)
  });
- var MergeInputSchema = zod.z.object({
+ var AgentBuilderInputSchema = zod.z.object({
  repo: zod.z.string().describe("Git URL or local path of the template repo"),
  ref: zod.z.string().optional().describe("Tag/branch/commit to checkout (defaults to main/master)"),
  slug: zod.z.string().optional().describe("Slug for branch/scripts; defaults to inferred from repo"),
@@ -42,51 +46,342 @@ zod.z.object({
  templateDir: zod.z.string(),
  units: zod.z.array(TemplateUnitSchema)
  });
+ var CopiedFileSchema = zod.z.object({
+ source: zod.z.string(),
+ destination: zod.z.string(),
+ unit: zod.z.object({
+ kind: zod.z.enum(UNIT_KINDS),
+ id: zod.z.string()
+ })
+ });
+ var ConflictSchema = zod.z.object({
+ unit: zod.z.object({
+ kind: zod.z.enum(UNIT_KINDS),
+ id: zod.z.string()
+ }),
+ issue: zod.z.string(),
+ sourceFile: zod.z.string(),
+ targetFile: zod.z.string()
+ });
+ var FileCopyInputSchema = zod.z.object({
+ orderedUnits: zod.z.array(TemplateUnitSchema),
+ templateDir: zod.z.string(),
+ commitSha: zod.z.string(),
+ slug: zod.z.string(),
+ targetPath: zod.z.string().optional()
+ });
+ var FileCopyResultSchema = zod.z.object({
+ success: zod.z.boolean(),
+ copiedFiles: zod.z.array(CopiedFileSchema),
+ conflicts: zod.z.array(ConflictSchema),
+ message: zod.z.string(),
+ error: zod.z.string().optional()
+ });
+ var ConflictResolutionSchema = zod.z.object({
+ unit: zod.z.object({
+ kind: zod.z.enum(UNIT_KINDS),
+ id: zod.z.string()
+ }),
+ issue: zod.z.string(),
+ resolution: zod.z.string()
+ });
+ var IntelligentMergeInputSchema = zod.z.object({
+ conflicts: zod.z.array(ConflictSchema),
+ copiedFiles: zod.z.array(CopiedFileSchema),
+ templateDir: zod.z.string(),
+ commitSha: zod.z.string(),
+ slug: zod.z.string(),
+ targetPath: zod.z.string().optional(),
+ branchName: zod.z.string().optional()
+ });
+ var IntelligentMergeResultSchema = zod.z.object({
+ success: zod.z.boolean(),
+ applied: zod.z.boolean(),
+ message: zod.z.string(),
+ conflictsResolved: zod.z.array(ConflictResolutionSchema),
+ error: zod.z.string().optional()
+ });
+ var ValidationResultsSchema = zod.z.object({
+ valid: zod.z.boolean(),
+ errorsFixed: zod.z.number(),
+ remainingErrors: zod.z.number()
+ });
+ var ValidationFixInputSchema = zod.z.object({
+ commitSha: zod.z.string(),
+ slug: zod.z.string(),
+ targetPath: zod.z.string().optional(),
+ templateDir: zod.z.string(),
+ orderedUnits: zod.z.array(TemplateUnitSchema),
+ copiedFiles: zod.z.array(CopiedFileSchema),
+ conflictsResolved: zod.z.array(ConflictResolutionSchema).optional(),
+ maxIterations: zod.z.number().optional().default(5)
+ });
+ var ValidationFixResultSchema = zod.z.object({
+ success: zod.z.boolean(),
+ applied: zod.z.boolean(),
+ message: zod.z.string(),
+ validationResults: ValidationResultsSchema,
+ error: zod.z.string().optional()
+ });
  var ApplyResultSchema = zod.z.object({
  success: zod.z.boolean(),
  applied: zod.z.boolean(),
  branchName: zod.z.string().optional(),
+ message: zod.z.string(),
+ validationResults: ValidationResultsSchema.optional(),
+ error: zod.z.string().optional(),
+ errors: zod.z.array(zod.z.string()).optional(),
+ stepResults: zod.z.object({
+ cloneSuccess: zod.z.boolean().optional(),
+ analyzeSuccess: zod.z.boolean().optional(),
+ discoverSuccess: zod.z.boolean().optional(),
+ orderSuccess: zod.z.boolean().optional(),
+ prepareBranchSuccess: zod.z.boolean().optional(),
+ packageMergeSuccess: zod.z.boolean().optional(),
+ installSuccess: zod.z.boolean().optional(),
+ copySuccess: zod.z.boolean().optional(),
+ mergeSuccess: zod.z.boolean().optional(),
+ validationSuccess: zod.z.boolean().optional(),
+ filesCopied: zod.z.number(),
+ conflictsSkipped: zod.z.number(),
+ conflictsResolved: zod.z.number()
+ }).optional()
+ });
+ var CloneTemplateResultSchema = zod.z.object({
+ templateDir: zod.z.string(),
+ commitSha: zod.z.string(),
+ slug: zod.z.string(),
+ success: zod.z.boolean().optional(),
+ error: zod.z.string().optional()
+ });
+ var PackageAnalysisSchema = zod.z.object({
+ name: zod.z.string().optional(),
+ version: zod.z.string().optional(),
+ description: zod.z.string().optional(),
+ dependencies: zod.z.record(zod.z.string()).optional(),
+ devDependencies: zod.z.record(zod.z.string()).optional(),
+ peerDependencies: zod.z.record(zod.z.string()).optional(),
+ scripts: zod.z.record(zod.z.string()).optional(),
+ success: zod.z.boolean().optional(),
+ error: zod.z.string().optional()
+ });
+ var DiscoveryResultSchema = zod.z.object({
+ units: zod.z.array(TemplateUnitSchema),
+ success: zod.z.boolean().optional(),
+ error: zod.z.string().optional()
+ });
+ var OrderedUnitsSchema = zod.z.object({
+ orderedUnits: zod.z.array(TemplateUnitSchema),
+ success: zod.z.boolean().optional(),
+ error: zod.z.string().optional()
+ });
+ var PackageMergeInputSchema = zod.z.object({
+ commitSha: zod.z.string(),
+ slug: zod.z.string(),
+ targetPath: zod.z.string().optional(),
+ packageInfo: PackageAnalysisSchema
+ });
+ var PackageMergeResultSchema = zod.z.object({
+ success: zod.z.boolean(),
+ applied: zod.z.boolean(),
+ message: zod.z.string(),
+ error: zod.z.string().optional()
+ });
+ var InstallInputSchema = zod.z.object({
+ targetPath: zod.z.string().describe("Path to the project to install packages in")
+ });
+ var InstallResultSchema = zod.z.object({
+ success: zod.z.boolean(),
+ error: zod.z.string().optional()
+ });
+ var PrepareBranchInputSchema = zod.z.object({
+ slug: zod.z.string(),
+ commitSha: zod.z.string().optional(),
+ // from clone-template if relevant
+ targetPath: zod.z.string().optional()
+ });
+ var PrepareBranchResultSchema = zod.z.object({
+ branchName: zod.z.string(),
+ success: zod.z.boolean().optional(),
  error: zod.z.string().optional()
  });

  // src/utils.ts
  var exec = util.promisify(child_process.exec);
+ var execFile = util.promisify(child_process.execFile);
+ function isInWorkspaceSubfolder(cwd) {
+ try {
+ const currentPackageJson = path.resolve(cwd, "package.json");
+ if (!fs.existsSync(currentPackageJson)) {
+ return false;
+ }
+ let currentDir = cwd;
+ let previousDir = "";
+ while (currentDir !== previousDir && currentDir !== "/") {
+ previousDir = currentDir;
+ currentDir = path.dirname(currentDir);
+ if (currentDir === cwd) {
+ continue;
+ }
+ console.log(`Checking for workspace indicators in: ${currentDir}`);
+ if (fs.existsSync(path.resolve(currentDir, "pnpm-workspace.yaml"))) {
+ return true;
+ }
+ const parentPackageJson = path.resolve(currentDir, "package.json");
+ if (fs.existsSync(parentPackageJson)) {
+ try {
+ const parentPkg = JSON.parse(fs.readFileSync(parentPackageJson, "utf-8"));
+ if (parentPkg.workspaces) {
+ return true;
+ }
+ } catch {
+ }
+ }
+ if (fs.existsSync(path.resolve(currentDir, "lerna.json"))) {
+ return true;
+ }
+ }
+ return false;
+ } catch (error) {
+ console.log(`Error in workspace detection: ${error}`);
+ return false;
+ }
+ }
  function spawn(command, args, options) {
  return new Promise((resolve4, reject) => {
  const childProcess = child_process.spawn(command, args, {
- // stdio: 'inherit',
+ stdio: "inherit",
+ // Enable proper stdio handling
  ...options
  });
  childProcess.on("error", (error) => {
  reject(error);
  });
+ childProcess.on("close", (code) => {
+ if (code === 0) {
+ resolve4(void 0);
+ } else {
+ reject(new Error(`Command failed with exit code ${code}`));
+ }
+ });
+ });
+ }
+ async function isGitInstalled() {
+ try {
+ await spawnWithOutput("git", ["--version"], {});
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ async function isInsideGitRepo(cwd) {
+ try {
+ if (!await isGitInstalled()) return false;
+ const { stdout } = await spawnWithOutput("git", ["rev-parse", "--is-inside-work-tree"], { cwd });
+ return stdout.trim() === "true";
+ } catch {
+ return false;
+ }
+ }
+ function spawnWithOutput(command, args, options) {
+ return new Promise((resolvePromise, rejectPromise) => {
+ const childProcess = child_process.spawn(command, args, {
+ ...options
+ });
+ let stdout = "";
  let stderr = "";
- childProcess.stderr?.on("data", (message) => {
- stderr += message;
+ childProcess.on("error", (error) => {
+ rejectPromise(error);
+ });
+ childProcess.stdout?.on("data", (chunk) => {
+ process.stdout.write(chunk);
+ stdout += chunk?.toString?.() ?? String(chunk);
+ });
+ childProcess.stderr?.on("data", (chunk) => {
+ stderr += chunk?.toString?.() ?? String(chunk);
+ process.stderr.write(chunk);
  });
  childProcess.on("close", (code) => {
  if (code === 0) {
- resolve4(void 0);
+ resolvePromise({ stdout, stderr, code: code ?? 0 });
  } else {
- reject(new Error(stderr));
+ const err = new Error(stderr || `Command failed: ${command} ${args.join(" ")}`);
+ err.code = code;
+ rejectPromise(err);
  }
  });
  });
  }
  async function spawnSWPM(cwd, command, packageNames) {
- await spawn(module$1.createRequire(undefined).resolve("swpm"), [command, ...packageNames], {
- cwd
- });
+ try {
+ console.log("Running install command with swpm");
+ const swpmPath = module$1.createRequire(undefined).resolve("swpm");
+ await spawn(swpmPath, [command, ...packageNames], { cwd });
+ return;
+ } catch (e) {
+ console.log("Failed to run install command with swpm", e);
+ }
+ try {
+ let packageManager;
+ if (fs.existsSync(path.resolve(cwd, "pnpm-lock.yaml"))) {
+ packageManager = "pnpm";
+ } else if (fs.existsSync(path.resolve(cwd, "yarn.lock"))) {
+ packageManager = "yarn";
+ } else {
+ packageManager = "npm";
+ }
+ let nativeCommand = command === "add" ? "add" : command === "install" ? "install" : command;
+ const args = [nativeCommand];
+ if (nativeCommand === "install") {
+ const inWorkspace = isInWorkspaceSubfolder(cwd);
+ if (packageManager === "pnpm") {
+ args.push("--force");
+ if (inWorkspace) {
+ args.push("--ignore-workspace");
+ }
+ } else if (packageManager === "npm") {
+ args.push("--yes");
+ if (inWorkspace) {
+ args.push("--ignore-workspaces");
+ }
+ }
+ }
+ args.push(...packageNames);
+ console.log(`Falling back to ${packageManager} ${args.join(" ")}`);
+ await spawn(packageManager, args, { cwd });
+ return;
+ } catch (e) {
+ console.log(`Failed to run install command with native package manager: ${e}`);
+ }
+ throw new Error(`Failed to run install command with swpm and native package managers`);
  }
  function kindWeight(kind) {
  const idx = UNIT_KINDS.indexOf(kind);
  return idx === -1 ? UNIT_KINDS.length : idx;
  }
+ async function fetchMastraTemplates() {
+ try {
+ const response = await fetch("https://mastra.ai/api/templates.json");
+ const data = await response.json();
+ return data;
+ } catch (error) {
+ throw new Error(`Failed to fetch Mastra templates: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ async function getMastraTemplate(slug) {
+ const templates = await fetchMastraTemplates();
+ const template = templates.find((t) => t.slug === slug);
+ if (!template) {
+ throw new Error(`Template "${slug}" not found. Available templates: ${templates.map((t) => t.slug).join(", ")}`);
+ }
+ return template;
+ }
  async function logGitState(targetPath, label) {
  try {
- const gitStatusResult = await exec("git status --porcelain", { cwd: targetPath });
- const gitLogResult = await exec("git log --oneline -3", { cwd: targetPath });
- const gitCountResult = await exec("git rev-list --count HEAD", { cwd: targetPath });
+ if (!await isInsideGitRepo(targetPath)) return;
+ const gitStatusResult = await git(targetPath, "status", "--porcelain");
+ const gitLogResult = await git(targetPath, "log", "--oneline", "-3");
+ const gitCountResult = await git(targetPath, "rev-list", "--count", "HEAD");
  console.log(`\u{1F4CA} Git state ${label}:`);
  console.log("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
  console.log("Recent commits:", gitLogResult.stdout.trim());
@@ -95,6 +390,91 @@ async function logGitState(targetPath, label) {
  console.warn(`Could not get git state ${label}:`, gitError);
  }
  }
+ async function git(cwd, ...args) {
+ const { stdout, stderr } = await spawnWithOutput("git", args, { cwd });
+ return { stdout: stdout ?? "", stderr: stderr ?? "" };
+ }
+ async function gitClone(repo, destDir, cwd) {
+ await git(process.cwd(), "clone", repo, destDir);
+ }
+ async function gitCheckoutRef(cwd, ref) {
+ if (!await isInsideGitRepo(cwd)) return;
+ await git(cwd, "checkout", ref);
+ }
+ async function gitRevParse(cwd, rev) {
+ if (!await isInsideGitRepo(cwd)) return "";
+ const { stdout } = await git(cwd, "rev-parse", rev);
+ return stdout.trim();
+ }
+ async function gitAddFiles(cwd, files) {
+ if (!files || files.length === 0) return;
+ if (!await isInsideGitRepo(cwd)) return;
+ await git(cwd, "add", ...files);
+ }
+ async function gitAddAll(cwd) {
+ if (!await isInsideGitRepo(cwd)) return;
+ await git(cwd, "add", ".");
+ }
+ async function gitHasStagedChanges(cwd) {
+ if (!await isInsideGitRepo(cwd)) return false;
+ const { stdout } = await git(cwd, "diff", "--cached", "--name-only");
+ return stdout.trim().length > 0;
+ }
+ async function gitCommit(cwd, message, opts) {
+ try {
+ if (!await isInsideGitRepo(cwd)) return false;
+ if (opts?.skipIfNoStaged) {
+ const has = await gitHasStagedChanges(cwd);
+ if (!has) return false;
+ }
+ const args = ["commit", "-m", message];
+ if (opts?.allowEmpty) args.push("--allow-empty");
+ await git(cwd, ...args);
+ return true;
+ } catch (e) {
+ const msg = e instanceof Error ? e.message : String(e);
+ if (/nothing to commit/i.test(msg) || /no changes added to commit/i.test(msg)) {
+ return false;
+ }
+ throw e;
+ }
+ }
+ async function gitAddAndCommit(cwd, message, files, opts) {
+ try {
+ if (!await isInsideGitRepo(cwd)) return false;
+ if (files && files.length > 0) {
+ await gitAddFiles(cwd, files);
+ } else {
+ await gitAddAll(cwd);
+ }
+ return gitCommit(cwd, message, opts);
+ } catch (e) {
+ console.error(`Failed to add and commit files: ${e instanceof Error ? e.message : String(e)}`);
+ return false;
+ }
+ }
+ async function gitCheckoutBranch(branchName, targetPath) {
+ try {
+ if (!await isInsideGitRepo(targetPath)) return;
+ await git(targetPath, "checkout", "-b", branchName);
+ console.log(`Created new branch: ${branchName}`);
+ } catch (error) {
+ const errorStr = error instanceof Error ? error.message : String(error);
+ if (errorStr.includes("already exists")) {
+ try {
+ await git(targetPath, "checkout", branchName);
+ console.log(`Switched to existing branch: ${branchName}`);
+ } catch {
+ const timestamp = Date.now().toString().slice(-6);
+ const uniqueBranchName = `${branchName}-${timestamp}`;
+ await git(targetPath, "checkout", "-b", uniqueBranchName);
+ console.log(`Created unique branch: ${uniqueBranchName}`);
+ }
+ } else {
+ throw error;
+ }
+ }
+ }
  async function backupAndReplaceFile(sourceFile, targetFile) {
  const backupFile = `${targetFile}.backup-${Date.now()}`;
  await promises.copyFile(targetFile, backupFile);
@@ -505,26 +885,6 @@ export const mastra = new Mastra({
  });
  \`\`\`

- ### MCPClient
- \`\`\`
- // ./src/mcp/client.ts
-
- import { MCPClient } from '@mastra/mcp-client';
-
- // leverage existing MCP servers, or create your own
- export const mcpClient = new MCPClient({
- id: 'example-mcp-client',
- servers: {
- some-mcp-server: {
- command: 'npx',
- args: ["some-mcp-server"],
- },
- },
- });
-
- export const tools = await mcpClient.getTools();
- \`\`\`
-
  </examples>`;
  static DEFAULT_MEMORY_CONFIG = {
  lastMessages: 20
@@ -537,28 +897,7 @@ export const tools = await mcpClient.getTools();
  network: "src/mastra/networks"
  };
  static DEFAULT_TOOLS = async (projectPath, mode = "code-editor") => {
- const mcpClient = new mcp.MCPClient({
- id: "agent-builder-mcp-client",
- servers: {
- // web: {
- // command: 'node',
- // args: ['/Users/daniellew/Documents/Mastra/web-search/build/index.js'],
- // },
- docs: {
- command: "npx",
- args: ["-y", "@mastra/mcp-docs-server"]
- }
- }
- });
- const tools$1 = await mcpClient.getTools();
- const filteredTools = {};
- Object.keys(tools$1).forEach((key) => {
- if (!key.includes("MastraCourse")) {
- filteredTools[key] = tools$1[key];
- }
- });
  const agentBuilderTools = {
- ...filteredTools,
  readFile: tools.createTool({
  id: "read-file",
  description: "Read contents of a file with optional line range selection.",
@@ -737,6 +1076,27 @@ export const tools = await mcpClient.getTools();
  return await _AgentBuilderDefaults.performMultiEdit({ ...context, projectPath });
  }
  }),
+ replaceLines: tools.createTool({
+ id: "replace-lines",
+ description: "Replace specific line ranges in files with new content. Perfect for fixing multiline imports, function signatures, or other structured code.",
+ inputSchema: zod.z.object({
+ filePath: zod.z.string().describe("Path to the file to edit"),
+ startLine: zod.z.number().describe("Starting line number to replace (1-indexed)"),
+ endLine: zod.z.number().describe("Ending line number to replace (1-indexed, inclusive)"),
+ newContent: zod.z.string().describe("New content to replace the lines with"),
+ createBackup: zod.z.boolean().default(false).describe("Create backup file before editing")
+ }),
+ outputSchema: zod.z.object({
+ success: zod.z.boolean(),
+ message: zod.z.string(),
+ linesReplaced: zod.z.number().optional(),
+ backup: zod.z.string().optional(),
+ error: zod.z.string().optional()
+ }),
+ execute: async ({ context }) => {
+ return await _AgentBuilderDefaults.replaceLines({ ...context, projectPath });
+ }
+ }),
  // Interactive Communication
  askClarification: tools.createTool({
  id: "ask-clarification",
@@ -809,7 +1169,7 @@ export const tools = await mcpClient.getTools();
  })
  }),
  execute: async ({ context }) => {
- return await _AgentBuilderDefaults.performSmartSearch(context);
+ return await _AgentBuilderDefaults.performSmartSearch(context, projectPath);
  }
  }),
  validateCode: tools.createTool({
@@ -1180,12 +1540,12 @@ export const tools = await mcpClient.getTools();
  */
  static async createMastraProject({ features, projectName }) {
  try {
- const args = ["pnpx", "create", "mastra@latest", projectName ?? "", "-l", "openai", "-k", "skip"];
+ const args = ["pnpx", "create-mastra@latest", projectName?.replace(/[;&|`$(){}\[\]]/g, "") ?? "", "-l", "openai"];
  if (features && features.length > 0) {
  args.push("--components", features.join(","));
  }
  args.push("--example");
- const { stdout, stderr } = await exec(args.join(" "));
+ const { stdout, stderr } = await spawnWithOutput(args[0], args.slice(1), {});
  return {
  success: true,
  projectPath: `./${projectName}`,
@@ -1194,6 +1554,7 @@ export const tools = await mcpClient.getTools();
  error: stderr
  };
  } catch (error) {
+ console.log(error);
  return {
  success: false,
  message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
@@ -1369,9 +1730,17 @@ export const tools = await mcpClient.getTools();
  * Stop the Mastra server
  */
  static async stopMastraServer({ port = 4200, projectPath: _projectPath }) {
+ if (typeof port !== "number" || !Number.isInteger(port) || port < 1 || port > 65535) {
+ return {
+ success: false,
+ status: "error",
+ error: `Invalid port value: ${String(port)}`
+ };
+ }
  try {
- const { stdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
- if (!stdout.trim() || stdout.trim() === "No process found") {
+ const { stdout } = await execFile("lsof", ["-ti", String(port)]);
+ const effectiveStdout = stdout.trim() ? stdout : "No process found";
+ if (!effectiveStdout || effectiveStdout === "No process found") {
  return {
  success: true,
  status: "stopped",
@@ -1387,8 +1756,9 @@ export const tools = await mcpClient.getTools();
  try {
  process.kill(pid, "SIGTERM");
  killedPids.push(pid);
- } catch {
+ } catch (e) {
  failedPids.push(pid);
+ console.warn(`Failed to kill process ${pid}:`, e);
  }
  }
  if (killedPids.length === 0) {
@@ -1399,10 +1769,16 @@ export const tools = await mcpClient.getTools();
  error: `Could not kill PIDs: ${failedPids.join(", ")}`
  };
  }
+ if (failedPids.length > 0) {
+ console.warn(
+ `Killed ${killedPids.length} processes but failed to kill ${failedPids.length} processes: ${failedPids.join(", ")}`
+ );
+ }
  await new Promise((resolve4) => setTimeout(resolve4, 2e3));
  try {
- const { stdout: checkStdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
- if (checkStdout.trim() && checkStdout.trim() !== "No process found") {
+ const { stdout: checkStdoutRaw } = await execFile("lsof", ["-ti", String(port)]);
+ const checkStdout = checkStdoutRaw.trim() ? checkStdoutRaw : "No process found";
+ if (checkStdout && checkStdout !== "No process found") {
  const remainingPids = checkStdout.trim().split("\n").filter((pid) => pid.trim());
  for (const pidStr of remainingPids) {
  const pid = parseInt(pidStr.trim());
@@ -1414,8 +1790,9 @@ export const tools = await mcpClient.getTools();
  }
  }
  await new Promise((resolve4) => setTimeout(resolve4, 1e3));
- const { stdout: finalCheck } = await exec(`lsof -ti:${port} || echo "No process found"`);
- if (finalCheck.trim() && finalCheck.trim() !== "No process found") {
+ const { stdout: finalCheckRaw } = await execFile("lsof", ["-ti", String(port)]);
+ const finalCheck = finalCheckRaw.trim() ? finalCheckRaw : "No process found";
+ if (finalCheck && finalCheck !== "No process found") {
  return {
  success: false,
  status: "unknown",
@@ -1473,8 +1850,9 @@ export const tools = await mcpClient.getTools();
  }
  } catch {
  try {
- const { stdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
- const hasProcess = stdout.trim() && stdout.trim() !== "No process found";
+ const { stdout } = await execFile("lsof", ["-ti", String(port)]);
+ const effectiveStdout = stdout.trim() ? stdout : "No process found";
+ const hasProcess = effectiveStdout && effectiveStdout !== "No process found";
  return {
  success: Boolean(hasProcess),
  status: hasProcess ? "starting" : "stopped",
@@ -1505,9 +1883,9 @@ export const tools = await mcpClient.getTools();
  const execOptions = { cwd: projectPath };
  if (validationType.includes("types")) {
  try {
- const filePattern = files?.length ? files.join(" ") : "";
- const tscCommand = files?.length ? `npx tsc --noEmit ${filePattern}` : "npx tsc --noEmit";
- await exec(tscCommand, execOptions);
+ const fileArgs = files?.length ? files : [];
+ const args = ["tsc", "--noEmit", ...fileArgs];
+ await execFile("npx", args, execOptions);
  validationsPassed.push("types");
  } catch (error) {
  let tsOutput = "";
@@ -1528,9 +1906,9 @@ export const tools = await mcpClient.getTools();
  }
  if (validationType.includes("lint")) {
  try {
- const filePattern = files?.length ? files.join(" ") : ".";
- const eslintCommand = `npx eslint ${filePattern} --format json`;
- const { stdout } = await exec(eslintCommand, execOptions);
+ const fileArgs = files?.length ? files : ["."];
+ const eslintArgs = ["eslint", ...fileArgs, "--format", "json"];
+ const { stdout } = await execFile("npx", eslintArgs, execOptions);
  if (stdout) {
  const eslintResults = JSON.parse(stdout);
  const eslintErrors = _AgentBuilderDefaults.parseESLintErrors(eslintResults);
@@ -1664,6 +2042,11 @@ export const tools = await mcpClient.getTools();
  if (!_AgentBuilderDefaults.taskStorage) {
  _AgentBuilderDefaults.taskStorage = /* @__PURE__ */ new Map();
  }
+ const sessions = Array.from(_AgentBuilderDefaults.taskStorage.keys());
+ if (sessions.length > 10) {
+ const sessionsToRemove = sessions.slice(0, sessions.length - 10);
+ sessionsToRemove.forEach((session) => _AgentBuilderDefaults.taskStorage.delete(session));
+ }
  const sessionId = "current";
  const existingTasks = _AgentBuilderDefaults.taskStorage.get(sessionId) || [];
  try {
@@ -1760,7 +2143,35 @@ export const tools = await mcpClient.getTools();
  static async analyzeCode(context) {
  try {
  const { action, path, language, depth = 3 } = context;
- const languagePattern = language ? `*.${language}` : "*";
+ const ALLOWED_LANGUAGES = [
+ "js",
+ "ts",
+ "jsx",
+ "tsx",
+ "py",
+ "java",
+ "go",
+ "cpp",
+ "c",
+ "cs",
+ "rb",
+ "php",
+ "rs",
+ "kt",
+ "swift",
+ "m",
+ "scala",
+ "sh",
+ "json",
+ "yaml",
+ "yml",
+ "toml",
+ "ini"
+ ];
+ let languagePattern = "*";
+ if (language && ALLOWED_LANGUAGES.includes(language)) {
+ languagePattern = `*.${language}`;
+ }
  switch (action) {
  case "definitions":
  const definitionPatterns = [
@@ -1773,9 +2184,15 @@ export const tools = await mcpClient.getTools();
  const definitions = [];
  for (const pattern of definitionPatterns) {
  try {
- const { stdout } = await exec(
- `rg -n "${pattern}" "${path}" --type ${languagePattern} --max-depth ${depth}`
- );
+ const { stdout } = await execFile("rg", [
+ "-n",
+ pattern,
+ path,
+ "--type",
+ languagePattern,
+ "--max-depth",
+ String(depth)
+ ]);
  const matches = stdout.split("\n").filter((line) => line.trim());
  matches.forEach((match) => {
  const parts = match.split(":");
@@ -1813,7 +2230,7 @@ export const tools = await mcpClient.getTools();
  const dependencies = [];
  for (const pattern of depPatterns) {
  try {
- const { stdout } = await exec(`rg -n "${pattern}" "${path}" --type ${languagePattern}`);
+ const { stdout } = await execFile("rg", ["-n", pattern, path, "--type", languagePattern]);
  const matches = stdout.split("\n").filter((line) => line.trim());
  matches.forEach((match) => {
  const parts = match.split(":");
@@ -1840,10 +2257,11 @@ export const tools = await mcpClient.getTools();
  message: `Found ${dependencies.length} dependencies`
  };
  case "structure":
- const { stdout: lsOutput } = await exec(`find "${path}" -type f -name "${languagePattern}" | head -1000`);
- const files = lsOutput.split("\n").filter((line) => line.trim());
- const { stdout: dirOutput } = await exec(`find "${path}" -type d | wc -l`);
- const directories = parseInt(dirOutput.trim());
+ const { stdout: lsOutput } = await execFile("find", [path, "-type", "f", "-name", languagePattern]);
+ const allFiles = lsOutput.split("\n").filter((line) => line.trim());
+ const files = allFiles.slice(0, 1e3);
+ const { stdout: dirOutput } = await execFile("find", [path, "-type", "d"]);
+ const directories = dirOutput.split("\n").filter((line) => line.trim()).length;
  const languages = {};
  files.forEach((file) => {
  const ext = file.split(".").pop();
@@ -1883,56 +2301,57 @@ export const tools = await mcpClient.getTools();
  * Perform multiple edits across files atomically
  */
  static async performMultiEdit(context) {
+ const { operations, createBackup = false, projectPath = process.cwd() } = context;
  const results = [];
  try {
- const { projectPath } = context;
- for (const operation of context.operations) {
- const resolvedPath = path.isAbsolute(operation.filePath) ? operation.filePath : path.resolve(projectPath || process.cwd(), operation.filePath);
- const result = {
- filePath: resolvedPath,
- editsApplied: 0,
- errors: [],
- backup: void 0
- };
+ for (const operation of operations) {
+ const filePath = path.isAbsolute(operation.filePath) ? operation.filePath : path.join(projectPath, operation.filePath);
+ let editsApplied = 0;
+ const errors = [];
+ let backup;
  try {
- const originalContent = await promises.readFile(resolvedPath, "utf-8");
- if (context.createBackup) {
- const backupPath = `${resolvedPath}.backup.${Date.now()}`;
- await promises.writeFile(backupPath, originalContent);
- result.backup = backupPath;
+ if (createBackup) {
+ const backupPath = `${filePath}.backup.${Date.now()}`;
+ const originalContent = await promises.readFile(filePath, "utf-8");
+ await promises.writeFile(backupPath, originalContent, "utf-8");
+ backup = backupPath;
  }
- let modifiedContent = originalContent;
+ let content = await promises.readFile(filePath, "utf-8");
  for (const edit of operation.edits) {
- if (edit.replaceAll) {
- const regex = new RegExp(edit.oldString.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g");
- const matches = modifiedContent.match(regex);
+ const { oldString, newString, replaceAll = false } = edit;
+ if (replaceAll) {
+ const regex = new RegExp(oldString.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g");
+ const matches = content.match(regex);
  if (matches) {
- modifiedContent = modifiedContent.replace(regex, edit.newString);
- result.editsApplied += matches.length;
+ content = content.replace(regex, newString);
+ editsApplied += matches.length;
  }
  } else {
- if (modifiedContent.includes(edit.oldString)) {
- modifiedContent = modifiedContent.replace(edit.oldString, edit.newString);
- result.editsApplied++;
+ if (content.includes(oldString)) {
+ content = content.replace(oldString, newString);
+ editsApplied++;
  } else {
- result.errors.push(`String not found: "${edit.oldString.substring(0, 50)}..."`);
+ errors.push(`String not found: "${oldString.substring(0, 50)}${oldString.length > 50 ? "..." : ""}"`);
  }
  }
  }
- if (result.editsApplied > 0) {
- await promises.writeFile(resolvedPath, modifiedContent);
- }
+ await promises.writeFile(filePath, content, "utf-8");
  } catch (error) {
- result.errors.push(error instanceof Error ? error.message : String(error));
+ errors.push(`File operation error: ${error instanceof Error ? error.message : String(error)}`);
  }
- results.push(result);
+ results.push({
+ filePath: operation.filePath,
+ editsApplied,
+ errors,
+ backup
+ });
  }
  const totalEdits = results.reduce((sum, r) => sum + r.editsApplied, 0);
  const totalErrors = results.reduce((sum, r) => sum + r.errors.length, 0);
  return {
  success: totalErrors === 0,
  results,
- message: `Applied ${totalEdits} edits across ${results.length} files${totalErrors > 0 ? ` with ${totalErrors} errors` : ""}`
+ message: `Applied ${totalEdits} edits across ${operations.length} files${totalErrors > 0 ? ` with ${totalErrors} errors` : ""}`
  };
  } catch (error) {
  return {
@@ -1942,6 +2361,56 @@ export const tools = await mcpClient.getTools();
  };
  }
  }
+ /**
+ * Replace specific line ranges in a file with new content
+ */
+ static async replaceLines(context) {
+ const { filePath, startLine, endLine, newContent, createBackup = false, projectPath = process.cwd() } = context;
+ try {
+ const fullPath = path.isAbsolute(filePath) ? filePath : path.join(projectPath, filePath);
+ const content = await promises.readFile(fullPath, "utf-8");
+ const lines = content.split("\n");
+ if (startLine < 1 || endLine < 1 || startLine > lines.length || endLine > lines.length) {
+ return {
+ success: false,
+ message: `Invalid line range: ${startLine}-${endLine}. File has ${lines.length} lines.`,
+ error: "Invalid line range"
+ };
+ }
+ if (startLine > endLine) {
+ return {
+ success: false,
+ message: `Start line (${startLine}) cannot be greater than end line (${endLine}).`,
+ error: "Invalid line range"
+ };
+ }
+ let backup;
+ if (createBackup) {
+ const backupPath = `${fullPath}.backup.${Date.now()}`;
+ await promises.writeFile(backupPath, content, "utf-8");
+ backup = backupPath;
+ }
+ const beforeLines = lines.slice(0, startLine - 1);
+ const afterLines = lines.slice(endLine);
+ const newLines = newContent ? newContent.split("\n") : [];
+ const updatedLines = [...beforeLines, ...newLines, ...afterLines];
+ const updatedContent = updatedLines.join("\n");
+ await promises.writeFile(fullPath, updatedContent, "utf-8");
+ const linesReplaced = endLine - startLine + 1;
+ return {
+ success: true,
+ message: `Successfully replaced ${linesReplaced} lines (${startLine}-${endLine}) in ${filePath}`,
+ linesReplaced,
+ backup
+ };
+ } catch (error) {
+ return {
+ success: false,
+ message: `Failed to replace lines: ${error instanceof Error ? error.message : String(error)}`,
+ error: error instanceof Error ? error.message : String(error)
+ };
+ }
+ }
  /**
  * Ask user for clarification
  */
@@ -1988,32 +2457,38 @@ export const tools = await mcpClient.getTools();
  /**
  * Perform intelligent search with context
  */
- static async performSmartSearch(context) {
+ static async performSmartSearch(context, projectPath) {
  try {
  const { query, type = "text", scope = {}, context: searchContext = {} } = context;
  const { paths = ["."], fileTypes = [], excludePaths = [], maxResults = 50 } = scope;
  const { beforeLines = 2, afterLines = 2 } = searchContext;
- let rgCommand = "rg";
- if (beforeLines > 0 || afterLines > 0) {
- rgCommand += ` -A ${afterLines} -B ${beforeLines}`;
+ const rgArgs = [];
+ if (beforeLines > 0) {
+ rgArgs.push("-B", beforeLines.toString());
+ }
+ if (afterLines > 0) {
+ rgArgs.push("-A", afterLines.toString());
  }
- rgCommand += " -n";
+ rgArgs.push("-n");
  if (type === "regex") {
- rgCommand += " -e";
+ rgArgs.push("-e");
  } else if (type === "fuzzy") {
- rgCommand += " --fixed-strings";
+ rgArgs.push("--fixed-strings");
  }
  if (fileTypes.length > 0) {
  fileTypes.forEach((ft) => {
- rgCommand += ` --type-add 'custom:*.${ft}' -t custom`;
+ rgArgs.push("--type-add", `custom:*.${ft}`, "-t", "custom");
  });
  }
  excludePaths.forEach((path) => {
- rgCommand += ` --glob '!${path}'`;
+ rgArgs.push("--glob", `!${path}`);
+ });
+ rgArgs.push("-m", maxResults.toString());
+ rgArgs.push(query);
+ rgArgs.push(...paths);
+ const { stdout } = await execFile("rg", rgArgs, {
+ cwd: projectPath
  });
- rgCommand += ` -m ${maxResults}`;
- rgCommand += ` "${query}" ${paths.join(" ")}`;
- const { stdout } = await exec(rgCommand);
  const lines = stdout.split("\n").filter((line) => line.trim());
  const matches = [];
  let currentMatch = null;
@@ -2146,14 +2621,29 @@ export const tools = await mcpClient.getTools();
  includeMetadata = true,
  projectPath
  } = context;
+ const gitignorePath = path.join(projectPath || process.cwd(), ".gitignore");
+ let gitignoreFilter;
+ try {
+ const gitignoreContent = await promises.readFile(gitignorePath, "utf-8");
+ gitignoreFilter = ignore__default.default().add(gitignoreContent);
+ } catch (err) {
+ if (err.code !== "ENOENT") {
+ console.error(`Error reading .gitignore file:`, err);
+ }
+ }
  const resolvedPath = path.isAbsolute(path$1) ? path$1 : path.resolve(projectPath || process.cwd(), path$1);
  const items = [];
  async function processDirectory(dirPath, currentDepth = 0) {
+ const relativeToProject = path.relative(projectPath || process.cwd(), dirPath);
+ if (gitignoreFilter?.ignores(relativeToProject)) return;
  if (currentDepth > maxDepth) return;
  const entries = await promises.readdir(dirPath);
  for (const entry of entries) {
+ const entryPath = path.join(dirPath, entry);
+ const relativeEntryPath = path.relative(projectPath || process.cwd(), entryPath);
+ if (gitignoreFilter?.ignores(relativeEntryPath)) continue;
  if (!includeHidden && entry.startsWith(".")) continue;
- const fullPath = path.join(dirPath, entry);
+ const fullPath = entryPath;
  const relativePath = path.relative(resolvedPath, fullPath);
  if (pattern) {
  const regexPattern = pattern.replace(/\*/g, ".*").replace(/\?/g, ".");
@@ -2387,12 +2877,19 @@ var ToolSummaryProcessor = class extends core.MemoryProcessor {
  }
  }
  if (summaryTasks.length > 0) {
- const summaryResults = await Promise.all(summaryTasks.map((task) => task.promise));
+ const summaryResults = await Promise.allSettled(summaryTasks.map((task) => task.promise));
  summaryTasks.forEach((task, index) => {
- const summaryResult = summaryResults[index];
- const summaryText = summaryResult.text;
- this.summaryCache.set(task.cacheKey, summaryText);
- task.content.result = `Tool call summary: ${summaryText}`;
+ const result = summaryResults[index];
+ if (!result) return;
+ if (result.status === "fulfilled") {
+ const summaryResult = result.value;
+ const summaryText = summaryResult.text;
+ this.summaryCache.set(task.cacheKey, summaryText);
+ task.content.result = `Tool call summary: ${summaryText}`;
+ } else if (result.status === "rejected") {
+ console.warn(`Failed to generate summary for tool call:`, result.reason);
+ task.content.result = `Tool call summary: [Summary generation failed]`;
+ }
  });
  }
  return messages;
@@ -2405,19 +2902,30 @@ var WriteToDiskProcessor = class extends core.MemoryProcessor {
  this.prefix = prefix;
  }
  async process(messages) {
- await promises.writeFile(`${this.prefix}-${Date.now()}.json`, JSON.stringify(messages, null, 2));
+ await promises.writeFile(`${this.prefix}-${Date.now()}-${process.pid}.json`, JSON.stringify(messages, null, 2));
  return messages;
  }
  };
+ var resolveModel = (runtimeContext) => {
+ const modelFromContext = runtimeContext.get("model");
+ if (modelFromContext) {
+ if (isValidMastraLanguageModel(modelFromContext)) {
+ return modelFromContext;
+ }
+ throw new Error(
+ 'Invalid model provided. Model must be a MastraLanguageModel instance (e.g., openai("gpt-4"), anthropic("claude-3-5-sonnet"), etc.)'
+ );
+ }
+ return openai.openai("gpt-4.1");
+ };
+ var isValidMastraLanguageModel = (model) => {
+ return model && typeof model === "object" && typeof model.modelId === "string" && typeof model.generate === "function";
+ };
  var cloneTemplateStep = workflows.createStep({
  id: "clone-template",
  description: "Clone the template repository to a temporary directory at the specified ref",
- inputSchema: MergeInputSchema,
- outputSchema: zod.z.object({
- templateDir: zod.z.string(),
- commitSha: zod.z.string(),
- slug: zod.z.string()
- }),
+ inputSchema: AgentBuilderInputSchema,
+ outputSchema: CloneTemplateResultSchema,
  execute: async ({ inputData }) => {
  const { repo, ref = "main", slug } = inputData;
  if (!repo) {
@@ -2426,45 +2934,37 @@ var cloneTemplateStep = workflows.createStep({
  const inferredSlug = slug || repo.split("/").pop()?.replace(/\.git$/, "") || "template";
  const tempDir = await promises.mkdtemp(path.join(os.tmpdir(), "mastra-template-"));
  try {
- const cloneCmd = `git clone "${repo}" "${tempDir}"`;
- await exec(cloneCmd);
+ await gitClone(repo, tempDir);
  if (ref !== "main" && ref !== "master") {
- await exec(`git checkout "${ref}"`, { cwd: tempDir });
+ await gitCheckoutRef(tempDir, ref);
  }
- const { stdout: commitSha } = await exec("git rev-parse HEAD", { cwd: tempDir });
+ const commitSha = await gitRevParse(tempDir, "HEAD");
  return {
  templateDir: tempDir,
  commitSha: commitSha.trim(),
- slug: inferredSlug
+ slug: inferredSlug,
+ success: true
  };
  } catch (error) {
  try {
  await promises.rm(tempDir, { recursive: true, force: true });
  } catch {
  }
- throw new Error(`Failed to clone template: ${error instanceof Error ? error.message : String(error)}`);
+ return {
+ templateDir: "",
+ commitSha: "",
+ slug: slug || "unknown",
+ success: false,
+ error: `Failed to clone template: ${error instanceof Error ? error.message : String(error)}`
+ };
  }
  }
  });
  var analyzePackageStep = workflows.createStep({
  id: "analyze-package",
  description: "Analyze the template package.json to extract dependency information",
- inputSchema: zod.z.object({
- templateDir: zod.z.string(),
- commitSha: zod.z.string(),
- slug: zod.z.string()
- }),
- outputSchema: zod.z.object({
- dependencies: zod.z.record(zod.z.string()).optional(),
- devDependencies: zod.z.record(zod.z.string()).optional(),
- peerDependencies: zod.z.record(zod.z.string()).optional(),
- scripts: zod.z.record(zod.z.string()).optional(),
- packageInfo: zod.z.object({
- name: zod.z.string().optional(),
- version: zod.z.string().optional(),
- description: zod.z.string().optional()
- })
- }),
+ inputSchema: CloneTemplateResultSchema,
+ outputSchema: PackageAnalysisSchema,
  execute: async ({ inputData }) => {
  console.log("Analyzing template package.json...");
  const { templateDir } = inputData;
@@ -2478,11 +2978,10 @@ var analyzePackageStep = workflows.createStep({
  devDependencies: packageJson.devDependencies || {},
  peerDependencies: packageJson.peerDependencies || {},
  scripts: packageJson.scripts || {},
- packageInfo: {
- name: packageJson.name,
- version: packageJson.version,
- description: packageJson.description
- }
+ name: packageJson.name || "",
+ version: packageJson.version || "",
+ description: packageJson.description || "",
+ success: true
  };
  } catch (error) {
  console.warn(`Failed to read template package.json: ${error instanceof Error ? error.message : String(error)}`);
@@ -2491,7 +2990,11 @@ var analyzePackageStep = workflows.createStep({
  devDependencies: {},
  peerDependencies: {},
  scripts: {},
- packageInfo: {}
+ name: "",
+ version: "",
+ description: "",
+ success: true
+ // This is a graceful fallback, not a failure
  };
  }
  }
@@ -2499,20 +3002,15 @@ var analyzePackageStep = workflows.createStep({
  var discoverUnitsStep = workflows.createStep({
  id: "discover-units",
  description: "Discover template units by analyzing the templates directory structure",
- inputSchema: zod.z.object({
- templateDir: zod.z.string(),
- commitSha: zod.z.string(),
- slug: zod.z.string()
- }),
- outputSchema: zod.z.object({
- units: zod.z.array(TemplateUnitSchema)
- }),
- execute: async ({ inputData }) => {
+ inputSchema: CloneTemplateResultSchema,
+ outputSchema: DiscoveryResultSchema,
+ execute: async ({ inputData, runtimeContext }) => {
  const { templateDir } = inputData;
  const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
- const agent$1 = new agent.Agent({
- model: openai.openai("gpt-4o-mini"),
- instructions: `You are an expert at analyzing Mastra projects.
+ try {
+ const agent$1 = new agent.Agent({
+ model: resolveModel(runtimeContext),
+ instructions: `You are an expert at analyzing Mastra projects.

  Your task is to scan the provided directory and identify all available units (agents, workflows, tools, MCP servers, networks).
@@ -2543,14 +3041,14 @@ IMPORTANT - Naming Consistency Rules:
  - use the relative path from the template root for the file (e.g., 'src/mastra/lib/util.ts' \u2192 file: 'src/mastra/lib/util.ts')

  Return the actual exported names of the units, as well as the file names.`,
- name: "Mastra Project Discoverer",
- tools: {
- readFile: tools.readFile,
- listDirectory: tools.listDirectory
- }
- });
- const result = await agent$1.generate(
- `Analyze the Mastra project directory structure at "${templateDir}".
+ name: "Mastra Project Discoverer",
+ tools: {
+ readFile: tools.readFile,
+ listDirectory: tools.listDirectory
+ }
+ });
+ const result = await agent$1.generate(
+ `Analyze the Mastra project directory structure at "${templateDir}".

  List directory contents using listDirectory tool, and then analyze each file with readFile tool.
  IMPORTANT:
@@ -2560,51 +3058,70 @@ Return the actual exported names of the units, as well as the file names.`,
  - If a directory doesn't exist or has no files, return an empty array

  Return the analysis in the exact format specified in the output schema.`,
- {
- experimental_output: zod.z.object({
- agents: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
- workflows: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
- tools: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
- mcp: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
- networks: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
- other: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional()
- }),
- maxSteps: 100
+ {
+ experimental_output: zod.z.object({
+ agents: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
+ workflows: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
+ tools: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
+ mcp: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
+ networks: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
+ other: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional()
+ }),
+ maxSteps: 100
+ }
+ );
+ const template = result.object ?? {};
+ const units = [];
+ template.agents?.forEach((agentId) => {
+ units.push({ kind: "agent", id: agentId.name, file: agentId.file });
+ });
+ template.workflows?.forEach((workflowId) => {
+ units.push({ kind: "workflow", id: workflowId.name, file: workflowId.file });
+ });
+ template.tools?.forEach((toolId) => {
+ units.push({ kind: "tool", id: toolId.name, file: toolId.file });
+ });
+ template.mcp?.forEach((mcpId) => {
+ units.push({ kind: "mcp-server", id: mcpId.name, file: mcpId.file });
+ });
+ template.networks?.forEach((networkId) => {
+ units.push({ kind: "network", id: networkId.name, file: networkId.file });
+ });
+ template.other?.forEach((otherId) => {
+ units.push({ kind: "other", id: otherId.name, file: otherId.file });
+ });
+ console.log("Discovered units:", JSON.stringify(units, null, 2));
+ if (units.length === 0) {
+ throw new Error(`No Mastra units (agents, workflows, tools) found in template.
+ Possible causes:
+ - Template may not follow standard Mastra structure
+ - AI agent couldn't analyze template files (model/token limits)
+ - Template is empty or in wrong branch
+
+ Debug steps:
+ - Check template has files in src/mastra/ directories
+ - Try a different branch
+ - Check template repository structure manually`);
  }
- );
- const template = result.object ?? {};
- const units = [];
- template.agents?.forEach((agentId) => {
- units.push({ kind: "agent", id: agentId.name, file: agentId.file });
- });
- template.workflows?.forEach((workflowId) => {
- units.push({ kind: "workflow", id: workflowId.name, file: workflowId.file });
- });
- template.tools?.forEach((toolId) => {
- units.push({ kind: "tool", id: toolId.name, file: toolId.file });
- });
- template.mcp?.forEach((mcpId) => {
- units.push({ kind: "mcp-server", id: mcpId.name, file: mcpId.file });
- });
- template.networks?.forEach((networkId) => {
- units.push({ kind: "network", id: networkId.name, file: networkId.file });
- });
- template.other?.forEach((otherId) => {
- units.push({ kind: "other", id: otherId.name, file: otherId.file });
- });
- console.log("Discovered units:", JSON.stringify(units, null, 2));
- return { units };
+ return {
+ units,
+ success: true
+ };
+ } catch (error) {
+ console.error("Failed to discover units:", error);
+ return {
+ units: [],
+ success: false,
+ error: `Failed to discover units: ${error instanceof Error ? error.message : String(error)}`
+ };
+ }
  }
  });
  var orderUnitsStep = workflows.createStep({
  id: "order-units",
  description: "Sort units in topological order based on kind weights",
- inputSchema: zod.z.object({
- units: zod.z.array(TemplateUnitSchema)
- }),
- outputSchema: zod.z.object({
- orderedUnits: zod.z.array(TemplateUnitSchema)
- }),
+ inputSchema: DiscoveryResultSchema,
+ outputSchema: OrderedUnitsSchema,
  execute: async ({ inputData }) => {
  const { units } = inputData;
  const orderedUnits = [...units].sort((a, b) => {
@@ -2612,103 +3129,102 @@ var orderUnitsStep = workflows.createStep({
  const bWeight = kindWeight(b.kind);
  return aWeight - bWeight;
  });
- return { orderedUnits };
+ return {
+ orderedUnits,
+ success: true
+ };
+ }
+ });
+ var prepareBranchStep = workflows.createStep({
+ id: "prepare-branch",
+ description: "Create or switch to integration branch before modifications",
+ inputSchema: PrepareBranchInputSchema,
+ outputSchema: PrepareBranchResultSchema,
+ execute: async ({ inputData, runtimeContext }) => {
+ const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
+ try {
+ const branchName = `feat/install-template-${inputData.slug}`;
+ await gitCheckoutBranch(branchName, targetPath);
+ return {
+ branchName,
+ success: true
+ };
+ } catch (error) {
+ console.error("Failed to prepare branch:", error);
+ return {
+ branchName: `feat/install-template-${inputData.slug}`,
+ // Return the intended name anyway
+ success: false,
+ error: `Failed to prepare branch: ${error instanceof Error ? error.message : String(error)}`
+ };
+ }
  }
  });
  var packageMergeStep = workflows.createStep({
  id: "package-merge",
- description: "Merge template package.json dependencies into target project and install",
- inputSchema: zod.z.object({
- commitSha: zod.z.string(),
- slug: zod.z.string(),
- targetPath: zod.z.string().optional(),
- packageInfo: zod.z.object({
- dependencies: zod.z.record(zod.z.string()).optional(),
- devDependencies: zod.z.record(zod.z.string()).optional(),
- peerDependencies: zod.z.record(zod.z.string()).optional(),
- scripts: zod.z.record(zod.z.string()).optional(),
- packageInfo: zod.z.object({
- name: zod.z.string().optional(),
- version: zod.z.string().optional(),
- description: zod.z.string().optional()
- })
- })
- }),
- outputSchema: zod.z.object({
- success: zod.z.boolean(),
- applied: zod.z.boolean(),
- message: zod.z.string(),
- error: zod.z.string().optional()
- }),
+ description: "Merge template package.json dependencies into target project",
+ inputSchema: PackageMergeInputSchema,
+ outputSchema: PackageMergeResultSchema,
  execute: async ({ inputData, runtimeContext }) => {
  console.log("Package merge step starting...");
  const { slug, packageInfo } = inputData;
  const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
  try {
- const allTools = await AgentBuilderDefaults.DEFAULT_TOOLS(targetPath);
- const packageMergeAgent = new agent.Agent({
- name: "package-merger",
- description: "Specialized agent for merging package.json dependencies",
- instructions: `You are a package.json merge specialist. Your job is to:
-
- 1. **Read the target project's package.json** using readFile tool
- 2. **Merge template dependencies** into the target package.json following these rules:
- - For dependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- - For devDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- - For peerDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- - For scripts: Add new scripts with "template:${slug}:" prefix, don't overwrite existing ones
- - Maintain existing package.json structure and formatting
- 3. **Write the updated package.json** using writeFile tool
-
- Template Dependencies to Merge:
- - Dependencies: ${JSON.stringify(packageInfo.dependencies || {}, null, 2)}
- - Dev Dependencies: ${JSON.stringify(packageInfo.devDependencies || {}, null, 2)}
- - Peer Dependencies: ${JSON.stringify(packageInfo.peerDependencies || {}, null, 2)}
- - Scripts: ${JSON.stringify(packageInfo.scripts || {}, null, 2)}
-
- CRITICAL MERGE RULES:
- 1. For each dependency in template dependencies, if it does NOT exist in target, ADD it with template version
- 2. For each dependency in template dependencies, if it ALREADY exists in target, KEEP target version
- 3. You MUST add ALL template dependencies that don't conflict - do not skip any
- 4. Be explicit about what you're adding vs keeping
-
- EXAMPLE:
- Template has: {"@mastra/libsql": "latest", "@mastra/core": "latest", "zod": "^3.25.67"}
- Target has: {"@mastra/core": "latest", "zod": "^3.25.0"}
- Result should have: {"@mastra/core": "latest", "zod": "^3.25.0", "@mastra/libsql": "latest"}
-
2680
- Be systematic and thorough. Always read the existing package.json first, then merge, then write.`,
2681
- model: openai.openai("gpt-4o-mini"),
2682
- tools: {
2683
- readFile: allTools.readFile,
2684
- writeFile: allTools.writeFile,
2685
- listDirectory: allTools.listDirectory
3173
+ const targetPkgPath = path.join(targetPath, "package.json");
3174
+ let targetPkgRaw = "{}";
3175
+ try {
3176
+ targetPkgRaw = await promises.readFile(targetPkgPath, "utf-8");
3177
+ } catch {
3178
+ console.warn(`No existing package.json at ${targetPkgPath}, creating a new one`);
3179
+ }
3180
+ let targetPkg;
3181
+ try {
3182
+ targetPkg = JSON.parse(targetPkgRaw || "{}");
3183
+ } catch (e) {
3184
+ throw new Error(
3185
+ `Failed to parse existing package.json at ${targetPkgPath}: ${e instanceof Error ? e.message : String(e)}`
3186
+ );
3187
+ }
3188
+ const ensureObj = (o) => o && typeof o === "object" ? o : {};
3189
+ targetPkg.dependencies = ensureObj(targetPkg.dependencies);
3190
+ targetPkg.devDependencies = ensureObj(targetPkg.devDependencies);
3191
+ targetPkg.peerDependencies = ensureObj(targetPkg.peerDependencies);
3192
+ targetPkg.scripts = ensureObj(targetPkg.scripts);
3193
+ const tplDeps = ensureObj(packageInfo.dependencies);
3194
+ const tplDevDeps = ensureObj(packageInfo.devDependencies);
3195
+ const tplPeerDeps = ensureObj(packageInfo.peerDependencies);
3196
+ const tplScripts = ensureObj(packageInfo.scripts);
3197
+ const existsAnywhere = (name) => name in targetPkg.dependencies || name in targetPkg.devDependencies || name in targetPkg.peerDependencies;
3198
+ for (const [name, ver] of Object.entries(tplDeps)) {
3199
+ if (!existsAnywhere(name)) {
3200
+ targetPkg.dependencies[name] = String(ver);
2686
3201
  }
2687
- });
2688
- console.log("Starting package merge agent...");
2689
- console.log("Template dependencies to merge:", JSON.stringify(packageInfo.dependencies, null, 2));
2690
- console.log("Template devDependencies to merge:", JSON.stringify(packageInfo.devDependencies, null, 2));
2691
- const result = await packageMergeAgent.stream(
2692
- `Please merge the template dependencies into the target project's package.json at ${targetPath}/package.json.`,
2693
- { experimental_output: zod.z.object({ success: zod.z.boolean() }) }
2694
- );
2695
- let buffer = [];
2696
- for await (const chunk of result.fullStream) {
2697
- if (chunk.type === "text-delta") {
2698
- buffer.push(chunk.textDelta);
2699
- if (buffer.length > 20) {
2700
- console.log(buffer.join(""));
2701
- buffer = [];
2702
- }
3202
+ }
3203
+ for (const [name, ver] of Object.entries(tplDevDeps)) {
3204
+ if (!existsAnywhere(name)) {
3205
+ targetPkg.devDependencies[name] = String(ver);
3206
+ }
3207
+ }
3208
+ for (const [name, ver] of Object.entries(tplPeerDeps)) {
3209
+ if (!(name in targetPkg.peerDependencies)) {
3210
+ targetPkg.peerDependencies[name] = String(ver);
2703
3211
  }
2704
3212
  }
2705
- if (buffer.length > 0) {
2706
- console.log(buffer.join(""));
3213
+ const prefix = `template:${slug}:`;
3214
+ for (const [name, cmd] of Object.entries(tplScripts)) {
3215
+ const newKey = `${prefix}${name}`;
3216
+ if (!(newKey in targetPkg.scripts)) {
3217
+ targetPkg.scripts[newKey] = String(cmd);
3218
+ }
2707
3219
  }
3220
+ await promises.writeFile(targetPkgPath, JSON.stringify(targetPkg, null, 2), "utf-8");
3221
+ await gitAddAndCommit(targetPath, `feat(template): merge deps for ${slug}`, [targetPkgPath], {
3222
+ skipIfNoStaged: true
3223
+ });
2708
3224
  return {
2709
3225
  success: true,
2710
3226
  applied: true,
2711
- message: `Successfully merged template dependencies and installed packages for ${slug}`
3227
+ message: `Successfully merged template dependencies for ${slug}`
2712
3228
  };
2713
3229
  } catch (error) {
2714
3230
  console.error("Package merge failed:", error);
@@ -2721,32 +3237,30 @@ Be systematic and thorough. Always read the existing package.json first, then me
2721
3237
  }
2722
3238
  }
2723
3239
  });
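The new programmatic merge replaces the previous LLM-driven package.json editing with deterministic precedence rules: a template entry is only added when the package name is not already declared anywhere in the target manifest, and template scripts are namespaced under template:<slug>:. A condensed sketch of that precedence (dependencies, devDependencies and scripts only):

// Sketch of the merge precedence used in the step above.
function mergeManifest(target, template, slug) {
  const out = {
    ...target,
    dependencies: { ...(target.dependencies || {}) },
    devDependencies: { ...(target.devDependencies || {}) },
    peerDependencies: { ...(target.peerDependencies || {}) },
    scripts: { ...(target.scripts || {}) },
  };
  const has = (name) =>
    name in out.dependencies || name in out.devDependencies || name in out.peerDependencies;
  for (const [name, ver] of Object.entries(template.dependencies || {})) {
    if (!has(name)) out.dependencies[name] = ver;      // add new, keep existing versions
  }
  for (const [name, ver] of Object.entries(template.devDependencies || {})) {
    if (!has(name)) out.devDependencies[name] = ver;
  }
  for (const [name, cmd] of Object.entries(template.scripts || {})) {
    const key = `template:${slug}:${name}`;
    if (!(key in out.scripts)) out.scripts[key] = cmd; // never overwrite existing scripts
  }
  return out;
}

// mergeManifest({ dependencies: { zod: '^3.25.0' } },
//               { dependencies: { zod: '^3.25.67', '@mastra/libsql': 'latest' } }, 'demo')
// => dependencies: { zod: '^3.25.0', '@mastra/libsql': 'latest' }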
2724
- var flatInstallStep = workflows.createStep({
2725
- id: "flat-install",
2726
- description: "Run a flat install command without specifying packages",
2727
- inputSchema: zod.z.object({
2728
- targetPath: zod.z.string().describe("Path to the project to install packages in")
2729
- }),
2730
- outputSchema: zod.z.object({
2731
- success: zod.z.boolean(),
2732
- message: zod.z.string(),
2733
- details: zod.z.string().optional()
2734
- }),
3240
+ var installStep = workflows.createStep({
3241
+ id: "install",
3242
+ description: "Install packages based on merged package.json",
3243
+ inputSchema: InstallInputSchema,
3244
+ outputSchema: InstallResultSchema,
2735
3245
  execute: async ({ inputData, runtimeContext }) => {
2736
- console.log("Running flat install...");
3246
+ console.log("Running install step...");
2737
3247
  const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
2738
3248
  try {
2739
3249
  await spawnSWPM(targetPath, "install", []);
3250
+ const lock = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock"].map((f) => path.join(targetPath, f)).find((f) => fs.existsSync(f));
3251
+ if (lock) {
3252
+ await gitAddAndCommit(targetPath, `chore(template): commit lockfile after install`, [lock], {
3253
+ skipIfNoStaged: true
3254
+ });
3255
+ }
2740
3256
  return {
2741
- success: true,
2742
- message: "Successfully ran flat install command",
2743
- details: "Installed all dependencies from package.json"
3257
+ success: true
2744
3258
  };
2745
3259
  } catch (error) {
2746
- console.error("Flat install failed:", error);
3260
+ console.error("Install failed:", error);
2747
3261
  return {
2748
3262
  success: false,
2749
- message: `Flat install failed: ${error instanceof Error ? error.message : String(error)}`
3263
+ error: error instanceof Error ? error.message : String(error)
2750
3264
  };
2751
3265
  }
2752
3266
  }
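After the package-manager-agnostic install, the step commits whichever lockfile the project actually uses. A small sketch of the lockfile lookup; gitAddAndCommit is the package's own helper and is not reproduced here:

// Sketch: find the first lockfile that exists in the target project.
const fs = require('fs');
const path = require('path');

function findLockfile(targetPath) {
  return ['pnpm-lock.yaml', 'package-lock.json', 'yarn.lock']
    .map((f) => path.join(targetPath, f))
    .find((f) => fs.existsSync(f));
}

// const lock = findLockfile(process.cwd());
// if (lock) { /* stage `lock` and commit "chore(template): commit lockfile after install" */ }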
@@ -2754,45 +3268,8 @@ var flatInstallStep = workflows.createStep({
2754
3268
  var programmaticFileCopyStep = workflows.createStep({
2755
3269
  id: "programmatic-file-copy",
2756
3270
  description: "Programmatically copy template files to target project based on ordered units",
2757
- inputSchema: zod.z.object({
2758
- orderedUnits: zod.z.array(
2759
- zod.z.object({
2760
- kind: zod.z.string(),
2761
- id: zod.z.string(),
2762
- file: zod.z.string()
2763
- })
2764
- ),
2765
- templateDir: zod.z.string(),
2766
- commitSha: zod.z.string(),
2767
- slug: zod.z.string(),
2768
- targetPath: zod.z.string().optional()
2769
- }),
2770
- outputSchema: zod.z.object({
2771
- success: zod.z.boolean(),
2772
- copiedFiles: zod.z.array(
2773
- zod.z.object({
2774
- source: zod.z.string(),
2775
- destination: zod.z.string(),
2776
- unit: zod.z.object({
2777
- kind: zod.z.string(),
2778
- id: zod.z.string()
2779
- })
2780
- })
2781
- ),
2782
- conflicts: zod.z.array(
2783
- zod.z.object({
2784
- unit: zod.z.object({
2785
- kind: zod.z.string(),
2786
- id: zod.z.string()
2787
- }),
2788
- issue: zod.z.string(),
2789
- sourceFile: zod.z.string(),
2790
- targetFile: zod.z.string()
2791
- })
2792
- ),
2793
- message: zod.z.string(),
2794
- error: zod.z.string().optional()
2795
- }),
3271
+ inputSchema: FileCopyInputSchema,
3272
+ outputSchema: FileCopyResultSchema,
2796
3273
  execute: async ({ inputData, runtimeContext }) => {
2797
3274
  console.log("Programmatic file copy step starting...");
2798
3275
  const { orderedUnits, templateDir, commitSha, slug } = inputData;
@@ -2955,14 +3432,83 @@ var programmaticFileCopyStep = workflows.createStep({
2955
3432
  });
2956
3433
  }
2957
3434
  }
3435
+ try {
3436
+ const targetTsconfig = path.resolve(targetPath, "tsconfig.json");
3437
+ if (!fs.existsSync(targetTsconfig)) {
3438
+ const templateTsconfig = path.resolve(templateDir, "tsconfig.json");
3439
+ if (fs.existsSync(templateTsconfig)) {
3440
+ await promises.copyFile(templateTsconfig, targetTsconfig);
3441
+ copiedFiles.push({
3442
+ source: templateTsconfig,
3443
+ destination: targetTsconfig,
3444
+ unit: { kind: "other", id: "tsconfig.json" }
3445
+ });
3446
+ console.log("\u2713 Copied tsconfig.json from template to target");
3447
+ } else {
3448
+ const minimalTsconfig = {
3449
+ compilerOptions: {
3450
+ target: "ES2020",
3451
+ module: "NodeNext",
3452
+ moduleResolution: "NodeNext",
3453
+ strict: false,
3454
+ esModuleInterop: true,
3455
+ skipLibCheck: true,
3456
+ resolveJsonModule: true,
3457
+ outDir: "dist"
3458
+ },
3459
+ include: ["**/*.ts", "**/*.tsx", "**/*.mts", "**/*.cts"],
3460
+ exclude: ["node_modules", "dist", "build", ".next", ".output", ".turbo"]
3461
+ };
3462
+ await promises.writeFile(targetTsconfig, JSON.stringify(minimalTsconfig, null, 2), "utf-8");
3463
+ copiedFiles.push({
3464
+ source: "[generated tsconfig.json]",
3465
+ destination: targetTsconfig,
3466
+ unit: { kind: "other", id: "tsconfig.json" }
3467
+ });
3468
+ console.log("\u2713 Generated minimal tsconfig.json in target");
3469
+ }
3470
+ }
3471
+ } catch (e) {
3472
+ conflicts.push({
3473
+ unit: { kind: "other", id: "tsconfig.json" },
3474
+ issue: `Failed to ensure tsconfig.json: ${e instanceof Error ? e.message : String(e)}`,
3475
+ sourceFile: "tsconfig.json",
3476
+ targetFile: "tsconfig.json"
3477
+ });
3478
+ }
3479
+ try {
3480
+ const targetMastraIndex = path.resolve(targetPath, "src/mastra/index.ts");
3481
+ if (!fs.existsSync(targetMastraIndex)) {
3482
+ const templateMastraIndex = path.resolve(templateDir, "src/mastra/index.ts");
3483
+ if (fs.existsSync(templateMastraIndex)) {
3484
+ if (!fs.existsSync(path.dirname(targetMastraIndex))) {
3485
+ await promises.mkdir(path.dirname(targetMastraIndex), { recursive: true });
3486
+ }
3487
+ await promises.copyFile(templateMastraIndex, targetMastraIndex);
3488
+ copiedFiles.push({
3489
+ source: templateMastraIndex,
3490
+ destination: targetMastraIndex,
3491
+ unit: { kind: "other", id: "mastra-index" }
3492
+ });
3493
+ console.log("\u2713 Copied src/mastra/index.ts from template to target");
3494
+ }
3495
+ }
3496
+ } catch (e) {
3497
+ conflicts.push({
3498
+ unit: { kind: "other", id: "mastra-index" },
3499
+ issue: `Failed to ensure Mastra index file: ${e instanceof Error ? e.message : String(e)}`,
3500
+ sourceFile: "src/mastra/index.ts",
3501
+ targetFile: "src/mastra/index.ts"
3502
+ });
3503
+ }
2958
3504
  if (copiedFiles.length > 0) {
2959
3505
  try {
2960
3506
  const fileList = copiedFiles.map((f) => f.destination);
2961
- const gitCommand = ["git", "add", ...fileList];
2962
- await exec(gitCommand.join(" "), { cwd: targetPath });
2963
- await exec(
2964
- `git commit -m "feat(template): copy ${copiedFiles.length} files from ${slug}@${commitSha.substring(0, 7)}"`,
2965
- { cwd: targetPath }
3507
+ await gitAddAndCommit(
3508
+ targetPath,
3509
+ `feat(template): copy ${copiedFiles.length} files from ${slug}@${commitSha.substring(0, 7)}`,
3510
+ fileList,
3511
+ { skipIfNoStaged: true }
2966
3512
  );
2967
3513
  console.log(`\u2713 Committed ${copiedFiles.length} copied files`);
2968
3514
  } catch (commitError) {
@@ -2979,83 +3525,26 @@ var programmaticFileCopyStep = workflows.createStep({
2979
3525
  };
2980
3526
  } catch (error) {
2981
3527
  console.error("Programmatic file copy failed:", error);
2982
- throw new Error(`Programmatic file copy failed: ${error instanceof Error ? error.message : String(error)}`);
3528
+ return {
3529
+ success: false,
3530
+ copiedFiles: [],
3531
+ conflicts: [],
3532
+ message: `Programmatic file copy failed: ${error instanceof Error ? error.message : String(error)}`,
3533
+ error: error instanceof Error ? error.message : String(error)
3534
+ };
2983
3535
  }
2984
3536
  }
2985
3537
  });
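The copy step now also guarantees a tsconfig.json in the target project: it prefers the template's file and only generates the minimal config when neither side ships one. A sketch of that fallback, with the minimal config passed in as the object built in the step above:

// Sketch only; error handling and copiedFiles/conflicts bookkeeping are omitted.
const fs = require('fs');
const path = require('path');
const { copyFile, writeFile } = require('fs/promises');

async function ensureTsconfig(targetPath, templateDir, minimalTsconfig) {
  const target = path.resolve(targetPath, 'tsconfig.json');
  if (fs.existsSync(target)) return 'kept';            // never overwrite an existing config
  const fromTemplate = path.resolve(templateDir, 'tsconfig.json');
  if (fs.existsSync(fromTemplate)) {
    await copyFile(fromTemplate, target);
    return 'copied';
  }
  await writeFile(target, JSON.stringify(minimalTsconfig, null, 2), 'utf-8');
  return 'generated';
}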
2986
3538
  var intelligentMergeStep = workflows.createStep({
2987
3539
  id: "intelligent-merge",
2988
3540
  description: "Use AgentBuilder to intelligently merge template files",
2989
- inputSchema: zod.z.object({
2990
- conflicts: zod.z.array(
2991
- zod.z.object({
2992
- unit: zod.z.object({
2993
- kind: zod.z.string(),
2994
- id: zod.z.string()
2995
- }),
2996
- issue: zod.z.string(),
2997
- sourceFile: zod.z.string(),
2998
- targetFile: zod.z.string()
2999
- })
3000
- ),
3001
- copiedFiles: zod.z.array(
3002
- zod.z.object({
3003
- source: zod.z.string(),
3004
- destination: zod.z.string(),
3005
- unit: zod.z.object({
3006
- kind: zod.z.string(),
3007
- id: zod.z.string()
3008
- })
3009
- })
3010
- ),
3011
- templateDir: zod.z.string(),
3012
- commitSha: zod.z.string(),
3013
- slug: zod.z.string(),
3014
- targetPath: zod.z.string().optional()
3015
- }),
3016
- outputSchema: zod.z.object({
3017
- success: zod.z.boolean(),
3018
- applied: zod.z.boolean(),
3019
- message: zod.z.string(),
3020
- conflictsResolved: zod.z.array(
3021
- zod.z.object({
3022
- unit: zod.z.object({
3023
- kind: zod.z.string(),
3024
- id: zod.z.string()
3025
- }),
3026
- issue: zod.z.string(),
3027
- resolution: zod.z.string()
3028
- })
3029
- ),
3030
- error: zod.z.string().optional(),
3031
- branchName: zod.z.string().optional()
3032
- }),
3541
+ inputSchema: IntelligentMergeInputSchema,
3542
+ outputSchema: IntelligentMergeResultSchema,
3033
3543
  execute: async ({ inputData, runtimeContext }) => {
3034
3544
  console.log("Intelligent merge step starting...");
3035
- const { conflicts, copiedFiles, commitSha, slug, templateDir } = inputData;
3545
+ const { conflicts, copiedFiles, commitSha, slug, templateDir, branchName } = inputData;
3036
3546
  const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
3037
- const baseBranchName = `feat/install-template-${slug}`;
3038
3547
  try {
3039
- let branchName = baseBranchName;
3040
- try {
3041
- await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
3042
- console.log(`Created new branch: ${branchName}`);
3043
- } catch (error) {
3044
- const errorStr = error instanceof Error ? error.message : String(error);
3045
- if (errorStr.includes("already exists")) {
3046
- try {
3047
- await exec(`git checkout "${branchName}"`, { cwd: targetPath });
3048
- console.log(`Switched to existing branch: ${branchName}`);
3049
- } catch {
3050
- const timestamp = Date.now().toString().slice(-6);
3051
- branchName = `${baseBranchName}-${timestamp}`;
3052
- await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
3053
- console.log(`Created unique branch: ${branchName}`);
3054
- }
3055
- } else {
3056
- throw error;
3057
- }
3058
- }
3059
3548
  const copyFileTool = tools.createTool({
3060
3549
  id: "copy-file",
3061
3550
  description: "Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).",
@@ -3093,7 +3582,7 @@ var intelligentMergeStep = workflows.createStep({
3093
3582
  const agentBuilder = new AgentBuilder({
3094
3583
  projectPath: targetPath,
3095
3584
  mode: "template",
3096
- model: openai.openai("gpt-4o-mini"),
3585
+ model: resolveModel(runtimeContext),
3097
3586
  instructions: `
3098
3587
  You are an expert at integrating Mastra template components into existing projects.
3099
3588
 
@@ -3108,49 +3597,52 @@ CONFLICTS TO RESOLVE:
3108
3597
  ${JSON.stringify(conflicts, null, 2)}
3109
3598
 
3110
3599
  CRITICAL INSTRUCTIONS:
3111
- 1. **When committing changes**: NEVER add dependency/build directories. Use specific file paths with 'git add'
3112
- 2. **Package management**: NO need to install packages (already handled by package merge step)
3113
- 3. **Validation**: When validation fails due to import issues, check existing files and imports for correct naming conventions
3114
- 4. **Variable vs file names**: A variable name might differ from file name (e.g., filename: ./downloaderTool.ts, export const fetcherTool(...))
3115
- 5. **File copying**: Most files are already copied programmatically. Only use copyFile tool for edge cases where additional files are needed
3600
+ 1. **Package management**: NO need to install packages (already handled by package merge step)
3601
+ 2. **File copying**: Most files are already copied programmatically. Only use copyFile tool for edge cases where additional files are needed for conflict resolution
3116
3602
 
3117
3603
  KEY RESPONSIBILITIES:
3118
3604
  1. Resolve any conflicts from the programmatic copy step
3119
3605
  2. Register components in existing Mastra index file (agents, workflows, networks, mcp-servers)
3120
3606
  3. DO NOT register tools in existing Mastra index file - tools should remain standalone
3121
- 4. Fix import path issues in copied files
3122
- 5. Ensure TypeScript imports and exports are correct
3123
- 6. Validate integration works properly
3124
- 7. Copy additional files ONLY if needed for conflict resolution or missing dependencies
3607
+ 4. Copy additional files ONLY if needed for conflict resolution
3125
3608
 
3126
- MASTRA-SPECIFIC INTEGRATION:
3609
+ MASTRA INDEX FILE HANDLING (src/mastra/index.ts):
3610
+ 1. **Verify the file exists**
3611
+ - Call readFile
3612
+ - If it fails with ENOENT (or listDirectory shows it missing) -> copyFile the template version to src/mastra/index.ts, then confirm it now exists
3613
+ - Always verify after copying that the file exists and is accessible
3614
+
3615
+ 2. **Edit the file**
3616
+ - Always work with the full file content
3617
+ - Generate the complete, correct source (imports, anchors, registrations, formatting)
3618
+ - Keep existing registrations intact and maintain file structure
3619
+ - Ensure proper spacing and organization of new additions
3620
+
3621
+ 3. **Handle anchors and structure**
3622
+ - When generating new content, ensure you do not duplicate existing imports or object entries
3623
+ - If required anchors (e.g., agents: {}) are missing, add them while generating the new content
3624
+ - Add missing anchors just before the closing brace of the Mastra config
3625
+ - Do not restructure or reorder existing anchors and registrations
3626
+
3627
+ CRITICAL: ALWAYS use writeFile to update the mastra/index.ts file when needed to register new components.
3628
+
3629
+ MASTRA-SPECIFIC REGISTRATION:
3127
3630
  - Agents: Register in existing Mastra index file
3128
3631
  - Workflows: Register in existing Mastra index file
3129
3632
  - Networks: Register in existing Mastra index file
3130
3633
  - MCP servers: Register in existing Mastra index file
3131
3634
  - Tools: Copy to ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool} but DO NOT register in existing Mastra index file
3132
-
3133
- EDGE CASE FILE COPYING:
3134
- - IF a file for a resource does not exist in the target project AND was not programmatically copied, you can use copyFile tool
3135
- - When taking files from template, ensure you get the right file name and path
3136
- - Only copy files that are actually needed for the integration to work
3137
-
3138
- NAMING CONVENTION GUIDANCE:
3139
- When fixing imports or understanding naming patterns, use these examples:
3140
-
3141
- **Import Path Patterns:**
3142
- - camelCase files: import { myAgent } from './myAgent'
3143
- - snake_case files: import { myAgent } from './my_agent'
3144
- - kebab-case files: import { myAgent } from './my-agent'
3145
- - PascalCase files: import { MyAgent } from './MyAgent'
3146
-
3147
- **Naming Detection Examples:**
3148
- - Files like "weatherAgent.ts", "chatAgent.ts" \u2192 use camelCase
3149
- - Files like "weather_agent.ts", "chat_agent.ts" \u2192 use snake_case
3150
- - Files like "weather-agent.ts", "chat-agent.ts" \u2192 use kebab-case
3151
- - Files like "WeatherAgent.ts", "ChatAgent.ts" \u2192 use PascalCase
3152
-
3153
- **Key Rule:** Keep variable/export names unchanged - only adapt file names and import paths
3635
+ - If an anchor (e.g., "agents: {") is not found, avoid complex restructuring; instead, insert the missing anchor on a new line (e.g., add "agents: {" just before the closing brace of the Mastra config) and then proceed with the other registrations.
3636
+
3637
+ CONFLICT RESOLUTION AND FILE COPYING:
3638
+ - Only copy files if needed to resolve specific conflicts
3639
+ - When copying files from template:
3640
+ - Ensure you get the right file name and path
3641
+ - Verify the destination directory exists
3642
+ - Maintain the same relative path structure
3643
+ - Only copy files that are actually needed
3644
+ - Preserve existing functionality when resolving conflicts
3645
+ - Focus on registration and conflict resolution, validation will happen in a later step
3154
3646
 
3155
3647
  Template information:
3156
3648
  - Slug: ${slug}
@@ -3171,15 +3663,23 @@ Template information:
3171
3663
  notes: `Unit: ${conflict.unit.kind}:${conflict.unit.id}, Issue: ${conflict.issue}, Source: ${conflict.sourceFile}, Target: ${conflict.targetFile}`
3172
3664
  });
3173
3665
  });
3174
- const nonToolFiles = copiedFiles.filter((f) => f.unit.kind !== "tool");
3175
- if (nonToolFiles.length > 0) {
3666
+ const registrableKinds = /* @__PURE__ */ new Set(["agent", "workflow", "network", "mcp-server"]);
3667
+ const registrableFiles = copiedFiles.filter((f) => registrableKinds.has(f.unit.kind));
3668
+ const targetMastraIndex = path.resolve(targetPath, "src/mastra/index.ts");
3669
+ const mastraIndexExists = fs.existsSync(targetMastraIndex);
3670
+ console.log(`Mastra index exists: ${mastraIndexExists} at ${targetMastraIndex}`);
3671
+ console.log(
3672
+ "Registrable components:",
3673
+ registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`)
3674
+ );
3675
+ if (registrableFiles.length > 0) {
3176
3676
  tasks.push({
3177
3677
  id: "register-components",
3178
- content: `Register ${nonToolFiles.length} components in existing Mastra index file (src/mastra/index.ts)`,
3678
+ content: `Register ${registrableFiles.length} components in existing Mastra index file (src/mastra/index.ts)`,
3179
3679
  status: "pending",
3180
3680
  priority: "medium",
3181
3681
  dependencies: conflicts.length > 0 ? conflicts.map((c) => `conflict-${c.unit.kind}-${c.unit.id}`) : void 0,
3182
- notes: `Components to register: ${nonToolFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
3682
+ notes: `Components to register: ${registrableFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
3183
3683
  });
3184
3684
  }
3185
3685
  console.log(`Creating task list with ${tasks.length} tasks...`);
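Only agents, workflows, networks and MCP servers are queued for registration in src/mastra/index.ts; tools stay standalone, as the instructions above require. A sketch of the registrable-kind filter:

// Sketch: keep only components that belong in the Mastra index file.
const REGISTRABLE_KINDS = new Set(['agent', 'workflow', 'network', 'mcp-server']);

function registrableFiles(copiedFiles) {
  return copiedFiles.filter((f) => REGISTRABLE_KINDS.has(f.unit.kind));
}

// registrableFiles([{ unit: { kind: 'tool', id: 'fetcher' } }, { unit: { kind: 'agent', id: 'weatherAgent' } }])
// => only the agent entry survives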
@@ -3276,10 +3776,12 @@ Start by listing your tasks and work through them systematically!
3276
3776
  };
3277
3777
  }
3278
3778
  });
3779
+ await gitAddAndCommit(targetPath, `feat(template): apply intelligent merge for ${slug}`, void 0, {
3780
+ skipIfNoStaged: true
3781
+ });
3279
3782
  return {
3280
3783
  success: true,
3281
3784
  applied: true,
3282
- branchName,
3283
3785
  message: `Successfully resolved ${conflicts.length} conflicts from template ${slug}`,
3284
3786
  conflictsResolved: conflictResolutions
3285
3787
  };
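Several steps now funnel their commits through gitAddAndCommit with skipIfNoStaged, that is, stage the given files (or everything) and commit only when something is actually staged. The helper itself is internal to the package; a plain-git approximation:

// Sketch only: the real gitAddAndCommit may behave differently.
const { execFile } = require('child_process');
const { promisify } = require('util');
const sh = promisify(execFile);

async function addAndCommit(cwd, message, files) {
  await sh('git', ['add', ...(files && files.length ? files : ['-A'])], { cwd });
  const { stdout } = await sh('git', ['diff', '--cached', '--name-only'], { cwd });
  if (!stdout.trim()) return false;                    // nothing staged: skip the commit
  await sh('git', ['commit', '-m', message], { cwd });
  return true;
}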
@@ -3287,7 +3789,6 @@ Start by listing your tasks and work through them systematically!
3287
3789
  return {
3288
3790
  success: false,
3289
3791
  applied: false,
3290
- branchName: baseBranchName,
3291
3792
  message: `Failed to resolve conflicts: ${error instanceof Error ? error.message : String(error)}`,
3292
3793
  conflictsResolved: [],
3293
3794
  error: error instanceof Error ? error.message : String(error)
@@ -3297,52 +3798,9 @@ Start by listing your tasks and work through them systematically!
3297
3798
  });
3298
3799
  var validationAndFixStep = workflows.createStep({
3299
3800
  id: "validation-and-fix",
3300
- description: "Validate the merged template code and fix any validation errors using a specialized agent",
3301
- inputSchema: zod.z.object({
3302
- commitSha: zod.z.string(),
3303
- slug: zod.z.string(),
3304
- targetPath: zod.z.string().optional(),
3305
- templateDir: zod.z.string(),
3306
- orderedUnits: zod.z.array(
3307
- zod.z.object({
3308
- kind: zod.z.string(),
3309
- id: zod.z.string(),
3310
- file: zod.z.string()
3311
- })
3312
- ),
3313
- copiedFiles: zod.z.array(
3314
- zod.z.object({
3315
- source: zod.z.string(),
3316
- destination: zod.z.string(),
3317
- unit: zod.z.object({
3318
- kind: zod.z.string(),
3319
- id: zod.z.string()
3320
- })
3321
- })
3322
- ),
3323
- conflictsResolved: zod.z.array(
3324
- zod.z.object({
3325
- unit: zod.z.object({
3326
- kind: zod.z.string(),
3327
- id: zod.z.string()
3328
- }),
3329
- issue: zod.z.string(),
3330
- resolution: zod.z.string()
3331
- })
3332
- ).optional(),
3333
- maxIterations: zod.z.number().optional().default(5)
3334
- }),
3335
- outputSchema: zod.z.object({
3336
- success: zod.z.boolean(),
3337
- applied: zod.z.boolean(),
3338
- message: zod.z.string(),
3339
- validationResults: zod.z.object({
3340
- valid: zod.z.boolean(),
3341
- errorsFixed: zod.z.number(),
3342
- remainingErrors: zod.z.number()
3343
- }),
3344
- error: zod.z.string().optional()
3345
- }),
3801
+ description: "Validate the merged template code and fix any issues using a specialized agent",
3802
+ inputSchema: ValidationFixInputSchema,
3803
+ outputSchema: ValidationFixResultSchema,
3346
3804
  execute: async ({ inputData, runtimeContext }) => {
3347
3805
  console.log("Validation and fix step starting...");
3348
3806
  const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
@@ -3377,26 +3835,84 @@ var validationAndFixStep = workflows.createStep({
3377
3835
  - ESLint issues
3378
3836
  - Import/export problems
3379
3837
  - Missing dependencies
3838
+ - Index file structure and exports
3839
+ - Component registration correctness
3840
+ - Naming convention compliance
3380
3841
 
3381
3842
  2. **Fix validation errors systematically**:
3382
3843
  - Use readFile to examine files with errors
3383
- - Use multiEdit to fix issues like missing imports, incorrect paths, syntax errors
3844
+ - Use multiEdit for simple search-replace fixes (single line changes)
3845
+ - Use replaceLines for complex multiline fixes (imports, function signatures, etc.)
3384
3846
  - Use listDirectory to understand project structure when fixing import paths
3385
3847
  - Update file contents to resolve TypeScript and linting issues
3386
3848
 
3387
- 3. **Re-validate after fixes** to ensure all issues are resolved
3388
-
3389
- 4. **Focus on template integration issues**:
3390
- - Files were copied with new names based on unit IDs
3391
- - Original template imports may reference old filenames
3392
- - Missing imports in index files
3393
- - Incorrect file paths in imports
3394
- - Type mismatches after integration
3395
- - Missing exports in barrel files
3849
+ 3. **Choose the right tool for the job**:
3850
+ - multiEdit: Simple replacements, single line changes, small fixes
3851
+ - replaceLines: Multiline imports, function signatures, complex code blocks
3852
+ - writeFile: ONLY for creating new files (never overwrite existing)
3853
+
3854
+ 4. **Create missing files ONLY when necessary**:
3855
+ - Use writeFile ONLY for creating NEW files that don't exist
3856
+ - NEVER overwrite existing files - use multiEdit or replaceLines instead
3857
+ - Common cases: missing barrel files (index.ts), missing config files, missing type definitions
3858
+ - Always check with readFile first to ensure file doesn't exist
3859
+
3860
+ 5. **Fix ALL template integration issues**:
3861
+ - Fix import path issues in copied files
3862
+ - Ensure TypeScript imports and exports are correct
3863
+ - Validate integration works properly
3864
+ - Fix files copied with new names based on unit IDs
3865
+ - Update original template imports that reference old filenames
3866
+ - Fix missing imports in index files
3867
+ - Fix incorrect file paths in imports
3868
+ - Fix type mismatches after integration
3869
+ - Fix missing exports in barrel files
3396
3870
  - Use the COPIED FILES mapping below to fix import paths
3871
+ - Fix any missing dependencies or module resolution issues
3872
+
3873
+ 6. **Validate index file structure**:
3874
+ - Correct imports for all components
3875
+ - Proper anchor structure (agents: {}, etc.)
3876
+ - No duplicate registrations
3877
+ - Correct export names and paths
3878
+ - Proper formatting and organization
3879
+
3880
+ 7. **Follow naming conventions**:
3881
+ Import paths:
3882
+ - camelCase: import { myAgent } from './myAgent'
3883
+ - snake_case: import { myAgent } from './my_agent'
3884
+ - kebab-case: import { myAgent } from './my-agent'
3885
+ - PascalCase: import { MyAgent } from './MyAgent'
3886
+
3887
+ File names:
3888
+ - camelCase: weatherAgent.ts, chatAgent.ts
3889
+ - snake_case: weather_agent.ts, chat_agent.ts
3890
+ - kebab-case: weather-agent.ts, chat-agent.ts
3891
+ - PascalCase: WeatherAgent.ts, ChatAgent.ts
3892
+
3893
+ Key Rule: Keep variable/export names unchanged, only adapt file names and import paths
3894
+
3895
+ 8. **Re-validate after fixes** to ensure all issues are resolved
3397
3896
 
3398
3897
  CRITICAL: Always validate the entire project first to get a complete picture of issues, then fix them systematically, and re-validate to confirm fixes worked.
3399
3898
 
3899
+ CRITICAL TOOL SELECTION GUIDE:
3900
+ - **multiEdit**: Use for simple string replacements, single-line changes
3901
+ Example: changing './oldPath' to './newPath'
3902
+
3903
+ - **replaceLines**: Use for multiline fixes, complex code structures
3904
+ Example: fixing multiline imports, function signatures, or code blocks
3905
+ Usage: replaceLines({ filePath: 'file.ts', startLine: 5, endLine: 8, newContent: 'new multiline content' })
3906
+
3907
+ - **writeFile**: ONLY for creating new files that don't exist
3908
+ Example: creating missing index.ts barrel files
3909
+
3910
+ CRITICAL WRITEFILE SAFETY RULES:

3911
+ - ONLY use writeFile for creating NEW files that don't exist
3912
+ - ALWAYS check with readFile first to verify file doesn't exist
3913
+ - NEVER use writeFile to overwrite existing files - use multiEdit or replaceLines instead
3914
+ - Common valid uses: missing index.ts barrel files, missing type definitions, missing config files
3915
+
3400
3916
  CRITICAL IMPORT PATH RESOLUTION:
3401
3917
  The following files were copied from template with new names:
3402
3918
  ${JSON.stringify(copiedFiles, null, 2)}
@@ -3417,11 +3933,13 @@ INTEGRATED UNITS:
3417
3933
  ${JSON.stringify(orderedUnits, null, 2)}
3418
3934
 
3419
3935
  Be thorough and methodical. Always use listDirectory to verify actual file existence before fixing imports.`,
3420
- model: openai.openai("gpt-4o-mini"),
3936
+ model: resolveModel(runtimeContext),
3421
3937
  tools: {
3422
3938
  validateCode: allTools.validateCode,
3423
3939
  readFile: allTools.readFile,
3940
+ writeFile: allTools.writeFile,
3424
3941
  multiEdit: allTools.multiEdit,
3942
+ replaceLines: allTools.replaceLines,
3425
3943
  listDirectory: allTools.listDirectory,
3426
3944
  executeCommand: allTools.executeCommand
3427
3945
  }
@@ -3482,10 +4000,12 @@ Previous iterations may have fixed some issues, so start by re-running validateC
3482
4000
  currentIteration++;
3483
4001
  }
3484
4002
  try {
3485
- await exec(
3486
- `git add . && git commit -m "fix(template): resolve validation errors for ${slug}@${commitSha.substring(0, 7)}" || true`,
4003
+ await gitAddAndCommit(
4004
+ targetPath,
4005
+ `fix(template): resolve validation errors for ${slug}@${commitSha.substring(0, 7)}`,
4006
+ void 0,
3487
4007
  {
3488
- cwd: targetPath
4008
+ skipIfNoStaged: true
3489
4009
  }
3490
4010
  );
3491
4011
  } catch (commitError) {
@@ -3524,10 +4044,10 @@ Previous iterations may have fixed some issues, so start by re-running validateC
3524
4044
  }
3525
4045
  }
3526
4046
  });
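The validation step loops: validate the project, hand the failures to the fixing agent, and stop after maxIterations passes. A schematic of that loop, with validateProject and runFixAgent standing in for the validateCode tool and the agent call (both are assumptions, not the package's API):

// Sketch of the bounded validate-and-fix loop.
async function validateAndFix({ validateProject, runFixAgent, maxIterations = 5 }) {
  let lastErrors = [];
  for (let i = 0; i < maxIterations; i++) {
    const result = await validateProject();
    if (result.valid) return { valid: true, remainingErrors: 0 };
    lastErrors = result.errors;
    await runFixAgent(lastErrors);                     // agent applies multiEdit / replaceLines fixes
  }
  return { valid: false, remainingErrors: lastErrors.length };
}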
3527
- var mergeTemplateWorkflow = workflows.createWorkflow({
3528
- id: "merge-template",
4047
+ var agentBuilderTemplateWorkflow = workflows.createWorkflow({
4048
+ id: "agent-builder-template",
3529
4049
  description: "Merges a Mastra template repository into the current project using intelligent AgentBuilder-powered merging",
3530
- inputSchema: MergeInputSchema,
4050
+ inputSchema: AgentBuilderInputSchema,
3531
4051
  outputSchema: ApplyResultSchema,
3532
4052
  steps: [
3533
4053
  cloneTemplateStep,
@@ -3535,15 +4055,36 @@ var mergeTemplateWorkflow = workflows.createWorkflow({
3535
4055
  discoverUnitsStep,
3536
4056
  orderUnitsStep,
3537
4057
  packageMergeStep,
3538
- flatInstallStep,
4058
+ installStep,
3539
4059
  programmaticFileCopyStep,
3540
4060
  intelligentMergeStep,
3541
4061
  validationAndFixStep
3542
4062
  ]
3543
- }).then(cloneTemplateStep).parallel([analyzePackageStep, discoverUnitsStep]).map(async ({ getStepResult }) => {
4063
+ }).then(cloneTemplateStep).map(async ({ getStepResult }) => {
4064
+ const cloneResult = getStepResult(cloneTemplateStep);
4065
+ if (shouldAbortWorkflow(cloneResult)) {
4066
+ throw new Error(`Critical failure in clone step: ${cloneResult.error}`);
4067
+ }
4068
+ return cloneResult;
4069
+ }).parallel([analyzePackageStep, discoverUnitsStep]).map(async ({ getStepResult }) => {
4070
+ const analyzeResult = getStepResult(analyzePackageStep);
3544
4071
  const discoverResult = getStepResult(discoverUnitsStep);
4072
+ if (shouldAbortWorkflow(analyzeResult)) {
4073
+ throw new Error(`Failure in analyze package step: ${analyzeResult.error || "Package analysis failed"}`);
4074
+ }
4075
+ if (shouldAbortWorkflow(discoverResult)) {
4076
+ throw new Error(`Failure in discover units step: ${discoverResult.error || "Unit discovery failed"}`);
4077
+ }
3545
4078
  return discoverResult;
3546
4079
  }).then(orderUnitsStep).map(async ({ getStepResult, getInitData }) => {
4080
+ const cloneResult = getStepResult(cloneTemplateStep);
4081
+ const initData = getInitData();
4082
+ return {
4083
+ commitSha: cloneResult.commitSha,
4084
+ slug: cloneResult.slug,
4085
+ targetPath: initData.targetPath
4086
+ };
4087
+ }).then(prepareBranchStep).map(async ({ getStepResult, getInitData }) => {
3547
4088
  const cloneResult = getStepResult(cloneTemplateStep);
3548
4089
  const packageResult = getStepResult(analyzePackageStep);
3549
4090
  const initData = getInitData();
@@ -3558,10 +4099,14 @@ var mergeTemplateWorkflow = workflows.createWorkflow({
3558
4099
  return {
3559
4100
  targetPath: initData.targetPath
3560
4101
  };
3561
- }).then(flatInstallStep).map(async ({ getStepResult, getInitData }) => {
4102
+ }).then(installStep).map(async ({ getStepResult, getInitData }) => {
3562
4103
  const cloneResult = getStepResult(cloneTemplateStep);
3563
4104
  const orderResult = getStepResult(orderUnitsStep);
4105
+ const installResult = getStepResult(installStep);
3564
4106
  const initData = getInitData();
4107
+ if (shouldAbortWorkflow(installResult)) {
4108
+ throw new Error(`Failure in install step: ${installResult.error || "Install failed"}`);
4109
+ }
3565
4110
  return {
3566
4111
  orderedUnits: orderResult.orderedUnits,
3567
4112
  templateDir: cloneResult.templateDir,
@@ -3596,15 +4141,31 @@ var mergeTemplateWorkflow = workflows.createWorkflow({
3596
4141
  copiedFiles: copyResult.copiedFiles,
3597
4142
  conflictsResolved: mergeResult.conflictsResolved
3598
4143
  };
3599
- }).then(validationAndFixStep).map(async ({ getStepResult, getInitData }) => {
3600
- const validationResult = getStepResult(validationAndFixStep);
3601
- const intelligentMergeResult = getStepResult(intelligentMergeStep);
3602
- const copyResult = getStepResult(programmaticFileCopyStep);
4144
+ }).then(validationAndFixStep).map(async ({ getStepResult }) => {
3603
4145
  const cloneResult = getStepResult(cloneTemplateStep);
3604
- const initData = getInitData();
3605
- const branchName = intelligentMergeResult.branchName || `feat/install-template-${cloneResult.slug || initData.slug}`;
3606
- const allErrors = [copyResult.error, intelligentMergeResult.error, validationResult.error].filter(Boolean);
3607
- const overallSuccess = copyResult.success !== false && intelligentMergeResult.success !== false && validationResult.success;
4146
+ const analyzeResult = getStepResult(analyzePackageStep);
4147
+ const discoverResult = getStepResult(discoverUnitsStep);
4148
+ const orderResult = getStepResult(orderUnitsStep);
4149
+ const prepareBranchResult = getStepResult(prepareBranchStep);
4150
+ const packageMergeResult = getStepResult(packageMergeStep);
4151
+ const installResult = getStepResult(installStep);
4152
+ const copyResult = getStepResult(programmaticFileCopyStep);
4153
+ const intelligentMergeResult = getStepResult(intelligentMergeStep);
4154
+ const validationResult = getStepResult(validationAndFixStep);
4155
+ const branchName = prepareBranchResult.branchName;
4156
+ const allErrors = [
4157
+ cloneResult.error,
4158
+ analyzeResult.error,
4159
+ discoverResult.error,
4160
+ orderResult.error,
4161
+ prepareBranchResult.error,
4162
+ packageMergeResult.error,
4163
+ installResult.error,
4164
+ copyResult.error,
4165
+ intelligentMergeResult.error,
4166
+ validationResult.error
4167
+ ].filter(Boolean);
4168
+ const overallSuccess = cloneResult.success !== false && analyzeResult.success !== false && discoverResult.success !== false && orderResult.success !== false && prepareBranchResult.success !== false && packageMergeResult.success !== false && installResult.success !== false && copyResult.success !== false && intelligentMergeResult.success !== false && validationResult.success !== false;
3608
4169
  const messages = [];
3609
4170
  if (copyResult.copiedFiles?.length > 0) {
3610
4171
  messages.push(`${copyResult.copiedFiles.length} files copied`);
@@ -3629,6 +4190,13 @@ var mergeTemplateWorkflow = workflows.createWorkflow({
3629
4190
  branchName,
3630
4191
  // Additional debugging info
3631
4192
  stepResults: {
4193
+ cloneSuccess: cloneResult.success,
4194
+ analyzeSuccess: analyzeResult.success,
4195
+ discoverSuccess: discoverResult.success,
4196
+ orderSuccess: orderResult.success,
4197
+ prepareBranchSuccess: prepareBranchResult.success,
4198
+ packageMergeSuccess: packageMergeResult.success,
4199
+ installSuccess: installResult.success,
3632
4200
  copySuccess: copyResult.success,
3633
4201
  mergeSuccess: intelligentMergeResult.success,
3634
4202
  validationSuccess: validationResult.success,
@@ -3638,15 +4206,29 @@ var mergeTemplateWorkflow = workflows.createWorkflow({
3638
4206
  }
3639
4207
  };
3640
4208
  }).commit();
4209
+ async function mergeTemplateBySlug(slug, targetPath) {
4210
+ const template = await getMastraTemplate(slug);
4211
+ const run = await agentBuilderTemplateWorkflow.createRunAsync();
4212
+ return await run.start({
4213
+ inputData: {
4214
+ repo: template.githubUrl,
4215
+ slug: template.slug,
4216
+ targetPath
4217
+ }
4218
+ });
4219
+ }
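The new mergeTemplateBySlug export wraps the whole workflow behind a template slug. A usage sketch, assuming "weather-agent" is a published template slug; the exact result shape follows the workflow's ApplyResultSchema:

// Usage sketch for the new export.
const { mergeTemplateBySlug } = require('@mastra/agent-builder');

mergeTemplateBySlug('weather-agent', process.cwd())
  .then((result) => console.log(JSON.stringify(result, null, 2)))
  .catch((err) => console.error('Template merge failed:', err));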
3641
4220
  var determineConflictStrategy = (_unit, _targetFile) => {
3642
4221
  return "skip";
3643
4222
  };
4223
+ var shouldAbortWorkflow = (stepResult) => {
4224
+ return stepResult?.success === false || stepResult?.error;
4225
+ };
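Note that shouldAbortWorkflow returns the error string (truthy) rather than a strict boolean, which is enough for the guards in the map callbacks above. A sketch of how such a guard is applied between steps:

// Sketch: throw to abort the workflow when a previous step reported failure.
function assertStepOk(stepName, stepResult) {
  if (stepResult?.success === false || stepResult?.error) {
    throw new Error(`Failure in ${stepName}: ${stepResult.error || 'step failed'}`);
  }
  return stepResult;
}

// assertStepOk('install step', { success: true });    // passes the result through
// assertStepOk('install step', { success: false });   // throws, aborting the workflow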
3644
4226
 
3645
- // src/index.ts
4227
+ // src/agent/index.ts
3646
4228
  var AgentBuilder = class extends agent.Agent {
3647
4229
  builderConfig;
3648
4230
  /**
3649
- * Private constructor - use AgentBuilder.create() instead
4231
+ * Constructor for AgentBuilder
3650
4232
  */
3651
4233
  constructor(config) {
3652
4234
  const additionalInstructions = config.instructions ? `## Priority Instructions
@@ -3665,7 +4247,7 @@ ${config.instructions}` : "";
3665
4247
  };
3666
4248
  },
3667
4249
  workflows: {
3668
- "merge-template": mergeTemplateWorkflow
4250
+ "merge-template": agentBuilderTemplateWorkflow
3669
4251
  },
3670
4252
  memory: new memory.Memory({
3671
4253
  options: AgentBuilderDefaults.DEFAULT_MEMORY_CONFIG,
@@ -3770,3 +4352,6 @@ ${!options?.outputFormat || options.outputFormat === "both" ? "Provide both expl
3770
4352
  };
3771
4353
 
3772
4354
  exports.AgentBuilder = AgentBuilder;
4355
+ exports.AgentBuilderDefaults = AgentBuilderDefaults;
4356
+ exports.agentBuilderTemplateWorkflow = agentBuilderTemplateWorkflow;
4357
+ exports.mergeTemplateBySlug = mergeTemplateBySlug;