@mastra/agent-builder 0.0.0-experimental-agent-builder-20250815195917 → 0.0.1-alpha.2
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -16
- package/README.md +4 -17
- package/dist/agent/index.d.ts +5885 -0
- package/dist/agent/index.d.ts.map +1 -0
- package/dist/defaults.d.ts +6529 -0
- package/dist/defaults.d.ts.map +1 -0
- package/dist/index.d.ts +4 -1
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2943 -591
- package/dist/index.js.map +1 -0
- package/dist/processors/tool-summary.d.ts +29 -0
- package/dist/processors/tool-summary.d.ts.map +1 -0
- package/dist/processors/write-file.d.ts +10 -0
- package/dist/processors/write-file.d.ts.map +1 -0
- package/dist/types.d.ts +1121 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils.d.ts +63 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/workflows/index.d.ts +5 -0
- package/dist/workflows/index.d.ts.map +1 -0
- package/dist/workflows/shared/schema.d.ts +139 -0
- package/dist/workflows/shared/schema.d.ts.map +1 -0
- package/dist/workflows/task-planning/prompts.d.ts +37 -0
- package/dist/workflows/task-planning/prompts.d.ts.map +1 -0
- package/dist/workflows/task-planning/schema.d.ts +548 -0
- package/dist/workflows/task-planning/schema.d.ts.map +1 -0
- package/dist/workflows/task-planning/task-planning.d.ts +992 -0
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -0
- package/dist/workflows/template-builder/template-builder.d.ts +1910 -0
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/prompts.d.ts +44 -0
- package/dist/workflows/workflow-builder/prompts.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/schema.d.ts +1170 -0
- package/dist/workflows/workflow-builder/schema.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/tools.d.ts +309 -0
- package/dist/workflows/workflow-builder/tools.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +2714 -0
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -0
- package/dist/workflows/workflow-map.d.ts +3735 -0
- package/dist/workflows/workflow-map.d.ts.map +1 -0
- package/package.json +21 -9
- package/dist/_tsup-dts-rollup.d.cts +0 -13109
- package/dist/_tsup-dts-rollup.d.ts +0 -13109
- package/dist/index.cjs +0 -3772
- package/dist/index.d.cts +0 -1
- package/eslint.config.js +0 -11
- package/integration-tests/CHANGELOG.md +0 -20
- package/integration-tests/README.md +0 -154
- package/integration-tests/docker-compose.yml +0 -39
- package/integration-tests/package.json +0 -38
- package/integration-tests/src/agent-template-behavior.test.ts +0 -103
- package/integration-tests/src/fixtures/minimal-mastra-project/env.example +0 -6
- package/integration-tests/src/fixtures/minimal-mastra-project/package.json +0 -17
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/agents/weather.ts +0 -34
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/index.ts +0 -15
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/mcp/index.ts +0 -46
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/tools/weather.ts +0 -13
- package/integration-tests/src/fixtures/minimal-mastra-project/tsconfig.json +0 -17
- package/integration-tests/src/template-integration.test.ts +0 -312
- package/integration-tests/tsconfig.json +0 -13
- package/integration-tests/vitest.config.ts +0 -17
- package/src/agent-builder.test.ts +0 -291
- package/src/defaults.ts +0 -2728
- package/src/index.ts +0 -187
- package/src/processors/tool-summary.ts +0 -136
- package/src/processors/write-file.ts +0 -17
- package/src/types.ts +0 -120
- package/src/utils.ts +0 -133
- package/src/workflows/index.ts +0 -1541
- package/tsconfig.json +0 -5
- package/vitest.config.ts +0 -11
package/src/workflows/index.ts
DELETED
@@ -1,1541 +0,0 @@
import { existsSync } from 'fs';
import { mkdtemp, copyFile, readFile, mkdir, readdir, rm } from 'fs/promises';
import { tmpdir } from 'os';
import { join, dirname, resolve, extname, basename } from 'path';
import { openai } from '@ai-sdk/openai';
import { Agent } from '@mastra/core/agent';
import { createTool } from '@mastra/core/tools';
import { createWorkflow, createStep } from '@mastra/core/workflows';
import { z } from 'zod';
import { AgentBuilder } from '..';
import { AgentBuilderDefaults } from '../defaults';
import type { TemplateUnit } from '../types';
import { ApplyResultSchema, MergeInputSchema, TemplateUnitSchema } from '../types';
import {
  exec,
  getMastraTemplate,
  kindWeight,
  spawnSWPM,
  logGitState,
  backupAndReplaceFile,
  renameAndCopyFile,
} from '../utils';

// Step 1: Clone template to temp directory
const cloneTemplateStep = createStep({
  id: 'clone-template',
  description: 'Clone the template repository to a temporary directory at the specified ref',
  inputSchema: MergeInputSchema,
  outputSchema: z.object({
    templateDir: z.string(),
    commitSha: z.string(),
    slug: z.string(),
  }),
  execute: async ({ inputData }) => {
    const { repo, ref = 'main', slug } = inputData;

    if (!repo) {
      throw new Error('Repository URL or path is required');
    }

    // Extract slug from repo URL if not provided
    const inferredSlug =
      slug ||
      repo
        .split('/')
        .pop()
        ?.replace(/\.git$/, '') ||
      'template';

    // Create temporary directory
    const tempDir = await mkdtemp(join(tmpdir(), 'mastra-template-'));

    try {
      // Clone repository
      const cloneCmd = `git clone "${repo}" "${tempDir}"`;
      await exec(cloneCmd);

      // Checkout specific ref if provided
      if (ref !== 'main' && ref !== 'master') {
        await exec(`git checkout "${ref}"`, { cwd: tempDir });
      }

      // Get commit SHA
      const { stdout: commitSha } = await exec('git rev-parse HEAD', { cwd: tempDir });

      return {
        templateDir: tempDir,
        commitSha: commitSha.trim(),
        slug: inferredSlug,
      };
    } catch (error) {
      // Cleanup on error
      try {
        await rm(tempDir, { recursive: true, force: true });
      } catch {}
      throw new Error(`Failed to clone template: ${error instanceof Error ? error.message : String(error)}`);
    }
  },
});

// Step 2: Analyze template package.json for dependencies
const analyzePackageStep = createStep({
  id: 'analyze-package',
  description: 'Analyze the template package.json to extract dependency information',
  inputSchema: z.object({
    templateDir: z.string(),
    commitSha: z.string(),
    slug: z.string(),
  }),
  outputSchema: z.object({
    dependencies: z.record(z.string()).optional(),
    devDependencies: z.record(z.string()).optional(),
    peerDependencies: z.record(z.string()).optional(),
    scripts: z.record(z.string()).optional(),
    packageInfo: z.object({
      name: z.string().optional(),
      version: z.string().optional(),
      description: z.string().optional(),
    }),
  }),
  execute: async ({ inputData }) => {
    console.log('Analyzing template package.json...');
    const { templateDir } = inputData;
    const packageJsonPath = join(templateDir, 'package.json');

    try {
      const packageJsonContent = await readFile(packageJsonPath, 'utf-8');
      const packageJson = JSON.parse(packageJsonContent);

      console.log('Template package.json:', JSON.stringify(packageJson, null, 2));

      return {
        dependencies: packageJson.dependencies || {},
        devDependencies: packageJson.devDependencies || {},
        peerDependencies: packageJson.peerDependencies || {},
        scripts: packageJson.scripts || {},
        packageInfo: {
          name: packageJson.name,
          version: packageJson.version,
          description: packageJson.description,
        },
      };
    } catch (error) {
      console.warn(`Failed to read template package.json: ${error instanceof Error ? error.message : String(error)}`);
      return {
        dependencies: {},
        devDependencies: {},
        peerDependencies: {},
        scripts: {},
        packageInfo: {},
      };
    }
  },
});

// Step 3: Discover template units by scanning the templates directory
const discoverUnitsStep = createStep({
  id: 'discover-units',
  description: 'Discover template units by analyzing the templates directory structure',
  inputSchema: z.object({
    templateDir: z.string(),
    commitSha: z.string(),
    slug: z.string(),
  }),
  outputSchema: z.object({
    units: z.array(TemplateUnitSchema),
  }),
  execute: async ({ inputData }) => {
    const { templateDir } = inputData;

    const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);

    const agent = new Agent({
      model: openai('gpt-4o-mini'),
      instructions: `You are an expert at analyzing Mastra projects.

Your task is to scan the provided directory and identify all available units (agents, workflows, tools, MCP servers, networks).

Mastra Project Structure Analysis:
- Each Mastra project has a structure like: ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.agent}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.workflow}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE['mcp-server']}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.network}
- Analyze TypeScript files in each category directory to identify exported units

CRITICAL: YOU MUST USE YOUR TOOLS (readFile, listDirectory) TO DISCOVER THE UNITS IN THE TEMPLATE DIRECTORY.

IMPORTANT - Agent Discovery Rules:
1. **Multiple Agent Files**: Some templates have separate files for each agent (e.g., evaluationAgent.ts, researchAgent.ts)
2. **Single File Multiple Agents**: Some files may export multiple agents (look for multiple 'export const' or 'export default' statements)
3. **Agent Identification**: Look for exported variables that are instances of 'new Agent()' or similar patterns
4. **Naming Convention**: Agent names should be extracted from the export name (e.g., 'weatherAgent', 'evaluationAgent')

For each Mastra project directory you analyze:
1. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.agent} and identify ALL exported agents
2. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.workflow} and identify ALL exported workflows
3. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool} and identify ALL exported tools
4. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE['mcp-server']} and identify ALL exported MCP servers
5. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.network} and identify ALL exported networks
6. Scan for any OTHER files in src/mastra that are NOT in the above default folders (e.g., lib/, utils/, types/, etc.) and identify them as 'other' files

IMPORTANT - Naming Consistency Rules:
- For ALL unit types (including 'other'), the 'name' field should be the filename WITHOUT extension
- For structured units (agents, workflows, tools, etc.), prefer the actual export name if clearly identifiable
- use the base filename without extension for the id (e.g., 'util.ts' → name: 'util')
- use the relative path from the template root for the file (e.g., 'src/mastra/lib/util.ts' → file: 'src/mastra/lib/util.ts')

Return the actual exported names of the units, as well as the file names.`,
      name: 'Mastra Project Discoverer',
      tools: {
        readFile: tools.readFile,
        listDirectory: tools.listDirectory,
      },
    });

    const result = await agent.generate(
      `Analyze the Mastra project directory structure at "${templateDir}".

List directory contents using listDirectory tool, and then analyze each file with readFile tool.
IMPORTANT:
- Look inside the actual file content to find export statements like 'export const agentName = new Agent(...)'
- A single file may contain multiple exports
- Return the actual exported variable names, as well as the file names
- If a directory doesn't exist or has no files, return an empty array

Return the analysis in the exact format specified in the output schema.`,
      {
        experimental_output: z.object({
          agents: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
          workflows: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
          tools: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
          mcp: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
          networks: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
          other: z.array(z.object({ name: z.string(), file: z.string() })).optional(),
        }),
        maxSteps: 100,
      },
    );

    const template = result.object ?? {};

    const units: TemplateUnit[] = [];

    // Add agents
    template.agents?.forEach((agentId: { name: string; file: string }) => {
      units.push({ kind: 'agent', id: agentId.name, file: agentId.file });
    });

    // Add workflows
    template.workflows?.forEach((workflowId: { name: string; file: string }) => {
      units.push({ kind: 'workflow', id: workflowId.name, file: workflowId.file });
    });

    // Add tools
    template.tools?.forEach((toolId: { name: string; file: string }) => {
      units.push({ kind: 'tool', id: toolId.name, file: toolId.file });
    });

    // Add MCP servers
    template.mcp?.forEach((mcpId: { name: string; file: string }) => {
      units.push({ kind: 'mcp-server', id: mcpId.name, file: mcpId.file });
    });

    // Add networks
    template.networks?.forEach((networkId: { name: string; file: string }) => {
      units.push({ kind: 'network', id: networkId.name, file: networkId.file });
    });

    // Add other files
    template.other?.forEach((otherId: { name: string; file: string }) => {
      units.push({ kind: 'other', id: otherId.name, file: otherId.file });
    });

    console.log('Discovered units:', JSON.stringify(units, null, 2));

    return { units };
  },
});

// Step 4: Topological ordering (simplified)
const orderUnitsStep = createStep({
  id: 'order-units',
  description: 'Sort units in topological order based on kind weights',
  inputSchema: z.object({
    units: z.array(TemplateUnitSchema),
  }),
  outputSchema: z.object({
    orderedUnits: z.array(TemplateUnitSchema),
  }),
  execute: async ({ inputData }) => {
    const { units } = inputData;

    // Simple sort by kind weight (mcp-servers first, then tools, agents, workflows, integration last)
    const orderedUnits = [...units].sort((a, b) => {
      const aWeight = kindWeight(a.kind);
      const bWeight = kindWeight(b.kind);
      return aWeight - bWeight;
    });

    return { orderedUnits };
  },
});

// Step 5: Package merge
const packageMergeStep = createStep({
  id: 'package-merge',
  description: 'Merge template package.json dependencies into target project and install',
  inputSchema: z.object({
    commitSha: z.string(),
    slug: z.string(),
    targetPath: z.string().optional(),
    packageInfo: z.object({
      dependencies: z.record(z.string()).optional(),
      devDependencies: z.record(z.string()).optional(),
      peerDependencies: z.record(z.string()).optional(),
      scripts: z.record(z.string()).optional(),
      packageInfo: z.object({
        name: z.string().optional(),
        version: z.string().optional(),
        description: z.string().optional(),
      }),
    }),
  }),
  outputSchema: z.object({
    success: z.boolean(),
    applied: z.boolean(),
    message: z.string(),
    error: z.string().optional(),
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log('Package merge step starting...');
    const { slug, packageInfo } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get('targetPath') || process.cwd();

    try {
      const allTools = await AgentBuilderDefaults.DEFAULT_TOOLS(targetPath);

      const packageMergeAgent = new Agent({
        name: 'package-merger',
        description: 'Specialized agent for merging package.json dependencies',
        instructions: `You are a package.json merge specialist. Your job is to:

1. **Read the target project's package.json** using readFile tool
2. **Merge template dependencies** into the target package.json following these rules:
- For dependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For devDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For peerDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For scripts: Add new scripts with "template:${slug}:" prefix, don't overwrite existing ones
- Maintain existing package.json structure and formatting
3. **Write the updated package.json** using writeFile tool

Template Dependencies to Merge:
- Dependencies: ${JSON.stringify(packageInfo.dependencies || {}, null, 2)}
- Dev Dependencies: ${JSON.stringify(packageInfo.devDependencies || {}, null, 2)}
- Peer Dependencies: ${JSON.stringify(packageInfo.peerDependencies || {}, null, 2)}
- Scripts: ${JSON.stringify(packageInfo.scripts || {}, null, 2)}

CRITICAL MERGE RULES:
1. For each dependency in template dependencies, if it does NOT exist in target, ADD it with template version
2. For each dependency in template dependencies, if it ALREADY exists in target, KEEP target version
3. You MUST add ALL template dependencies that don't conflict - do not skip any
4. Be explicit about what you're adding vs keeping

EXAMPLE:
Template has: {"@mastra/libsql": "latest", "@mastra/core": "latest", "zod": "^3.25.67"}
Target has: {"@mastra/core": "latest", "zod": "^3.25.0"}
Result should have: {"@mastra/core": "latest", "zod": "^3.25.0", "@mastra/libsql": "latest"}

Be systematic and thorough. Always read the existing package.json first, then merge, then write.`,
        model: openai('gpt-4o-mini'),
        tools: {
          readFile: allTools.readFile,
          writeFile: allTools.writeFile,
          listDirectory: allTools.listDirectory,
        },
      });

      console.log('Starting package merge agent...');
      console.log('Template dependencies to merge:', JSON.stringify(packageInfo.dependencies, null, 2));
      console.log('Template devDependencies to merge:', JSON.stringify(packageInfo.devDependencies, null, 2));

      const result = await packageMergeAgent.stream(
        `Please merge the template dependencies into the target project's package.json at ${targetPath}/package.json.`,
        { experimental_output: z.object({ success: z.boolean() }) },
      );

      let buffer: string[] = [];
      for await (const chunk of result.fullStream) {
        if (chunk.type === 'text-delta') {
          buffer.push(chunk.textDelta);
          if (buffer.length > 20) {
            console.log(buffer.join(''));
            buffer = [];
          }
        }
      }

      if (buffer.length > 0) {
        console.log(buffer.join(''));
      }

      return {
        success: true,
        applied: true,
        message: `Successfully merged template dependencies and installed packages for ${slug}`,
      };
    } catch (error) {
      console.error('Package merge failed:', error);
      return {
        success: false,
        applied: false,
        message: `Package merge failed: ${error instanceof Error ? error.message : String(error)}`,
        error: error instanceof Error ? error.message : String(error),
      };
    }
  },
});

// Step 6: Flat install
const flatInstallStep = createStep({
  id: 'flat-install',
  description: 'Run a flat install command without specifying packages',
  inputSchema: z.object({
    targetPath: z.string().describe('Path to the project to install packages in'),
  }),
  outputSchema: z.object({
    success: z.boolean(),
    message: z.string(),
    details: z.string().optional(),
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log('Running flat install...');
    const targetPath = inputData.targetPath || runtimeContext.get('targetPath') || process.cwd();

    try {
      // Run flat install using swpm (no specific packages)
      await spawnSWPM(targetPath, 'install', []);

      return {
        success: true,
        message: 'Successfully ran flat install command',
        details: 'Installed all dependencies from package.json',
      };
    } catch (error) {
      console.error('Flat install failed:', error);
      return {
        success: false,
        message: `Flat install failed: ${error instanceof Error ? error.message : String(error)}`,
      };
    }
  },
});

// Step 7: Programmatic File Copy Step - copies template files to target project
const programmaticFileCopyStep = createStep({
  id: 'programmatic-file-copy',
  description: 'Programmatically copy template files to target project based on ordered units',
  inputSchema: z.object({
    orderedUnits: z.array(
      z.object({
        kind: z.string(),
        id: z.string(),
        file: z.string(),
      }),
    ),
    templateDir: z.string(),
    commitSha: z.string(),
    slug: z.string(),
    targetPath: z.string().optional(),
  }),
  outputSchema: z.object({
    success: z.boolean(),
    copiedFiles: z.array(
      z.object({
        source: z.string(),
        destination: z.string(),
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
      }),
    ),
    conflicts: z.array(
      z.object({
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
        issue: z.string(),
        sourceFile: z.string(),
        targetFile: z.string(),
      }),
    ),
    message: z.string(),
    error: z.string().optional(),
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log('Programmatic file copy step starting...');
    const { orderedUnits, templateDir, commitSha, slug } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get('targetPath') || process.cwd();

    try {
      const copiedFiles: Array<{
        source: string;
        destination: string;
        unit: { kind: string; id: string };
      }> = [];

      const conflicts: Array<{
        unit: { kind: string; id: string };
        issue: string;
        sourceFile: string;
        targetFile: string;
      }> = [];

      // Analyze target project naming convention first
      const analyzeNamingConvention = async (
        directory: string,
      ): Promise<'camelCase' | 'snake_case' | 'kebab-case' | 'PascalCase' | 'unknown'> => {
        try {
          const files = await readdir(resolve(targetPath, directory), { withFileTypes: true });
          const tsFiles = files.filter(f => f.isFile() && f.name.endsWith('.ts')).map(f => f.name);

          if (tsFiles.length === 0) return 'unknown';

          // Check for patterns
          const camelCaseCount = tsFiles.filter(f => /^[a-z][a-zA-Z0-9]*\.ts$/.test(f)).length;
          const snakeCaseCount = tsFiles.filter(f => /^[a-z][a-z0-9_]*\.ts$/.test(f) && f.includes('_')).length;
          const kebabCaseCount = tsFiles.filter(f => /^[a-z][a-z0-9-]*\.ts$/.test(f) && f.includes('-')).length;
          const pascalCaseCount = tsFiles.filter(f => /^[A-Z][a-zA-Z0-9]*\.ts$/.test(f)).length;

          const max = Math.max(camelCaseCount, snakeCaseCount, kebabCaseCount, pascalCaseCount);
          if (max === 0) return 'unknown';

          if (camelCaseCount === max) return 'camelCase';
          if (snakeCaseCount === max) return 'snake_case';
          if (kebabCaseCount === max) return 'kebab-case';
          if (pascalCaseCount === max) return 'PascalCase';

          return 'unknown';
        } catch {
          return 'unknown';
        }
      };

      // Convert naming based on convention
      const convertNaming = (name: string, convention: string): string => {
        const baseName = basename(name, extname(name));
        const ext = extname(name);

        switch (convention) {
          case 'camelCase':
            return (
              baseName
                .replace(/[-_]/g, '')
                .replace(/([A-Z])/g, (match, p1, offset) => (offset === 0 ? p1.toLowerCase() : p1)) + ext
            );
          case 'snake_case':
            return (
              baseName
                .replace(/[-]/g, '_')
                .replace(/([A-Z])/g, (match, p1, offset) => (offset === 0 ? '' : '_') + p1.toLowerCase()) + ext
            );
          case 'kebab-case':
            return (
              baseName
                .replace(/[_]/g, '-')
                .replace(/([A-Z])/g, (match, p1, offset) => (offset === 0 ? '' : '-') + p1.toLowerCase()) + ext
            );
          case 'PascalCase':
            return baseName.replace(/[-_]/g, '').replace(/^[a-z]/, match => match.toUpperCase()) + ext;
          default:
            return name;
        }
      };

      // Process each unit
      for (const unit of orderedUnits) {
        console.log(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);

        // Resolve source file path with fallback logic
        let sourceFile: string;
        let resolvedUnitFile: string;

        // Check if unit.file already contains directory structure
        if (unit.file.includes('/')) {
          // unit.file has path structure (e.g., "src/mastra/agents/weatherAgent.ts")
          sourceFile = resolve(templateDir, unit.file);
          resolvedUnitFile = unit.file;
        } else {
          // unit.file is just filename (e.g., "weatherAgent.ts") - use fallback
          const folderPath =
            AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE[
              unit.kind as keyof typeof AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE
            ];
          if (!folderPath) {
            conflicts.push({
              unit: { kind: unit.kind, id: unit.id },
              issue: `Unknown unit kind: ${unit.kind}`,
              sourceFile: unit.file,
              targetFile: 'N/A',
            });
            continue;
          }
          resolvedUnitFile = `${folderPath}/${unit.file}`;
          sourceFile = resolve(templateDir, resolvedUnitFile);
        }

        // Check if source file exists
        if (!existsSync(sourceFile)) {
          conflicts.push({
            unit: { kind: unit.kind, id: unit.id },
            issue: `Source file not found: ${sourceFile}`,
            sourceFile: resolvedUnitFile,
            targetFile: 'N/A',
          });
          continue;
        }

        // Extract target directory from resolved unit file path
        const targetDir = dirname(resolvedUnitFile);

        // Analyze target naming convention
        const namingConvention = await analyzeNamingConvention(targetDir);
        console.log(`Detected naming convention in ${targetDir}: ${namingConvention}`);

        // Convert unit.id to target filename with proper extension
        // Note: Check if unit.id already includes extension to avoid double extensions
        const hasExtension = extname(unit.id) !== '';
        const baseId = hasExtension ? basename(unit.id, extname(unit.id)) : unit.id;
        const fileExtension = extname(unit.file);
        const convertedFileName =
          namingConvention !== 'unknown'
            ? convertNaming(baseId + fileExtension, namingConvention)
            : baseId + fileExtension;

        const targetFile = resolve(targetPath, targetDir, convertedFileName);

        // Handle file conflicts with strategy-based resolution
        if (existsSync(targetFile)) {
          const strategy = determineConflictStrategy(unit, targetFile);
          console.log(`File exists: ${convertedFileName}, using strategy: ${strategy}`);

          switch (strategy) {
            case 'skip':
              conflicts.push({
                unit: { kind: unit.kind, id: unit.id },
                issue: `File exists - skipped: ${convertedFileName}`,
                sourceFile: unit.file,
                targetFile: `${targetDir}/${convertedFileName}`,
              });
              console.log(`⏭️ Skipped ${unit.kind} "${unit.id}": file already exists`);
              continue;

            case 'backup-and-replace':
              try {
                await backupAndReplaceFile(sourceFile, targetFile);
                copiedFiles.push({
                  source: sourceFile,
                  destination: targetFile,
                  unit: { kind: unit.kind, id: unit.id },
                });
                console.log(
                  `🔄 Replaced ${unit.kind} "${unit.id}": ${unit.file} → ${convertedFileName} (backup created)`,
                );
                continue;
              } catch (backupError) {
                conflicts.push({
                  unit: { kind: unit.kind, id: unit.id },
                  issue: `Failed to backup and replace: ${backupError instanceof Error ? backupError.message : String(backupError)}`,
                  sourceFile: unit.file,
                  targetFile: `${targetDir}/${convertedFileName}`,
                });
                continue;
              }

            case 'rename':
              try {
                const uniqueTargetFile = await renameAndCopyFile(sourceFile, targetFile);
                copiedFiles.push({
                  source: sourceFile,
                  destination: uniqueTargetFile,
                  unit: { kind: unit.kind, id: unit.id },
                });
                console.log(`📝 Renamed ${unit.kind} "${unit.id}": ${unit.file} → ${basename(uniqueTargetFile)}`);
                continue;
              } catch (renameError) {
                conflicts.push({
                  unit: { kind: unit.kind, id: unit.id },
                  issue: `Failed to rename and copy: ${renameError instanceof Error ? renameError.message : String(renameError)}`,
                  sourceFile: unit.file,
                  targetFile: `${targetDir}/${convertedFileName}`,
                });
                continue;
              }

            default:
              conflicts.push({
                unit: { kind: unit.kind, id: unit.id },
                issue: `Unknown conflict strategy: ${strategy}`,
                sourceFile: unit.file,
                targetFile: `${targetDir}/${convertedFileName}`,
              });
              continue;
          }
        }

        // Ensure target directory exists
        await mkdir(dirname(targetFile), { recursive: true });

        // Copy the file
        try {
          await copyFile(sourceFile, targetFile);
          copiedFiles.push({
            source: sourceFile,
            destination: targetFile,
            unit: { kind: unit.kind, id: unit.id },
          });
          console.log(`✓ Copied ${unit.kind} "${unit.id}": ${unit.file} → ${convertedFileName}`);
        } catch (copyError) {
          conflicts.push({
            unit: { kind: unit.kind, id: unit.id },
            issue: `Failed to copy file: ${copyError instanceof Error ? copyError.message : String(copyError)}`,
            sourceFile: unit.file,
            targetFile: `${targetDir}/${convertedFileName}`,
          });
        }
      }

      // Commit the copied files
      if (copiedFiles.length > 0) {
        try {
          const fileList = copiedFiles.map(f => f.destination);
          const gitCommand = ['git', 'add', ...fileList];
          await exec(gitCommand.join(' '), { cwd: targetPath });
          await exec(
            `git commit -m "feat(template): copy ${copiedFiles.length} files from ${slug}@${commitSha.substring(0, 7)}"`,
            { cwd: targetPath },
          );
          console.log(`✓ Committed ${copiedFiles.length} copied files`);
        } catch (commitError) {
          console.warn('Failed to commit copied files:', commitError);
        }
      }

      const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
      console.log(message);

      return {
        success: true,
        copiedFiles,
        conflicts,
        message,
      };
    } catch (error) {
      console.error('Programmatic file copy failed:', error);
      throw new Error(`Programmatic file copy failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  },
});

// Step 8: Intelligent merging with AgentBuilder
const intelligentMergeStep = createStep({
  id: 'intelligent-merge',
  description: 'Use AgentBuilder to intelligently merge template files',
  inputSchema: z.object({
    conflicts: z.array(
      z.object({
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
        issue: z.string(),
        sourceFile: z.string(),
        targetFile: z.string(),
      }),
    ),
    copiedFiles: z.array(
      z.object({
        source: z.string(),
        destination: z.string(),
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
      }),
    ),
    templateDir: z.string(),
    commitSha: z.string(),
    slug: z.string(),
    targetPath: z.string().optional(),
  }),
  outputSchema: z.object({
    success: z.boolean(),
    applied: z.boolean(),
    message: z.string(),
    conflictsResolved: z.array(
      z.object({
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
        issue: z.string(),
        resolution: z.string(),
      }),
    ),
    error: z.string().optional(),
    branchName: z.string().optional(),
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log('Intelligent merge step starting...');
    const { conflicts, copiedFiles, commitSha, slug, templateDir } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get('targetPath') || process.cwd();

    const baseBranchName = `feat/install-template-${slug}`;
    try {
      // Create or switch to git branch for template integration
      let branchName = baseBranchName;

      try {
        // Try to create new branch
        await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
        console.log(`Created new branch: ${branchName}`);
      } catch (error) {
        // If branch exists, check if we can switch to it or create a unique name
        const errorStr = error instanceof Error ? error.message : String(error);
        if (errorStr.includes('already exists')) {
          try {
            // Try to switch to existing branch
            await exec(`git checkout "${branchName}"`, { cwd: targetPath });
            console.log(`Switched to existing branch: ${branchName}`);
          } catch {
            // If can't switch, create a unique branch name
            const timestamp = Date.now().toString().slice(-6);
            branchName = `${baseBranchName}-${timestamp}`;
            await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
            console.log(`Created unique branch: ${branchName}`);
          }
        } else {
          throw error; // Re-throw if it's a different error
        }
      }

      // Create copyFile tool for edge cases
      const copyFileTool = createTool({
        id: 'copy-file',
        description:
          'Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).',
        inputSchema: z.object({
          sourcePath: z.string().describe('Path to the source file relative to template directory'),
          destinationPath: z.string().describe('Path to the destination file relative to target project'),
        }),
        outputSchema: z.object({
          success: z.boolean(),
          message: z.string(),
          error: z.string().optional(),
        }),
        execute: async ({ context }) => {
          try {
            const { sourcePath, destinationPath } = context;

            // Use templateDir directly from input
            const resolvedSourcePath = resolve(templateDir, sourcePath);
            const resolvedDestinationPath = resolve(targetPath, destinationPath);

            if (existsSync(resolvedSourcePath) && !existsSync(dirname(resolvedDestinationPath))) {
              await mkdir(dirname(resolvedDestinationPath), { recursive: true });
            }

            await copyFile(resolvedSourcePath, resolvedDestinationPath);
            return {
              success: true,
              message: `Successfully copied file from ${sourcePath} to ${destinationPath}`,
            };
          } catch (error) {
            return {
              success: false,
              message: `Failed to copy file: ${error instanceof Error ? error.message : String(error)}`,
              error: error instanceof Error ? error.message : String(error),
            };
          }
        },
      });

      // Initialize AgentBuilder for merge and registration
      const agentBuilder = new AgentBuilder({
        projectPath: targetPath,
        mode: 'template',
        model: openai('gpt-4o-mini'),
        instructions: `
You are an expert at integrating Mastra template components into existing projects.

CRITICAL CONTEXT:
- Files have been programmatically copied from template to target project
- Your job is to handle integration issues, registration, and validation

FILES SUCCESSFULLY COPIED:
${JSON.stringify(copiedFiles, null, 2)}

CONFLICTS TO RESOLVE:
${JSON.stringify(conflicts, null, 2)}

CRITICAL INSTRUCTIONS:
1. **When committing changes**: NEVER add dependency/build directories. Use specific file paths with 'git add'
2. **Package management**: NO need to install packages (already handled by package merge step)
3. **Validation**: When validation fails due to import issues, check existing files and imports for correct naming conventions
4. **Variable vs file names**: A variable name might differ from file name (e.g., filename: ./downloaderTool.ts, export const fetcherTool(...))
5. **File copying**: Most files are already copied programmatically. Only use copyFile tool for edge cases where additional files are needed

KEY RESPONSIBILITIES:
1. Resolve any conflicts from the programmatic copy step
2. Register components in existing Mastra index file (agents, workflows, networks, mcp-servers)
3. DO NOT register tools in existing Mastra index file - tools should remain standalone
4. Fix import path issues in copied files
5. Ensure TypeScript imports and exports are correct
6. Validate integration works properly
7. Copy additional files ONLY if needed for conflict resolution or missing dependencies

MASTRA-SPECIFIC INTEGRATION:
- Agents: Register in existing Mastra index file
- Workflows: Register in existing Mastra index file
- Networks: Register in existing Mastra index file
- MCP servers: Register in existing Mastra index file
- Tools: Copy to ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool} but DO NOT register in existing Mastra index file

EDGE CASE FILE COPYING:
- IF a file for a resource does not exist in the target project AND was not programmatically copied, you can use copyFile tool
- When taking files from template, ensure you get the right file name and path
- Only copy files that are actually needed for the integration to work

NAMING CONVENTION GUIDANCE:
When fixing imports or understanding naming patterns, use these examples:

**Import Path Patterns:**
- camelCase files: import { myAgent } from './myAgent'
- snake_case files: import { myAgent } from './my_agent'
- kebab-case files: import { myAgent } from './my-agent'
- PascalCase files: import { MyAgent } from './MyAgent'

**Naming Detection Examples:**
- Files like "weatherAgent.ts", "chatAgent.ts" → use camelCase
- Files like "weather_agent.ts", "chat_agent.ts" → use snake_case
- Files like "weather-agent.ts", "chat-agent.ts" → use kebab-case
- Files like "WeatherAgent.ts", "ChatAgent.ts" → use PascalCase

**Key Rule:** Keep variable/export names unchanged - only adapt file names and import paths

Template information:
- Slug: ${slug}
- Commit: ${commitSha.substring(0, 7)}
- Branch: ${branchName}
`,
        tools: {
          copyFile: copyFileTool,
        },
      });

      // Create task list for systematic processing
      const tasks = [];

      // Add conflict resolution tasks
      conflicts.forEach(conflict => {
        tasks.push({
          id: `conflict-${conflict.unit.kind}-${conflict.unit.id}`,
          content: `Resolve conflict: ${conflict.issue}`,
          status: 'pending' as const,
          priority: 'high' as const,
          notes: `Unit: ${conflict.unit.kind}:${conflict.unit.id}, Issue: ${conflict.issue}, Source: ${conflict.sourceFile}, Target: ${conflict.targetFile}`,
        });
      });

      // Add registration tasks for successfully copied files
      const nonToolFiles = copiedFiles.filter(f => f.unit.kind !== 'tool');
      if (nonToolFiles.length > 0) {
        tasks.push({
          id: 'register-components',
          content: `Register ${nonToolFiles.length} components in existing Mastra index file (src/mastra/index.ts)`,
          status: 'pending' as const,
          priority: 'medium' as const,
          dependencies: conflicts.length > 0 ? conflicts.map(c => `conflict-${c.unit.kind}-${c.unit.id}`) : undefined,
          notes: `Components to register: ${nonToolFiles.map(f => `${f.unit.kind}:${f.unit.id}`).join(', ')}`,
        });
      }

      // Note: Validation is handled by the dedicated validation step, not here

      console.log(`Creating task list with ${tasks.length} tasks...`);
      await AgentBuilderDefaults.manageTaskList({ action: 'create', tasks });

      // Log git state before merge operations
      await logGitState(targetPath, 'before intelligent merge');

      // Process tasks systematically
      const result = await agentBuilder.stream(`
You need to work through a task list to complete the template integration.

CRITICAL INSTRUCTIONS:

**STEP 1: GET YOUR TASK LIST**
1. Use manageTaskList tool with action "list" to see all pending tasks
2. Work through tasks in dependency order (complete dependencies first)

**STEP 2: PROCESS EACH TASK SYSTEMATICALLY**
For each task:
1. Use manageTaskList to mark the current task as 'in_progress'
2. Complete the task according to its requirements
3. Use manageTaskList to mark the task as 'completed' when done
4. Continue until all tasks are completed

**TASK TYPES AND REQUIREMENTS:**

**Conflict Resolution Tasks:**
- Analyze the specific conflict and determine best resolution strategy
- For file name conflicts: merge content or rename appropriately
- For missing files: investigate and copy if needed
- For other issues: apply appropriate fixes

**Component Registration Task:**
- Update main Mastra instance file to register new components
- Only register: agents, workflows, networks, mcp-servers
- DO NOT register tools in main config
- Ensure proper import paths and naming conventions

**COMMIT STRATEGY:**
- After resolving conflicts: "feat(template): resolve conflicts for ${slug}@${commitSha.substring(0, 7)}"
- After registration: "feat(template): register components from ${slug}@${commitSha.substring(0, 7)}"

**CRITICAL NOTES:**
- Template source: ${templateDir}
- Target project: ${targetPath}
- Focus ONLY on conflict resolution and component registration
- Use executeCommand for git commits after each task
- DO NOT perform validation - that's handled by the dedicated validation step

Start by listing your tasks and work through them systematically!
`);

      // Extract actual conflict resolution details from agent execution
      const actualResolutions: Array<{
        taskId: string;
        action: string;
        status: string;
        content: string;
        notes?: string;
      }> = [];

      for await (const chunk of result.fullStream) {
        if (chunk.type === 'step-finish' || chunk.type === 'step-start') {
          console.log({
            type: chunk.type,
            msgId: chunk.messageId,
          });
        } else {
          console.log(JSON.stringify(chunk, null, 2));

          // Extract task management tool results
          if (chunk.type === 'tool-result' && chunk.toolName === 'manageTaskList') {
            try {
              const toolResult = chunk.result;
              if (toolResult.action === 'update' && toolResult.status === 'completed') {
                actualResolutions.push({
                  taskId: toolResult.taskId || '',
                  action: toolResult.action,
                  status: toolResult.status,
                  content: toolResult.content || '',
                  notes: toolResult.notes,
                });
                console.log(`📋 Task completed: ${toolResult.taskId} - ${toolResult.content}`);
              }
            } catch (parseError) {
              console.warn('Failed to parse task management result:', parseError);
            }
          }
        }
      }

      // Log git state after merge operations
      await logGitState(targetPath, 'after intelligent merge');

      // Map actual resolutions back to conflicts
      const conflictResolutions = conflicts.map(conflict => {
        const taskId = `conflict-${conflict.unit.kind}-${conflict.unit.id}`;
        const actualResolution = actualResolutions.find(r => r.taskId === taskId);

        if (actualResolution) {
          return {
            unit: conflict.unit,
            issue: conflict.issue,
            resolution:
              actualResolution.notes ||
              actualResolution.content ||
              `Completed: ${conflict.unit.kind} ${conflict.unit.id}`,
            actualWork: true,
          };
        } else {
          return {
            unit: conflict.unit,
            issue: conflict.issue,
            resolution: `No specific resolution found for ${conflict.unit.kind} ${conflict.unit.id}`,
            actualWork: false,
          };
        }
      });

      return {
        success: true,
        applied: true,
        branchName,
        message: `Successfully resolved ${conflicts.length} conflicts from template ${slug}`,
        conflictsResolved: conflictResolutions,
      };
    } catch (error) {
      return {
        success: false,
        applied: false,
        branchName: baseBranchName,
        message: `Failed to resolve conflicts: ${error instanceof Error ? error.message : String(error)}`,
        conflictsResolved: [],
        error: error instanceof Error ? error.message : String(error),
      };
    }
  },
});

// Step 9: Validation and Fix Step - validates merged code and fixes any issues
const validationAndFixStep = createStep({
  id: 'validation-and-fix',
  description: 'Validate the merged template code and fix any validation errors using a specialized agent',
  inputSchema: z.object({
    commitSha: z.string(),
    slug: z.string(),
    targetPath: z.string().optional(),
    templateDir: z.string(),
    orderedUnits: z.array(
      z.object({
        kind: z.string(),
        id: z.string(),
        file: z.string(),
      }),
    ),
    copiedFiles: z.array(
      z.object({
        source: z.string(),
        destination: z.string(),
        unit: z.object({
          kind: z.string(),
          id: z.string(),
        }),
      }),
    ),
    conflictsResolved: z
      .array(
        z.object({
          unit: z.object({
            kind: z.string(),
            id: z.string(),
          }),
          issue: z.string(),
          resolution: z.string(),
        }),
      )
      .optional(),
    maxIterations: z.number().optional().default(5),
  }),
  outputSchema: z.object({
    success: z.boolean(),
    applied: z.boolean(),
    message: z.string(),
    validationResults: z.object({
      valid: z.boolean(),
      errorsFixed: z.number(),
      remainingErrors: z.number(),
    }),
    error: z.string().optional(),
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log('Validation and fix step starting...');
    const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get('targetPath') || process.cwd();

    // Skip validation if no changes were made
    const hasChanges = copiedFiles.length > 0 || (conflictsResolved && conflictsResolved.length > 0);
    if (!hasChanges) {
      console.log('⏭️ Skipping validation - no files copied or conflicts resolved');
      return {
        success: true,
        applied: false,
        message: 'No changes to validate - template already integrated or no conflicts resolved',
        validationResults: {
          valid: true,
          errorsFixed: 0,
          remainingErrors: 0,
        },
      };
    }

    console.log(
      `📋 Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`,
    );

    let currentIteration = 1; // Declare at function scope for error handling

    try {
      const allTools = await AgentBuilderDefaults.DEFAULT_TOOLS(targetPath, 'template');

      const validationAgent = new Agent({
        name: 'code-validator-fixer',
        description: 'Specialized agent for validating and fixing template integration issues',
        instructions: `You are a code validation and fixing specialist. Your job is to:

1. **Run comprehensive validation** using the validateCode tool to check for:
- TypeScript compilation errors
- ESLint issues
- Import/export problems
- Missing dependencies

2. **Fix validation errors systematically**:
- Use readFile to examine files with errors
- Use multiEdit to fix issues like missing imports, incorrect paths, syntax errors
- Use listDirectory to understand project structure when fixing import paths
- Update file contents to resolve TypeScript and linting issues

3. **Re-validate after fixes** to ensure all issues are resolved

4. **Focus on template integration issues**:
- Files were copied with new names based on unit IDs
- Original template imports may reference old filenames
- Missing imports in index files
- Incorrect file paths in imports
- Type mismatches after integration
- Missing exports in barrel files
- Use the COPIED FILES mapping below to fix import paths

CRITICAL: Always validate the entire project first to get a complete picture of issues, then fix them systematically, and re-validate to confirm fixes worked.

CRITICAL IMPORT PATH RESOLUTION:
The following files were copied from template with new names:
${JSON.stringify(copiedFiles, null, 2)}

When fixing import errors:
1. Check if the missing module corresponds to a copied file
2. Use listDirectory to verify actual filenames in target directories
3. Update import paths to match the actual copied filenames
4. Ensure exported variable names match what's being imported

EXAMPLE: If error shows "Cannot find module './tools/download-csv-tool'" but a file was copied as "csv-fetcher-tool.ts", update the import to "./tools/csv-fetcher-tool"

${conflictsResolved ? `CONFLICTS RESOLVED BY INTELLIGENT MERGE:\n${JSON.stringify(conflictsResolved, null, 2)}\n` : ''}

INTEGRATED UNITS:
${JSON.stringify(orderedUnits, null, 2)}

Be thorough and methodical. Always use listDirectory to verify actual file existence before fixing imports.`,
        model: openai('gpt-4o-mini'),
        tools: {
          validateCode: allTools.validateCode,
          readFile: allTools.readFile,
          multiEdit: allTools.multiEdit,
          listDirectory: allTools.listDirectory,
          executeCommand: allTools.executeCommand,
        },
      });

      console.log('Starting validation and fix agent with internal loop...');

      let validationResults = {
        valid: false,
        errorsFixed: 0,
        remainingErrors: 1, // Start with 1 to enter the loop
        iteration: currentIteration,
      };

      // Loop up to maxIterations times or until all errors are fixed
      while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
        console.log(`\n=== Validation Iteration ${currentIteration} ===`);

        const iterationPrompt =
|
|
1255
|
-
currentIteration === 1
|
|
1256
|
-
? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.
|
|
1257
|
-
|
|
1258
|
-
Start by running validateCode with all validation types to get a complete picture of any issues, then systematically fix them.`
|
|
1259
|
-
: `Continue validation and fixing for the template integration at ${targetPath}. This is iteration ${currentIteration} of validation.
|
|
1260
|
-
|
|
1261
|
-
Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
|
|
1262
|
-
|
|
1263
|
-
const result = await validationAgent.stream(iterationPrompt, {
|
|
1264
|
-
experimental_output: z.object({ success: z.boolean() }),
|
|
1265
|
-
});
|
|
1266
|
-
|
|
1267
|
-
let iterationErrors = 0;
|
|
1268
|
-
let previousErrors = validationResults.remainingErrors;
|
|
1269
|
-
|
|
1270
|
-
for await (const chunk of result.fullStream) {
|
|
1271
|
-
if (chunk.type === 'step-finish' || chunk.type === 'step-start') {
|
|
1272
|
-
console.log({
|
|
1273
|
-
type: chunk.type,
|
|
1274
|
-
msgId: chunk.messageId,
|
|
1275
|
-
iteration: currentIteration,
|
|
1276
|
-
});
|
|
1277
|
-
} else {
|
|
1278
|
-
console.log(JSON.stringify(chunk, null, 2));
|
|
1279
|
-
}
|
|
1280
|
-
if (chunk.type === 'tool-result') {
|
|
1281
|
-
// Track validation results
|
|
1282
|
-
if (chunk.toolName === 'validateCode') {
|
|
1283
|
-
const toolResult = chunk.result as any;
|
|
1284
|
-
if (toolResult?.summary) {
|
|
1285
|
-
iterationErrors = toolResult.summary.totalErrors || 0;
|
|
1286
|
-
console.log(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
|
|
1287
|
-
}
|
|
1288
|
-
}
|
|
1289
|
-
}
|
|
1290
|
-
}
|
|
1291
|
-
|
|
1292
|
-
// Update results for this iteration
|
|
1293
|
-
validationResults.remainingErrors = iterationErrors;
|
|
1294
|
-
validationResults.errorsFixed += Math.max(0, previousErrors - iterationErrors);
|
|
1295
|
-
validationResults.valid = iterationErrors === 0;
|
|
1296
|
-
validationResults.iteration = currentIteration;
|
|
1297
|
-
|
|
1298
|
-
console.log(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
|
|
1299
|
-
|
|
1300
|
-
// Break if no errors or max iterations reached
|
|
1301
|
-
if (iterationErrors === 0) {
|
|
1302
|
-
console.log(`✅ All validation issues resolved in ${currentIteration} iterations!`);
|
|
1303
|
-
break;
|
|
1304
|
-
} else if (currentIteration >= maxIterations) {
|
|
1305
|
-
console.log(`⚠️ Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
|
|
1306
|
-
break;
|
|
1307
|
-
}
|
|
1308
|
-
|
|
1309
|
-
currentIteration++;
|
|
1310
|
-
}
|
|
1311
|
-
|
|
1312
|
-
// Commit the validation fixes
|
|
1313
|
-
try {
|
|
1314
|
-
await exec(
|
|
1315
|
-
`git add . && git commit -m "fix(template): resolve validation errors for ${slug}@${commitSha.substring(0, 7)}" || true`,
|
|
1316
|
-
{
|
|
1317
|
-
cwd: targetPath,
|
|
1318
|
-
},
|
|
1319
|
-
);
|
|
1320
|
-
} catch (commitError) {
|
|
1321
|
-
console.warn('Failed to commit validation fixes:', commitError);
|
|
1322
|
-
}
|
|
1323
|
-
|
|
1324
|
-
return {
|
|
1325
|
-
success: true,
|
|
1326
|
-
applied: true,
|
|
1327
|
-
message: `Validation completed in ${currentIteration} iteration${currentIteration > 1 ? 's' : ''}. ${validationResults.valid ? 'All issues resolved!' : `${validationResults.remainingErrors} issues remaining`}`,
|
|
1328
|
-
validationResults: {
|
|
1329
|
-
valid: validationResults.valid,
|
|
1330
|
-
errorsFixed: validationResults.errorsFixed,
|
|
1331
|
-
remainingErrors: validationResults.remainingErrors,
|
|
1332
|
-
},
|
|
1333
|
-
};
|
|
1334
|
-
} catch (error) {
|
|
1335
|
-
console.error('Validation and fix failed:', error);
|
|
1336
|
-
return {
|
|
1337
|
-
success: false,
|
|
1338
|
-
applied: false,
|
|
1339
|
-
message: `Validation and fix failed: ${error instanceof Error ? error.message : String(error)}`,
|
|
1340
|
-
validationResults: {
|
|
1341
|
-
valid: false,
|
|
1342
|
-
errorsFixed: 0,
|
|
1343
|
-
remainingErrors: -1,
|
|
1344
|
-
},
|
|
1345
|
-
error: error instanceof Error ? error.message : String(error),
|
|
1346
|
-
};
|
|
1347
|
-
} finally {
|
|
1348
|
-
// Cleanup template directory
|
|
1349
|
-
try {
|
|
1350
|
-
await rm(templateDir, { recursive: true, force: true });
|
|
1351
|
-
console.log(`✓ Cleaned up template directory: ${templateDir}`);
|
|
1352
|
-
} catch (cleanupError) {
|
|
1353
|
-
console.warn('Failed to cleanup template directory:', cleanupError);
|
|
1354
|
-
}
|
|
1355
|
-
}
|
|
1356
|
-
},
|
|
1357
|
-
});
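
The agent instructions above end with an import-path example. As a minimal sketch of the rewrite the validation agent is expected to perform (the filenames are the hypothetical ones from that example, not files shipped in this package):

// Before the fix: the copied code still imports the template's original filename.
// import { downloadCsvTool } from './tools/download-csv-tool';

// After the fix: the path matches the file that was actually copied (csv-fetcher-tool.ts),
// and the imported name matches that module's export.
import { downloadCsvTool } from './tools/csv-fetcher-tool';

export const csvTools = { downloadCsvTool };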

// Create the complete workflow
export const mergeTemplateWorkflow = createWorkflow({
  id: 'merge-template',
  description:
    'Merges a Mastra template repository into the current project using intelligent AgentBuilder-powered merging',
  inputSchema: MergeInputSchema,
  outputSchema: ApplyResultSchema,
  steps: [
    cloneTemplateStep,
    analyzePackageStep,
    discoverUnitsStep,
    orderUnitsStep,
    packageMergeStep,
    flatInstallStep,
    programmaticFileCopyStep,
    intelligentMergeStep,
    validationAndFixStep,
  ],
})
  .then(cloneTemplateStep)
  .parallel([analyzePackageStep, discoverUnitsStep])
  .map(async ({ getStepResult }) => {
    const discoverResult = getStepResult(discoverUnitsStep);
    return discoverResult;
  })
  .then(orderUnitsStep)
  .map(async ({ getStepResult, getInitData }) => {
    const cloneResult = getStepResult(cloneTemplateStep);
    const packageResult = getStepResult(analyzePackageStep);
    const initData = getInitData();

    return {
      commitSha: cloneResult.commitSha,
      slug: cloneResult.slug,
      targetPath: initData.targetPath,
      packageInfo: packageResult,
    };
  })
  .then(packageMergeStep)
  .map(async ({ getInitData }) => {
    const initData = getInitData();
    return {
      targetPath: initData.targetPath,
    };
  })
  .then(flatInstallStep)
  .map(async ({ getStepResult, getInitData }) => {
    const cloneResult = getStepResult(cloneTemplateStep);
    const orderResult = getStepResult(orderUnitsStep);
    const initData = getInitData();

    return {
      orderedUnits: orderResult.orderedUnits,
      templateDir: cloneResult.templateDir,
      commitSha: cloneResult.commitSha,
      slug: cloneResult.slug,
      targetPath: initData.targetPath,
    };
  })
  .then(programmaticFileCopyStep)
  .map(async ({ getStepResult, getInitData }) => {
    const copyResult = getStepResult(programmaticFileCopyStep);
    const cloneResult = getStepResult(cloneTemplateStep);
    const initData = getInitData();

    return {
      conflicts: copyResult.conflicts,
      copiedFiles: copyResult.copiedFiles,
      commitSha: cloneResult.commitSha,
      slug: cloneResult.slug,
      targetPath: initData.targetPath,
      templateDir: cloneResult.templateDir,
    };
  })
  .then(intelligentMergeStep)
  .map(async ({ getStepResult, getInitData }) => {
    const cloneResult = getStepResult(cloneTemplateStep);
    const orderResult = getStepResult(orderUnitsStep);
    const copyResult = getStepResult(programmaticFileCopyStep);
    const mergeResult = getStepResult(intelligentMergeStep);
    const initData = getInitData();

    return {
      commitSha: cloneResult.commitSha,
      slug: cloneResult.slug,
      targetPath: initData.targetPath,
      templateDir: cloneResult.templateDir,
      orderedUnits: orderResult.orderedUnits,
      copiedFiles: copyResult.copiedFiles,
      conflictsResolved: mergeResult.conflictsResolved,
    };
  })
  .then(validationAndFixStep)
  .map(async ({ getStepResult, getInitData }) => {
    const validationResult = getStepResult(validationAndFixStep);
    const intelligentMergeResult = getStepResult(intelligentMergeStep);
    const copyResult = getStepResult(programmaticFileCopyStep);
    const cloneResult = getStepResult(cloneTemplateStep);
    const initData = getInitData();

    // Ensure branchName is always present, with fallback logic
    const branchName =
      intelligentMergeResult.branchName || `feat/install-template-${cloneResult.slug || initData.slug}`;

    // Aggregate errors from all steps
    const allErrors = [copyResult.error, intelligentMergeResult.error, validationResult.error].filter(Boolean);

    // Determine overall success based on all step results
    const overallSuccess =
      copyResult.success !== false && intelligentMergeResult.success !== false && validationResult.success;

    // Create comprehensive message
    const messages = [];
    if (copyResult.copiedFiles?.length > 0) {
      messages.push(`${copyResult.copiedFiles.length} files copied`);
    }
    if (copyResult.conflicts?.length > 0) {
      messages.push(`${copyResult.conflicts.length} conflicts skipped`);
    }
    if (intelligentMergeResult.conflictsResolved?.length > 0) {
      messages.push(`${intelligentMergeResult.conflictsResolved.length} conflicts resolved`);
    }
    if (validationResult.validationResults?.errorsFixed > 0) {
      messages.push(`${validationResult.validationResults.errorsFixed} validation errors fixed`);
    }

    const comprehensiveMessage =
      messages.length > 0
        ? `Template merge completed: ${messages.join(', ')}`
        : validationResult.message || 'Template merge completed';

    return {
      success: overallSuccess,
      applied: validationResult.applied || copyResult.copiedFiles?.length > 0 || false,
      message: comprehensiveMessage,
      validationResults: validationResult.validationResults,
      error: allErrors.length > 0 ? allErrors.join('; ') : undefined,
      errors: allErrors.length > 0 ? allErrors : undefined,
      branchName,
      // Additional debugging info
      stepResults: {
        copySuccess: copyResult.success,
        mergeSuccess: intelligentMergeResult.success,
        validationSuccess: validationResult.success,
        filesCopied: copyResult.copiedFiles?.length || 0,
        conflictsSkipped: copyResult.conflicts?.length || 0,
        conflictsResolved: intelligentMergeResult.conflictsResolved?.length || 0,
      },
    };
  })
  .commit();

// Helper to merge a template by slug
export async function mergeTemplateBySlug(slug: string, targetPath?: string) {
  const template = await getMastraTemplate(slug);
  const run = await mergeTemplateWorkflow.createRunAsync();
  return await run.start({
    inputData: {
      repo: template.githubUrl,
      slug: template.slug,
      targetPath,
    },
  });
}
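
A minimal usage sketch for mergeTemplateBySlug follows. The slug is hypothetical, and the status/result shape of the returned workflow run is assumed rather than taken from this diff:

// Hypothetical slug; any template registered with getMastraTemplate would work the same way.
const runResult = await mergeTemplateBySlug('csv-to-questions', process.cwd());
// Assumed run-result shape: a status field plus the mapped output from the final step.
if (runResult.status === 'success') {
  console.log(runResult.result?.message); // e.g. "Template merge completed: 4 files copied"
} else {
  console.error('Template merge did not complete cleanly:', runResult);
}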

// Helper function to determine conflict resolution strategy
const determineConflictStrategy = (
  _unit: { kind: string; id: string },
  _targetFile: string,
): 'skip' | 'backup-and-replace' | 'rename' => {
  // For now, always skip conflicts to avoid disrupting existing files
  // TODO: Enable advanced strategies based on user feedback
  return 'skip';

  // Future logic (currently disabled):
  // if (['agent', 'workflow', 'network'].includes(unit.kind)) {
  //   return 'backup-and-replace';
  // }
  // if (unit.kind === 'tool') {
  //   return 'rename';
  // }
  // return 'backup-and-replace';
};
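
determineConflictStrategy currently always returns 'skip', but its return type anticipates three behaviours. As a sketch only, a hypothetical consumer inside the file-copy step could branch on it like this (the unit and target path below are made up for illustration):

const strategy = determineConflictStrategy({ kind: 'tool', id: 'csv-fetcher' }, 'src/mastra/tools/csv-fetcher-tool.ts');
switch (strategy) {
  case 'skip':
    // Leave the existing file untouched and record it as a skipped conflict (the only active path today).
    break;
  case 'backup-and-replace':
    // Would move the existing file aside (e.g. to a *.bak copy) before writing the template's version.
    break;
  case 'rename':
    // Would write the incoming file under a new, non-conflicting name instead.
    break;
}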