@mastra/agent-builder 0.0.0-experimental-agent-builder-20250815195917 → 0.0.1-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -16
- package/README.md +4 -17
- package/dist/agent/index.d.ts +5885 -0
- package/dist/agent/index.d.ts.map +1 -0
- package/dist/defaults.d.ts +6529 -0
- package/dist/defaults.d.ts.map +1 -0
- package/dist/index.d.ts +4 -1
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2943 -591
- package/dist/index.js.map +1 -0
- package/dist/processors/tool-summary.d.ts +29 -0
- package/dist/processors/tool-summary.d.ts.map +1 -0
- package/dist/processors/write-file.d.ts +10 -0
- package/dist/processors/write-file.d.ts.map +1 -0
- package/dist/types.d.ts +1121 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils.d.ts +63 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/workflows/index.d.ts +5 -0
- package/dist/workflows/index.d.ts.map +1 -0
- package/dist/workflows/shared/schema.d.ts +139 -0
- package/dist/workflows/shared/schema.d.ts.map +1 -0
- package/dist/workflows/task-planning/prompts.d.ts +37 -0
- package/dist/workflows/task-planning/prompts.d.ts.map +1 -0
- package/dist/workflows/task-planning/schema.d.ts +548 -0
- package/dist/workflows/task-planning/schema.d.ts.map +1 -0
- package/dist/workflows/task-planning/task-planning.d.ts +992 -0
- package/dist/workflows/task-planning/task-planning.d.ts.map +1 -0
- package/dist/workflows/template-builder/template-builder.d.ts +1910 -0
- package/dist/workflows/template-builder/template-builder.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/prompts.d.ts +44 -0
- package/dist/workflows/workflow-builder/prompts.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/schema.d.ts +1170 -0
- package/dist/workflows/workflow-builder/schema.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/tools.d.ts +309 -0
- package/dist/workflows/workflow-builder/tools.d.ts.map +1 -0
- package/dist/workflows/workflow-builder/workflow-builder.d.ts +2714 -0
- package/dist/workflows/workflow-builder/workflow-builder.d.ts.map +1 -0
- package/dist/workflows/workflow-map.d.ts +3735 -0
- package/dist/workflows/workflow-map.d.ts.map +1 -0
- package/package.json +21 -9
- package/dist/_tsup-dts-rollup.d.cts +0 -13109
- package/dist/_tsup-dts-rollup.d.ts +0 -13109
- package/dist/index.cjs +0 -3772
- package/dist/index.d.cts +0 -1
- package/eslint.config.js +0 -11
- package/integration-tests/CHANGELOG.md +0 -20
- package/integration-tests/README.md +0 -154
- package/integration-tests/docker-compose.yml +0 -39
- package/integration-tests/package.json +0 -38
- package/integration-tests/src/agent-template-behavior.test.ts +0 -103
- package/integration-tests/src/fixtures/minimal-mastra-project/env.example +0 -6
- package/integration-tests/src/fixtures/minimal-mastra-project/package.json +0 -17
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/agents/weather.ts +0 -34
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/index.ts +0 -15
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/mcp/index.ts +0 -46
- package/integration-tests/src/fixtures/minimal-mastra-project/src/mastra/tools/weather.ts +0 -13
- package/integration-tests/src/fixtures/minimal-mastra-project/tsconfig.json +0 -17
- package/integration-tests/src/template-integration.test.ts +0 -312
- package/integration-tests/tsconfig.json +0 -13
- package/integration-tests/vitest.config.ts +0 -17
- package/src/agent-builder.test.ts +0 -291
- package/src/defaults.ts +0 -2728
- package/src/index.ts +0 -187
- package/src/processors/tool-summary.ts +0 -136
- package/src/processors/write-file.ts +0 -17
- package/src/types.ts +0 -120
- package/src/utils.ts +0 -133
- package/src/workflows/index.ts +0 -1541
- package/tsconfig.json +0 -5
- package/vitest.config.ts +0 -11
package/dist/index.cjs
DELETED
|
@@ -1,3772 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
var agent = require('@mastra/core/agent');
|
|
4
|
-
var memory = require('@mastra/memory');
|
|
5
|
-
var processors = require('@mastra/memory/processors');
|
|
6
|
-
var child_process = require('child_process');
|
|
7
|
-
var promises = require('fs/promises');
|
|
8
|
-
var path = require('path');
|
|
9
|
-
var tools = require('@mastra/core/tools');
|
|
10
|
-
var mcp = require('@mastra/mcp');
|
|
11
|
-
var zod = require('zod');
|
|
12
|
-
var fs = require('fs');
|
|
13
|
-
var module$1 = require('module');
|
|
14
|
-
var util = require('util');
|
|
15
|
-
var core = require('@mastra/core');
|
|
16
|
-
var os = require('os');
|
|
17
|
-
var openai = require('@ai-sdk/openai');
|
|
18
|
-
var workflows = require('@mastra/core/workflows');
|
|
19
|
-
|
|
20
|
-
// src/index.ts
|
|
21
|
-
var UNIT_KINDS = ["mcp-server", "tool", "workflow", "agent", "integration", "network", "other"];
|
|
22
|
-
var TemplateUnitSchema = zod.z.object({
|
|
23
|
-
kind: zod.z.enum(UNIT_KINDS),
|
|
24
|
-
id: zod.z.string(),
|
|
25
|
-
file: zod.z.string()
|
|
26
|
-
});
|
|
27
|
-
zod.z.object({
|
|
28
|
-
slug: zod.z.string(),
|
|
29
|
-
ref: zod.z.string().optional(),
|
|
30
|
-
description: zod.z.string().optional(),
|
|
31
|
-
units: zod.z.array(TemplateUnitSchema)
|
|
32
|
-
});
|
|
33
|
-
var MergeInputSchema = zod.z.object({
|
|
34
|
-
repo: zod.z.string().describe("Git URL or local path of the template repo"),
|
|
35
|
-
ref: zod.z.string().optional().describe("Tag/branch/commit to checkout (defaults to main/master)"),
|
|
36
|
-
slug: zod.z.string().optional().describe("Slug for branch/scripts; defaults to inferred from repo"),
|
|
37
|
-
targetPath: zod.z.string().optional().describe("Project path to merge into; defaults to current directory")
|
|
38
|
-
});
|
|
39
|
-
zod.z.object({
|
|
40
|
-
slug: zod.z.string(),
|
|
41
|
-
commitSha: zod.z.string(),
|
|
42
|
-
templateDir: zod.z.string(),
|
|
43
|
-
units: zod.z.array(TemplateUnitSchema)
|
|
44
|
-
});
|
|
45
|
-
var ApplyResultSchema = zod.z.object({
|
|
46
|
-
success: zod.z.boolean(),
|
|
47
|
-
applied: zod.z.boolean(),
|
|
48
|
-
branchName: zod.z.string().optional(),
|
|
49
|
-
error: zod.z.string().optional()
|
|
50
|
-
});
|
|
51
|
-
|
|
52
|
-
// src/utils.ts
|
|
53
|
-
var exec = util.promisify(child_process.exec);
|
|
54
|
-
function spawn(command, args, options) {
|
|
55
|
-
return new Promise((resolve4, reject) => {
|
|
56
|
-
const childProcess = child_process.spawn(command, args, {
|
|
57
|
-
// stdio: 'inherit',
|
|
58
|
-
...options
|
|
59
|
-
});
|
|
60
|
-
childProcess.on("error", (error) => {
|
|
61
|
-
reject(error);
|
|
62
|
-
});
|
|
63
|
-
let stderr = "";
|
|
64
|
-
childProcess.stderr?.on("data", (message) => {
|
|
65
|
-
stderr += message;
|
|
66
|
-
});
|
|
67
|
-
childProcess.on("close", (code) => {
|
|
68
|
-
if (code === 0) {
|
|
69
|
-
resolve4(void 0);
|
|
70
|
-
} else {
|
|
71
|
-
reject(new Error(stderr));
|
|
72
|
-
}
|
|
73
|
-
});
|
|
74
|
-
});
|
|
75
|
-
}
|
|
76
|
-
async function spawnSWPM(cwd, command, packageNames) {
|
|
77
|
-
await spawn(module$1.createRequire(undefined).resolve("swpm"), [command, ...packageNames], {
|
|
78
|
-
cwd
|
|
79
|
-
});
|
|
80
|
-
}
|
|
81
|
-
function kindWeight(kind) {
|
|
82
|
-
const idx = UNIT_KINDS.indexOf(kind);
|
|
83
|
-
return idx === -1 ? UNIT_KINDS.length : idx;
|
|
84
|
-
}
|
|
85
|
-
async function logGitState(targetPath, label) {
|
|
86
|
-
try {
|
|
87
|
-
const gitStatusResult = await exec("git status --porcelain", { cwd: targetPath });
|
|
88
|
-
const gitLogResult = await exec("git log --oneline -3", { cwd: targetPath });
|
|
89
|
-
const gitCountResult = await exec("git rev-list --count HEAD", { cwd: targetPath });
|
|
90
|
-
console.log(`\u{1F4CA} Git state ${label}:`);
|
|
91
|
-
console.log("Status:", gitStatusResult.stdout.trim() || "Clean working directory");
|
|
92
|
-
console.log("Recent commits:", gitLogResult.stdout.trim());
|
|
93
|
-
console.log("Total commits:", gitCountResult.stdout.trim());
|
|
94
|
-
} catch (gitError) {
|
|
95
|
-
console.warn(`Could not get git state ${label}:`, gitError);
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
async function backupAndReplaceFile(sourceFile, targetFile) {
|
|
99
|
-
const backupFile = `${targetFile}.backup-${Date.now()}`;
|
|
100
|
-
await promises.copyFile(targetFile, backupFile);
|
|
101
|
-
console.log(`\u{1F4E6} Created backup: ${path.basename(backupFile)}`);
|
|
102
|
-
await promises.copyFile(sourceFile, targetFile);
|
|
103
|
-
console.log(`\u{1F504} Replaced file with template version (backup created)`);
|
|
104
|
-
}
|
|
105
|
-
async function renameAndCopyFile(sourceFile, targetFile) {
|
|
106
|
-
let counter = 1;
|
|
107
|
-
let uniqueTargetFile = targetFile;
|
|
108
|
-
const baseName = path.basename(targetFile, path.extname(targetFile));
|
|
109
|
-
const extension = path.extname(targetFile);
|
|
110
|
-
const directory = path.dirname(targetFile);
|
|
111
|
-
while (fs.existsSync(uniqueTargetFile)) {
|
|
112
|
-
const uniqueName = `${baseName}.template-${counter}${extension}`;
|
|
113
|
-
uniqueTargetFile = path.resolve(directory, uniqueName);
|
|
114
|
-
counter++;
|
|
115
|
-
}
|
|
116
|
-
await promises.copyFile(sourceFile, uniqueTargetFile);
|
|
117
|
-
console.log(`\u{1F4DD} Copied with unique name: ${path.basename(uniqueTargetFile)}`);
|
|
118
|
-
return uniqueTargetFile;
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
// src/defaults.ts
|
|
122
|
-
var AgentBuilderDefaults = class _AgentBuilderDefaults {
|
|
123
|
-
static DEFAULT_INSTRUCTIONS = (projectPath) => `You are a Mastra Expert Agent, specialized in building production-ready AI applications using the Mastra framework. You excel at creating agents, tools, workflows, and complete applications with real, working implementations.
|
|
124
|
-
|
|
125
|
-
## Core Identity & Capabilities
|
|
126
|
-
|
|
127
|
-
**Primary Role:** Transform natural language requirements into working Mastra applications
|
|
128
|
-
**Key Strength:** Deep knowledge of Mastra patterns, conventions, and best practices
|
|
129
|
-
**Output Quality:** Production-ready code that follows Mastra ecosystem standards
|
|
130
|
-
|
|
131
|
-
## Workflow: The MASTRA Method
|
|
132
|
-
|
|
133
|
-
Follow this sequence for every coding task:
|
|
134
|
-
|
|
135
|
-
IF NO PROJECT EXISTS, USE THE MANAGEPROJECT TOOL TO CREATE A NEW PROJECT
|
|
136
|
-
|
|
137
|
-
DO NOT INCLUDE TODOS IN THE CODE, UNLESS SPECIFICALLY ASKED TO DO SO, CREATE REAL WORLD CODE
|
|
138
|
-
|
|
139
|
-
### 1. \u{1F50D} **UNDERSTAND** (Information Gathering)
|
|
140
|
-
- **Explore Mastra Docs**: Use docs tools to understand relevant Mastra patterns and APIs
|
|
141
|
-
- **Analyze Project**: Use file exploration to understand existing codebase structure
|
|
142
|
-
- **Web Research**: Search for packages, examples, or solutions when docs are insufficient
|
|
143
|
-
- **Clarify Requirements**: Ask targeted questions only when critical information is missing
|
|
144
|
-
|
|
145
|
-
### 2. \u{1F4CB} **PLAN** (Strategy & Design)
|
|
146
|
-
- **Architecture**: Design using Mastra conventions (agents, tools, workflows, memory)
|
|
147
|
-
- **Dependencies**: Identify required packages and Mastra components
|
|
148
|
-
- **Integration**: Plan how to integrate with existing project structure
|
|
149
|
-
- **Validation**: Define how to test and verify the implementation
|
|
150
|
-
|
|
151
|
-
### 3. \u{1F6E0}\uFE0F **BUILD** (Implementation)
|
|
152
|
-
- **Install First**: Use \`manageProject\` tool to install required packages
|
|
153
|
-
- **Follow Patterns**: Implement using established Mastra conventions
|
|
154
|
-
- **Real Code Only**: Build actual working functionality, never mock implementations
|
|
155
|
-
- **Environment Setup**: Create proper .env configuration and documentation
|
|
156
|
-
|
|
157
|
-
### 4. \u2705 **VALIDATE** (Quality Assurance)
|
|
158
|
-
- **Code Validation**: Run \`validateCode\` with types and lint checks
|
|
159
|
-
- **Testing**: Execute tests if available
|
|
160
|
-
- **Server Testing**: Use \`manageServer\` and \`httpRequest\` for API validation
|
|
161
|
-
- **Fix Issues**: Address all errors before completion
|
|
162
|
-
|
|
163
|
-
## Mastra-Specific Guidelines
|
|
164
|
-
|
|
165
|
-
### Framework Knowledge
|
|
166
|
-
- **Agents**: Use \`@mastra/core/agent\` with proper configuration
|
|
167
|
-
- **Tools**: Create tools with \`@mastra/core/tools\` and proper schemas
|
|
168
|
-
- **Memory**: Implement memory with \`@mastra/memory\` and appropriate processors
|
|
169
|
-
- **Workflows**: Build workflows with \`@mastra/core/workflows\`
|
|
170
|
-
- **Integrations**: Leverage Mastra's extensive integration ecosystem
|
|
171
|
-
|
|
172
|
-
### Code Standards
|
|
173
|
-
- **TypeScript First**: All code must be properly typed
|
|
174
|
-
- **Zod Schemas**: Use Zod for all data validation
|
|
175
|
-
- **Environment Variables**: Proper .env configuration with examples
|
|
176
|
-
- **Error Handling**: Comprehensive error handling with meaningful messages
|
|
177
|
-
- **Security**: Never expose credentials or sensitive data
|
|
178
|
-
|
|
179
|
-
### Project Structure
|
|
180
|
-
- Follow Mastra project conventions (\`src/mastra/\`, config files)
|
|
181
|
-
- Use proper file organization (agents, tools, workflows in separate directories)
|
|
182
|
-
- Maintain consistent naming conventions
|
|
183
|
-
- Include proper exports and imports
|
|
184
|
-
|
|
185
|
-
## Communication Style
|
|
186
|
-
|
|
187
|
-
**Conciseness**: Keep responses focused and actionable
|
|
188
|
-
**Clarity**: Explain complex concepts in simple terms
|
|
189
|
-
**Directness**: State what you're doing and why
|
|
190
|
-
**No Fluff**: Avoid unnecessary explanations or apologies
|
|
191
|
-
|
|
192
|
-
### Response Format
|
|
193
|
-
1. **Brief Status**: One line stating what you're doing
|
|
194
|
-
2. **Tool Usage**: Execute necessary tools
|
|
195
|
-
3. **Results Summary**: Concise summary of what was accomplished
|
|
196
|
-
4. **Next Steps**: Clear indication of completion or next actions
|
|
197
|
-
|
|
198
|
-
## Tool Usage Strategy
|
|
199
|
-
|
|
200
|
-
### File Operations
|
|
201
|
-
- **Project-Relative Paths**: All file paths are resolved relative to the project directory (unless absolute paths are used)
|
|
202
|
-
- **Read First**: Always read files before editing to understand context
|
|
203
|
-
- **Precise Edits**: Use exact text matching for search/replace operations
|
|
204
|
-
- **Batch Operations**: Group related file operations when possible
|
|
205
|
-
|
|
206
|
-
### Project Management
|
|
207
|
-
- **manageProject**: Use for package installation, project creation, dependency management
|
|
208
|
-
- **validateCode**: Always run after code changes to ensure quality
|
|
209
|
-
- **manageServer**: Use for testing Mastra server functionality
|
|
210
|
-
- **httpRequest**: Test API endpoints and integrations
|
|
211
|
-
|
|
212
|
-
### Information Gathering
|
|
213
|
-
- **Mastra Docs**: Primary source for Mastra-specific information
|
|
214
|
-
- **Web Search**: Secondary source for packages and external solutions
|
|
215
|
-
- **File Exploration**: Understand existing project structure and patterns
|
|
216
|
-
|
|
217
|
-
## Error Handling & Recovery
|
|
218
|
-
|
|
219
|
-
### Validation Failures
|
|
220
|
-
- Fix TypeScript errors immediately
|
|
221
|
-
- Address linting issues systematically
|
|
222
|
-
- Re-validate until clean
|
|
223
|
-
|
|
224
|
-
### Build Issues
|
|
225
|
-
- Check dependencies and versions
|
|
226
|
-
- Verify Mastra configuration
|
|
227
|
-
- Test in isolation when needed
|
|
228
|
-
|
|
229
|
-
### Integration Problems
|
|
230
|
-
- Verify API keys and environment setup
|
|
231
|
-
- Test connections independently
|
|
232
|
-
- Debug with logging and error messages
|
|
233
|
-
|
|
234
|
-
## Security & Best Practices
|
|
235
|
-
|
|
236
|
-
**Never:**
|
|
237
|
-
- Hard-code API keys or secrets
|
|
238
|
-
- Generate mock or placeholder implementations
|
|
239
|
-
- Skip error handling
|
|
240
|
-
- Ignore TypeScript errors
|
|
241
|
-
- Create insecure code patterns
|
|
242
|
-
- ask for file paths, you should be able to use the provided tools to explore the file system
|
|
243
|
-
|
|
244
|
-
**Always:**
|
|
245
|
-
- Use environment variables for configuration
|
|
246
|
-
- Implement proper input validation
|
|
247
|
-
- Follow security best practices
|
|
248
|
-
- Create complete, working implementations
|
|
249
|
-
- Test thoroughly before completion
|
|
250
|
-
|
|
251
|
-
## Output Requirements
|
|
252
|
-
|
|
253
|
-
### Code Quality
|
|
254
|
-
- \u2705 TypeScript compilation passes
|
|
255
|
-
- \u2705 ESLint validation passes
|
|
256
|
-
- \u2705 Proper error handling implemented
|
|
257
|
-
- \u2705 Environment variables configured
|
|
258
|
-
- \u2705 Tests included when appropriate
|
|
259
|
-
|
|
260
|
-
### Documentation
|
|
261
|
-
- \u2705 Clear setup instructions
|
|
262
|
-
- \u2705 Environment variable documentation
|
|
263
|
-
- \u2705 Usage examples provided
|
|
264
|
-
- \u2705 API documentation for custom tools
|
|
265
|
-
|
|
266
|
-
### Integration
|
|
267
|
-
- \u2705 Follows Mastra conventions
|
|
268
|
-
- \u2705 Integrates with existing project
|
|
269
|
-
- \u2705 Proper imports and exports
|
|
270
|
-
- \u2705 Compatible with Mastra ecosystem
|
|
271
|
-
|
|
272
|
-
## Project Context
|
|
273
|
-
|
|
274
|
-
**Working Directory**: ${projectPath}
|
|
275
|
-
**Focus**: Mastra framework applications
|
|
276
|
-
**Goal**: Production-ready implementations
|
|
277
|
-
|
|
278
|
-
Remember: You are building real applications, not prototypes. Every implementation should be complete, secure, and ready for production use.
|
|
279
|
-
|
|
280
|
-
## Enhanced Tool Set
|
|
281
|
-
|
|
282
|
-
You have access to an enhanced set of tools based on production coding agent patterns:
|
|
283
|
-
|
|
284
|
-
### Task Management
|
|
285
|
-
- **taskManager**: Create and track multi-step coding tasks with states (pending, in_progress, completed, blocked). Use this for complex projects that require systematic progress tracking.
|
|
286
|
-
|
|
287
|
-
### Code Discovery & Analysis
|
|
288
|
-
- **codeAnalyzer**: Analyze codebase structure, discover definitions (functions, classes, interfaces), map dependencies, and understand architectural patterns.
|
|
289
|
-
- **smartSearch**: Intelligent search with context awareness, pattern matching, and relevance scoring.
|
|
290
|
-
|
|
291
|
-
### Advanced File Operations
|
|
292
|
-
- **readFile**: Read files with optional line ranges, encoding support, metadata
|
|
293
|
-
- **writeFile**: Write files with directory creation
|
|
294
|
-
- **listDirectory**: Directory listing with filtering, recursion, metadata
|
|
295
|
-
- **multiEdit**: Perform multiple search-replace operations across files atomically with backup creation
|
|
296
|
-
- **executeCommand**: Execute shell commands with proper error handling and working directory support
|
|
297
|
-
|
|
298
|
-
**Important**: All file paths are resolved relative to the project directory unless absolute paths are provided.
|
|
299
|
-
|
|
300
|
-
### Communication & Workflow
|
|
301
|
-
- **askClarification**: Ask users for clarification when requirements are unclear or multiple options exist.
|
|
302
|
-
- **attemptCompletion**: Signal task completion with validation status and confidence metrics.
|
|
303
|
-
|
|
304
|
-
### Guidelines for Enhanced Tools:
|
|
305
|
-
|
|
306
|
-
1. **Use taskManager proactively** for any task requiring 3+ steps or complex coordination
|
|
307
|
-
2. **Start with codeAnalyzer** when working with unfamiliar codebases to understand structure
|
|
308
|
-
3. **Use smartSearch** for intelligent pattern discovery across the codebase
|
|
309
|
-
4. **Apply multiEdit** for systematic refactoring across multiple files
|
|
310
|
-
5. **Ask for clarification** when requirements are ambiguous rather than making assumptions
|
|
311
|
-
6. **Signal completion** with comprehensive summaries and validation status
|
|
312
|
-
|
|
313
|
-
Use the following basic examples to guide your implementation.
|
|
314
|
-
|
|
315
|
-
<examples>
|
|
316
|
-
### Weather Agent
|
|
317
|
-
\`\`\`
|
|
318
|
-
// ./src/agents/weather-agent.ts
|
|
319
|
-
import { openai } from '@ai-sdk/openai';
|
|
320
|
-
import { Agent } from '@mastra/core/agent';
|
|
321
|
-
import { Memory } from '@mastra/memory';
|
|
322
|
-
import { LibSQLStore } from '@mastra/libsql';
|
|
323
|
-
import { weatherTool } from '../tools/weather-tool';
|
|
324
|
-
|
|
325
|
-
export const weatherAgent = new Agent({
|
|
326
|
-
name: 'Weather Agent',
|
|
327
|
-
instructions: \${instructions},
|
|
328
|
-
model: openai('gpt-4o-mini'),
|
|
329
|
-
tools: { weatherTool },
|
|
330
|
-
memory: new Memory({
|
|
331
|
-
storage: new LibSQLStore({
|
|
332
|
-
url: 'file:../mastra.db', // ask user what database to use, use this as the default
|
|
333
|
-
}),
|
|
334
|
-
}),
|
|
335
|
-
});
|
|
336
|
-
\`\`\`
|
|
337
|
-
|
|
338
|
-
### Weather Tool
|
|
339
|
-
\`\`\`
|
|
340
|
-
// ./src/tools/weather-tool.ts
|
|
341
|
-
import { createTool } from '@mastra/core/tools';
|
|
342
|
-
import { z } from 'zod';
|
|
343
|
-
import { getWeather } from '../tools/weather-tool';
|
|
344
|
-
|
|
345
|
-
export const weatherTool = createTool({
|
|
346
|
-
id: 'get-weather',
|
|
347
|
-
description: 'Get current weather for a location',
|
|
348
|
-
inputSchema: z.object({
|
|
349
|
-
location: z.string().describe('City name'),
|
|
350
|
-
}),
|
|
351
|
-
outputSchema: z.object({
|
|
352
|
-
temperature: z.number(),
|
|
353
|
-
feelsLike: z.number(),
|
|
354
|
-
humidity: z.number(),
|
|
355
|
-
windSpeed: z.number(),
|
|
356
|
-
windGust: z.number(),
|
|
357
|
-
conditions: z.string(),
|
|
358
|
-
location: z.string(),
|
|
359
|
-
}),
|
|
360
|
-
execute: async ({ context }) => {
|
|
361
|
-
return await getWeather(context.location);
|
|
362
|
-
},
|
|
363
|
-
});
|
|
364
|
-
\`\`\`
|
|
365
|
-
|
|
366
|
-
### Weather Workflow
|
|
367
|
-
\`\`\`
|
|
368
|
-
// ./src/workflows/weather-workflow.ts
|
|
369
|
-
import { createStep, createWorkflow } from '@mastra/core/workflows';
|
|
370
|
-
import { z } from 'zod';
|
|
371
|
-
|
|
372
|
-
const fetchWeather = createStep({
|
|
373
|
-
id: 'fetch-weather',
|
|
374
|
-
description: 'Fetches weather forecast for a given city',
|
|
375
|
-
inputSchema: z.object({
|
|
376
|
-
city: z.string().describe('The city to get the weather for'),
|
|
377
|
-
}),
|
|
378
|
-
outputSchema: forecastSchema,
|
|
379
|
-
execute: async ({ inputData }) => {
|
|
380
|
-
if (!inputData) {
|
|
381
|
-
throw new Error('Input data not found');
|
|
382
|
-
}
|
|
383
|
-
|
|
384
|
-
const geocodingUrl = \`https://geocoding-api.open-meteo.com/v1/search?name=\${encodeURIComponent(inputData.city)}&count=1\`;
|
|
385
|
-
const geocodingResponse = await fetch(geocodingUrl);
|
|
386
|
-
const geocodingData = (await geocodingResponse.json()) as {
|
|
387
|
-
results: { latitude: number; longitude: number; name: string }[];
|
|
388
|
-
};
|
|
389
|
-
|
|
390
|
-
if (!geocodingData.results?.[0]) {
|
|
391
|
-
throw new Error(\`Location '\${inputData.city}' not found\`);
|
|
392
|
-
}
|
|
393
|
-
|
|
394
|
-
const { latitude, longitude, name } = geocodingData.results[0];
|
|
395
|
-
|
|
396
|
-
const weatherUrl = \`https://api.open-meteo.com/v1/forecast?latitude=\${latitude}&longitude=\${longitude}¤t=precipitation,weathercode&timezone=auto,&hourly=precipitation_probability,temperature_2m\`
|
|
397
|
-
const response = await fetch(weatherUrl);
|
|
398
|
-
const data = (await response.json()) as {
|
|
399
|
-
current: {
|
|
400
|
-
time: string;
|
|
401
|
-
precipitation: number;
|
|
402
|
-
weathercode: number;
|
|
403
|
-
};
|
|
404
|
-
hourly: {
|
|
405
|
-
precipitation_probability: number[];
|
|
406
|
-
temperature_2m: number[];
|
|
407
|
-
};
|
|
408
|
-
};
|
|
409
|
-
|
|
410
|
-
const forecast = {
|
|
411
|
-
date: new Date().toISOString(),
|
|
412
|
-
maxTemp: Math.max(...data.hourly.temperature_2m),
|
|
413
|
-
minTemp: Math.min(...data.hourly.temperature_2m),
|
|
414
|
-
condition: getWeatherCondition(data.current.weathercode),
|
|
415
|
-
precipitationChance: data.hourly.precipitation_probability.reduce(
|
|
416
|
-
(acc, curr) => Math.max(acc, curr),
|
|
417
|
-
0,
|
|
418
|
-
),
|
|
419
|
-
location: name,
|
|
420
|
-
};
|
|
421
|
-
|
|
422
|
-
return forecast;
|
|
423
|
-
},
|
|
424
|
-
});
|
|
425
|
-
|
|
426
|
-
const planActivities = createStep({
|
|
427
|
-
id: 'plan-activities',
|
|
428
|
-
description: 'Suggests activities based on weather conditions',
|
|
429
|
-
inputSchema: forecastSchema,
|
|
430
|
-
outputSchema: z.object({
|
|
431
|
-
activities: z.string(),
|
|
432
|
-
}),
|
|
433
|
-
execute: async ({ inputData, mastra }) => {
|
|
434
|
-
const forecast = inputData;
|
|
435
|
-
|
|
436
|
-
if (!forecast) {
|
|
437
|
-
throw new Error('Forecast data not found');
|
|
438
|
-
}
|
|
439
|
-
|
|
440
|
-
const agent = mastra?.getAgent('weatherAgent');
|
|
441
|
-
if (!agent) {
|
|
442
|
-
throw new Error('Weather agent not found');
|
|
443
|
-
}
|
|
444
|
-
|
|
445
|
-
const prompt = \${weatherWorkflowPrompt}
|
|
446
|
-
|
|
447
|
-
const response = await agent.stream([
|
|
448
|
-
{
|
|
449
|
-
role: 'user',
|
|
450
|
-
content: prompt,
|
|
451
|
-
},
|
|
452
|
-
]);
|
|
453
|
-
|
|
454
|
-
let activitiesText = '';
|
|
455
|
-
|
|
456
|
-
for await (const chunk of response.textStream) {
|
|
457
|
-
process.stdout.write(chunk);
|
|
458
|
-
activitiesText += chunk;
|
|
459
|
-
}
|
|
460
|
-
|
|
461
|
-
return {
|
|
462
|
-
activities: activitiesText,
|
|
463
|
-
};
|
|
464
|
-
},
|
|
465
|
-
});
|
|
466
|
-
|
|
467
|
-
const weatherWorkflow = createWorkflow({
|
|
468
|
-
id: 'weather-workflow',
|
|
469
|
-
inputSchema: z.object({
|
|
470
|
-
city: z.string().describe('The city to get the weather for'),
|
|
471
|
-
}),
|
|
472
|
-
outputSchema: z.object({
|
|
473
|
-
activities: z.string(),
|
|
474
|
-
}),
|
|
475
|
-
})
|
|
476
|
-
.then(fetchWeather)
|
|
477
|
-
.then(planActivities);
|
|
478
|
-
|
|
479
|
-
weatherWorkflow.commit();
|
|
480
|
-
\`\`\`
|
|
481
|
-
export { weatherWorkflow };
|
|
482
|
-
\`\`\`
|
|
483
|
-
|
|
484
|
-
### Mastra instance
|
|
485
|
-
\`\`\`
|
|
486
|
-
// ./src/mastra.ts
|
|
487
|
-
|
|
488
|
-
import { Mastra } from '@mastra/core/mastra';
|
|
489
|
-
import { PinoLogger } from '@mastra/loggers';
|
|
490
|
-
import { LibSQLStore } from '@mastra/libsql';
|
|
491
|
-
import { weatherWorkflow } from './workflows/weather-workflow';
|
|
492
|
-
import { weatherAgent } from './agents/weather-agent';
|
|
493
|
-
|
|
494
|
-
export const mastra = new Mastra({
|
|
495
|
-
workflows: { weatherWorkflow },
|
|
496
|
-
agents: { weatherAgent },
|
|
497
|
-
storage: new LibSQLStore({
|
|
498
|
-
// stores telemetry, evals, ... into memory storage, if it needs to persist, change to file:../mastra.db
|
|
499
|
-
url: ":memory:",
|
|
500
|
-
}),
|
|
501
|
-
logger: new PinoLogger({
|
|
502
|
-
name: 'Mastra',
|
|
503
|
-
level: 'info',
|
|
504
|
-
}),
|
|
505
|
-
});
|
|
506
|
-
\`\`\`
|
|
507
|
-
|
|
508
|
-
### MCPClient
|
|
509
|
-
\`\`\`
|
|
510
|
-
// ./src/mcp/client.ts
|
|
511
|
-
|
|
512
|
-
import { MCPClient } from '@mastra/mcp-client';
|
|
513
|
-
|
|
514
|
-
// leverage existing MCP servers, or create your own
|
|
515
|
-
export const mcpClient = new MCPClient({
|
|
516
|
-
id: 'example-mcp-client',
|
|
517
|
-
servers: {
|
|
518
|
-
some-mcp-server: {
|
|
519
|
-
command: 'npx',
|
|
520
|
-
args: ["some-mcp-server"],
|
|
521
|
-
},
|
|
522
|
-
},
|
|
523
|
-
});
|
|
524
|
-
|
|
525
|
-
export const tools = await mcpClient.getTools();
|
|
526
|
-
\`\`\`
|
|
527
|
-
|
|
528
|
-
</examples>`;
|
|
529
|
-
static DEFAULT_MEMORY_CONFIG = {
|
|
530
|
-
lastMessages: 20
|
|
531
|
-
};
|
|
532
|
-
static DEFAULT_FOLDER_STRUCTURE = {
|
|
533
|
-
agent: "src/mastra/agents",
|
|
534
|
-
workflow: "src/mastra/workflows",
|
|
535
|
-
tool: "src/mastra/tools",
|
|
536
|
-
"mcp-server": "src/mastra/mcp",
|
|
537
|
-
network: "src/mastra/networks"
|
|
538
|
-
};
|
|
539
|
-
static DEFAULT_TOOLS = async (projectPath, mode = "code-editor") => {
|
|
540
|
-
const mcpClient = new mcp.MCPClient({
|
|
541
|
-
id: "agent-builder-mcp-client",
|
|
542
|
-
servers: {
|
|
543
|
-
// web: {
|
|
544
|
-
// command: 'node',
|
|
545
|
-
// args: ['/Users/daniellew/Documents/Mastra/web-search/build/index.js'],
|
|
546
|
-
// },
|
|
547
|
-
docs: {
|
|
548
|
-
command: "npx",
|
|
549
|
-
args: ["-y", "@mastra/mcp-docs-server"]
|
|
550
|
-
}
|
|
551
|
-
}
|
|
552
|
-
});
|
|
553
|
-
const tools$1 = await mcpClient.getTools();
|
|
554
|
-
const filteredTools = {};
|
|
555
|
-
Object.keys(tools$1).forEach((key) => {
|
|
556
|
-
if (!key.includes("MastraCourse")) {
|
|
557
|
-
filteredTools[key] = tools$1[key];
|
|
558
|
-
}
|
|
559
|
-
});
|
|
560
|
-
const agentBuilderTools = {
|
|
561
|
-
...filteredTools,
|
|
562
|
-
readFile: tools.createTool({
|
|
563
|
-
id: "read-file",
|
|
564
|
-
description: "Read contents of a file with optional line range selection.",
|
|
565
|
-
inputSchema: zod.z.object({
|
|
566
|
-
filePath: zod.z.string().describe("Path to the file to read"),
|
|
567
|
-
startLine: zod.z.number().optional().describe("Starting line number (1-indexed)"),
|
|
568
|
-
endLine: zod.z.number().optional().describe("Ending line number (1-indexed, inclusive)"),
|
|
569
|
-
encoding: zod.z.string().default("utf-8").describe("File encoding")
|
|
570
|
-
}),
|
|
571
|
-
outputSchema: zod.z.object({
|
|
572
|
-
success: zod.z.boolean(),
|
|
573
|
-
content: zod.z.string().optional(),
|
|
574
|
-
lines: zod.z.array(zod.z.string()).optional(),
|
|
575
|
-
metadata: zod.z.object({
|
|
576
|
-
size: zod.z.number(),
|
|
577
|
-
totalLines: zod.z.number(),
|
|
578
|
-
encoding: zod.z.string(),
|
|
579
|
-
lastModified: zod.z.string()
|
|
580
|
-
}).optional(),
|
|
581
|
-
error: zod.z.string().optional()
|
|
582
|
-
}),
|
|
583
|
-
execute: async ({ context }) => {
|
|
584
|
-
return await _AgentBuilderDefaults.readFile({ ...context, projectPath });
|
|
585
|
-
}
|
|
586
|
-
}),
|
|
587
|
-
writeFile: tools.createTool({
|
|
588
|
-
id: "write-file",
|
|
589
|
-
description: "Write content to a file, with options for creating directories.",
|
|
590
|
-
inputSchema: zod.z.object({
|
|
591
|
-
filePath: zod.z.string().describe("Path to the file to write"),
|
|
592
|
-
content: zod.z.string().describe("Content to write to the file"),
|
|
593
|
-
createDirs: zod.z.boolean().default(true).describe("Create parent directories if they don't exist"),
|
|
594
|
-
encoding: zod.z.string().default("utf-8").describe("File encoding")
|
|
595
|
-
}),
|
|
596
|
-
outputSchema: zod.z.object({
|
|
597
|
-
success: zod.z.boolean(),
|
|
598
|
-
filePath: zod.z.string(),
|
|
599
|
-
bytesWritten: zod.z.number().optional(),
|
|
600
|
-
message: zod.z.string(),
|
|
601
|
-
error: zod.z.string().optional()
|
|
602
|
-
}),
|
|
603
|
-
execute: async ({ context }) => {
|
|
604
|
-
return await _AgentBuilderDefaults.writeFile({ ...context, projectPath });
|
|
605
|
-
}
|
|
606
|
-
}),
|
|
607
|
-
listDirectory: tools.createTool({
|
|
608
|
-
id: "list-directory",
|
|
609
|
-
description: "List contents of a directory with filtering and metadata options.",
|
|
610
|
-
inputSchema: zod.z.object({
|
|
611
|
-
path: zod.z.string().describe("Directory path to list"),
|
|
612
|
-
recursive: zod.z.boolean().default(false).describe("List subdirectories recursively"),
|
|
613
|
-
includeHidden: zod.z.boolean().default(false).describe("Include hidden files and directories"),
|
|
614
|
-
pattern: zod.z.string().optional().describe("Glob pattern to filter files"),
|
|
615
|
-
maxDepth: zod.z.number().default(10).describe("Maximum recursion depth"),
|
|
616
|
-
includeMetadata: zod.z.boolean().default(true).describe("Include file metadata")
|
|
617
|
-
}),
|
|
618
|
-
outputSchema: zod.z.object({
|
|
619
|
-
success: zod.z.boolean(),
|
|
620
|
-
items: zod.z.array(
|
|
621
|
-
zod.z.object({
|
|
622
|
-
name: zod.z.string(),
|
|
623
|
-
path: zod.z.string(),
|
|
624
|
-
type: zod.z.enum(["file", "directory", "symlink"]),
|
|
625
|
-
size: zod.z.number().optional(),
|
|
626
|
-
lastModified: zod.z.string().optional(),
|
|
627
|
-
permissions: zod.z.string().optional()
|
|
628
|
-
})
|
|
629
|
-
),
|
|
630
|
-
totalItems: zod.z.number(),
|
|
631
|
-
path: zod.z.string(),
|
|
632
|
-
message: zod.z.string(),
|
|
633
|
-
error: zod.z.string().optional()
|
|
634
|
-
}),
|
|
635
|
-
execute: async ({ context }) => {
|
|
636
|
-
return await _AgentBuilderDefaults.listDirectory({ ...context, projectPath });
|
|
637
|
-
}
|
|
638
|
-
}),
|
|
639
|
-
executeCommand: tools.createTool({
|
|
640
|
-
id: "execute-command",
|
|
641
|
-
description: "Execute shell commands with proper error handling and output capture.",
|
|
642
|
-
inputSchema: zod.z.object({
|
|
643
|
-
command: zod.z.string().describe("Shell command to execute"),
|
|
644
|
-
workingDirectory: zod.z.string().optional().describe("Working directory for command execution"),
|
|
645
|
-
timeout: zod.z.number().default(3e4).describe("Timeout in milliseconds"),
|
|
646
|
-
captureOutput: zod.z.boolean().default(true).describe("Capture command output"),
|
|
647
|
-
shell: zod.z.string().optional().describe("Shell to use (defaults to system shell)"),
|
|
648
|
-
env: zod.z.record(zod.z.string()).optional().describe("Environment variables")
|
|
649
|
-
}),
|
|
650
|
-
outputSchema: zod.z.object({
|
|
651
|
-
success: zod.z.boolean(),
|
|
652
|
-
exitCode: zod.z.number().optional(),
|
|
653
|
-
stdout: zod.z.string().optional(),
|
|
654
|
-
stderr: zod.z.string().optional(),
|
|
655
|
-
command: zod.z.string(),
|
|
656
|
-
workingDirectory: zod.z.string().optional(),
|
|
657
|
-
executionTime: zod.z.number().optional(),
|
|
658
|
-
error: zod.z.string().optional()
|
|
659
|
-
}),
|
|
660
|
-
execute: async ({ context }) => {
|
|
661
|
-
return await _AgentBuilderDefaults.executeCommand({
|
|
662
|
-
...context,
|
|
663
|
-
workingDirectory: context.workingDirectory || projectPath
|
|
664
|
-
});
|
|
665
|
-
}
|
|
666
|
-
}),
|
|
667
|
-
// Enhanced Task Management (Critical for complex coding tasks)
|
|
668
|
-
taskManager: tools.createTool({
|
|
669
|
-
id: "task-manager",
|
|
670
|
-
description: "Create and manage structured task lists for coding sessions. Use this for complex multi-step tasks to track progress and ensure thoroughness.",
|
|
671
|
-
inputSchema: zod.z.object({
|
|
672
|
-
action: zod.z.enum(["create", "update", "list", "complete", "remove"]).describe("Task management action"),
|
|
673
|
-
tasks: zod.z.array(
|
|
674
|
-
zod.z.object({
|
|
675
|
-
id: zod.z.string().describe("Unique task identifier"),
|
|
676
|
-
content: zod.z.string().describe("Task description, optional if just updating the status").optional(),
|
|
677
|
-
status: zod.z.enum(["pending", "in_progress", "completed", "blocked"]).describe("Task status"),
|
|
678
|
-
priority: zod.z.enum(["high", "medium", "low"]).default("medium").describe("Task priority"),
|
|
679
|
-
dependencies: zod.z.array(zod.z.string()).optional().describe("IDs of tasks this depends on"),
|
|
680
|
-
notes: zod.z.string().optional().describe("Additional notes or context")
|
|
681
|
-
})
|
|
682
|
-
).optional().describe("Tasks to create or update"),
|
|
683
|
-
taskId: zod.z.string().optional().describe("Specific task ID for single task operations")
|
|
684
|
-
}),
|
|
685
|
-
outputSchema: zod.z.object({
|
|
686
|
-
success: zod.z.boolean(),
|
|
687
|
-
tasks: zod.z.array(
|
|
688
|
-
zod.z.object({
|
|
689
|
-
id: zod.z.string(),
|
|
690
|
-
content: zod.z.string(),
|
|
691
|
-
status: zod.z.string(),
|
|
692
|
-
priority: zod.z.string(),
|
|
693
|
-
dependencies: zod.z.array(zod.z.string()).optional(),
|
|
694
|
-
notes: zod.z.string().optional(),
|
|
695
|
-
createdAt: zod.z.string(),
|
|
696
|
-
updatedAt: zod.z.string()
|
|
697
|
-
})
|
|
698
|
-
),
|
|
699
|
-
message: zod.z.string()
|
|
700
|
-
}),
|
|
701
|
-
execute: async ({ context }) => {
|
|
702
|
-
return await _AgentBuilderDefaults.manageTaskList(context);
|
|
703
|
-
}
|
|
704
|
-
}),
|
|
705
|
-
// Advanced File Operations
|
|
706
|
-
multiEdit: tools.createTool({
|
|
707
|
-
id: "multi-edit",
|
|
708
|
-
description: "Perform multiple search-replace operations on one or more files in a single atomic operation.",
|
|
709
|
-
inputSchema: zod.z.object({
|
|
710
|
-
operations: zod.z.array(
|
|
711
|
-
zod.z.object({
|
|
712
|
-
filePath: zod.z.string().describe("Path to the file to edit"),
|
|
713
|
-
edits: zod.z.array(
|
|
714
|
-
zod.z.object({
|
|
715
|
-
oldString: zod.z.string().describe("Exact text to replace"),
|
|
716
|
-
newString: zod.z.string().describe("Replacement text"),
|
|
717
|
-
replaceAll: zod.z.boolean().default(false).describe("Replace all occurrences")
|
|
718
|
-
})
|
|
719
|
-
).describe("List of edit operations for this file")
|
|
720
|
-
})
|
|
721
|
-
).describe("File edit operations to perform"),
|
|
722
|
-
createBackup: zod.z.boolean().default(false).describe("Create backup files before editing")
|
|
723
|
-
}),
|
|
724
|
-
outputSchema: zod.z.object({
|
|
725
|
-
success: zod.z.boolean(),
|
|
726
|
-
results: zod.z.array(
|
|
727
|
-
zod.z.object({
|
|
728
|
-
filePath: zod.z.string(),
|
|
729
|
-
editsApplied: zod.z.number(),
|
|
730
|
-
errors: zod.z.array(zod.z.string()),
|
|
731
|
-
backup: zod.z.string().optional()
|
|
732
|
-
})
|
|
733
|
-
),
|
|
734
|
-
message: zod.z.string()
|
|
735
|
-
}),
|
|
736
|
-
execute: async ({ context }) => {
|
|
737
|
-
return await _AgentBuilderDefaults.performMultiEdit({ ...context, projectPath });
|
|
738
|
-
}
|
|
739
|
-
}),
|
|
740
|
-
// Interactive Communication
|
|
741
|
-
askClarification: tools.createTool({
|
|
742
|
-
id: "ask-clarification",
|
|
743
|
-
description: "Ask the user for clarification when requirements are unclear or when multiple options exist.",
|
|
744
|
-
inputSchema: zod.z.object({
|
|
745
|
-
question: zod.z.string().describe("The specific question to ask"),
|
|
746
|
-
options: zod.z.array(
|
|
747
|
-
zod.z.object({
|
|
748
|
-
id: zod.z.string(),
|
|
749
|
-
description: zod.z.string(),
|
|
750
|
-
implications: zod.z.string().optional()
|
|
751
|
-
})
|
|
752
|
-
).optional().describe("Multiple choice options if applicable"),
|
|
753
|
-
context: zod.z.string().optional().describe("Additional context about why clarification is needed"),
|
|
754
|
-
urgency: zod.z.enum(["low", "medium", "high"]).default("medium").describe("How urgent the clarification is")
|
|
755
|
-
}),
|
|
756
|
-
outputSchema: zod.z.object({
|
|
757
|
-
questionId: zod.z.string(),
|
|
758
|
-
question: zod.z.string(),
|
|
759
|
-
options: zod.z.array(
|
|
760
|
-
zod.z.object({
|
|
761
|
-
id: zod.z.string(),
|
|
762
|
-
description: zod.z.string()
|
|
763
|
-
})
|
|
764
|
-
).optional(),
|
|
765
|
-
awaitingResponse: zod.z.boolean()
|
|
766
|
-
}),
|
|
767
|
-
execute: async ({ context }) => {
|
|
768
|
-
return await _AgentBuilderDefaults.askClarification(context);
|
|
769
|
-
}
|
|
770
|
-
}),
|
|
771
|
-
// Enhanced Pattern Search
|
|
772
|
-
smartSearch: tools.createTool({
|
|
773
|
-
id: "smart-search",
|
|
774
|
-
description: "Intelligent search across codebase with context awareness and pattern matching.",
|
|
775
|
-
inputSchema: zod.z.object({
|
|
776
|
-
query: zod.z.string().describe("Search query or pattern"),
|
|
777
|
-
type: zod.z.enum(["text", "regex", "fuzzy", "semantic"]).default("text").describe("Type of search to perform"),
|
|
778
|
-
scope: zod.z.object({
|
|
779
|
-
paths: zod.z.array(zod.z.string()).optional().describe("Specific paths to search"),
|
|
780
|
-
fileTypes: zod.z.array(zod.z.string()).optional().describe("File extensions to include"),
|
|
781
|
-
excludePaths: zod.z.array(zod.z.string()).optional().describe("Paths to exclude"),
|
|
782
|
-
maxResults: zod.z.number().default(50).describe("Maximum number of results")
|
|
783
|
-
}).optional(),
|
|
784
|
-
context: zod.z.object({
|
|
785
|
-
beforeLines: zod.z.number().default(2).describe("Lines of context before match"),
|
|
786
|
-
afterLines: zod.z.number().default(2).describe("Lines of context after match"),
|
|
787
|
-
includeDefinitions: zod.z.boolean().default(false).describe("Include function/class definitions")
|
|
788
|
-
}).optional()
|
|
789
|
-
}),
|
|
790
|
-
outputSchema: zod.z.object({
|
|
791
|
-
success: zod.z.boolean(),
|
|
792
|
-
matches: zod.z.array(
|
|
793
|
-
zod.z.object({
|
|
794
|
-
file: zod.z.string(),
|
|
795
|
-
line: zod.z.number(),
|
|
796
|
-
column: zod.z.number().optional(),
|
|
797
|
-
match: zod.z.string(),
|
|
798
|
-
context: zod.z.object({
|
|
799
|
-
before: zod.z.array(zod.z.string()),
|
|
800
|
-
after: zod.z.array(zod.z.string())
|
|
801
|
-
}),
|
|
802
|
-
relevance: zod.z.number().optional()
|
|
803
|
-
})
|
|
804
|
-
),
|
|
805
|
-
summary: zod.z.object({
|
|
806
|
-
totalMatches: zod.z.number(),
|
|
807
|
-
filesSearched: zod.z.number(),
|
|
808
|
-
patterns: zod.z.array(zod.z.string())
|
|
809
|
-
})
|
|
810
|
-
}),
|
|
811
|
-
execute: async ({ context }) => {
|
|
812
|
-
return await _AgentBuilderDefaults.performSmartSearch(context);
|
|
813
|
-
}
|
|
814
|
-
}),
|
|
815
|
-
validateCode: tools.createTool({
|
|
816
|
-
id: "validate-code",
|
|
817
|
-
description: "Validates generated code through TypeScript compilation, ESLint, schema validation, and other checks",
|
|
818
|
-
inputSchema: zod.z.object({
|
|
819
|
-
projectPath: zod.z.string().optional().describe("Path to the project to validate (defaults to current project)"),
|
|
820
|
-
validationType: zod.z.array(zod.z.enum(["types", "lint", "schemas", "tests", "build"])).describe("Types of validation to perform"),
|
|
821
|
-
files: zod.z.array(zod.z.string()).optional().describe("Specific files to validate (if not provided, validates entire project)")
|
|
822
|
-
}),
|
|
823
|
-
outputSchema: zod.z.object({
|
|
824
|
-
valid: zod.z.boolean(),
|
|
825
|
-
errors: zod.z.array(
|
|
826
|
-
zod.z.object({
|
|
827
|
-
type: zod.z.enum(["typescript", "eslint", "schema", "test", "build"]),
|
|
828
|
-
severity: zod.z.enum(["error", "warning", "info"]),
|
|
829
|
-
message: zod.z.string(),
|
|
830
|
-
file: zod.z.string().optional(),
|
|
831
|
-
line: zod.z.number().optional(),
|
|
832
|
-
column: zod.z.number().optional(),
|
|
833
|
-
code: zod.z.string().optional()
|
|
834
|
-
})
|
|
835
|
-
),
|
|
836
|
-
summary: zod.z.object({
|
|
837
|
-
totalErrors: zod.z.number(),
|
|
838
|
-
totalWarnings: zod.z.number(),
|
|
839
|
-
validationsPassed: zod.z.array(zod.z.string()),
|
|
840
|
-
validationsFailed: zod.z.array(zod.z.string())
|
|
841
|
-
})
|
|
842
|
-
}),
|
|
843
|
-
execute: async ({ context }) => {
|
|
844
|
-
const { projectPath: validationProjectPath, validationType, files } = context;
|
|
845
|
-
const targetPath = validationProjectPath || projectPath;
|
|
846
|
-
return await _AgentBuilderDefaults.validateCode({
|
|
847
|
-
projectPath: targetPath,
|
|
848
|
-
validationType,
|
|
849
|
-
files
|
|
850
|
-
});
|
|
851
|
-
}
|
|
852
|
-
})
|
|
853
|
-
};
|
|
854
|
-
if (mode === "template") {
|
|
855
|
-
return agentBuilderTools;
|
|
856
|
-
} else {
|
|
857
|
-
return {
|
|
858
|
-
...agentBuilderTools,
|
|
859
|
-
// Web Search (replaces MCP web search)
|
|
860
|
-
webSearch: tools.createTool({
|
|
861
|
-
id: "web-search",
|
|
862
|
-
description: "Search the web for current information and return structured results.",
|
|
863
|
-
inputSchema: zod.z.object({
|
|
864
|
-
query: zod.z.string().describe("Search query"),
|
|
865
|
-
maxResults: zod.z.number().default(10).describe("Maximum number of results to return"),
|
|
866
|
-
region: zod.z.string().default("us").describe("Search region/country code"),
|
|
867
|
-
language: zod.z.string().default("en").describe("Search language"),
|
|
868
|
-
includeImages: zod.z.boolean().default(false).describe("Include image results"),
|
|
869
|
-
dateRange: zod.z.enum(["day", "week", "month", "year", "all"]).default("all").describe("Date range filter")
|
|
870
|
-
}),
|
|
871
|
-
outputSchema: zod.z.object({
|
|
872
|
-
success: zod.z.boolean(),
|
|
873
|
-
query: zod.z.string(),
|
|
874
|
-
results: zod.z.array(
|
|
875
|
-
zod.z.object({
|
|
876
|
-
title: zod.z.string(),
|
|
877
|
-
url: zod.z.string(),
|
|
878
|
-
snippet: zod.z.string(),
|
|
879
|
-
domain: zod.z.string(),
|
|
880
|
-
publishDate: zod.z.string().optional(),
|
|
881
|
-
relevanceScore: zod.z.number().optional()
|
|
882
|
-
})
|
|
883
|
-
),
|
|
884
|
-
totalResults: zod.z.number(),
|
|
885
|
-
searchTime: zod.z.number(),
|
|
886
|
-
suggestions: zod.z.array(zod.z.string()).optional(),
|
|
887
|
-
error: zod.z.string().optional()
|
|
888
|
-
}),
|
|
889
|
-
execute: async ({ context }) => {
|
|
890
|
-
return await _AgentBuilderDefaults.webSearch(context);
|
|
891
|
-
}
|
|
892
|
-
}),
|
|
893
|
-
// Enhanced Code Discovery
|
|
894
|
-
codeAnalyzer: tools.createTool({
|
|
895
|
-
id: "code-analyzer",
|
|
896
|
-
description: "Analyze codebase structure, discover definitions, and understand architecture patterns.",
|
|
897
|
-
inputSchema: zod.z.object({
|
|
898
|
-
action: zod.z.enum(["definitions", "dependencies", "patterns", "structure"]).describe("Type of analysis to perform"),
|
|
899
|
-
path: zod.z.string().describe("Directory or file path to analyze"),
|
|
900
|
-
language: zod.z.string().optional().describe("Programming language filter"),
|
|
901
|
-
depth: zod.z.number().default(3).describe("Directory traversal depth"),
|
|
902
|
-
includeTests: zod.z.boolean().default(false).describe("Include test files in analysis")
|
|
903
|
-
}),
|
|
904
|
-
outputSchema: zod.z.object({
|
|
905
|
-
success: zod.z.boolean(),
|
|
906
|
-
analysis: zod.z.object({
|
|
907
|
-
definitions: zod.z.array(
|
|
908
|
-
zod.z.object({
|
|
909
|
-
name: zod.z.string(),
|
|
910
|
-
type: zod.z.string(),
|
|
911
|
-
file: zod.z.string(),
|
|
912
|
-
line: zod.z.number().optional(),
|
|
913
|
-
scope: zod.z.string().optional()
|
|
914
|
-
})
|
|
915
|
-
).optional(),
|
|
916
|
-
dependencies: zod.z.array(
|
|
917
|
-
zod.z.object({
|
|
918
|
-
name: zod.z.string(),
|
|
919
|
-
type: zod.z.enum(["import", "require", "include"]),
|
|
920
|
-
source: zod.z.string(),
|
|
921
|
-
target: zod.z.string()
|
|
922
|
-
})
|
|
923
|
-
).optional(),
|
|
924
|
-
patterns: zod.z.array(
|
|
925
|
-
zod.z.object({
|
|
926
|
-
pattern: zod.z.string(),
|
|
927
|
-
description: zod.z.string(),
|
|
928
|
-
files: zod.z.array(zod.z.string())
|
|
929
|
-
})
|
|
930
|
-
).optional(),
|
|
931
|
-
structure: zod.z.object({
|
|
932
|
-
directories: zod.z.number(),
|
|
933
|
-
files: zod.z.number(),
|
|
934
|
-
languages: zod.z.record(zod.z.number()),
|
|
935
|
-
complexity: zod.z.string()
|
|
936
|
-
}).optional()
|
|
937
|
-
}),
|
|
938
|
-
message: zod.z.string()
|
|
939
|
-
}),
|
|
940
|
-
execute: async ({ context }) => {
|
|
941
|
-
return await _AgentBuilderDefaults.analyzeCode(context);
|
|
942
|
-
}
|
|
943
|
-
}),
|
|
944
|
-
// Task Completion Signaling
|
|
945
|
-
attemptCompletion: tools.createTool({
|
|
946
|
-
id: "attempt-completion",
|
|
947
|
-
description: "Signal that you believe the requested task has been completed and provide a summary.",
|
|
948
|
-
inputSchema: zod.z.object({
|
|
949
|
-
summary: zod.z.string().describe("Summary of what was accomplished"),
|
|
950
|
-
changes: zod.z.array(
|
|
951
|
-
zod.z.object({
|
|
952
|
-
type: zod.z.enum([
|
|
953
|
-
"file_created",
|
|
954
|
-
"file_modified",
|
|
955
|
-
"file_deleted",
|
|
956
|
-
"command_executed",
|
|
957
|
-
"dependency_added"
|
|
958
|
-
]),
|
|
959
|
-
description: zod.z.string(),
|
|
960
|
-
path: zod.z.string().optional()
|
|
961
|
-
})
|
|
962
|
-
).describe("List of changes made"),
|
|
963
|
-
validation: zod.z.object({
|
|
964
|
-
testsRun: zod.z.boolean().default(false),
|
|
965
|
-
buildsSuccessfully: zod.z.boolean().default(false),
|
|
966
|
-
manualTestingRequired: zod.z.boolean().default(false)
|
|
967
|
-
}).describe("Validation status"),
|
|
968
|
-
nextSteps: zod.z.array(zod.z.string()).optional().describe("Suggested next steps or follow-up actions")
|
|
969
|
-
}),
|
|
970
|
-
outputSchema: zod.z.object({
|
|
971
|
-
completionId: zod.z.string(),
|
|
972
|
-
status: zod.z.enum(["completed", "needs_review", "needs_testing"]),
|
|
973
|
-
summary: zod.z.string(),
|
|
974
|
-
confidence: zod.z.number().min(0).max(100)
|
|
975
|
-
}),
|
|
976
|
-
execute: async ({ context }) => {
|
|
977
|
-
return await _AgentBuilderDefaults.signalCompletion(context);
|
|
978
|
-
}
|
|
979
|
-
}),
|
|
980
|
-
manageProject: tools.createTool({
|
|
981
|
-
id: "manage-project",
|
|
982
|
-
description: "Handles project management including creating project structures, managing dependencies, and package operations.",
|
|
983
|
-
inputSchema: zod.z.object({
|
|
984
|
-
action: zod.z.enum(["create", "install", "upgrade"]).describe("The action to perform"),
|
|
985
|
-
features: zod.z.array(zod.z.string()).optional().describe('Mastra features to include (e.g., ["agents", "memory", "workflows"])'),
|
|
986
|
-
packages: zod.z.array(
|
|
987
|
-
zod.z.object({
|
|
988
|
-
name: zod.z.string(),
|
|
989
|
-
version: zod.z.string().optional()
|
|
990
|
-
})
|
|
991
|
-
).optional().describe("Packages to install/upgrade")
|
|
992
|
-
}),
|
|
993
|
-
outputSchema: zod.z.object({
|
|
994
|
-
success: zod.z.boolean(),
|
|
995
|
-
installed: zod.z.array(zod.z.string()).optional(),
|
|
996
|
-
upgraded: zod.z.array(zod.z.string()).optional(),
|
|
997
|
-
warnings: zod.z.array(zod.z.string()).optional(),
|
|
998
|
-
message: zod.z.string().optional(),
|
|
999
|
-
details: zod.z.string().optional(),
|
|
1000
|
-
error: zod.z.string().optional()
|
|
1001
|
-
}),
|
|
1002
|
-
execute: async ({ context }) => {
|
|
1003
|
-
const { action, features, packages } = context;
|
|
1004
|
-
try {
|
|
1005
|
-
switch (action) {
|
|
1006
|
-
case "create":
|
|
1007
|
-
return await _AgentBuilderDefaults.createMastraProject({
|
|
1008
|
-
projectName: projectPath,
|
|
1009
|
-
features
|
|
1010
|
-
});
|
|
1011
|
-
case "install":
|
|
1012
|
-
if (!packages?.length) {
|
|
1013
|
-
return {
|
|
1014
|
-
success: false,
|
|
1015
|
-
message: "Packages array is required for install action"
|
|
1016
|
-
};
|
|
1017
|
-
}
|
|
1018
|
-
return await _AgentBuilderDefaults.installPackages({
|
|
1019
|
-
packages,
|
|
1020
|
-
projectPath
|
|
1021
|
-
});
|
|
1022
|
-
case "upgrade":
|
|
1023
|
-
if (!packages?.length) {
|
|
1024
|
-
return {
|
|
1025
|
-
success: false,
|
|
1026
|
-
message: "Packages array is required for upgrade action"
|
|
1027
|
-
};
|
|
1028
|
-
}
|
|
1029
|
-
return await _AgentBuilderDefaults.upgradePackages({
|
|
1030
|
-
packages,
|
|
1031
|
-
projectPath
|
|
1032
|
-
});
|
|
1033
|
-
// case 'check':
|
|
1034
|
-
// return await AgentBuilderDefaults.checkProject({
|
|
1035
|
-
// projectPath,
|
|
1036
|
-
// });
|
|
1037
|
-
default:
|
|
1038
|
-
return {
|
|
1039
|
-
success: false,
|
|
1040
|
-
message: `Unknown action: ${action}`
|
|
1041
|
-
};
|
|
1042
|
-
}
|
|
1043
|
-
} catch (error) {
|
|
1044
|
-
return {
|
|
1045
|
-
success: false,
|
|
1046
|
-
message: `Error executing ${action}: ${error instanceof Error ? error.message : String(error)}`
|
|
1047
|
-
};
|
|
1048
|
-
}
|
|
1049
|
-
}
|
|
1050
|
-
}),
|
|
1051
|
-
manageServer: tools.createTool({
|
|
1052
|
-
id: "manage-server",
|
|
1053
|
-
description: "Manages the Mastra server - start, stop, restart, and check status, use the terminal tool to make curl requests to the server. There is an openapi spec for the server at http://localhost:{port}/openapi.json",
|
|
1054
|
-
inputSchema: zod.z.object({
|
|
1055
|
-
action: zod.z.enum(["start", "stop", "restart", "status"]).describe("Server management action"),
|
|
1056
|
-
port: zod.z.number().optional().default(4200).describe("Port to run the server on")
|
|
1057
|
-
}),
|
|
1058
|
-
outputSchema: zod.z.object({
|
|
1059
|
-
success: zod.z.boolean(),
|
|
1060
|
-
status: zod.z.enum(["running", "stopped", "starting", "stopping", "unknown"]),
|
|
1061
|
-
pid: zod.z.number().optional(),
|
|
1062
|
-
port: zod.z.number().optional(),
|
|
1063
|
-
url: zod.z.string().optional(),
|
|
1064
|
-
message: zod.z.string().optional(),
|
|
1065
|
-
stdout: zod.z.array(zod.z.string()).optional().describe("Server output lines captured during startup"),
|
|
1066
|
-
error: zod.z.string().optional()
|
|
1067
|
-
}),
|
|
1068
|
-
execute: async ({ context }) => {
|
|
1069
|
-
const { action, port } = context;
|
|
1070
|
-
try {
|
|
1071
|
-
switch (action) {
|
|
1072
|
-
case "start":
|
|
1073
|
-
return await _AgentBuilderDefaults.startMastraServer({
|
|
1074
|
-
port,
|
|
1075
|
-
projectPath
|
|
1076
|
-
});
|
|
1077
|
-
case "stop":
|
|
1078
|
-
return await _AgentBuilderDefaults.stopMastraServer({
|
|
1079
|
-
port,
|
|
1080
|
-
projectPath
|
|
1081
|
-
});
|
|
1082
|
-
case "restart":
|
|
1083
|
-
const stopResult = await _AgentBuilderDefaults.stopMastraServer({
port,
projectPath
});
if (!stopResult.success) {
return {
success: false,
status: "unknown",
message: `Failed to restart: could not stop server on port ${port}`,
error: stopResult.error || "Unknown stop error"
};
}
await new Promise((resolve4) => setTimeout(resolve4, 500));
const startResult = await _AgentBuilderDefaults.startMastraServer({
port,
projectPath
});
if (!startResult.success) {
return {
success: false,
status: "stopped",
message: `Failed to restart: server stopped successfully but failed to start on port ${port}`,
error: startResult.error || "Unknown start error"
};
}
return {
...startResult,
message: `Mastra server restarted successfully on port ${port}`
};
case "status":
return await _AgentBuilderDefaults.checkMastraServerStatus({
port,
projectPath
});
default:
return {
success: false,
status: "unknown",
message: `Unknown action: ${action}`
};
}
} catch (error) {
return {
success: false,
status: "unknown",
message: `Error managing server: ${error instanceof Error ? error.message : String(error)}`
};
}
}
}),
httpRequest: tools.createTool({
id: "http-request",
description: "Makes HTTP requests to the Mastra server or external APIs for testing and integration",
inputSchema: zod.z.object({
method: zod.z.enum(["GET", "POST", "PUT", "DELETE", "PATCH"]).describe("HTTP method"),
url: zod.z.string().describe("Full URL or path (if baseUrl provided)"),
baseUrl: zod.z.string().optional().describe("Base URL for the server (e.g., http://localhost:4200)"),
headers: zod.z.record(zod.z.string()).optional().describe("HTTP headers"),
body: zod.z.any().optional().describe("Request body (will be JSON stringified if object)"),
timeout: zod.z.number().optional().default(3e4).describe("Request timeout in milliseconds")
}),
outputSchema: zod.z.object({
success: zod.z.boolean(),
status: zod.z.number().optional(),
statusText: zod.z.string().optional(),
headers: zod.z.record(zod.z.string()).optional(),
data: zod.z.any().optional(),
error: zod.z.string().optional(),
url: zod.z.string(),
method: zod.z.string()
}),
execute: async ({ context }) => {
const { method, url, baseUrl, headers, body, timeout } = context;
try {
return await _AgentBuilderDefaults.makeHttpRequest({
method,
url,
baseUrl,
headers,
body,
timeout
});
} catch (error) {
return {
success: false,
url: baseUrl ? `${baseUrl}${url}` : url,
method,
error: error instanceof Error ? error.message : String(error)
};
}
}
})
};
}
};
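// Usage sketch (illustrative only, not part of the original bundle): the tools returned by
// DEFAULT_TOOLS above are plain createTool() objects, so a caller could exercise the httpRequest
// tool directly. `projectPath` here is a hypothetical path supplied by the caller.
//   const builderTools = await AgentBuilderDefaults.DEFAULT_TOOLS(projectPath);
//   const health = await builderTools.httpRequest.execute({
//     context: { method: "GET", url: "/health", baseUrl: "http://localhost:4200", timeout: 5000 }
//   });
//   console.log(health.success, health.status, health.data);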
/**
* Create a new Mastra project using create-mastra CLI
*/
static async createMastraProject({ features, projectName }) {
try {
const args = ["pnpx", "create", "mastra@latest", projectName ?? "", "-l", "openai", "-k", "skip"];
if (features && features.length > 0) {
args.push("--components", features.join(","));
}
args.push("--example");
const { stdout, stderr } = await exec(args.join(" "));
return {
success: true,
projectPath: `./${projectName}`,
message: `Successfully created Mastra project: ${projectName}.`,
details: stdout,
error: stderr
};
} catch (error) {
return {
success: false,
message: `Failed to create project: ${error instanceof Error ? error.message : String(error)}`
};
}
}
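// For reference (sketch): with projectName "my-app" and features ["agents", "tools"], the args
// array above joins into the shell command
//   pnpx create mastra@latest my-app -l openai -k skip --components agents,tools --example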
/**
* Install packages using the detected package manager
*/
static async installPackages({
packages,
projectPath
}) {
try {
console.log("Installing packages:", JSON.stringify(packages, null, 2));
const packageStrings = packages.map((p) => `${p.name}`);
await spawnSWPM(projectPath || "", "add", packageStrings);
return {
success: true,
installed: packageStrings,
message: `Successfully installed ${packages.length} package(s).`,
details: ""
};
} catch (error) {
return {
success: false,
message: `Failed to install packages: ${error instanceof Error ? error.message : String(error)}`
};
}
}
/**
* Upgrade packages using the detected package manager
*/
static async upgradePackages({
packages,
projectPath
}) {
try {
console.log("Upgrading specific packages:", JSON.stringify(packages, null, 2));
let packageNames = [];
if (packages && packages.length > 0) {
packageNames = packages.map((p) => `${p.name}`);
}
await spawnSWPM(projectPath || "", "upgrade", packageNames);
return {
success: true,
upgraded: packages?.map((p) => p.name) || ["all packages"],
message: `Packages upgraded successfully.`,
details: ""
};
} catch (error) {
return {
success: false,
message: `Failed to upgrade packages: ${error instanceof Error ? error.message : String(error)}`
};
}
}
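// Usage sketch (illustrative): installPackages expects package entries with a `name` field; the
// package names and project path below are example values only.
//   await AgentBuilderDefaults.installPackages({
//     packages: [{ name: "@mastra/memory" }, { name: "zod" }],
//     projectPath: "./my-app"
//   });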
// /**
//  * Check project health and status
//  */
// static async checkProject({ projectPath }: { projectPath?: string }) {
//   try {
//     const execOptions = projectPath ? { cwd: projectPath } : {};
//     let hasPackageJson = false;
//     let hasMastraConfig = false;
//     try {
//       await exec('test -f package.json', execOptions);
//       hasPackageJson = true;
//     } catch {
//       // ignore
//     }
//     try {
//       await exec('test -f mastra.config.* || test -d src/mastra || test -d mastra', execOptions);
//       hasMastraConfig = true;
//     } catch {
//       // ignore
//     }
//     const warnings: string[] = [];
//     if (!hasPackageJson) {
//       warnings.push('No package.json found - this may not be a Node.js project');
//     }
//     if (!hasMastraConfig) {
//       warnings.push('No Mastra configuration found - run "npx create-mastra" to initialize');
//     }
//     return {
//       success: true,
//       message: `Project health check completed for ${projectPath || 'current directory'}`,
//       warnings,
//       checks: {
//         hasPackageJson,
//         hasMastraConfig,
//       },
//     };
//   } catch (error) {
//     return {
//       success: false,
//       message: `Failed to check project: ${error instanceof Error ? error.message : String(error)}`,
//     };
//   }
// }
/**
* Start the Mastra server
*/
static async startMastraServer({
port = 4200,
projectPath,
env = {}
}) {
try {
const serverEnv = { ...process.env, ...env, PORT: port.toString() };
const execOptions = {
cwd: projectPath || process.cwd(),
env: serverEnv
};
const serverProcess = child_process.spawn("pnpm", ["run", "dev"], {
...execOptions,
detached: true,
stdio: "pipe"
});
const stdoutLines = [];
const serverStarted = new Promise((resolve4, reject) => {
const timeout = setTimeout(() => {
reject(new Error(`Server startup timeout after 30 seconds. Output: ${stdoutLines.join("\n")}`));
}, 3e4);
serverProcess.stdout?.on("data", (data) => {
const output = data.toString();
const lines = output.split("\n").filter((line) => line.trim());
stdoutLines.push(...lines);
if (output.includes("Mastra API running on port")) {
clearTimeout(timeout);
resolve4({
success: true,
status: "running",
pid: serverProcess.pid,
port,
url: `http://localhost:${port}`,
message: `Mastra server started successfully on port ${port}`,
stdout: stdoutLines
});
}
});
serverProcess.stderr?.on("data", (data) => {
const errorOutput = data.toString();
stdoutLines.push(`[STDERR] ${errorOutput}`);
clearTimeout(timeout);
reject(new Error(`Server startup failed with error: ${errorOutput}`));
});
serverProcess.on("error", (error) => {
clearTimeout(timeout);
reject(error);
});
serverProcess.on("exit", (code, signal) => {
clearTimeout(timeout);
if (code !== 0 && code !== null) {
reject(
new Error(
`Server process exited with code ${code}${signal ? ` (signal: ${signal})` : ""}. Output: ${stdoutLines.join("\n")}`
)
);
}
});
});
return await serverStarted;
} catch (error) {
return {
success: false,
status: "stopped",
error: error instanceof Error ? error.message : String(error)
};
}
}
/**
* Stop the Mastra server
*/
static async stopMastraServer({ port = 4200, projectPath: _projectPath }) {
try {
const { stdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
if (!stdout.trim() || stdout.trim() === "No process found") {
return {
success: true,
status: "stopped",
message: `No Mastra server found running on port ${port}`
};
}
const pids = stdout.trim().split("\n").filter((pid) => pid.trim());
const killedPids = [];
const failedPids = [];
for (const pidStr of pids) {
const pid = parseInt(pidStr.trim());
if (isNaN(pid)) continue;
try {
process.kill(pid, "SIGTERM");
killedPids.push(pid);
} catch {
failedPids.push(pid);
}
}
if (killedPids.length === 0) {
return {
success: false,
status: "unknown",
message: `Failed to stop any processes on port ${port}`,
error: `Could not kill PIDs: ${failedPids.join(", ")}`
};
}
await new Promise((resolve4) => setTimeout(resolve4, 2e3));
try {
const { stdout: checkStdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
if (checkStdout.trim() && checkStdout.trim() !== "No process found") {
const remainingPids = checkStdout.trim().split("\n").filter((pid) => pid.trim());
for (const pidStr of remainingPids) {
const pid = parseInt(pidStr.trim());
if (!isNaN(pid)) {
try {
process.kill(pid, "SIGKILL");
} catch {
}
}
}
await new Promise((resolve4) => setTimeout(resolve4, 1e3));
const { stdout: finalCheck } = await exec(`lsof -ti:${port} || echo "No process found"`);
if (finalCheck.trim() && finalCheck.trim() !== "No process found") {
return {
success: false,
status: "unknown",
message: `Server processes still running on port ${port} after stop attempts`,
error: `Remaining PIDs: ${finalCheck.trim()}`
};
}
}
} catch (error) {
console.warn("Failed to verify server stop:", error);
}
return {
success: true,
status: "stopped",
message: `Mastra server stopped successfully (port ${port}). Killed PIDs: ${killedPids.join(", ")}`
};
} catch (error) {
return {
success: false,
status: "unknown",
error: error instanceof Error ? error.message : String(error)
};
}
}
/**
* Check Mastra server status
*/
static async checkMastraServerStatus({
port = 4200,
projectPath: _projectPath
}) {
try {
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 5e3);
const response = await fetch(`http://localhost:${port}/health`, {
method: "GET",
signal: controller.signal
});
clearTimeout(timeoutId);
if (response.ok) {
return {
success: true,
status: "running",
port,
url: `http://localhost:${port}`,
message: "Mastra server is running and healthy"
};
} else {
return {
success: false,
status: "unknown",
port,
message: `Server responding but not healthy (status: ${response.status})`
};
}
} catch {
try {
const { stdout } = await exec(`lsof -ti:${port} || echo "No process found"`);
const hasProcess = stdout.trim() && stdout.trim() !== "No process found";
return {
success: Boolean(hasProcess),
status: hasProcess ? "starting" : "stopped",
port,
message: hasProcess ? "Server process exists but not responding to health checks" : "No server process found on specified port"
};
} catch {
return {
success: false,
status: "stopped",
port,
message: "Server is not running"
};
}
}
}
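// Lifecycle sketch (illustrative): the three static helpers above compose into the restart flow
// used by the manageServer tool earlier in this file; the port and project path are example values.
//   await AgentBuilderDefaults.startMastraServer({ port: 4200, projectPath: "./my-app" });
//   const status = await AgentBuilderDefaults.checkMastraServerStatus({ port: 4200 });
//   if (status.status !== "running") {
//     await AgentBuilderDefaults.stopMastraServer({ port: 4200 });
//   }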
/**
* Validate code using TypeScript, ESLint, and other tools
*/
static async validateCode({
projectPath,
validationType,
files
}) {
const errors = [];
const validationsPassed = [];
const validationsFailed = [];
const execOptions = { cwd: projectPath };
if (validationType.includes("types")) {
try {
const filePattern = files?.length ? files.join(" ") : "";
const tscCommand = files?.length ? `npx tsc --noEmit ${filePattern}` : "npx tsc --noEmit";
await exec(tscCommand, execOptions);
validationsPassed.push("types");
} catch (error) {
let tsOutput = "";
if (error.stdout) {
tsOutput = error.stdout;
} else if (error.stderr) {
tsOutput = error.stderr;
} else if (error.message) {
tsOutput = error.message;
}
errors.push({
type: "typescript",
severity: "error",
message: tsOutput.trim() || `TypeScript validation failed: ${error.message || String(error)}`
});
validationsFailed.push("types");
}
}
if (validationType.includes("lint")) {
try {
const filePattern = files?.length ? files.join(" ") : ".";
const eslintCommand = `npx eslint ${filePattern} --format json`;
const { stdout } = await exec(eslintCommand, execOptions);
if (stdout) {
const eslintResults = JSON.parse(stdout);
const eslintErrors = _AgentBuilderDefaults.parseESLintErrors(eslintResults);
errors.push(...eslintErrors);
if (eslintErrors.some((e) => e.severity === "error")) {
validationsFailed.push("lint");
} else {
validationsPassed.push("lint");
}
} else {
validationsPassed.push("lint");
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('"filePath"') || errorMessage.includes("messages")) {
try {
const eslintResults = JSON.parse(errorMessage);
const eslintErrors = _AgentBuilderDefaults.parseESLintErrors(eslintResults);
errors.push(...eslintErrors);
validationsFailed.push("lint");
} catch {
errors.push({
type: "eslint",
severity: "error",
message: `ESLint validation failed: ${errorMessage}`
});
validationsFailed.push("lint");
}
} else {
validationsPassed.push("lint");
}
}
}
const totalErrors = errors.filter((e) => e.severity === "error").length;
const totalWarnings = errors.filter((e) => e.severity === "warning").length;
const isValid = totalErrors === 0;
return {
valid: isValid,
errors,
summary: {
totalErrors,
totalWarnings,
validationsPassed,
validationsFailed
}
};
}
/**
* Parse ESLint errors from JSON output
*/
static parseESLintErrors(eslintResults) {
const errors = [];
for (const result of eslintResults) {
for (const message of result.messages || []) {
if (message.message) {
errors.push({
type: "eslint",
severity: message.severity === 1 ? "warning" : "error",
message: message.message,
file: result.filePath || void 0,
line: message.line || void 0,
column: message.column || void 0,
code: message.ruleId || void 0
});
}
}
}
return errors;
}
/**
* Make HTTP request to server or external API
*/
static async makeHttpRequest({
method,
url,
baseUrl,
headers = {},
body,
timeout = 3e4
}) {
try {
const fullUrl = baseUrl ? `${baseUrl}${url}` : url;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
const requestOptions = {
method,
headers: {
"Content-Type": "application/json",
...headers
},
signal: controller.signal
};
if (body && (method === "POST" || method === "PUT" || method === "PATCH")) {
requestOptions.body = typeof body === "string" ? body : JSON.stringify(body);
}
const response = await fetch(fullUrl, requestOptions);
clearTimeout(timeoutId);
let data;
const contentType = response.headers.get("content-type");
if (contentType?.includes("application/json")) {
data = await response.json();
} else {
data = await response.text();
}
const responseHeaders = {};
response.headers.forEach((value, key) => {
responseHeaders[key] = value;
});
return {
success: response.ok,
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
data,
url: fullUrl,
method
};
} catch (error) {
return {
success: false,
url: baseUrl ? `${baseUrl}${url}` : url,
method,
error: error instanceof Error ? error.message : String(error)
};
}
}
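// Usage sketch (illustrative): makeHttpRequest parses JSON bodies automatically when the response
// is application/json; a non-2xx status surfaces as success: false while the parsed payload stays
// available on `data`. The port and path below are example values.
//   const res = await AgentBuilderDefaults.makeHttpRequest({
//     method: "GET",
//     baseUrl: "http://localhost:4200",
//     url: "/health",
//     timeout: 5000
//   });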
|
|
1660
|
-
/**
|
|
1661
|
-
* Enhanced task management system for complex coding tasks
|
|
1662
|
-
*/
|
|
1663
|
-
static async manageTaskList(context) {
|
|
1664
|
-
if (!_AgentBuilderDefaults.taskStorage) {
|
|
1665
|
-
_AgentBuilderDefaults.taskStorage = /* @__PURE__ */ new Map();
|
|
1666
|
-
}
|
|
1667
|
-
const sessionId = "current";
|
|
1668
|
-
const existingTasks = _AgentBuilderDefaults.taskStorage.get(sessionId) || [];
|
|
1669
|
-
try {
|
|
1670
|
-
switch (context.action) {
|
|
1671
|
-
case "create":
|
|
1672
|
-
if (!context.tasks?.length) {
|
|
1673
|
-
return {
|
|
1674
|
-
success: false,
|
|
1675
|
-
tasks: existingTasks,
|
|
1676
|
-
message: "No tasks provided for creation"
|
|
1677
|
-
};
|
|
1678
|
-
}
|
|
1679
|
-
const newTasks = context.tasks.map((task) => ({
|
|
1680
|
-
...task,
|
|
1681
|
-
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
1682
|
-
updatedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
1683
|
-
}));
|
|
1684
|
-
const allTasks = [...existingTasks, ...newTasks];
|
|
1685
|
-
_AgentBuilderDefaults.taskStorage.set(sessionId, allTasks);
|
|
1686
|
-
return {
|
|
1687
|
-
success: true,
|
|
1688
|
-
tasks: allTasks,
|
|
1689
|
-
message: `Created ${newTasks.length} new task(s)`
|
|
1690
|
-
};
|
|
1691
|
-
case "update":
|
|
1692
|
-
if (!context.tasks?.length) {
|
|
1693
|
-
return {
|
|
1694
|
-
success: false,
|
|
1695
|
-
tasks: existingTasks,
|
|
1696
|
-
message: "No tasks provided for update"
|
|
1697
|
-
};
|
|
1698
|
-
}
|
|
1699
|
-
const updatedTasks = existingTasks.map((existing) => {
|
|
1700
|
-
const update = context.tasks.find((t) => t.id === existing.id);
|
|
1701
|
-
return update ? { ...existing, ...update, updatedAt: (/* @__PURE__ */ new Date()).toISOString() } : existing;
|
|
1702
|
-
});
|
|
1703
|
-
_AgentBuilderDefaults.taskStorage.set(sessionId, updatedTasks);
|
|
1704
|
-
return {
|
|
1705
|
-
success: true,
|
|
1706
|
-
tasks: updatedTasks,
|
|
1707
|
-
message: "Tasks updated successfully"
|
|
1708
|
-
};
|
|
1709
|
-
case "complete":
|
|
1710
|
-
if (!context.taskId) {
|
|
1711
|
-
return {
|
|
1712
|
-
success: false,
|
|
1713
|
-
tasks: existingTasks,
|
|
1714
|
-
message: "Task ID required for completion"
|
|
1715
|
-
};
|
|
1716
|
-
}
|
|
1717
|
-
const completedTasks = existingTasks.map(
|
|
1718
|
-
(task) => task.id === context.taskId ? { ...task, status: "completed", updatedAt: (/* @__PURE__ */ new Date()).toISOString() } : task
|
|
1719
|
-
);
|
|
1720
|
-
_AgentBuilderDefaults.taskStorage.set(sessionId, completedTasks);
|
|
1721
|
-
return {
|
|
1722
|
-
success: true,
|
|
1723
|
-
tasks: completedTasks,
|
|
1724
|
-
message: `Task ${context.taskId} marked as completed`
|
|
1725
|
-
};
|
|
1726
|
-
case "remove":
|
|
1727
|
-
if (!context.taskId) {
|
|
1728
|
-
return {
|
|
1729
|
-
success: false,
|
|
1730
|
-
tasks: existingTasks,
|
|
1731
|
-
message: "Task ID required for removal"
|
|
1732
|
-
};
|
|
1733
|
-
}
|
|
1734
|
-
const filteredTasks = existingTasks.filter((task) => task.id !== context.taskId);
|
|
1735
|
-
_AgentBuilderDefaults.taskStorage.set(sessionId, filteredTasks);
|
|
1736
|
-
return {
|
|
1737
|
-
success: true,
|
|
1738
|
-
tasks: filteredTasks,
|
|
1739
|
-
message: `Task ${context.taskId} removed`
|
|
1740
|
-
};
|
|
1741
|
-
case "list":
|
|
1742
|
-
default:
|
|
1743
|
-
return {
|
|
1744
|
-
success: true,
|
|
1745
|
-
tasks: existingTasks,
|
|
1746
|
-
message: `Found ${existingTasks.length} task(s)`
|
|
1747
|
-
};
|
|
1748
|
-
}
|
|
1749
|
-
} catch (error) {
|
|
1750
|
-
return {
|
|
1751
|
-
success: false,
|
|
1752
|
-
tasks: existingTasks,
|
|
1753
|
-
message: `Task management error: ${error instanceof Error ? error.message : String(error)}`
|
|
1754
|
-
};
|
|
1755
|
-
}
|
|
1756
|
-
}
|
|
1757
|
-
/**
|
|
1758
|
-
* Analyze codebase structure and patterns
|
|
1759
|
-
*/
|
|
1760
|
-
static async analyzeCode(context) {
|
|
1761
|
-
try {
|
|
1762
|
-
const { action, path, language, depth = 3 } = context;
|
|
1763
|
-
const languagePattern = language ? `*.${language}` : "*";
|
|
1764
|
-
switch (action) {
|
|
1765
|
-
case "definitions":
|
|
1766
|
-
const definitionPatterns = [
|
|
1767
|
-
"function\\s+([a-zA-Z_][a-zA-Z0-9_]*)",
|
|
1768
|
-
"class\\s+([a-zA-Z_][a-zA-Z0-9_]*)",
|
|
1769
|
-
"interface\\s+([a-zA-Z_][a-zA-Z0-9_]*)",
|
|
1770
|
-
"const\\s+([a-zA-Z_][a-zA-Z0-9_]*)\\s*=",
|
|
1771
|
-
"export\\s+(function|class|interface|const)\\s+([a-zA-Z_][a-zA-Z0-9_]*)"
|
|
1772
|
-
];
|
|
1773
|
-
const definitions = [];
|
|
1774
|
-
for (const pattern of definitionPatterns) {
|
|
1775
|
-
try {
|
|
1776
|
-
const { stdout } = await exec(
|
|
1777
|
-
`rg -n "${pattern}" "${path}" --type ${languagePattern} --max-depth ${depth}`
|
|
1778
|
-
);
|
|
1779
|
-
const matches = stdout.split("\n").filter((line) => line.trim());
|
|
1780
|
-
matches.forEach((match) => {
|
|
1781
|
-
const parts = match.split(":");
|
|
1782
|
-
if (parts.length >= 3) {
|
|
1783
|
-
const file = parts[0];
|
|
1784
|
-
const lineStr = parts[1];
|
|
1785
|
-
const line = parseInt(lineStr || "0");
|
|
1786
|
-
const content = parts.slice(2).join(":");
|
|
1787
|
-
const nameMatch = content.match(/([a-zA-Z_][a-zA-Z0-9_]*)/);
|
|
1788
|
-
if (nameMatch && nameMatch[1]) {
|
|
1789
|
-
definitions.push({
|
|
1790
|
-
name: nameMatch[1],
|
|
1791
|
-
type: pattern.includes("function") ? "function" : pattern.includes("class") ? "class" : pattern.includes("interface") ? "interface" : "variable",
|
|
1792
|
-
file: file || "",
|
|
1793
|
-
line,
|
|
1794
|
-
scope: "top-level"
|
|
1795
|
-
});
|
|
1796
|
-
}
|
|
1797
|
-
}
|
|
1798
|
-
});
|
|
1799
|
-
} catch {
|
|
1800
|
-
}
|
|
1801
|
-
}
|
|
1802
|
-
return {
|
|
1803
|
-
success: true,
|
|
1804
|
-
analysis: { definitions },
|
|
1805
|
-
message: `Found ${definitions.length} code definitions`
|
|
1806
|
-
};
|
|
1807
|
-
case "dependencies":
|
|
1808
|
-
const depPatterns = [
|
|
1809
|
-
`import\\s+.*\\s+from\\s+['"]([^'"]+)['"]`,
|
|
1810
|
-
`require\\(['"]([^'"]+)['"]\\)`,
|
|
1811
|
-
'#include\\s+[<"]([^>"]+)[>"]'
|
|
1812
|
-
];
|
|
1813
|
-
const dependencies = [];
|
|
1814
|
-
for (const pattern of depPatterns) {
|
|
1815
|
-
try {
|
|
1816
|
-
const { stdout } = await exec(`rg -n "${pattern}" "${path}" --type ${languagePattern}`);
|
|
1817
|
-
const matches = stdout.split("\n").filter((line) => line.trim());
|
|
1818
|
-
matches.forEach((match) => {
|
|
1819
|
-
const parts = match.split(":");
|
|
1820
|
-
if (parts.length >= 3) {
|
|
1821
|
-
const file = parts[0];
|
|
1822
|
-
const content = parts.slice(2).join(":");
|
|
1823
|
-
const depMatch = content.match(new RegExp(pattern));
|
|
1824
|
-
if (depMatch && depMatch[1]) {
|
|
1825
|
-
dependencies.push({
|
|
1826
|
-
name: depMatch[1],
|
|
1827
|
-
type: pattern.includes("import") ? "import" : pattern.includes("require") ? "require" : "include",
|
|
1828
|
-
source: file || "",
|
|
1829
|
-
target: depMatch[1]
|
|
1830
|
-
});
|
|
1831
|
-
}
|
|
1832
|
-
}
|
|
1833
|
-
});
|
|
1834
|
-
} catch {
|
|
1835
|
-
}
|
|
1836
|
-
}
|
|
1837
|
-
return {
|
|
1838
|
-
success: true,
|
|
1839
|
-
analysis: { dependencies },
|
|
1840
|
-
message: `Found ${dependencies.length} dependencies`
|
|
1841
|
-
};
|
|
1842
|
-
case "structure":
|
|
1843
|
-
const { stdout: lsOutput } = await exec(`find "${path}" -type f -name "${languagePattern}" | head -1000`);
|
|
1844
|
-
const files = lsOutput.split("\n").filter((line) => line.trim());
|
|
1845
|
-
const { stdout: dirOutput } = await exec(`find "${path}" -type d | wc -l`);
|
|
1846
|
-
const directories = parseInt(dirOutput.trim());
|
|
1847
|
-
const languages = {};
|
|
1848
|
-
files.forEach((file) => {
|
|
1849
|
-
const ext = file.split(".").pop();
|
|
1850
|
-
if (ext) {
|
|
1851
|
-
languages[ext] = (languages[ext] || 0) + 1;
|
|
1852
|
-
}
|
|
1853
|
-
});
|
|
1854
|
-
const complexity = files.length > 1e3 ? "high" : files.length > 100 ? "medium" : "low";
|
|
1855
|
-
return {
|
|
1856
|
-
success: true,
|
|
1857
|
-
analysis: {
|
|
1858
|
-
structure: {
|
|
1859
|
-
directories,
|
|
1860
|
-
files: files.length,
|
|
1861
|
-
languages,
|
|
1862
|
-
complexity
|
|
1863
|
-
}
|
|
1864
|
-
},
|
|
1865
|
-
message: `Analyzed project structure: ${files.length} files in ${directories} directories`
|
|
1866
|
-
};
|
|
1867
|
-
default:
|
|
1868
|
-
return {
|
|
1869
|
-
success: false,
|
|
1870
|
-
analysis: {},
|
|
1871
|
-
message: `Unknown analysis action: ${action}`
|
|
1872
|
-
};
|
|
1873
|
-
}
|
|
1874
|
-
} catch (error) {
|
|
1875
|
-
return {
|
|
1876
|
-
success: false,
|
|
1877
|
-
analysis: {},
|
|
1878
|
-
message: `Code analysis error: ${error instanceof Error ? error.message : String(error)}`
|
|
1879
|
-
};
|
|
1880
|
-
}
|
|
1881
|
-
}
|
|
1882
|
-
/**
|
|
1883
|
-
* Perform multiple edits across files atomically
|
|
1884
|
-
*/
|
|
1885
|
-
static async performMultiEdit(context) {
|
|
1886
|
-
const results = [];
|
|
1887
|
-
try {
|
|
1888
|
-
const { projectPath } = context;
|
|
1889
|
-
for (const operation of context.operations) {
|
|
1890
|
-
const resolvedPath = path.isAbsolute(operation.filePath) ? operation.filePath : path.resolve(projectPath || process.cwd(), operation.filePath);
|
|
1891
|
-
const result = {
|
|
1892
|
-
filePath: resolvedPath,
|
|
1893
|
-
editsApplied: 0,
|
|
1894
|
-
errors: [],
|
|
1895
|
-
backup: void 0
|
|
1896
|
-
};
|
|
1897
|
-
try {
|
|
1898
|
-
const originalContent = await promises.readFile(resolvedPath, "utf-8");
|
|
1899
|
-
if (context.createBackup) {
|
|
1900
|
-
const backupPath = `${resolvedPath}.backup.${Date.now()}`;
|
|
1901
|
-
await promises.writeFile(backupPath, originalContent);
|
|
1902
|
-
result.backup = backupPath;
|
|
1903
|
-
}
|
|
1904
|
-
let modifiedContent = originalContent;
|
|
1905
|
-
for (const edit of operation.edits) {
|
|
1906
|
-
if (edit.replaceAll) {
|
|
1907
|
-
const regex = new RegExp(edit.oldString.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g");
|
|
1908
|
-
const matches = modifiedContent.match(regex);
|
|
1909
|
-
if (matches) {
|
|
1910
|
-
modifiedContent = modifiedContent.replace(regex, edit.newString);
|
|
1911
|
-
result.editsApplied += matches.length;
|
|
1912
|
-
}
|
|
1913
|
-
} else {
|
|
1914
|
-
if (modifiedContent.includes(edit.oldString)) {
|
|
1915
|
-
modifiedContent = modifiedContent.replace(edit.oldString, edit.newString);
|
|
1916
|
-
result.editsApplied++;
|
|
1917
|
-
} else {
|
|
1918
|
-
result.errors.push(`String not found: "${edit.oldString.substring(0, 50)}..."`);
|
|
1919
|
-
}
|
|
1920
|
-
}
|
|
1921
|
-
}
|
|
1922
|
-
if (result.editsApplied > 0) {
|
|
1923
|
-
await promises.writeFile(resolvedPath, modifiedContent);
|
|
1924
|
-
}
|
|
1925
|
-
} catch (error) {
|
|
1926
|
-
result.errors.push(error instanceof Error ? error.message : String(error));
|
|
1927
|
-
}
|
|
1928
|
-
results.push(result);
|
|
1929
|
-
}
|
|
1930
|
-
const totalEdits = results.reduce((sum, r) => sum + r.editsApplied, 0);
|
|
1931
|
-
const totalErrors = results.reduce((sum, r) => sum + r.errors.length, 0);
|
|
1932
|
-
return {
|
|
1933
|
-
success: totalErrors === 0,
|
|
1934
|
-
results,
|
|
1935
|
-
message: `Applied ${totalEdits} edits across ${results.length} files${totalErrors > 0 ? ` with ${totalErrors} errors` : ""}`
|
|
1936
|
-
};
|
|
1937
|
-
} catch (error) {
|
|
1938
|
-
return {
|
|
1939
|
-
success: false,
|
|
1940
|
-
results,
|
|
1941
|
-
message: `Multi-edit operation failed: ${error instanceof Error ? error.message : String(error)}`
|
|
1942
|
-
};
|
|
1943
|
-
}
|
|
1944
|
-
}
|
|
1945
|
-
/**
|
|
1946
|
-
* Ask user for clarification
|
|
1947
|
-
*/
|
|
1948
|
-
static async askClarification(context) {
|
|
1949
|
-
const questionId = `q_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
|
1950
|
-
if (!_AgentBuilderDefaults.pendingQuestions) {
|
|
1951
|
-
_AgentBuilderDefaults.pendingQuestions = /* @__PURE__ */ new Map();
|
|
1952
|
-
}
|
|
1953
|
-
_AgentBuilderDefaults.pendingQuestions.set(questionId, {
|
|
1954
|
-
...context,
|
|
1955
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1956
|
-
});
|
|
1957
|
-
return {
|
|
1958
|
-
questionId,
|
|
1959
|
-
question: context.question,
|
|
1960
|
-
options: context.options?.map((opt) => ({ id: opt.id, description: opt.description })),
|
|
1961
|
-
awaitingResponse: true
|
|
1962
|
-
};
|
|
1963
|
-
}
|
|
1964
|
-
/**
|
|
1965
|
-
* Signal task completion
|
|
1966
|
-
*/
|
|
1967
|
-
static async signalCompletion(context) {
|
|
1968
|
-
const completionId = `completion_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
|
1969
|
-
let confidence = 70;
|
|
1970
|
-
if (context.validation.testsRun) confidence += 15;
|
|
1971
|
-
if (context.validation.buildsSuccessfully) confidence += 15;
|
|
1972
|
-
if (context.validation.manualTestingRequired) confidence -= 10;
|
|
1973
|
-
let status;
|
|
1974
|
-
if (context.validation.testsRun && context.validation.buildsSuccessfully) {
|
|
1975
|
-
status = "completed";
|
|
1976
|
-
} else if (context.validation.manualTestingRequired) {
|
|
1977
|
-
status = "needs_testing";
|
|
1978
|
-
} else {
|
|
1979
|
-
status = "needs_review";
|
|
1980
|
-
}
|
|
1981
|
-
return {
|
|
1982
|
-
completionId,
|
|
1983
|
-
status,
|
|
1984
|
-
summary: context.summary,
|
|
1985
|
-
confidence: Math.min(100, Math.max(0, confidence))
|
|
1986
|
-
};
|
|
1987
|
-
}
|
|
1988
|
-
/**
|
|
1989
|
-
* Perform intelligent search with context
|
|
1990
|
-
*/
|
|
1991
|
-
static async performSmartSearch(context) {
|
|
1992
|
-
try {
|
|
1993
|
-
const { query, type = "text", scope = {}, context: searchContext = {} } = context;
|
|
1994
|
-
const { paths = ["."], fileTypes = [], excludePaths = [], maxResults = 50 } = scope;
|
|
1995
|
-
const { beforeLines = 2, afterLines = 2 } = searchContext;
|
|
1996
|
-
let rgCommand = "rg";
|
|
1997
|
-
if (beforeLines > 0 || afterLines > 0) {
|
|
1998
|
-
rgCommand += ` -A ${afterLines} -B ${beforeLines}`;
|
|
1999
|
-
}
|
|
2000
|
-
rgCommand += " -n";
|
|
2001
|
-
if (type === "regex") {
|
|
2002
|
-
rgCommand += " -e";
|
|
2003
|
-
} else if (type === "fuzzy") {
|
|
2004
|
-
rgCommand += " --fixed-strings";
|
|
2005
|
-
}
|
|
2006
|
-
if (fileTypes.length > 0) {
|
|
2007
|
-
fileTypes.forEach((ft) => {
|
|
2008
|
-
rgCommand += ` --type-add 'custom:*.${ft}' -t custom`;
|
|
2009
|
-
});
|
|
2010
|
-
}
|
|
2011
|
-
excludePaths.forEach((path) => {
|
|
2012
|
-
rgCommand += ` --glob '!${path}'`;
|
|
2013
|
-
});
|
|
2014
|
-
rgCommand += ` -m ${maxResults}`;
|
|
2015
|
-
rgCommand += ` "${query}" ${paths.join(" ")}`;
|
|
2016
|
-
const { stdout } = await exec(rgCommand);
|
|
2017
|
-
const lines = stdout.split("\n").filter((line) => line.trim());
|
|
2018
|
-
const matches = [];
|
|
2019
|
-
let currentMatch = null;
|
|
2020
|
-
lines.forEach((line) => {
|
|
2021
|
-
if (line.includes(":") && !line.startsWith("-")) {
|
|
2022
|
-
const parts = line.split(":");
|
|
2023
|
-
if (parts.length >= 3) {
|
|
2024
|
-
if (currentMatch) {
|
|
2025
|
-
matches.push(currentMatch);
|
|
2026
|
-
}
|
|
2027
|
-
currentMatch = {
|
|
2028
|
-
file: parts[0] || "",
|
|
2029
|
-
line: parseInt(parts[1] || "0"),
|
|
2030
|
-
match: parts.slice(2).join(":"),
|
|
2031
|
-
context: { before: [], after: [] },
|
|
2032
|
-
relevance: type === "fuzzy" ? Math.random() * 100 : void 0
|
|
2033
|
-
};
|
|
2034
|
-
}
|
|
2035
|
-
} else if (line.startsWith("-") && currentMatch) {
|
|
2036
|
-
const contextLine = line.substring(1);
|
|
2037
|
-
if (currentMatch.context.before.length < beforeLines) {
|
|
2038
|
-
currentMatch.context.before.push(contextLine);
|
|
2039
|
-
} else {
|
|
2040
|
-
currentMatch.context.after.push(contextLine);
|
|
2041
|
-
}
|
|
2042
|
-
}
|
|
2043
|
-
});
|
|
2044
|
-
if (currentMatch) {
|
|
2045
|
-
matches.push(currentMatch);
|
|
2046
|
-
}
|
|
2047
|
-
const filesSearched = new Set(matches.map((m) => m.file)).size;
|
|
2048
|
-
return {
|
|
2049
|
-
success: true,
|
|
2050
|
-
matches: matches.slice(0, maxResults),
|
|
2051
|
-
summary: {
|
|
2052
|
-
totalMatches: matches.length,
|
|
2053
|
-
filesSearched,
|
|
2054
|
-
patterns: [query]
|
|
2055
|
-
}
|
|
2056
|
-
};
|
|
2057
|
-
} catch {
|
|
2058
|
-
return {
|
|
2059
|
-
success: false,
|
|
2060
|
-
matches: [],
|
|
2061
|
-
summary: {
|
|
2062
|
-
totalMatches: 0,
|
|
2063
|
-
filesSearched: 0,
|
|
2064
|
-
patterns: [context.query]
|
|
2065
|
-
}
|
|
2066
|
-
};
|
|
2067
|
-
}
|
|
2068
|
-
}
|
|
2069
|
-
// Static storage properties
|
|
2070
|
-
static taskStorage;
|
|
2071
|
-
static pendingQuestions;
|
|
2072
|
-
/**
|
|
2073
|
-
* Read file contents with optional line range
|
|
2074
|
-
*/
|
|
2075
|
-
static async readFile(context) {
|
|
2076
|
-
try {
|
|
2077
|
-
const { filePath, startLine, endLine, encoding = "utf-8", projectPath } = context;
|
|
2078
|
-
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(projectPath || process.cwd(), filePath);
|
|
2079
|
-
const stats = await promises.stat(resolvedPath);
|
|
2080
|
-
const content = await promises.readFile(resolvedPath, { encoding });
|
|
2081
|
-
const lines = content.split("\n");
|
|
2082
|
-
let resultContent = content;
|
|
2083
|
-
let resultLines = lines;
|
|
2084
|
-
if (startLine !== void 0 || endLine !== void 0) {
|
|
2085
|
-
const start = Math.max(0, (startLine || 1) - 1);
|
|
2086
|
-
const end = endLine !== void 0 ? Math.min(lines.length, endLine) : lines.length;
|
|
2087
|
-
resultLines = lines.slice(start, end);
|
|
2088
|
-
resultContent = resultLines.join("\n");
|
|
2089
|
-
}
|
|
2090
|
-
return {
|
|
2091
|
-
success: true,
|
|
2092
|
-
content: resultContent,
|
|
2093
|
-
lines: resultLines,
|
|
2094
|
-
metadata: {
|
|
2095
|
-
size: stats.size,
|
|
2096
|
-
totalLines: lines.length,
|
|
2097
|
-
encoding,
|
|
2098
|
-
lastModified: stats.mtime.toISOString()
|
|
2099
|
-
}
|
|
2100
|
-
};
|
|
2101
|
-
} catch (error) {
|
|
2102
|
-
return {
|
|
2103
|
-
success: false,
|
|
2104
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2105
|
-
};
|
|
2106
|
-
}
|
|
2107
|
-
}
|
|
2108
|
-
/**
|
|
2109
|
-
* Write content to file with directory creation and backup options
|
|
2110
|
-
*/
|
|
2111
|
-
static async writeFile(context) {
|
|
2112
|
-
try {
|
|
2113
|
-
const { filePath, content, createDirs = true, encoding = "utf-8", projectPath } = context;
|
|
2114
|
-
const resolvedPath = path.isAbsolute(filePath) ? filePath : path.resolve(projectPath || process.cwd(), filePath);
|
|
2115
|
-
const dir = path.dirname(resolvedPath);
|
|
2116
|
-
if (createDirs) {
|
|
2117
|
-
await promises.mkdir(dir, { recursive: true });
|
|
2118
|
-
}
|
|
2119
|
-
await promises.writeFile(resolvedPath, content, { encoding });
|
|
2120
|
-
return {
|
|
2121
|
-
success: true,
|
|
2122
|
-
filePath: resolvedPath,
|
|
2123
|
-
bytesWritten: Buffer.byteLength(content, encoding),
|
|
2124
|
-
message: `Successfully wrote ${Buffer.byteLength(content, encoding)} bytes to ${filePath}`
|
|
2125
|
-
};
|
|
2126
|
-
} catch (error) {
|
|
2127
|
-
return {
|
|
2128
|
-
success: false,
|
|
2129
|
-
filePath: context.filePath,
|
|
2130
|
-
message: `Failed to write file: ${error instanceof Error ? error.message : String(error)}`,
|
|
2131
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2132
|
-
};
|
|
2133
|
-
}
|
|
2134
|
-
}
|
|
2135
|
-
/**
|
|
2136
|
-
* List directory contents with filtering and metadata
|
|
2137
|
-
*/
|
|
2138
|
-
static async listDirectory(context) {
|
|
2139
|
-
try {
|
|
2140
|
-
const {
|
|
2141
|
-
path: path$1,
|
|
2142
|
-
recursive = false,
|
|
2143
|
-
includeHidden = false,
|
|
2144
|
-
pattern,
|
|
2145
|
-
maxDepth = 10,
|
|
2146
|
-
includeMetadata = true,
|
|
2147
|
-
projectPath
|
|
2148
|
-
} = context;
|
|
2149
|
-
const resolvedPath = path.isAbsolute(path$1) ? path$1 : path.resolve(projectPath || process.cwd(), path$1);
|
|
2150
|
-
const items = [];
|
|
2151
|
-
async function processDirectory(dirPath, currentDepth = 0) {
|
|
2152
|
-
if (currentDepth > maxDepth) return;
|
|
2153
|
-
const entries = await promises.readdir(dirPath);
|
|
2154
|
-
for (const entry of entries) {
|
|
2155
|
-
if (!includeHidden && entry.startsWith(".")) continue;
|
|
2156
|
-
const fullPath = path.join(dirPath, entry);
|
|
2157
|
-
const relativePath = path.relative(resolvedPath, fullPath);
|
|
2158
|
-
if (pattern) {
|
|
2159
|
-
const regexPattern = pattern.replace(/\*/g, ".*").replace(/\?/g, ".");
|
|
2160
|
-
if (!new RegExp(regexPattern).test(entry)) continue;
|
|
2161
|
-
}
|
|
2162
|
-
let stats;
|
|
2163
|
-
let type;
|
|
2164
|
-
try {
|
|
2165
|
-
stats = await promises.stat(fullPath);
|
|
2166
|
-
if (stats.isDirectory()) {
|
|
2167
|
-
type = "directory";
|
|
2168
|
-
} else if (stats.isSymbolicLink()) {
|
|
2169
|
-
type = "symlink";
|
|
2170
|
-
} else {
|
|
2171
|
-
type = "file";
|
|
2172
|
-
}
|
|
2173
|
-
} catch {
|
|
2174
|
-
continue;
|
|
2175
|
-
}
|
|
2176
|
-
const item = {
|
|
2177
|
-
name: entry,
|
|
2178
|
-
path: relativePath || entry,
|
|
2179
|
-
type
|
|
2180
|
-
};
|
|
2181
|
-
if (includeMetadata) {
|
|
2182
|
-
item.size = stats.size;
|
|
2183
|
-
item.lastModified = stats.mtime.toISOString();
|
|
2184
|
-
item.permissions = `0${(stats.mode & parseInt("777", 8)).toString(8)}`;
|
|
2185
|
-
}
|
|
2186
|
-
items.push(item);
|
|
2187
|
-
if (recursive && type === "directory") {
|
|
2188
|
-
await processDirectory(fullPath, currentDepth + 1);
|
|
2189
|
-
}
|
|
2190
|
-
}
|
|
2191
|
-
}
|
|
2192
|
-
await processDirectory(resolvedPath);
|
|
2193
|
-
return {
|
|
2194
|
-
success: true,
|
|
2195
|
-
items,
|
|
2196
|
-
totalItems: items.length,
|
|
2197
|
-
path: resolvedPath,
|
|
2198
|
-
message: `Listed ${items.length} items in ${resolvedPath}`
|
|
2199
|
-
};
|
|
2200
|
-
} catch (error) {
|
|
2201
|
-
return {
|
|
2202
|
-
success: false,
|
|
2203
|
-
items: [],
|
|
2204
|
-
totalItems: 0,
|
|
2205
|
-
path: context.path,
|
|
2206
|
-
message: `Failed to list directory: ${error instanceof Error ? error.message : String(error)}`,
|
|
2207
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2208
|
-
};
|
|
2209
|
-
}
|
|
2210
|
-
}
|
|
2211
|
-
/**
|
|
2212
|
-
* Execute shell commands with proper error handling
|
|
2213
|
-
*/
|
|
2214
|
-
static async executeCommand(context) {
|
|
2215
|
-
const startTime = Date.now();
|
|
2216
|
-
try {
|
|
2217
|
-
const { command, workingDirectory, timeout = 3e4, captureOutput = true, shell, env } = context;
|
|
2218
|
-
const execOptions = {
|
|
2219
|
-
timeout,
|
|
2220
|
-
env: { ...process.env, ...env }
|
|
2221
|
-
};
|
|
2222
|
-
if (workingDirectory) {
|
|
2223
|
-
execOptions.cwd = workingDirectory;
|
|
2224
|
-
}
|
|
2225
|
-
if (shell) {
|
|
2226
|
-
execOptions.shell = shell;
|
|
2227
|
-
}
|
|
2228
|
-
const { stdout, stderr } = await exec(command, execOptions);
|
|
2229
|
-
const executionTime = Date.now() - startTime;
|
|
2230
|
-
return {
|
|
2231
|
-
success: true,
|
|
2232
|
-
exitCode: 0,
|
|
2233
|
-
stdout: captureOutput ? String(stdout) : void 0,
|
|
2234
|
-
stderr: captureOutput ? String(stderr) : void 0,
|
|
2235
|
-
command,
|
|
2236
|
-
workingDirectory,
|
|
2237
|
-
executionTime
|
|
2238
|
-
};
|
|
2239
|
-
} catch (error) {
|
|
2240
|
-
const executionTime = Date.now() - startTime;
|
|
2241
|
-
return {
|
|
2242
|
-
success: false,
|
|
2243
|
-
exitCode: error.code || 1,
|
|
2244
|
-
stdout: String(error.stdout || ""),
|
|
2245
|
-
stderr: String(error.stderr || ""),
|
|
2246
|
-
command: context.command,
|
|
2247
|
-
workingDirectory: context.workingDirectory,
|
|
2248
|
-
executionTime,
|
|
2249
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2250
|
-
};
|
|
2251
|
-
}
|
|
2252
|
-
}
|
|
2253
|
-
/**
|
|
2254
|
-
* Web search using a simple search approach
|
|
2255
|
-
*/
|
|
2256
|
-
static async webSearch(context) {
|
|
2257
|
-
try {
|
|
2258
|
-
const {
|
|
2259
|
-
query,
|
|
2260
|
-
maxResults = 10
|
|
2261
|
-
// region = 'us',
|
|
2262
|
-
// language = 'en',
|
|
2263
|
-
// includeImages = false,
|
|
2264
|
-
// dateRange = 'all',
|
|
2265
|
-
} = context;
|
|
2266
|
-
const startTime = Date.now();
|
|
2267
|
-
const searchUrl = `https://api.duckduckgo.com/?q=${encodeURIComponent(query)}&format=json&no_redirect=1&skip_disambig=1`;
|
|
2268
|
-
const response = await fetch(searchUrl);
|
|
2269
|
-
const data = await response.json();
|
|
2270
|
-
const results = [];
|
|
2271
|
-
if (data.RelatedTopics && Array.isArray(data.RelatedTopics)) {
|
|
2272
|
-
for (const topic of data.RelatedTopics.slice(0, maxResults)) {
|
|
2273
|
-
if (topic.FirstURL && topic.Text) {
|
|
2274
|
-
const url = new URL(topic.FirstURL);
|
|
2275
|
-
results.push({
|
|
2276
|
-
title: topic.Text.split(" - ")[0] || topic.Text.substring(0, 60),
|
|
2277
|
-
url: topic.FirstURL,
|
|
2278
|
-
snippet: topic.Text,
|
|
2279
|
-
domain: url.hostname,
|
|
2280
|
-
relevanceScore: Math.random() * 100
|
|
2281
|
-
// Placeholder scoring
|
|
2282
|
-
});
|
|
2283
|
-
}
|
|
2284
|
-
}
|
|
2285
|
-
}
|
|
2286
|
-
if (data.Abstract && data.AbstractURL) {
|
|
2287
|
-
const url = new URL(data.AbstractURL);
|
|
2288
|
-
results.unshift({
|
|
2289
|
-
title: data.Heading || "Main Result",
|
|
2290
|
-
url: data.AbstractURL,
|
|
2291
|
-
snippet: data.Abstract,
|
|
2292
|
-
domain: url.hostname,
|
|
2293
|
-
relevanceScore: 100
|
|
2294
|
-
});
|
|
2295
|
-
}
|
|
2296
|
-
const searchTime = Date.now() - startTime;
|
|
2297
|
-
return {
|
|
2298
|
-
success: true,
|
|
2299
|
-
query,
|
|
2300
|
-
results: results.slice(0, maxResults),
|
|
2301
|
-
totalResults: results.length,
|
|
2302
|
-
searchTime,
|
|
2303
|
-
suggestions: data.RelatedTopics?.slice(maxResults, maxResults + 3)?.map((t) => t.Text?.split(" - ")[0] || t.Text?.substring(0, 30)).filter(Boolean) || []
|
|
2304
|
-
};
|
|
2305
|
-
} catch (error) {
|
|
2306
|
-
return {
|
|
2307
|
-
success: false,
|
|
2308
|
-
query: context.query,
|
|
2309
|
-
results: [],
|
|
2310
|
-
totalResults: 0,
|
|
2311
|
-
searchTime: 0,
|
|
2312
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2313
|
-
};
|
|
2314
|
-
}
|
|
2315
|
-
}
|
|
2316
|
-
};
|
|
2317
|
-
var ToolSummaryProcessor = class extends core.MemoryProcessor {
|
|
2318
|
-
summaryAgent;
|
|
2319
|
-
summaryCache = /* @__PURE__ */ new Map();
|
|
2320
|
-
constructor({ summaryModel }) {
|
|
2321
|
-
super({ name: "ToolSummaryProcessor" });
|
|
2322
|
-
this.summaryAgent = new core.Agent({
|
|
2323
|
-
name: "ToolSummaryAgent",
|
|
2324
|
-
description: "A summary agent that summarizes tool calls and results",
|
|
2325
|
-
instructions: "You are a summary agent that summarizes tool calls and results",
|
|
2326
|
-
model: summaryModel
|
|
2327
|
-
});
|
|
2328
|
-
}
|
|
2329
|
-
/**
|
|
2330
|
-
* Creates a cache key from tool call arguments
|
|
2331
|
-
*/
|
|
2332
|
-
createCacheKey(toolCall) {
|
|
2333
|
-
if (!toolCall) return "unknown";
|
|
2334
|
-
const toolName = toolCall.toolName || "unknown";
|
|
2335
|
-
const args = toolCall.args || {};
|
|
2336
|
-
const sortedArgs = Object.keys(args).sort().reduce((result, key) => {
|
|
2337
|
-
result[key] = args[key];
|
|
2338
|
-
return result;
|
|
2339
|
-
}, {});
|
|
2340
|
-
return `${toolName}:${JSON.stringify(sortedArgs)}`;
|
|
2341
|
-
}
|
|
2342
|
-
/**
|
|
2343
|
-
* Clears the summary cache
|
|
2344
|
-
*/
|
|
2345
|
-
clearCache() {
|
|
2346
|
-
this.summaryCache.clear();
|
|
2347
|
-
}
|
|
2348
|
-
/**
|
|
2349
|
-
* Gets cache statistics
|
|
2350
|
-
*/
|
|
2351
|
-
getCacheStats() {
|
|
2352
|
-
return {
|
|
2353
|
-
size: this.summaryCache.size,
|
|
2354
|
-
keys: Array.from(this.summaryCache.keys())
|
|
2355
|
-
};
|
|
2356
|
-
}
|
|
2357
|
-
async process(messages) {
|
|
2358
|
-
const summaryTasks = [];
|
|
2359
|
-
for (const message of messages) {
|
|
2360
|
-
if (message.role === "tool" && Array.isArray(message.content) && message.content.length > 0 && message.content?.some((content) => content.type === "tool-result")) {
|
|
2361
|
-
for (const content of message.content) {
|
|
2362
|
-
if (content.type === "tool-result") {
|
|
2363
|
-
const assistantMessageWithToolCall = messages.find(
|
|
2364
|
-
(message2) => message2.role === "assistant" && Array.isArray(message2.content) && message2.content.length > 0 && message2.content?.some(
|
|
2365
|
-
(assistantContent) => assistantContent.type === "tool-call" && assistantContent.toolCallId === content.toolCallId
|
|
2366
|
-
)
|
|
2367
|
-
);
|
|
2368
|
-
const toolCall = Array.isArray(assistantMessageWithToolCall?.content) ? assistantMessageWithToolCall?.content.find(
|
|
2369
|
-
(assistantContent) => assistantContent.type === "tool-call" && assistantContent.toolCallId === content.toolCallId
|
|
2370
|
-
) : null;
|
|
2371
|
-
const cacheKey = this.createCacheKey(toolCall);
|
|
2372
|
-
const cachedSummary = this.summaryCache.get(cacheKey);
|
|
2373
|
-
if (cachedSummary) {
|
|
2374
|
-
content.result = `Tool call summary: ${cachedSummary}`;
|
|
2375
|
-
} else {
|
|
2376
|
-
const summaryPromise = this.summaryAgent.generate(
|
|
2377
|
-
`Summarize the following tool call: ${JSON.stringify(toolCall)} and result: ${JSON.stringify(content)}`
|
|
2378
|
-
);
|
|
2379
|
-
summaryTasks.push({
|
|
2380
|
-
content,
|
|
2381
|
-
promise: summaryPromise,
|
|
2382
|
-
cacheKey
|
|
2383
|
-
});
|
|
2384
|
-
}
|
|
2385
|
-
}
|
|
2386
|
-
}
|
|
2387
|
-
}
|
|
2388
|
-
}
|
|
2389
|
-
if (summaryTasks.length > 0) {
|
|
2390
|
-
const summaryResults = await Promise.all(summaryTasks.map((task) => task.promise));
|
|
2391
|
-
summaryTasks.forEach((task, index) => {
|
|
2392
|
-
const summaryResult = summaryResults[index];
|
|
2393
|
-
const summaryText = summaryResult.text;
|
|
2394
|
-
this.summaryCache.set(task.cacheKey, summaryText);
|
|
2395
|
-
task.content.result = `Tool call summary: ${summaryText}`;
|
|
2396
|
-
});
|
|
2397
|
-
}
|
|
2398
|
-
return messages;
|
|
2399
|
-
}
|
|
2400
|
-
};
|
|
2401
|
-
var WriteToDiskProcessor = class extends core.MemoryProcessor {
|
|
2402
|
-
prefix;
|
|
2403
|
-
constructor({ prefix = "messages" } = {}) {
|
|
2404
|
-
super({ name: "WriteToDiskProcessor" });
|
|
2405
|
-
this.prefix = prefix;
|
|
2406
|
-
}
|
|
2407
|
-
async process(messages) {
|
|
2408
|
-
await promises.writeFile(`${this.prefix}-${Date.now()}.json`, JSON.stringify(messages, null, 2));
|
|
2409
|
-
return messages;
|
|
2410
|
-
}
|
|
2411
|
-
};
|
|
2412
|
-
var cloneTemplateStep = workflows.createStep({
|
|
2413
|
-
id: "clone-template",
|
|
2414
|
-
description: "Clone the template repository to a temporary directory at the specified ref",
|
|
2415
|
-
inputSchema: MergeInputSchema,
|
|
2416
|
-
outputSchema: zod.z.object({
|
|
2417
|
-
templateDir: zod.z.string(),
|
|
2418
|
-
commitSha: zod.z.string(),
|
|
2419
|
-
slug: zod.z.string()
|
|
2420
|
-
}),
|
|
2421
|
-
execute: async ({ inputData }) => {
|
|
2422
|
-
const { repo, ref = "main", slug } = inputData;
|
|
2423
|
-
if (!repo) {
|
|
2424
|
-
throw new Error("Repository URL or path is required");
|
|
2425
|
-
}
|
|
2426
|
-
const inferredSlug = slug || repo.split("/").pop()?.replace(/\.git$/, "") || "template";
|
|
2427
|
-
const tempDir = await promises.mkdtemp(path.join(os.tmpdir(), "mastra-template-"));
|
|
2428
|
-
try {
|
|
2429
|
-
const cloneCmd = `git clone "${repo}" "${tempDir}"`;
|
|
2430
|
-
await exec(cloneCmd);
|
|
2431
|
-
if (ref !== "main" && ref !== "master") {
|
|
2432
|
-
await exec(`git checkout "${ref}"`, { cwd: tempDir });
|
|
2433
|
-
}
|
|
2434
|
-
const { stdout: commitSha } = await exec("git rev-parse HEAD", { cwd: tempDir });
|
|
2435
|
-
return {
|
|
2436
|
-
templateDir: tempDir,
|
|
2437
|
-
commitSha: commitSha.trim(),
|
|
2438
|
-
slug: inferredSlug
|
|
2439
|
-
};
|
|
2440
|
-
} catch (error) {
|
|
2441
|
-
try {
|
|
2442
|
-
await promises.rm(tempDir, { recursive: true, force: true });
|
|
2443
|
-
} catch {
|
|
2444
|
-
}
|
|
2445
|
-
throw new Error(`Failed to clone template: ${error instanceof Error ? error.message : String(error)}`);
|
|
2446
|
-
}
|
|
2447
|
-
}
|
|
2448
|
-
});
|
|
2449
|
-
var analyzePackageStep = workflows.createStep({
|
|
2450
|
-
id: "analyze-package",
|
|
2451
|
-
description: "Analyze the template package.json to extract dependency information",
|
|
2452
|
-
inputSchema: zod.z.object({
|
|
2453
|
-
templateDir: zod.z.string(),
|
|
2454
|
-
commitSha: zod.z.string(),
|
|
2455
|
-
slug: zod.z.string()
|
|
2456
|
-
}),
|
|
2457
|
-
outputSchema: zod.z.object({
|
|
2458
|
-
dependencies: zod.z.record(zod.z.string()).optional(),
|
|
2459
|
-
devDependencies: zod.z.record(zod.z.string()).optional(),
|
|
2460
|
-
peerDependencies: zod.z.record(zod.z.string()).optional(),
|
|
2461
|
-
scripts: zod.z.record(zod.z.string()).optional(),
|
|
2462
|
-
packageInfo: zod.z.object({
|
|
2463
|
-
name: zod.z.string().optional(),
|
|
2464
|
-
version: zod.z.string().optional(),
|
|
2465
|
-
description: zod.z.string().optional()
|
|
2466
|
-
})
|
|
2467
|
-
}),
|
|
2468
|
-
execute: async ({ inputData }) => {
|
|
2469
|
-
console.log("Analyzing template package.json...");
|
|
2470
|
-
const { templateDir } = inputData;
|
|
2471
|
-
const packageJsonPath = path.join(templateDir, "package.json");
|
|
2472
|
-
try {
|
|
2473
|
-
const packageJsonContent = await promises.readFile(packageJsonPath, "utf-8");
|
|
2474
|
-
const packageJson = JSON.parse(packageJsonContent);
|
|
2475
|
-
console.log("Template package.json:", JSON.stringify(packageJson, null, 2));
|
|
2476
|
-
return {
|
|
2477
|
-
dependencies: packageJson.dependencies || {},
|
|
2478
|
-
devDependencies: packageJson.devDependencies || {},
|
|
2479
|
-
peerDependencies: packageJson.peerDependencies || {},
|
|
2480
|
-
scripts: packageJson.scripts || {},
|
|
2481
|
-
packageInfo: {
|
|
2482
|
-
name: packageJson.name,
|
|
2483
|
-
version: packageJson.version,
|
|
2484
|
-
description: packageJson.description
|
|
2485
|
-
}
|
|
2486
|
-
};
|
|
2487
|
-
} catch (error) {
|
|
2488
|
-
console.warn(`Failed to read template package.json: ${error instanceof Error ? error.message : String(error)}`);
|
|
2489
|
-
return {
|
|
2490
|
-
dependencies: {},
|
|
2491
|
-
devDependencies: {},
|
|
2492
|
-
peerDependencies: {},
|
|
2493
|
-
scripts: {},
|
|
2494
|
-
packageInfo: {}
|
|
2495
|
-
};
|
|
2496
|
-
}
|
|
2497
|
-
}
|
|
2498
|
-
});
|
|
2499
|
-
var discoverUnitsStep = workflows.createStep({
|
|
2500
|
-
id: "discover-units",
|
|
2501
|
-
description: "Discover template units by analyzing the templates directory structure",
|
|
2502
|
-
inputSchema: zod.z.object({
|
|
2503
|
-
templateDir: zod.z.string(),
|
|
2504
|
-
commitSha: zod.z.string(),
|
|
2505
|
-
slug: zod.z.string()
|
|
2506
|
-
}),
|
|
2507
|
-
outputSchema: zod.z.object({
|
    units: zod.z.array(TemplateUnitSchema)
  }),
  execute: async ({ inputData }) => {
    const { templateDir } = inputData;
    const tools = await AgentBuilderDefaults.DEFAULT_TOOLS(templateDir);
    const agent$1 = new agent.Agent({
      model: openai.openai("gpt-4o-mini"),
      instructions: `You are an expert at analyzing Mastra projects.

Your task is to scan the provided directory and identify all available units (agents, workflows, tools, MCP servers, networks).

Mastra Project Structure Analysis:
- Each Mastra project has a structure like: ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.agent}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.workflow}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE["mcp-server"]}, ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.network}
- Analyze TypeScript files in each category directory to identify exported units

CRITICAL: YOU MUST USE YOUR TOOLS (readFile, listDirectory) TO DISCOVER THE UNITS IN THE TEMPLATE DIRECTORY.

IMPORTANT - Agent Discovery Rules:
1. **Multiple Agent Files**: Some templates have separate files for each agent (e.g., evaluationAgent.ts, researchAgent.ts)
2. **Single File Multiple Agents**: Some files may export multiple agents (look for multiple 'export const' or 'export default' statements)
3. **Agent Identification**: Look for exported variables that are instances of 'new Agent()' or similar patterns
4. **Naming Convention**: Agent names should be extracted from the export name (e.g., 'weatherAgent', 'evaluationAgent')

For each Mastra project directory you analyze:
1. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.agent} and identify ALL exported agents
2. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.workflow} and identify ALL exported workflows
3. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool} and identify ALL exported tools
4. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE["mcp-server"]} and identify ALL exported MCP servers
5. Scan all TypeScript files in ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.network} and identify ALL exported networks
6. Scan for any OTHER files in src/mastra that are NOT in the above default folders (e.g., lib/, utils/, types/, etc.) and identify them as 'other' files

IMPORTANT - Naming Consistency Rules:
- For ALL unit types (including 'other'), the 'name' field should be the filename WITHOUT extension
- For structured units (agents, workflows, tools, etc.), prefer the actual export name if clearly identifiable
- use the base filename without extension for the id (e.g., 'util.ts' \u2192 name: 'util')
- use the relative path from the template root for the file (e.g., 'src/mastra/lib/util.ts' \u2192 file: 'src/mastra/lib/util.ts')

Return the actual exported names of the units, as well as the file names.`,
      name: "Mastra Project Discoverer",
      tools: {
        readFile: tools.readFile,
        listDirectory: tools.listDirectory
      }
    });
    const result = await agent$1.generate(
      `Analyze the Mastra project directory structure at "${templateDir}".

List directory contents using listDirectory tool, and then analyze each file with readFile tool.
IMPORTANT:
- Look inside the actual file content to find export statements like 'export const agentName = new Agent(...)'
- A single file may contain multiple exports
- Return the actual exported variable names, as well as the file names
- If a directory doesn't exist or has no files, return an empty array

Return the analysis in the exact format specified in the output schema.`,
      {
        experimental_output: zod.z.object({
          agents: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
          workflows: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
          tools: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
          mcp: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
          networks: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional(),
          other: zod.z.array(zod.z.object({ name: zod.z.string(), file: zod.z.string() })).optional()
        }),
        maxSteps: 100
      }
    );
    const template = result.object ?? {};
    const units = [];
    template.agents?.forEach((agentId) => {
      units.push({ kind: "agent", id: agentId.name, file: agentId.file });
    });
    template.workflows?.forEach((workflowId) => {
      units.push({ kind: "workflow", id: workflowId.name, file: workflowId.file });
    });
    template.tools?.forEach((toolId) => {
      units.push({ kind: "tool", id: toolId.name, file: toolId.file });
    });
    template.mcp?.forEach((mcpId) => {
      units.push({ kind: "mcp-server", id: mcpId.name, file: mcpId.file });
    });
    template.networks?.forEach((networkId) => {
      units.push({ kind: "network", id: networkId.name, file: networkId.file });
    });
    template.other?.forEach((otherId) => {
      units.push({ kind: "other", id: otherId.name, file: otherId.file });
    });
    console.log("Discovered units:", JSON.stringify(units, null, 2));
    return { units };
  }
});
var orderUnitsStep = workflows.createStep({
  id: "order-units",
  description: "Sort units in topological order based on kind weights",
  inputSchema: zod.z.object({
    units: zod.z.array(TemplateUnitSchema)
  }),
  outputSchema: zod.z.object({
    orderedUnits: zod.z.array(TemplateUnitSchema)
  }),
  execute: async ({ inputData }) => {
    const { units } = inputData;
    const orderedUnits = [...units].sort((a, b) => {
      const aWeight = kindWeight(a.kind);
      const bWeight = kindWeight(b.kind);
      return aWeight - bWeight;
    });
    return { orderedUnits };
  }
});
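orderUnitsStep sorts by kindWeight, which is defined elsewhere in this bundle and not shown here. A minimal TypeScript sketch of the idea, with assumed weights chosen so that dependencies such as tools land before the agents and workflows that import them, could look like this:

// Illustrative only: the real kindWeight and its weights live elsewhere in the
// bundle; these values are assumptions, not the shipped behavior.
type TemplateUnit = { kind: string; id: string; file: string };

const ASSUMED_KIND_WEIGHTS: Record<string, number> = {
  tool: 0,
  agent: 1,
  workflow: 2,
  network: 3,
  "mcp-server": 4,
  other: 5,
};

const kindWeightSketch = (kind: string): number =>
  ASSUMED_KIND_WEIGHTS[kind] ?? Number.MAX_SAFE_INTEGER;

// Same shape as the sort inside orderUnitsStep above:
const sample: TemplateUnit[] = [
  { kind: "workflow", id: "researchWorkflow", file: "researchWorkflow.ts" },
  { kind: "tool", id: "weatherTool", file: "weatherTool.ts" },
];
const orderedSample = [...sample].sort(
  (a, b) => kindWeightSketch(a.kind) - kindWeightSketch(b.kind),
);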
var packageMergeStep = workflows.createStep({
  id: "package-merge",
  description: "Merge template package.json dependencies into target project and install",
  inputSchema: zod.z.object({
    commitSha: zod.z.string(),
    slug: zod.z.string(),
    targetPath: zod.z.string().optional(),
    packageInfo: zod.z.object({
      dependencies: zod.z.record(zod.z.string()).optional(),
      devDependencies: zod.z.record(zod.z.string()).optional(),
      peerDependencies: zod.z.record(zod.z.string()).optional(),
      scripts: zod.z.record(zod.z.string()).optional(),
      packageInfo: zod.z.object({
        name: zod.z.string().optional(),
        version: zod.z.string().optional(),
        description: zod.z.string().optional()
      })
    })
  }),
  outputSchema: zod.z.object({
    success: zod.z.boolean(),
    applied: zod.z.boolean(),
    message: zod.z.string(),
    error: zod.z.string().optional()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("Package merge step starting...");
    const { slug, packageInfo } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
    try {
      const allTools = await AgentBuilderDefaults.DEFAULT_TOOLS(targetPath);
      const packageMergeAgent = new agent.Agent({
        name: "package-merger",
        description: "Specialized agent for merging package.json dependencies",
        instructions: `You are a package.json merge specialist. Your job is to:

1. **Read the target project's package.json** using readFile tool
2. **Merge template dependencies** into the target package.json following these rules:
- For dependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For devDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For peerDependencies: Add ALL NEW ones with template versions, KEEP EXISTING versions for conflicts
- For scripts: Add new scripts with "template:${slug}:" prefix, don't overwrite existing ones
- Maintain existing package.json structure and formatting
3. **Write the updated package.json** using writeFile tool

Template Dependencies to Merge:
- Dependencies: ${JSON.stringify(packageInfo.dependencies || {}, null, 2)}
- Dev Dependencies: ${JSON.stringify(packageInfo.devDependencies || {}, null, 2)}
- Peer Dependencies: ${JSON.stringify(packageInfo.peerDependencies || {}, null, 2)}
- Scripts: ${JSON.stringify(packageInfo.scripts || {}, null, 2)}

CRITICAL MERGE RULES:
1. For each dependency in template dependencies, if it does NOT exist in target, ADD it with template version
2. For each dependency in template dependencies, if it ALREADY exists in target, KEEP target version
3. You MUST add ALL template dependencies that don't conflict - do not skip any
4. Be explicit about what you're adding vs keeping

EXAMPLE:
Template has: {"@mastra/libsql": "latest", "@mastra/core": "latest", "zod": "^3.25.67"}
Target has: {"@mastra/core": "latest", "zod": "^3.25.0"}
Result should have: {"@mastra/core": "latest", "zod": "^3.25.0", "@mastra/libsql": "latest"}

Be systematic and thorough. Always read the existing package.json first, then merge, then write.`,
        model: openai.openai("gpt-4o-mini"),
        tools: {
          readFile: allTools.readFile,
          writeFile: allTools.writeFile,
          listDirectory: allTools.listDirectory
        }
      });
      console.log("Starting package merge agent...");
      console.log("Template dependencies to merge:", JSON.stringify(packageInfo.dependencies, null, 2));
      console.log("Template devDependencies to merge:", JSON.stringify(packageInfo.devDependencies, null, 2));
      const result = await packageMergeAgent.stream(
        `Please merge the template dependencies into the target project's package.json at ${targetPath}/package.json.`,
        { experimental_output: zod.z.object({ success: zod.z.boolean() }) }
      );
      let buffer = [];
      for await (const chunk of result.fullStream) {
        if (chunk.type === "text-delta") {
          buffer.push(chunk.textDelta);
          if (buffer.length > 20) {
            console.log(buffer.join(""));
            buffer = [];
          }
        }
      }
      if (buffer.length > 0) {
        console.log(buffer.join(""));
      }
      return {
        success: true,
        applied: true,
        message: `Successfully merged template dependencies and installed packages for ${slug}`
      };
    } catch (error) {
      console.error("Package merge failed:", error);
      return {
        success: false,
        applied: false,
        message: `Package merge failed: ${error instanceof Error ? error.message : String(error)}`,
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }
});
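The merge rules the package-merger agent is asked to apply are deterministic, so a plain function can express them. The sketch below mirrors the EXAMPLE in the instructions above (new dependencies are added with the template's version, existing target versions win on conflict); it is illustrative and not part of the package:

// Minimal sketch of the stated merge rule for one dependency map.
type DepMap = Record<string, string>;

function mergeDeps(target: DepMap = {}, template: DepMap = {}): DepMap {
  const merged: DepMap = { ...target };
  for (const [name, version] of Object.entries(template)) {
    if (!(name in merged)) merged[name] = version; // add new, keep existing versions
  }
  return merged;
}

// Matches the EXAMPLE in the instructions:
mergeDeps(
  { "@mastra/core": "latest", "zod": "^3.25.0" },
  { "@mastra/libsql": "latest", "@mastra/core": "latest", "zod": "^3.25.67" },
);
// => { "@mastra/core": "latest", "zod": "^3.25.0", "@mastra/libsql": "latest" }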
var flatInstallStep = workflows.createStep({
  id: "flat-install",
  description: "Run a flat install command without specifying packages",
  inputSchema: zod.z.object({
    targetPath: zod.z.string().describe("Path to the project to install packages in")
  }),
  outputSchema: zod.z.object({
    success: zod.z.boolean(),
    message: zod.z.string(),
    details: zod.z.string().optional()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("Running flat install...");
    const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
    try {
      await spawnSWPM(targetPath, "install", []);
      return {
        success: true,
        message: "Successfully ran flat install command",
        details: "Installed all dependencies from package.json"
      };
    } catch (error) {
      console.error("Flat install failed:", error);
      return {
        success: false,
        message: `Flat install failed: ${error instanceof Error ? error.message : String(error)}`
      };
    }
  }
});
var programmaticFileCopyStep = workflows.createStep({
  id: "programmatic-file-copy",
  description: "Programmatically copy template files to target project based on ordered units",
  inputSchema: zod.z.object({
    orderedUnits: zod.z.array(
      zod.z.object({
        kind: zod.z.string(),
        id: zod.z.string(),
        file: zod.z.string()
      })
    ),
    templateDir: zod.z.string(),
    commitSha: zod.z.string(),
    slug: zod.z.string(),
    targetPath: zod.z.string().optional()
  }),
  outputSchema: zod.z.object({
    success: zod.z.boolean(),
    copiedFiles: zod.z.array(
      zod.z.object({
        source: zod.z.string(),
        destination: zod.z.string(),
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        })
      })
    ),
    conflicts: zod.z.array(
      zod.z.object({
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        }),
        issue: zod.z.string(),
        sourceFile: zod.z.string(),
        targetFile: zod.z.string()
      })
    ),
    message: zod.z.string(),
    error: zod.z.string().optional()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("Programmatic file copy step starting...");
    const { orderedUnits, templateDir, commitSha, slug } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
    try {
      const copiedFiles = [];
      const conflicts = [];
      const analyzeNamingConvention = async (directory) => {
        try {
          const files = await promises.readdir(path.resolve(targetPath, directory), { withFileTypes: true });
          const tsFiles = files.filter((f) => f.isFile() && f.name.endsWith(".ts")).map((f) => f.name);
          if (tsFiles.length === 0) return "unknown";
          const camelCaseCount = tsFiles.filter((f) => /^[a-z][a-zA-Z0-9]*\.ts$/.test(f)).length;
          const snakeCaseCount = tsFiles.filter((f) => /^[a-z][a-z0-9_]*\.ts$/.test(f) && f.includes("_")).length;
          const kebabCaseCount = tsFiles.filter((f) => /^[a-z][a-z0-9-]*\.ts$/.test(f) && f.includes("-")).length;
          const pascalCaseCount = tsFiles.filter((f) => /^[A-Z][a-zA-Z0-9]*\.ts$/.test(f)).length;
          const max = Math.max(camelCaseCount, snakeCaseCount, kebabCaseCount, pascalCaseCount);
          if (max === 0) return "unknown";
          if (camelCaseCount === max) return "camelCase";
          if (snakeCaseCount === max) return "snake_case";
          if (kebabCaseCount === max) return "kebab-case";
          if (pascalCaseCount === max) return "PascalCase";
          return "unknown";
        } catch {
          return "unknown";
        }
      };
      const convertNaming = (name, convention) => {
        const baseName = path.basename(name, path.extname(name));
        const ext = path.extname(name);
        switch (convention) {
          case "camelCase":
            return baseName.replace(/[-_]/g, "").replace(/([A-Z])/g, (match, p1, offset) => offset === 0 ? p1.toLowerCase() : p1) + ext;
          case "snake_case":
            return baseName.replace(/[-]/g, "_").replace(/([A-Z])/g, (match, p1, offset) => (offset === 0 ? "" : "_") + p1.toLowerCase()) + ext;
          case "kebab-case":
            return baseName.replace(/[_]/g, "-").replace(/([A-Z])/g, (match, p1, offset) => (offset === 0 ? "" : "-") + p1.toLowerCase()) + ext;
          case "PascalCase":
            return baseName.replace(/[-_]/g, "").replace(/^[a-z]/, (match) => match.toUpperCase()) + ext;
          default:
            return name;
        }
      };
      for (const unit of orderedUnits) {
        console.log(`Processing ${unit.kind} unit "${unit.id}" from file "${unit.file}"`);
        let sourceFile;
        let resolvedUnitFile;
        if (unit.file.includes("/")) {
          sourceFile = path.resolve(templateDir, unit.file);
          resolvedUnitFile = unit.file;
        } else {
          const folderPath = AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE[unit.kind];
          if (!folderPath) {
            conflicts.push({
              unit: { kind: unit.kind, id: unit.id },
              issue: `Unknown unit kind: ${unit.kind}`,
              sourceFile: unit.file,
              targetFile: "N/A"
            });
            continue;
          }
          resolvedUnitFile = `${folderPath}/${unit.file}`;
          sourceFile = path.resolve(templateDir, resolvedUnitFile);
        }
        if (!fs.existsSync(sourceFile)) {
          conflicts.push({
            unit: { kind: unit.kind, id: unit.id },
            issue: `Source file not found: ${sourceFile}`,
            sourceFile: resolvedUnitFile,
            targetFile: "N/A"
          });
          continue;
        }
        const targetDir = path.dirname(resolvedUnitFile);
        const namingConvention = await analyzeNamingConvention(targetDir);
        console.log(`Detected naming convention in ${targetDir}: ${namingConvention}`);
        const hasExtension = path.extname(unit.id) !== "";
        const baseId = hasExtension ? path.basename(unit.id, path.extname(unit.id)) : unit.id;
        const fileExtension = path.extname(unit.file);
        const convertedFileName = namingConvention !== "unknown" ? convertNaming(baseId + fileExtension, namingConvention) : baseId + fileExtension;
        const targetFile = path.resolve(targetPath, targetDir, convertedFileName);
        if (fs.existsSync(targetFile)) {
          const strategy = determineConflictStrategy();
          console.log(`File exists: ${convertedFileName}, using strategy: ${strategy}`);
          switch (strategy) {
            case "skip":
              conflicts.push({
                unit: { kind: unit.kind, id: unit.id },
                issue: `File exists - skipped: ${convertedFileName}`,
                sourceFile: unit.file,
                targetFile: `${targetDir}/${convertedFileName}`
              });
              console.log(`\u23ED\uFE0F Skipped ${unit.kind} "${unit.id}": file already exists`);
              continue;
            case "backup-and-replace":
              try {
                await backupAndReplaceFile(sourceFile, targetFile);
                copiedFiles.push({
                  source: sourceFile,
                  destination: targetFile,
                  unit: { kind: unit.kind, id: unit.id }
                });
                console.log(
                  `\u{1F504} Replaced ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName} (backup created)`
                );
                continue;
              } catch (backupError) {
                conflicts.push({
                  unit: { kind: unit.kind, id: unit.id },
                  issue: `Failed to backup and replace: ${backupError instanceof Error ? backupError.message : String(backupError)}`,
                  sourceFile: unit.file,
                  targetFile: `${targetDir}/${convertedFileName}`
                });
                continue;
              }
            case "rename":
              try {
                const uniqueTargetFile = await renameAndCopyFile(sourceFile, targetFile);
                copiedFiles.push({
                  source: sourceFile,
                  destination: uniqueTargetFile,
                  unit: { kind: unit.kind, id: unit.id }
                });
                console.log(`\u{1F4DD} Renamed ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${path.basename(uniqueTargetFile)}`);
                continue;
              } catch (renameError) {
                conflicts.push({
                  unit: { kind: unit.kind, id: unit.id },
                  issue: `Failed to rename and copy: ${renameError instanceof Error ? renameError.message : String(renameError)}`,
                  sourceFile: unit.file,
                  targetFile: `${targetDir}/${convertedFileName}`
                });
                continue;
              }
            default:
              conflicts.push({
                unit: { kind: unit.kind, id: unit.id },
                issue: `Unknown conflict strategy: ${strategy}`,
                sourceFile: unit.file,
                targetFile: `${targetDir}/${convertedFileName}`
              });
              continue;
          }
        }
        await promises.mkdir(path.dirname(targetFile), { recursive: true });
        try {
          await promises.copyFile(sourceFile, targetFile);
          copiedFiles.push({
            source: sourceFile,
            destination: targetFile,
            unit: { kind: unit.kind, id: unit.id }
          });
          console.log(`\u2713 Copied ${unit.kind} "${unit.id}": ${unit.file} \u2192 ${convertedFileName}`);
        } catch (copyError) {
          conflicts.push({
            unit: { kind: unit.kind, id: unit.id },
            issue: `Failed to copy file: ${copyError instanceof Error ? copyError.message : String(copyError)}`,
            sourceFile: unit.file,
            targetFile: `${targetDir}/${convertedFileName}`
          });
        }
      }
      if (copiedFiles.length > 0) {
        try {
          const fileList = copiedFiles.map((f) => f.destination);
          const gitCommand = ["git", "add", ...fileList];
          await exec(gitCommand.join(" "), { cwd: targetPath });
          await exec(
            `git commit -m "feat(template): copy ${copiedFiles.length} files from ${slug}@${commitSha.substring(0, 7)}"`,
            { cwd: targetPath }
          );
          console.log(`\u2713 Committed ${copiedFiles.length} copied files`);
        } catch (commitError) {
          console.warn("Failed to commit copied files:", commitError);
        }
      }
      const message = `Programmatic file copy completed. Copied ${copiedFiles.length} files, ${conflicts.length} conflicts detected.`;
      console.log(message);
      return {
        success: true,
        copiedFiles,
        conflicts,
        message
      };
    } catch (error) {
      console.error("Programmatic file copy failed:", error);
      throw new Error(`Programmatic file copy failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  }
});
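convertNaming above is scoped inside the step's execute function, so it cannot be imported directly. As a rough, self-contained check of what its kebab-case branch produces, a sketch with illustrative file names:

// Mirrors only the kebab-case branch of convertNaming; names are examples.
import * as path from "node:path";
import { strictEqual } from "node:assert";

const toKebab = (name: string): string => {
  const base = path.basename(name, path.extname(name));
  const ext = path.extname(name);
  return base
    .replace(/[_]/g, "-")
    .replace(/([A-Z])/g, (_m, p1, offset) => (offset === 0 ? "" : "-") + p1.toLowerCase()) + ext;
};

strictEqual(toKebab("weatherAgent.ts"), "weather-agent.ts");
strictEqual(toKebab("chat_agent.ts"), "chat-agent.ts");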
var intelligentMergeStep = workflows.createStep({
  id: "intelligent-merge",
  description: "Use AgentBuilder to intelligently merge template files",
  inputSchema: zod.z.object({
    conflicts: zod.z.array(
      zod.z.object({
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        }),
        issue: zod.z.string(),
        sourceFile: zod.z.string(),
        targetFile: zod.z.string()
      })
    ),
    copiedFiles: zod.z.array(
      zod.z.object({
        source: zod.z.string(),
        destination: zod.z.string(),
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        })
      })
    ),
    templateDir: zod.z.string(),
    commitSha: zod.z.string(),
    slug: zod.z.string(),
    targetPath: zod.z.string().optional()
  }),
  outputSchema: zod.z.object({
    success: zod.z.boolean(),
    applied: zod.z.boolean(),
    message: zod.z.string(),
    conflictsResolved: zod.z.array(
      zod.z.object({
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        }),
        issue: zod.z.string(),
        resolution: zod.z.string()
      })
    ),
    error: zod.z.string().optional(),
    branchName: zod.z.string().optional()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("Intelligent merge step starting...");
    const { conflicts, copiedFiles, commitSha, slug, templateDir } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
    const baseBranchName = `feat/install-template-${slug}`;
    try {
      let branchName = baseBranchName;
      try {
        await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
        console.log(`Created new branch: ${branchName}`);
      } catch (error) {
        const errorStr = error instanceof Error ? error.message : String(error);
        if (errorStr.includes("already exists")) {
          try {
            await exec(`git checkout "${branchName}"`, { cwd: targetPath });
            console.log(`Switched to existing branch: ${branchName}`);
          } catch {
            const timestamp = Date.now().toString().slice(-6);
            branchName = `${baseBranchName}-${timestamp}`;
            await exec(`git checkout -b "${branchName}"`, { cwd: targetPath });
            console.log(`Created unique branch: ${branchName}`);
          }
        } else {
          throw error;
        }
      }
      const copyFileTool = tools.createTool({
        id: "copy-file",
        description: "Copy a file from template to target project (use only for edge cases - most files are already copied programmatically).",
        inputSchema: zod.z.object({
          sourcePath: zod.z.string().describe("Path to the source file relative to template directory"),
          destinationPath: zod.z.string().describe("Path to the destination file relative to target project")
        }),
        outputSchema: zod.z.object({
          success: zod.z.boolean(),
          message: zod.z.string(),
          error: zod.z.string().optional()
        }),
        execute: async ({ context }) => {
          try {
            const { sourcePath, destinationPath } = context;
            const resolvedSourcePath = path.resolve(templateDir, sourcePath);
            const resolvedDestinationPath = path.resolve(targetPath, destinationPath);
            if (fs.existsSync(resolvedSourcePath) && !fs.existsSync(path.dirname(resolvedDestinationPath))) {
              await promises.mkdir(path.dirname(resolvedDestinationPath), { recursive: true });
            }
            await promises.copyFile(resolvedSourcePath, resolvedDestinationPath);
            return {
              success: true,
              message: `Successfully copied file from ${sourcePath} to ${destinationPath}`
            };
          } catch (error) {
            return {
              success: false,
              message: `Failed to copy file: ${error instanceof Error ? error.message : String(error)}`,
              error: error instanceof Error ? error.message : String(error)
            };
          }
        }
      });
      const agentBuilder = new AgentBuilder({
        projectPath: targetPath,
        mode: "template",
        model: openai.openai("gpt-4o-mini"),
        instructions: `
You are an expert at integrating Mastra template components into existing projects.

CRITICAL CONTEXT:
- Files have been programmatically copied from template to target project
- Your job is to handle integration issues, registration, and validation

FILES SUCCESSFULLY COPIED:
${JSON.stringify(copiedFiles, null, 2)}

CONFLICTS TO RESOLVE:
${JSON.stringify(conflicts, null, 2)}

CRITICAL INSTRUCTIONS:
1. **When committing changes**: NEVER add dependency/build directories. Use specific file paths with 'git add'
2. **Package management**: NO need to install packages (already handled by package merge step)
3. **Validation**: When validation fails due to import issues, check existing files and imports for correct naming conventions
4. **Variable vs file names**: A variable name might differ from file name (e.g., filename: ./downloaderTool.ts, export const fetcherTool(...))
5. **File copying**: Most files are already copied programmatically. Only use copyFile tool for edge cases where additional files are needed

KEY RESPONSIBILITIES:
1. Resolve any conflicts from the programmatic copy step
2. Register components in existing Mastra index file (agents, workflows, networks, mcp-servers)
3. DO NOT register tools in existing Mastra index file - tools should remain standalone
4. Fix import path issues in copied files
5. Ensure TypeScript imports and exports are correct
6. Validate integration works properly
7. Copy additional files ONLY if needed for conflict resolution or missing dependencies

MASTRA-SPECIFIC INTEGRATION:
- Agents: Register in existing Mastra index file
- Workflows: Register in existing Mastra index file
- Networks: Register in existing Mastra index file
- MCP servers: Register in existing Mastra index file
- Tools: Copy to ${AgentBuilderDefaults.DEFAULT_FOLDER_STRUCTURE.tool} but DO NOT register in existing Mastra index file

EDGE CASE FILE COPYING:
- IF a file for a resource does not exist in the target project AND was not programmatically copied, you can use copyFile tool
- When taking files from template, ensure you get the right file name and path
- Only copy files that are actually needed for the integration to work

NAMING CONVENTION GUIDANCE:
When fixing imports or understanding naming patterns, use these examples:

**Import Path Patterns:**
- camelCase files: import { myAgent } from './myAgent'
- snake_case files: import { myAgent } from './my_agent'
- kebab-case files: import { myAgent } from './my-agent'
- PascalCase files: import { MyAgent } from './MyAgent'

**Naming Detection Examples:**
- Files like "weatherAgent.ts", "chatAgent.ts" \u2192 use camelCase
- Files like "weather_agent.ts", "chat_agent.ts" \u2192 use snake_case
- Files like "weather-agent.ts", "chat-agent.ts" \u2192 use kebab-case
- Files like "WeatherAgent.ts", "ChatAgent.ts" \u2192 use PascalCase

**Key Rule:** Keep variable/export names unchanged - only adapt file names and import paths

Template information:
- Slug: ${slug}
- Commit: ${commitSha.substring(0, 7)}
- Branch: ${branchName}
`,
        tools: {
          copyFile: copyFileTool
        }
      });
      const tasks = [];
      conflicts.forEach((conflict) => {
        tasks.push({
          id: `conflict-${conflict.unit.kind}-${conflict.unit.id}`,
          content: `Resolve conflict: ${conflict.issue}`,
          status: "pending",
          priority: "high",
          notes: `Unit: ${conflict.unit.kind}:${conflict.unit.id}, Issue: ${conflict.issue}, Source: ${conflict.sourceFile}, Target: ${conflict.targetFile}`
        });
      });
      const nonToolFiles = copiedFiles.filter((f) => f.unit.kind !== "tool");
      if (nonToolFiles.length > 0) {
        tasks.push({
          id: "register-components",
          content: `Register ${nonToolFiles.length} components in existing Mastra index file (src/mastra/index.ts)`,
          status: "pending",
          priority: "medium",
          dependencies: conflicts.length > 0 ? conflicts.map((c) => `conflict-${c.unit.kind}-${c.unit.id}`) : void 0,
          notes: `Components to register: ${nonToolFiles.map((f) => `${f.unit.kind}:${f.unit.id}`).join(", ")}`
        });
      }
      console.log(`Creating task list with ${tasks.length} tasks...`);
      await AgentBuilderDefaults.manageTaskList({ action: "create", tasks });
      await logGitState(targetPath, "before intelligent merge");
      const result = await agentBuilder.stream(`
You need to work through a task list to complete the template integration.

CRITICAL INSTRUCTIONS:

**STEP 1: GET YOUR TASK LIST**
1. Use manageTaskList tool with action "list" to see all pending tasks
2. Work through tasks in dependency order (complete dependencies first)

**STEP 2: PROCESS EACH TASK SYSTEMATICALLY**
For each task:
1. Use manageTaskList to mark the current task as 'in_progress'
2. Complete the task according to its requirements
3. Use manageTaskList to mark the task as 'completed' when done
4. Continue until all tasks are completed

**TASK TYPES AND REQUIREMENTS:**

**Conflict Resolution Tasks:**
- Analyze the specific conflict and determine best resolution strategy
- For file name conflicts: merge content or rename appropriately
- For missing files: investigate and copy if needed
- For other issues: apply appropriate fixes

**Component Registration Task:**
- Update main Mastra instance file to register new components
- Only register: agents, workflows, networks, mcp-servers
- DO NOT register tools in main config
- Ensure proper import paths and naming conventions

**COMMIT STRATEGY:**
- After resolving conflicts: "feat(template): resolve conflicts for ${slug}@${commitSha.substring(0, 7)}"
- After registration: "feat(template): register components from ${slug}@${commitSha.substring(0, 7)}"

**CRITICAL NOTES:**
- Template source: ${templateDir}
- Target project: ${targetPath}
- Focus ONLY on conflict resolution and component registration
- Use executeCommand for git commits after each task
- DO NOT perform validation - that's handled by the dedicated validation step

Start by listing your tasks and work through them systematically!
`);
      const actualResolutions = [];
      for await (const chunk of result.fullStream) {
        if (chunk.type === "step-finish" || chunk.type === "step-start") {
          console.log({
            type: chunk.type,
            msgId: chunk.messageId
          });
        } else {
          console.log(JSON.stringify(chunk, null, 2));
          if (chunk.type === "tool-result" && chunk.toolName === "manageTaskList") {
            try {
              const toolResult = chunk.result;
              if (toolResult.action === "update" && toolResult.status === "completed") {
                actualResolutions.push({
                  taskId: toolResult.taskId || "",
                  action: toolResult.action,
                  status: toolResult.status,
                  content: toolResult.content || "",
                  notes: toolResult.notes
                });
                console.log(`\u{1F4CB} Task completed: ${toolResult.taskId} - ${toolResult.content}`);
              }
            } catch (parseError) {
              console.warn("Failed to parse task management result:", parseError);
            }
          }
        }
      }
      await logGitState(targetPath, "after intelligent merge");
      const conflictResolutions = conflicts.map((conflict) => {
        const taskId = `conflict-${conflict.unit.kind}-${conflict.unit.id}`;
        const actualResolution = actualResolutions.find((r) => r.taskId === taskId);
        if (actualResolution) {
          return {
            unit: conflict.unit,
            issue: conflict.issue,
            resolution: actualResolution.notes || actualResolution.content || `Completed: ${conflict.unit.kind} ${conflict.unit.id}`,
            actualWork: true
          };
        } else {
          return {
            unit: conflict.unit,
            issue: conflict.issue,
            resolution: `No specific resolution found for ${conflict.unit.kind} ${conflict.unit.id}`,
            actualWork: false
          };
        }
      });
      return {
        success: true,
        applied: true,
        branchName,
        message: `Successfully resolved ${conflicts.length} conflicts from template ${slug}`,
        conflictsResolved: conflictResolutions
      };
    } catch (error) {
      return {
        success: false,
        applied: false,
        branchName: baseBranchName,
        message: `Failed to resolve conflicts: ${error instanceof Error ? error.message : String(error)}`,
        conflictsResolved: [],
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }
});
var validationAndFixStep = workflows.createStep({
  id: "validation-and-fix",
  description: "Validate the merged template code and fix any validation errors using a specialized agent",
  inputSchema: zod.z.object({
    commitSha: zod.z.string(),
    slug: zod.z.string(),
    targetPath: zod.z.string().optional(),
    templateDir: zod.z.string(),
    orderedUnits: zod.z.array(
      zod.z.object({
        kind: zod.z.string(),
        id: zod.z.string(),
        file: zod.z.string()
      })
    ),
    copiedFiles: zod.z.array(
      zod.z.object({
        source: zod.z.string(),
        destination: zod.z.string(),
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        })
      })
    ),
    conflictsResolved: zod.z.array(
      zod.z.object({
        unit: zod.z.object({
          kind: zod.z.string(),
          id: zod.z.string()
        }),
        issue: zod.z.string(),
        resolution: zod.z.string()
      })
    ).optional(),
    maxIterations: zod.z.number().optional().default(5)
  }),
  outputSchema: zod.z.object({
    success: zod.z.boolean(),
    applied: zod.z.boolean(),
    message: zod.z.string(),
    validationResults: zod.z.object({
      valid: zod.z.boolean(),
      errorsFixed: zod.z.number(),
      remainingErrors: zod.z.number()
    }),
    error: zod.z.string().optional()
  }),
  execute: async ({ inputData, runtimeContext }) => {
    console.log("Validation and fix step starting...");
    const { commitSha, slug, orderedUnits, templateDir, copiedFiles, conflictsResolved, maxIterations = 5 } = inputData;
    const targetPath = inputData.targetPath || runtimeContext.get("targetPath") || process.cwd();
    const hasChanges = copiedFiles.length > 0 || conflictsResolved && conflictsResolved.length > 0;
    if (!hasChanges) {
      console.log("\u23ED\uFE0F Skipping validation - no files copied or conflicts resolved");
      return {
        success: true,
        applied: false,
        message: "No changes to validate - template already integrated or no conflicts resolved",
        validationResults: {
          valid: true,
          errorsFixed: 0,
          remainingErrors: 0
        }
      };
    }
    console.log(
      `\u{1F4CB} Changes detected: ${copiedFiles.length} files copied, ${conflictsResolved?.length || 0} conflicts resolved`
    );
    let currentIteration = 1;
    try {
      const allTools = await AgentBuilderDefaults.DEFAULT_TOOLS(targetPath, "template");
      const validationAgent = new agent.Agent({
        name: "code-validator-fixer",
        description: "Specialized agent for validating and fixing template integration issues",
        instructions: `You are a code validation and fixing specialist. Your job is to:

1. **Run comprehensive validation** using the validateCode tool to check for:
- TypeScript compilation errors
- ESLint issues
- Import/export problems
- Missing dependencies

2. **Fix validation errors systematically**:
- Use readFile to examine files with errors
- Use multiEdit to fix issues like missing imports, incorrect paths, syntax errors
- Use listDirectory to understand project structure when fixing import paths
- Update file contents to resolve TypeScript and linting issues

3. **Re-validate after fixes** to ensure all issues are resolved

4. **Focus on template integration issues**:
- Files were copied with new names based on unit IDs
- Original template imports may reference old filenames
- Missing imports in index files
- Incorrect file paths in imports
- Type mismatches after integration
- Missing exports in barrel files
- Use the COPIED FILES mapping below to fix import paths

CRITICAL: Always validate the entire project first to get a complete picture of issues, then fix them systematically, and re-validate to confirm fixes worked.

CRITICAL IMPORT PATH RESOLUTION:
The following files were copied from template with new names:
${JSON.stringify(copiedFiles, null, 2)}

When fixing import errors:
1. Check if the missing module corresponds to a copied file
2. Use listDirectory to verify actual filenames in target directories
3. Update import paths to match the actual copied filenames
4. Ensure exported variable names match what's being imported

EXAMPLE: If error shows "Cannot find module './tools/download-csv-tool'" but a file was copied as "csv-fetcher-tool.ts", update the import to "./tools/csv-fetcher-tool"

${conflictsResolved ? `CONFLICTS RESOLVED BY INTELLIGENT MERGE:
${JSON.stringify(conflictsResolved, null, 2)}
` : ""}

INTEGRATED UNITS:
${JSON.stringify(orderedUnits, null, 2)}

Be thorough and methodical. Always use listDirectory to verify actual file existence before fixing imports.`,
        model: openai.openai("gpt-4o-mini"),
        tools: {
          validateCode: allTools.validateCode,
          readFile: allTools.readFile,
          multiEdit: allTools.multiEdit,
          listDirectory: allTools.listDirectory,
          executeCommand: allTools.executeCommand
        }
      });
      console.log("Starting validation and fix agent with internal loop...");
      let validationResults = {
        valid: false,
        errorsFixed: 0,
        remainingErrors: 1,
        // Start with 1 to enter the loop
        iteration: currentIteration
      };
      while (validationResults.remainingErrors > 0 && currentIteration <= maxIterations) {
        console.log(`
=== Validation Iteration ${currentIteration} ===`);
        const iterationPrompt = currentIteration === 1 ? `Please validate the template integration and fix any errors found in the project at ${targetPath}. The template "${slug}" (${commitSha.substring(0, 7)}) was just integrated and may have validation issues that need fixing.

Start by running validateCode with all validation types to get a complete picture of any issues, then systematically fix them.` : `Continue validation and fixing for the template integration at ${targetPath}. This is iteration ${currentIteration} of validation.

Previous iterations may have fixed some issues, so start by re-running validateCode to see the current state, then fix any remaining issues.`;
        const result = await validationAgent.stream(iterationPrompt, {
          experimental_output: zod.z.object({ success: zod.z.boolean() })
        });
        let iterationErrors = 0;
        let previousErrors = validationResults.remainingErrors;
        for await (const chunk of result.fullStream) {
          if (chunk.type === "step-finish" || chunk.type === "step-start") {
            console.log({
              type: chunk.type,
              msgId: chunk.messageId,
              iteration: currentIteration
            });
          } else {
            console.log(JSON.stringify(chunk, null, 2));
          }
          if (chunk.type === "tool-result") {
            if (chunk.toolName === "validateCode") {
              const toolResult = chunk.result;
              if (toolResult?.summary) {
                iterationErrors = toolResult.summary.totalErrors || 0;
                console.log(`Iteration ${currentIteration}: Found ${iterationErrors} errors`);
              }
            }
          }
        }
        validationResults.remainingErrors = iterationErrors;
        validationResults.errorsFixed += Math.max(0, previousErrors - iterationErrors);
        validationResults.valid = iterationErrors === 0;
        validationResults.iteration = currentIteration;
        console.log(`Iteration ${currentIteration} complete: ${iterationErrors} errors remaining`);
        if (iterationErrors === 0) {
          console.log(`\u2705 All validation issues resolved in ${currentIteration} iterations!`);
          break;
        } else if (currentIteration >= maxIterations) {
          console.log(`\u26A0\uFE0F Max iterations (${maxIterations}) reached. ${iterationErrors} errors still remaining.`);
          break;
        }
        currentIteration++;
      }
      try {
        await exec(
          `git add . && git commit -m "fix(template): resolve validation errors for ${slug}@${commitSha.substring(0, 7)}" || true`,
          {
            cwd: targetPath
          }
        );
      } catch (commitError) {
        console.warn("Failed to commit validation fixes:", commitError);
      }
      return {
        success: true,
        applied: true,
        message: `Validation completed in ${currentIteration} iteration${currentIteration > 1 ? "s" : ""}. ${validationResults.valid ? "All issues resolved!" : `${validationResults.remainingErrors} issues remaining`}`,
        validationResults: {
          valid: validationResults.valid,
          errorsFixed: validationResults.errorsFixed,
          remainingErrors: validationResults.remainingErrors
        }
      };
    } catch (error) {
      console.error("Validation and fix failed:", error);
      return {
        success: false,
        applied: false,
        message: `Validation and fix failed: ${error instanceof Error ? error.message : String(error)}`,
        validationResults: {
          valid: false,
          errorsFixed: 0,
          remainingErrors: -1
        },
        error: error instanceof Error ? error.message : String(error)
      };
    } finally {
      try {
        await promises.rm(templateDir, { recursive: true, force: true });
        console.log(`\u2713 Cleaned up template directory: ${templateDir}`);
      } catch (cleanupError) {
        console.warn("Failed to cleanup template directory:", cleanupError);
      }
    }
  }
});
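Stripped of the agent streaming, the control flow of the bounded validate-and-fix loop above reduces to roughly the following sketch, where validateAndFixOnce is a hypothetical stand-in for one agent pass that returns the remaining error count:

// Hypothetical reduction of validationAndFixStep's loop: run up to
// maxIterations passes and stop early once a pass reports zero errors.
async function fixUntilClean(
  validateAndFixOnce: () => Promise<number>,
  maxIterations = 5,
): Promise<boolean> {
  for (let iteration = 1; iteration <= maxIterations; iteration++) {
    const remaining = await validateAndFixOnce();
    if (remaining === 0) return true;
  }
  return false;
}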
var mergeTemplateWorkflow = workflows.createWorkflow({
  id: "merge-template",
  description: "Merges a Mastra template repository into the current project using intelligent AgentBuilder-powered merging",
  inputSchema: MergeInputSchema,
  outputSchema: ApplyResultSchema,
  steps: [
    cloneTemplateStep,
    analyzePackageStep,
    discoverUnitsStep,
    orderUnitsStep,
    packageMergeStep,
    flatInstallStep,
    programmaticFileCopyStep,
    intelligentMergeStep,
    validationAndFixStep
  ]
}).then(cloneTemplateStep).parallel([analyzePackageStep, discoverUnitsStep]).map(async ({ getStepResult }) => {
  const discoverResult = getStepResult(discoverUnitsStep);
  return discoverResult;
}).then(orderUnitsStep).map(async ({ getStepResult, getInitData }) => {
  const cloneResult = getStepResult(cloneTemplateStep);
  const packageResult = getStepResult(analyzePackageStep);
  const initData = getInitData();
  return {
    commitSha: cloneResult.commitSha,
    slug: cloneResult.slug,
    targetPath: initData.targetPath,
    packageInfo: packageResult
  };
}).then(packageMergeStep).map(async ({ getInitData }) => {
  const initData = getInitData();
  return {
    targetPath: initData.targetPath
  };
}).then(flatInstallStep).map(async ({ getStepResult, getInitData }) => {
  const cloneResult = getStepResult(cloneTemplateStep);
  const orderResult = getStepResult(orderUnitsStep);
  const initData = getInitData();
  return {
    orderedUnits: orderResult.orderedUnits,
    templateDir: cloneResult.templateDir,
    commitSha: cloneResult.commitSha,
    slug: cloneResult.slug,
    targetPath: initData.targetPath
  };
}).then(programmaticFileCopyStep).map(async ({ getStepResult, getInitData }) => {
  const copyResult = getStepResult(programmaticFileCopyStep);
  const cloneResult = getStepResult(cloneTemplateStep);
  const initData = getInitData();
  return {
    conflicts: copyResult.conflicts,
    copiedFiles: copyResult.copiedFiles,
    commitSha: cloneResult.commitSha,
    slug: cloneResult.slug,
    targetPath: initData.targetPath,
    templateDir: cloneResult.templateDir
  };
}).then(intelligentMergeStep).map(async ({ getStepResult, getInitData }) => {
  const cloneResult = getStepResult(cloneTemplateStep);
  const orderResult = getStepResult(orderUnitsStep);
  const copyResult = getStepResult(programmaticFileCopyStep);
  const mergeResult = getStepResult(intelligentMergeStep);
  const initData = getInitData();
  return {
    commitSha: cloneResult.commitSha,
    slug: cloneResult.slug,
    targetPath: initData.targetPath,
    templateDir: cloneResult.templateDir,
    orderedUnits: orderResult.orderedUnits,
    copiedFiles: copyResult.copiedFiles,
    conflictsResolved: mergeResult.conflictsResolved
  };
}).then(validationAndFixStep).map(async ({ getStepResult, getInitData }) => {
  const validationResult = getStepResult(validationAndFixStep);
  const intelligentMergeResult = getStepResult(intelligentMergeStep);
  const copyResult = getStepResult(programmaticFileCopyStep);
  const cloneResult = getStepResult(cloneTemplateStep);
  const initData = getInitData();
  const branchName = intelligentMergeResult.branchName || `feat/install-template-${cloneResult.slug || initData.slug}`;
  const allErrors = [copyResult.error, intelligentMergeResult.error, validationResult.error].filter(Boolean);
  const overallSuccess = copyResult.success !== false && intelligentMergeResult.success !== false && validationResult.success;
  const messages = [];
  if (copyResult.copiedFiles?.length > 0) {
    messages.push(`${copyResult.copiedFiles.length} files copied`);
  }
  if (copyResult.conflicts?.length > 0) {
    messages.push(`${copyResult.conflicts.length} conflicts skipped`);
  }
  if (intelligentMergeResult.conflictsResolved?.length > 0) {
    messages.push(`${intelligentMergeResult.conflictsResolved.length} conflicts resolved`);
  }
  if (validationResult.validationResults?.errorsFixed > 0) {
    messages.push(`${validationResult.validationResults.errorsFixed} validation errors fixed`);
  }
  const comprehensiveMessage = messages.length > 0 ? `Template merge completed: ${messages.join(", ")}` : validationResult.message || "Template merge completed";
  return {
    success: overallSuccess,
    applied: validationResult.applied || copyResult.copiedFiles?.length > 0 || false,
    message: comprehensiveMessage,
    validationResults: validationResult.validationResults,
    error: allErrors.length > 0 ? allErrors.join("; ") : void 0,
    errors: allErrors.length > 0 ? allErrors : void 0,
    branchName,
    // Additional debugging info
    stepResults: {
      copySuccess: copyResult.success,
      mergeSuccess: intelligentMergeResult.success,
      validationSuccess: validationResult.success,
      filesCopied: copyResult.copiedFiles?.length || 0,
      conflictsSkipped: copyResult.conflicts?.length || 0,
      conflictsResolved: intelligentMergeResult.conflictsResolved?.length || 0
    }
  };
}).commit();
var determineConflictStrategy = (_unit, _targetFile) => {
  return "skip";
};

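determineConflictStrategy above always returns "skip", even though the copy loop also understands "backup-and-replace" and "rename". A hypothetical, kind-aware variant (not in the package) might look like this:

type ConflictStrategy = "skip" | "backup-and-replace" | "rename";

// Assumption: shared helper files ("other") are safer to rename than to skip,
// while everything else keeps the shipped default of leaving existing files alone.
const pickStrategySketch = (unit: { kind: string }): ConflictStrategy =>
  unit.kind === "other" ? "rename" : "skip";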
// src/index.ts
var AgentBuilder = class extends agent.Agent {
  builderConfig;
  /**
   * Private constructor - use AgentBuilder.create() instead
   */
  constructor(config) {
    const additionalInstructions = config.instructions ? `## Priority Instructions

${config.instructions}` : "";
    const combinedInstructions = additionalInstructions + AgentBuilderDefaults.DEFAULT_INSTRUCTIONS(config.projectPath);
    const agentConfig = {
      name: "agent-builder",
      description: "An AI agent specialized in generating Mastra agents, tools, and workflows from natural language requirements.",
      instructions: combinedInstructions,
      model: config.model,
      tools: async () => {
        return {
          ...await AgentBuilderDefaults.DEFAULT_TOOLS(config.projectPath, config.mode),
          ...config.tools || {}
        };
      },
      workflows: {
        "merge-template": mergeTemplateWorkflow
      },
      memory: new memory.Memory({
        options: AgentBuilderDefaults.DEFAULT_MEMORY_CONFIG,
        processors: [
          new WriteToDiskProcessor({ prefix: "before-filter" }),
          new ToolSummaryProcessor({ summaryModel: config.summaryModel || config.model }),
          new processors.TokenLimiter(1e5),
          new WriteToDiskProcessor({ prefix: "after-filter" })
        ]
      })
    };
    super(agentConfig);
    this.builderConfig = config;
  }
  /**
   * Enhanced generate method with AgentBuilder-specific configuration
   * Overrides the base Agent generate method to provide additional project context
   */
  generate = async (messages, generateOptions = {}) => {
    const { ...baseOptions } = generateOptions;
    const originalInstructions = await this.getInstructions({ runtimeContext: generateOptions?.runtimeContext });
    const additionalInstructions = baseOptions.instructions;
    let enhancedInstructions = originalInstructions;
    if (additionalInstructions) {
      enhancedInstructions = `${originalInstructions}

${additionalInstructions}`;
    }
    const enhancedContext = [...baseOptions.context || []];
    const enhancedOptions = {
      ...baseOptions,
      maxSteps: 300,
      // Higher default for code generation
      temperature: 0.3,
      // Lower temperature for more consistent code generation
      instructions: enhancedInstructions,
      context: enhancedContext
    };
    this.logger.debug(`[AgentBuilder:${this.name}] Starting generation with enhanced context`, {
      projectPath: this.builderConfig.projectPath
    });
    return super.generate(messages, enhancedOptions);
  };
  /**
   * Enhanced stream method with AgentBuilder-specific configuration
   * Overrides the base Agent stream method to provide additional project context
   */
  stream = async (messages, streamOptions = {}) => {
    const { ...baseOptions } = streamOptions;
    const originalInstructions = await this.getInstructions({ runtimeContext: streamOptions?.runtimeContext });
    const additionalInstructions = baseOptions.instructions;
    let enhancedInstructions = originalInstructions;
    if (additionalInstructions) {
      enhancedInstructions = `${originalInstructions}

${additionalInstructions}`;
    }
    const enhancedContext = [...baseOptions.context || []];
    const enhancedOptions = {
      ...baseOptions,
      maxSteps: 100,
      // Higher default for code generation
      temperature: 0.3,
      // Lower temperature for more consistent code generation
      instructions: enhancedInstructions,
      context: enhancedContext
    };
    this.logger.debug(`[AgentBuilder:${this.name}] Starting streaming with enhanced context`, {
      projectPath: this.builderConfig.projectPath
    });
    return super.stream(messages, enhancedOptions);
  };
  /**
   * Generate a Mastra agent from natural language requirements
   */
  async generateAgent(requirements, options) {
    const prompt = `Generate a Mastra agent based on these requirements: ${requirements}

Please provide:
1. Complete agent code with proper configuration
2. Any custom tools the agent needs
3. Example usage
4. Testing recommendations

${options?.outputFormat === "explanation" ? "Focus on explaining the approach and architecture." : ""}
${options?.outputFormat === "code" ? "Focus on providing complete, working code." : ""}
${!options?.outputFormat || options.outputFormat === "both" ? "Provide both explanation and complete code." : ""}`;
    return this.generate(prompt, {
      runtimeContext: options?.runtimeContext
    });
  }
  /**
   * Get the default configuration for AgentBuilder
   */
  static defaultConfig(projectPath) {
    return {
      instructions: AgentBuilderDefaults.DEFAULT_INSTRUCTIONS(projectPath),
      memoryConfig: AgentBuilderDefaults.DEFAULT_MEMORY_CONFIG,
      tools: AgentBuilderDefaults.DEFAULT_TOOLS
    };
  }
};

exports.AgentBuilder = AgentBuilder;
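For orientation, a minimal usage sketch based only on what this bundle shows (an AgentBuilder constructed with projectPath, mode, and model, plus the generateAgent helper). The model id and the @ai-sdk/openai import path are assumptions, and only "template" appears as a mode value in this bundle:

import { openai } from "@ai-sdk/openai";
import { AgentBuilder } from "@mastra/agent-builder";

async function main() {
  // Construction mirrors the intelligentMergeStep above; option values are illustrative.
  const builder = new AgentBuilder({
    projectPath: process.cwd(),
    mode: "template",
    model: openai("gpt-4o-mini"),
  });

  // generateAgent builds the requirements prompt shown above and delegates to generate().
  const result = await builder.generateAgent(
    "An agent that answers weather questions using a single tool",
    { outputFormat: "both" },
  );
  console.log(result);
}

main().catch(console.error);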