@devramps/mcp-server 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,2658 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/index.ts
4
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
5
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
6
+
7
+ // src/config.ts
8
+ import { readFileSync } from "fs";
9
+ import { join } from "path";
10
+ import { homedir } from "os";
11
/**
 * Resolve DevRamps credentials and API base URL.
 *
 * Resolution order:
 *  1. DEVRAMPS_API_KEY + DEVRAMPS_ORG_ID environment variables (both required);
 *  2. the CLI's on-disk config file (see loadFromConfigFile).
 * DEVRAMPS_API_URL overrides the default API host in either case, though a
 * file-stored apiBaseUrl wins for file-based credentials.
 *
 * @returns {{apiKey: string, orgId: string, apiUrl: string}}
 * @throws {Error} when no usable credentials are found anywhere.
 */
function loadConfig() {
  const {
    DEVRAMPS_API_URL: envApiUrl,
    DEVRAMPS_API_KEY: apiKey,
    DEVRAMPS_ORG_ID: orgId
  } = process.env;
  const apiUrl = envApiUrl || "https://devramps.com";
  // Environment variables take precedence over the config file.
  if (apiKey && orgId) {
    return { apiKey, orgId, apiUrl };
  }
  const stored = loadFromConfigFile();
  if (!stored) {
    throw new Error(
      "No DevRamps credentials found. Run `npx @devramps/cli login` to authenticate, or set DEVRAMPS_API_KEY and DEVRAMPS_ORG_ID environment variables."
    );
  }
  return {
    apiKey: stored.accessToken,
    orgId: stored.organizationId,
    // A file-stored base URL overrides the env/default URL.
    apiUrl: stored.apiBaseUrl || apiUrl
  };
}
30
/**
 * Best-effort load of the CLI's stored credentials from
 * ~/.devramps/configuration.json.
 *
 * @returns {object|null} the parsed config object, or null when the file is
 *   missing, unparseable, missing required fields, or its expiresAt
 *   timestamp is in the past.
 */
function loadFromConfigFile() {
  const configFile = join(homedir(), ".devramps", "configuration.json");
  let parsed;
  try {
    // Missing file and malformed JSON are both treated as "not logged in".
    parsed = JSON.parse(readFileSync(configFile, "utf-8"));
  } catch {
    return null;
  }
  const hasRequiredFields =
    parsed.accessToken && parsed.organizationId && parsed.expiresAt;
  if (!hasRequiredFields) {
    return null;
  }
  // Reject credentials that have already expired. An unparseable date
  // yields an Invalid Date, whose comparison is always false, so it is
  // NOT rejected here — same as the original behavior.
  const expiry = new Date(parsed.expiresAt);
  if (expiry <= new Date()) {
    return null;
  }
  return parsed;
}
53
+
54
+ // src/api/client.ts
55
/**
 * Error thrown for any non-2xx HTTP response from the DevRamps API.
 * Carries the HTTP status code and the response body (parsed JSON when
 * available, otherwise raw text) so callers can inspect API error details.
 */
var ApiError = class ApiError extends Error {
  constructor(statusCode, message, responseBody) {
    super(message);
    this.name = "ApiError";
    // HTTP status of the failed response (e.g. 404, 409).
    this.statusCode = statusCode;
    // JSON object or text string returned by the server, if any.
    this.responseBody = responseBody;
  }
};
63
/**
 * Authenticated client for the DevRamps REST API.
 * All organization-scoped calls funnel through request(), which attaches the
 * bearer token and throws ApiError on any non-2xx response.
 */
var DevRampsApiClient = class {
  baseUrl;
  apiKey;
  orgId;
  /** @param config - { apiUrl, apiKey, orgId }, as produced by loadConfig(). */
  constructor(config) {
    // Strip a single trailing slash so URL joining is predictable.
    this.baseUrl = config.apiUrl.replace(/\/$/, "");
    this.apiKey = config.apiKey;
    this.orgId = config.orgId;
  }
  /** Prefix a path with the organization-scoped API root. */
  orgPath(path) {
    return `/api/v1/organizations/${this.orgId}${path}`;
  }
  /**
   * Perform an authenticated JSON request.
   * @param method - HTTP method.
   * @param path - absolute path (joined against baseUrl).
   * @param body - optional JSON body.
   * @param query - optional query params; undefined values are skipped.
   * @returns parsed JSON response body.
   * @throws {ApiError} on any non-2xx response.
   */
  async request(method, path, body, query) {
    const url = new URL(path, this.baseUrl);
    if (query) {
      for (const [key, value] of Object.entries(query)) {
        // Skip undefined so optional params don't appear as "undefined".
        if (value !== void 0) {
          url.searchParams.set(key, String(value));
        }
      }
    }
    const headers = {
      Authorization: `Bearer ${this.apiKey}`,
      "Content-Type": "application/json"
    };
    const response = await fetch(url.toString(), {
      method,
      headers,
      body: body ? JSON.stringify(body) : void 0
    });
    if (!response.ok) {
      // Prefer the parsed JSON error body; fall back to raw text.
      let responseBody;
      try {
        responseBody = await response.json();
      } catch {
        responseBody = await response.text();
      }
      throw new ApiError(
        response.status,
        `API request failed: ${response.status} ${response.statusText}`,
        responseBody
      );
    }
    return await response.json();
  }
  // Pipeline operations
  async listPipelines(limit = 50, offset = 0) {
    return this.request("GET", this.orgPath("/pipelines"), void 0, { limit, offset });
  }
  async getPipelineState(pipelineId) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/state`));
  }
  async getStageHealth(pipelineId, stageName) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/health`));
  }
  // Stage management
  async retryStage(pipelineId, stageName) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/retry`)
    );
  }
  async cancelStage(pipelineId, stageName, reason) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/cancel`),
      // Body is omitted entirely when no reason is given.
      reason ? { reason } : void 0
    );
  }
  async stopPipeline(pipelineId, reason) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stop`),
      reason ? { reason } : void 0
    );
  }
  async startPipeline(pipelineId) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/start`)
    );
  }
  async stopStagePromotion(pipelineId, stageName, reason) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/stop`),
      reason ? { reason } : void 0
    );
  }
  async startStagePromotion(pipelineId, stageName) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/start`)
    );
  }
  async bypassStageBlockers(pipelineId, stageName) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/stages/${stageName}/bypass-blockers`)
    );
  }
  // Debugging
  /**
   * Fetch logs for a single step execution.
   * NOTE(review): unlike the other stage endpoints this path has no
   * "/stages/" segment — confirm against the server's route definition.
   */
  async getStepLogs(pipelineId, stageName, revisionId, stepName, options) {
    return this.request(
      "GET",
      this.orgPath(
        `/pipelines/${pipelineId}/${stageName}/${revisionId}/${stepName}/logs`
      ),
      void 0,
      { limit: options?.limit, search: options?.search }
    );
  }
  async getPipelineEvents(pipelineId, options) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/events`), void 0, {
      limit: options?.limit,
      offset: options?.offset
    });
  }
  async listRevisions(pipelineId, options) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/revisions`), void 0, {
      limit: options?.limit,
      offset: options?.offset
    });
  }
  // Ephemeral environments
  async listEphemeralEnvironments(pipelineId) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments`));
  }
  /** Claim a session; the server responds 409 when the environment is locked. */
  async claimEphemeralSession(pipelineId, envName, commitId, context) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments/${envName}/sessions`),
      { commitId, context }
    );
  }
  /** Claim a session, overriding any existing lock. */
  async forceClaimEphemeralSession(pipelineId, envName, commitId, context) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments/${envName}/force-claim`),
      { commitId, context }
    );
  }
  async getEphemeralSessionStatus(pipelineId, envName, sessionId) {
    return this.request("GET", this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments/${envName}/sessions/${sessionId}`));
  }
  async releaseEphemeralSession(pipelineId, envName, sessionId) {
    return this.request(
      "DELETE",
      this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments/${envName}/sessions/${sessionId}`)
    );
  }
  async deployEphemeralCommit(pipelineId, envName, sessionId, commitId) {
    return this.request(
      "POST",
      this.orgPath(`/pipelines/${pipelineId}/ephemeral-environments/${envName}/sessions/${sessionId}/deploy`),
      { commitId }
    );
  }
  // Step type docs
  async listStepTypes() {
    return this.request("GET", this.orgPath("/steps"), void 0, { limit: 100 });
  }
  // Public (unauthenticated) endpoints
  async validatePipelineRemote(args) {
    return this.publicRequest("POST", "/api/v1/public/validate-pipeline", args);
  }
  /**
   * Unauthenticated variant of request(): no Authorization header, no
   * query-param handling. Same non-2xx -> ApiError behavior.
   */
  async publicRequest(method, path, body) {
    const url = new URL(path, this.baseUrl);
    const response = await fetch(url.toString(), {
      method,
      headers: { "Content-Type": "application/json" },
      body: body ? JSON.stringify(body) : void 0
    });
    if (!response.ok) {
      let responseBody;
      try {
        responseBody = await response.json();
      } catch {
        responseBody = await response.text();
      }
      throw new ApiError(
        response.status,
        `API request failed: ${response.status} ${response.statusText}`,
        responseBody
      );
    }
    return await response.json();
  }
};
252
/**
 * Minimal unauthenticated client used when no credentials are configured.
 * Only exposes the public pipeline-validation endpoint.
 */
var DevRampsPublicClient = class {
  baseUrl;
  constructor(apiUrl) {
    // Normalize away a single trailing slash so URL joining is predictable.
    this.baseUrl = apiUrl.replace(/\/$/, "");
  }
  /**
   * POST the pipeline YAML to the public validation endpoint.
   * @returns the parsed JSON validation result.
   * @throws {ApiError} on any non-2xx response.
   */
  async validatePipeline(args) {
    const endpoint = new URL("/api/v1/public/validate-pipeline", this.baseUrl);
    const response = await fetch(endpoint.toString(), {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(args)
    });
    if (response.ok) {
      return await response.json();
    }
    // Prefer a parsed JSON error body; fall back to raw text.
    let responseBody;
    try {
      responseBody = await response.json();
    } catch {
      responseBody = await response.text();
    }
    throw new ApiError(
      response.status,
      `API request failed: ${response.status} ${response.statusText}`,
      responseBody
    );
  }
};
280
+
281
+ // src/tools/validate-pipeline.ts
282
+ import { z } from "zod";
283
+ import { readFile } from "fs/promises";
284
+ import { resolve } from "path";
285
+ import { parse as parseYaml } from "yaml";
286
// Zod argument schema for the validate-pipeline MCP tool.
var ValidatePipelineSchema = z.object({
  file_path: z.string().describe(
    "Path to the pipeline definition YAML file (absolute or relative to cwd)"
  )
});
291
/**
 * MCP tool: validate a pipeline definition YAML file.
 *
 * Reads the file, runs a cheap local YAML syntax check, then submits the raw
 * YAML to the DevRamps validation API — authenticated when apiClient is
 * available, otherwise via the public client.
 *
 * Fix: the original dereferenced `result.errors.length` and
 * `result.warnings.length` unconditionally, so an API response omitting
 * either array crashed the tool instead of producing a report. Both are now
 * normalized with `?? []`.
 *
 * @param args - { file_path } per ValidatePipelineSchema.
 * @param apiClient - authenticated DevRampsApiClient, or null.
 * @param publicClient - unauthenticated DevRampsPublicClient fallback.
 * @returns MCP tool result; isError is true on read/parse/API/validation failure.
 */
async function validatePipeline(args, apiClient, publicClient) {
  const filePath = resolve(args.file_path);
  let fileContent;
  try {
    fileContent = await readFile(filePath, "utf-8");
  } catch (err) {
    return {
      content: [
        {
          type: "text",
          text: `Error reading file: ${filePath}
${err instanceof Error ? err.message : String(err)}`
        }
      ],
      isError: true
    };
  }
  // Local syntax check before a network round-trip; the parsed value is
  // discarded — the API receives the raw YAML text.
  try {
    parseYaml(fileContent);
  } catch (err) {
    return {
      content: [
        {
          type: "text",
          text: `YAML parsing error in ${filePath}:
${err instanceof Error ? err.message : String(err)}`
        }
      ],
      isError: true
    };
  }
  let result;
  // Prefer the authenticated client; fall back to the public endpoint.
  const remoteClient = apiClient || publicClient;
  try {
    const validateFn = "validatePipelineRemote" in remoteClient ? remoteClient.validatePipelineRemote.bind(
      remoteClient
    ) : remoteClient.validatePipeline.bind(
      remoteClient
    );
    result = await validateFn({ yaml: fileContent });
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    // Heuristic: auth failures surface as 401/403 in the ApiError message.
    if (message.includes("401") || message.includes("403")) {
      return {
        content: [
          {
            type: "text",
            text: `Authentication error during pipeline validation.

Run \`npx @devramps/cli login\` to authenticate, then try again.`
          }
        ],
        isError: true
      };
    }
    return {
      content: [
        {
          type: "text",
          text: `Could not reach the DevRamps API for pipeline validation.

Error: ${message}

Check your internet connection and try again. If the issue persists, run \`npx @devramps/cli login\` to refresh credentials.`
        }
      ],
      isError: true
    };
  }
  // Robustness: tolerate API responses that omit either array.
  const errors = result.errors ?? [];
  const warnings = result.warnings ?? [];
  const lines = [];
  if (result.valid && warnings.length === 0) {
    lines.push(`Pipeline definition is valid: ${filePath}`);
  } else if (result.valid) {
    lines.push(`Pipeline definition is valid with warnings: ${filePath}`);
  } else {
    lines.push(`Pipeline definition has errors: ${filePath}`);
  }
  if (errors.length > 0) {
    lines.push("");
    lines.push("Errors:");
    for (const error of errors) {
      lines.push(` - [${error.path || "root"}] ${error.message}`);
    }
  }
  if (warnings.length > 0) {
    lines.push("");
    lines.push("Warnings:");
    for (const warning of warnings) {
      lines.push(` - [${warning.path || "root"}] ${warning.message}`);
    }
  }
  if (result.valid) {
    lines.push("");
    lines.push("Next steps:");
    lines.push(
      " - Run `npx @devramps/cli bootstrap` in your project root to bootstrap the pipeline"
    );
    lines.push(" - Commit the .devramps/ directory and push to deploy");
    lines.push("");
    lines.push(
      "After deploying, you can use the `list-pipelines` and `get-pipeline-state` tools to monitor your pipeline, or ask the agent to debug any failing steps. View your pipelines in the dashboard: https://app.devramps.com/pipelines"
    );
  }
  return {
    content: [{ type: "text", text: lines.join("\n") }],
    isError: !result.valid
  };
}
399
+
400
+ // src/tools/scaffold-pipeline.ts
401
+ import { z as z2 } from "zod";
402
+ import { writeFile, mkdir, stat } from "fs/promises";
403
+ import { join as join2, resolve as resolve2 } from "path";
404
+ import { stringify as stringifyYaml } from "yaml";
405
// Zod argument schema for the scaffold-pipeline MCP tool.
var ScaffoldPipelineSchema = z2.object({
  project_path: z2.string().describe("Path to the project root directory"),
  pipeline_name: z2.string().describe(
    "Human-readable pipeline name. Will be converted to snake_case for the directory name under .devramps/"
  ),
  pipeline_definition: z2.record(z2.string(), z2.any()).describe(
    "The complete pipeline definition object to serialize to YAML. Must follow the DevRamps pipeline schema: { version: '1.0.0', pipeline: { cloud_provider: 'AWS', pipeline_updates_require_approval, stages, steps, artifacts, ... } }. See the pipeline-schema resource and scaffold-project prompt for the full structure."
  )
});
414
/**
 * Convert a human-readable name to snake_case for use as a directory name:
 * camelCase boundaries become underscores, whitespace/hyphen runs collapse
 * to a single underscore, and remaining non-[a-z0-9_] characters are dropped.
 */
function toSnakeCase(str) {
  const withBoundaries = str.replace(/([a-z])([A-Z])/g, "$1_$2");
  const underscored = withBoundaries.replace(/[\s\-]+/g, "_");
  return underscored.toLowerCase().replace(/[^a-z0-9_]/g, "");
}
417
/**
 * Post-process serialized pipeline YAML for readability by inserting blank
 * lines before structural boundaries:
 *  - every top-level key (indent 0);
 *  - known pipeline section keys at indent 2;
 *  - list items ("- name:") and nested mapping keys at indent 4, but only
 *    when the previous emitted line is at indent >= 4 (i.e. not directly
 *    after the section header).
 * Never inserts two blank lines in a row. Assumes 2-space YAML indentation
 * as produced by the `yaml` stringifier used upstream.
 */
function formatPipelineYaml(yaml) {
  const lines = yaml.split("\n");
  const result = [];
  // Section keys under `pipeline:` that should be visually separated.
  const pipelineSectionKeys = /* @__PURE__ */ new Set([
    "notifications:",
    "stage_defaults:",
    "stages:",
    "ephemeral_environments:",
    "steps:",
    "artifacts:"
  ]);
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const trimmed = line.trimStart();
    const indent = line.length - trimmed.length;
    // Look at the last line already emitted (may be an inserted blank).
    const prev = result.length > 0 ? result[result.length - 1] : "";
    const prevIsBlank = prev === "";
    if (i > 0 && !prevIsBlank) {
      if (indent === 0 && /^\w/.test(trimmed) && trimmed.includes(":")) {
        // New top-level key.
        result.push("");
      } else if (indent === 2 && pipelineSectionKeys.has(trimmed)) {
        // Known section under the top-level `pipeline:` mapping.
        result.push("");
      } else if (indent === 4 && trimmed.startsWith("- name:")) {
        // New named list entry; skip when directly after its section header.
        const prevIndent = prev.length - prev.trimStart().length;
        if (prevIndent >= 4) {
          result.push("");
        }
      } else if (indent === 4 && !trimmed.startsWith("-") && trimmed.endsWith(":")) {
        // New nested mapping key at entry depth.
        const prevIndent = prev.length - prev.trimStart().length;
        if (prevIndent >= 4) {
          result.push("");
        }
      }
    }
    result.push(line);
  }
  return result.join("\n");
}
455
/**
 * MCP tool: write a pipeline definition to
 * <project>/.devramps/<pipeline_slug>/pipeline.yaml.
 *
 * Validates that the pipeline name yields a non-empty snake_case slug and
 * that project_path exists and is a directory, serializes the definition to
 * YAML, applies formatPipelineYaml spacing, and writes the file (creating
 * directories as needed).
 *
 * @param args - per ScaffoldPipelineSchema.
 * @returns MCP tool result; isError is true for invalid name/path.
 */
async function scaffoldPipeline(args) {
  const projectPath = resolve2(args.project_path);
  const pipelineSlug = toSnakeCase(args.pipeline_name);
  if (!pipelineSlug) {
    return {
      content: [
        {
          type: "text",
          text: `Error: pipeline_name "${args.pipeline_name}" could not be converted to a valid snake_case directory name.`
        }
      ],
      isError: true
    };
  }
  const devrampsDirPath = join2(projectPath, ".devramps", pipelineSlug);
  try {
    const projectStat = await stat(projectPath);
    if (!projectStat.isDirectory()) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${projectPath} is not a directory.`
          }
        ],
        isError: true
      };
    }
  } catch {
    // stat() throws when the path does not exist at all.
    return {
      content: [
        {
          type: "text",
          text: `Error: project directory ${projectPath} does not exist.`
        }
      ],
      isError: true
    };
  }
  const rawYaml = stringifyYaml(args.pipeline_definition, {
    lineWidth: 120
  });
  // Insert blank lines between sections for readability.
  const yamlContent = formatPipelineYaml(rawYaml);
  await mkdir(devrampsDirPath, { recursive: true });
  const pipelineFilePath = join2(devrampsDirPath, "pipeline.yaml");
  await writeFile(pipelineFilePath, yamlContent, "utf-8");
  const lines = [
    `Pipeline scaffolded successfully for "${args.pipeline_name}"`,
    "",
    "Files created:",
    ` - ${pipelineFilePath}`,
    "",
    "Next steps:",
    " 1. Review the generated pipeline.yaml and verify the configuration",
    " 2. Run the validate-pipeline tool to check for any issues",
    " 3. If you have Terraform files, run generate-iam-policies to create IAM permissions",
    " 4. Run `npx @devramps/cli bootstrap` in your project root to bootstrap the pipeline",
    " 5. Commit the .devramps/ directory to your repository",
    " 6. Push to trigger your first deployment",
    "",
    "After deploying, you can ask the agent to check on your pipeline status,",
    "debug failing steps, or retry failed stages. View your pipelines at:",
    "https://app.devramps.com/pipelines"
  ];
  return {
    content: [{ type: "text", text: lines.join("\n") }]
  };
}
523
+
524
+ // src/tools/generate-iam-policies.ts
525
+ import { z as z3 } from "zod";
526
+ import { readdir, writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
527
+ import { join as join3, resolve as resolve3, extname } from "path";
528
// Zod argument schema for the generate-iam-policies MCP tool.
var GenerateIamPoliciesSchema = z3.object({
  terraform_dir: z3.string().describe("Path to the directory containing Terraform files"),
  pipeline_name: z3.string().describe(
    "Pipeline name (must match the pipeline_name used in scaffold-pipeline). Used to determine the output directory under .devramps/<pipeline_name_snake_case>/"
  ),
  project_path: z3.string().optional().describe(
    "Path to the project root directory. Defaults to the parent of terraform_dir if not provided."
  ),
  iam_policy: z3.array(
    z3.object({
      Version: z3.string().optional(),
      Statement: z3.array(z3.any())
    })
  ).describe(
    "The IAM policy document array to write. The agent should analyze the Terraform files and construct a comprehensive IAM policy covering all AWS resources and actions needed. Format: [{ Version: '2012-10-17', Statement: [{ Sid, Effect, Action, Resource }] }]. Be broad rather than narrow \u2014 use service-level wildcards (e.g., 'ecs:*', 's3:*') to avoid first-deploy failures from missing permissions. Always include 'iam:CreateServiceLinkedRole' if using ECS or ELB. The user can tighten permissions later."
  )
});
545
/**
 * Convert a human-readable name to snake_case (bundler-duplicated copy of
 * toSnakeCase from src/tools/scaffold-pipeline.ts; behavior is identical).
 */
function toSnakeCase2(str) {
  let out = str.replace(/([a-z])([A-Z])/g, "$1_$2");
  out = out.replace(/[\s\-]+/g, "_").toLowerCase();
  return out.replace(/[^a-z0-9_]/g, "");
}
548
/**
 * MCP tool: write an agent-supplied IAM policy document to
 * <root>/.devramps/<pipeline_slug>/aws_additional_iam_policies.json.
 *
 * Scans terraform_dir for .tf files only to confirm Terraform is present and
 * to report a file count — the policy content itself comes from args.iam_policy.
 * The output root is project_path when given, otherwise terraform_dir's parent.
 *
 * @param args - per GenerateIamPoliciesSchema.
 * @returns MCP tool result; isError on bad name, unreadable dir, no .tf
 *   files, or write failure.
 */
async function generateIamPolicies(args) {
  const terraformDir = resolve3(args.terraform_dir);
  const pipelineSlug = toSnakeCase2(args.pipeline_name);
  if (!pipelineSlug) {
    return {
      content: [
        {
          type: "text",
          text: `Error: pipeline_name "${args.pipeline_name}" could not be converted to a valid snake_case directory name.`
        }
      ],
      isError: true
    };
  }
  let files;
  try {
    // NOTE(review): { recursive: true } for readdir requires a recent Node
    // runtime — confirm the package's supported Node range.
    const entries = await readdir(terraformDir, { recursive: true });
    files = entries.filter((f) => typeof f === "string" && extname(f) === ".tf").map((f) => join3(terraformDir, f));
  } catch (err) {
    return {
      content: [
        {
          type: "text",
          text: `Error reading Terraform directory: ${terraformDir}
${err instanceof Error ? err.message : String(err)}`
        }
      ],
      isError: true
    };
  }
  if (files.length === 0) {
    return {
      content: [
        {
          type: "text",
          text: `No .tf files found in ${terraformDir}`
        }
      ],
      isError: true
    };
  }
  let outputPath;
  if (args.project_path) {
    outputPath = join3(
      resolve3(args.project_path),
      ".devramps",
      pipelineSlug,
      "aws_additional_iam_policies.json"
    );
  } else {
    // Default root: the parent directory of terraform_dir.
    outputPath = join3(
      resolve3(terraformDir, ".."),
      ".devramps",
      pipelineSlug,
      "aws_additional_iam_policies.json"
    );
  }
  // join(path, "..") normalizes away the filename, yielding the directory.
  const outputDir = join3(outputPath, "..");
  await mkdir2(outputDir, { recursive: true });
  try {
    await writeFile2(
      outputPath,
      JSON.stringify(args.iam_policy, null, 2),
      "utf-8"
    );
  } catch (err) {
    return {
      content: [
        {
          type: "text",
          text: `Error writing output file: ${outputPath}
${err instanceof Error ? err.message : String(err)}`
        }
      ],
      isError: true
    };
  }
  // Total number of statements across all policy documents.
  const statementCount = args.iam_policy.reduce(
    (sum, p) => sum + (p.Statement?.length || 0),
    0
  );
  const lines = [
    `IAM policies written from analysis of ${files.length} Terraform file(s)`,
    `Output: ${outputPath}`,
    "",
    `Generated ${statementCount} IAM policy statement(s).`,
    "",
    "Important: Review the generated policy and adjust if needed.",
    "Consider narrowing wildcard resources to specific ARNs for production use.",
    "",
    "Next steps:",
    " 1. Review the generated IAM policy file",
    " 2. Run `npx @devramps/cli bootstrap` in your project root to apply the permissions",
    "",
    "After deploying, you can ask the agent to check on your pipeline status,",
    "debug failing steps, or retry failed stages. View your pipelines at:",
    "https://app.devramps.com/pipelines"
  ];
  return {
    content: [{ type: "text", text: lines.join("\n") }]
  };
}
650
+
651
+ // src/tools/ephemeral-tools.ts
652
+ import { z as z4 } from "zod";
653
/**
 * Render any thrown value as a human-readable string. ApiError instances
 * include the status code and a pretty-printed response body; other Errors
 * use their message; everything else is stringified.
 */
function formatApiError(err) {
  if (!(err instanceof ApiError)) {
    return err instanceof Error ? err.message : String(err);
  }
  const raw = err.responseBody;
  const body = raw && typeof raw === "object" ? JSON.stringify(raw, null, 2) : String(raw ?? "");
  let text = `API Error (${err.statusCode}): ${err.message}`;
  if (body) {
    text += `\n${body}`;
  }
  return text;
}
661
/**
 * Build an MCP text tool result. The isError key is only present on the
 * returned object when true, so success results stay flag-free.
 */
function textResult(text, isError = false) {
  const result = { content: [{ type: "text", text }] };
  if (isError) {
    result.isError = true;
  }
  return result;
}
667
// Zod argument schema for the list-ephemeral-environments MCP tool.
var ListEphemeralEnvironmentsSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID to list ephemeral environments for")
});
670
/**
 * MCP tool: list a pipeline's ephemeral environments with lock status.
 * Renders each environment's region/account, trigger events, and — when
 * locked — who holds the lock, the session ID, and the claim time.
 * API failures are formatted via formatApiError and returned as an error
 * result rather than thrown.
 */
async function listEphemeralEnvironments(client, args) {
  try {
    const result = await client.listEphemeralEnvironments(args.pipeline_id);
    const envs = result.environments;
    if (envs.length === 0) {
      return textResult(
        "No ephemeral environments configured for this pipeline."
      );
    }
    const lines = [`Ephemeral Environments (${envs.length}):`, ""];
    for (const env of envs) {
      // A present activeLock object means the environment is claimed.
      const status = env.activeLock ? "LOCKED" : "AVAILABLE";
      const triggers = env.triggerConfig.map((t) => t.on).join(", ");
      lines.push(`- ${env.name} [${status}]`);
      lines.push(` Region: ${env.region} | Account: ${env.accountId}`);
      lines.push(` Triggers: ${triggers}`);
      if (env.activeLock) {
        lines.push(` Locked by: ${env.activeLock.lockedBy} (${env.activeLock.lockType})`);
        lines.push(` Session: ${env.activeLock.sessionId}`);
        lines.push(` Claimed: ${env.activeLock.claimedAt}`);
      }
      lines.push("");
    }
    return textResult(lines.join("\n"));
  } catch (err) {
    return textResult(formatApiError(err), true);
  }
}
698
// Zod argument schema for the claim-ephemeral-session MCP tool.
var ClaimEphemeralSessionSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID"),
  environment_name: z4.string().describe("The ephemeral environment name (e.g. 'agent-env')"),
  commit_id: z4.string().describe("The commit SHA to deploy"),
  context: z4.string().optional().describe(
    "Optional context describing why the session is being claimed (e.g. 'testing feature X')"
  )
});
706
/**
 * MCP tool: claim an ephemeral environment session and trigger an initial
 * deployment of the given commit.
 *
 * A 409 ApiError means the environment is already locked; in that case the
 * existing lock details (when the response body includes existingLock) are
 * reported along with a pointer to force-claim-ephemeral-session. All other
 * failures are formatted via formatApiError.
 */
async function claimEphemeralSession(client, args) {
  try {
    const result = await client.claimEphemeralSession(
      args.pipeline_id,
      args.environment_name,
      args.commit_id,
      args.context
    );
    return textResult(
      [
        "Session claimed successfully!",
        "",
        `Session ID: ${result.sessionId}`,
        `Status: ${result.lock.status}`,
        `Claimed at: ${result.lock.claimedAt}`,
        "",
        "An initial deployment has been triggered for the provided commit.",
        `Use get-ephemeral-session-status with session_id "${result.sessionId}" to monitor deployment progress.`
      ].join("\n")
    );
  } catch (err) {
    // 409 Conflict: the environment is held by another session.
    if (err instanceof ApiError && err.statusCode === 409) {
      const body = err.responseBody;
      const existing = body?.existingLock;
      const lines = [
        "Environment is currently locked by another session.",
        ""
      ];
      if (existing) {
        lines.push(`Existing session: ${existing.sessionId}`);
        lines.push(`Locked by: ${existing.lockedBy} (${existing.lockType})`);
        lines.push(`Claimed at: ${existing.claimedAt}`);
        lines.push("");
        lines.push(
          "Use force-claim-ephemeral-session to override the existing lock."
        );
      }
      return textResult(lines.join("\n"), true);
    }
    return textResult(formatApiError(err), true);
  }
}
748
// Zod argument schema for the force-claim-ephemeral-session MCP tool.
var ForceClaimEphemeralSessionSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID"),
  environment_name: z4.string().describe("The ephemeral environment name (e.g. 'agent-env')"),
  commit_id: z4.string().optional().describe("Optional commit SHA to deploy immediately after claiming"),
  context: z4.string().optional().describe("Optional context for the session")
});
754
/**
 * MCP tool: force-claim an ephemeral environment session, releasing any
 * existing lock. When a commit_id is supplied, the claim also triggers a
 * deployment and the output points at get-ephemeral-session-status for
 * monitoring. API failures become formatted error results.
 */
async function forceClaimEphemeralSession(client, args) {
  const { pipeline_id, environment_name, commit_id, context } = args;
  try {
    const result = await client.forceClaimEphemeralSession(
      pipeline_id,
      environment_name,
      commit_id,
      context
    );
    const output = [
      "Session force-claimed successfully! Any previous session has been released.",
      "",
      `Session ID: ${result.sessionId}`,
      `Status: ${result.lock.status}`,
      `Claimed at: ${result.lock.claimedAt}`
    ];
    if (commit_id) {
      output.push(
        "",
        "A deployment has been triggered for the provided commit.",
        `Use get-ephemeral-session-status with session_id "${result.sessionId}" to monitor progress.`
      );
    }
    return textResult(output.join("\n"));
  } catch (err) {
    return textResult(formatApiError(err), true);
  }
}
781
// Zod argument schema for the deploy-ephemeral-commit MCP tool.
var DeployEphemeralCommitSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID"),
  environment_name: z4.string().describe("The ephemeral environment name"),
  session_id: z4.string().describe("The session ID from claim-ephemeral-session"),
  commit_id: z4.string().describe("The commit SHA to deploy")
});
787
/**
 * MCP tool: deploy a specific commit into an already-claimed ephemeral
 * session. Reports the build/deploy deployment IDs returned by the API;
 * API failures become formatted error results.
 */
async function deployEphemeralCommit(client, args) {
  const { pipeline_id, environment_name, session_id, commit_id } = args;
  try {
    const result = await client.deployEphemeralCommit(
      pipeline_id,
      environment_name,
      session_id,
      commit_id
    );
    const message = [
      "Deployment triggered!",
      "",
      `Build deployment ID: ${result.buildDeploymentId}`,
      `Deploy deployment ID: ${result.deployDeploymentId}`,
      "",
      `Use get-ephemeral-session-status with session_id "${session_id}" to monitor progress.`
    ].join("\n");
    return textResult(message);
  } catch (err) {
    return textResult(formatApiError(err), true);
  }
}
809
// Zod argument schema for the get-ephemeral-session-status MCP tool.
var GetEphemeralSessionStatusSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID"),
  environment_name: z4.string().describe("The ephemeral environment name"),
  session_id: z4.string().describe("The session ID to check")
});
814
/**
 * MCP tool: report the status of an ephemeral environment session.
 * Renders the lock state (including release info when released and an
 * override warning when another claim displaced this session), then each
 * deployment with its commit (short SHA), status, timing, cause, and
 * per-step statuses. API failures become formatted error results.
 */
async function getEphemeralSessionStatus(client, args) {
  try {
    const result = await client.getEphemeralSessionStatus(
      args.pipeline_id,
      args.environment_name,
      args.session_id
    );
    const lines = [
      "Session Status:",
      ` Status: ${result.lock.status}`,
      ` Type: ${result.lock.lockType}`,
      ` Locked by: ${result.lock.lockedBy}`,
      ` Claimed: ${result.lock.claimedAt}`
    ];
    if (result.lock.releasedAt) {
      lines.push(` Released: ${result.lock.releasedAt}`);
      lines.push(` Release reason: ${result.lock.releaseReason}`);
    }
    if (result.overridden) {
      lines.push("");
      lines.push("[!] This session was overridden by another claim.");
    }
    lines.push("");
    if (result.deployments.length === 0) {
      lines.push("Deployments: None");
    } else {
      lines.push(`Deployments (${result.deployments.length}):`);
      for (const deployment of result.deployments) {
        lines.push("");
        // Short 8-character commit SHA for readability.
        lines.push(` Commit: ${deployment.commitId.substring(0, 8)}`);
        lines.push(` Status: ${deployment.status}`);
        if (deployment.startedAt) {
          lines.push(` Started: ${deployment.startedAt}`);
        }
        if (deployment.finishedAt) {
          lines.push(` Finished: ${deployment.finishedAt}`);
        }
        if (deployment.cause) {
          lines.push(` Cause: ${deployment.cause}`);
        }
        if (deployment.stepStatuses.length > 0) {
          lines.push(" Steps:");
          for (const step of deployment.stepStatuses) {
            let stepLine = ` [${step.stage}] ${step.name}: ${step.status}`;
            if (step.error) {
              stepLine += ` - ${step.error}`;
            }
            lines.push(stepLine);
          }
        }
      }
    }
    return textResult(lines.join("\n"));
  } catch (err) {
    return textResult(formatApiError(err), true);
  }
}
871
// Zod argument schema for the release-ephemeral-session MCP tool.
var ReleaseEphemeralSessionSchema = z4.object({
  pipeline_id: z4.string().uuid().describe("The pipeline ID"),
  environment_name: z4.string().describe("The ephemeral environment name"),
  session_id: z4.string().describe("The session ID to release")
});
876
/**
 * MCP tool: release a claimed ephemeral session. The success message
 * reflects the API's behavior of cancelling in-progress deployments on
 * release; API failures become formatted error results.
 */
async function releaseEphemeralSession(client, args) {
  const { pipeline_id, environment_name, session_id } = args;
  try {
    await client.releaseEphemeralSession(pipeline_id, environment_name, session_id);
  } catch (err) {
    return textResult(formatApiError(err), true);
  }
  return textResult(
    "Session released successfully. Any in-progress deployments have been cancelled."
  );
}
890
+
891
+ // src/tools/pipeline-tools.ts
892
+ import { z as z5 } from "zod";
893
/**
 * Render a thrown value as a display string (bundler-duplicated copy of
 * formatApiError for src/tools/pipeline-tools.ts; behavior is identical).
 */
function formatApiError2(err) {
  if (err instanceof ApiError) {
    const raw = err.responseBody;
    const body = raw && typeof raw === "object" ? JSON.stringify(raw, null, 2) : String(raw ?? "");
    const suffix = body ? `\n${body}` : "";
    return `API Error (${err.statusCode}): ${err.message}${suffix}`;
  }
  if (err instanceof Error) {
    return err.message;
  }
  return String(err);
}
901
/**
 * Wrap plain text in the MCP tool-result shape (bundler-duplicated copy of
 * textResult). The isError key is included only for failures.
 */
function textResult2(text, isError = false) {
  return isError
    ? { content: [{ type: "text", text }], isError: true }
    : { content: [{ type: "text", text }] };
}
907
// Zod argument schema for the list-pipelines MCP tool.
var ListPipelinesSchema = z5.object({
  limit: z5.number().int().min(1).max(100).default(50).optional().describe("Maximum number of pipelines to return (1-100, default 50)"),
  offset: z5.number().int().min(0).default(0).optional().describe("Number of pipelines to skip for pagination")
});
911
+ async function listPipelines(client, args) {
912
+ try {
913
+ const result = await client.listPipelines(args.limit, args.offset);
914
+ const lines = [
915
+ `Pipelines (${result.pagination.total} total, showing ${result.pipelines.length}):`,
916
+ ""
917
+ ];
918
+ if (result.pipelines.length === 0) {
919
+ lines.push("No pipelines found.");
920
+ }
921
+ for (const pipeline of result.pipelines) {
922
+ const status = pipeline.stopped ? "STOPPED" : pipeline.blocked ? "BLOCKED" : pipeline.failedStages.length > 0 ? "FAILED" : "ACTIVE";
923
+ lines.push(`- ${pipeline.name} (${pipeline.slug})`);
924
+ lines.push(` ID: ${pipeline.id}`);
925
+ lines.push(` Status: ${status}`);
926
+ lines.push(` Source: ${pipeline.source}`);
927
+ if (pipeline.failedStages.length > 0) {
928
+ lines.push(` Failed stages: ${pipeline.failedStages.join(", ")}`);
929
+ }
930
+ if (pipeline.lastDeploymentAt) {
931
+ lines.push(` Last deployment: ${pipeline.lastDeploymentAt}`);
932
+ }
933
+ lines.push("");
934
+ }
935
+ if (result.pagination.hasMore) {
936
+ lines.push(
937
+ `Page ${Math.floor((args.offset ?? 0) / (args.limit ?? 50)) + 1} \u2014 use offset=${(args.offset ?? 0) + (args.limit ?? 50)} for the next page.`
938
+ );
939
+ }
940
+ return textResult2(lines.join("\n"));
941
+ } catch (err) {
942
+ return textResult2(formatApiError2(err), true);
943
+ }
944
+ }
945
// Input schema for the get_pipeline_state tool.
var GetPipelineStateSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID")
});
/**
 * Fetches the full state of one pipeline and renders it as an indented
 * text report: pipeline flags, then per-stage status (revisions, blockers,
 * alarm, step buckets, timestamps), then the set of active revisions.
 * API errors are returned as error-flavored text results, not thrown.
 */
async function getPipelineState(client, args) {
  try {
    const state = await client.getPipelineState(args.pipeline_id);
    const lines = [
      `Pipeline: ${state.pipeline.name} (${state.pipeline.slug})`,
      `ID: ${state.pipeline.id}`,
      `Stopped: ${state.pipeline.stopped}`,
      `Blocked: ${state.pipeline.blocked}`,
      "",
      "Stages:"
    ];
    for (const stage of state.stages) {
      lines.push(`  ${stage.name}: ${stage.status}`);
      // void 0 === undefined: a revision id of 0/null is still printed.
      if (stage.activeRevisionId !== void 0) {
        lines.push(`    Active revision: ${stage.activeRevisionId}`);
      }
      if (stage.lastSuccessfulRevisionId !== void 0) {
        lines.push(`    Last successful revision: ${stage.lastSuccessfulRevisionId}`);
      }
      if (stage.blockers.length > 0) {
        lines.push(
          `    Blockers: ${stage.blockers.map((b) => `${b.type}${b.message ? ` (${b.message})` : ""}`).join(", ")}`
        );
      }
      if (stage.alarmStatus?.state) {
        lines.push(`    Alarm: ${stage.alarmStatus.state} (${stage.alarmStatus.alarmName})`);
      }
      // Bucket steps by status so only noteworthy ones are reported.
      const failedSteps = stage.steps.filter((s) => s.status === "FAILED");
      const inProgressSteps = stage.steps.filter((s) => s.status === "IN_PROGRESS");
      const pendingApproval = stage.steps.filter((s) => s.status === "PENDING_APPROVAL");
      if (failedSteps.length > 0) {
        lines.push(
          `    Failed steps: ${failedSteps.map((s) => `${s.name}${s.cause ? ` (${s.cause})` : ""}`).join(", ")}`
        );
      }
      if (inProgressSteps.length > 0) {
        lines.push(`    In progress: ${inProgressSteps.map((s) => s.name).join(", ")}`);
      }
      if (pendingApproval.length > 0) {
        lines.push(`    Pending approval: ${pendingApproval.map((s) => s.name).join(", ")}`);
      }
      if (stage.startedAt) {
        lines.push(`    Started: ${stage.startedAt}`);
      }
      if (stage.finishedAt) {
        lines.push(`    Finished: ${stage.finishedAt}`);
      }
      lines.push("");
    }
    if (state.presentRevisions.length > 0) {
      lines.push(
        `Active revisions: ${state.presentRevisions.map((r) => r.revisionId).join(", ")}`
      );
    }
    return textResult2(lines.join("\n"));
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1007
// Input schema for the get_stage_health tool.
var GetStageHealthSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name")
});
/**
 * Reports 30-day deployment health for one stage: success rate, deployment
 * counts, and p50/p90/max execution times ("N/A" when a metric is null).
 */
async function getStageHealth(client, args) {
  try {
    const health = await client.getStageHealth(args.pipeline_id, args.stage_name);
    // Render "<n>s", or "N/A" when the metric is absent (null).
    const seconds = (value) => value !== null ? `${value}s` : "N/A";
    const lines = [
      `Stage Health: ${args.stage_name}`,
      "",
      `Success rate: ${health.successRate}%`,
      `Total deployments (30d): ${health.totalDeployments30d}`,
      `Successful (30d): ${health.successfulDeployments30d}`,
      `Failed (30d): ${health.failedDeployments30d}`,
      "",
      `Execution times:`,
      `  p50: ${seconds(health.p50ExecutionTimeSec)}`,
      `  p90: ${seconds(health.p90ExecutionTimeSec)}`,
      `  max: ${seconds(health.maxExecutionTimeSec)}`
    ];
    return textResult2(lines.join("\n"));
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1032
// Input schema for the retry_stage tool.
var RetryStageSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name to retry")
});
/** Requests a retry of a stage and reports whether the API accepted it. */
async function retryStage(client, args) {
  try {
    const { success } = await client.retryStage(args.pipeline_id, args.stage_name);
    if (success) {
      return textResult2(`Stage "${args.stage_name}" retry initiated successfully.`);
    }
    return textResult2(`Failed to retry stage "${args.stage_name}".`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1046
// Input schema for the cancel_stage tool.
var CancelStageSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name to cancel"),
  reason: z5.string().optional().describe("Reason for cancellation")
});
/** Requests cancellation of an in-progress stage deployment. */
async function cancelStage(client, args) {
  try {
    const result = await client.cancelStage(args.pipeline_id, args.stage_name, args.reason);
    if (result.success) {
      return textResult2(`Stage "${args.stage_name}" cancellation initiated. ${result.message}`);
    }
    return textResult2(`Failed to cancel stage: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1065
// Input schema for the stop_pipeline tool.
var StopPipelineSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  reason: z5.string().optional().describe("Reason for stopping the pipeline")
});
/** Stops all promotion through a pipeline, with an optional audit reason. */
async function stopPipeline(client, args) {
  try {
    const result = await client.stopPipeline(args.pipeline_id, args.reason);
    if (result.success) {
      return textResult2(`Pipeline stopped. ${result.message}`);
    }
    return textResult2(`Failed to stop pipeline: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1079
// Input schema for the start_pipeline tool.
var StartPipelineSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID")
});
/** Resumes a stopped pipeline. */
async function startPipeline(client, args) {
  try {
    const result = await client.startPipeline(args.pipeline_id);
    if (result.success) {
      return textResult2(`Pipeline resumed. ${result.message}`);
    }
    return textResult2(`Failed to start pipeline: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1092
// Input schema for the stop_stage_promotion tool.
var StopStagePromotionSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name"),
  reason: z5.string().optional().describe("Reason for stopping promotion")
});
/** Halts promotion into a single stage, with an optional audit reason. */
async function stopStagePromotion(client, args) {
  try {
    const result = await client.stopStagePromotion(args.pipeline_id, args.stage_name, args.reason);
    if (result.success) {
      return textResult2(`Promotion stopped for stage "${args.stage_name}". ${result.message}`);
    }
    return textResult2(`Failed to stop promotion: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1111
// Input schema for the start_stage_promotion tool.
var StartStagePromotionSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name")
});
/** Resumes promotion into a single stage. */
async function startStagePromotion(client, args) {
  try {
    const result = await client.startStagePromotion(args.pipeline_id, args.stage_name);
    if (result.success) {
      return textResult2(`Promotion resumed for stage "${args.stage_name}". ${result.message}`);
    }
    return textResult2(`Failed to start promotion: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1128
// Input schema for the bypass_stage_blockers tool.
var BypassStageBlockersSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name")
});
/**
 * Bypasses the blockers currently holding back a stage. The success message
 * carries an explicit warning because this skips safety checks.
 */
async function bypassStageBlockers(client, args) {
  try {
    const result = await client.bypassStageBlockers(args.pipeline_id, args.stage_name);
    if (result.success) {
      return textResult2(
        `Blockers bypassed for stage "${args.stage_name}". ${result.message}\n\nWarning: Bypassing blockers skips safety checks (manual approvals, time windows, etc). Ensure this is intentional.`
      );
    }
    return textResult2(`Failed to bypass blockers: ${result.message}`);
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1147
// Input schema for the get_step_logs tool.
var GetStepLogsSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  stage_name: z5.string().describe("Stage name"),
  revision_id: z5.string().describe("Revision ID (numeric)"),
  step_name: z5.string().describe("Step name"),
  limit: z5.number().int().min(1).max(1e3).default(100).optional().describe("Maximum number of log lines to return"),
  search: z5.string().optional().describe("Search string to filter logs"),
});
/**
 * Fetches execution logs for one step of one revision and renders them as
 * "[timestamp] [stream] data" lines, with an overflow hint when truncated.
 */
async function getStepLogs(client, args) {
  try {
    const options = { limit: args.limit, search: args.search };
    const result = await client.getStepLogs(
      args.pipeline_id,
      args.stage_name,
      args.revision_id,
      args.step_name,
      options
    );
    const { logs, pagination } = result;
    if (logs.length === 0) {
      return textResult2("No logs found for this step.");
    }
    const lines = [
      `Logs for step "${args.step_name}" (stage: ${args.stage_name}, revision: ${args.revision_id}):`,
      `Showing ${logs.length} of ${pagination.total} entries`,
      ""
    ];
    for (const log of logs) {
      lines.push(`[${log.timestamp}] [${log.stream}] ${log.data}`);
    }
    if (pagination.hasMore) {
      lines.push("", "... more logs available. Increase limit to see more.");
    }
    return textResult2(lines.join("\n"));
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1183
// Input schema for the get_pipeline_events tool.
var GetPipelineEventsSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  limit: z5.number().int().min(1).max(100).default(50).optional().describe("Maximum number of events to return"),
  offset: z5.number().int().min(0).default(0).optional().describe("Number of events to skip for pagination")
});
/**
 * Lists pipeline events, one pipe-delimited line per event, including
 * stage/step context and an abbreviated commit id where available.
 */
async function getPipelineEvents(client, args) {
  try {
    const result = await client.getPipelineEvents(args.pipeline_id, {
      limit: args.limit,
      offset: args.offset
    });
    if (result.events.length === 0) {
      return textResult2("No events found for this pipeline.");
    }
    const lines = [
      `Pipeline Events (${result.pagination.total} total, showing ${result.events.length}):`,
      ""
    ];
    for (const event of result.events) {
      const segments = [`[${event.createdAt}] ${event.eventType}`];
      if (event.stageName) {
        segments.push(`stage: ${event.stageName}`);
      }
      if (event.stepName) {
        segments.push(`step: ${event.stepName}`);
      }
      segments.push(`rev: ${event.revisionId}`);
      if (event.commitId) {
        // Eight hex chars are enough to identify the commit in output.
        segments.push(`commit: ${event.commitId.slice(0, 8)}`);
      }
      lines.push(segments.join(" | "));
    }
    if (result.pagination.hasMore) {
      lines.push("", `Use offset=${(args.offset ?? 0) + (args.limit ?? 50)} for the next page.`);
    }
    return textResult2(lines.join("\n"));
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1220
// Input schema for the list_revisions tool.
var ListRevisionsSchema = z5.object({
  pipeline_id: z5.string().uuid().describe("Pipeline ID"),
  limit: z5.number().int().min(1).max(100).default(20).optional().describe("Maximum number of revisions to return"),
  offset: z5.number().int().min(0).default(0).optional().describe("Number of revisions to skip for pagination")
});
/**
 * Lists revisions for a pipeline: abbreviated commit id, optional commit
 * message, stages deployed so far, and creation time.
 */
async function listRevisions(client, args) {
  try {
    const result = await client.listRevisions(args.pipeline_id, {
      limit: args.limit,
      offset: args.offset
    });
    if (result.revisions.length === 0) {
      return textResult2("No revisions found for this pipeline.");
    }
    const lines = [
      `Revisions (${result.pagination.total} total, showing ${result.revisions.length}):`,
      ""
    ];
    for (const rev of result.revisions) {
      const commitSuffix = rev.commitMessage ? ` \u2014 ${rev.commitMessage}` : "";
      const deployed = rev.stagesDeployed.length > 0 ? rev.stagesDeployed.join(", ") : "none";
      lines.push(`Revision #${rev.revisionId}`);
      lines.push(`  Commit: ${rev.commitId.slice(0, 8)}${commitSuffix}`);
      lines.push(`  Stages deployed: ${deployed}`);
      lines.push(`  Created: ${rev.createdAt}`);
      lines.push("");
    }
    if (result.pagination.hasMore) {
      lines.push(`Use offset=${(args.offset ?? 0) + (args.limit ?? 20)} for the next page.`);
    }
    return textResult2(lines.join("\n"));
  } catch (err) {
    return textResult2(formatApiError2(err), true);
  }
}
1257
+
1258
+ // src/schemas/pipeline-definition.ts
1259
+ import { z as z6 } from "zod";
1260
// Allowed deployment windows for a stage (enforced server-side).
var DeploymentTimeWindowSchema = z6.enum([
  "NONE",
  "PACIFIC_WORKING_HOURS",
  "PACIFIC_WORKING_HOURS_REDUCED"
]);
// When pipeline-definition changes themselves require a manual approval.
var RequireApprovalTypeSchema = z6.enum([
  "ALWAYS",
  "NEVER",
  "DESTRUCTIVE_CHANGES_ONLY"
]);
// Built-in step types recognized by DevRamps (custom types are also allowed;
// StepSchema deliberately keeps `type` a free string).
var KNOWN_STEP_TYPES = [
  "DEVRAMPS:TERRAFORM:SYNTHESIZE",
  "DEVRAMPS:ECS:DEPLOY",
  "DEVRAMPS:EKS:DEPLOY",
  "DEVRAMPS:EKS:HELM",
  "DEVRAMPS:LAMBDA:DEPLOY",
  "DEVRAMPS:LAMBDA:INVOKE",
  "DEVRAMPS:EC2:DEPLOY",
  "DEVRAMPS:CODEDEPLOY:DEPLOY",
  "DEVRAMPS:S3:UPLOAD",
  "DEVRAMPS:CLOUDFRONT:INVALIDATE",
  "DEVRAMPS:SCRIPT:EXECUTE",
  "DEVRAMPS:DATABASE:MIGRATE",
  "DEVRAMPS:APPROVAL:BAKE",
  "DEVRAMPS:APPROVAL:TEST",
  "DEVRAMPS:APPROVAL:MANUAL"
];
// Built-in artifact (build) types.
var KNOWN_ARTIFACT_TYPES = [
  "DEVRAMPS:DOCKER:BUILD",
  "DEVRAMPS:DOCKER:IMPORT",
  "DEVRAMPS:BUNDLE:BUILD",
  "DEVRAMPS:BUNDLE:IMPORT"
];
// Event keys that notifications (e.g. Slack) can subscribe to.
var NotificationEventKeySchema = z6.enum([
  "stage_succeeded",
  "stage_failed",
  "approval_required",
  "stage_auto_rolled_back",
  "stage_rolled_back",
  "stage_rolled_forward"
]);
// Slack notification target: a channel plus at least one subscribed event.
var SlackNotificationConfigSchema = z6.object({
  channel: z6.string(),
  events: z6.array(NotificationEventKeySchema).min(1)
});
var NotificationsSchema = z6.object({
  slack: SlackNotificationConfigSchema.optional()
});
// Defaults inherited by every stage unless overridden per-stage.
var StageDefaultsSchema = z6.object({
  deployment_time_window: DeploymentTimeWindowSchema.optional()
});
// One deployment stage: target account/region plus optional overrides.
var StageSchema = z6.object({
  name: z6.string(),
  account_id: z6.string(),
  region: z6.string(),
  deployment_time_window: DeploymentTimeWindowSchema.optional(),
  vars: z6.record(z6.string(), z6.any()).optional(),
  auto_rollback_alarm_name: z6.string().optional(),
  skip: z6.array(z6.string()).optional()
});
// A pipeline step. `.passthrough()` keeps unknown keys so newer/custom step
// fields are not stripped during validation.
var StepSchema = z6.object({
  type: z6.string(),
  name: z6.string(),
  id: z6.string().optional(),
  params: z6.any().optional(),
  goes_after: z6.array(z6.string()).optional(),
  host_size: z6.enum(["small", "medium", "large"]).optional(),
  architecture: z6.enum(["linux/amd64", "linux/arm64"]).optional(),
  dependencies: z6.array(z6.string()).optional()
}).passthrough();
// An artifact build definition; also passthrough for forward compatibility.
var ArtifactDefinitionSchema = z6.object({
  type: z6.string(),
  id: z6.string().optional(),
  params: z6.any().optional(),
  goes_after: z6.array(z6.string()).optional(),
  rebuild_when_changed: z6.array(z6.string()).optional(),
  per_stage: z6.boolean().optional(),
  host_size: z6.enum(["small", "medium", "large"]).optional(),
  architecture: z6.enum(["linux/amd64", "linux/arm64"]).optional(),
  dependencies: z6.array(z6.string()).optional(),
  envs: z6.record(z6.string(), z6.any()).optional()
}).passthrough();
// Map of display name -> artifact definition.
var ArtifactsMapSchema = z6.record(
  z6.string(),
  ArtifactDefinitionSchema
);
// How an ephemeral environment is created (PR open or explicit API call).
var EphemeralTriggerSchema = z6.object({
  on: z6.enum(["pull_request", "api"]),
  target_branches: z6.array(z6.string()).optional()
}).passthrough();
// How an ephemeral environment is torn down; `after_idle_hours` applies to
// the "timeout" release type.
var EphemeralReleaseSchema = z6.object({
  on: z6.enum(["pr_closed", "api", "timeout"]),
  after_idle_hours: z6.number().optional()
}).passthrough();
// A named ephemeral environment: at least one trigger and one release rule.
var EphemeralEnvironmentSchema = z6.object({
  triggers: z6.array(EphemeralTriggerSchema).min(1),
  release: z6.array(EphemeralReleaseSchema).min(1),
  on_release: z6.enum(["deploy_main"]).optional(),
  account_id: z6.string(),
  region: z6.string(),
  skip: z6.array(z6.string()).optional(),
  vars: z6.record(z6.string(), z6.any()).optional()
}).passthrough();
var EphemeralEnvironmentsMapSchema = z6.record(
  z6.string(),
  EphemeralEnvironmentSchema
);
// The `pipeline:` body of a definition file; requires at least one stage.
var PipelineInnerSchema = z6.object({
  cloud_provider: z6.enum(["AWS"]),
  pipeline_updates_require_approval: RequireApprovalTypeSchema,
  tracks: z6.string().optional(),
  notifications: NotificationsSchema.optional(),
  stage_defaults: StageDefaultsSchema.optional(),
  stages: z6.array(StageSchema).min(1),
  steps: z6.array(StepSchema),
  artifacts: ArtifactsMapSchema,
  ephemeral_environments: EphemeralEnvironmentsMapSchema.optional()
});
// Top-level pipeline.yaml document: a version string plus the pipeline body.
var PipelineDefinitionSchema = z6.object({
  version: z6.string(),
  pipeline: PipelineInnerSchema
});
1382
+
1383
+ // src/resources/pipeline-schema.ts
1384
// Stable URI under which the pipeline-definition schema doc is exposed.
var PIPELINE_SCHEMA_URI = "devramps://schema/pipeline-definition";
/**
 * Returns the MCP resource descriptor for the pipeline-definition schema
 * documentation (markdown).
 */
function getPipelineSchemaResource() {
  const descriptor = {
    uri: PIPELINE_SCHEMA_URI,
    name: "Pipeline Definition Schema",
    description: "Reference documentation for the DevRamps pipeline definition YAML format",
    mimeType: "text/markdown"
  };
  return descriptor;
}
1393
/**
 * Returns the pipeline-definition reference documentation as one markdown
 * string. The body is a single template literal; the step/artifact type
 * lists are interpolated from KNOWN_STEP_TYPES / KNOWN_ARTIFACT_TYPES so
 * the docs stay in sync with the schema module. Backticks and `${{ }}`
 * expression examples are backslash-escaped to survive the template.
 */
function getPipelineSchemaContent() {
  return `# DevRamps Pipeline Definition Schema

## Overview

A DevRamps pipeline definition is a YAML file at \`.devramps/<pipeline_name>/pipeline.yaml\` that describes your CI/CD pipeline configuration.

## Top-Level Structure

\`\`\`yaml
version: "1.0.0"

pipeline:
  cloud_provider: AWS
  pipeline_updates_require_approval: ALWAYS # ALWAYS | NEVER | DESTRUCTIVE_CHANGES_ONLY
  tracks: main # Optional: branch to track (default: main)

  notifications: # Optional
    slack:
      channel: "#deployments"
      events:
        - stage_failed
        - approval_required
        - stage_auto_rolled_back
        - stage_rolled_back
        - stage_rolled_forward

  stage_defaults: # Optional - inherited by all stages
    deployment_time_window: PACIFIC_WORKING_HOURS

  stages:
    - name: staging
      account_id: "123456789012"
      region: us-east-1
      deployment_time_window: NONE # Optional per-stage override
      vars: # Optional stage-specific variables
        env: staging
      skip: # Optional - step names to skip in this stage
        - "Bake Period"
      auto_rollback_alarm_name: my-alarm # Optional CloudWatch alarm

    - name: production
      account_id: "123456789012"
      region: us-east-1
      vars:
        env: production

  ephemeral_environments: # Optional
    pr-env:
      triggers:
        - on: pull_request
          target_branches: ["main"]
        - on: api
      release:
        - on: pr_closed
        - on: api
        - on: timeout
          after_idle_hours: 24
      on_release: deploy_main
      account_id: "123456789012"
      region: us-east-1
      skip: ["Bake Period"]
      vars:
        env: ephemeral

  steps:
    - name: Synthesize Infrastructure
      id: infra
      type: DEVRAMPS:TERRAFORM:SYNTHESIZE
      params:
        requires_approval: ALWAYS
        source: /infrastructure
        variables:
          region: \${{ stage.region }}
          aws_account_id: \${{ stage.account_id }}
          env: \${{ vars.env }}

    - name: Deploy Backend Service
      type: DEVRAMPS:ECS:DEPLOY
      goes_after: ["Synthesize Infrastructure"]
      params:
        cluster_name: \${{ steps.infra.ecs_cluster_name }}
        service_name: \${{ steps.infra.ecs_service_name }}
        reference_task_definition: \${{ steps.infra.task_definition }}
        images:
          - container_name: service
            image: \${{ stage.artifacts.backend.image_url }}

    - name: Deploy Frontend
      type: DEVRAMPS:S3:UPLOAD
      goes_after: ["Synthesize Infrastructure"]
      params:
        source_s3_url: \${{ stage.artifacts.frontend.s3_url }}
        bucket: "\${{ steps.infra.frontend_bucket_name }}"
        prefix: assets/
        decompress: true
        clean: true

    - name: Invalidate Cache
      type: DEVRAMPS:CLOUDFRONT:INVALIDATE
      goes_after: ["Deploy Frontend"]
      params:
        distribution_id: \${{ steps.infra.cloudfront_distribution_id }}
        paths: ["/*"]

    - name: Bake Period
      type: DEVRAMPS:APPROVAL:BAKE
      params:
        duration_minutes: 5

  artifacts:
    Backend Image:
      id: backend
      type: DEVRAMPS:DOCKER:BUILD
      architecture: "linux/amd64"
      host_size: "medium"
      rebuild_when_changed:
        - /services/backend
      params:
        dockerfile: /services/backend/Dockerfile

    Frontend Bundle:
      id: frontend
      type: DEVRAMPS:BUNDLE:BUILD
      per_stage: true
      rebuild_when_changed:
        - /services/frontend
      dependencies: ["node.24"]
      envs:
        environment: \${{ vars.env }}
      params:
        build_commands: |
          cd services/frontend
          npm install
          npm run build
          zip -r ../../bundle.zip ./dist
        file_path: /bundle.zip
\`\`\`

## Step Types

${KNOWN_STEP_TYPES.map((t) => `- \`${t}\``).join("\n")}
- \`CUSTOM:<your-type>\` \u2014 Custom step types from your step registry

## Artifact Types

${KNOWN_ARTIFACT_TYPES.map((t) => `- \`${t}\``).join("\n")}

## Stage Fields

| Field | Type | Required | Description |
|-------|------|----------|-------------|
| name | string | Yes | Unique stage name |
| account_id | string | Yes | AWS account ID |
| region | string | Yes | AWS region |
| deployment_time_window | enum | No | When deployments are allowed |
| vars | object | No | Stage-specific variables accessible via \${{ vars.key }} |
| skip | string[] | No | Step names to skip in this stage |
| auto_rollback_alarm_name | string | No | CloudWatch alarm for auto-rollback |

## Step Fields

| Field | Type | Required | Description |
|-------|------|----------|-------------|
| type | string | Yes | Step type identifier |
| name | string | Yes | Unique step name |
| id | string | No | Short ID for expression references |
| params | object | Yes | Step-specific parameters |
| goes_after | string[] | No | Dependencies (step names, IDs, or artifact names) |
| host_size | enum | No | VM size: small, medium, large |
| architecture | enum | No | linux/amd64 or linux/arm64 |
| dependencies | string[] | No | System dependencies to install |

## Artifact Fields

| Field | Type | Required | Description |
|-------|------|----------|-------------|
| type | string | Yes | Artifact type identifier |
| id | string | No | Short ID for expression references |
| params | object | Yes | Artifact-specific parameters |
| rebuild_when_changed | string[] | No | Paths that trigger rebuild |
| per_stage | boolean | No | Build per stage (default: false) |
| host_size | enum | No | VM size: small, medium, large |
| architecture | enum | No | linux/amd64 or linux/arm64 |
| dependencies | string[] | No | System dependencies to install (e.g., "node.24") |
| envs | object | No | Environment variables for the build |

## Expression Syntax

Steps and artifacts can reference dynamic values using \`\${{ }}\` expressions:

**Stage context:**
- \`\${{ stage.name }}\` \u2014 Current stage name
- \`\${{ stage.account_id }}\` \u2014 Current stage AWS account ID
- \`\${{ stage.region }}\` \u2014 Current stage AWS region

**Stage variables:**
- \`\${{ vars.key }}\` \u2014 Stage-specific variable
- \`\${{ vars.nested.key }}\` \u2014 Nested variable access

**Step outputs (from Terraform or other steps):**
- \`\${{ steps.<step_id>.<output_name> }}\` \u2014 Reference by step ID
- \`\${{ steps["Step Name"].<output_name> }}\` \u2014 Reference by step name

**Artifact outputs:**
- \`\${{ stage.artifacts.<artifact_id>.image_url }}\` \u2014 Docker image URL
- \`\${{ stage.artifacts.<artifact_id>.s3_url }}\` \u2014 Bundle S3 URL
- \`\${{ stage.artifacts.<artifact_id>.s3_bucket }}\` \u2014 Bundle S3 bucket
- \`\${{ stage.artifacts.<artifact_id>.s3_key }}\` \u2014 Bundle S3 key

**Secrets:**
- \`\${{ organization.secrets["SECRET_NAME"] }}\` \u2014 Organization secret
- \`\${{ secret("SECRET_NAME") }}\` \u2014 Shorthand for organization secret

**Pipeline/org context:**
- \`\${{ pipeline.name }}\` / \`\${{ pipeline.slug }}\`
- \`\${{ organization.name }}\` / \`\${{ organization.slug }}\`
- \`\${{ trigger.sha }}\` / \`\${{ trigger.branch }}\`

## Notification Events

- \`stage_succeeded\` \u2014 A stage deployment completed successfully
- \`stage_failed\` \u2014 A stage deployment failed
- \`approval_required\` \u2014 A manual approval is needed
- \`stage_auto_rolled_back\` \u2014 A stage was automatically rolled back
- \`stage_rolled_back\` \u2014 A stage rollback completed
- \`stage_rolled_forward\` \u2014 A stage roll-forward completed

## Deployment Time Windows

- \`NONE\` \u2014 No restrictions, deploy any time
- \`PACIFIC_WORKING_HOURS\` \u2014 9am-5pm Pacific, weekdays only
- \`PACIFIC_WORKING_HOURS_REDUCED\` \u2014 10am-4pm Pacific, weekdays only

## Ephemeral Environments

Ephemeral environments are temporary deployment environments that can be triggered by pull requests or API calls. They use the same stages, steps, and artifacts as the main pipeline but with their own account, region, and variables.

**Trigger types:**
- \`pull_request\` \u2014 Triggered when a PR is opened against specified branches
- \`api\` \u2014 Triggered via the DevRamps API (for AI agents, etc.)

**Release types:**
- \`pr_closed\` \u2014 Released when the PR is closed/merged
- \`api\` \u2014 Released via the API
- \`timeout\` \u2014 Released after idle for specified hours

## File Structure

Pipeline definitions live under \`.devramps/<pipeline_name>/\`:
\`\`\`
.devramps/
  my_pipeline/
    pipeline.yaml # Pipeline definition
    aws_additional_iam_policies.json # Additional IAM permissions for Terraform
\`\`\`

## Bootstrap

After creating or modifying pipeline files, run:
\`\`\`
npx @devramps/cli bootstrap
\`\`\`

This provisions the required AWS resources (ECR repos, S3 buckets, IAM roles, Terraform state buckets) in your target accounts.

## Documentation

For more details, visit https://devramps.com/docs
`;
}
1664
+
1665
+ // src/resources/step-type-docs.ts
1666
// Stable URI under which the step-type documentation is exposed.
var STEP_TYPE_DOCS_URI = "devramps://docs/step-types";
/**
 * Returns the MCP resource descriptor for the step-type documentation
 * (markdown).
 */
function getStepTypeDocsResource() {
  const descriptor = {
    uri: STEP_TYPE_DOCS_URI,
    name: "Step Type Documentation",
    description: "Documentation for all available DevRamps step types, including parameters and YAML examples",
    mimeType: "text/markdown"
  };
  return descriptor;
}
1675
/**
 * Builds the step-type documentation markdown. Prefers live data from the
 * API client's listStepTypes(); with no client, or on any API failure, it
 * falls back to the bundled static docs (best-effort by design).
 */
async function getStepTypeDocsContent(client) {
  if (!client) {
    return getStaticStepTypeDocs();
  }
  try {
    const result = await client.listStepTypes();
    const lines = [
      "# DevRamps Step Types",
      "",
      "Available step types for pipeline definitions.",
      ""
    ];
    for (const step of result.steps) {
      lines.push(`## ${step.name} (\`${step.stepType}\`)`, "");
      if (step.shortDescription) {
        lines.push(step.shortDescription, "");
      }
      if (step.longDescription) {
        lines.push(step.longDescription, "");
      }
      if (step.category) {
        lines.push(`**Category:** ${step.category}`);
      }
      if (step.status) {
        lines.push(`**Status:** ${step.status}`);
      }
      if (step.version) {
        lines.push(`**Version:** ${step.version}`);
      }
      lines.push("");
      if (step.yamlExample) {
        lines.push("### YAML Example", "", "```yaml", step.yamlExample, "```", "");
      }
      if (step.paramsJsonSchema && typeof step.paramsJsonSchema === "object") {
        lines.push("### Parameters Schema", "", "```json", JSON.stringify(step.paramsJsonSchema, null, 2), "```", "");
      }
      if (step.documentationUrl) {
        lines.push(`[Documentation](${step.documentationUrl})`, "");
      }
      lines.push("---", "");
    }
    return lines.join("\n");
  } catch {
    // Deliberate swallow: any API failure falls back to static docs.
    return getStaticStepTypeDocs();
  }
}
1735
/**
 * Static fallback documentation for the built-in DevRamps step types.
 *
 * The caller first tries to render docs from the live step-type catalog and
 * falls back to this hand-maintained Markdown summary when that fetch/parse
 * fails (see the try/catch immediately above this function).
 *
 * @returns {string} Markdown describing deployment, custom, approval, and
 *   artifact step types with their parameters.
 */
function getStaticStepTypeDocs() {
  // NOTE: the template literal below is runtime data (Markdown returned to
  // the MCP client) -- the escaped \` and \u2014 sequences are intentional;
  // do not reflow or re-indent its contents.
  return `# DevRamps Step Types

## Deployment Steps

### ECS Deploy (\`DEVRAMPS:ECS:DEPLOY\`)
Deploy a container image to an Amazon ECS service.

**Parameters:**
- \`cluster_name\` (string, required) \u2014 ECS cluster name
- \`service_name\` (string, required) \u2014 ECS service name
- \`reference_task_definition\` (string, required) \u2014 Task definition ARN
- \`images\` (array, required) \u2014 Container image mappings
- \`container_name\` (string) \u2014 Container name in task definition
- \`image\` (string) \u2014 Image URL or artifact reference
- \`account_id\` (string, optional) \u2014 Override AWS account
- \`region\` (string, optional) \u2014 Override AWS region
- \`timeout\` (number, optional) \u2014 Timeout in minutes (default: 360)

### Lambda Deploy (\`DEVRAMPS:LAMBDA:DEPLOY\`)
Deploy code to an AWS Lambda function.

**Parameters:**
- \`function_name\` (string, required) \u2014 Lambda function name
- \`s3_bucket\` (string, optional) \u2014 S3 bucket with deployment package
- \`s3_key\` (string, optional) \u2014 S3 key for deployment package
- \`image_url\` (string, optional) \u2014 Container image URL
- \`account_id\`, \`region\`, \`timeout\` \u2014 Same as ECS Deploy

### Lambda Invoke (\`DEVRAMPS:LAMBDA:INVOKE\`)
Invoke a Lambda function as a pipeline step.

**Parameters:**
- \`function_name\` (string, required)
- \`payload\` (string, optional) \u2014 JSON payload
- \`invocation_type\` (enum, optional) \u2014 "RequestResponse" or "Event"

### S3 Upload (\`DEVRAMPS:S3:UPLOAD\`)
Upload files to an S3 bucket.

### CloudFront Invalidate (\`DEVRAMPS:CLOUDFRONT:INVALIDATE\`)
Invalidate CloudFront distribution cache.

### EKS Deploy (\`DEVRAMPS:EKS:DEPLOY\`)
Deploy to Amazon EKS (Kubernetes).

### EKS Helm (\`DEVRAMPS:EKS:HELM\`)
Deploy using Helm charts to EKS.

## Custom Steps

### Script Execute (\`DEVRAMPS:SCRIPT:EXECUTE\`)
Run a custom script during deployment.

**Parameters:**
- \`script_path\` (string, required) \u2014 Path to the script
- \`params\` (string[], optional) \u2014 CLI arguments
- \`working_directory\` (string, optional)
- \`environment_variables\` (object, optional)
- \`timeout\` (number, optional) \u2014 Minutes (default: 30)
- \`run_in_vpc\` (boolean, optional)

### Database Migrate (\`DEVRAMPS:DATABASE:MIGRATE\`)
Run database migrations.

## Approval Workflows

### Bake (\`DEVRAMPS:APPROVAL:BAKE\`)
Wait for a specified duration before allowing promotion.

**Parameters:**
- \`duration_minutes\` (number, required)

### Test (\`DEVRAMPS:APPROVAL:TEST\`)
Run tests and wait for results before promotion.

### Manual (\`DEVRAMPS:APPROVAL:MANUAL\`)
Require manual approval before promotion.

## Artifact Types

### Docker Build (\`DEVRAMPS:DOCKER:BUILD\`)
Build a Docker container image.

**Parameters:**
- \`dockerfile\` (string, required) \u2014 Dockerfile path
- \`build_root\` (string, optional) \u2014 Build context directory
- \`args\` (string[], optional) \u2014 Build arguments
- \`env\` (object, optional) \u2014 Environment variables

### Docker Import (\`DEVRAMPS:DOCKER:IMPORT\`)
Import a pre-built Docker image from an external registry.

### Bundle Build (\`DEVRAMPS:BUNDLE:BUILD\`)
Build a file bundle (zip archive).

**Parameters:**
- \`build_commands\` (string, required) \u2014 Build command(s)
- \`file_path\` (string, required) \u2014 Path to bundle output
- \`env\` (object, optional) \u2014 Environment variables

### Bundle Import (\`DEVRAMPS:BUNDLE:IMPORT\`)
Import a pre-built bundle from an external source.
`;
}
1840
+
1841
// src/prompts/scaffold-project.ts
import { z as z7 } from "zod";
/**
 * MCP prompt descriptor for the "scaffold-project" guided workflow.
 * `argsSchema` uses zod (bundler-aliased as z7) to describe the single
 * argument: the project root path. The matching message builder is
 * getScaffoldProjectMessages below.
 */
var SCAFFOLD_PROJECT_PROMPT = {
  name: "scaffold-project",
  description: "Guided workflow to scaffold a DevRamps CI/CD pipeline for a project. Deeply analyzes the codebase, determines the right infrastructure and deployment architecture, generates Terraform configs for new projects, and produces a complete pipeline definition.",
  argsSchema: {
    project_path: z7.string().describe("Path to the project root directory")
  }
};
1850
/**
 * Builds the MCP prompt messages for the "scaffold-project" workflow.
 *
 * Returns a single user-role text message containing the full multi-phase
 * scaffolding playbook (codebase analysis, up-front questions, Terraform
 * generation, validation checkpoint, pipeline definition, IAM policies,
 * validation, and final instructions) with the requested project path
 * interpolated into the text.
 *
 * @param {{ project_path?: string }} args - Prompt arguments; presumably
 *   validated against SCAFFOLD_PROJECT_PROMPT.argsSchema by the MCP server
 *   before this is called -- TODO confirm.
 * @returns {{ messages: Array<{ role: string, content: { type: string, text: string } }> }}
 */
function getScaffoldProjectMessages(args) {
  // `||` (not `??`): an empty-string path also falls back to ".".
  const projectPath = args.project_path || ".";
  return {
    messages: [
      {
        role: "user",
        content: {
          type: "text",
          // The template literal below is the prompt payload sent to the
          // model. Escaped \` , \${{ }} and \u2014 sequences are intentional
          // (they must survive into the rendered text); only ${projectPath}
          // is interpolated. Do not reflow its contents.
          text: `I want to set up a DevRamps CI/CD pipeline for my project at ${projectPath}.

You are an expert DevOps engineer helping me set up a production-grade CI/CD pipeline using DevRamps. Follow this workflow carefully.

---

## Phase 1: Deep Codebase Analysis

Thoroughly analyze the project at \`${projectPath}\`. Read files, explore directories, and build a complete picture:

1. **Project structure** \u2014 List the top-level directory layout. Understand the monorepo vs single-service structure.
2. **Backend services** \u2014 Look for Dockerfiles, server code, API frameworks (Express, FastAPI, Django, Spring, Go net/http, etc.). Note the language, framework, and entry point.
3. **Frontend apps** \u2014 Look for package.json with build scripts, React/Vue/Angular/Next.js/Vite configs. Note the build command and output directory.
4. **Infrastructure** \u2014 Look for \`terraform/\`, \`infrastructure/\`, or \`*.tf\` files. If found, read them to understand what resources are already defined.
5. **Existing CI/CD** \u2014 Look for .github/workflows, .gitlab-ci.yml, Jenkinsfile, buildspec.yml, existing \`.devramps/\` directories.
6. **Dependencies** \u2014 Check package.json, requirements.txt, go.mod, Cargo.toml, etc. for frameworks, tools, and build requirements.
7. **Database** \u2014 Look for migration files, ORM configs (Prisma, TypeORM, Alembic, Flyway), database connection configuration.

Based on your analysis, classify the project into one of these categories:
- **Brand new / nearly empty repo** \u2014 Minimal or no existing application code
- **Has application code but no infrastructure** \u2014 App code exists but no Terraform/IaC
- **Has infrastructure and code** \u2014 Both application code and Terraform exist
- **Already has DevRamps config** \u2014 Has a \`.devramps/\` directory (analyze gaps and suggest improvements)

---

## Phase 2: Ask Key Questions Up Front

Before generating anything, ask the user these questions **all at once** in a single message. Suggest reasonable defaults based on what you learned in Phase 1:

- **Project name** \u2014 What should this project/pipeline be called? (Suggest a default based on the directory name, repo name, or any package.json name field. e.g., "my-saas-app")
- **What are you building?** \u2014 (Only ask for brand new / empty repos.) e.g., "A SaaS API with a React frontend", "A data processing pipeline". For existing repos, state what you determined from the code and ask if that's correct.
- **Stages** \u2014 Do you want to start with a single stage (e.g., just staging) or multiple stages (e.g., staging + production)? Recommend starting with one stage to get up and running quickly \u2014 they can always add more later. If they choose multiple, ask whether staging and production use the same or different AWS accounts.
- **AWS account ID(s)** \u2014 What AWS account ID(s) for the stage(s) they chose?
- **AWS regions** \u2014 What region(s) to deploy to? (Default: us-east-1)
- **Architecture tier** \u2014 Present the three tiers (see Phase 3) with a recommendation based on context. Let the user pick.

**Do NOT ask about:**
- Deployment targets, artifact types, build commands, frameworks \u2014 determine these from the code
- CI/CD tool choice \u2014 DevRamps handles this
- Individual AWS service choices \u2014 the architecture tier determines this
- Slack notifications, ephemeral environments, approval policies \u2014 use sensible defaults (no Slack, no ephemeral envs, ALWAYS for approval) and mention the user can add these later

---

## Phase 3: Determine Architecture & Generate Terraform

Based on the user's answers, generate Terraform configuration files under \`/infrastructure\` in the project.

### Architecture Tiers

Present these to the user in Phase 2 and make a recommendation:

**1. Budget-friendly** (~$50-80/mo, cheapest to run):
- VPC with public + private subnets across 2 AZs
- FCK-NAT instance for NAT (no NAT Gateway \u2014 saves ~$30/mo)
- Fargate ECS service in private subnets
- Private ALB in private subnets (accessed via CloudFront only)
- CloudFront as CDN + entry point (routes /api/* to ALB, static assets to S3)
- S3 bucket for frontend static assets with OAC
- RDS PostgreSQL (db.t4g.micro) in private subnets

**2. Standard** (recommended, ~$100-150/mo):
- VPC with public + private subnets across 2 AZs
- FCK-NAT instance for NAT (no NAT Gateway)
- Fargate ECS service in private subnets
- Private ALB in public subnets
- CloudFront as CDN in front of ALB and S3
- S3 bucket for frontend static assets with OAC
- RDS PostgreSQL (db.t4g.small) in private subnets with Multi-AZ standby

**3. High Availability** (production-grade, ~$200-300/mo):
- VPC with public + private subnets across 3 AZs
- Managed NAT Gateway (highest availability)
- Fargate ECS service in private subnets (multi-AZ)
- Private ALB in public subnets
- CloudFront as CDN in front of ALB and S3
- S3 bucket for frontend static assets with OAC
- RDS PostgreSQL (db.t4g.medium) in private subnets with Multi-AZ

### Terraform File Organization

**CRITICAL: You MUST split Terraform into separate files by concern. Putting everything in a single main.tf is NOT acceptable and will be rejected by the user.** Use this exact structure:

\`\`\`
infrastructure/
backend.tf # Terraform backend configuration (S3 remote state)
providers.tf # AWS provider configuration
variables.tf # All input variables
outputs.tf # All outputs (MUST output everything the pipeline needs)
vpc.tf # VPC, subnets, internet gateway, NAT, route tables
security.tf # Security groups
ecs.tf # ECS cluster, task definition, service, IAM roles
alb.tf # Application load balancer, target groups, listeners
frontend.tf # S3 bucket for static assets, bucket policy
cloudfront.tf # CloudFront distribution, OAC, cache behaviors
rds.tf # RDS instance, subnet group (if database needed)
\`\`\`

### Terraform Requirements

Every generated Terraform file set MUST include:

**backend.tf \u2014 REQUIRED (without this, DevRamps cannot manage Terraform state):**
\`\`\`hcl
terraform {
required_version = ">= 1.5"
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
}
backend "s3" {} # MUST be present \u2014 DevRamps configures the state bucket during bootstrap
}
\`\`\`

**providers.tf:**
\`\`\`hcl
provider "aws" {
region = var.region
}
\`\`\`

**variables.tf** \u2014 Must accept these variables (plus any project-specific ones):
\`\`\`hcl
variable "region" {
type = string
description = "AWS region"
}

variable "aws_account_id" {
type = string
description = "AWS account ID"
}

variable "env" {
type = string
description = "Environment name (staging, production, etc.)"
}

variable "app_name" {
type = string
description = "Application name"
default = "<project-name>"
}

# Artifact variables \u2014 passed by the pipeline so Terraform can reference them
variable "backend_service_image_uri" {
type = string
description = "Docker image URI for the backend service"
default = "" # Empty on first run before any image is built
}

variable "frontend_bundle_s3_bucket" {
type = string
description = "S3 bucket containing the frontend bundle"
default = ""
}

variable "frontend_bundle_s3_key" {
type = string
description = "S3 key for the frontend bundle"
default = ""
}
\`\`\`

**outputs.tf** \u2014 Must output everything the pipeline steps need:
\`\`\`hcl
# ECS outputs (used by DEVRAMPS:ECS:DEPLOY step)
output "backend_cluster_name" {
value = aws_ecs_cluster.main.name
}

output "backend_service_name" {
value = aws_ecs_service.api.name
}

output "backend_task_definition" {
value = aws_ecs_task_definition.api.family
}

# Frontend outputs (used by DEVRAMPS:S3:UPLOAD step)
output "frontend_bundle_bucket_name" {
value = aws_s3_bucket.frontend.id
}

# CloudFront outputs (used by DEVRAMPS:CLOUDFRONT:INVALIDATE step)
output "cloudfront_distribution_id" {
value = aws_cloudfront_distribution.main.id
}

# Convenience outputs
output "cloudfront_domain_name" {
value = aws_cloudfront_distribution.main.domain_name
}

output "alb_dns_name" {
value = aws_lb.api.dns_name
}
\`\`\`

**vpc.tf** \u2014 MUST include private subnets and NAT for ECS tasks:
- Public subnets (for ALB, NAT)
- Private subnets (for ECS tasks, RDS)
- Internet gateway on public subnets
- NAT (FCK-NAT instance for Budget/Standard, managed NAT Gateway for High Availability)
- Route tables: public routes through IGW, private routes through NAT
- **ECS tasks MUST be in private subnets** \u2014 they need NAT to pull images from ECR and reach AWS APIs

**ecs.tf** \u2014 Must include:
- ECS cluster
- Task definition using the \`backend_service_image_uri\` variable for the container image (with a sensible placeholder default like a public nginx image for the initial \`terraform apply\` before any image is built)
- ECS service in private subnets with \`assign_public_ip = false\`
- Task execution IAM role with ECS task execution policy
- CloudWatch log group for container logs

**cloudfront.tf** \u2014 MUST include BOTH origins (S3 and ALB). A CloudFront distribution with only one origin will break either the frontend or the API:
- **Two origins required:** S3 origin (for static assets) AND ALB/custom origin (for API backend)
- **Ordered cache behavior:** \`/api/*\` path pattern routes to the ALB origin with caching DISABLED (min/default/max TTL all 0, forward all query strings, headers including Authorization and Origin, forward all cookies)
- **Default cache behavior:** routes to S3 origin with caching enabled
- **Origin Access Control** for the S3 origin (OAC, not OAI)
- **Custom error response** for SPA routing: 403 \u2192 200 with /index.html
- The ALB origin must use \`custom_origin_config\` with \`origin_protocol_policy = "http-only"\` (ALB listener is HTTP port 80)

**Naming convention:**
Use \`\${var.app_name}-\${var.env}\` as a prefix for all resource names and tags.

**Tagging:**
All resources should include at minimum:
\`\`\`hcl
tags = {
Name = "\${var.app_name}-\${var.env}-<resource>"
Environment = var.env
Project = var.app_name
ManagedBy = "terraform"
}
\`\`\`

---

## Phase 3.5: Validate Terraform & Infrastructure Review

After generating the Terraform files, **validate them and present a summary to the user** before moving on. This is a checkpoint \u2014 don't proceed until the user confirms they're happy.

### Terraform Validation

Run \`terraform validate\` to catch syntax errors before going further:

\`\`\`bash
cd infrastructure && terraform init -backend=false && terraform validate
\`\`\`

Use \`-backend=false\` because the S3 backend isn't configured yet (DevRamps sets that up during bootstrap). If validation fails, fix the errors in the Terraform files and re-run until it passes. Do not present the summary to the user until validation succeeds.

### Pre-Summary Checklist

Before presenting the summary, verify ALL of the following. If any check fails, fix the issue first:

- [ ] \`backend.tf\` exists and contains \`backend "s3" {}\`
- [ ] \`providers.tf\` exists and is separate from \`backend.tf\`
- [ ] \`variables.tf\` exists with all required variables
- [ ] \`outputs.tf\` exists with outputs for every value the pipeline references
- [ ] \`vpc.tf\` exists with both public AND private subnets, and NAT (FCK-NAT or NAT Gateway per tier)
- [ ] \`cloudfront.tf\` has BOTH an S3 origin AND an ALB origin (if the project has both frontend and backend)
- [ ] \`cloudfront.tf\` has an ordered cache behavior routing \`/api/*\` to the ALB origin
- [ ] ECS tasks are in private subnets with \`assign_public_ip = false\`
- [ ] Terraform files are split across multiple files (NOT all in main.tf)
- [ ] \`terraform validate\` passes

### Summary Format

Present the summary in this structure:

**1. Decisions made:**
List every significant architectural choice you made, written in plain English. For example:
- "VPC with 2 availability zones (us-east-1a, us-east-1b) and private subnets for ECS/RDS"
- "FCK-NAT instance instead of a managed NAT Gateway (saves ~$32/mo)"
- "Fargate ECS service (serverless containers, no EC2 instances to manage)"
- "Private ALB in public subnets \u2014 only accessible via CloudFront"
- "RDS PostgreSQL db.t4g.micro with 20GB storage (no Multi-AZ standby)"
- "CloudFront distribution routing /api/* to ALB, static assets to S3"
- "S3 bucket with Origin Access Control for frontend assets"

**2. Estimated monthly cost:**
Provide a line-item cost estimate in a table. Use the AWS pricing for the selected region. Be specific about instance sizes, storage, and data transfer assumptions. Example:

| Resource | Configuration | Est. Monthly Cost |
|----------|--------------|-------------------|
| ECS Fargate | 0.25 vCPU, 0.5 GB, 1 task 24/7 | ~$9 |
| ALB | Application Load Balancer | ~$16 + data |
| RDS PostgreSQL | db.t4g.micro, 20GB gp3 | ~$13 |
| FCK-NAT | t4g.nano instance | ~$3 |
| S3 | Frontend assets (<1GB) | ~$0.02 |
| CloudFront | 10GB transfer/mo | ~$1 |
| CloudWatch Logs | 5GB ingestion/mo | ~$2.50 |
| **Total** | | **~$45-50/mo** |

Note: actual costs vary with traffic, data transfer, and usage. This estimate assumes low-moderate traffic.

**3. Optional upgrades:**
List 3-5 concrete changes the user could ask for, each with the benefit, tradeoff, and approximate cost impact. Format as a numbered list so the user can say "do 1 and 3". For example:

1. **Replace FCK-NAT with managed NAT Gateway** \u2014 Higher availability and throughput, no instance to manage. +~$32/mo.
2. **Enable RDS Multi-AZ** \u2014 Automatic failover to standby in another AZ. Better durability. +~$13/mo (doubles RDS cost).
3. **Increase ECS task size to 0.5 vCPU / 1GB** \u2014 More headroom for the backend service. +~$9/mo.
4. **Add a third availability zone** \u2014 Better fault tolerance. Adds one more subnet pair + slightly higher NAT costs.
5. **Add a custom domain with ACM certificate** \u2014 HTTPS with your own domain instead of the CloudFront \`.cloudfront.net\` URL. No additional cost (ACM certs are free), but requires a Route 53 hosted zone or DNS access.

Then ask: "Want me to make any of these changes, or are you happy with the current setup?"

If the user requests changes, apply them to the Terraform files and re-present the updated summary. Once the user confirms, proceed to Phase 4.

---

## Phase 4: Generate Pipeline Definition

After Terraform files are written and the user has confirmed the infrastructure, construct the complete pipeline definition object and call the \`scaffold-pipeline\` tool. The definition must follow this structure:

\`\`\`yaml
version: "1.0.0"

pipeline:
cloud_provider: AWS
pipeline_updates_require_approval: ALWAYS

stage_defaults:
deployment_time_window: PACIFIC_WORKING_HOURS

stages:
- name: staging
deployment_time_window: NONE
account_id: "<AWS_ACCOUNT_ID>"
region: us-east-1
skip: ["Bake Period"]
vars:
env: staging

- name: production
account_id: "<AWS_ACCOUNT_ID>"
region: us-east-1
vars:
env: production

steps:
- name: Synthesize Infrastructure
id: infra
type: DEVRAMPS:TERRAFORM:SYNTHESIZE
params:
requires_approval: ALWAYS
source: /infrastructure
variables:
region: \${{ stage.region }}
aws_account_id: \${{ stage.account_id }}
env: \${{ vars.env }}
backend_service_image_uri: \${{ stage.artifacts.backend_service.image_url }}
frontend_bundle_s3_bucket: \${{ stage.artifacts.frontend_bundle.s3_bucket }}
frontend_bundle_s3_key: \${{ stage.artifacts.frontend_bundle.s3_key }}

- name: Deploy Backend Service
id: backend
type: DEVRAMPS:ECS:DEPLOY
goes_after: ["Synthesize Infrastructure"]
params:
cluster_name: \${{ steps.infra.backend_cluster_name }}
service_name: \${{ steps.infra.backend_service_name }}
reference_task_definition: \${{ steps.infra.backend_task_definition }}
images:
- container_name: service
image: \${{ stage.artifacts.backend_service.image_url }}

- name: Deploy Frontend Bundle
type: DEVRAMPS:S3:UPLOAD
goes_after: ["Synthesize Infrastructure"]
params:
source_s3_url: \${{ stage.artifacts.frontend_bundle.s3_url }}
bucket: "\${{ steps.infra.frontend_bundle_bucket_name }}"
prefix: assets/
decompress: true
clean: true

- name: Invalidate Frontend Cache
type: DEVRAMPS:CLOUDFRONT:INVALIDATE
goes_after: ["Deploy Frontend Bundle"]
params:
distribution_id: \${{ steps.infra.cloudfront_distribution_id }}
paths: ["/*"]

- name: Bake Period
type: DEVRAMPS:APPROVAL:BAKE
params:
duration_minutes: 5

artifacts:
Backend Service Image:
id: backend_service
type: DEVRAMPS:DOCKER:BUILD
architecture: "linux/amd64"
host_size: "medium"
rebuild_when_changed:
- /services/backend
params:
dockerfile: /services/backend/Dockerfile
args: []

Frontend Bundle:
id: frontend_bundle
type: DEVRAMPS:BUNDLE:BUILD
per_stage: true
host_size: "medium"
rebuild_when_changed:
- /services/frontend
dependencies: ["node.22"]
envs:
environment: \${{ vars.env }}
params:
build_commands: |
cd services/frontend
npm install
npm run build
zip -r ./bundle.zip ./dist
file_path: /services/frontend/bundle.zip
\`\`\`

### Key rules for constructing the pipeline definition:

**Step types to use:**
- \`DEVRAMPS:TERRAFORM:SYNTHESIZE\` \u2014 for ALL infrastructure management
- \`DEVRAMPS:ECS:DEPLOY\` \u2014 for deploying backend services to ECS
- \`DEVRAMPS:S3:UPLOAD\` \u2014 for deploying static frontend assets to S3
- \`DEVRAMPS:CLOUDFRONT:INVALIDATE\` \u2014 for invalidating CDN cache after frontend deploy
- \`DEVRAMPS:APPROVAL:BAKE\` \u2014 for soak/bake periods between stages

**Artifact types to use:**
- \`DEVRAMPS:DOCKER:BUILD\` \u2014 for building Docker images (pushed to ECR automatically)
- \`DEVRAMPS:BUNDLE:BUILD\` \u2014 for building frontend/file bundles (uploaded to S3 automatically)

**Expression syntax for dynamic values:**
- \`\${{ stage.region }}\` / \`\${{ stage.account_id }}\` \u2014 current stage info
- \`\${{ vars.key }}\` \u2014 stage-specific variables
- \`\${{ steps.<step_id>.<output_name> }}\` \u2014 Terraform outputs or step outputs
- \`\${{ stage.artifacts.<artifact_id>.image_url }}\` \u2014 Docker image URL
- \`\${{ stage.artifacts.<artifact_id>.s3_url }}\` / \`.s3_bucket\` / \`.s3_key\` \u2014 Bundle locations

**Execution order:**
- Steps run in parallel by default unless \`goes_after\` creates dependencies
- Terraform synthesize should always run first (other steps depend on its outputs)
- Frontend cache invalidation should depend on S3 upload completing
- Bake period runs last (and can be skipped per-stage via the \`skip\` list)

**Adapt the pipeline to the actual project:**
- Backend only? Remove frontend artifact and S3/CloudFront steps.
- Frontend only? Remove backend artifact and ECS deploy step.
- Multiple backend services? Add more Docker artifacts and ECS deploy steps.
- No database? Skip rds.tf in the Terraform generation.
- Lambda instead of ECS? Use \`DEVRAMPS:LAMBDA:DEPLOY\` step type instead.
- Adjust all paths (\`rebuild_when_changed\`, \`dockerfile\`, \`build_commands\`) to match the actual project structure.

### CRITICAL: Terraform variables and pipeline variables MUST be in sync

The \`DEVRAMPS:TERRAFORM:SYNTHESIZE\` step passes variables to Terraform via \`params.variables\`. These map directly to Terraform \`variable\` blocks. **Both sides must match exactly or synthesis will fail.**

**Rule 1: Every Terraform variable without a \`default\` MUST be passed by the synthesize step.**
If \`variables.tf\` declares \`variable "region" {}\` with no default, then the synthesize step's \`params.variables\` must include \`region: ...\`. If it's missing, Terraform will error asking for the value.

**Rule 2: Every variable passed by the synthesize step MUST exist in Terraform's \`variables.tf\`.**
If the pipeline passes \`backend_service_image_uri: \${{ stage.artifacts.backend_service.image_url }}\`, then \`variables.tf\` must declare \`variable "backend_service_image_uri" {}\`. If it's missing, Terraform will error on an unexpected variable.

**After generating both the Terraform files and the pipeline definition, verify the sync:**
1. List every \`variable\` in \`variables.tf\` that has no \`default\` value
2. List every key under the synthesize step's \`params.variables\`
3. Confirm the two lists match exactly. If they don't, fix whichever side is wrong.

Variables with a \`default\` value are optional \u2014 they don't need to be passed by the pipeline, but it's fine if they are.

### Pipeline output names must match Terraform output names

The expressions in the pipeline (e.g., \`\${{ steps.infra.backend_cluster_name }}\`) reference **Terraform output names**. Make sure every \`\${{ steps.infra.X }}\` in the pipeline has a corresponding \`output "X"\` in outputs.tf.

---

## Phase 5: Generate IAM Policies

If the project has Terraform files (existing or newly generated):

1. Read ALL the Terraform files and identify every AWS resource type and data source used
2. For each AWS service involved, determine the IAM actions Terraform needs to create, read, update, and delete those resources
3. Construct a comprehensive IAM policy. **Be broad rather than narrow** \u2014 use service-level wildcards (e.g., \`ecs:*\`, \`s3:*\`, \`ec2:*\`, \`logs:*\`) to avoid first-deploy failures. The user can tighten permissions later.
4. Always include \`iam:CreateServiceLinkedRole\` if the terraform uses ECS or ELB \u2014 these services require service-linked roles on first creation.
5. Call the \`generate-iam-policies\` tool with:
- \`terraform_dir\`: path to the Terraform directory
- \`pipeline_name\`: same pipeline name used in scaffold-pipeline
- \`project_path\`: the project root
- \`iam_policy\`: the policy array you constructed

The tool writes the policy to \`.devramps/<pipeline_name>/aws_additional_iam_policies.json\`.

---

## Phase 6: Validate

Call the \`validate-pipeline\` tool on the generated pipeline.yaml to catch any structural issues. Fix any errors before proceeding.

---

## Phase 7: Final Instructions

After everything is generated and validated, tell the user:

1. Review the generated files:
- \`.devramps/<pipeline_name>/pipeline.yaml\` \u2014 pipeline definition
- \`.devramps/<pipeline_name>/aws_additional_iam_policies.json\` \u2014 IAM permissions
- \`/infrastructure/*.tf\` \u2014 Terraform configs (if generated)
2. Run \`npx @devramps/cli bootstrap\` in the project root to bootstrap the pipeline
3. Commit everything and push to the tracked branch (default: \`main\`)
4. The pipeline will start running automatically on push
5. View the pipeline in the dashboard: https://app.devramps.com/pipelines

Then ask: "Would you like me to check on your pipeline status after you push? I can monitor the deployment and debug any failing steps."

You have access to tools for monitoring and debugging deployed pipelines: \`list-pipelines\`, \`get-pipeline-state\`, \`get-step-logs\`, \`get-pipeline-events\`, \`get-stage-health\`, \`retry-stage\`, and \`cancel-stage\`. Use these proactively when the user reports deployment issues or asks about pipeline status.

If the user wants to learn more about any DevRamps features, direct them to https://devramps.com/docs.

---

Please start by analyzing the project directory now.`
        }
      }
    ]
  };
}
2390
+
2391
// src/prompts/debug-deployment.ts
import { z as z8 } from "zod";
/**
 * MCP prompt descriptor for the "debug-deployment" guided workflow.
 * `argsSchema` uses zod (bundler-aliased as z8): `pipeline_id` is required,
 * `stage_name` is optional (the workflow auto-detects failed stages when it
 * is omitted). The matching message builder is getDebugDeploymentMessages.
 */
var DEBUG_DEPLOYMENT_PROMPT = {
  name: "debug-deployment",
  description: "Guided workflow to diagnose and debug a failed deployment. Walks through checking pipeline state, finding failed steps, retrieving logs, and suggesting fixes.",
  argsSchema: {
    pipeline_id: z8.string().describe("Pipeline ID to debug"),
    stage_name: z8.string().optional().describe(
      "Specific stage name to investigate (optional \u2014 will auto-detect failed stages if not provided)"
    )
  }
};
2403
/**
 * Builds the MCP prompt messages for the "debug-deployment" workflow.
 *
 * Produces one user-role text message containing a seven-step debugging
 * playbook, with the pipeline id (and, when supplied, the stage name)
 * interpolated into the text.
 *
 * @param {{ pipeline_id?: string, stage_name?: string }} args - Prompt
 *   arguments; a missing pipeline id is rendered as the "<pipeline_id>"
 *   placeholder, and a missing stage name simply omits the stage-specific
 *   fragments.
 * @returns {{ messages: Array<{ role: string, content: { type: string, text: string } }> }}
 */
function getDebugDeploymentMessages(args) {
  // `||` (not `??`) so empty strings also fall back, matching the defaults.
  const targetPipeline = args.pipeline_id || "<pipeline_id>";
  const targetStage = args.stage_name || "";
  // Fragments rendered only when a specific stage was requested.
  const stageHeader = targetStage ? `Stage: ${targetStage}` : "";
  const focusNote = targetStage ? ` (focusing on "${targetStage}")` : "";
  const workflowText = `I need help debugging a failed deployment.

Pipeline ID: ${targetPipeline}
${stageHeader}

Please follow this systematic debugging workflow:

1. **Check pipeline state** using \`get-pipeline-state\` with pipeline_id "${targetPipeline}":
- Identify which stages are failing
- Check for any blockers
- Note the active revision ID

2. **Get recent events** using \`get-pipeline-events\` to understand the deployment timeline:
- Look for the sequence of events leading to failure
- Identify when the failure occurred

3. **Investigate failed steps**: For each failed stage${focusNote}:
- Identify the specific step(s) that failed from the pipeline state
- Note the step type and any cause/error messages

4. **Retrieve logs** using \`get-step-logs\` for each failed step:
- Look for error messages, stack traces, or timeout indicators
- Check if the failure is in the step logic or infrastructure

5. **Check stage health** using \`get-stage-health\` to understand patterns:
- Is this stage frequently failing? (low success rate)
- Are execution times trending up? (might indicate resource issues)

6. **Suggest fixes** based on your analysis:
- If the issue is transient, suggest using \`retry-stage\`
- If it's a code issue, suggest specific fixes
- If it's an infrastructure issue, suggest config changes
- If a blocker is preventing progress, explain how to resolve it

7. **Take action** if appropriate:
- Offer to retry the failed stage with \`retry-stage\`
- If a stage is stuck with blockers, offer to bypass with \`bypass-stage-blockers\` (with appropriate warnings)

Please start the investigation now.`;
  const userMessage = {
    role: "user",
    content: { type: "text", text: workflowText }
  };
  return { messages: [userMessage] };
}
2456
+
2457
+ // src/index.ts
2458
/**
 * Entry point for the DevRamps MCP server.
 *
 * Builds the authenticated API client when credentials are available,
 * registers tools, resources, and prompts, then serves MCP over stdio.
 * Authentication is best-effort: without credentials the server still
 * starts, but API-dependent tools are simply not registered.
 */
async function main() {
  let apiClient = null;
  const apiUrl = process.env.DEVRAMPS_API_URL || "https://devramps.com";
  try {
    const config = loadConfig();
    apiClient = new DevRampsApiClient(config);
  } catch (err) {
    // Degrade gracefully on missing/expired credentials. Log to stderr
    // only -- stdout is reserved for the MCP stdio transport.
    console.error(`Warning: ${err instanceof Error ? err.message : String(err)}`);
    console.error(
      "API-dependent tools will not be available. Run `npx @devramps/cli login` to authenticate. Scaffold and IAM tools will still work."
    );
  }
  const publicClient = new DevRampsPublicClient(apiUrl);
  const server = new McpServer({
    name: "devramps",
    // NOTE(review): package version is 0.1.6 but this reports 0.1.0 --
    // confirm whether this should track package.json.
    version: "0.1.0"
  });

  // ---- Tools usable without an authenticated API client --------------
  // Each entry: [tool name, description, zod shape, handler].
  const localTools = [
    [
      "validate-pipeline",
      "Validate a DevRamps pipeline definition YAML file against the platform schema. Always uses the DevRamps API for authoritative validation. If authentication fails, instruct the user to run `npx @devramps/cli login`.",
      ValidatePipelineSchema.shape,
      async (args) => validatePipeline(args, apiClient, publicClient)
    ],
    [
      "scaffold-pipeline",
      "Generate a .devramps/<pipeline_name>/pipeline.yaml file from a complete pipeline definition object. The AI agent should analyze the codebase and construct the pipeline definition, then pass it to this tool to serialize and write the YAML file. Use the scaffold-project prompt for a guided workflow.",
      ScaffoldPipelineSchema.shape,
      async (args) => scaffoldPipeline(args)
    ],
    [
      "generate-iam-policies",
      "Write an IAM policy to .devramps/<pipeline_name>/aws_additional_iam_policies.json. The agent should first read the Terraform files, determine all AWS resources and actions needed, then construct a comprehensive IAM policy and pass it via the iam_policy parameter. Use broad service-level wildcards (e.g., 'ecs:*') to avoid first-deploy failures.",
      GenerateIamPoliciesSchema.shape,
      async (args) => generateIamPolicies(args)
    ]
  ];
  for (const [name, description, shape, handler] of localTools) {
    server.tool(name, description, shape, handler);
  }

  // ---- Tools that require an authenticated API client ----------------
  if (apiClient) {
    // Narrow to a non-null binding for the handler closures.
    const client = apiClient;
    const apiTools = [
      [
        "list-pipelines",
        "List all CI/CD pipelines in the DevRamps organization with status summaries including blocked, stopped, and failed stage information.",
        ListPipelinesSchema.shape,
        async (args) => listPipelines(client, args)
      ],
      [
        "get-pipeline-state",
        "Get the detailed current state of a pipeline including all stages, their statuses, active steps, blockers, alarms, and revision information.",
        GetPipelineStateSchema.shape,
        async (args) => getPipelineState(client, args)
      ],
      [
        "get-stage-health",
        "Get health metrics for a specific stage: success rate, deployment counts, and execution time percentiles (p50/p90/max) over the last 30 days.",
        GetStageHealthSchema.shape,
        async (args) => getStageHealth(client, args)
      ],
      [
        "retry-stage",
        "Retry a failed stage deployment. Use this when a stage has failed and you want to re-run it.",
        RetryStageSchema.shape,
        async (args) => retryStage(client, args)
      ],
      [
        "cancel-stage",
        "Cancel an in-progress stage deployment. Optionally provide a reason for the cancellation.",
        CancelStageSchema.shape,
        async (args) => cancelStage(client, args)
      ],
      [
        "stop-pipeline",
        "Stop a pipeline from promoting revisions to further stages. The pipeline can be resumed later with start-pipeline.",
        StopPipelineSchema.shape,
        async (args) => stopPipeline(client, args)
      ],
      [
        "start-pipeline",
        "Resume a stopped pipeline, allowing it to promote revisions to stages again.",
        StartPipelineSchema.shape,
        async (args) => startPipeline(client, args)
      ],
      [
        "stop-stage-promotion",
        "Stop promotion for a specific stage, preventing new revisions from being deployed to it.",
        StopStagePromotionSchema.shape,
        async (args) => stopStagePromotion(client, args)
      ],
      [
        "start-stage-promotion",
        "Resume promotion for a specific stage, allowing new revisions to be deployed to it again.",
        StartStagePromotionSchema.shape,
        async (args) => startStagePromotion(client, args)
      ],
      [
        "bypass-stage-blockers",
        "Force-bypass blockers on a stage (manual approvals, time windows, etc). WARNING: This skips safety checks \u2014 use with caution.",
        BypassStageBlockersSchema.shape,
        async (args) => bypassStageBlockers(client, args)
      ],
      [
        "get-step-logs",
        "Retrieve execution logs for a specific step in a stage deployment. Useful for debugging failed steps.",
        GetStepLogsSchema.shape,
        async (args) => getStepLogs(client, args)
      ],
      [
        "get-pipeline-events",
        "List events for a pipeline, useful for understanding deployment history and the sequence of actions that occurred.",
        GetPipelineEventsSchema.shape,
        async (args) => getPipelineEvents(client, args)
      ],
      [
        "list-revisions",
        "List pipeline revisions with their commit info, deployed stages, and timestamps.",
        ListRevisionsSchema.shape,
        async (args) => listRevisions(client, args)
      ],
      [
        "list-ephemeral-environments",
        "List all ephemeral environments for a pipeline with their current lock status, triggers, and region. Use this to discover available environments before claiming one.",
        ListEphemeralEnvironmentsSchema.shape,
        async (args) => listEphemeralEnvironments(client, args)
      ],
      [
        "claim-ephemeral-session",
        "Claim a lock on an API-triggered ephemeral environment and trigger an initial deployment. Returns a session ID for subsequent operations. If the environment is already locked, use force-claim-ephemeral-session to override.",
        ClaimEphemeralSessionSchema.shape,
        async (args) => claimEphemeralSession(client, args)
      ],
      [
        "force-claim-ephemeral-session",
        "Force-claim an ephemeral environment, overriding any existing lock. The previous session's deployments will be cancelled. Use this when the environment is locked by a stale or abandoned session.",
        ForceClaimEphemeralSessionSchema.shape,
        async (args) => forceClaimEphemeralSession(client, args)
      ],
      [
        "deploy-ephemeral-commit",
        "Deploy a new commit to an ephemeral environment within an active session. Triggers a full build and deploy cycle. Use get-ephemeral-session-status to monitor progress.",
        DeployEphemeralCommitSchema.shape,
        async (args) => deployEphemeralCommit(client, args)
      ],
      [
        "get-ephemeral-session-status",
        "Get the status of an ephemeral session including lock info and deployment progress with per-step details. Use this to monitor ongoing deployments or check if a session is still active.",
        GetEphemeralSessionStatusSchema.shape,
        async (args) => getEphemeralSessionStatus(client, args)
      ],
      [
        "release-ephemeral-session",
        "Release the lock on an ephemeral session, making the environment available for others. Any in-progress deployments will be cancelled.",
        ReleaseEphemeralSessionSchema.shape,
        async (args) => releaseEphemeralSession(client, args)
      ]
    ];
    for (const [name, description, shape, handler] of apiTools) {
      server.tool(name, description, shape, handler);
    }
  }

  // ---- Static documentation resources --------------------------------
  server.resource(
    "pipeline-schema",
    PIPELINE_SCHEMA_URI,
    getPipelineSchemaResource(),
    async () => ({
      contents: [
        {
          uri: PIPELINE_SCHEMA_URI,
          mimeType: "text/markdown",
          text: getPipelineSchemaContent()
        }
      ]
    })
  );
  server.resource(
    "step-type-docs",
    STEP_TYPE_DOCS_URI,
    getStepTypeDocsResource(),
    async () => ({
      contents: [
        {
          uri: STEP_TYPE_DOCS_URI,
          mimeType: "text/markdown",
          // apiClient may be null here; the content helper receives it as-is.
          text: await getStepTypeDocsContent(apiClient)
        }
      ]
    })
  );

  // ---- Guided-workflow prompts ----------------------------------------
  const prompts = [
    [SCAFFOLD_PROJECT_PROMPT, getScaffoldProjectMessages],
    [DEBUG_DEPLOYMENT_PROMPT, getDebugDeploymentMessages]
  ];
  for (const [prompt, buildMessages] of prompts) {
    server.prompt(prompt.name, prompt.description, prompt.argsSchema, async (args) => buildMessages(args));
  }

  // Serve the MCP protocol over stdio.
  const transport = new StdioServerTransport();
  await server.connect(transport);
}
2655
// Boot the server. Startup failures are fatal: report them on stderr
// (stdout belongs to the MCP stdio transport) and exit non-zero so the
// spawning MCP client notices the failure.
main().catch((error) => {
  console.error("Fatal error starting DevRamps MCP server:", error);
  process.exit(1);
});