@jaypie/mcp 0.3.2 → 0.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aws-B3dW_-bD.js +1202 -0
- package/dist/aws-B3dW_-bD.js.map +1 -0
- package/dist/index.js +166 -1209
- package/dist/index.js.map +1 -1
- package/dist/suite.d.ts +1 -0
- package/dist/suite.js +1252 -0
- package/dist/suite.js.map +1 -0
- package/package.json +8 -2
- package/prompts/Jaypie_Fabric_Package.md +86 -0
- package/release-notes/constructs/1.2.17.md +11 -0
- package/release-notes/fabric/0.1.2.md +11 -0
- package/release-notes/mcp/0.3.3.md +12 -0
- package/release-notes/mcp/0.3.4.md +36 -0
- package/skills/agents.md +25 -0
- package/skills/aws.md +107 -0
- package/skills/cdk.md +141 -0
- package/skills/cicd.md +152 -0
- package/skills/datadog.md +129 -0
- package/skills/debugging.md +148 -0
- package/skills/dns.md +134 -0
- package/skills/dynamodb.md +140 -0
- package/skills/errors.md +142 -0
- package/skills/fabric.md +164 -0
- package/skills/index.md +7 -0
- package/skills/jaypie.md +100 -0
- package/skills/legacy.md +97 -0
- package/skills/logs.md +160 -0
- package/skills/mocks.md +174 -0
- package/skills/models.md +195 -0
- package/skills/releasenotes.md +94 -0
- package/skills/secrets.md +155 -0
- package/skills/services.md +175 -0
- package/skills/style.md +190 -0
- package/skills/tests.md +209 -0
- package/skills/tools.md +127 -0
- package/skills/topics.md +116 -0
- package/skills/variables.md +146 -0
- package/skills/writing.md +153 -0
package/dist/index.js (CHANGED)
@@ -9,1212 +9,21 @@ import { z } from 'zod';
 import * as fs from 'node:fs/promises';
 import matter from 'gray-matter';
 import { gt } from 'semver';
-import * as https from 'node:https';
-import { Llm } from '@jaypie/llm';
-import { spawn } from 'node:child_process';
-import * as os from 'node:os';
+import { g as getDatadogCredentials, s as searchDatadogLogs, a as aggregateDatadogLogs, l as listDatadogMonitors, b as getDatadogSyntheticResults, c as listDatadogSynthetics, q as queryDatadogMetrics, d as searchDatadogRum, e as debugLlmCall, f as listLlmProviders, h as listAwsProfiles, i as listStepFunctionExecutions, j as stopStepFunctionExecution, k as listLambdaFunctions, m as getLambdaFunction, n as filterLogEvents, o as listS3Objects, p as describeStack, r as describeDynamoDBTable, t as scanDynamoDB, u as queryDynamoDB, v as getDynamoDBItem, w as listSQSQueues, x as getSQSQueueAttributes, y as receiveSQSMessage, z as purgeSQSQueue } from './aws-B3dW_-bD.js';
 import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
 import { randomUUID } from 'node:crypto';
+import 'node:https';
+import '@jaypie/llm';
+import 'node:child_process';
+import 'node:os';
 
-/**
-* Datadog API integration module
-*/
-const nullLogger$1 = {
-info: () => { },
-error: () => { },
-};
-/**
-* Get Datadog credentials from environment variables
-*/
-function getDatadogCredentials() {
-const apiKey = process.env.DATADOG_API_KEY || process.env.DD_API_KEY;
-const appKey = process.env.DATADOG_APP_KEY ||
-process.env.DATADOG_APPLICATION_KEY ||
-process.env.DD_APP_KEY ||
-process.env.DD_APPLICATION_KEY;
-if (!apiKey || !appKey) {
-return null;
-}
-return { apiKey, appKey };
-}
-/**
-* Build query string from environment variables and options
-*/
-function buildDatadogQuery(options) {
-const ddEnv = process.env.DD_ENV;
-const ddService = process.env.DD_SERVICE;
-const ddSource = process.env.DD_SOURCE;
-const ddQuery = process.env.DD_QUERY;
-const queryParts = [];
-// Add source (parameter > env var > default 'lambda')
-const effectiveSource = options.source || ddSource || "lambda";
-queryParts.push(`source:${effectiveSource}`);
-// Add env (parameter > env var)
-const effectiveEnv = options.env || ddEnv;
-if (effectiveEnv) {
-queryParts.push(`env:${effectiveEnv}`);
-}
-// Add service (parameter > env var)
-const effectiveService = options.service || ddService;
-if (effectiveService) {
-queryParts.push(`service:${effectiveService}`);
-}
-// Add base query from DD_QUERY if available
-if (ddQuery) {
-queryParts.push(ddQuery);
-}
-// Add user-provided query terms
-if (options.query) {
-queryParts.push(options.query);
-}
-return queryParts.join(" ");
-}
-/**
-* Search Datadog logs
-*/
-async function searchDatadogLogs(credentials, options = {}, logger = nullLogger$1) {
-const effectiveQuery = buildDatadogQuery(options);
-const effectiveFrom = options.from || "now-15m";
-const effectiveTo = options.to || "now";
-const effectiveLimit = Math.min(options.limit || 50, 1000);
-const effectiveSort = options.sort || "-timestamp";
-logger.info(`Effective query: ${effectiveQuery}`);
-logger.info(`Search params: from=${effectiveFrom}, to=${effectiveTo}, limit=${effectiveLimit}, sort=${effectiveSort}`);
-const requestBody = JSON.stringify({
-filter: {
-query: effectiveQuery,
-from: effectiveFrom,
-to: effectiveTo,
-},
-page: {
-limit: effectiveLimit,
-},
-sort: effectiveSort,
-});
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: "/api/v2/logs/events/search",
-method: "POST",
-headers: {
-"Content-Type": "application/json",
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-"Content-Length": Buffer.byteLength(requestBody),
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 400) {
-errorMessage = `Invalid query syntax. Check your query: "${effectiveQuery}". Datadog error: ${data}`;
-}
-else if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have logs_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
-}
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-logs: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-const logs = (response.data || []).map((log) => {
-const attrs = log.attributes || {};
-return {
-id: log.id,
-timestamp: attrs.timestamp,
-status: attrs.status,
-service: attrs.service,
-message: attrs.message,
-attributes: attrs.attributes,
-};
-});
-logger.info(`Retrieved ${logs.length} log entries`);
-resolve({
-success: true,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-logs,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog response:", parseError);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-logs: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-logs: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.write(requestBody);
-req.end();
-});
-}
-/**
-* Aggregate Datadog logs using the Analytics API
-* Groups logs by specified fields and computes aggregations
-*/
-async function aggregateDatadogLogs(credentials, options, logger = nullLogger$1) {
-const effectiveQuery = buildDatadogQuery(options);
-const effectiveFrom = options.from || "now-15m";
-const effectiveTo = options.to || "now";
-const groupBy = options.groupBy;
-const compute = options.compute || [{ aggregation: "count" }];
-logger.info(`Analytics query: ${effectiveQuery}`);
-logger.info(`Group by: ${groupBy.join(", ")}`);
-logger.info(`Time range: ${effectiveFrom} to ${effectiveTo}`);
-// Build compute array - each item needs aggregation and type
-const computeItems = compute.map((c) => {
-const item = {
-aggregation: c.aggregation,
-type: "total",
-};
-if (c.metric) {
-item.metric = c.metric;
-}
-return item;
-});
-// Build group_by with proper sort configuration
-const groupByItems = groupBy.map((field) => {
-const item = {
-facet: field,
-limit: 100,
-sort: {
-type: "measure",
-order: "desc",
-aggregation: compute[0]?.aggregation || "count",
-},
-};
-return item;
-});
-const requestBody = JSON.stringify({
-filter: {
-query: effectiveQuery,
-from: effectiveFrom,
-to: effectiveTo,
-},
-group_by: groupByItems,
-compute: computeItems,
-page: {
-limit: 100,
-},
-});
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: "/api/v2/logs/analytics/aggregate",
-method: "POST",
-headers: {
-"Content-Type": "application/json",
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-"Content-Length": Buffer.byteLength(requestBody),
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog Analytics API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 400) {
-errorMessage = `Invalid query or groupBy fields. Verify facet names exist: ${groupBy.join(", ")}. Datadog error: ${data}`;
-}
-else if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have logs_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
-}
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-groupBy,
-buckets: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-const buckets = (response.data?.buckets || []).map((bucket) => ({
-by: bucket.by || {},
-computes: bucket.computes || {},
-}));
-logger.info(`Retrieved ${buckets.length} aggregation buckets`);
-resolve({
-success: true,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-groupBy,
-buckets,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog analytics response:", parseError);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-groupBy,
-buckets: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-groupBy,
-buckets: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.write(requestBody);
-req.end();
-});
-}
-/**
-* List Datadog monitors with optional filtering
-*/
-async function listDatadogMonitors(credentials, options = {}, logger = nullLogger$1) {
-logger.info("Fetching Datadog monitors");
-const queryParams = new URLSearchParams();
-if (options.tags && options.tags.length > 0) {
-queryParams.set("tags", options.tags.join(","));
-}
-if (options.monitorTags && options.monitorTags.length > 0) {
-queryParams.set("monitor_tags", options.monitorTags.join(","));
-}
-if (options.name) {
-queryParams.set("name", options.name);
-}
-const queryString = queryParams.toString();
-const path = `/api/v1/monitor${queryString ? `?${queryString}` : ""}`;
-logger.info(`Request path: ${path}`);
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path,
-method: "GET",
-headers: {
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog Monitors API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have monitors_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again.";
-}
-resolve({
-success: false,
-monitors: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-let monitors = response.map((monitor) => ({
-id: monitor.id,
-name: monitor.name,
-type: monitor.type,
-status: monitor.overall_state || "Unknown",
-message: monitor.message,
-tags: monitor.tags || [],
-priority: monitor.priority,
-query: monitor.query,
-overallState: monitor.overall_state,
-}));
-// Filter by status if specified
-if (options.status && options.status.length > 0) {
-monitors = monitors.filter((m) => options.status.includes(m.status));
-}
-logger.info(`Retrieved ${monitors.length} monitors`);
-resolve({
-success: true,
-monitors,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog monitors response:", parseError);
-resolve({
-success: false,
-monitors: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-monitors: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.end();
-});
-}
-/**
-* List Datadog Synthetic tests
-*/
-async function listDatadogSynthetics(credentials, options = {}, logger = nullLogger$1) {
-logger.info("Fetching Datadog Synthetic tests");
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: "/api/v1/synthetics/tests",
-method: "GET",
-headers: {
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog Synthetics API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have synthetics_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again.";
-}
-resolve({
-success: false,
-tests: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-let tests = (response.tests || []).map((test) => ({
-publicId: test.public_id,
-name: test.name,
-type: test.type,
-status: test.status,
-tags: test.tags || [],
-locations: test.locations || [],
-message: test.message,
-}));
-// Filter by type if specified
-if (options.type) {
-tests = tests.filter((t) => t.type === options.type);
-}
-// Filter by tags if specified
-if (options.tags && options.tags.length > 0) {
-tests = tests.filter((t) => options.tags.some((tag) => t.tags.includes(tag)));
-}
-logger.info(`Retrieved ${tests.length} synthetic tests`);
-resolve({
-success: true,
-tests,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog synthetics response:", parseError);
-resolve({
-success: false,
-tests: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-tests: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.end();
-});
-}
-/**
-* Get recent results for a specific Synthetic test
-*/
-async function getDatadogSyntheticResults(credentials, publicId, logger = nullLogger$1) {
-logger.info(`Fetching results for Synthetic test: ${publicId}`);
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: `/api/v1/synthetics/tests/${publicId}/results`,
-method: "GET",
-headers: {
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog Synthetics Results API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have synthetics_read permission.";
-}
-else if (res.statusCode === 404) {
-errorMessage = `Synthetic test '${publicId}' not found. Use datadog_synthetics (without testId) to list available tests.`;
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again.";
-}
-resolve({
-success: false,
-publicId,
-results: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-const results = (response.results || []).map((result) => ({
-publicId,
-resultId: result.result_id,
-status: result.status,
-checkTime: result.check_time,
-passed: result.result?.passed ?? result.status === 0,
-location: result.dc_id?.toString(),
-}));
-logger.info(`Retrieved ${results.length} synthetic results`);
-resolve({
-success: true,
-publicId,
-results,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog synthetic results:", parseError);
-resolve({
-success: false,
-publicId,
-results: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-publicId,
-results: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.end();
-});
-}
-/**
-* Query Datadog metrics
-*/
-async function queryDatadogMetrics(credentials, options, logger = nullLogger$1) {
-logger.info(`Querying metrics: ${options.query}`);
-logger.info(`Time range: ${options.from} to ${options.to}`);
-const queryParams = new URLSearchParams({
-query: options.query,
-from: options.from.toString(),
-to: options.to.toString(),
-});
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: `/api/v1/query?${queryParams.toString()}`,
-method: "GET",
-headers: {
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog Metrics API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-if (res.statusCode === 400) {
-errorMessage = `Invalid metric query. Check format: 'aggregation:metric.name{tags}'. Query: "${options.query}". Datadog error: ${data}`;
-}
-else if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have metrics_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again, or reduce your time range.";
-}
-resolve({
-success: false,
-query: options.query,
-timeRange: { from: options.from, to: options.to },
-series: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-const series = (response.series || []).map((s) => ({
-metric: s.metric,
-scope: s.scope,
-pointlist: s.pointlist,
-unit: s.unit?.[0]?.name,
-}));
-logger.info(`Retrieved ${series.length} metric series`);
-resolve({
-success: true,
-query: options.query,
-timeRange: { from: options.from, to: options.to },
-series,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog metrics response:", parseError);
-resolve({
-success: false,
-query: options.query,
-timeRange: { from: options.from, to: options.to },
-series: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-query: options.query,
-timeRange: { from: options.from, to: options.to },
-series: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.end();
-});
-}
-/**
-* Search Datadog RUM events
-*/
-async function searchDatadogRum(credentials, options = {}, logger = nullLogger$1) {
-const effectiveQuery = options.query || "*";
-const effectiveFrom = options.from || "now-15m";
-const effectiveTo = options.to || "now";
-const effectiveLimit = Math.min(options.limit || 50, 1000);
-const effectiveSort = options.sort || "-timestamp";
-logger.info(`RUM query: ${effectiveQuery}`);
-logger.info(`Time range: ${effectiveFrom} to ${effectiveTo}`);
-const requestBody = JSON.stringify({
-filter: {
-query: effectiveQuery,
-from: effectiveFrom,
-to: effectiveTo,
-},
-page: {
-limit: effectiveLimit,
-},
-sort: effectiveSort,
-});
-return new Promise((resolve) => {
-const requestOptions = {
-hostname: "api.datadoghq.com",
-port: 443,
-path: "/api/v2/rum/events/search",
-method: "POST",
-headers: {
-"Content-Type": "application/json",
-"DD-API-KEY": credentials.apiKey,
-"DD-APPLICATION-KEY": credentials.appKey,
-"Content-Length": Buffer.byteLength(requestBody),
-},
-};
-const req = https.request(requestOptions, (res) => {
-let data = "";
-res.on("data", (chunk) => {
-data += chunk.toString();
-});
-res.on("end", () => {
-logger.info(`Response status: ${res.statusCode}`);
-if (res.statusCode !== 200) {
-logger.error(`Datadog RUM API error: ${res.statusCode}`);
-let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
-// Check for specific "No valid indexes" error which means no RUM app is configured
-if (data.includes("No valid indexes")) {
-errorMessage =
-"No RUM application found. Ensure you have a RUM application configured in Datadog and it has collected data. " +
-"You can create a RUM application at https://app.datadoghq.com/rum/list";
-}
-else if (res.statusCode === 400) {
-errorMessage = `Invalid RUM query. Check syntax: "${effectiveQuery}". Datadog error: ${data}`;
-}
-else if (res.statusCode === 403) {
-errorMessage =
-"Access denied. Verify your API and Application keys have rum_read permission.";
-}
-else if (res.statusCode === 429) {
-errorMessage =
-"Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
-}
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-events: [],
-error: errorMessage,
-});
-return;
-}
-try {
-const response = JSON.parse(data);
-const events = (response.data || []).map((event) => {
-const attrs = event.attributes?.attributes || {};
-return {
-id: event.id,
-type: event.type,
-timestamp: event.attributes?.timestamp,
-sessionId: attrs.session?.id,
-viewUrl: attrs.view?.url,
-viewName: attrs.view?.name,
-errorMessage: attrs.error?.message,
-errorType: attrs.error?.type,
-attributes: attrs,
-};
-});
-logger.info(`Retrieved ${events.length} RUM events`);
-resolve({
-success: true,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-events,
-});
-}
-catch (parseError) {
-logger.error("Failed to parse Datadog RUM response:", parseError);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-events: [],
-error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
-});
-}
-});
-});
-req.on("error", (error) => {
-logger.error("Request error:", error);
-resolve({
-success: false,
-query: effectiveQuery,
-timeRange: { from: effectiveFrom, to: effectiveTo },
-events: [],
-error: `Connection error: ${error.message}`,
-});
-});
-req.write(requestBody);
-req.end();
-});
-}
-
-/**
-* LLM debugging utilities for inspecting raw provider responses
-*/
-// Default models for each provider
-const DEFAULT_MODELS = {
-anthropic: "claude-sonnet-4-20250514",
-gemini: "gemini-2.0-flash",
-openai: "gpt-4o-mini",
-openrouter: "openai/gpt-4o-mini",
-};
-/**
-* Make a debug LLM call and return the raw response data for inspection
-*/
-async function debugLlmCall(params, log) {
-const { provider, message } = params;
-const model = params.model || DEFAULT_MODELS[provider];
-log.info(`Making debug LLM call to ${provider} with model ${model}`);
-try {
-const llm = new Llm(provider, { model });
-const result = await llm.operate(message, {
-user: "[jaypie-mcp] Debug LLM Call",
-});
-if (result.error) {
-return {
-success: false,
-provider,
-model,
-error: `${result.error.title}: ${result.error.detail || "Unknown error"}`,
-};
-}
-// Calculate total reasoning tokens
-const reasoningTokens = result.usage.reduce((sum, u) => sum + (u.reasoning || 0), 0);
-return {
-success: true,
-provider,
-model,
-content: typeof result.content === "string"
-? result.content
-: JSON.stringify(result.content),
-reasoning: result.reasoning,
-reasoningTokens,
-history: result.history,
-rawResponses: result.responses,
-usage: result.usage,
-};
-}
-catch (error) {
-log.error(`Error calling ${provider}:`, error);
-return {
-success: false,
-provider,
-model,
-error: error instanceof Error ? error.message : String(error),
-};
-}
-}
-/**
-* List available providers and their default/reasoning models
-*/
-function listLlmProviders() {
-return {
-providers: [
-{
-name: "openai",
-defaultModel: DEFAULT_MODELS.openai,
-reasoningModels: ["o3-mini", "o1-preview", "o1-mini"],
-},
-{
-name: "anthropic",
-defaultModel: DEFAULT_MODELS.anthropic,
-reasoningModels: [], // Anthropic doesn't expose reasoning the same way
-},
-{
-name: "gemini",
-defaultModel: DEFAULT_MODELS.gemini,
-reasoningModels: [], // Gemini has thoughtsTokenCount but unclear on content
-},
-{
-name: "openrouter",
-defaultModel: DEFAULT_MODELS.openrouter,
-reasoningModels: ["openai/o3-mini", "openai/o1-preview"],
-},
-],
-};
-}
-
-/**
-* AWS CLI integration module
-* Provides a structured interface for common AWS operations via the AWS CLI
-*/
-const nullLogger = {
-info: () => { },
-error: () => { },
-};
-/**
-* Parse AWS CLI error messages into user-friendly descriptions
-*/
-function parseAwsError(stderr, service, command) {
-if (stderr.includes("ExpiredToken") || stderr.includes("Token has expired")) {
-return "AWS credentials have expired. Run 'aws sso login' or refresh your credentials.";
-}
-if (stderr.includes("NoCredentialProviders") ||
-stderr.includes("Unable to locate credentials")) {
-return "No AWS credentials found. Configure credentials with 'aws configure' or 'aws sso login'.";
-}
-if (stderr.includes("AccessDenied") || stderr.includes("Access Denied")) {
-return `Access denied for ${service}:${command}. Check your IAM permissions.`;
-}
-if (stderr.includes("ResourceNotFoundException")) {
-return `Resource not found. Check that the specified resource exists in the correct region.`;
-}
-if (stderr.includes("ValidationException")) {
-const match = stderr.match(/ValidationException[^:]*:\s*(.+)/);
-return match
-? `Validation error: ${match[1].trim()}`
-: "Validation error in request parameters.";
-}
-if (stderr.includes("ThrottlingException") ||
-stderr.includes("Rate exceeded")) {
-return "AWS API rate limit exceeded. Wait a moment and try again.";
-}
-if (stderr.includes("InvalidParameterValue")) {
-const match = stderr.match(/InvalidParameterValue[^:]*:\s*(.+)/);
-return match
-? `Invalid parameter: ${match[1].trim()}`
-: "Invalid parameter value provided.";
-}
-return stderr.trim();
-}
-/**
-* Parse relative time strings like 'now-1h' to Unix timestamps
-*/
-function parseRelativeTime(timeStr) {
-const now = Date.now();
-if (timeStr === "now") {
-return now;
-}
-// Handle relative time like 'now-15m', 'now-1h', 'now-1d'
-const relativeMatch = timeStr.match(/^now-(\d+)([smhd])$/);
-if (relativeMatch) {
-const value = parseInt(relativeMatch[1], 10);
-const unit = relativeMatch[2];
-const multipliers = {
-s: 1000,
-m: 60 * 1000,
-h: 60 * 60 * 1000,
-d: 24 * 60 * 60 * 1000,
-};
-return now - value * multipliers[unit];
-}
-// Handle ISO 8601 format
-const parsed = Date.parse(timeStr);
-if (!isNaN(parsed)) {
-return parsed;
-}
-// Default to the current time if parsing fails
-return now;
-}
-/**
-* Execute an AWS CLI command and return parsed JSON output
-*/
-async function executeAwsCommand(service, command, args, options = {}, logger = nullLogger) {
-const fullArgs = [service, command, ...args, "--output", "json"];
-if (options.profile) {
-fullArgs.push("--profile", options.profile);
-}
-if (options.region) {
-fullArgs.push("--region", options.region);
-}
-logger.info(`Executing: aws ${fullArgs.join(" ")}`);
-return new Promise((resolve) => {
-const proc = spawn("aws", fullArgs);
-let stdout = "";
-let stderr = "";
-proc.stdout.on("data", (data) => {
-stdout += data.toString();
-});
-proc.stderr.on("data", (data) => {
-stderr += data.toString();
-});
-proc.on("close", (code) => {
-if (code !== 0) {
-logger.error(`AWS CLI error: ${stderr}`);
-resolve({
-success: false,
-error: parseAwsError(stderr, service, command),
-});
-return;
-}
-// Handle empty output (some commands return nothing on success)
-if (!stdout.trim()) {
-resolve({ success: true });
-return;
-}
-try {
-const data = JSON.parse(stdout);
-resolve({ success: true, data });
-}
-catch {
-// Some commands return plain text
-resolve({ success: true, data: stdout.trim() });
-}
-});
-proc.on("error", (error) => {
-if (error.message.includes("ENOENT")) {
-resolve({
-success: false,
-error: "AWS CLI not found. Install it from https://aws.amazon.com/cli/",
-});
-}
-else {
-resolve({ success: false, error: error.message });
-}
-});
-});
-}
-/**
-* List available AWS profiles from ~/.aws/config and ~/.aws/credentials
-*/
-async function listAwsProfiles(logger = nullLogger) {
-const profiles = [];
-const homeDir = os.homedir();
-try {
-// Parse ~/.aws/config
-const configPath = path.join(homeDir, ".aws", "config");
-try {
-const configContent = await fs.readFile(configPath, "utf-8");
-const profileRegex = /\[profile\s+([^\]]+)\]|\[default\]/g;
-let match;
-while ((match = profileRegex.exec(configContent)) !== null) {
-const name = match[1] || "default";
-profiles.push({
-name,
-source: "config",
-});
-}
-logger.info(`Found ${profiles.length} profiles in config`);
-}
-catch {
-logger.info("No ~/.aws/config file found");
-}
-// Parse ~/.aws/credentials
-const credentialsPath = path.join(homeDir, ".aws", "credentials");
-try {
-const credentialsContent = await fs.readFile(credentialsPath, "utf-8");
-const profileRegex = /\[([^\]]+)\]/g;
-let match;
-while ((match = profileRegex.exec(credentialsContent)) !== null) {
-const name = match[1];
-// Only add if not already in the list
-if (!profiles.find((p) => p.name === name)) {
-profiles.push({
-name,
-source: "credentials",
-});
-}
-}
-logger.info(`Total profiles after credentials: ${profiles.length}`);
-}
-catch {
-logger.info("No ~/.aws/credentials file found");
-}
-return { success: true, data: profiles };
-}
-catch (error) {
-const errorMessage = error instanceof Error ? error.message : "Unknown error";
-logger.error(`Error listing profiles: ${errorMessage}`);
-return { success: false, error: errorMessage };
-}
-}
-// Step Functions operations
-async function listStepFunctionExecutions(options, logger = nullLogger) {
-const args = ["--state-machine-arn", options.stateMachineArn];
-if (options.statusFilter) {
-args.push("--status-filter", options.statusFilter);
-}
-if (options.maxResults) {
-args.push("--max-results", String(options.maxResults));
-}
-return executeAwsCommand("stepfunctions", "list-executions", args, { profile: options.profile, region: options.region }, logger);
-}
-async function stopStepFunctionExecution(options, logger = nullLogger) {
-const args = ["--execution-arn", options.executionArn];
-if (options.cause) {
-args.push("--cause", options.cause);
-}
-return executeAwsCommand("stepfunctions", "stop-execution", args, { profile: options.profile, region: options.region }, logger);
-}
-// Lambda operations
-async function listLambdaFunctions(options = {}, logger = nullLogger) {
-const args = [];
-if (options.maxResults) {
-args.push("--max-items", String(options.maxResults));
-}
-const result = await executeAwsCommand("lambda", "list-functions", args, { profile: options.profile, region: options.region }, logger);
-// Filter by prefix if specified
-if (result.success && result.data && options.functionNamePrefix) {
-result.data.Functions = result.data.Functions.filter((f) => f.FunctionName.startsWith(options.functionNamePrefix));
-}
-return result;
-}
-async function getLambdaFunction(options, logger = nullLogger) {
-return executeAwsCommand("lambda", "get-function", ["--function-name", options.functionName], { profile: options.profile, region: options.region }, logger);
-}
-// CloudWatch Logs operations
-async function filterLogEvents(options, logger = nullLogger) {
-const args = ["--log-group-name", options.logGroupName];
-if (options.filterPattern) {
-args.push("--filter-pattern", options.filterPattern);
-}
-if (options.startTime) {
-const startMs = parseRelativeTime(options.startTime);
-args.push("--start-time", String(startMs));
-}
-if (options.endTime) {
-const endMs = parseRelativeTime(options.endTime);
-args.push("--end-time", String(endMs));
-}
-{
-args.push("--limit", String(options.limit));
-}
-return executeAwsCommand("logs", "filter-log-events", args, { profile: options.profile, region: options.region }, logger);
-}
-// S3 operations
-async function listS3Objects(options, logger = nullLogger) {
-const args = ["--bucket", options.bucket];
-if (options.prefix) {
-args.push("--prefix", options.prefix);
-}
-if (options.maxResults) {
-args.push("--max-items", String(options.maxResults));
-}
-return executeAwsCommand("s3api", "list-objects-v2", args, { profile: options.profile, region: options.region }, logger);
-}
-// CloudFormation operations
-async function describeStack(options, logger = nullLogger) {
-return executeAwsCommand("cloudformation", "describe-stacks", ["--stack-name", options.stackName], { profile: options.profile, region: options.region }, logger);
-}
-// DynamoDB operations
-async function describeDynamoDBTable(options, logger = nullLogger) {
-return executeAwsCommand("dynamodb", "describe-table", ["--table-name", options.tableName], { profile: options.profile, region: options.region }, logger);
-}
-async function scanDynamoDB(options, logger = nullLogger) {
-const args = ["--table-name", options.tableName];
-if (options.filterExpression) {
-args.push("--filter-expression", options.filterExpression);
-}
-if (options.expressionAttributeValues) {
-args.push("--expression-attribute-values", options.expressionAttributeValues);
-}
-{
-args.push("--limit", String(options.limit));
-}
-return executeAwsCommand("dynamodb", "scan", args, { profile: options.profile, region: options.region }, logger);
-}
-async function queryDynamoDB(options, logger = nullLogger) {
-const args = [
-"--table-name",
-options.tableName,
-"--key-condition-expression",
-options.keyConditionExpression,
-"--expression-attribute-values",
-options.expressionAttributeValues,
-];
-if (options.indexName) {
-args.push("--index-name", options.indexName);
-}
-if (options.filterExpression) {
-args.push("--filter-expression", options.filterExpression);
-}
-if (options.limit) {
-args.push("--limit", String(options.limit));
-}
-if (options.scanIndexForward === false) {
-args.push("--no-scan-index-forward");
-}
-return executeAwsCommand("dynamodb", "query", args, { profile: options.profile, region: options.region }, logger);
-}
-async function getDynamoDBItem(options, logger = nullLogger) {
-return executeAwsCommand("dynamodb", "get-item", ["--table-name", options.tableName, "--key", options.key], { profile: options.profile, region: options.region }, logger);
-}
-// SQS operations
-async function listSQSQueues(options = {}, logger = nullLogger) {
-const args = [];
-if (options.queueNamePrefix) {
-args.push("--queue-name-prefix", options.queueNamePrefix);
-}
-return executeAwsCommand("sqs", "list-queues", args, { profile: options.profile, region: options.region }, logger);
-}
-async function getSQSQueueAttributes(options, logger = nullLogger) {
-return executeAwsCommand("sqs", "get-queue-attributes", ["--queue-url", options.queueUrl, "--attribute-names", "All"], { profile: options.profile, region: options.region }, logger);
-}
-async function receiveSQSMessage(options, logger = nullLogger) {
-const args = ["--queue-url", options.queueUrl];
-{
-args.push("--max-number-of-messages", String(options.maxNumberOfMessages));
-}
-{
-args.push("--visibility-timeout", String(options.visibilityTimeout));
-}
-args.push("--attribute-names", "All");
-return executeAwsCommand("sqs", "receive-message", args, { profile: options.profile, region: options.region }, logger);
-}
-async function purgeSQSQueue(options, logger = nullLogger) {
-return executeAwsCommand("sqs", "purge-queue", ["--queue-url", options.queueUrl], { profile: options.profile, region: options.region }, logger);
-}
-
-const BUILD_VERSION_STRING = "@jaypie/mcp@0.3.2#f7a87775"
+const BUILD_VERSION_STRING = "@jaypie/mcp@0.3.4#a6510094"
 ;
 const __filename$1 = fileURLToPath(import.meta.url);
 const __dirname$1 = path.dirname(__filename$1);
 const PROMPTS_PATH = path.join(__dirname$1, "..", "prompts");
 const RELEASE_NOTES_PATH = path.join(__dirname$1, "..", "release-notes");
+const SKILLS_PATH = path.join(__dirname$1, "..", "skills");
 // Logger utility
 function createLogger(verbose) {
 return {
@@ -1331,6 +140,42 @@ function filterReleaseNotesSince(notes, sinceVersion) {
 }
 });
 }
+function isValidSkillAlias(alias) {
+const normalized = alias.toLowerCase().trim();
+// Reject if contains path separators or traversal
+if (normalized.includes("/") ||
+normalized.includes("\\") ||
+normalized.includes("..")) {
+return false;
+}
+// Only allow alphanumeric, hyphens, underscores
+return /^[a-z0-9_-]+$/.test(normalized);
+}
+async function parseSkillFile(filePath) {
+try {
+const content = await fs.readFile(filePath, "utf-8");
+const alias = path.basename(filePath, ".md");
+if (content.startsWith("---")) {
+const parsed = matter(content);
+const frontMatter = parsed.data;
+return {
+alias,
+description: frontMatter.description,
+};
+}
+return { alias };
+}
+catch {
+return { alias: path.basename(filePath, ".md") };
+}
+}
+function formatSkillListItem(skill) {
+const { alias, description } = skill;
+if (description) {
+return `* ${alias} - ${description}`;
+}
+return `* ${alias}`;
+}
 /**
 * Creates and configures an MCP server instance with Jaypie tools
 * @param options - Configuration options (or legacy version string)
@@ -1350,7 +195,7 @@ function createMcpServer(options = {}) {
 capabilities: {},
 });
 log.info("Registering tools...");
-server.tool("list_prompts", "List available Jaypie development prompts and guides. Use this FIRST when starting work on a Jaypie project to discover relevant documentation. Returns filenames, descriptions, and which file patterns each prompt applies to (e.g., 'Required for packages/express/**').", {}, async () => {
+server.tool("list_prompts", "[DEPRECATED: Use skill('index') instead] List available Jaypie development prompts and guides. Use this FIRST when starting work on a Jaypie project to discover relevant documentation. Returns filenames, descriptions, and which file patterns each prompt applies to (e.g., 'Required for packages/express/**').", {}, async () => {
 log.info("Tool called: list_prompts");
 log.info(`Reading directory: ${PROMPTS_PATH}`);
 try {
@@ -1382,7 +227,7 @@ function createMcpServer(options = {}) {
 }
 });
 log.info("Registered tool: list_prompts");
-server.tool("read_prompt", "Read a Jaypie prompt/guide by filename. Call list_prompts first to see available prompts. These contain best practices, templates, code patterns, and step-by-step guides for Jaypie development tasks.", {
+server.tool("read_prompt", "[DEPRECATED: Use skill(alias) instead] Read a Jaypie prompt/guide by filename. Call list_prompts first to see available prompts. These contain best practices, templates, code patterns, and step-by-step guides for Jaypie development tasks.", {
 filename: z
 .string()
 .describe("The prompt filename from list_prompts (e.g., 'Jaypie_Express_Package.md', 'Development_Process.md')"),
@@ -1426,6 +271,124 @@ function createMcpServer(options = {}) {
 }
 });
 log.info("Registered tool: read_prompt");
+// Skill tool - new unified documentation access
+server.tool("skill", "Access Jaypie development documentation. Pass a skill alias (e.g., 'aws', 'tests', 'errors') to get that documentation. Pass 'index' or no argument to list all available skills.", {
+alias: z
+.string()
+.optional()
+.describe("Skill alias (e.g., 'aws', 'tests', 'errors'). Omit or use 'index' to list all skills."),
+}, async ({ alias }) => {
+const effectiveAlias = alias?.toLowerCase().trim() || "index";
+log.info(`Tool called: skill (alias: ${effectiveAlias})`);
+// Validate alias for path traversal
+if (!isValidSkillAlias(effectiveAlias)) {
+log.error(`Invalid skill alias: ${effectiveAlias}`);
+return {
+content: [
+{
+type: "text",
+text: `Error: Invalid skill alias "${alias}". Aliases may only contain letters, numbers, hyphens, and underscores.`,
+},
+],
+};
+}
+try {
+// Handle index: return index.md content plus list of all skills
+if (effectiveAlias === "index") {
+let indexContent = "";
+// Try to read index.md if it exists
+try {
+const indexPath = path.join(SKILLS_PATH, "index.md");
+const rawContent = await fs.readFile(indexPath, "utf-8");
+// Strip frontmatter if present
+if (rawContent.startsWith("---")) {
+const parsed = matter(rawContent);
+indexContent = parsed.content.trim();
+}
+else {
+indexContent = rawContent;
+}
+}
+catch {
+// No index.md, that's fine
+}
+// Get list of all skills
+const files = await fs.readdir(SKILLS_PATH);
+const mdFiles = files.filter((file) => file.endsWith(".md") && file !== "index.md");
+const skills = await Promise.all(mdFiles.map((file) => parseSkillFile(path.join(SKILLS_PATH, file))));
+// Sort alphabetically
+skills.sort((a, b) => a.alias.localeCompare(b.alias));
+const skillList = skills.map(formatSkillListItem).join("\n");
+const resultText = indexContent
+? `${indexContent}\n\n## Available Skills\n\n${skillList}`
+: `# Jaypie Skills\n\n## Available Skills\n\n${skillList}`;
+log.info("Successfully returned skill index");
+return {
+content: [
+{
+type: "text",
+text: resultText,
+},
+],
+};
+}
+// Read specific skill file
+const skillPath = path.join(SKILLS_PATH, `${effectiveAlias}.md`);
+log.info(`Reading skill file: ${skillPath}`);
+const content = await fs.readFile(skillPath, "utf-8");
+log.info(`Successfully read skill ${effectiveAlias} (${content.length} bytes)`);
+return {
+content: [
+{
+type: "text",
+text: content,
+},
+],
+};
+}
+catch (error) {
+if (error.code === "ENOENT") {
+log.error(`Skill not found: ${effectiveAlias}`);
+// Suggest available skills
+try {
+const files = await fs.readdir(SKILLS_PATH);
+const available = files
+.filter((f) => f.endsWith(".md"))
+.map((f) => f.replace(".md", ""))
+.sort()
+.join(", ");
+return {
+content: [
+{
+type: "text",
+text: `Error: Skill "${effectiveAlias}" not found.\n\nAvailable skills: ${available}\n\nUse skill("index") to see all skills with descriptions.`,
+},
+],
+};
+}
+catch {
+return {
+content: [
+{
+type: "text",
+text: `Error: Skill "${effectiveAlias}" not found. Use skill("index") to list available skills.`,
+},
+],
+};
+}
+}
+log.error("Error reading skill:", error);
+return {
+content: [
+{
+type: "text",
+text: `Error reading skill: ${error instanceof Error ? error.message : "Unknown error"}`,
+},
+],
+};
+}
+});
+log.info("Registered tool: skill");
 server.tool("version", `Prints the current version and hash, \`${BUILD_VERSION_STRING}\``, {}, async () => {
 log.info("Tool called: version");
 return {
@@ -1482,9 +445,7 @@ function createMcpServer(options = {}) {
 flatNotes = filterReleaseNotesSince(flatNotes, sinceVersion);
 }
 if (flatNotes.length === 0) {
-const filterDesc = sinceVersion
-? ` newer than ${sinceVersion}`
-: "";
+const filterDesc = sinceVersion ? ` newer than ${sinceVersion}` : "";
 return {
 content: [
 {
@@ -1521,12 +482,8 @@ function createMcpServer(options = {}) {
 });
 log.info("Registered tool: list_release_notes");
 server.tool("read_release_note", "Read the full content of a specific release note. Call list_release_notes first to see available versions.", {
-package: z
-.string()
-.describe("Package name (e.g., 'jaypie', 'mcp')"),
-version: z
-.string()
-.describe("Version number (e.g., '1.2.3')"),
+package: z.string().describe("Package name (e.g., 'jaypie', 'mcp')"),
+version: z.string().describe("Version number (e.g., '1.2.3')"),
 }, async ({ package: packageName, version }) => {
 log.info(`Tool called: read_release_note (package: ${packageName}, version: ${version})`);
 try {