@jaypie/mcp 0.3.2 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/createMcpServer.d.ts +7 -1
- package/dist/index.js +26 -3135
- package/dist/index.js.map +1 -1
- package/dist/suite.d.ts +1 -0
- package/dist/suite.js +2442 -0
- package/dist/suite.js.map +1 -0
- package/package.json +8 -3
- package/release-notes/constructs/1.2.17.md +11 -0
- package/release-notes/fabric/0.1.2.md +11 -0
- package/release-notes/fabric/0.1.3.md +25 -0
- package/release-notes/fabric/0.1.4.md +42 -0
- package/release-notes/mcp/0.3.3.md +12 -0
- package/release-notes/mcp/0.3.4.md +36 -0
- package/release-notes/mcp/0.4.0.md +27 -0
- package/release-notes/testkit/1.2.15.md +23 -0
- package/skills/agents.md +25 -0
- package/skills/aws.md +107 -0
- package/skills/cdk.md +141 -0
- package/skills/cicd.md +152 -0
- package/skills/datadog.md +129 -0
- package/skills/debugging.md +148 -0
- package/skills/dns.md +134 -0
- package/skills/dynamodb.md +140 -0
- package/skills/errors.md +142 -0
- package/skills/fabric.md +191 -0
- package/skills/index.md +7 -0
- package/skills/jaypie.md +100 -0
- package/skills/legacy.md +97 -0
- package/skills/logs.md +160 -0
- package/skills/mocks.md +174 -0
- package/skills/models.md +195 -0
- package/skills/releasenotes.md +94 -0
- package/skills/secrets.md +155 -0
- package/skills/services.md +175 -0
- package/skills/style.md +190 -0
- package/skills/tests.md +209 -0
- package/skills/tools.md +127 -0
- package/skills/topics.md +116 -0
- package/skills/variables.md +146 -0
- package/skills/writing.md +153 -0
- package/prompts/Branch_Management.md +0 -34
- package/prompts/Development_Process.md +0 -89
- package/prompts/Jaypie_Agent_Rules.md +0 -110
- package/prompts/Jaypie_Auth0_Express_Mongoose.md +0 -736
- package/prompts/Jaypie_Browser_and_Frontend_Web_Packages.md +0 -18
- package/prompts/Jaypie_CDK_Constructs_and_Patterns.md +0 -430
- package/prompts/Jaypie_CICD_with_GitHub_Actions.md +0 -371
- package/prompts/Jaypie_Commander_CLI_Package.md +0 -166
- package/prompts/Jaypie_Core_Errors_and_Logging.md +0 -39
- package/prompts/Jaypie_DynamoDB_Package.md +0 -774
- package/prompts/Jaypie_Eslint_NPM_Package.md +0 -78
- package/prompts/Jaypie_Express_Package.md +0 -630
- package/prompts/Jaypie_Fabric_Commander.md +0 -411
- package/prompts/Jaypie_Fabric_LLM.md +0 -312
- package/prompts/Jaypie_Fabric_Lambda.md +0 -308
- package/prompts/Jaypie_Fabric_MCP.md +0 -316
- package/prompts/Jaypie_Fabric_Package.md +0 -513
- package/prompts/Jaypie_Fabricator.md +0 -617
- package/prompts/Jaypie_Ideal_Project_Structure.md +0 -78
- package/prompts/Jaypie_Init_CICD_with_GitHub_Actions.md +0 -1186
- package/prompts/Jaypie_Init_Express_on_Lambda.md +0 -115
- package/prompts/Jaypie_Init_Jaypie_CDK_Package.md +0 -35
- package/prompts/Jaypie_Init_Lambda_Package.md +0 -505
- package/prompts/Jaypie_Init_Monorepo_Project.md +0 -44
- package/prompts/Jaypie_Init_Project_Subpackage.md +0 -65
- package/prompts/Jaypie_Legacy_Patterns.md +0 -15
- package/prompts/Jaypie_Llm_Calls.md +0 -449
- package/prompts/Jaypie_Llm_Tools.md +0 -155
- package/prompts/Jaypie_MCP_Package.md +0 -281
- package/prompts/Jaypie_Mocks_and_Testkit.md +0 -137
- package/prompts/Jaypie_Repokit.md +0 -103
- package/prompts/Jaypie_Scrub.md +0 -177
- package/prompts/Jaypie_Streaming.md +0 -467
- package/prompts/Templates_CDK_Subpackage.md +0 -115
- package/prompts/Templates_Express_Subpackage.md +0 -187
- package/prompts/Templates_Project_Monorepo.md +0 -326
- package/prompts/Templates_Project_Subpackage.md +0 -93
- package/prompts/Write_Efficient_Prompt_Guides.md +0 -48
- package/prompts/Write_and_Maintain_Engaging_Readme.md +0 -67
package/dist/suite.js
ADDED
|
@@ -0,0 +1,2442 @@
|
|
|
1
|
+
import { fabricService, createServiceSuite } from '@jaypie/fabric';
|
|
2
|
+
import * as fs from 'node:fs/promises';
|
|
3
|
+
import * as path from 'node:path';
|
|
4
|
+
import { fileURLToPath } from 'node:url';
|
|
5
|
+
import matter from 'gray-matter';
|
|
6
|
+
import { gt } from 'semver';
|
|
7
|
+
import * as https from 'node:https';
|
|
8
|
+
import { Llm } from '@jaypie/llm';
|
|
9
|
+
import { spawn } from 'node:child_process';
|
|
10
|
+
import * as os from 'node:os';
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Datadog API integration module
|
|
14
|
+
*/
|
|
15
|
+
// Default logger stub: silently discards all messages when no logger is supplied.
const nullLogger$1 = {
    info() {},
    error() {},
};
|
|
19
|
+
/**
 * Read Datadog credentials from the environment.
 *
 * Checks several env-var spellings in priority order:
 * API key: DATADOG_API_KEY, then DD_API_KEY.
 * App key: DATADOG_APP_KEY, DATADOG_APPLICATION_KEY, DD_APP_KEY, DD_APPLICATION_KEY.
 *
 * @returns {{ apiKey: string, appKey: string } | null} Both keys, or null when
 *   either is missing (empty strings count as missing).
 */
function getDatadogCredentials() {
    const { env } = process;
    const apiKey = env.DATADOG_API_KEY || env.DD_API_KEY;
    const appKey = env.DATADOG_APP_KEY ||
        env.DATADOG_APPLICATION_KEY ||
        env.DD_APP_KEY ||
        env.DD_APPLICATION_KEY;
    return apiKey && appKey ? { apiKey, appKey } : null;
}
|
|
33
|
+
/**
 * Build a Datadog log search query string from options and DD_* env vars.
 *
 * Precedence for each term is: explicit option > env var > default.
 * Terms are emitted in a fixed order: source, env, service, DD_QUERY,
 * then any caller-supplied query terms, joined with single spaces.
 *
 * @param {object} options - May contain source, env, service, and query.
 * @returns {string} Space-joined Datadog query (always includes a source term,
 *   defaulting to "lambda").
 */
function buildDatadogQuery(options) {
    const { env } = process;
    // Source always present: option wins, then DD_SOURCE, then "lambda".
    const parts = [`source:${options.source || env.DD_SOURCE || "lambda"}`];
    const environment = options.env || env.DD_ENV;
    if (environment) {
        parts.push(`env:${environment}`);
    }
    const service = options.service || env.DD_SERVICE;
    if (service) {
        parts.push(`service:${service}`);
    }
    // DD_QUERY provides a base query that is always appended when set.
    if (env.DD_QUERY) {
        parts.push(env.DD_QUERY);
    }
    // Caller-supplied terms come last.
    if (options.query) {
        parts.push(options.query);
    }
    return parts.join(" ");
}
|
|
65
|
+
/**
 * Search Datadog logs via the Logs Events Search API
 * (POST /api/v2/logs/events/search).
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {object} [options] - Query options; combined with DD_* env vars by buildDatadogQuery.
 * @param {string} [options.query] - Extra query terms appended to the built query.
 * @param {string} [options.from="now-15m"] - Start of the time range.
 * @param {string} [options.to="now"] - End of the time range.
 * @param {number} [options.limit=50] - Max log entries to return (capped at 1000).
 * @param {string} [options.sort="-timestamp"] - Sort order (newest first by default).
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, query, timeRange, logs, error? }.
 *   The promise never rejects: transport, HTTP, and parse failures all resolve
 *   with success:false and a human-readable error message.
 */
async function searchDatadogLogs(credentials, options = {}, logger = nullLogger$1) {
    // Resolve effective parameters: explicit option > env var > default.
    const effectiveQuery = buildDatadogQuery(options);
    const effectiveFrom = options.from || "now-15m";
    const effectiveTo = options.to || "now";
    const effectiveLimit = Math.min(options.limit || 50, 1000);
    const effectiveSort = options.sort || "-timestamp";
    logger.info(`Effective query: ${effectiveQuery}`);
    logger.info(`Search params: from=${effectiveFrom}, to=${effectiveTo}, limit=${effectiveLimit}, sort=${effectiveSort}`);
    // Datadog v2 search payload: filter + page + sort.
    const requestBody = JSON.stringify({
        filter: {
            query: effectiveQuery,
            from: effectiveFrom,
            to: effectiveTo,
        },
        page: {
            limit: effectiveLimit,
        },
        sort: effectiveSort,
    });
    // Wrap the callback-style https API; resolve-only by design.
    // NOTE(review): no request timeout is set — a stalled connection would hang
    // this promise indefinitely; confirm that is acceptable for callers.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path: "/api/v2/logs/events/search",
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
                "Content-Length": Buffer.byteLength(requestBody),
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog API error: ${res.statusCode}`);
                    // Map common failure statuses to actionable messages.
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 400) {
                        errorMessage = `Invalid query syntax. Check your query: "${effectiveQuery}". Datadog error: ${data}`;
                    }
                    else if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have logs_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
                    }
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        logs: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    // Flatten the v2 envelope into compact log records.
                    const logs = (response.data || []).map((log) => {
                        const attrs = log.attributes || {};
                        return {
                            id: log.id,
                            timestamp: attrs.timestamp,
                            status: attrs.status,
                            service: attrs.service,
                            message: attrs.message,
                            attributes: attrs.attributes,
                        };
                    });
                    logger.info(`Retrieved ${logs.length} log entries`);
                    resolve({
                        success: true,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        logs,
                    });
                }
                catch (parseError) {
                    // A 200 with a non-JSON body still resolves as a failure.
                    logger.error("Failed to parse Datadog response:", parseError);
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        logs: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                query: effectiveQuery,
                timeRange: { from: effectiveFrom, to: effectiveTo },
                logs: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.write(requestBody);
        req.end();
    });
}
|
|
177
|
+
/**
 * Aggregate Datadog logs using the Logs Analytics API
 * (POST /api/v2/logs/analytics/aggregate).
 * Groups logs by the specified facets and computes aggregations per bucket.
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {object} options - Query options; combined with DD_* env vars by buildDatadogQuery.
 * @param {string[]} options.groupBy - Facet names to group by.
 *   NOTE(review): groupBy has no default — a missing value throws on
 *   groupBy.join/.map below rather than resolving with success:false; confirm
 *   callers always validate this before calling.
 * @param {Array<{ aggregation: string, metric?: string }>} [options.compute] -
 *   Aggregations to compute; defaults to a single count.
 * @param {string} [options.from="now-15m"] - Start of the time range.
 * @param {string} [options.to="now"] - End of the time range.
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, query, timeRange, groupBy, buckets, error? }.
 *   Never rejects: all transport/HTTP/parse failures resolve with success:false.
 */
async function aggregateDatadogLogs(credentials, options, logger = nullLogger$1) {
    const effectiveQuery = buildDatadogQuery(options);
    const effectiveFrom = options.from || "now-15m";
    const effectiveTo = options.to || "now";
    const groupBy = options.groupBy;
    const compute = options.compute || [{ aggregation: "count" }];
    logger.info(`Analytics query: ${effectiveQuery}`);
    logger.info(`Group by: ${groupBy.join(", ")}`);
    logger.info(`Time range: ${effectiveFrom} to ${effectiveTo}`);
    // Build compute array - each item needs aggregation and type
    const computeItems = compute.map((c) => {
        const item = {
            aggregation: c.aggregation,
            type: "total",
        };
        // metric is only required for aggregations other than count.
        if (c.metric) {
            item.metric = c.metric;
        }
        return item;
    });
    // Build group_by with proper sort configuration
    const groupByItems = groupBy.map((field) => {
        const item = {
            facet: field,
            limit: 100,
            sort: {
                type: "measure",
                order: "desc",
                // Buckets are ordered by the first requested aggregation.
                aggregation: compute[0]?.aggregation || "count",
            },
        };
        return item;
    });
    const requestBody = JSON.stringify({
        filter: {
            query: effectiveQuery,
            from: effectiveFrom,
            to: effectiveTo,
        },
        group_by: groupByItems,
        compute: computeItems,
        page: {
            limit: 100,
        },
    });
    // Resolve-only promise: failures are reported via success:false.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path: "/api/v2/logs/analytics/aggregate",
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
                "Content-Length": Buffer.byteLength(requestBody),
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog Analytics API error: ${res.statusCode}`);
                    // Map common failure statuses to actionable messages.
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 400) {
                        errorMessage = `Invalid query or groupBy fields. Verify facet names exist: ${groupBy.join(", ")}. Datadog error: ${data}`;
                    }
                    else if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have logs_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
                    }
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        groupBy,
                        buckets: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    // Keep only the grouping keys and computed values per bucket.
                    const buckets = (response.data?.buckets || []).map((bucket) => ({
                        by: bucket.by || {},
                        computes: bucket.computes || {},
                    }));
                    logger.info(`Retrieved ${buckets.length} aggregation buckets`);
                    resolve({
                        success: true,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        groupBy,
                        buckets,
                    });
                }
                catch (parseError) {
                    logger.error("Failed to parse Datadog analytics response:", parseError);
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        groupBy,
                        buckets: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                query: effectiveQuery,
                timeRange: { from: effectiveFrom, to: effectiveTo },
                groupBy,
                buckets: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.write(requestBody);
        req.end();
    });
}
|
|
313
|
+
/**
 * List Datadog monitors with optional filtering
 * (GET /api/v1/monitor).
 *
 * tags / monitorTags / name are passed through to the API as query-string
 * filters; status filtering is applied client-side after the response arrives.
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {object} [options]
 * @param {string[]} [options.tags] - Scope tags, joined with commas.
 * @param {string[]} [options.monitorTags] - Monitor tags, joined with commas.
 * @param {string} [options.name] - Substring filter on monitor name (API-side).
 * @param {string[]} [options.status] - Keep only monitors whose overall state
 *   is in this list (client-side filter).
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, monitors, error? }.
 *   Never rejects: all failures resolve with success:false.
 */
async function listDatadogMonitors(credentials, options = {}, logger = nullLogger$1) {
    logger.info("Fetching Datadog monitors");
    // Build the query string only from filters that were actually provided.
    const queryParams = new URLSearchParams();
    if (options.tags && options.tags.length > 0) {
        queryParams.set("tags", options.tags.join(","));
    }
    if (options.monitorTags && options.monitorTags.length > 0) {
        queryParams.set("monitor_tags", options.monitorTags.join(","));
    }
    if (options.name) {
        queryParams.set("name", options.name);
    }
    const queryString = queryParams.toString();
    // Note: this local `path` shadows the module-level node:path import.
    const path = `/api/v1/monitor${queryString ? `?${queryString}` : ""}`;
    logger.info(`Request path: ${path}`);
    // Resolve-only promise: failures are reported via success:false.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path,
            method: "GET",
            headers: {
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog Monitors API error: ${res.statusCode}`);
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have monitors_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again.";
                    }
                    resolve({
                        success: false,
                        monitors: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    // The v1 monitors endpoint returns a bare JSON array.
                    const response = JSON.parse(data);
                    let monitors = response.map((monitor) => ({
                        id: monitor.id,
                        name: monitor.name,
                        type: monitor.type,
                        status: monitor.overall_state || "Unknown",
                        message: monitor.message,
                        tags: monitor.tags || [],
                        priority: monitor.priority,
                        query: monitor.query,
                        overallState: monitor.overall_state,
                    }));
                    // Filter by status if specified
                    if (options.status && options.status.length > 0) {
                        monitors = monitors.filter((m) => options.status.includes(m.status));
                    }
                    logger.info(`Retrieved ${monitors.length} monitors`);
                    resolve({
                        success: true,
                        monitors,
                    });
                }
                catch (parseError) {
                    logger.error("Failed to parse Datadog monitors response:", parseError);
                    resolve({
                        success: false,
                        monitors: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                monitors: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.end();
    });
}
|
|
411
|
+
/**
 * List Datadog Synthetic tests
 * (GET /api/v1/synthetics/tests).
 *
 * The API call takes no filters; type and tag filtering is applied
 * client-side to the returned list.
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {object} [options]
 * @param {string} [options.type] - Keep only tests of this exact type.
 * @param {string[]} [options.tags] - Keep tests matching at least one of these tags.
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, tests, error? }.
 *   Never rejects: all failures resolve with success:false.
 */
async function listDatadogSynthetics(credentials, options = {}, logger = nullLogger$1) {
    logger.info("Fetching Datadog Synthetic tests");
    // Resolve-only promise: failures are reported via success:false.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path: "/api/v1/synthetics/tests",
            method: "GET",
            headers: {
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog Synthetics API error: ${res.statusCode}`);
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have synthetics_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again.";
                    }
                    resolve({
                        success: false,
                        tests: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    // Project each test into a compact, snake_case-free record.
                    let tests = (response.tests || []).map((test) => ({
                        publicId: test.public_id,
                        name: test.name,
                        type: test.type,
                        status: test.status,
                        tags: test.tags || [],
                        locations: test.locations || [],
                        message: test.message,
                    }));
                    // Filter by type if specified
                    if (options.type) {
                        tests = tests.filter((t) => t.type === options.type);
                    }
                    // Filter by tags if specified
                    if (options.tags && options.tags.length > 0) {
                        tests = tests.filter((t) => options.tags.some((tag) => t.tags.includes(tag)));
                    }
                    logger.info(`Retrieved ${tests.length} synthetic tests`);
                    resolve({
                        success: true,
                        tests,
                    });
                }
                catch (parseError) {
                    logger.error("Failed to parse Datadog synthetics response:", parseError);
                    resolve({
                        success: false,
                        tests: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                tests: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.end();
    });
}
|
|
498
|
+
/**
 * Get recent results for a specific Synthetic test
 * (GET /api/v1/synthetics/tests/{publicId}/results).
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {string} publicId - Public ID of the Synthetic test.
 *   NOTE(review): publicId is interpolated into the URL path unescaped —
 *   confirm callers never pass untrusted input here.
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, publicId, results, error? }.
 *   Never rejects: all failures resolve with success:false.
 */
async function getDatadogSyntheticResults(credentials, publicId, logger = nullLogger$1) {
    logger.info(`Fetching results for Synthetic test: ${publicId}`);
    // Resolve-only promise: failures are reported via success:false.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path: `/api/v1/synthetics/tests/${publicId}/results`,
            method: "GET",
            headers: {
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog Synthetics Results API error: ${res.statusCode}`);
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have synthetics_read permission.";
                    }
                    else if (res.statusCode === 404) {
                        errorMessage = `Synthetic test '${publicId}' not found. Use datadog_synthetics (without testId) to list available tests.`;
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again.";
                    }
                    resolve({
                        success: false,
                        publicId,
                        results: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    const results = (response.results || []).map((result) => ({
                        publicId,
                        resultId: result.result_id,
                        status: result.status,
                        checkTime: result.check_time,
                        // Prefer the explicit passed flag; fall back to status === 0.
                        // NOTE(review): presumably status 0 means "passed" in this
                        // API version — verify against the Synthetics docs.
                        passed: result.result?.passed ?? result.status === 0,
                        location: result.dc_id?.toString(),
                    }));
                    logger.info(`Retrieved ${results.length} synthetic results`);
                    resolve({
                        success: true,
                        publicId,
                        results,
                    });
                }
                catch (parseError) {
                    logger.error("Failed to parse Datadog synthetic results:", parseError);
                    resolve({
                        success: false,
                        publicId,
                        results: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                publicId,
                results: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.end();
    });
}
|
|
583
|
+
/**
 * Query Datadog metrics
 * (GET /api/v1/query).
 *
 * @param {{ apiKey: string, appKey: string }} credentials - Datadog API/Application keys.
 * @param {object} options - All fields required (no defaults applied here).
 * @param {string} options.query - Metric query, e.g. 'aggregation:metric.name{tags}'.
 * @param {number|string} options.from - Range start; stringified into the query string.
 *   NOTE(review): presumably an epoch timestamp in seconds per the v1 query API — confirm.
 * @param {number|string} options.to - Range end; same representation as `from`.
 * @param {{ info: Function, error: Function }} [logger] - Optional logger; defaults to a no-op.
 * @returns {Promise<object>} Resolves with { success, query, timeRange, series, error? }.
 *   Never rejects: all failures resolve with success:false.
 */
async function queryDatadogMetrics(credentials, options, logger = nullLogger$1) {
    logger.info(`Querying metrics: ${options.query}`);
    logger.info(`Time range: ${options.from} to ${options.to}`);
    // URLSearchParams handles escaping of the metric query string.
    const queryParams = new URLSearchParams({
        query: options.query,
        from: options.from.toString(),
        to: options.to.toString(),
    });
    // Resolve-only promise: failures are reported via success:false.
    return new Promise((resolve) => {
        const requestOptions = {
            hostname: "api.datadoghq.com",
            port: 443,
            path: `/api/v1/query?${queryParams.toString()}`,
            method: "GET",
            headers: {
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog Metrics API error: ${res.statusCode}`);
                    // Map common failure statuses to actionable messages.
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    if (res.statusCode === 400) {
                        errorMessage = `Invalid metric query. Check format: 'aggregation:metric.name{tags}'. Query: "${options.query}". Datadog error: ${data}`;
                    }
                    else if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have metrics_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again, or reduce your time range.";
                    }
                    resolve({
                        success: false,
                        query: options.query,
                        timeRange: { from: options.from, to: options.to },
                        series: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    // Project each series into metric name, scope, points, and unit.
                    const series = (response.series || []).map((s) => ({
                        metric: s.metric,
                        scope: s.scope,
                        pointlist: s.pointlist,
                        unit: s.unit?.[0]?.name,
                    }));
                    logger.info(`Retrieved ${series.length} metric series`);
                    resolve({
                        success: true,
                        query: options.query,
                        timeRange: { from: options.from, to: options.to },
                        series,
                    });
                }
                catch (parseError) {
                    logger.error("Failed to parse Datadog metrics response:", parseError);
                    resolve({
                        success: false,
                        query: options.query,
                        timeRange: { from: options.from, to: options.to },
                        series: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection reset).
            logger.error("Request error:", error);
            resolve({
                success: false,
                query: options.query,
                timeRange: { from: options.from, to: options.to },
                series: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.end();
    });
}
|
|
676
|
+
/**
 * Search Datadog RUM events
 */
// Queries the Datadog RUM events search API (POST /api/v2/rum/events/search)
// and resolves with a normalized result object. Never rejects: every failure
// mode resolves with { success: false, error } so callers need no try/catch.
async function searchDatadogRum(credentials, options = {}, logger = nullLogger$1) {
    // Defaults: match-all query over the last 15 minutes, newest first.
    const effectiveQuery = options.query || "*";
    const effectiveFrom = options.from || "now-15m";
    const effectiveTo = options.to || "now";
    // Cap the page size at 1000 regardless of what the caller asks for.
    const effectiveLimit = Math.min(options.limit || 50, 1000);
    const effectiveSort = options.sort || "-timestamp";
    logger.info(`RUM query: ${effectiveQuery}`);
    logger.info(`Time range: ${effectiveFrom} to ${effectiveTo}`);
    const requestBody = JSON.stringify({
        filter: {
            query: effectiveQuery,
            from: effectiveFrom,
            to: effectiveTo,
        },
        page: {
            limit: effectiveLimit,
        },
        sort: effectiveSort,
    });
    return new Promise((resolve) => {
        const requestOptions = {
            // NOTE(review): hard-coded US1 site — confirm behavior for other Datadog regions
            hostname: "api.datadoghq.com",
            port: 443,
            path: "/api/v2/rum/events/search",
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "DD-API-KEY": credentials.apiKey,
                "DD-APPLICATION-KEY": credentials.appKey,
                "Content-Length": Buffer.byteLength(requestBody),
            },
        };
        const req = https.request(requestOptions, (res) => {
            // Accumulate the full response body before parsing.
            let data = "";
            res.on("data", (chunk) => {
                data += chunk.toString();
            });
            res.on("end", () => {
                logger.info(`Response status: ${res.statusCode}`);
                if (res.statusCode !== 200) {
                    logger.error(`Datadog RUM API error: ${res.statusCode}`);
                    let errorMessage = `Datadog API returned status ${res.statusCode}: ${data}`;
                    // Check for specific "No valid indexes" error which means no RUM app is configured
                    if (data.includes("No valid indexes")) {
                        errorMessage =
                            "No RUM application found. Ensure you have a RUM application configured in Datadog and it has collected data. " +
                                "You can create a RUM application at https://app.datadoghq.com/rum/list";
                    }
                    else if (res.statusCode === 400) {
                        errorMessage = `Invalid RUM query. Check syntax: "${effectiveQuery}". Datadog error: ${data}`;
                    }
                    else if (res.statusCode === 403) {
                        errorMessage =
                            "Access denied. Verify your API and Application keys have rum_read permission.";
                    }
                    else if (res.statusCode === 429) {
                        errorMessage =
                            "Rate limited by Datadog. Wait a moment and try again, or reduce your query scope.";
                    }
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        events: [],
                        error: errorMessage,
                    });
                    return;
                }
                try {
                    const response = JSON.parse(data);
                    // Flatten Datadog's nested envelope into one object per event;
                    // the raw attributes are kept under `attributes` for callers
                    // that need fields not surfaced here.
                    const events = (response.data || []).map((event) => {
                        const attrs = event.attributes?.attributes || {};
                        return {
                            id: event.id,
                            type: event.type,
                            timestamp: event.attributes?.timestamp,
                            sessionId: attrs.session?.id,
                            viewUrl: attrs.view?.url,
                            viewName: attrs.view?.name,
                            errorMessage: attrs.error?.message,
                            errorType: attrs.error?.type,
                            attributes: attrs,
                        };
                    });
                    logger.info(`Retrieved ${events.length} RUM events`);
                    resolve({
                        success: true,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        events,
                    });
                }
                catch (parseError) {
                    // Non-JSON 200 response: report rather than throw.
                    logger.error("Failed to parse Datadog RUM response:", parseError);
                    resolve({
                        success: false,
                        query: effectiveQuery,
                        timeRange: { from: effectiveFrom, to: effectiveTo },
                        events: [],
                        error: `Failed to parse response: ${parseError instanceof Error ? parseError.message : "Unknown error"}`,
                    });
                }
            });
        });
        req.on("error", (error) => {
            // Network-level failure (DNS, TLS, connection refused).
            logger.error("Request error:", error);
            resolve({
                success: false,
                query: effectiveQuery,
                timeRange: { from: effectiveFrom, to: effectiveTo },
                events: [],
                error: `Connection error: ${error.message}`,
            });
        });
        req.write(requestBody);
        req.end();
    });
}
|
|
797
|
+
|
|
798
|
+
/**
 * LLM debugging utilities for inspecting raw provider responses
 */
// Default models for each provider
// Used by debugLlmCall when the caller does not specify a model.
const DEFAULT_MODELS = {
    anthropic: "claude-sonnet-4-20250514",
    gemini: "gemini-2.0-flash",
    openai: "gpt-4o-mini",
    openrouter: "openai/gpt-4o-mini",
};
|
|
808
|
+
/**
 * Make a debug LLM call and return the raw response data for inspection
 */
// Resolves with a plain result object and never throws.
// NOTE(review): the `log` parameter is accepted but never used in this body —
// confirm whether it should be wired into the Llm call or removed.
async function debugLlmCall(params, log) {
    const { provider, message } = params;
    // Fall back to the provider's default model when none is given.
    const model = params.model || DEFAULT_MODELS[provider];
    try {
        const llm = new Llm(provider, { model });
        const result = await llm.operate(message, {
            user: "[jaypie-mcp] Debug LLM Call",
        });
        if (result.error) {
            // Provider-reported error object with { title, detail }.
            return {
                success: false,
                provider,
                model,
                error: `${result.error.title}: ${result.error.detail || "Unknown error"}`,
            };
        }
        // Calculate total reasoning tokens
        const reasoningTokens = result.usage.reduce((sum, u) => sum + (u.reasoning || 0), 0);
        return {
            success: true,
            provider,
            model,
            // Normalize structured content to a JSON string for display.
            content: typeof result.content === "string"
                ? result.content
                : JSON.stringify(result.content),
            reasoning: result.reasoning,
            reasoningTokens,
            history: result.history,
            rawResponses: result.responses,
            usage: result.usage,
        };
    }
    catch (error) {
        // Unexpected failure (bad provider name, network error, etc.).
        return {
            success: false,
            provider,
            model,
            error: error instanceof Error ? error.message : String(error),
        };
    }
}
|
|
852
|
+
/**
 * List available providers and their default/reasoning models.
 * Returns a static catalog; no I/O is performed.
 */
function listLlmProviders() {
    const providers = [
        {
            name: "openai",
            defaultModel: DEFAULT_MODELS.openai,
            reasoningModels: ["o3-mini", "o1-preview", "o1-mini"],
        },
        {
            name: "anthropic",
            defaultModel: DEFAULT_MODELS.anthropic,
            // Anthropic doesn't expose reasoning the same way
            reasoningModels: [],
        },
        {
            name: "gemini",
            defaultModel: DEFAULT_MODELS.gemini,
            // Gemini has thoughtsTokenCount but unclear on content
            reasoningModels: [],
        },
        {
            name: "openrouter",
            defaultModel: DEFAULT_MODELS.openrouter,
            reasoningModels: ["openai/o3-mini", "openai/o1-preview"],
        },
    ];
    return { providers };
}
|
|
881
|
+
|
|
882
|
+
/**
 * AWS CLI integration module
 * Provides a structured interface for common AWS operations via the AWS CLI
 */
// Default no-op logger used when callers do not supply one.
const nullLogger = {
    info() {},
    error() {},
};
|
|
890
|
+
/**
 * Parse AWS CLI error messages into user-friendly descriptions.
 * Matches known error markers in priority order; unrecognized stderr
 * is returned trimmed and otherwise unchanged.
 */
function parseAwsError(stderr, service, command) {
    const has = (marker) => stderr.includes(marker);
    if (has("ExpiredToken") || has("Token has expired")) {
        return "AWS credentials have expired. Run 'aws sso login' or refresh your credentials.";
    }
    if (has("NoCredentialProviders") || has("Unable to locate credentials")) {
        return "No AWS credentials found. Configure credentials with 'aws configure' or 'aws sso login'.";
    }
    if (has("AccessDenied") || has("Access Denied")) {
        return `Access denied for ${service}:${command}. Check your IAM permissions.`;
    }
    if (has("ResourceNotFoundException")) {
        return `Resource not found. Check that the specified resource exists in the correct region.`;
    }
    if (has("ValidationException")) {
        // Pull the detail text after the exception name when present.
        const detail = stderr.match(/ValidationException[^:]*:\s*(.+)/);
        return detail
            ? `Validation error: ${detail[1].trim()}`
            : "Validation error in request parameters.";
    }
    if (has("ThrottlingException") || has("Rate exceeded")) {
        return "AWS API rate limit exceeded. Wait a moment and try again.";
    }
    if (has("InvalidParameterValue")) {
        const detail = stderr.match(/InvalidParameterValue[^:]*:\s*(.+)/);
        return detail
            ? `Invalid parameter: ${detail[1].trim()}`
            : "Invalid parameter value provided.";
    }
    return stderr.trim();
}
|
|
925
|
+
/**
 * Parse relative time strings like 'now-1h' to Unix timestamps (ms).
 * Accepts "now", "now-<N><s|m|h|d>", or any string Date.parse understands.
 * Unparseable input falls back to the current time.
 */
function parseRelativeTime(timeStr) {
    const UNIT_MS = {
        s: 1000,
        m: 60 * 1000,
        h: 60 * 60 * 1000,
        d: 24 * 60 * 60 * 1000,
    };
    const now = Date.now();
    if (timeStr === "now") {
        return now;
    }
    // Relative offsets: 'now-15m', 'now-1h', 'now-1d', ...
    const relative = /^now-(\d+)([smhd])$/.exec(timeStr);
    if (relative) {
        return now - Number.parseInt(relative[1], 10) * UNIT_MS[relative[2]];
    }
    // Absolute timestamps (ISO 8601 and friends).
    const absolute = Date.parse(timeStr);
    return Number.isNaN(absolute) ? now : absolute;
}
|
|
954
|
+
/**
 * Execute an AWS CLI command and return parsed JSON output
 */
// Spawns `aws <service> <command> ...args --output json` and resolves with
// { success, data?, error? }. Never rejects; all failures resolve with
// success: false plus a human-readable message from parseAwsError.
async function executeAwsCommand(service, command, args, options = {}, logger = nullLogger) {
    const fullArgs = [service, command, ...args, "--output", "json"];
    if (options.profile) {
        fullArgs.push("--profile", options.profile);
    }
    if (options.region) {
        fullArgs.push("--region", options.region);
    }
    logger.info(`Executing: aws ${fullArgs.join(" ")}`);
    return new Promise((resolve) => {
        // spawn (no shell) so arguments are passed verbatim, not interpolated.
        const proc = spawn("aws", fullArgs);
        let stdout = "";
        let stderr = "";
        proc.stdout.on("data", (data) => {
            stdout += data.toString();
        });
        proc.stderr.on("data", (data) => {
            stderr += data.toString();
        });
        proc.on("close", (code) => {
            if (code !== 0) {
                logger.error(`AWS CLI error: ${stderr}`);
                resolve({
                    success: false,
                    error: parseAwsError(stderr, service, command),
                });
                return;
            }
            // Handle empty output (some commands return nothing on success)
            if (!stdout.trim()) {
                resolve({ success: true });
                return;
            }
            try {
                const data = JSON.parse(stdout);
                resolve({ success: true, data });
            }
            catch {
                // Some commands return plain text
                resolve({ success: true, data: stdout.trim() });
            }
        });
        proc.on("error", (error) => {
            // Process failed to start (e.g., `aws` binary not on PATH).
            if (error.message.includes("ENOENT")) {
                resolve({
                    success: false,
                    error: "AWS CLI not found. Install it from https://aws.amazon.com/cli/",
                });
            }
            else {
                resolve({ success: false, error: error.message });
            }
        });
    });
}
|
|
1012
|
+
/**
 * List available AWS profiles from ~/.aws/config and ~/.aws/credentials
 */
// Resolves with { success: true, data: [{ name, source }] }. Missing config
// files are treated as "no profiles", not as an error.
async function listAwsProfiles(logger = nullLogger) {
    const profiles = [];
    const homeDir = os.homedir();
    try {
        // Parse ~/.aws/config
        const configPath = path.join(homeDir, ".aws", "config");
        try {
            const configContent = await fs.readFile(configPath, "utf-8");
            // Config sections are "[profile name]"; the default profile is "[default]".
            const profileRegex = /\[profile\s+([^\]]+)\]|\[default\]/g;
            let match;
            while ((match = profileRegex.exec(configContent)) !== null) {
                // match[1] is undefined for the [default] branch of the alternation.
                const name = match[1] || "default";
                profiles.push({
                    name,
                    source: "config",
                });
            }
            logger.info(`Found ${profiles.length} profiles in config`);
        }
        catch {
            logger.info("No ~/.aws/config file found");
        }
        // Parse ~/.aws/credentials
        const credentialsPath = path.join(homeDir, ".aws", "credentials");
        try {
            const credentialsContent = await fs.readFile(credentialsPath, "utf-8");
            // Credentials sections are plain "[name]" headers.
            const profileRegex = /\[([^\]]+)\]/g;
            let match;
            while ((match = profileRegex.exec(credentialsContent)) !== null) {
                const name = match[1];
                // Only add if not already in the list
                if (!profiles.find((p) => p.name === name)) {
                    profiles.push({
                        name,
                        source: "credentials",
                    });
                }
            }
            logger.info(`Total profiles after credentials: ${profiles.length}`);
        }
        catch {
            logger.info("No ~/.aws/credentials file found");
        }
        return { success: true, data: profiles };
    }
    catch (error) {
        // Unexpected failure outside file reads (e.g., os.homedir throwing).
        const errorMessage = error instanceof Error ? error.message : "Unknown error";
        logger.error(`Error listing profiles: ${errorMessage}`);
        return { success: false, error: errorMessage };
    }
}
|
|
1066
|
+
// Step Functions operations
/**
 * List executions of a Step Functions state machine, optionally filtered
 * by execution status and capped at maxResults.
 */
async function listStepFunctionExecutions(options, logger = nullLogger) {
    const { stateMachineArn, statusFilter, maxResults, profile, region } = options;
    const args = ["--state-machine-arn", stateMachineArn];
    if (statusFilter) {
        args.push("--status-filter", statusFilter);
    }
    if (maxResults) {
        args.push("--max-results", String(maxResults));
    }
    return executeAwsCommand("stepfunctions", "list-executions", args, { profile, region }, logger);
}
|
|
1077
|
+
/**
 * Stop a running Step Functions execution, optionally recording a cause.
 */
async function stopStepFunctionExecution(options, logger = nullLogger) {
    const { executionArn, cause, profile, region } = options;
    const args = ["--execution-arn", executionArn];
    if (cause) {
        args.push("--cause", cause);
    }
    return executeAwsCommand("stepfunctions", "stop-execution", args, { profile, region }, logger);
}
|
|
1084
|
+
// Lambda operations
/**
 * List Lambda functions, optionally capped via --max-items and filtered
 * client-side by a function-name prefix.
 */
async function listLambdaFunctions(options = {}, logger = nullLogger) {
    const args = [];
    if (options.maxResults) {
        args.push("--max-items", String(options.maxResults));
    }
    const result = await executeAwsCommand("lambda", "list-functions", args, { profile: options.profile, region: options.region }, logger);
    // Filter by prefix if specified.
    // Fix: guard with Array.isArray — executeAwsCommand may resolve with plain
    // text data or without a Functions array, which previously made
    // `result.data.Functions.filter` throw a TypeError.
    if (result.success && options.functionNamePrefix && Array.isArray(result.data?.Functions)) {
        result.data.Functions = result.data.Functions.filter((f) => f.FunctionName.startsWith(options.functionNamePrefix));
    }
    return result;
}
|
|
1097
|
+
/**
 * Fetch configuration and code location for a single Lambda function.
 */
async function getLambdaFunction(options, logger = nullLogger) {
    const { functionName, profile, region } = options;
    return executeAwsCommand("lambda", "get-function", ["--function-name", functionName], { profile, region }, logger);
}
|
|
1100
|
+
// CloudWatch Logs operations
/**
 * Fetch CloudWatch log events from a log group, optionally filtered by a
 * pattern and a start/end time (relative like 'now-1h' or ISO 8601).
 */
async function filterLogEvents(options, logger = nullLogger) {
    const args = ["--log-group-name", options.logGroupName];
    if (options.filterPattern) {
        args.push("--filter-pattern", options.filterPattern);
    }
    if (options.startTime) {
        const startMs = parseRelativeTime(options.startTime);
        args.push("--start-time", String(startMs));
    }
    if (options.endTime) {
        const endMs = parseRelativeTime(options.endTime);
        args.push("--end-time", String(endMs));
    }
    // Fix: this push was an unguarded bare block, so a missing options.limit
    // produced `--limit undefined` and an AWS CLI error (compare the proper
    // guard in queryDynamoDB).
    if (options.limit) {
        args.push("--limit", String(options.limit));
    }
    return executeAwsCommand("logs", "filter-log-events", args, { profile: options.profile, region: options.region }, logger);
}
|
|
1119
|
+
// S3 operations
/**
 * List objects in an S3 bucket, optionally under a key prefix and capped
 * at maxResults.
 */
async function listS3Objects(options, logger = nullLogger) {
    const { bucket, prefix, maxResults, profile, region } = options;
    const args = ["--bucket", bucket];
    if (prefix) {
        args.push("--prefix", prefix);
    }
    if (maxResults) {
        args.push("--max-items", String(maxResults));
    }
    return executeAwsCommand("s3api", "list-objects-v2", args, { profile, region }, logger);
}
|
|
1130
|
+
// CloudFormation operations
/**
 * Describe a CloudFormation stack by name.
 */
async function describeStack(options, logger = nullLogger) {
    const { stackName, profile, region } = options;
    return executeAwsCommand("cloudformation", "describe-stacks", ["--stack-name", stackName], { profile, region }, logger);
}
|
|
1134
|
+
// DynamoDB operations
/**
 * Describe a DynamoDB table (schema, indexes, throughput, status).
 */
async function describeDynamoDBTable(options, logger = nullLogger) {
    const { tableName, profile, region } = options;
    return executeAwsCommand("dynamodb", "describe-table", ["--table-name", tableName], { profile, region }, logger);
}
|
|
1138
|
+
/**
 * Scan a DynamoDB table, optionally with a filter expression and
 * expression attribute values (JSON string), capped at `limit` items.
 */
async function scanDynamoDB(options, logger = nullLogger) {
    const args = ["--table-name", options.tableName];
    if (options.filterExpression) {
        args.push("--filter-expression", options.filterExpression);
    }
    if (options.expressionAttributeValues) {
        args.push("--expression-attribute-values", options.expressionAttributeValues);
    }
    // Fix: this push was an unguarded bare block, so a missing options.limit
    // produced `--limit undefined` and an AWS CLI error (compare the proper
    // guard in queryDynamoDB).
    if (options.limit) {
        args.push("--limit", String(options.limit));
    }
    return executeAwsCommand("dynamodb", "scan", args, { profile: options.profile, region: options.region }, logger);
}
|
|
1151
|
+
/**
 * Query a DynamoDB table (or a secondary index) with a key condition
 * expression; supports an optional filter, limit, and reverse sort order.
 */
async function queryDynamoDB(options, logger = nullLogger) {
    const { tableName, keyConditionExpression, expressionAttributeValues, indexName, filterExpression, limit, scanIndexForward, profile, region, } = options;
    const args = [
        "--table-name",
        tableName,
        "--key-condition-expression",
        keyConditionExpression,
        "--expression-attribute-values",
        expressionAttributeValues,
    ];
    if (indexName) {
        args.push("--index-name", indexName);
    }
    if (filterExpression) {
        args.push("--filter-expression", filterExpression);
    }
    if (limit) {
        args.push("--limit", String(limit));
    }
    // Only an explicit false reverses the sort; undefined keeps the default.
    if (scanIndexForward === false) {
        args.push("--no-scan-index-forward");
    }
    return executeAwsCommand("dynamodb", "query", args, { profile, region }, logger);
}
|
|
1174
|
+
/**
 * Fetch a single DynamoDB item by its key (JSON string in DynamoDB format).
 */
async function getDynamoDBItem(options, logger = nullLogger) {
    const { tableName, key, profile, region } = options;
    return executeAwsCommand("dynamodb", "get-item", ["--table-name", tableName, "--key", key], { profile, region }, logger);
}
|
|
1177
|
+
// SQS operations
/**
 * List SQS queues, optionally filtered by a queue-name prefix.
 */
async function listSQSQueues(options = {}, logger = nullLogger) {
    const { queueNamePrefix, profile, region } = options;
    const args = queueNamePrefix ? ["--queue-name-prefix", queueNamePrefix] : [];
    return executeAwsCommand("sqs", "list-queues", args, { profile, region }, logger);
}
|
|
1185
|
+
/**
 * Fetch all attributes of an SQS queue (depth, ARN, timeouts, etc.).
 */
async function getSQSQueueAttributes(options, logger = nullLogger) {
    const { queueUrl, profile, region } = options;
    return executeAwsCommand("sqs", "get-queue-attributes", ["--queue-url", queueUrl, "--attribute-names", "All"], { profile, region }, logger);
}
|
|
1188
|
+
/**
 * Receive messages from an SQS queue with all attributes, optionally
 * setting a batch size and visibility timeout.
 */
async function receiveSQSMessage(options, logger = nullLogger) {
    const args = ["--queue-url", options.queueUrl];
    // Fix: both pushes below were unguarded bare blocks, so missing options
    // produced "--max-number-of-messages undefined" /
    // "--visibility-timeout undefined" and an AWS CLI error.
    if (options.maxNumberOfMessages) {
        args.push("--max-number-of-messages", String(options.maxNumberOfMessages));
    }
    // != null so an explicit visibilityTimeout of 0 is still honored.
    if (options.visibilityTimeout != null) {
        args.push("--visibility-timeout", String(options.visibilityTimeout));
    }
    args.push("--attribute-names", "All");
    return executeAwsCommand("sqs", "receive-message", args, { profile: options.profile, region: options.region }, logger);
}
|
|
1199
|
+
/**
 * Purge all messages from an SQS queue. Destructive and irreversible.
 */
async function purgeSQSQueue(options, logger = nullLogger) {
    const { queueUrl, profile, region } = options;
    return executeAwsCommand("sqs", "purge-queue", ["--queue-url", queueUrl], { profile, region }, logger);
}
|
|
1202
|
+
|
|
1203
|
+
// ServiceSuite for @jaypie/mcp
// Provides metadata and direct execution for Jaypie MCP services
// Build-stamped version string (package@version#hash), injected at build time.
const BUILD_VERSION_STRING = "@jaypie/mcp@0.4.0#988a2bd8";
// ESM equivalents of CommonJS __filename/__dirname for path resolution.
const __filename$1 = fileURLToPath(import.meta.url);
const __dirname$1 = path.dirname(__filename$1);
// Content directories ship as siblings of dist/ in the published package.
const PROMPTS_PATH = path.join(__dirname$1, "..", "prompts");
const RELEASE_NOTES_PATH = path.join(__dirname$1, "..", "release-notes");
const SKILLS_PATH = path.join(__dirname$1, "..", "skills");
// Silent logger for direct execution
const log = {
    info: () => { },
    error: () => { },
};
|
|
1217
|
+
/**
 * Read a prompt markdown file and extract front-matter metadata.
 * Returns { filename, description?, include? }; unreadable files yield
 * just the filename so listings never fail.
 */
async function parseMarkdownFile(filePath) {
    const filename = path.basename(filePath);
    try {
        const content = await fs.readFile(filePath, "utf-8");
        if (!content.startsWith("---")) {
            return { filename };
        }
        const { data } = matter(content);
        return {
            filename,
            description: data.description,
            // "include" and the legacy "globs" key are treated as synonyms.
            include: data.include || data.globs,
        };
    }
    catch {
        return { filename };
    }
}
|
|
1236
|
+
/**
 * Format one parsed prompt as a markdown list item.
 * Fix: the templates previously emitted the literal text "$(unknown)"
 * instead of interpolating the destructured `filename` (which was unused),
 * so every listed prompt displayed as "* $(unknown)".
 */
function formatPromptListItem(prompt) {
    const { filename, description, include } = prompt;
    if (description && include) {
        return `* ${filename}: ${description} - Required for ${include}`;
    }
    else if (description) {
        return `* ${filename}: ${description}`;
    }
    else if (include) {
        return `* ${filename} - Required for ${include}`;
    }
    else {
        return `* ${filename}`;
    }
}
|
|
1251
|
+
/**
 * Read a release-note markdown file and extract front-matter metadata.
 * The version defaults to the filename (without .md) when the front matter
 * does not declare one; unreadable files yield { filename } only.
 */
async function parseReleaseNoteFile(filePath) {
    const filename = path.basename(filePath, ".md");
    try {
        const content = await fs.readFile(filePath, "utf-8");
        if (!content.startsWith("---")) {
            return { filename, version: filename };
        }
        const { data } = matter(content);
        return {
            date: data.date,
            filename,
            summary: data.summary,
            version: data.version || filename,
        };
    }
    catch {
        return { filename };
    }
}
|
|
1271
|
+
/**
 * Format one release note as a markdown list item:
 * "* pkg@version (date) - summary", omitting missing segments.
 */
function formatReleaseNoteListItem(note) {
    const { date, packageName, summary, version } = note;
    const segments = [`* ${packageName}@${version}`];
    if (date) {
        segments.push(`(${date})`);
    }
    if (summary) {
        segments.push(`- ${summary}`);
    }
    return segments.join(" ");
}
|
|
1282
|
+
/**
 * Validate a skill alias before using it in a filesystem path.
 * Rejects path separators and traversal sequences, then requires the
 * lowercased, trimmed alias to be purely [a-z0-9_-].
 */
function isValidSkillAlias(alias) {
    const candidate = alias.toLowerCase().trim();
    // Defense in depth: explicitly reject path-like tokens even though the
    // character whitelist below would also exclude them.
    const hasPathTokens = ["/", "\\", ".."].some((token) => candidate.includes(token));
    if (hasPathTokens) {
        return false;
    }
    return /^[a-z0-9_-]+$/.test(candidate);
}
|
|
1293
|
+
/**
 * Read a skill markdown file and extract its front-matter description.
 * Unreadable files still yield { alias } so listings never fail.
 */
async function parseSkillFile(filePath) {
    const alias = path.basename(filePath, ".md");
    try {
        const content = await fs.readFile(filePath, "utf-8");
        if (!content.startsWith("---")) {
            return { alias };
        }
        const { data } = matter(content);
        return { alias, description: data.description };
    }
    catch {
        return { alias };
    }
}
|
|
1311
|
+
/**
 * Format one skill as a markdown list item, appending the description
 * when one is available.
 */
function formatSkillListItem(skill) {
    const { alias, description } = skill;
    return description ? `* ${alias} - ${description}` : `* ${alias}`;
}
|
|
1318
|
+
// Read and parse every release note (.md) for one package directory,
// returning them newest-first. A missing or unreadable directory yields [].
async function getPackageReleaseNotes(packageName) {
    const packageDir = path.join(RELEASE_NOTES_PATH, packageName);
    try {
        const files = await fs.readdir(packageDir);
        const mdFiles = files.filter((file) => file.endsWith(".md"));
        const notes = await Promise.all(mdFiles.map(async (file) => {
            const parsed = await parseReleaseNoteFile(path.join(packageDir, file));
            return { ...parsed, packageName };
        }));
        // Sort descending by semver; fall back to string comparison when a
        // version string is not valid semver (gt throws).
        return notes.sort((a, b) => {
            if (!a.version || !b.version)
                return 0;
            try {
                return gt(a.version, b.version) ? -1 : 1;
            }
            catch {
                return b.version.localeCompare(a.version);
            }
        });
    }
    catch {
        return [];
    }
}
|
|
1342
|
+
/**
 * Keep only notes whose semver version is strictly newer than sinceVersion.
 * Notes without a version, or with a version gt() cannot parse, are dropped.
 */
function filterReleaseNotesSince(notes, sinceVersion) {
    const isNewer = (note) => {
        if (!note.version) {
            return false;
        }
        try {
            return gt(note.version, sinceVersion);
        }
        catch {
            return false;
        }
    };
    return notes.filter(isNewer);
}
|
|
1354
|
+
// =============================================================================
// DOCS SERVICES
// =============================================================================
// Trivial service: reports the build-stamped version string so clients can
// confirm which build of @jaypie/mcp they are talking to.
const version = fabricService({
    alias: "version",
    description: `Prints the current version and hash, \`${BUILD_VERSION_STRING}\``,
    input: {},
    service: async () => BUILD_VERSION_STRING,
});
|
|
1363
|
+
// Service: serve Jaypie skill documentation. With no alias (or "index") it
// returns a combined index page plus a list of all skills; otherwise it
// returns the raw markdown of the requested skill file.
const skill = fabricService({
    alias: "skill",
    description: "Access Jaypie development documentation. Pass a skill alias (e.g., 'aws', 'tests', 'errors') to get that documentation. Pass 'index' or no argument to list all available skills.",
    input: {
        alias: {
            type: String,
            required: false,
            description: "Skill alias (e.g., 'aws', 'tests'). Omit or use 'index' to list all skills.",
        },
    },
    service: async ({ alias: inputAlias }) => {
        const alias = (inputAlias || "index").toLowerCase().trim();
        // Security: validate alias to prevent path traversal
        if (!isValidSkillAlias(alias)) {
            throw new Error(`Invalid skill alias "${alias}". Use alphanumeric characters, hyphens, and underscores only.`);
        }
        // If requesting index, return list of all skills with descriptions
        if (alias === "index") {
            const indexPath = path.join(SKILLS_PATH, "index.md");
            let indexContent = "";
            try {
                indexContent = await fs.readFile(indexPath, "utf-8");
                // Strip frontmatter for display
                if (indexContent.startsWith("---")) {
                    const parsed = matter(indexContent);
                    indexContent = parsed.content.trim();
                }
            }
            catch {
                // Index file doesn't exist, will just show skill list
            }
            // Get all skill files
            const files = await fs.readdir(SKILLS_PATH);
            const mdFiles = files.filter((file) => file.endsWith(".md") && file !== "index.md");
            const skills = await Promise.all(mdFiles.map((file) => parseSkillFile(path.join(SKILLS_PATH, file))));
            // Sort alphabetically
            skills.sort((a, b) => a.alias.localeCompare(b.alias));
            const skillList = skills.map(formatSkillListItem).join("\n");
            if (indexContent) {
                return `${indexContent}\n\n## Available Skills\n\n${skillList}`;
            }
            // Fallback heading when no index.md exists.
            return `# Jaypie Skills\n\n## Available Skills\n\n${skillList}`;
        }
        // Read specific skill file
        const skillPath = path.join(SKILLS_PATH, `${alias}.md`);
        try {
            return await fs.readFile(skillPath, "utf-8");
        }
        catch {
            // Missing file surfaces as a friendly error, not an fs error.
            throw new Error(`Skill "${alias}" not found. Use skill("index") to list available skills.`);
        }
    },
});
|
|
1416
|
+
// Deprecated service: lists legacy prompt files with their front-matter
// description and include globs. Superseded by skill("index").
const listPrompts = fabricService({
    alias: "list_prompts",
    description: "[DEPRECATED: Use skill('index') instead] List available Jaypie development prompts and guides. Use this FIRST when starting work on a Jaypie project to discover relevant documentation. Returns filenames, descriptions, and which file patterns each prompt applies to (e.g., 'Required for packages/express/**').",
    input: {},
    service: async () => {
        const files = await fs.readdir(PROMPTS_PATH);
        const mdFiles = files.filter((file) => file.endsWith(".md"));
        const prompts = await Promise.all(mdFiles.map((file) => parseMarkdownFile(path.join(PROMPTS_PATH, file))));
        // An empty directory produces "" from join, so fall back to a message.
        return (prompts.map(formatPromptListItem).join("\n") ||
            "No .md files found in the prompts directory.");
    },
});
|
1428
|
+
// Deprecated service: reads one legacy prompt file by name.
const readPrompt = fabricService({
    alias: "read_prompt",
    description: "[DEPRECATED: Use skill(alias) instead] Read a Jaypie prompt/guide by filename. Call list_prompts first to see available prompts. These contain best practices, templates, code patterns, and step-by-step guides for Jaypie development tasks.",
    input: {
        filename: {
            type: String,
            required: true,
            description: "The prompt filename from list_prompts (e.g., 'Jaypie_Express_Package.md', 'Development_Process.md')",
        },
    },
    service: async ({ filename }) => {
        // Security fix: `filename` is caller-supplied and was joined into the
        // path unchecked, so values like "../../secret" could read files
        // outside the prompts directory (the `skill` service validates its
        // alias; this service did not). Resolve and confirm containment.
        const promptsRoot = path.resolve(PROMPTS_PATH);
        const filePath = path.resolve(promptsRoot, filename);
        if (!filePath.startsWith(promptsRoot + path.sep)) {
            throw new Error(`Invalid prompt filename "${filename}".`);
        }
        return fs.readFile(filePath, "utf-8");
    },
});
|
1443
|
+
// Service: list release notes across package directories, optionally
// filtered to one package and/or to versions newer than since_version.
const listReleaseNotes = fabricService({
    alias: "list_release_notes",
    description: "List available release notes for Jaypie packages. Filter by package name and/or get only versions newer than a specified version.",
    input: {
        package: {
            type: String,
            required: false,
            description: "Filter by package name (e.g., 'jaypie', 'mcp'). If not provided, lists release notes for all packages.",
        },
        since_version: {
            type: String,
            required: false,
            description: "Only show versions newer than this (e.g., '1.0.0'). Uses semver comparison.",
        },
    },
    service: async ({ package: packageFilter, since_version: sinceVersion, }) => {
        // Each package has its own subdirectory of release notes.
        const entries = await fs.readdir(RELEASE_NOTES_PATH, {
            withFileTypes: true,
        });
        const packageDirs = entries
            .filter((entry) => entry.isDirectory())
            .map((entry) => entry.name);
        const packagesToList = packageFilter
            ? packageDirs.filter((pkg) => pkg === packageFilter)
            : packageDirs;
        if (packagesToList.length === 0 && packageFilter) {
            return `No release notes found for package "${packageFilter}".`;
        }
        const allNotes = await Promise.all(packagesToList.map((pkg) => getPackageReleaseNotes(pkg)));
        let flatNotes = allNotes.flat();
        if (sinceVersion) {
            flatNotes = filterReleaseNotesSince(flatNotes, sinceVersion);
        }
        if (flatNotes.length === 0) {
            const filterDesc = sinceVersion ? ` newer than ${sinceVersion}` : "";
            return `No release notes found${filterDesc}.`;
        }
        return flatNotes.map(formatReleaseNoteListItem).join("\n");
    },
});
|
|
1483
|
+
// Tool: read one release note file (RELEASE_NOTES_PATH/<package>/<version>.md).
const readReleaseNote = fabricService({
    alias: "read_release_note",
    description: "Read the full content of a specific release note. Call list_release_notes first to see available versions.",
    input: {
        package: {
            type: String,
            required: true,
            description: "Package name (e.g., 'jaypie', 'mcp')",
        },
        version: {
            type: String,
            required: true,
            description: "Version number (e.g., '1.2.3')",
        },
    },
    // Both inputs are caller-supplied and interpolated into a filesystem
    // path, so reject values that would escape RELEASE_NOTES_PATH.
    service: async ({ package: packageName, version: ver }) => {
        const filePath = path.join(RELEASE_NOTES_PATH, packageName, `${ver}.md`);
        const base = path.resolve(RELEASE_NOTES_PATH);
        const resolved = path.resolve(filePath);
        if (!resolved.startsWith(base + path.sep)) {
            throw new Error(`Invalid package or version: ${packageName}@${ver}`);
        }
        return fs.readFile(filePath, "utf-8");
    },
});
|
|
1503
|
+
// =============================================================================
|
|
1504
|
+
// DATADOG SERVICES
|
|
1505
|
+
// =============================================================================
|
|
1506
|
+
// Tool: fetch individual Datadog log entries (see datadog_log_analytics
// for aggregated counts).
const datadogLogs = fabricService({
    alias: "datadog_logs",
    description: "Search and retrieve individual Datadog log entries. Use this to view actual log messages and details. For aggregated counts/statistics (e.g., 'how many errors by service?'), use datadog_log_analytics instead. Requires DATADOG_API_KEY and DATADOG_APP_KEY environment variables.",
    input: {
        query: {
            type: String,
            required: false,
            description: "Search query to filter logs. Examples: 'status:error', '@http.status_code:500', '*timeout*', '@requestId:abc123'. Combined with DD_ENV, DD_SERVICE, DD_SOURCE env vars if set.",
        },
        source: {
            type: String,
            required: false,
            description: "Override the log source (e.g., 'lambda', 'auth0', 'nginx'). If not provided, uses DD_SOURCE env var or defaults to 'lambda'.",
        },
        env: {
            type: String,
            required: false,
            description: "Override the environment (e.g., 'sandbox', 'kitchen', 'lab', 'studio', 'production'). If not provided, uses DD_ENV env var.",
        },
        service: {
            type: String,
            required: false,
            description: "Override the service name. If not provided, uses DD_SERVICE env var.",
        },
        from: {
            type: String,
            required: false,
            description: "Start time. Formats: relative ('now-15m', 'now-1h', 'now-1d'), ISO 8601 ('2024-01-15T10:00:00Z'). Defaults to 'now-15m'.",
        },
        to: {
            type: String,
            required: false,
            description: "End time. Formats: 'now', relative ('now-5m'), or ISO 8601. Defaults to 'now'.",
        },
        limit: {
            type: Number,
            required: false,
            description: "Max logs to return (1-1000). Defaults to 50.",
        },
        sort: {
            type: ["timestamp", "-timestamp"],
            required: false,
            description: "Sort order: 'timestamp' (oldest first) or '-timestamp' (newest first, default).",
        },
    },
    // Fail fast when credentials are missing, then delegate the search.
    service: async (args) => {
        const creds = getDatadogCredentials();
        if (!creds) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        const response = await searchDatadogLogs(creds, args, log);
        if (response.success) {
            return response;
        }
        throw new Error(response.error);
    },
});
|
|
1563
|
+
// Tool: aggregate Datadog logs grouped by one or more facets.
const datadogLogAnalytics = fabricService({
    alias: "datadog_log_analytics",
    description: "Aggregate and analyze Datadog logs by grouping them by fields. Use this for statistics and counts (e.g., 'errors by service', 'requests by status code'). For viewing individual log entries, use datadog_logs instead.",
    input: {
        groupBy: {
            type: [String],
            required: true,
            description: "Fields to group by. Examples: ['source'], ['service', 'status'], ['@http.status_code']. Common facets: source, service, status, host, @http.status_code, @env.",
        },
        query: {
            type: String,
            required: false,
            description: "Filter query. Examples: 'status:error', '*timeout*', '@http.method:POST'. Use '*' for all logs.",
        },
        source: {
            type: String,
            required: false,
            description: "Override the log source filter. Use '*' to include all sources. If not provided, uses DD_SOURCE env var or defaults to 'lambda'.",
        },
        env: {
            type: String,
            required: false,
            description: "Override the environment filter. If not provided, uses DD_ENV env var.",
        },
        service: {
            type: String,
            required: false,
            description: "Override the service name filter. If not provided, uses DD_SERVICE env var.",
        },
        from: {
            type: String,
            required: false,
            description: "Start time. Formats: relative ('now-15m', 'now-1h', 'now-1d'), ISO 8601 ('2024-01-15T10:00:00Z'). Defaults to 'now-15m'.",
        },
        to: {
            type: String,
            required: false,
            description: "End time. Formats: 'now', relative ('now-5m'), or ISO 8601. Defaults to 'now'.",
        },
        aggregation: {
            type: ["count", "avg", "sum", "min", "max", "cardinality"],
            required: false,
            description: "Aggregation type. 'count' counts logs, others require a metric field. Defaults to 'count'.",
        },
        metric: {
            type: String,
            required: false,
            description: "Metric field to aggregate when using avg, sum, min, max, or cardinality. E.g., '@duration', '@http.response_time'.",
        },
    },
    service: async (input) => {
        const credentials = getDatadogCredentials();
        if (!credentials) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        // Build the compute clause. Only include `metric` when one was
        // provided so the request never carries an explicit
        // `metric: undefined` key (the original spread always added it).
        let compute;
        if (input.aggregation) {
            compute = input.metric
                ? [{ aggregation: input.aggregation, metric: input.metric }]
                : [{ aggregation: input.aggregation }];
        }
        else {
            compute = [{ aggregation: "count" }];
        }
        const result = await aggregateDatadogLogs(credentials, { ...input, compute }, log);
        if (!result.success) {
            throw new Error(result.error);
        }
        return result;
    },
});
|
|
1628
|
+
// Tool: list Datadog monitors, optionally filtered by status/tags/name.
const datadogMonitors = fabricService({
    alias: "datadog_monitors",
    description: "List and check Datadog monitors. Shows monitor status (Alert, Warn, No Data, OK), name, type, and tags. Useful for quickly checking if any monitors are alerting.",
    input: {
        status: {
            type: Array,
            required: false,
            description: "Filter monitors by status. E.g., ['Alert', 'Warn'] to see only alerting monitors.",
        },
        tags: {
            type: Array,
            required: false,
            description: "Filter monitors by resource tags (tags on the monitored resources).",
        },
        monitorTags: {
            type: Array,
            required: false,
            description: "Filter monitors by monitor tags (tags on the monitor itself).",
        },
        name: {
            type: String,
            required: false,
            description: "Filter monitors by name (partial match supported).",
        },
    },
    // Fail fast when credentials are missing, then delegate the listing.
    service: async (args) => {
        const creds = getDatadogCredentials();
        if (!creds) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        const response = await listDatadogMonitors(creds, args, log);
        if (response.success) {
            return response;
        }
        throw new Error(response.error);
    },
});
|
|
1665
|
+
// Tool: list Synthetic tests, or fetch recent results when a testId is given.
const datadogSynthetics = fabricService({
    alias: "datadog_synthetics",
    description: "List Datadog Synthetic tests and optionally get recent results for a specific test. Shows test status, type (api/browser), and locations.",
    input: {
        type: {
            type: ["api", "browser"],
            required: false,
            description: "Filter tests by type: 'api' or 'browser'.",
        },
        tags: {
            type: Array,
            required: false,
            description: "Filter tests by tags.",
        },
        testId: {
            type: String,
            required: false,
            description: "If provided, fetches recent results for this specific test (public_id). Otherwise lists all tests.",
        },
    },
    service: async (args) => {
        const creds = getDatadogCredentials();
        if (!creds) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        // A testId narrows the call to that test's recent results;
        // otherwise list all tests. Both paths share the error handling.
        const response = args.testId
            ? await getDatadogSyntheticResults(creds, args.testId, log)
            : await listDatadogSynthetics(creds, args, log);
        if (response.success) {
            return response;
        }
        throw new Error(response.error);
    },
});
|
|
1704
|
+
// Tool: run a Datadog metrics query over a time window.
const datadogMetrics = fabricService({
    alias: "datadog_metrics",
    description: "Query Datadog metrics. Returns timeseries data for the specified metric query. Useful for checking specific metric values.",
    input: {
        query: {
            type: String,
            required: true,
            description: "Metric query. Format: 'aggregation:metric.name{tags}'. Examples: 'avg:system.cpu.user{*}', 'sum:aws.lambda.invocations{function:my-func}.as_count()', 'max:aws.lambda.duration{env:production}'.",
        },
        from: {
            type: String,
            required: false,
            description: "Start time. Formats: relative ('1h', '30m', '1d'), or Unix timestamp. Defaults to '1h'.",
        },
        to: {
            type: String,
            required: false,
            description: "End time. Formats: 'now' or Unix timestamp. Defaults to 'now'.",
        },
    },
    service: async (input) => {
        const credentials = getDatadogCredentials();
        if (!credentials) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        const now = Math.floor(Date.now() / 1000);
        // Seconds per unit for relative "from" values like '2h', '30m', '1d'.
        const UNIT_SECONDS = { h: 3600, m: 60, d: 86400 };
        const fromStr = input.from || "1h";
        let fromTs;
        // Single-pass parse: the original matched each pattern twice
        // (once in the condition, once to extract the digits).
        const relative = fromStr.match(/^(\d+)([hmd])$/);
        if (/^\d+$/.test(fromStr)) {
            // Bare digits are a Unix timestamp.
            fromTs = Number.parseInt(fromStr, 10);
        }
        else if (relative) {
            fromTs = now - Number.parseInt(relative[1], 10) * UNIT_SECONDS[relative[2]];
        }
        else {
            // Unrecognized format: fall back to one hour ago.
            fromTs = now - 3600;
        }
        const toStr = input.to || "now";
        const toTs = toStr === "now" ? now : Number.parseInt(toStr, 10);
        const result = await queryDatadogMetrics(credentials, { query: input.query, from: fromTs, to: toTs }, log);
        if (!result.success) {
            throw new Error(result.error);
        }
        return result;
    },
});
|
|
1759
|
+
// Tool: search Datadog RUM (Real User Monitoring) events.
const datadogRum = fabricService({
    alias: "datadog_rum",
    description: "Search Datadog RUM (Real User Monitoring) events. Find user sessions, page views, errors, and actions. Useful for debugging frontend issues and understanding user behavior.",
    input: {
        query: {
            type: String,
            required: false,
            description: "RUM search query. E.g., '@type:error', '@session.id:abc123', '@view.url:*checkout*'. Defaults to '*' (all events).",
        },
        from: {
            type: String,
            required: false,
            description: "Start time. Formats: relative ('now-15m', 'now-1h', 'now-1d'), ISO 8601 ('2024-01-15T10:00:00Z'). Defaults to 'now-15m'.",
        },
        to: {
            type: String,
            required: false,
            description: "End time. Formats: 'now', relative ('now-5m'), or ISO 8601. Defaults to 'now'.",
        },
        limit: {
            type: Number,
            required: false,
            description: "Max events to return (1-1000). Defaults to 50.",
        },
    },
    // Fail fast when credentials are missing, then delegate the search.
    service: async (args) => {
        const creds = getDatadogCredentials();
        if (!creds) {
            throw new Error("Datadog credentials not found. Set DATADOG_API_KEY and DATADOG_APP_KEY.");
        }
        const response = await searchDatadogRum(creds, args, log);
        if (response.success) {
            return response;
        }
        throw new Error(response.error);
    },
});
|
|
1796
|
+
// =============================================================================
|
|
1797
|
+
// LLM SERVICES
|
|
1798
|
+
// =============================================================================
|
|
1799
|
+
// Tool: make a raw debug call against an LLM provider and surface the
// provider's unprocessed response.
const llmDebugCall = fabricService({
    alias: "llm_debug_call",
    description: "Make a debug LLM API call and inspect the raw response. Useful for understanding how each provider formats responses, especially for reasoning/thinking content. Returns full history, raw responses, and extracted reasoning.",
    input: {
        provider: {
            type: ["anthropic", "gemini", "openai", "openrouter"],
            required: true,
            description: "LLM provider to call",
        },
        model: {
            type: String,
            required: false,
            description: "Model to use. If not provided, uses a sensible default. For reasoning tests, try 'o3-mini' with openai.",
        },
        message: {
            type: String,
            required: true,
            description: "Message to send to the LLM. For reasoning tests, try something that requires thinking like 'What is 15 * 17? Think step by step.'",
        },
    },
    service: async (args) => {
        const { provider, model, message } = args;
        const response = await debugLlmCall({ provider, model, message });
        if (response.success) {
            return response;
        }
        throw new Error(response.error);
    },
});
|
|
1831
|
+
// Tool: enumerate supported LLM providers; no input required.
const llmListProviders = fabricService({
    alias: "llm_list_providers",
    description: "List available LLM providers with their default and reasoning-capable models.",
    input: {},
    service: async () => {
        return listLlmProviders();
    },
});
|
|
1837
|
+
// =============================================================================
|
|
1838
|
+
// AWS SERVICES
|
|
1839
|
+
// =============================================================================
|
|
1840
|
+
// Tool: enumerate locally configured AWS profiles; no input required.
const awsListProfiles = fabricService({
    alias: "aws_list_profiles",
    description: "List available AWS profiles from ~/.aws/config and credentials.",
    input: {},
    service: async () => {
        const response = await listAwsProfiles(log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
1852
|
+
// Tool: list executions for a Step Functions state machine.
const awsStepfunctionsListExecutions = fabricService({
    alias: "aws_stepfunctions_list_executions",
    description: "List Step Function executions for a state machine. Useful for finding stuck or running executions.",
    input: {
        stateMachineArn: {
            type: String,
            required: true,
            description: "ARN of the state machine",
        },
        statusFilter: {
            type: [
                "RUNNING",
                "SUCCEEDED",
                "FAILED",
                "TIMED_OUT",
                "ABORTED",
                "PENDING_REDRIVE",
            ],
            required: false,
            description: "Filter by execution status",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
        maxResults: {
            type: Number,
            required: false,
            description: "Max results (1-1000, default 100)",
        },
    },
    service: async (args) => {
        const response = await listStepFunctionExecutions(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
1897
|
+
// Tool: abort a running Step Functions execution (destructive).
const awsStepfunctionsStopExecution = fabricService({
    alias: "aws_stepfunctions_stop_execution",
    description: "Stop a running Step Function execution. Use with caution - this will abort the workflow.",
    input: {
        executionArn: {
            type: String,
            required: true,
            description: "ARN of the execution to stop",
        },
        cause: {
            type: String,
            required: false,
            description: "Description of why the execution was stopped",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await stopStepFunctionExecution(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
1930
|
+
// Tool: list Lambda functions, optionally filtered by name prefix.
const awsLambdaListFunctions = fabricService({
    alias: "aws_lambda_list_functions",
    description: "List Lambda functions in the account. Filter by function name prefix.",
    input: {
        functionNamePrefix: {
            type: String,
            required: false,
            description: "Filter by function name prefix",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
        maxResults: {
            type: Number,
            required: false,
            description: "Max results to return",
        },
    },
    service: async (args) => {
        const response = await listLambdaFunctions(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
1963
|
+
// Tool: fetch configuration for one Lambda function.
const awsLambdaGetFunction = fabricService({
    alias: "aws_lambda_get_function",
    description: "Get configuration and details for a specific Lambda function.",
    input: {
        functionName: {
            type: String,
            required: true,
            description: "Function name or ARN",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await getLambdaFunction(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
1991
|
+
// Tool: search a CloudWatch Logs group by pattern and time range.
const awsLogsFilterLogEvents = fabricService({
    alias: "aws_logs_filter_log_events",
    description: "Search CloudWatch Logs for a log group. Filter by pattern and time range.",
    input: {
        logGroupName: {
            type: String,
            required: true,
            description: "Log group name (e.g., /aws/lambda/my-function)",
        },
        filterPattern: {
            type: String,
            required: false,
            description: "CloudWatch filter pattern (e.g., 'ERROR', '{ $.level = \"error\" }')",
        },
        startTime: {
            type: String,
            required: false,
            description: "Start time (ISO 8601 or relative like 'now-1h'). Defaults to 'now-15m'.",
        },
        endTime: {
            type: String,
            required: false,
            description: "End time (ISO 8601 or 'now'). Defaults to 'now'.",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
        limit: {
            type: Number,
            required: false,
            description: "Max events to return (default 100)",
        },
    },
    service: async (args) => {
        // Apply the documented defaults before delegating.
        const params = {
            ...args,
            startTime: args.startTime || "now-15m",
            endTime: args.endTime || "now",
            limit: args.limit || 100,
        };
        const response = await filterLogEvents(params, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2044
|
+
// Tool: list objects in an S3 bucket, optionally under a key prefix.
const awsS3ListObjects = fabricService({
    alias: "aws_s3_list_objects",
    description: "List objects in an S3 bucket with optional prefix filtering.",
    input: {
        bucket: {
            type: String,
            required: true,
            description: "S3 bucket name",
        },
        prefix: {
            type: String,
            required: false,
            description: "Object key prefix filter",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
        maxResults: {
            type: Number,
            required: false,
            description: "Max results to return",
        },
    },
    service: async (args) => {
        const response = await listS3Objects(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2082
|
+
// Tool: describe a CloudFormation stack (status, outputs, etc.).
const awsCloudformationDescribeStack = fabricService({
    alias: "aws_cloudformation_describe_stack",
    description: "Get details and status of a CloudFormation stack.",
    input: {
        stackName: {
            type: String,
            required: true,
            description: "Stack name or ARN",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await describeStack(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2110
|
+
// Tool: describe a DynamoDB table (keys, indexes, capacity).
const awsDynamodbDescribeTable = fabricService({
    alias: "aws_dynamodb_describe_table",
    description: "Get metadata about a DynamoDB table including key schema, indexes, and provisioned capacity.",
    input: {
        tableName: {
            type: String,
            required: true,
            description: "DynamoDB table name",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await describeDynamoDBTable(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2138
|
+
// Tool: scan a DynamoDB table (prefer aws_dynamodb_query when possible).
const awsDynamodbScan = fabricService({
    alias: "aws_dynamodb_scan",
    description: "Scan a DynamoDB table. Use sparingly on large tables - prefer query when possible.",
    input: {
        tableName: {
            type: String,
            required: true,
            description: "DynamoDB table name",
        },
        filterExpression: {
            type: String,
            required: false,
            description: "Filter expression (e.g., 'status = :s')",
        },
        expressionAttributeValues: {
            type: String,
            required: false,
            description: 'JSON object of attribute values (e.g., \'{\\":s\\":{\\"S\\":\\"active\\"}}\')',
        },
        limit: {
            type: Number,
            required: false,
            description: "Max items to return (default 25)",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        // Default the item limit before delegating.
        const params = { ...args, limit: args.limit || 25 };
        const response = await scanDynamoDB(params, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2181
|
+
// Tool: query a DynamoDB table or index by key condition.
const awsDynamodbQuery = fabricService({
    alias: "aws_dynamodb_query",
    description: "Query a DynamoDB table by partition key. More efficient than scan for targeted lookups.",
    input: {
        tableName: {
            type: String,
            required: true,
            description: "DynamoDB table name",
        },
        keyConditionExpression: {
            type: String,
            required: true,
            description: "Key condition (e.g., 'pk = :pk')",
        },
        expressionAttributeValues: {
            type: String,
            required: true,
            description: "JSON object of attribute values",
        },
        indexName: {
            type: String,
            required: false,
            description: "GSI or LSI name to query",
        },
        filterExpression: {
            type: String,
            required: false,
            description: "Additional filter expression",
        },
        limit: {
            type: Number,
            required: false,
            description: "Max items to return",
        },
        scanIndexForward: {
            type: Boolean,
            required: false,
            description: "Sort ascending (true) or descending (false)",
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await queryDynamoDB(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2239
|
+
// Tool: fetch one DynamoDB item by its full primary key.
const awsDynamodbGetItem = fabricService({
    alias: "aws_dynamodb_get_item",
    description: "Get a single item from a DynamoDB table by its primary key.",
    input: {
        tableName: {
            type: String,
            required: true,
            description: "DynamoDB table name",
        },
        key: {
            type: String,
            required: true,
            description: 'JSON object of the primary key (e.g., \'{\\"pk\\":{\\"S\\":\\"user#123\\"},\\"sk\\":{\\"S\\":\\"profile\\"}}\')',
        },
        profile: {
            type: String,
            required: false,
            description: "AWS profile to use",
        },
        region: {
            type: String,
            required: false,
            description: "AWS region",
        },
    },
    service: async (args) => {
        const response = await getDynamoDBItem(args, log);
        if (response.success) {
            return response.data;
        }
        throw new Error(response.error);
    },
});
|
|
2272
|
+
// SQS ListQueues wrapper: enumerate queues, optionally filtered by name prefix.
const awsSqsListQueues = fabricService({
  alias: "aws_sqs_list_queues",
  description: "List SQS queues in the account. Filter by queue name prefix.",
  input: {
    queueNamePrefix: {
      type: String,
      required: false,
      description: "Filter by queue name prefix",
    },
    profile: { type: String, required: false, description: "AWS profile to use" },
    region: { type: String, required: false, description: "AWS region" },
  },
  service: async (input) => {
    // listSQSQueues returns a { success, data?, error? } result object;
    // convert failure into a throw so fabric reports it uniformly.
    const { success, error, data } = await listSQSQueues(input, log);
    if (!success) {
      throw new Error(error);
    }
    return data;
  },
});
|
|
2300
|
+
// SQS GetQueueAttributes wrapper: inspect queue configuration and counters.
const awsSqsGetQueueAttributes = fabricService({
  alias: "aws_sqs_get_queue_attributes",
  description: "Get attributes for an SQS queue including approximate message count, visibility timeout, and dead-letter config.",
  input: {
    queueUrl: { type: String, required: true, description: "SQS queue URL" },
    profile: { type: String, required: false, description: "AWS profile to use" },
    region: { type: String, required: false, description: "AWS region" },
  },
  service: async (input) => {
    // getSQSQueueAttributes returns a { success, data?, error? } result object;
    // propagate failures as thrown errors.
    const { success, error, data } = await getSQSQueueAttributes(input, log);
    if (!success) {
      throw new Error(error);
    }
    return data;
  },
});
|
|
2328
|
+
// SQS ReceiveMessage wrapper: peek at queue messages without deleting them.
// Received messages reappear on the queue after the visibility timeout.
const awsSqsReceiveMessage = fabricService({
  alias: "aws_sqs_receive_message",
  description: "Receive messages from an SQS queue for inspection. Messages are returned to the queue after visibility timeout.",
  input: {
    queueUrl: { type: String, required: true, description: "SQS queue URL" },
    maxNumberOfMessages: {
      type: Number,
      required: false,
      description: "Max messages to receive (1-10, default 1)",
    },
    visibilityTimeout: {
      type: Number,
      required: false,
      description: "Seconds to hide message (default 30)",
    },
    profile: { type: String, required: false, description: "AWS profile to use" },
    region: { type: String, required: false, description: "AWS region" },
  },
  service: async (input) => {
    // Use ?? (not ||) for defaults: an explicit visibilityTimeout of 0 is valid
    // in SQS (the message becomes visible again immediately) and must not be
    // silently replaced with 30 the way the truthiness operator would.
    const result = await receiveSQSMessage({
      ...input,
      maxNumberOfMessages: input.maxNumberOfMessages ?? 1,
      visibilityTimeout: input.visibilityTimeout ?? 30,
    }, log);
    if (!result.success) {
      throw new Error(result.error);
    }
    return result.data;
  },
});
|
|
2370
|
+
// SQS PurgeQueue wrapper: irreversibly delete every message in a queue.
const awsSqsPurgeQueue = fabricService({
  alias: "aws_sqs_purge_queue",
  description: "Delete all messages from an SQS queue. Use with caution - this is irreversible.",
  input: {
    queueUrl: { type: String, required: true, description: "SQS queue URL" },
    profile: { type: String, required: false, description: "AWS profile to use" },
    region: { type: String, required: false, description: "AWS region" },
  },
  service: async (input) => {
    // Purge has no meaningful payload; report a simple success flag once the
    // underlying helper confirms the operation, otherwise throw its error.
    const { success, error } = await purgeSQSQueue(input, log);
    if (!success) {
      throw new Error(error);
    }
    return { success: true };
  },
});
|
|
2398
|
+
// =============================================================================
// SUITE CREATION
// =============================================================================
// NOTE(review): VERSION reads "0.3.4" while the published package is 0.4.0 —
// confirm this constant is kept in sync with package.json at build time.
const VERSION = "0.3.4";
const suite = createServiceSuite({ name: "jaypie", version: VERSION });

// Register services grouped by category; order within each group is preserved.
const docsServices = [
  skill,
  version,
  listPrompts,
  readPrompt,
  listReleaseNotes,
  readReleaseNote,
];
for (const service of docsServices) {
  suite.register(service, "docs");
}

const datadogServices = [
  datadogLogs,
  datadogLogAnalytics,
  datadogMonitors,
  datadogSynthetics,
  datadogMetrics,
  datadogRum,
];
for (const service of datadogServices) {
  suite.register(service, "datadog");
}

const llmServices = [llmDebugCall, llmListProviders];
for (const service of llmServices) {
  suite.register(service, "llm");
}

const awsServices = [
  awsListProfiles,
  awsStepfunctionsListExecutions,
  awsStepfunctionsStopExecution,
  awsLambdaListFunctions,
  awsLambdaGetFunction,
  awsLogsFilterLogEvents,
  awsS3ListObjects,
  awsCloudformationDescribeStack,
  awsDynamodbDescribeTable,
  awsDynamodbScan,
  awsDynamodbQuery,
  awsDynamodbGetItem,
  awsSqsListQueues,
  awsSqsGetQueueAttributes,
  awsSqsReceiveMessage,
  awsSqsPurgeQueue,
];
for (const service of awsServices) {
  suite.register(service, "aws");
}

export { suite };
//# sourceMappingURL=suite.js.map
|