@fluentcommerce/fluent-mcp-extn 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +818 -0
- package/dist/config.js +195 -0
- package/dist/entity-registry.js +418 -0
- package/dist/entity-tools.js +414 -0
- package/dist/environment-tools.js +573 -0
- package/dist/errors.js +150 -0
- package/dist/event-payload.js +22 -0
- package/dist/fluent-client.js +229 -0
- package/dist/index.js +47 -0
- package/dist/resilience.js +52 -0
- package/dist/response-shaper.js +361 -0
- package/dist/sdk-client.js +237 -0
- package/dist/settings-tools.js +348 -0
- package/dist/test-tools.js +388 -0
- package/dist/tools.js +3254 -0
- package/dist/workflow-tools.js +752 -0
- package/docs/CONTRIBUTING.md +100 -0
- package/docs/E2E_TESTING.md +739 -0
- package/docs/HANDOVER_COPILOT_SETUP_STEPS.example.yml +35 -0
- package/docs/HANDOVER_ENV.example +29 -0
- package/docs/HANDOVER_GITHUB_COPILOT.md +165 -0
- package/docs/HANDOVER_GITHUB_REPO_MCP_CONFIG.example.json +31 -0
- package/docs/HANDOVER_VSCODE_MCP_JSON.example.json +10 -0
- package/docs/IMPLEMENTATION_GUIDE.md +299 -0
- package/docs/RUNBOOK.md +312 -0
- package/docs/TOOL_REFERENCE.md +1810 -0
- package/package.json +68 -0
package/dist/tools.js
ADDED
|
@@ -0,0 +1,3254 @@
|
|
|
1
|
+
import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
|
|
2
|
+
import { FluentConnectionTester, GraphQLIntrospectionService, buildAliasedMutationQuery, parseAliasedMutationResponse, formatErrorSummary, WebhookValidationService, SignatureAlgorithm, } from "@fluentcommerce/fc-connect-sdk";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { toSafeConfigSummary, validateConfig, } from "./config.js";
|
|
5
|
+
import { buildEventPayload } from "./event-payload.js";
|
|
6
|
+
import { ToolError, toToolFailure } from "./errors.js";
|
|
7
|
+
// New tool modules
|
|
8
|
+
import { ENTITY_TOOL_DEFINITIONS, handleEntityCreate, handleEntityUpdate, handleEntityGet, } from "./entity-tools.js";
|
|
9
|
+
import { WORKFLOW_TOOL_DEFINITIONS, handleWorkflowUpload, handleWorkflowDiff, handleWorkflowSimulate, } from "./workflow-tools.js";
|
|
10
|
+
import { SETTING_TOOL_DEFINITIONS, handleSettingUpsert, handleSettingBulkUpsert, } from "./settings-tools.js";
|
|
11
|
+
import { ENVIRONMENT_TOOL_DEFINITIONS, handleEnvironmentDiscover, handleEnvironmentValidate, } from "./environment-tools.js";
|
|
12
|
+
import { TEST_TOOL_DEFINITIONS, handleTestAssert, } from "./test-tools.js";
|
|
13
|
+
import { shapeResponse, summarizeConnection, analyzeEvents, } from "./response-shaper.js";
|
|
14
|
+
/**
|
|
15
|
+
* MCP tool registration and request handling.
|
|
16
|
+
*
|
|
17
|
+
* Design principles:
|
|
18
|
+
* - strict input validation (zod)
|
|
19
|
+
* - deterministic payload shaping
|
|
20
|
+
* - standardized success/failure response envelopes
|
|
21
|
+
*/
|
|
22
|
+
// ---------------------------------------------------------------------------
|
|
23
|
+
// Input schemas
|
|
24
|
+
// ---------------------------------------------------------------------------
|
|
25
|
+
/**
 * Input schema for event.build: constructs an Event API payload without
 * sending it. name/entityRef/entityType are mandatory; remaining context
 * fields are optional, and source/type/attributes carry defaults.
 */
const EventBuildInputSchema = z.object({
  // Required event identity and target entity.
  name: z.string().min(1),
  entityRef: z.string().min(1),
  entityType: z.string().min(1),
  // Optional entity / root-entity context.
  entityId: z.string().optional(),
  retailerId: z.string().optional(),
  entitySubtype: z.string().optional(),
  rootEntityRef: z.string().optional(),
  rootEntityType: z.string().optional(),
  rootEntityId: z.string().optional(),
  accountId: z.string().optional(),
  // Defaults applied when the caller omits these keys.
  source: z.string().default("fluent-mcp-extn"),
  type: z.string().default("NORMAL"),
  attributes: z.record(z.string(), z.unknown()).default({}),
});
|
|
40
|
+
/**
 * Input schema for event.send: every event.build field plus dispatch
 * controls. Derived from EventBuildInputSchema via .extend() so the two
 * schemas cannot drift apart — previously all 14 shared fields were
 * duplicated inline, inviting silent divergence.
 */
const EventPublishInputSchema = EventBuildInputSchema.extend({
  // Dispatch mode: fire-and-forget ("async") or wait for processing ("sync").
  mode: z.enum(["async", "sync"]).default("async"),
  // When true, build the payload but skip the API call.
  dryRun: z.boolean().default(false),
});
|
|
57
|
+
/** Input schema for event.get: a single non-empty event ID. */
const EventGetInputSchema = z.object({ eventId: z.string().min(1) });
|
|
60
|
+
/** Tool input for event.list. Supports canonical keys and backward-compatible aliases. */
const EventListInputSchema = z
  .object({
    // --- Canonical keys (SDK FluentEventQueryParams) ---
    eventId: z.string().optional(),
    name: z.string().optional(),
    category: z.string().optional(),
    retailerId: z.union([z.string(), z.number()]).optional(),
    eventType: z.string().optional(),
    eventStatus: z.string().optional(),
    from: z.string().optional(),
    to: z.string().optional(),
    start: z.number().int().min(1).optional(),
    count: z.number().int().min(1).max(500).optional(),
    // --- Response shaping ---
    fields: z
      .array(z.string())
      .optional()
      .describe("Project response to only these fields per event. Reduces token usage."),
    analyze: z
      .boolean()
      .default(false)
      .describe("If true, return grouped analysis (by name/entityType/status with counts) instead of raw events."),
    // --- context.* (canonical) ---
    "context.rootEntityType": z.string().optional(),
    "context.rootEntityId": z.union([z.string(), z.number()]).optional(),
    "context.rootEntityRef": z.string().optional(),
    "context.entityType": z.string().optional(),
    "context.entityId": z.union([z.string(), z.number()]).optional(),
    "context.entityRef": z.string().optional(),
    "context.sourceEvents": z.array(z.string()).optional(),
    // --- Backward-compatible aliases ---
    id: z.string().optional(),
    entityRef: z.string().optional(),
    entityType: z.string().optional(),
    type: z.string().optional(),
  })
  // Unknown keys pass through untouched so newer SDK params keep working.
  .passthrough();
|
|
98
|
+
/**
 * Input schema for event.flowInspect: one-call event forensics for a root
 * entity. Only rootEntityRef is required; the include* toggles select which
 * sections of the analysis are fetched and returned.
 */
const EventFlowInspectInputSchema = z.object({
  // Root entity being inspected.
  rootEntityRef: z.string().min(1),
  rootEntityType: z.string().min(1).optional(),
  rootEntityId: z.union([z.string(), z.number()]).optional(),
  // Optional ISO time window.
  from: z.string().optional(),
  to: z.string().optional(),
  // Output shaping.
  compact: z.boolean().default(true),
  maxPages: z.number().int().min(1).max(50).default(10),
  // Section toggles (most default true; heavier sections default false).
  includeAudit: z.boolean().default(true),
  includeEventDetails: z.boolean().default(true),
  includeAuditDetails: z.boolean().default(false),
  includeScheduled: z.boolean().default(true),
  includeRuleDetails: z.boolean().default(false),
  includeNoMatchDetails: z.boolean().default(true),
  includeCustomLogs: z.boolean().default(false),
  includeSnapshots: z.boolean().default(false),
  includeCrossEntity: z.boolean().default(false),
  includeExceptions: z.boolean().default(true),
  // Drill-down controls.
  inspectStatuses: z.array(z.string()).default(["NO_MATCH", "PENDING", "FAILED"]),
  maxDrilldowns: z.number().int().min(0).max(100).default(50),
  actionSampleLimit: z.number().int().min(1).max(200).default(100),
});
|
|
122
|
+
/**
 * One trigger condition for workflow.transitions. All fields are optional
 * here; retailerId requiredness is enforced by the tool's JSON Schema.
 */
const TransitionTriggerInputSchema = z.object({
  name: z.string().min(1).optional(),
  type: z.string().min(1).optional(),
  subtype: z.string().min(1).optional(),
  status: z.string().min(1).optional(),
  retailerId: z.string().min(1).optional(),
  module: z.string().min(1).optional(),
  flexType: z.string().min(1).optional(),
  // Workflow version may be given as a string or an integer.
  flexVersion: z.union([z.string().min(1), z.number().int()]).optional(),
});
|
|
132
|
+
/** Input schema for workflow.transitions: at least one trigger condition. */
const TransitionActionsInputSchema = z.object({
  triggers: z.array(TransitionTriggerInputSchema).min(1),
});
|
|
135
|
+
/** Input schema for plugin.list: optional name filter plus compact toggle. */
const PluginListInputSchema = z.object({
  // Case-insensitive substring filter on rule keys.
  name: z.string().optional(),
  compact: z
    .boolean()
    .default(false)
    .describe("If true, return only ruleInfo (name, description, entityTypes) per rule, stripping parameters and eventAttributes."),
});
|
|
142
|
+
/** Input schema for graphql.query: a raw query, optional variables, optional summary mode. */
const GraphQLQueryInputSchema = z.object({
  query: z.string().min(1),
  variables: z.record(z.string(), z.unknown()).optional(),
  summarize: z
    .boolean()
    .default(false)
    .describe("If true and result contains a Relay connection (edges/nodes), return record count, field names, and 3 sample records instead of full data."),
});
|
|
150
|
+
/** Input schema for batch job creation: job name is required, the rest optional. */
const BatchCreateInputSchema = z.object({
  name: z.string().min(1),
  retailerId: z.string().optional(),
  entityType: z.string().optional(),
  action: z.string().optional(),
});
|
|
156
|
+
/**
 * Input schema for sending a batch to an existing job: the jobId plus a
 * payload containing the action, entity type, and at least one entity.
 */
const BatchSendInputSchema = z.object({
  jobId: z.string().min(1),
  payload: z.object({
    action: z.string().min(1),
    entityType: z.string().min(1),
    // At least one entity record must be supplied.
    entities: z.array(z.record(z.string(), z.unknown())).min(1),
    source: z.string().optional(),
    event: z.string().optional(),
    catalogueRef: z.string().optional(),
    conditions: z.record(z.string(), z.unknown()).optional(),
  }),
});
|
|
168
|
+
/** Input schema for batch job status: the job ID. */
const BatchStatusInputSchema = z.object({ jobId: z.string().min(1) });
|
|
171
|
+
/** Input schema for per-batch status: both the job ID and the batch ID. */
const BatchGetBatchStatusInputSchema = z.object({
  jobId: z.string().min(1),
  batchId: z.string().min(1),
});
|
|
175
|
+
/** Input schema for batch job results: the job ID. */
const BatchResultsInputSchema = z.object({ jobId: z.string().min(1) });
|
|
178
|
+
/**
 * Input schema for graphql.queryAll: auto-paginating query with safety
 * limits (pages, records, wall-clock) and optional summary output.
 */
const GraphQLQueryAllInputSchema = z.object({
  query: z.string().min(1),
  variables: z.record(z.string(), z.unknown()).optional(),
  // Pagination safety rails.
  maxPages: z.number().int().min(1).max(500).default(100),
  maxRecords: z.number().int().min(1).max(50000).default(10000),
  timeoutMs: z.number().int().min(1000).max(600000).default(300000),
  // Cursor direction and error strategy.
  direction: z.enum(["forward", "backward"]).default("forward"),
  errorHandling: z.enum(["throw", "partial"]).default("throw"),
  summarize: z
    .boolean()
    .default(false)
    .describe("If true, return record count, field names, and 3 sample records instead of full paginated data."),
});
|
|
191
|
+
/** Input schema for graphql batch mutation: one mutation applied to 1-50 inputs. */
const GraphQLBatchMutateInputSchema = z.object({
  mutation: z.string().min(1),
  inputs: z.array(z.record(z.string(), z.unknown())).min(1).max(50),
  returnFields: z.array(z.string()).optional(),
  operationName: z.string().optional(),
});
|
|
197
|
+
/** Input schema for graphql introspection: inspect a type/mutation, or list all. */
const GraphQLIntrospectInputSchema = z.object({
  type: z.string().optional(),
  mutation: z.string().optional(),
  listMutations: z.boolean().default(false),
  listQueries: z.boolean().default(false),
});
|
|
203
|
+
/**
 * Input schema for webhook validation: the parsed payload, and optionally
 * the raw body + signature + key for signature verification.
 */
const WebhookValidateInputSchema = z.object({
  payload: z.record(z.string(), z.unknown()),
  rawBody: z.string().optional(),
  signature: z.string().optional(),
  publicKey: z.string().optional(),
  // Signature algorithm; SHA512withRSA is the modern default.
  algorithm: z.enum(["SHA512withRSA", "MD5withRSA"]).default("SHA512withRSA"),
});
|
|
212
|
+
/**
 * Input schema for metrics.query: a PromQL expression evaluated either at
 * one instant (time) or over a range (start/end/step).
 */
const MetricsQueryInputSchema = z.object({
  query: z.string().min(1),
  type: z.enum(["instant", "range"]).default("instant"),
  // Instant-mode evaluation timestamp.
  time: z.string().optional(),
  // Range-mode window and resolution.
  start: z.string().optional(),
  end: z.string().optional(),
  step: z.string().optional(),
});
|
|
220
|
+
/** Input schema for metrics top-events: time window plus filters and result size. */
const MetricsTopEventsInputSchema = z.object({
  from: z.string().min(1),
  to: z.string().optional(),
  entityType: z.string().optional(),
  eventStatus: z.string().optional(),
  eventType: z.string().default("ORCHESTRATION"),
  topN: z.number().int().min(1).max(100).default(20),
  maxPages: z.number().int().min(1).max(50).default(10),
});
|
|
229
|
+
/**
 * Input schema for metrics health check: a lookback window plus alerting
 * thresholds (percentages) with sensible defaults.
 */
const MetricsHealthCheckInputSchema = z.object({
  window: z.string().default("1h"),
  includeTopEvents: z.boolean().default(true),
  topN: z.number().int().min(1).max(100).default(10),
  thresholds: z
    .object({
      failureRate: z.number().min(0).max(100).default(5),
      pendingRate: z.number().min(0).max(100).default(10),
      dominanceRate: z.number().min(0).max(100).default(50),
    })
    .default({ failureRate: 5, pendingRate: 10, dominanceRate: 50 }),
});
|
|
239
|
+
/**
 * Input schema for metrics SLO report: lookback window, reporting toggles,
 * and per-SLO thresholds (rates in percent, latencies in seconds).
 */
const MetricsSloReportInputSchema = z.object({
  window: z.string().default("1h"),
  includeTopFailingEvents: z.boolean().default(true),
  topN: z.number().int().min(1).max(100).default(10),
  maxPages: z.number().int().min(1).max(50).default(10),
  thresholds: z
    .object({
      failureRate: z.number().min(0).max(100).default(5),
      noMatchRate: z.number().min(0).max(100).default(0),
      pendingRate: z.number().min(0).max(100).default(10),
      runtimeP95Seconds: z.number().min(0).default(5),
      inflightP95Seconds: z.number().min(0).default(60),
    })
    .default({
      failureRate: 5,
      noMatchRate: 0,
      pendingRate: 10,
      runtimeP95Seconds: 5,
      inflightP95Seconds: 60,
    }),
});
|
|
260
|
+
/** Input schema for metrics label catalog: one metric, a window, and output limits. */
const MetricsLabelCatalogInputSchema = z.object({
  metric: z.string().min(1),
  window: z.string().default("24h"),
  includeKnownLabels: z.boolean().default(true),
  maxValuesPerLabel: z.number().int().min(1).max(50).default(10),
});
|
|
266
|
+
// ---------------------------------------------------------------------------
|
|
267
|
+
// Tool definitions (JSON Schema for MCP)
|
|
268
|
+
// ---------------------------------------------------------------------------
|
|
269
|
+
const TOOL_DEFINITIONS = [
|
|
270
|
+
{
|
|
271
|
+
name: "config.validate",
|
|
272
|
+
description: "Run first: validates auth/base URL/retailer configuration and reports readiness for API tools.",
|
|
273
|
+
inputSchema: {
|
|
274
|
+
type: "object",
|
|
275
|
+
properties: {},
|
|
276
|
+
additionalProperties: false,
|
|
277
|
+
},
|
|
278
|
+
},
|
|
279
|
+
{
|
|
280
|
+
name: "health.ping",
|
|
281
|
+
description: "Quick diagnostics: confirms SDK adapter connection and config readiness when calls fail.",
|
|
282
|
+
inputSchema: {
|
|
283
|
+
type: "object",
|
|
284
|
+
properties: {},
|
|
285
|
+
additionalProperties: false,
|
|
286
|
+
},
|
|
287
|
+
},
|
|
288
|
+
{
|
|
289
|
+
name: "event.build",
|
|
290
|
+
description: "Payload-only builder for Event API input. Use before event.send to verify defaults and required context (no API call).",
|
|
291
|
+
inputSchema: {
|
|
292
|
+
type: "object",
|
|
293
|
+
properties: {
|
|
294
|
+
name: { type: "string", description: "Event name" },
|
|
295
|
+
entityRef: { type: "string", description: "Entity reference" },
|
|
296
|
+
entityId: { type: "string", description: "Entity ID (preferred when available)" },
|
|
297
|
+
entityType: {
|
|
298
|
+
type: "string",
|
|
299
|
+
description: "Entity type (for example ORDER or FULFILMENT)",
|
|
300
|
+
},
|
|
301
|
+
retailerId: { type: "string" },
|
|
302
|
+
entitySubtype: { type: "string" },
|
|
303
|
+
rootEntityRef: { type: "string" },
|
|
304
|
+
rootEntityType: { type: "string" },
|
|
305
|
+
rootEntityId: { type: "string" },
|
|
306
|
+
accountId: { type: "string" },
|
|
307
|
+
source: { type: "string" },
|
|
308
|
+
type: { type: "string" },
|
|
309
|
+
attributes: { type: "object", additionalProperties: true },
|
|
310
|
+
},
|
|
311
|
+
required: ["name", "entityRef", "entityType"],
|
|
312
|
+
additionalProperties: false,
|
|
313
|
+
},
|
|
314
|
+
},
|
|
315
|
+
{
|
|
316
|
+
name: "event.send",
|
|
317
|
+
description: "Builds and sends a Fluent event (supports dryRun). Recommended flow: config.validate -> event.build -> event.send dryRun=true -> event.send dryRun=false -> event.list/event.get.",
|
|
318
|
+
inputSchema: {
|
|
319
|
+
type: "object",
|
|
320
|
+
properties: {
|
|
321
|
+
name: { type: "string", description: "Event name" },
|
|
322
|
+
entityRef: { type: "string", description: "Entity reference" },
|
|
323
|
+
entityId: { type: "string", description: "Entity ID (preferred when available)" },
|
|
324
|
+
entityType: { type: "string", description: "Entity type" },
|
|
325
|
+
retailerId: { type: "string" },
|
|
326
|
+
entitySubtype: { type: "string" },
|
|
327
|
+
rootEntityRef: { type: "string" },
|
|
328
|
+
rootEntityType: { type: "string" },
|
|
329
|
+
rootEntityId: { type: "string" },
|
|
330
|
+
accountId: { type: "string" },
|
|
331
|
+
source: { type: "string" },
|
|
332
|
+
type: { type: "string" },
|
|
333
|
+
attributes: { type: "object", additionalProperties: true },
|
|
334
|
+
mode: {
|
|
335
|
+
type: "string",
|
|
336
|
+
enum: ["async", "sync"],
|
|
337
|
+
description: "Event dispatch mode (default: async)",
|
|
338
|
+
},
|
|
339
|
+
dryRun: {
|
|
340
|
+
type: "boolean",
|
|
341
|
+
description: "If true, build payload without sending",
|
|
342
|
+
},
|
|
343
|
+
},
|
|
344
|
+
required: ["name", "entityRef", "entityType"],
|
|
345
|
+
additionalProperties: false,
|
|
346
|
+
},
|
|
347
|
+
},
|
|
348
|
+
{
|
|
349
|
+
name: "event.get",
|
|
350
|
+
description: "Fetch one event by ID. Use after event.send if response contains eventId, or after event.list resolves eventId.",
|
|
351
|
+
inputSchema: {
|
|
352
|
+
type: "object",
|
|
353
|
+
properties: {
|
|
354
|
+
eventId: { type: "string", description: "Event ID to fetch" },
|
|
355
|
+
},
|
|
356
|
+
required: ["eventId"],
|
|
357
|
+
additionalProperties: false,
|
|
358
|
+
},
|
|
359
|
+
},
|
|
360
|
+
{
|
|
361
|
+
name: "event.list",
|
|
362
|
+
description: "List/filter events via SDK getEvents with filters and pagination. Use to verify sends, discover eventId, then call event.get for full details.",
|
|
363
|
+
inputSchema: {
|
|
364
|
+
type: "object",
|
|
365
|
+
properties: {
|
|
366
|
+
eventId: { type: "string", description: "Filter by event ID (canonical)" },
|
|
367
|
+
name: { type: "string", description: "Event name filter" },
|
|
368
|
+
category: { type: "string", description: "Event category" },
|
|
369
|
+
retailerId: { type: "string", description: "Retailer ID" },
|
|
370
|
+
eventType: { type: "string", description: "Event type (ORCHESTRATION, API, etc.)" },
|
|
371
|
+
eventStatus: { type: "string", description: "Event status (PENDING, SUCCESS, FAILED, etc.)" },
|
|
372
|
+
from: { type: "string", description: "Time window start (ISO)" },
|
|
373
|
+
to: { type: "string", description: "Time window end (ISO)" },
|
|
374
|
+
start: { type: "integer", minimum: 1, description: "Pagination offset" },
|
|
375
|
+
count: { type: "integer", minimum: 1, maximum: 500, description: "Page size" },
|
|
376
|
+
"context.rootEntityType": { type: "string", description: "Root entity type filter" },
|
|
377
|
+
"context.rootEntityId": { type: "string", description: "Root entity ID filter" },
|
|
378
|
+
"context.rootEntityRef": { type: "string", description: "Root entity ref filter" },
|
|
379
|
+
"context.entityType": { type: "string", description: "Entity type filter" },
|
|
380
|
+
"context.entityId": { type: "string", description: "Entity ID filter" },
|
|
381
|
+
"context.entityRef": { type: "string", description: "Entity ref filter" },
|
|
382
|
+
id: { type: "string", description: "Alias for eventId" },
|
|
383
|
+
entityRef: { type: "string", description: "Alias for context.entityRef" },
|
|
384
|
+
entityType: { type: "string", description: "Alias for context.entityType" },
|
|
385
|
+
type: { type: "string", description: "Alias for eventType" },
|
|
386
|
+
fields: {
|
|
387
|
+
type: "array",
|
|
388
|
+
items: { type: "string" },
|
|
389
|
+
description: "Project response: only include these fields per event. Example: [\"id\",\"name\",\"eventStatus\",\"category\",\"context.entityRef\",\"context.entityType\",\"generatedOn\"]. Omit for full events.",
|
|
390
|
+
},
|
|
391
|
+
analyze: {
|
|
392
|
+
type: "boolean",
|
|
393
|
+
description: "If true, return grouped analysis (by name/entityType/status with counts and time range) instead of raw events. Much smaller payload.",
|
|
394
|
+
},
|
|
395
|
+
},
|
|
396
|
+
additionalProperties: true,
|
|
397
|
+
},
|
|
398
|
+
},
|
|
399
|
+
{
|
|
400
|
+
name: "event.flowInspect",
|
|
401
|
+
description: [
|
|
402
|
+
"One-call event forensics for any root entity.",
|
|
403
|
+
"",
|
|
404
|
+
"Collects ORCHESTRATION events (and ORCHESTRATION_AUDIT when includeAudit=true), then extracts:",
|
|
405
|
+
"- status/entity timelines",
|
|
406
|
+
"- mutation request payloads from ACTION audit events",
|
|
407
|
+
"- webhook request/response diagnostics",
|
|
408
|
+
"- SendEvent payloads and future-dated scheduling evidence",
|
|
409
|
+
"- entity snapshots at each orchestration step (full state at transition)",
|
|
410
|
+
"- custom rule logs (LogCollection from extension rules)",
|
|
411
|
+
"- rule execution props (parameters used at runtime)",
|
|
412
|
+
"- enhanced NO_MATCH diagnostics with closeMatches and mismatch reasons",
|
|
413
|
+
"- NO_MATCH/PENDING/FAILED/SCHEDULED drill-down via event.get",
|
|
414
|
+
"",
|
|
415
|
+
"TOGGLEABLE SECTIONS (all default true except includeCrossEntity):",
|
|
416
|
+
"- includeRuleDetails: per-rule execution trace with class name, props, timing",
|
|
417
|
+
"- includeCustomLogs: custom plugin log messages (LogCollection)",
|
|
418
|
+
"- includeSnapshots: entity state at each processing point",
|
|
419
|
+
"- includeCrossEntity: child entity events (default: false)",
|
|
420
|
+
"",
|
|
421
|
+
"USE CASES:",
|
|
422
|
+
"- Debug stuck/failed entities: find which events NO_MATCHed or FAILed",
|
|
423
|
+
"- E2E post-mortem: see the full orchestration spread across entity types",
|
|
424
|
+
"- Webhook debugging: extract HTTP diagnostics from AUDIT actions",
|
|
425
|
+
"- Cross-entity flow analysis: ORDER → FULFILMENT_CHOICE → FULFILMENT → ORDER",
|
|
426
|
+
].join("\n") +
|
|
427
|
+
"\n\nCOMPACT MODE (default):\n" +
|
|
428
|
+
"Returns a pre-analyzed summary (~2-3k tokens) with anomaly findings, status flow,\n" +
|
|
429
|
+
"failed webhook endpoints, and slowest rulesets instead of raw arrays (~24k tokens).\n" +
|
|
430
|
+
"Set compact=false for full raw data when drilling into specific issues.",
|
|
431
|
+
inputSchema: {
|
|
432
|
+
type: "object",
|
|
433
|
+
properties: {
|
|
434
|
+
rootEntityRef: {
|
|
435
|
+
type: "string",
|
|
436
|
+
description: "Root entity reference, for example an order ref.",
|
|
437
|
+
},
|
|
438
|
+
rootEntityType: {
|
|
439
|
+
type: "string",
|
|
440
|
+
description: "Optional root entity type (for example ORDER, FULFILMENT, LOCATION, WAVE, PRODUCT).",
|
|
441
|
+
},
|
|
442
|
+
rootEntityId: {
|
|
443
|
+
oneOf: [{ type: "string" }, { type: "number" }],
|
|
444
|
+
description: "Optional root entity ID for disambiguation when refs are reused.",
|
|
445
|
+
},
|
|
446
|
+
from: {
|
|
447
|
+
type: "string",
|
|
448
|
+
description: "Optional ISO start time for event window.",
|
|
449
|
+
},
|
|
450
|
+
to: {
|
|
451
|
+
type: "string",
|
|
452
|
+
description: "Optional ISO end time for event window.",
|
|
453
|
+
},
|
|
454
|
+
compact: {
|
|
455
|
+
type: "boolean",
|
|
456
|
+
description: "Return a pre-analyzed summary instead of raw arrays (default: true). " +
|
|
457
|
+
"Compact mode returns an analysis section with statusFlow, findings, " +
|
|
458
|
+
"and stripped audit arrays (~2-3k tokens vs ~24k full). " +
|
|
459
|
+
"Set to false for full raw data.",
|
|
460
|
+
},
|
|
461
|
+
maxPages: {
|
|
462
|
+
type: "integer",
|
|
463
|
+
minimum: 1,
|
|
464
|
+
maximum: 50,
|
|
465
|
+
description: "Max 500-event pages to fetch per event type (default: 10).",
|
|
466
|
+
},
|
|
467
|
+
includeEventDetails: {
|
|
468
|
+
type: "boolean",
|
|
469
|
+
description: "Include compact orchestration events in response (default: true).",
|
|
470
|
+
},
|
|
471
|
+
includeAudit: {
|
|
472
|
+
type: "boolean",
|
|
473
|
+
description: "Fetch and parse ORCHESTRATION_AUDIT events (default: true).",
|
|
474
|
+
},
|
|
475
|
+
includeAuditDetails: {
|
|
476
|
+
type: "boolean",
|
|
477
|
+
description: "Include compact action audit samples in response (default: false).",
|
|
478
|
+
},
|
|
479
|
+
includeScheduled: {
|
|
480
|
+
type: "boolean",
|
|
481
|
+
description: "Fetch SCHEDULED events separately (default: true). Adds scheduled section to response.",
|
|
482
|
+
},
|
|
483
|
+
includeRuleDetails: {
|
|
484
|
+
type: "boolean",
|
|
485
|
+
description: "Include per-rule execution trace with class name, props, and timing (default: false).",
|
|
486
|
+
},
|
|
487
|
+
includeNoMatchDetails: {
|
|
488
|
+
type: "boolean",
|
|
489
|
+
description: "Include enhanced NO_MATCH diagnostics with closeMatches from ruleSet audit events (default: true).",
|
|
490
|
+
},
|
|
491
|
+
includeCustomLogs: {
|
|
492
|
+
type: "boolean",
|
|
493
|
+
description: "Include custom plugin log messages from CUSTOM category audit events (default: false).",
|
|
494
|
+
},
|
|
495
|
+
includeSnapshots: {
|
|
496
|
+
type: "boolean",
|
|
497
|
+
description: "Include entity state snapshots at each processing point (default: false).",
|
|
498
|
+
},
|
|
499
|
+
includeCrossEntity: {
|
|
500
|
+
type: "boolean",
|
|
501
|
+
description: "Fetch events for child entity types (FULFILMENT_CHOICE, FULFILMENT) using rootEntityRef (default: false).",
|
|
502
|
+
},
|
|
503
|
+
includeExceptions: {
|
|
504
|
+
type: "boolean",
|
|
505
|
+
description: "Include rule exceptions with class, message, and ruleset context (default: true).",
|
|
506
|
+
},
|
|
507
|
+
inspectStatuses: {
|
|
508
|
+
type: "array",
|
|
509
|
+
items: { type: "string" },
|
|
510
|
+
description: "Orchestration statuses to drill with event.get (default: [NO_MATCH, PENDING, FAILED]).",
|
|
511
|
+
},
|
|
512
|
+
maxDrilldowns: {
|
|
513
|
+
type: "integer",
|
|
514
|
+
minimum: 0,
|
|
515
|
+
maximum: 100,
|
|
516
|
+
description: "Max events to drill into with event.get (default: 50). Set to 0 to skip drilldowns.",
|
|
517
|
+
},
|
|
518
|
+
actionSampleLimit: {
|
|
519
|
+
type: "integer",
|
|
520
|
+
minimum: 1,
|
|
521
|
+
maximum: 200,
|
|
522
|
+
description: "Max rows returned per action extraction section (default: 100).",
|
|
523
|
+
},
|
|
524
|
+
},
|
|
525
|
+
required: ["rootEntityRef"],
|
|
526
|
+
additionalProperties: false,
|
|
527
|
+
},
|
|
528
|
+
},
|
|
529
|
+
{
|
|
530
|
+
name: "metrics.query",
|
|
531
|
+
description: [
|
|
532
|
+
"Query Prometheus metrics via the Fluent GraphQL metricInstant/metricRange queries.",
|
|
533
|
+
"",
|
|
534
|
+
"INSTANT: evaluates a PromQL expression at one point in time.",
|
|
535
|
+
"RANGE: requires start, end, and step to evaluate over a time window.",
|
|
536
|
+
"",
|
|
537
|
+
"Routes PromQL through the GraphQL proxy (metricInstant/metricRange).",
|
|
538
|
+
"Requires METRICS_VIEW permission on the authenticated user.",
|
|
539
|
+
"",
|
|
540
|
+
"AVAILABLE METRICS:",
|
|
541
|
+
"- core_event_received_total (counter) — events received by Fluent APIs",
|
|
542
|
+
"- rubix_event_runtime_seconds (histogram) — event processing time in Rubix",
|
|
543
|
+
"- rubix_event_inflight_latency_seconds (histogram) — queue wait time",
|
|
544
|
+
"- bpp_records_processed_total (counter) — batch pre-processing records",
|
|
545
|
+
"- feed_sent_total (counter) — inventory feed records exported",
|
|
546
|
+
"",
|
|
547
|
+
"COMMON LABELS: account_id, retailer_id, event_name, entity_type, source, status (Rubix only)",
|
|
548
|
+
].join("\n"),
|
|
549
|
+
inputSchema: {
|
|
550
|
+
type: "object",
|
|
551
|
+
properties: {
|
|
552
|
+
query: { type: "string", description: "PromQL expression" },
|
|
553
|
+
type: {
|
|
554
|
+
type: "string",
|
|
555
|
+
enum: ["instant", "range"],
|
|
556
|
+
description: "Query mode (default: instant)",
|
|
557
|
+
},
|
|
558
|
+
time: {
|
|
559
|
+
type: "string",
|
|
560
|
+
description: "Evaluation timestamp for instant queries (ISO-8601 or Unix timestamp)",
|
|
561
|
+
},
|
|
562
|
+
start: {
|
|
563
|
+
type: "string",
|
|
564
|
+
description: "Range start timestamp (required for range queries)",
|
|
565
|
+
},
|
|
566
|
+
end: {
|
|
567
|
+
type: "string",
|
|
568
|
+
description: "Range end timestamp (required for range queries)",
|
|
569
|
+
},
|
|
570
|
+
step: {
|
|
571
|
+
type: "string",
|
|
572
|
+
description: "Range step duration, for example 15s or 1m (required for range queries)",
|
|
573
|
+
},
|
|
574
|
+
},
|
|
575
|
+
required: ["query"],
|
|
576
|
+
additionalProperties: false,
|
|
577
|
+
},
|
|
578
|
+
},
|
|
579
|
+
{
|
|
580
|
+
name: "workflow.transitions",
|
|
581
|
+
description: [
|
|
582
|
+
"Query available user actions (transitions) for entities at a given state.",
|
|
583
|
+
"",
|
|
584
|
+
"Calls POST /api/v4.1/transition to discover what events can be fired,",
|
|
585
|
+
"what attributes they require, and how they appear in the UI.",
|
|
586
|
+
"",
|
|
587
|
+
"USE CASES:",
|
|
588
|
+
"- Discover available actions at any workflow status without reading workflow JSON",
|
|
589
|
+
"- Build dynamic E2E test sequences that adapt to workflow changes",
|
|
590
|
+
"- Validate that expected user actions are available after deployment",
|
|
591
|
+
"- Get required event attributes for each action (avoids missing-attribute errors)",
|
|
592
|
+
"",
|
|
593
|
+
"RESPONSE includes per-trigger:",
|
|
594
|
+
"- trigger: the matched trigger condition",
|
|
595
|
+
"- userActions[]: available actions with eventName, context (button config), attributes (required form fields)",
|
|
596
|
+
"- transitions[]: same as userActions (legacy compatibility)",
|
|
597
|
+
"",
|
|
598
|
+
"INTEGRATION: The eventName from each userAction maps directly to event.send's \"name\" parameter.",
|
|
599
|
+
"The attributes[] tell you what to include in event.send's \"attributes\" parameter.",
|
|
600
|
+
"",
|
|
601
|
+
"EXAMPLE: Find available actions for an ORDER in CREATED status:",
|
|
602
|
+
"{ triggers: [{ type: \"ORDER\", subtype: \"HD\", status: \"CREATED\", retailerId: \"5\", flexType: \"ORDER::HD\" }] }",
|
|
603
|
+
].join("\n"),
|
|
604
|
+
inputSchema: {
|
|
605
|
+
type: "object",
|
|
606
|
+
properties: {
|
|
607
|
+
triggers: {
|
|
608
|
+
type: "array",
|
|
609
|
+
minItems: 1,
|
|
610
|
+
items: {
|
|
611
|
+
type: "object",
|
|
612
|
+
properties: {
|
|
613
|
+
type: { type: "string", description: "Entity type (e.g. ORDER, FULFILMENT, MANIFEST)" },
|
|
614
|
+
subtype: { type: "string", description: "Entity subtype (e.g. HD, CC, DEFAULT)" },
|
|
615
|
+
status: {
|
|
616
|
+
type: "string",
|
|
617
|
+
description: "Current entity status. Omit to get all actions for any status.",
|
|
618
|
+
},
|
|
619
|
+
retailerId: {
|
|
620
|
+
type: "string",
|
|
621
|
+
description: "Retailer ID (required). Falls back to FLUENT_RETAILER_ID when omitted.",
|
|
622
|
+
},
|
|
623
|
+
module: {
|
|
624
|
+
type: "string",
|
|
625
|
+
description: "Filter by module (e.g. servicepoint, adminconsole). Case-sensitive.",
|
|
626
|
+
},
|
|
627
|
+
flexType: {
|
|
628
|
+
type: "string",
|
|
629
|
+
description: "Workflow type (e.g. ORDER::HD, FULFILMENT::HD_WH).",
|
|
630
|
+
},
|
|
631
|
+
flexVersion: {
|
|
632
|
+
oneOf: [{ type: "integer" }, { type: "string" }],
|
|
633
|
+
description: "Workflow version. Omit for latest.",
|
|
634
|
+
},
|
|
635
|
+
name: { type: "string", description: "Event name filter" },
|
|
636
|
+
},
|
|
637
|
+
required: ["retailerId"],
|
|
638
|
+
additionalProperties: false,
|
|
639
|
+
},
|
|
640
|
+
description: "List of trigger conditions to query available user actions for.",
|
|
641
|
+
},
|
|
642
|
+
},
|
|
643
|
+
required: ["triggers"],
|
|
644
|
+
additionalProperties: false,
|
|
645
|
+
},
|
|
646
|
+
},
|
|
647
|
+
{
|
|
648
|
+
name: "plugin.list",
|
|
649
|
+
description: [
|
|
650
|
+
"List all registered orchestration rules (standard + custom) with metadata.",
|
|
651
|
+
"Returns a map keyed by ACCOUNT.context.RuleName with each entry containing:",
|
|
652
|
+
"- ruleInfo: name, description, accepted entity types, produced events",
|
|
653
|
+
"- eventAttributes: attributes the rule reads from events",
|
|
654
|
+
"- parameters: configurable rule parameters",
|
|
655
|
+
"",
|
|
656
|
+
"Use to understand what rules do when analyzing workflows.",
|
|
657
|
+
"Optional name filter performs case-insensitive substring match on rule keys.",
|
|
658
|
+
].join("\n"),
|
|
659
|
+
inputSchema: {
|
|
660
|
+
type: "object",
|
|
661
|
+
properties: {
|
|
662
|
+
name: {
|
|
663
|
+
type: "string",
|
|
664
|
+
description: "Optional filter: only return rules whose key contains this string (case-insensitive).",
|
|
665
|
+
},
|
|
666
|
+
compact: {
|
|
667
|
+
type: "boolean",
|
|
668
|
+
description: "If true, return only ruleInfo per rule (name, description, entityTypes), stripping parameters and eventAttributes to reduce token usage.",
|
|
669
|
+
},
|
|
670
|
+
},
|
|
671
|
+
additionalProperties: false,
|
|
672
|
+
},
|
|
673
|
+
},
|
|
674
|
+
{
|
|
675
|
+
name: "graphql.query",
|
|
676
|
+
description: [
|
|
677
|
+
"Execute a Fluent Commerce GraphQL query or mutation via SDK.",
|
|
678
|
+
"",
|
|
679
|
+
"PAGINATION: Fluent uses Relay-style connections. Cursors live on each edge, NOT on pageInfo.",
|
|
680
|
+
"Pattern: { orders(first: 50, after: $cursor) { edges { cursor node { id ref } } pageInfo { hasNextPage } } }",
|
|
681
|
+
"To paginate: take the cursor from the LAST edge in the response, pass it as the 'after' variable in the next call. Repeat while hasNextPage is true.",
|
|
682
|
+
"Pagination args: first/after (forward), last/before (backward). No endCursor or startCursor exists.",
|
|
683
|
+
"",
|
|
684
|
+
"MUTATIONS: Pass input via variables.",
|
|
685
|
+
"Pattern: mutation($input: UpdateOrderInput!) { updateOrder(input: $input) { id ref status } }",
|
|
686
|
+
"Variables: { \"input\": { \"id\": \"36\", \"status\": \"RECEIVED\" } }",
|
|
687
|
+
"",
|
|
688
|
+
"CONNECTION SHAPE: All list queries return connections: { edges { cursor node { ...fields } } pageInfo { hasNextPage } }.",
|
|
689
|
+
"Common roots: orders, fulfilments, fulfilmentChoices, locations, inventoryPositions, products, categories, settings, waves, articles.",
|
|
690
|
+
].join("\n"),
|
|
691
|
+
inputSchema: {
|
|
692
|
+
type: "object",
|
|
693
|
+
properties: {
|
|
694
|
+
query: { type: "string", description: "GraphQL query or mutation string" },
|
|
695
|
+
variables: { type: "object", additionalProperties: true },
|
|
696
|
+
summarize: {
|
|
697
|
+
type: "boolean",
|
|
698
|
+
description: "If true and result contains a Relay connection (edges/nodes), return record count, field names, and 3 sample records instead of full data.",
|
|
699
|
+
},
|
|
700
|
+
},
|
|
701
|
+
required: ["query"],
|
|
702
|
+
additionalProperties: false,
|
|
703
|
+
},
|
|
704
|
+
},
|
|
705
|
+
{
|
|
706
|
+
name: "batch.create",
|
|
707
|
+
description: "Create a batch ingestion job. Requires ready config and retailerId (input or env). Next steps: batch.send -> batch.status -> batch.results.",
|
|
708
|
+
inputSchema: {
|
|
709
|
+
type: "object",
|
|
710
|
+
properties: {
|
|
711
|
+
name: { type: "string", description: "Job name" },
|
|
712
|
+
retailerId: { type: "string" },
|
|
713
|
+
entityType: { type: "string" },
|
|
714
|
+
action: { type: "string" },
|
|
715
|
+
},
|
|
716
|
+
required: ["name"],
|
|
717
|
+
additionalProperties: false,
|
|
718
|
+
},
|
|
719
|
+
},
|
|
720
|
+
{
|
|
721
|
+
name: "batch.send",
|
|
722
|
+
description: "Send records to an existing batch job. Requires jobId from batch.create and integration-specific payload shape.",
|
|
723
|
+
inputSchema: {
|
|
724
|
+
type: "object",
|
|
725
|
+
properties: {
|
|
726
|
+
jobId: { type: "string", description: "Job ID from batch.create" },
|
|
727
|
+
payload: { type: "object", additionalProperties: true },
|
|
728
|
+
},
|
|
729
|
+
required: ["jobId", "payload"],
|
|
730
|
+
additionalProperties: false,
|
|
731
|
+
},
|
|
732
|
+
},
|
|
733
|
+
{
|
|
734
|
+
name: "batch.status",
|
|
735
|
+
description: "Check overall job status for polling loops. Repeat until terminal status before fetching results.",
|
|
736
|
+
inputSchema: {
|
|
737
|
+
type: "object",
|
|
738
|
+
properties: {
|
|
739
|
+
jobId: { type: "string", description: "Job ID to check" },
|
|
740
|
+
},
|
|
741
|
+
required: ["jobId"],
|
|
742
|
+
additionalProperties: false,
|
|
743
|
+
},
|
|
744
|
+
},
|
|
745
|
+
{
|
|
746
|
+
name: "batch.batchStatus",
|
|
747
|
+
description: "Check a specific batch inside a job (jobId + batchId). Use for troubleshooting partial failures within multi-batch jobs.",
|
|
748
|
+
inputSchema: {
|
|
749
|
+
type: "object",
|
|
750
|
+
properties: {
|
|
751
|
+
jobId: { type: "string", description: "Job ID" },
|
|
752
|
+
batchId: { type: "string", description: "Batch ID within the job" },
|
|
753
|
+
},
|
|
754
|
+
required: ["jobId", "batchId"],
|
|
755
|
+
additionalProperties: false,
|
|
756
|
+
},
|
|
757
|
+
},
|
|
758
|
+
{
|
|
759
|
+
name: "batch.results",
|
|
760
|
+
description: "Get per-record batch outcomes for a completed job. Use after batch.status reaches terminal state to inspect success/failure details.",
|
|
761
|
+
inputSchema: {
|
|
762
|
+
type: "object",
|
|
763
|
+
properties: {
|
|
764
|
+
jobId: { type: "string", description: "Job ID to get results for" },
|
|
765
|
+
},
|
|
766
|
+
required: ["jobId"],
|
|
767
|
+
additionalProperties: false,
|
|
768
|
+
},
|
|
769
|
+
},
|
|
770
|
+
{
|
|
771
|
+
name: "graphql.queryAll",
|
|
772
|
+
description: [
|
|
773
|
+
"Execute a Fluent Commerce GraphQL query with SDK auto-pagination.",
|
|
774
|
+
"Automatically follows cursors, merges edges across pages, and deduplicates by node ID.",
|
|
775
|
+
"",
|
|
776
|
+
"USE INSTEAD OF graphql.query WHEN: you need ALL records from a connection (not just the first page).",
|
|
777
|
+
"The SDK detects pagination variables (first/after or last/before) in your query and auto-follows cursors.",
|
|
778
|
+
"",
|
|
779
|
+
"CONTROLS:",
|
|
780
|
+
"- maxPages: stop after N pages (default 100)",
|
|
781
|
+
"- maxRecords: stop after N total records (default 10000)",
|
|
782
|
+
"- timeoutMs: hard timeout for entire pagination run (default 300000ms = 5 min)",
|
|
783
|
+
"- direction: 'forward' (first/after) or 'backward' (last/before)",
|
|
784
|
+
"- errorHandling: 'throw' (default, fail on GraphQL errors) or 'partial' (return partial data with errors)",
|
|
785
|
+
"",
|
|
786
|
+
"RESPONSE includes extensions.autoPagination with: totalPages, totalRecords, truncated, truncationReason, direction.",
|
|
787
|
+
"",
|
|
788
|
+
"EXAMPLE: Fetch all orders with status ACTIVE:",
|
|
789
|
+
"query: { orders(first: 100, after: $cursor) { edges { cursor node { id ref status } } pageInfo { hasNextPage } } }",
|
|
790
|
+
"variables: { cursor: null }",
|
|
791
|
+
"maxRecords: 5000",
|
|
792
|
+
].join("\n"),
|
|
793
|
+
inputSchema: {
|
|
794
|
+
type: "object",
|
|
795
|
+
properties: {
|
|
796
|
+
query: {
|
|
797
|
+
type: "string",
|
|
798
|
+
description: "GraphQL query with pagination variables (first/after or last/before). Must include edges { cursor node { ... } } and pageInfo { hasNextPage }.",
|
|
799
|
+
},
|
|
800
|
+
variables: {
|
|
801
|
+
type: "object",
|
|
802
|
+
additionalProperties: true,
|
|
803
|
+
description: "Query variables. Include cursor variable (usually null for first page).",
|
|
804
|
+
},
|
|
805
|
+
maxPages: {
|
|
806
|
+
type: "integer",
|
|
807
|
+
minimum: 1,
|
|
808
|
+
maximum: 500,
|
|
809
|
+
description: "Maximum number of pages to fetch (default: 100)",
|
|
810
|
+
},
|
|
811
|
+
maxRecords: {
|
|
812
|
+
type: "integer",
|
|
813
|
+
minimum: 1,
|
|
814
|
+
maximum: 50000,
|
|
815
|
+
description: "Maximum total records to accumulate (default: 10000)",
|
|
816
|
+
},
|
|
817
|
+
timeoutMs: {
|
|
818
|
+
type: "integer",
|
|
819
|
+
minimum: 1000,
|
|
820
|
+
maximum: 600000,
|
|
821
|
+
description: "Hard timeout in ms for entire pagination run (default: 300000)",
|
|
822
|
+
},
|
|
823
|
+
direction: {
|
|
824
|
+
type: "string",
|
|
825
|
+
enum: ["forward", "backward"],
|
|
826
|
+
description: "Pagination direction: forward (first/after) or backward (last/before). Default: forward.",
|
|
827
|
+
},
|
|
828
|
+
errorHandling: {
|
|
829
|
+
type: "string",
|
|
830
|
+
enum: ["throw", "partial"],
|
|
831
|
+
description: "Error mode: 'throw' fails on GraphQL errors, 'partial' returns partial data with errors. Default: throw.",
|
|
832
|
+
},
|
|
833
|
+
summarize: {
|
|
834
|
+
type: "boolean",
|
|
835
|
+
description: "If true, return record count, field names, and 3 sample records instead of full paginated data.",
|
|
836
|
+
},
|
|
837
|
+
},
|
|
838
|
+
required: ["query"],
|
|
839
|
+
additionalProperties: false,
|
|
840
|
+
},
|
|
841
|
+
},
|
|
842
|
+
{
|
|
843
|
+
name: "graphql.batchMutate",
|
|
844
|
+
description: [
|
|
845
|
+
"Execute multiple GraphQL mutations in a single request using aliased mutations.",
|
|
846
|
+
"Sends up to 50 mutations at once, each with its own input, in one GraphQL request.",
|
|
847
|
+
"",
|
|
848
|
+
"HOW IT WORKS:",
|
|
849
|
+
"1. Builds an aliased mutation query: mutation Batch($input1: UpdateOrderInput!, $input2: UpdateOrderInput!) {",
|
|
850
|
+
" updateOrder1: updateOrder(input: $input1) { id ref }",
|
|
851
|
+
" updateOrder2: updateOrder(input: $input2) { id ref }",
|
|
852
|
+
" }",
|
|
853
|
+
"2. Sends as a single GraphQL request",
|
|
854
|
+
"3. Parses per-mutation results (success/failure for each input)",
|
|
855
|
+
"",
|
|
856
|
+
"USE CASES:",
|
|
857
|
+
"- Bulk update order statuses",
|
|
858
|
+
"- Batch create/update inventory positions",
|
|
859
|
+
"- Mass-update attributes on multiple entities",
|
|
860
|
+
"",
|
|
861
|
+
"RESPONSE includes: executed (count), failed (count), allSucceeded, allFailed, results (per-mutation), errors (per-mutation).",
|
|
862
|
+
"",
|
|
863
|
+
"EXAMPLE: Update 3 orders to SHIPPED:",
|
|
864
|
+
"mutation: 'updateOrder'",
|
|
865
|
+
"inputs: [{id: '1', status: 'SHIPPED'}, {id: '2', status: 'SHIPPED'}, {id: '3', status: 'SHIPPED'}]",
|
|
866
|
+
"returnFields: ['id', 'ref', 'status']",
|
|
867
|
+
].join("\n"),
|
|
868
|
+
inputSchema: {
|
|
869
|
+
type: "object",
|
|
870
|
+
properties: {
|
|
871
|
+
mutation: {
|
|
872
|
+
type: "string",
|
|
873
|
+
description: "Mutation name (e.g., 'updateOrder', 'createFulfilment', 'updateInventoryPosition'). The input type is inferred as <MutationName>Input (e.g., UpdateOrderInput).",
|
|
874
|
+
},
|
|
875
|
+
inputs: {
|
|
876
|
+
type: "array",
|
|
877
|
+
items: { type: "object", additionalProperties: true },
|
|
878
|
+
minItems: 1,
|
|
879
|
+
maxItems: 50,
|
|
880
|
+
description: "Array of mutation inputs. Each object is passed as the $input variable for one aliased mutation.",
|
|
881
|
+
},
|
|
882
|
+
returnFields: {
|
|
883
|
+
type: "array",
|
|
884
|
+
items: { type: "string" },
|
|
885
|
+
description: "Fields to return from each mutation result (default: ['id', 'ref']). Use dot notation for nested: ['id', 'ref', 'status'].",
|
|
886
|
+
},
|
|
887
|
+
operationName: {
|
|
888
|
+
type: "string",
|
|
889
|
+
description: "Custom GraphQL operation name (default: Batch<MutationName>s).",
|
|
890
|
+
},
|
|
891
|
+
},
|
|
892
|
+
required: ["mutation", "inputs"],
|
|
893
|
+
additionalProperties: false,
|
|
894
|
+
},
|
|
895
|
+
},
|
|
896
|
+
{
|
|
897
|
+
name: "graphql.introspect",
|
|
898
|
+
description: [
|
|
899
|
+
"Inspect the Fluent Commerce GraphQL schema via introspection.",
|
|
900
|
+
"Fetches the full schema and caches it for 1 hour. Use to discover mutations, input types, and field requirements.",
|
|
901
|
+
"",
|
|
902
|
+
"MODES (specify one):",
|
|
903
|
+
"- type: Get details of an INPUT_OBJECT type (fields, types, required flags). E.g., type='UpdateOrderInput'.",
|
|
904
|
+
"- mutation: Get details of a specific mutation (args, return type). E.g., mutation='updateOrder'.",
|
|
905
|
+
"- listMutations: true to get all available mutation names.",
|
|
906
|
+
"- listQueries: true to get all available query root field names.",
|
|
907
|
+
"",
|
|
908
|
+
"EXAMPLE USAGE:",
|
|
909
|
+
"1. List all mutations: { listMutations: true }",
|
|
910
|
+
"2. Inspect updateOrder: { mutation: 'updateOrder' }",
|
|
911
|
+
"3. See UpdateOrderInput fields: { type: 'UpdateOrderInput' }",
|
|
912
|
+
"4. Chain: listMutations -> find mutation -> inspect type -> build query",
|
|
913
|
+
].join("\n"),
|
|
914
|
+
inputSchema: {
|
|
915
|
+
type: "object",
|
|
916
|
+
properties: {
|
|
917
|
+
type: {
|
|
918
|
+
type: "string",
|
|
919
|
+
description: "Name of an INPUT_OBJECT type to inspect (e.g., 'UpdateOrderInput', 'CreateFulfilmentInput').",
|
|
920
|
+
},
|
|
921
|
+
mutation: {
|
|
922
|
+
type: "string",
|
|
923
|
+
description: "Name of a mutation to inspect (e.g., 'updateOrder', 'createFulfilment').",
|
|
924
|
+
},
|
|
925
|
+
listMutations: {
|
|
926
|
+
type: "boolean",
|
|
927
|
+
description: "If true, return all available mutation names.",
|
|
928
|
+
},
|
|
929
|
+
listQueries: {
|
|
930
|
+
type: "boolean",
|
|
931
|
+
description: "If true, return all available query root field names.",
|
|
932
|
+
},
|
|
933
|
+
},
|
|
934
|
+
additionalProperties: false,
|
|
935
|
+
},
|
|
936
|
+
},
|
|
937
|
+
{
|
|
938
|
+
name: "connection.test",
|
|
939
|
+
description: [
|
|
940
|
+
"Comprehensive Fluent Commerce connectivity test.",
|
|
941
|
+
"Authenticates, executes a 'me' query, and returns the authenticated user's details including:",
|
|
942
|
+
"- User: id, username, email, type, status, roles, permissions",
|
|
943
|
+
"- Retailer: id, ref, tradingName, primaryEmail",
|
|
944
|
+
"- Location: id, ref, name, type",
|
|
945
|
+
"",
|
|
946
|
+
"More thorough than health.ping — actually verifies the GraphQL endpoint works end-to-end.",
|
|
947
|
+
"Use when: first connecting, debugging auth issues, verifying retailer/location context.",
|
|
948
|
+
].join("\n"),
|
|
949
|
+
inputSchema: {
|
|
950
|
+
type: "object",
|
|
951
|
+
properties: {},
|
|
952
|
+
additionalProperties: false,
|
|
953
|
+
},
|
|
954
|
+
},
|
|
955
|
+
{
|
|
956
|
+
name: "webhook.validate",
|
|
957
|
+
description: [
|
|
958
|
+
"Validate a Fluent Commerce webhook payload and optionally verify its signature.",
|
|
959
|
+
"",
|
|
960
|
+
"TWO MODES:",
|
|
961
|
+
"1. Basic validation (no signature): Checks required fields (name, id, retailerId) are present.",
|
|
962
|
+
" Input: { payload: { name: 'OrderCreated', id: '123', retailerId: '1' } }",
|
|
963
|
+
"",
|
|
964
|
+
"2. Signature validation: Verifies the X-Fluent-Signature header against the webhook body using the public key.",
|
|
965
|
+
" Input: { payload: {...}, signature: '<base64-sig>', publicKey: '<PEM-key>', algorithm: 'SHA512withRSA' }",
|
|
966
|
+
"",
|
|
967
|
+
"Supported algorithms: SHA512withRSA (default), MD5withRSA.",
|
|
968
|
+
"",
|
|
969
|
+
"USE CASES:",
|
|
970
|
+
"- Testing webhook integrations during development",
|
|
971
|
+
"- Verifying webhook payloads in debug scenarios",
|
|
972
|
+
"- Validating webhook signature configuration",
|
|
973
|
+
].join("\n"),
|
|
974
|
+
inputSchema: {
|
|
975
|
+
type: "object",
|
|
976
|
+
properties: {
|
|
977
|
+
payload: {
|
|
978
|
+
type: "object",
|
|
979
|
+
additionalProperties: true,
|
|
980
|
+
description: "The webhook payload object. Must include name, id, and retailerId for basic validation.",
|
|
981
|
+
},
|
|
982
|
+
rawBody: {
|
|
983
|
+
type: "string",
|
|
984
|
+
description: "The exact raw HTTP request body string for signature verification. " +
|
|
985
|
+
"Signatures are computed over raw bytes, so providing the original " +
|
|
986
|
+
"body avoids false negatives from JSON re-serialization. " +
|
|
987
|
+
"Falls back to JSON.stringify(payload) if omitted.",
|
|
988
|
+
},
|
|
989
|
+
signature: {
|
|
990
|
+
type: "string",
|
|
991
|
+
description: "The X-Fluent-Signature header value (base64-encoded). Required for signature validation.",
|
|
992
|
+
},
|
|
993
|
+
publicKey: {
|
|
994
|
+
type: "string",
|
|
995
|
+
description: "The Fluent public key (PEM format) for signature verification. Required when signature is provided.",
|
|
996
|
+
},
|
|
997
|
+
algorithm: {
|
|
998
|
+
type: "string",
|
|
999
|
+
enum: ["SHA512withRSA", "MD5withRSA"],
|
|
1000
|
+
description: "Signature algorithm (default: SHA512withRSA).",
|
|
1001
|
+
},
|
|
1002
|
+
},
|
|
1003
|
+
required: ["payload"],
|
|
1004
|
+
additionalProperties: false,
|
|
1005
|
+
},
|
|
1006
|
+
},
|
|
1007
|
+
{
|
|
1008
|
+
name: "metrics.topEvents",
|
|
1009
|
+
description: [
|
|
1010
|
+
"Aggregate event analytics within a time window.",
|
|
1011
|
+
"Fetches events via the Event API, groups by (name + entityType + status),",
|
|
1012
|
+
"and returns top-N rankings by volume with summary statistics.",
|
|
1013
|
+
"",
|
|
1014
|
+
"USE CASES:",
|
|
1015
|
+
"- Monitor event volume patterns and identify high-frequency events",
|
|
1016
|
+
"- Detect failure rate anomalies across entity types",
|
|
1017
|
+
"- Identify NO_MATCH events indicating workflow gaps",
|
|
1018
|
+
"- Compare event distribution before and after deployments",
|
|
1019
|
+
"- Get top failing events: { from: '...', eventStatus: 'FAILED' }",
|
|
1020
|
+
"",
|
|
1021
|
+
"IMPLEMENTATION: Paginates through event.list results (up to maxPages x 500 events),",
|
|
1022
|
+
"aggregates client-side, and returns ranked results with failure rate and status breakdown.",
|
|
1023
|
+
"",
|
|
1024
|
+
"EXAMPLE: Top 20 events in the last hour:",
|
|
1025
|
+
'{ from: "2026-02-22T07:00:00Z", topN: 20 }',
|
|
1026
|
+
].join("\n"),
|
|
1027
|
+
inputSchema: {
|
|
1028
|
+
type: "object",
|
|
1029
|
+
properties: {
|
|
1030
|
+
from: { type: "string", description: "Time window start (ISO-8601, required)" },
|
|
1031
|
+
to: { type: "string", description: "Time window end (ISO-8601, defaults to now)" },
|
|
1032
|
+
entityType: { type: "string", description: "Filter by entity type (e.g., ORDER, FULFILMENT)" },
|
|
1033
|
+
eventStatus: { type: "string", description: "Filter by event status (e.g., FAILED, NO_MATCH, COMPLETE, SUCCESS, PENDING)" },
|
|
1034
|
+
eventType: { type: "string", description: "Filter by event type (default: ORCHESTRATION)" },
|
|
1035
|
+
topN: { type: "integer", minimum: 1, maximum: 100, description: "Top N results (default: 20)" },
|
|
1036
|
+
maxPages: { type: "integer", minimum: 1, maximum: 50, description: "Max pages to fetch (default: 10, each page = 500 events)" },
|
|
1037
|
+
},
|
|
1038
|
+
required: ["from"],
|
|
1039
|
+
additionalProperties: false,
|
|
1040
|
+
},
|
|
1041
|
+
},
|
|
1042
|
+
{
|
|
1043
|
+
name: "metrics.healthCheck",
|
|
1044
|
+
description: [
|
|
1045
|
+
"Single-call health assessment using Prometheus metrics via GraphQL,",
|
|
1046
|
+
"applies anomaly detection heuristics, and returns a compact findings report.",
|
|
1047
|
+
"",
|
|
1048
|
+
"Runs locally in the MCP server — minimal tokens returned to the AI.",
|
|
1049
|
+
"Falls back to Event API aggregation if Prometheus/GraphQL metrics are unavailable.",
|
|
1050
|
+
"",
|
|
1051
|
+
"CHECKS PERFORMED:",
|
|
1052
|
+
"- Failure rate (>5% = HIGH severity)",
|
|
1053
|
+
"- NO_MATCH events present (any = CRITICAL)",
|
|
1054
|
+
"- PENDING queue ratio (>10% = MEDIUM)",
|
|
1055
|
+
"- Single event dominance (>50% = MEDIUM, possible runaway loop)",
|
|
1056
|
+
"",
|
|
1057
|
+
"USE INSTEAD OF calling metrics.query multiple times manually.",
|
|
1058
|
+
].join("\n"),
|
|
1059
|
+
inputSchema: {
|
|
1060
|
+
type: "object",
|
|
1061
|
+
properties: {
|
|
1062
|
+
window: {
|
|
1063
|
+
type: "string",
|
|
1064
|
+
description: "Prometheus time window (default: 1h). Examples: 1h, 6h, 24h, 7d",
|
|
1065
|
+
},
|
|
1066
|
+
includeTopEvents: {
|
|
1067
|
+
type: "boolean",
|
|
1068
|
+
description: "Include top-N event breakdown (default: true)",
|
|
1069
|
+
},
|
|
1070
|
+
topN: {
|
|
1071
|
+
type: "integer",
|
|
1072
|
+
minimum: 1,
|
|
1073
|
+
maximum: 100,
|
|
1074
|
+
description: "Number of top events to include (default: 10)",
|
|
1075
|
+
},
|
|
1076
|
+
thresholds: {
|
|
1077
|
+
type: "object",
|
|
1078
|
+
description: "Custom thresholds (defaults: failureRate=5, pendingRate=10, dominanceRate=50)",
|
|
1079
|
+
properties: {
|
|
1080
|
+
failureRate: { type: "number", description: "Failure rate threshold % (default: 5)" },
|
|
1081
|
+
pendingRate: { type: "number", description: "Pending rate threshold % (default: 10)" },
|
|
1082
|
+
dominanceRate: { type: "number", description: "Single-event dominance threshold % (default: 50)" },
|
|
1083
|
+
},
|
|
1084
|
+
},
|
|
1085
|
+
},
|
|
1086
|
+
additionalProperties: false,
|
|
1087
|
+
},
|
|
1088
|
+
},
|
|
1089
|
+
{
|
|
1090
|
+
name: "metrics.sloReport",
|
|
1091
|
+
description: [
|
|
1092
|
+
"Managed-services SLO snapshot in one call.",
|
|
1093
|
+
"Computes event volume, failure/no-match/pending rates, and p95 runtime/inflight latency.",
|
|
1094
|
+
"",
|
|
1095
|
+
"PRIMARY DATA SOURCE: Prometheus metrics via metrics.query endpoints.",
|
|
1096
|
+
"FALLBACK: Event API aggregation when Prometheus is unavailable (latency fields become null).",
|
|
1097
|
+
"",
|
|
1098
|
+
"USE CASES:",
|
|
1099
|
+
"- Daily/shift operational health checks",
|
|
1100
|
+
"- Release validation guardrails",
|
|
1101
|
+
"- Fast incident triage with threshold-based findings",
|
|
1102
|
+
"",
|
|
1103
|
+
"Optionally includes top failing events for targeted remediation.",
|
|
1104
|
+
].join("\n"),
|
|
1105
|
+
inputSchema: {
|
|
1106
|
+
type: "object",
|
|
1107
|
+
properties: {
|
|
1108
|
+
window: {
|
|
1109
|
+
type: "string",
|
|
1110
|
+
description: "Window for KPI aggregation (default: 1h). Examples: 30m, 1h, 24h, 7d",
|
|
1111
|
+
},
|
|
1112
|
+
includeTopFailingEvents: {
|
|
1113
|
+
type: "boolean",
|
|
1114
|
+
description: "Include top failing events from Event API (default: true)",
|
|
1115
|
+
},
|
|
1116
|
+
topN: {
|
|
1117
|
+
type: "integer",
|
|
1118
|
+
minimum: 1,
|
|
1119
|
+
maximum: 100,
|
|
1120
|
+
description: "Top N failing events to include (default: 10)",
|
|
1121
|
+
},
|
|
1122
|
+
maxPages: {
|
|
1123
|
+
type: "integer",
|
|
1124
|
+
minimum: 1,
|
|
1125
|
+
maximum: 50,
|
|
1126
|
+
description: "Event API pagination cap for fallback/top failing events (default: 10)",
|
|
1127
|
+
},
|
|
1128
|
+
thresholds: {
|
|
1129
|
+
type: "object",
|
|
1130
|
+
description: "Custom thresholds. Defaults: failureRate=5, noMatchRate=0, pendingRate=10, runtimeP95Seconds=5, inflightP95Seconds=60",
|
|
1131
|
+
properties: {
|
|
1132
|
+
failureRate: { type: "number", description: "Failure rate threshold in percent" },
|
|
1133
|
+
noMatchRate: { type: "number", description: "NO_MATCH rate threshold in percent" },
|
|
1134
|
+
pendingRate: { type: "number", description: "Pending rate threshold in percent" },
|
|
1135
|
+
runtimeP95Seconds: { type: "number", description: "Runtime p95 threshold in seconds" },
|
|
1136
|
+
inflightP95Seconds: {
|
|
1137
|
+
type: "number",
|
|
1138
|
+
description: "Inflight latency p95 threshold in seconds",
|
|
1139
|
+
},
|
|
1140
|
+
},
|
|
1141
|
+
},
|
|
1142
|
+
},
|
|
1143
|
+
additionalProperties: false,
|
|
1144
|
+
},
|
|
1145
|
+
},
|
|
1146
|
+
{
|
|
1147
|
+
name: "metrics.labelCatalog",
|
|
1148
|
+
description: [
|
|
1149
|
+
"Discover supported labels for a metric with live sampling + known Fluent hints.",
|
|
1150
|
+
"",
|
|
1151
|
+
"Runs an instant PromQL sample query: last_over_time(<metric>[window])",
|
|
1152
|
+
"then extracts label keys from returned time series and summarizes cardinality.",
|
|
1153
|
+
"",
|
|
1154
|
+
"Useful to prevent invalid sum by(...) usage (for example using status on metrics that don't expose it).",
|
|
1155
|
+
"If no live series are returned, can still provide known Fluent label hints for common platform metrics.",
|
|
1156
|
+
].join("\n"),
|
|
1157
|
+
inputSchema: {
|
|
1158
|
+
type: "object",
|
|
1159
|
+
properties: {
|
|
1160
|
+
metric: { type: "string", description: "Metric selector name, for example core_event_received_total" },
|
|
1161
|
+
window: {
|
|
1162
|
+
type: "string",
|
|
1163
|
+
description: "Sampling window (default: 24h). Examples: 30m, 1h, 24h, 7d",
|
|
1164
|
+
},
|
|
1165
|
+
includeKnownLabels: {
|
|
1166
|
+
type: "boolean",
|
|
1167
|
+
description: "Include known Fluent metric label hints (default: true)",
|
|
1168
|
+
},
|
|
1169
|
+
maxValuesPerLabel: {
|
|
1170
|
+
type: "integer",
|
|
1171
|
+
minimum: 1,
|
|
1172
|
+
maximum: 50,
|
|
1173
|
+
description: "Max sample values per label in response (default: 10)",
|
|
1174
|
+
},
|
|
1175
|
+
},
|
|
1176
|
+
required: ["metric"],
|
|
1177
|
+
additionalProperties: false,
|
|
1178
|
+
},
|
|
1179
|
+
},
|
|
1180
|
+
// New agentic tools (appended from modules)
|
|
1181
|
+
...ENTITY_TOOL_DEFINITIONS,
|
|
1182
|
+
...WORKFLOW_TOOL_DEFINITIONS,
|
|
1183
|
+
...SETTING_TOOL_DEFINITIONS,
|
|
1184
|
+
...ENVIRONMENT_TOOL_DEFINITIONS,
|
|
1185
|
+
...TEST_TOOL_DEFINITIONS,
|
|
1186
|
+
];
|
|
1187
|
+
// ---------------------------------------------------------------------------
|
|
1188
|
+
// Helpers
|
|
1189
|
+
// ---------------------------------------------------------------------------
|
|
1190
|
+
/**
 * Maps tool input (with aliases) to SDK FluentEventQueryParams.
 * Resolves aliases: id->eventId, entityRef->context.entityRef,
 * entityType->context.entityType, type->eventType.
 * Preserves only primitive values (strings, numbers, string arrays)
 * for passthrough of unrecognized keys.
 */
export function toEventQueryParams(input) {
    const params = {};
    // Assigns a value only when it is meaningful: skips null/undefined and
    // blank strings, and keeps only the string members of arrays.
    const assign = (key, value) => {
        if (value === undefined || value === null) {
            return;
        }
        if (typeof value === "string" && value.trim() === "") {
            return;
        }
        if (Array.isArray(value)) {
            params[key] = value.filter((member) => typeof member === "string");
        } else if (typeof value === "string" || typeof value === "number") {
            params[key] = value;
        }
    };
    // Known query parameters, with alias resolution.
    assign("eventId", input.eventId ?? input.id);
    assign("name", input.name);
    assign("category", input.category);
    assign("retailerId", input.retailerId);
    assign("eventType", input.eventType ?? input.type);
    assign("eventStatus", input.eventStatus);
    assign("from", input.from);
    assign("to", input.to);
    assign("start", input.start);
    assign("count", input.count);
    assign("context.rootEntityType", input["context.rootEntityType"]);
    assign("context.rootEntityId", input["context.rootEntityId"]);
    assign("context.rootEntityRef", input["context.rootEntityRef"]);
    assign("context.entityType", input["context.entityType"] ?? input.entityType);
    assign("context.entityId", input["context.entityId"]);
    assign("context.entityRef", input["context.entityRef"] ?? input.entityRef);
    assign("context.sourceEvents", input["context.sourceEvents"]);
    // Pass through any remaining primitive-valued keys that were not already
    // handled above, either directly or via an alias.
    const handled = new Set([
        "eventId",
        "id",
        "name",
        "category",
        "retailerId",
        "eventType",
        "type",
        "eventStatus",
        "from",
        "to",
        "start",
        "count",
        "entityRef",
        "entityType",
    ]);
    for (const [key, value] of Object.entries(input)) {
        if (key.startsWith("context.") || handled.has(key)) {
            continue;
        }
        const isStringArray = Array.isArray(value) && value.every((member) => typeof member === "string");
        if (typeof value === "string" || typeof value === "number" || isStringArray) {
            params[key] = value;
        }
    }
    return params;
}
|
|
1255
|
+
/**
 * Fill missing trigger.retailerId from runtime config and validate readiness.
 * Returns a new request object; input triggers are never mutated.
 * Throws a ToolError (VALIDATION_ERROR) when neither the trigger nor the
 * fallback supplies a retailerId.
 */
export function resolveTransitionRequest(input, fallbackRetailerId) {
    const resolved = [];
    input.triggers.forEach((trigger, index) => {
        const retailerId = trigger.retailerId ?? fallbackRetailerId;
        if (!retailerId) {
            throw new ToolError("VALIDATION_ERROR", `triggers[${index}].retailerId is required. Set FLUENT_RETAILER_ID or pass retailerId per trigger.`);
        }
        resolved.push({ ...trigger, retailerId });
    });
    return { triggers: resolved };
}
|
|
1271
|
+
/**
 * GraphQL mutations are non-idempotent; retries can duplicate side effects.
 * Detect mutation operations so tools can disable automatic retries.
 * Strips GraphQL line comments (# ...) before checking whether the document
 * begins with the `mutation` keyword (case-insensitive).
 */
export function isMutationOperation(query) {
    const withoutComments = query.replace(/#[^\r\n]*/g, "");
    return /^mutation\b/i.test(withoutComments.trimStart());
}
|
|
1279
|
+
/** Derive tool name list from definitions for use by index.ts logging. */
export const TOOL_NAMES = TOOL_DEFINITIONS.map(({ name }) => name);
|
|
1281
|
+
/**
 * Narrow cast for GraphQL variables at the Zod→SDK boundary.
 * Tool inputs are validated as JSON-compatible objects by Zod; the SDK type
 * expects Record<string, never> which is overly restrictive. This single
 * identity helper centralizes the boundary cast.
 */
function toSdkVariables(variables) {
    return variables;
}
|
|
1290
|
+
/**
 * Returns the value unchanged when it is a non-null object (arrays included);
 * otherwise null.
 */
function asRecord(value) {
    const isObject = Boolean(value) && typeof value === "object";
    return isObject ? value : null;
}
/**
 * Normalizes attributes into a plain name->value map.
 * Accepts either an array of { name, value } records (non-object entries and
 * entries without a string name are dropped) or an already-map-shaped object.
 * Any other input yields an empty object.
 */
function asAttributeMap(attributes) {
    if (!Array.isArray(attributes)) {
        return asRecord(attributes) ?? {};
    }
    const map = {};
    for (const entry of attributes) {
        const record = asRecord(entry);
        if (record && typeof record.name === "string") {
            map[record.name] = record.value;
        }
    }
    return map;
}
|
|
1310
|
+
/**
 * Coerces a value to a finite number, or null when coercion fails
 * (NaN, Infinity). Note: Number(null) is 0, so null input yields 0.
 */
function toNumber(value) {
    const numeric = Number(value);
    if (!Number.isFinite(numeric)) {
        return null;
    }
    return numeric;
}
|
|
1314
|
+
/**
 * Tally items by the string key produced by keyFn.
 * @returns {Record<string, number>} key → occurrence count.
 */
function countByKey(items, keyFn) {
    const tallies = {};
    for (const entry of items) {
        const bucket = keyFn(entry);
        tallies[bucket] = (tallies[bucket] ?? 0) + 1;
    }
    return tallies;
}
|
|
1322
|
+
/**
 * Rank a count map by descending count and keep the top `limit` entries.
 * Ties keep their Object.entries order (Array.prototype.sort is stable).
 * @returns {Array<{rank: number, key: string, count: number}>}
 */
function topCounts(counts, limit) {
    const ranked = Object.entries(counts).map(([key, count]) => ({ key, count }));
    ranked.sort((left, right) => right.count - left.count);
    return ranked
        .slice(0, limit)
        .map((entry, position) => ({ rank: position + 1, ...entry }));
}
|
|
1329
|
+
/**
 * Wrap a payload as an MCP text-content result.
 *
 * When a response budget is supplied and the serialized payload exceeds
 * budget.maxChars, the payload is shaped down via shapeResponse() and the
 * shaping metadata is attached as _responseMeta. Error payloads are never
 * shaped, preserving full diagnostic detail.
 *
 * @param {unknown} payload - JSON-serializable tool result.
 * @param {boolean} [isError=false] - Marks the MCP result as an error.
 * @param {{maxChars: number}} [budget] - Optional response size budget.
 */
function json(payload, isError = false, budget) {
    let body = payload;
    const limit = budget ? budget.maxChars : 0;
    if (limit > 0 && !isError) {
        const serialized = JSON.stringify(payload);
        if (serialized.length > limit) {
            const shaped = shapeResponse(payload, budget);
            body = { ...shaped.shaped, _responseMeta: shaped.meta };
        }
    }
    const reply = {
        content: [
            { type: "text", text: JSON.stringify(body, null, 2) },
        ],
    };
    if (isError) {
        reply.isError = true;
    }
    return reply;
}
|
|
1348
|
+
/**
 * Return the SDK client from the tool context, or fail with a CONFIG_ERROR
 * directing the user to run config.validate.
 *
 * @throws {ToolError} When no client was constructed (bad auth/base URL).
 */
function requireClient(ctx) {
    const { client } = ctx;
    if (!client) {
        throw new ToolError("CONFIG_ERROR", "SDK client is not available. Run config.validate and fix auth/base URL.");
    }
    return client;
}
|
|
1354
|
+
/** Round to one decimal place. */
function round1(value) {
    const scaled = value * 10;
    return Math.round(scaled) / 10;
}
|
|
1357
|
+
/** Round to three decimal places. */
function round3(value) {
    const scaled = value * 1000;
    return Math.round(scaled) / 1000;
}
|
|
1360
|
+
/**
 * Express part/total as a percentage rounded to one decimal place.
 * Returns 0 for zero or negative totals (guards divide-by-zero and
 * nonsensical denominators).
 */
function percentage(part, total) {
    if (total <= 0) {
        return 0;
    }
    // Inlined one-decimal rounding: same computation as round1((part/total)*100).
    const pct = (part / total) * 100;
    return Math.round(pct * 10) / 10;
}
|
|
1365
|
+
/**
 * Parse numeric output from Prometheus query responses.
 *
 * Supported result shapes:
 * - scalar: [timestamp, value]
 * - vector: [{ metric: {...}, value: [timestamp, value] }, ...]
 *   (only the first vector sample is read)
 *
 * @param {object} response - Raw Prometheus API response body.
 * @param {number} fallback - Returned when no finite number can be extracted.
 */
function extractPrometheusNumber(response, fallback) {
    if (response.status !== "success") {
        return fallback;
    }
    const data = response.data !== null && typeof response.data === "object"
        ? response.data
        : null;
    if (data === null) {
        return fallback;
    }
    const result = data.result;
    // Scalar shape: result is the [timestamp, value] pair itself.
    if (data.resultType === "scalar" && Array.isArray(result) && result.length >= 2) {
        const scalar = Number.parseFloat(String(result[1]));
        return Number.isFinite(scalar) ? scalar : fallback;
    }
    // Vector shape (also reached when a "scalar" result is malformed):
    // read the first sample's [timestamp, value].
    if (Array.isArray(result) && result.length > 0) {
        const head = result[0] !== null && typeof result[0] === "object" ? result[0] : null;
        const sample = head?.value;
        if (Array.isArray(sample) && sample.length >= 2) {
            const parsed = Number.parseFloat(String(sample[1]));
            return Number.isFinite(parsed) ? parsed : fallback;
        }
    }
    return fallback;
}
|
|
1398
|
+
// Map of known Prometheus metric names to the label sets they expose.
// Label order mirrors what the metrics endpoints emit; histogram `_bucket`
// series additionally carry the standard `le` (upper-bound) label.
const KNOWN_METRIC_LABELS = {
    // Core platform event ingestion.
    core_event_received_total: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    core_event_last_received_seconds: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    // Rubix (orchestration engine) event metrics.
    rubix_event_received_total: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    // In-flight latency histogram family (base, _sum, _count, _bucket).
    rubix_event_inflight_latency_seconds: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    rubix_event_inflight_latency_seconds_sum: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    rubix_event_inflight_latency_seconds_count: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
    ],
    rubix_event_inflight_latency_seconds_bucket: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
        "le",
    ],
    // Runtime histogram family — includes a `status` outcome label.
    rubix_event_runtime_seconds: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
        "status",
    ],
    rubix_event_runtime_seconds_sum: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
        "status",
    ],
    rubix_event_runtime_seconds_count: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
        "status",
    ],
    rubix_event_runtime_seconds_bucket: [
        "account_id",
        "retailer_id",
        "event_name",
        "entity_type",
        "source",
        "status",
        "le",
    ],
    // Batch pre-processing (BPP) pipeline metrics.
    bpp_records_processed_total: [
        "account_id",
        "run_id",
        "stage",
        "first_batch_received",
        "deduplication_finished",
    ],
    bpp_records_unchanged_total: ["account_id", "run_id", "stage"],
    bpp_records_changed_total: ["account_id", "run_id", "stage"],
    bpp_last_run_timestamp_seconds: ["account_id", "run_id", "stage", "status"],
    // Outbound feed metrics.
    feed_sent_total: ["account_id", "feed_ref", "run_id", "data_type"],
    feed_last_run_timestamp_seconds: ["account_id", "feed_ref", "run_id", "status"],
};
|
|
1495
|
+
/**
 * Extract vector samples ({ metric, ... }) from a Prometheus response.
 * Returns [] for failed queries or any unexpected payload shape.
 */
function extractPrometheusVectors(response) {
    if (response.status !== "success") {
        return [];
    }
    const data = response.data;
    if (!data || typeof data !== "object") {
        return [];
    }
    const rows = data.result;
    if (!Array.isArray(rows)) {
        return [];
    }
    // Keep only non-null object entries that look like vector samples.
    return rows.filter((row) => row !== null && typeof row === "object" && "metric" in row);
}
|
|
1508
|
+
/**
 * Parse a Prometheus-style time window string into milliseconds.
 * Supports h (hours), m (minutes), d (days). Throws on invalid format
 * or a non-positive value (e.g. "0h").
 *
 * @param {string} window - e.g. "1h", "30m", "7d".
 * @returns {number} Window duration in milliseconds.
 * @throws {ToolError} VALIDATION_ERROR on bad format or non-positive value.
 */
export function parseWindowMs(window) {
    const match = /^(\d+)([hmd])$/.exec(window);
    if (!match) {
        throw new ToolError("VALIDATION_ERROR", `Invalid window format "${window}". Use <number><unit> where unit is h (hours), m (minutes), or d (days). Examples: 1h, 30m, 7d`);
    }
    const amount = Number.parseInt(match[1], 10);
    if (amount <= 0) {
        throw new ToolError("VALIDATION_ERROR", `Window value must be positive, got ${amount}`);
    }
    // The regex guarantees the unit is one of h/m/d.
    const unitMs = { h: 3600000, m: 60000, d: 86400000 };
    return amount * unitMs[match[2]];
}
|
|
1528
|
+
/**
 * Fetch events from the Event API with pagination, then aggregate by
 * (name|entityType|status). Shared by metrics.topEvents and the
 * metrics.healthCheck Event API fallback path.
 *
 * @param {object} client - SDK client exposing getEvents().
 * @param {object} params - { from, to, maxPages, entityType?, eventStatus?, eventType? }.
 * @returns {Promise<object>} Totals, status breakdown, grouped counts, and unique name/type sets.
 */
async function aggregateEventsFromApi(client, params) {
    const PAGE_SIZE = 500;
    const query = {
        from: params.from,
        to: params.to,
        count: PAGE_SIZE,
    };
    if (params.entityType) {
        query["context.entityType"] = params.entityType;
    }
    if (params.eventStatus) {
        query.eventStatus = params.eventStatus;
    }
    if (params.eventType) {
        query.eventType = params.eventType;
    }
    const collected = [];
    let page = 1;
    let more = true;
    while (more && page <= params.maxPages) {
        // Event API uses 1-based `start` offsets.
        const result = await client.getEvents({ ...query, start: (page - 1) * PAGE_SIZE + 1 });
        const batch = result?.results ?? [];
        if (!Array.isArray(batch) || batch.length === 0) {
            more = false;
            break;
        }
        for (const evt of batch) {
            collected.push({
                name: evt.name ?? "unknown",
                entityType: evt.context?.entityType ?? "unknown",
                status: evt.eventStatus ?? "unknown",
            });
        }
        // Prefer the API's hasMore flag; fall back to a full-page heuristic.
        more = result.hasMore ?? batch.length >= PAGE_SIZE;
        page += 1;
    }
    const groups = new Map();
    const statusCounts = new Map();
    for (const evt of collected) {
        const key = `${evt.name}|${evt.entityType}|${evt.status}`;
        const group = groups.get(key);
        if (group) {
            group.count += 1;
        }
        else {
            groups.set(key, { ...evt, count: 1 });
        }
        statusCounts.set(evt.status, (statusCounts.get(evt.status) ?? 0) + 1);
    }
    return {
        totalEvents: collected.length,
        totalPages: page - 1,
        statusBreakdown: Object.fromEntries(statusCounts),
        groups,
        uniqueNames: new Set(collected.map((e) => e.name)),
        uniqueEntityTypes: new Set(collected.map((e) => e.entityType)),
    };
}
|
|
1588
|
+
/**
 * Rank aggregation groups by count descending and compute each group's
 * percentage of the total (one decimal place). Returns at most topN rows.
 */
function rankTopEvents(groups, totalEvents, topN) {
    const ordered = [...groups.values()].sort((a, b) => b.count - a.count);
    return ordered.slice(0, topN).map((group, position) => ({
        rank: position + 1,
        name: group.name,
        entityType: group.entityType,
        status: group.status,
        count: group.count,
        percentage: totalEvents > 0
            ? Math.round((group.count / totalEvents) * 1000) / 10
            : 0,
    }));
}
|
|
1604
|
+
export function registerToolHandlers(server, ctx) {
|
|
1605
|
+
server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
|
1606
|
+
tools: TOOL_DEFINITIONS.map((t) => ({
|
|
1607
|
+
name: t.name,
|
|
1608
|
+
description: t.description,
|
|
1609
|
+
inputSchema: t.inputSchema,
|
|
1610
|
+
})),
|
|
1611
|
+
}));
|
|
1612
|
+
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
1613
|
+
const toolName = request.params.name;
|
|
1614
|
+
const args = request.params.arguments ?? {};
|
|
1615
|
+
try {
|
|
1616
|
+
// ------- config.validate ----------------------------------------------
|
|
1617
|
+
if (toolName === "config.validate") {
|
|
1618
|
+
const validation = validateConfig(ctx.config);
|
|
1619
|
+
return json({
|
|
1620
|
+
ok: true,
|
|
1621
|
+
readyForApiCalls: validation.isReadyForApiCalls,
|
|
1622
|
+
validation,
|
|
1623
|
+
config: toSafeConfigSummary(ctx.config),
|
|
1624
|
+
});
|
|
1625
|
+
}
|
|
1626
|
+
// ------- health.ping ---------------------------------------------------
|
|
1627
|
+
if (toolName === "health.ping") {
|
|
1628
|
+
const validation = validateConfig(ctx.config);
|
|
1629
|
+
return json({
|
|
1630
|
+
ok: true,
|
|
1631
|
+
status: "ok",
|
|
1632
|
+
sdkClient: ctx.client ? "connected" : "not available",
|
|
1633
|
+
readyForApiCalls: validation.isReadyForApiCalls,
|
|
1634
|
+
validation,
|
|
1635
|
+
config: toSafeConfigSummary(ctx.config),
|
|
1636
|
+
});
|
|
1637
|
+
}
|
|
1638
|
+
// ------- event.build ---------------------------------------------------
|
|
1639
|
+
if (toolName === "event.build") {
|
|
1640
|
+
const parsed = EventBuildInputSchema.parse(args);
|
|
1641
|
+
const event = buildEventPayload(parsed, ctx.config);
|
|
1642
|
+
return json({ ok: true, event });
|
|
1643
|
+
}
|
|
1644
|
+
// ------- event.send ----------------------------------------------------
|
|
1645
|
+
if (toolName === "event.send") {
|
|
1646
|
+
const parsed = EventPublishInputSchema.parse(args);
|
|
1647
|
+
const event = buildEventPayload(parsed, ctx.config);
|
|
1648
|
+
if (parsed.dryRun) {
|
|
1649
|
+
return json({
|
|
1650
|
+
ok: true,
|
|
1651
|
+
dryRun: true,
|
|
1652
|
+
mode: parsed.mode,
|
|
1653
|
+
event,
|
|
1654
|
+
note: "No API call made. Set dryRun=false to send.",
|
|
1655
|
+
});
|
|
1656
|
+
}
|
|
1657
|
+
const client = requireClient(ctx);
|
|
1658
|
+
const result = await client.sendEvent(event, parsed.mode);
|
|
1659
|
+
return json({
|
|
1660
|
+
ok: true,
|
|
1661
|
+
mode: parsed.mode,
|
|
1662
|
+
response: result,
|
|
1663
|
+
});
|
|
1664
|
+
}
|
|
1665
|
+
// ------- event.get -----------------------------------------------------
|
|
1666
|
+
if (toolName === "event.get") {
|
|
1667
|
+
const parsed = EventGetInputSchema.parse(args);
|
|
1668
|
+
const client = requireClient(ctx);
|
|
1669
|
+
const event = await client.getEventById(parsed.eventId);
|
|
1670
|
+
return json({ ok: true, event }, false, ctx.responseBudget);
|
|
1671
|
+
}
|
|
1672
|
+
// ------- event.list ----------------------------------------------------
|
|
1673
|
+
if (toolName === "event.list") {
|
|
1674
|
+
const parsed = EventListInputSchema.parse(args);
|
|
1675
|
+
const client = requireClient(ctx);
|
|
1676
|
+
const events = await client.getEvents(toEventQueryParams(parsed));
|
|
1677
|
+
// Field projection: strip events down to requested fields only
|
|
1678
|
+
if (parsed.fields && parsed.fields.length > 0 && events?.results) {
|
|
1679
|
+
const fieldSet = new Set(parsed.fields);
|
|
1680
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
1681
|
+
events.results = events.results.map((evt) => {
|
|
1682
|
+
const projected = {};
|
|
1683
|
+
for (const field of fieldSet) {
|
|
1684
|
+
if (field.startsWith("context.")) {
|
|
1685
|
+
const subKey = field.slice("context.".length);
|
|
1686
|
+
const evtCtx = evt.context;
|
|
1687
|
+
if (evtCtx && subKey in evtCtx) {
|
|
1688
|
+
if (!projected.context)
|
|
1689
|
+
projected.context = {};
|
|
1690
|
+
projected.context[subKey] =
|
|
1691
|
+
evtCtx[subKey];
|
|
1692
|
+
}
|
|
1693
|
+
}
|
|
1694
|
+
else if (field in evt) {
|
|
1695
|
+
projected[field] = evt[field];
|
|
1696
|
+
}
|
|
1697
|
+
}
|
|
1698
|
+
return projected;
|
|
1699
|
+
});
|
|
1700
|
+
}
|
|
1701
|
+
// Analyze mode: return grouped analysis instead of raw events
|
|
1702
|
+
if (parsed.analyze && events?.results) {
|
|
1703
|
+
const analysis = analyzeEvents(events.results, events.hasMore ?? false);
|
|
1704
|
+
return json({ ok: true, analyze: true, ...analysis }, false, ctx.responseBudget);
|
|
1705
|
+
}
|
|
1706
|
+
return json({ ok: true, events }, false, ctx.responseBudget);
|
|
1707
|
+
}
|
|
1708
|
+
// ------- event.flowInspect ----------------------------------------------
|
|
1709
|
+
if (toolName === "event.flowInspect") {
|
|
1710
|
+
const parsed = EventFlowInspectInputSchema.parse(args);
|
|
1711
|
+
const client = requireClient(ctx);
|
|
1712
|
+
const baseParams = {
|
|
1713
|
+
"context.rootEntityRef": parsed.rootEntityRef,
|
|
1714
|
+
count: 500,
|
|
1715
|
+
...(parsed.from ? { from: parsed.from } : {}),
|
|
1716
|
+
...(parsed.to ? { to: parsed.to } : {}),
|
|
1717
|
+
};
|
|
1718
|
+
if (parsed.rootEntityType) {
|
|
1719
|
+
baseParams["context.rootEntityType"] = parsed.rootEntityType;
|
|
1720
|
+
}
|
|
1721
|
+
if (parsed.rootEntityId !== undefined) {
|
|
1722
|
+
baseParams["context.rootEntityId"] = parsed.rootEntityId;
|
|
1723
|
+
}
|
|
1724
|
+
const fetchPaged = async (extraParams) => {
|
|
1725
|
+
const all = [];
|
|
1726
|
+
let page = 1;
|
|
1727
|
+
let hasMore = true;
|
|
1728
|
+
while (hasMore && page <= parsed.maxPages) {
|
|
1729
|
+
const reqParams = {
|
|
1730
|
+
...baseParams,
|
|
1731
|
+
...extraParams,
|
|
1732
|
+
start: (page - 1) * 500 + 1,
|
|
1733
|
+
};
|
|
1734
|
+
const result = await client.getEvents(reqParams);
|
|
1735
|
+
const rows = Array.isArray(result?.results) ? result.results : [];
|
|
1736
|
+
if (rows.length === 0)
|
|
1737
|
+
break;
|
|
1738
|
+
for (const row of rows) {
|
|
1739
|
+
const rec = asRecord(row);
|
|
1740
|
+
if (rec)
|
|
1741
|
+
all.push(rec);
|
|
1742
|
+
}
|
|
1743
|
+
hasMore = result.hasMore ?? rows.length >= 500;
|
|
1744
|
+
page += 1;
|
|
1745
|
+
}
|
|
1746
|
+
return all;
|
|
1747
|
+
};
|
|
1748
|
+
const orchestrationEvents = await fetchPaged({
|
|
1749
|
+
eventType: "ORCHESTRATION",
|
|
1750
|
+
});
|
|
1751
|
+
let auditEvents = [];
|
|
1752
|
+
if (parsed.includeAudit) {
|
|
1753
|
+
auditEvents = await fetchPaged({
|
|
1754
|
+
eventType: "ORCHESTRATION_AUDIT",
|
|
1755
|
+
});
|
|
1756
|
+
}
|
|
1757
|
+
// Fetch SCHEDULED events separately (if requested)
|
|
1758
|
+
let scheduledEvents = [];
|
|
1759
|
+
if (parsed.includeScheduled) {
|
|
1760
|
+
scheduledEvents = await fetchPaged({
|
|
1761
|
+
eventType: "ORCHESTRATION",
|
|
1762
|
+
eventStatus: "SCHEDULED",
|
|
1763
|
+
});
|
|
1764
|
+
}
|
|
1765
|
+
const statusCounts = countByKey(orchestrationEvents, (evt) => typeof evt.eventStatus === "string" ? evt.eventStatus : "UNKNOWN");
|
|
1766
|
+
const entityTypeCounts = countByKey(orchestrationEvents, (evt) => {
|
|
1767
|
+
const ctxRec = asRecord(evt.context);
|
|
1768
|
+
return typeof ctxRec?.entityType === "string"
|
|
1769
|
+
? ctxRec.entityType
|
|
1770
|
+
: "UNKNOWN";
|
|
1771
|
+
});
|
|
1772
|
+
const orchestrationNameCounts = countByKey(orchestrationEvents, (evt) => typeof evt.name === "string" ? evt.name : "UNKNOWN");
|
|
1773
|
+
const categoryCounts = countByKey(auditEvents, (evt) => typeof evt.category === "string" ? evt.category : "UNKNOWN");
|
|
1774
|
+
const actionEvents = auditEvents.filter((evt) => String(evt.category ?? "").toUpperCase() === "ACTION");
|
|
1775
|
+
const actionNameCounts = countByKey(actionEvents, (evt) => typeof evt.name === "string" ? evt.name : "UNKNOWN");
|
|
1776
|
+
const webhookActions = actionEvents
|
|
1777
|
+
.map((evt) => {
|
|
1778
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1779
|
+
const endpoint = attrs["Request Endpoint"];
|
|
1780
|
+
const isWebhook = String(evt.name ?? "")
|
|
1781
|
+
.toLowerCase()
|
|
1782
|
+
.includes("webhook") || endpoint !== undefined;
|
|
1783
|
+
if (!isWebhook)
|
|
1784
|
+
return null;
|
|
1785
|
+
const ctxRec = asRecord(evt.context);
|
|
1786
|
+
return {
|
|
1787
|
+
id: evt.id ?? null,
|
|
1788
|
+
name: evt.name ?? null,
|
|
1789
|
+
source: evt.source ?? null,
|
|
1790
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1791
|
+
entityType: ctxRec?.entityType ?? null,
|
|
1792
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
1793
|
+
endpoint: endpoint ?? null,
|
|
1794
|
+
responseCode: attrs["Response code"] ?? null,
|
|
1795
|
+
responseReason: attrs["Response reason"] ?? null,
|
|
1796
|
+
responseBody: attrs["Response Body"] ?? null,
|
|
1797
|
+
responseHeaders: attrs["Response Headers"] ?? null,
|
|
1798
|
+
requestHeaders: attrs["Request Headers"] ?? null,
|
|
1799
|
+
};
|
|
1800
|
+
})
|
|
1801
|
+
.filter((row) => row !== null)
|
|
1802
|
+
.slice(0, parsed.actionSampleLimit);
|
|
1803
|
+
const mutationActions = actionEvents
|
|
1804
|
+
.map((evt) => {
|
|
1805
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1806
|
+
const request = asRecord(attrs.request);
|
|
1807
|
+
const requestName = asRecord(request?.name);
|
|
1808
|
+
const queryNameRaw = request?.queryName ?? requestName?.name;
|
|
1809
|
+
const queryName = typeof queryNameRaw === "string" ? queryNameRaw : null;
|
|
1810
|
+
const queryType = typeof request?.queryType === "string" ? request.queryType : null;
|
|
1811
|
+
const isMutation = (queryType &&
|
|
1812
|
+
queryType.toLowerCase().includes("mutation")) ||
|
|
1813
|
+
(queryName && /^(update|create|upsert|delete)/i.test(queryName));
|
|
1814
|
+
if (!isMutation)
|
|
1815
|
+
return null;
|
|
1816
|
+
const startTimer = toNumber(attrs.startTimer);
|
|
1817
|
+
const stopTimer = toNumber(attrs.stopTimer);
|
|
1818
|
+
const durationMs = startTimer !== null && stopTimer !== null
|
|
1819
|
+
? Math.max(0, stopTimer - startTimer)
|
|
1820
|
+
: null;
|
|
1821
|
+
const variables = asRecord(request?.variables);
|
|
1822
|
+
const values = asRecord(variables?.values);
|
|
1823
|
+
const input = values?.input ?? null;
|
|
1824
|
+
const ctxRec = asRecord(evt.context);
|
|
1825
|
+
const response = asRecord(attrs.response);
|
|
1826
|
+
return {
|
|
1827
|
+
id: evt.id ?? null,
|
|
1828
|
+
name: evt.name ?? null,
|
|
1829
|
+
source: evt.source ?? null,
|
|
1830
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1831
|
+
entityType: ctxRec?.entityType ?? null,
|
|
1832
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
1833
|
+
queryName,
|
|
1834
|
+
queryType,
|
|
1835
|
+
inputName: typeof request?.inputName === "string"
|
|
1836
|
+
? request.inputName
|
|
1837
|
+
: null,
|
|
1838
|
+
queryString: typeof request?.queryString === "string"
|
|
1839
|
+
? request.queryString
|
|
1840
|
+
: null,
|
|
1841
|
+
input,
|
|
1842
|
+
response: response?.inner ?? response ?? null,
|
|
1843
|
+
durationMs,
|
|
1844
|
+
};
|
|
1845
|
+
})
|
|
1846
|
+
.filter((row) => row !== null)
|
|
1847
|
+
.slice(0, parsed.actionSampleLimit);
|
|
1848
|
+
const sendEventActions = actionEvents
|
|
1849
|
+
.map((evt) => {
|
|
1850
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1851
|
+
const eventPayload = asRecord(attrs.Event);
|
|
1852
|
+
const eventNameRaw = attrs["Event Name"];
|
|
1853
|
+
const eventName = typeof eventNameRaw === "string" ? eventNameRaw : null;
|
|
1854
|
+
const isSendEvent = String(evt.name ?? "")
|
|
1855
|
+
.toLowerCase()
|
|
1856
|
+
.includes("send event") ||
|
|
1857
|
+
Boolean(eventPayload) ||
|
|
1858
|
+
Boolean(eventName);
|
|
1859
|
+
if (!isSendEvent)
|
|
1860
|
+
return null;
|
|
1861
|
+
const meta = asRecord(eventPayload?.meta);
|
|
1862
|
+
const scheduledOn = eventPayload?.scheduledOn ?? meta?.scheduledOn ?? null;
|
|
1863
|
+
const futureDated = attrs["Future Dated"] === true || scheduledOn !== null;
|
|
1864
|
+
return {
|
|
1865
|
+
id: evt.id ?? null,
|
|
1866
|
+
name: evt.name ?? null,
|
|
1867
|
+
source: evt.source ?? null,
|
|
1868
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1869
|
+
eventName,
|
|
1870
|
+
futureDated,
|
|
1871
|
+
scheduledOn,
|
|
1872
|
+
event: eventPayload ?? null,
|
|
1873
|
+
};
|
|
1874
|
+
})
|
|
1875
|
+
.filter((row) => row !== null)
|
|
1876
|
+
.slice(0, parsed.actionSampleLimit);
|
|
1877
|
+
const scheduledActions = sendEventActions.filter((row) => row.futureDated === true);
|
|
1878
|
+
const rulesetDurations = auditEvents
|
|
1879
|
+
.filter((evt) => String(evt.category ?? "") === "ruleSet")
|
|
1880
|
+
.map((evt) => {
|
|
1881
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1882
|
+
const startTimer = toNumber(attrs.startTimer);
|
|
1883
|
+
const stopTimer = toNumber(attrs.stopTimer);
|
|
1884
|
+
const durationMs = startTimer !== null && stopTimer !== null
|
|
1885
|
+
? Math.max(0, stopTimer - startTimer)
|
|
1886
|
+
: null;
|
|
1887
|
+
return {
|
|
1888
|
+
id: evt.id ?? null,
|
|
1889
|
+
name: evt.name ?? null,
|
|
1890
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1891
|
+
source: evt.source ?? null,
|
|
1892
|
+
durationMs,
|
|
1893
|
+
};
|
|
1894
|
+
})
|
|
1895
|
+
.sort((a, b) => Number(b.durationMs ?? Number.NEGATIVE_INFINITY) -
|
|
1896
|
+
Number(a.durationMs ?? Number.NEGATIVE_INFINITY))
|
|
1897
|
+
.slice(0, parsed.actionSampleLimit);
|
|
1898
|
+
// --- Snapshot extraction (category=snapshot) — gated by includeSnapshots ---
|
|
1899
|
+
const snapshotRawCount = auditEvents.filter((evt) => String(evt.category ?? "") === "snapshot").length;
|
|
1900
|
+
const snapshots = parsed.includeSnapshots
|
|
1901
|
+
? auditEvents
|
|
1902
|
+
.filter((evt) => String(evt.category ?? "") === "snapshot")
|
|
1903
|
+
.map((evt) => {
|
|
1904
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1905
|
+
const ctxRec = asRecord(evt.context);
|
|
1906
|
+
return {
|
|
1907
|
+
id: evt.id ?? null,
|
|
1908
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1909
|
+
entityType: ctxRec?.entityType ?? null,
|
|
1910
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
1911
|
+
status: attrs.status ?? null,
|
|
1912
|
+
items: attrs.items ?? null,
|
|
1913
|
+
attributes: attrs.attributes ?? null,
|
|
1914
|
+
customer: attrs.customer ?? null,
|
|
1915
|
+
toAddress: attrs.toAddress ?? null,
|
|
1916
|
+
fromAddress: attrs.fromAddress ?? null,
|
|
1917
|
+
deliveryType: attrs.deliveryType ?? null,
|
|
1918
|
+
};
|
|
1919
|
+
})
|
|
1920
|
+
.slice(0, parsed.actionSampleLimit)
|
|
1921
|
+
: undefined;
|
|
1922
|
+
// --- Custom log extraction (category=CUSTOM) — gated by includeCustomLogs ---
|
|
1923
|
+
const customLogRawCount = auditEvents.filter((evt) => String(evt.category ?? "").toUpperCase() === "CUSTOM").length;
|
|
1924
|
+
const customLogs = parsed.includeCustomLogs
|
|
1925
|
+
? auditEvents
|
|
1926
|
+
.filter((evt) => String(evt.category ?? "").toUpperCase() === "CUSTOM")
|
|
1927
|
+
.map((evt) => {
|
|
1928
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1929
|
+
const ctxRec = asRecord(evt.context);
|
|
1930
|
+
let logs = [];
|
|
1931
|
+
try {
|
|
1932
|
+
const raw = attrs.LogCollection;
|
|
1933
|
+
if (typeof raw === "string")
|
|
1934
|
+
logs = JSON.parse(raw)?.logs ?? [];
|
|
1935
|
+
else if (raw && typeof raw === "object")
|
|
1936
|
+
logs =
|
|
1937
|
+
raw.logs ?? [];
|
|
1938
|
+
}
|
|
1939
|
+
catch {
|
|
1940
|
+
/* ignore parse errors */
|
|
1941
|
+
}
|
|
1942
|
+
return {
|
|
1943
|
+
id: evt.id ?? null,
|
|
1944
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1945
|
+
entityType: ctxRec?.entityType ?? null,
|
|
1946
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
1947
|
+
source: evt.source ?? null,
|
|
1948
|
+
logs,
|
|
1949
|
+
};
|
|
1950
|
+
})
|
|
1951
|
+
.filter((row) => row.logs.length > 0)
|
|
1952
|
+
.slice(0, parsed.actionSampleLimit)
|
|
1953
|
+
: undefined;
|
|
1954
|
+
// --- Rule props extraction (category=rule) — gated by includeRuleDetails ---
|
|
1955
|
+
const ruleEventRawCount = auditEvents.filter((evt) => String(evt.category ?? "") === "rule").length;
|
|
1956
|
+
const ruleEvents = parsed.includeRuleDetails
|
|
1957
|
+
? auditEvents
|
|
1958
|
+
.filter((evt) => String(evt.category ?? "") === "rule")
|
|
1959
|
+
.map((evt) => {
|
|
1960
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
1961
|
+
const startTimer = toNumber(attrs.startTimer);
|
|
1962
|
+
const stopTimer = toNumber(attrs.stopTimer);
|
|
1963
|
+
const durationMs = startTimer !== null && stopTimer !== null
|
|
1964
|
+
? Math.max(0, stopTimer - startTimer)
|
|
1965
|
+
: null;
|
|
1966
|
+
return {
|
|
1967
|
+
id: evt.id ?? null,
|
|
1968
|
+
name: evt.name ?? null,
|
|
1969
|
+
generatedOn: evt.generatedOn ?? null,
|
|
1970
|
+
source: evt.source ?? null,
|
|
1971
|
+
ruleSet: attrs.ruleSet ?? null,
|
|
1972
|
+
props: attrs.props ?? null,
|
|
1973
|
+
durationMs,
|
|
1974
|
+
};
|
|
1975
|
+
})
|
|
1976
|
+
.slice(0, parsed.actionSampleLimit)
|
|
1977
|
+
: undefined;
|
|
1978
|
+
const inspectedStatuses = new Set(parsed.inspectStatuses.map((status) => status.toUpperCase()));
|
|
1979
|
+
const inspectCandidates = orchestrationEvents
|
|
1980
|
+
.filter((evt) => inspectedStatuses.has(String(evt.eventStatus ?? "").toUpperCase()))
|
|
1981
|
+
.slice(0, parsed.maxDrilldowns);
|
|
1982
|
+
const inspectedEvents = [];
|
|
1983
|
+
for (const candidate of inspectCandidates) {
|
|
1984
|
+
const eventId = typeof candidate.id === "string" ? candidate.id : undefined;
|
|
1985
|
+
if (!eventId)
|
|
1986
|
+
continue;
|
|
1987
|
+
const event = await client.getEventById(eventId);
|
|
1988
|
+
const eventRec = asRecord(event);
|
|
1989
|
+
const eventAttrs = asAttributeMap(eventRec?.attributes);
|
|
1990
|
+
inspectedEvents.push({
|
|
1991
|
+
eventId,
|
|
1992
|
+
name: candidate.name ?? null,
|
|
1993
|
+
eventStatus: candidate.eventStatus ?? null,
|
|
1994
|
+
closeMatches: eventAttrs.closeMatches ?? null,
|
|
1995
|
+
entityStatus: eventAttrs.entityStatus ?? null,
|
|
1996
|
+
message: eventAttrs.message ?? null,
|
|
1997
|
+
event,
|
|
1998
|
+
});
|
|
1999
|
+
}
|
|
2000
|
+
const compactOrchestrationEvents = parsed.includeEventDetails
|
|
2001
|
+
? orchestrationEvents.map((evt) => {
|
|
2002
|
+
const ctxRec = asRecord(evt.context);
|
|
2003
|
+
return {
|
|
2004
|
+
id: evt.id ?? null,
|
|
2005
|
+
name: evt.name ?? null,
|
|
2006
|
+
eventStatus: evt.eventStatus ?? null,
|
|
2007
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2008
|
+
source: evt.source ?? null,
|
|
2009
|
+
entityType: ctxRec?.entityType ?? null,
|
|
2010
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
2011
|
+
rootEntityType: ctxRec?.rootEntityType ?? null,
|
|
2012
|
+
rootEntityRef: ctxRec?.rootEntityRef ?? null,
|
|
2013
|
+
sourceEvents: ctxRec?.sourceEvents ?? [],
|
|
2014
|
+
attributes: asAttributeMap(evt.attributes),
|
|
2015
|
+
};
|
|
2016
|
+
})
|
|
2017
|
+
: undefined;
|
|
2018
|
+
const compactAuditActionEvents = parsed.includeAuditDetails
|
|
2019
|
+
? actionEvents.slice(0, parsed.actionSampleLimit).map((evt) => {
|
|
2020
|
+
const ctxRec = asRecord(evt.context);
|
|
2021
|
+
return {
|
|
2022
|
+
id: evt.id ?? null,
|
|
2023
|
+
name: evt.name ?? null,
|
|
2024
|
+
category: evt.category ?? null,
|
|
2025
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2026
|
+
source: evt.source ?? null,
|
|
2027
|
+
entityType: ctxRec?.entityType ?? null,
|
|
2028
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
2029
|
+
attributes: asAttributeMap(evt.attributes),
|
|
2030
|
+
};
|
|
2031
|
+
})
|
|
2032
|
+
: undefined;
|
|
2033
|
+
// --- Exception extraction — gated by includeExceptions ---
|
|
2034
|
+
const exceptionRawCount = auditEvents.filter((evt) => String(evt.category ?? "").toLowerCase() === "exception" ||
|
|
2035
|
+
asAttributeMap(evt.attributes).exceptionClass !== undefined).length;
|
|
2036
|
+
const exceptions = parsed.includeExceptions
|
|
2037
|
+
? auditEvents
|
|
2038
|
+
.filter((evt) => String(evt.category ?? "").toLowerCase() === "exception" ||
|
|
2039
|
+
asAttributeMap(evt.attributes).exceptionClass !== undefined)
|
|
2040
|
+
.map((evt) => {
|
|
2041
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
2042
|
+
const ctxRec = asRecord(evt.context);
|
|
2043
|
+
return {
|
|
2044
|
+
id: evt.id ?? null,
|
|
2045
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2046
|
+
entityType: ctxRec?.entityType ?? null,
|
|
2047
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
2048
|
+
ruleset: attrs.ruleSetName ?? attrs.ruleSet ?? null,
|
|
2049
|
+
rule: attrs.ruleName ?? null,
|
|
2050
|
+
exceptionClass: attrs.exceptionClass ?? null,
|
|
2051
|
+
message: typeof attrs.exceptionMessage === "string"
|
|
2052
|
+
? attrs.exceptionMessage
|
|
2053
|
+
: typeof attrs.exception === "string"
|
|
2054
|
+
? attrs.exception
|
|
2055
|
+
: null,
|
|
2056
|
+
};
|
|
2057
|
+
})
|
|
2058
|
+
.slice(0, parsed.actionSampleLimit)
|
|
2059
|
+
: undefined;
|
|
2060
|
+
// --- NO_MATCH closeMatches from ruleSet audit events — gated by includeNoMatchDetails ---
|
|
2061
|
+
const noMatchAuditDetails = parsed.includeNoMatchDetails
|
|
2062
|
+
? auditEvents
|
|
2063
|
+
.filter((evt) => String(evt.category ?? "") === "ruleSet" &&
|
|
2064
|
+
String(evt.eventStatus ?? "").toUpperCase() === "NO_MATCH")
|
|
2065
|
+
.map((evt) => {
|
|
2066
|
+
const attrs = asAttributeMap(evt.attributes);
|
|
2067
|
+
return {
|
|
2068
|
+
id: evt.id ?? null,
|
|
2069
|
+
rulesetName: evt.name ?? null,
|
|
2070
|
+
source: evt.source ?? null,
|
|
2071
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2072
|
+
entityStatus: attrs.entityStatus ?? null,
|
|
2073
|
+
message: attrs.message ?? null,
|
|
2074
|
+
closeMatches: attrs.closeMatches ?? null,
|
|
2075
|
+
};
|
|
2076
|
+
})
|
|
2077
|
+
: undefined;
|
|
2078
|
+
let crossEntity;
|
|
2079
|
+
if (parsed.includeCrossEntity) {
|
|
2080
|
+
const crossEntityTypes = ["FULFILMENT_CHOICE", "FULFILMENT"];
|
|
2081
|
+
const sections = [];
|
|
2082
|
+
for (const childType of crossEntityTypes) {
|
|
2083
|
+
const childOrch = await fetchPaged({
|
|
2084
|
+
eventType: "ORCHESTRATION",
|
|
2085
|
+
"context.rootEntityRef": parsed.rootEntityRef,
|
|
2086
|
+
"context.entityType": childType,
|
|
2087
|
+
});
|
|
2088
|
+
if (childOrch.length === 0)
|
|
2089
|
+
continue;
|
|
2090
|
+
const childStatusCounts = countByKey(childOrch, (evt) => typeof evt.eventStatus === "string" ? evt.eventStatus : "UNKNOWN");
|
|
2091
|
+
sections.push({
|
|
2092
|
+
entityType: childType,
|
|
2093
|
+
orchestrationCount: childOrch.length,
|
|
2094
|
+
statusCounts: childStatusCounts,
|
|
2095
|
+
events: childOrch.map((evt) => {
|
|
2096
|
+
const ctxRec = asRecord(evt.context);
|
|
2097
|
+
return {
|
|
2098
|
+
id: evt.id ?? null,
|
|
2099
|
+
name: evt.name ?? null,
|
|
2100
|
+
eventStatus: evt.eventStatus ?? null,
|
|
2101
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2102
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
2103
|
+
};
|
|
2104
|
+
}),
|
|
2105
|
+
});
|
|
2106
|
+
}
|
|
2107
|
+
crossEntity = sections.length > 0 ? sections : undefined;
|
|
2108
|
+
}
|
|
2109
|
+
const noMatchCount = statusCounts.NO_MATCH ?? 0;
|
|
2110
|
+
const pendingCount = statusCounts.PENDING ?? 0;
|
|
2111
|
+
const failedCount = statusCounts.FAILED ?? 0;
|
|
2112
|
+
const scheduledStatusCount = statusCounts.SCHEDULED ?? 0;
|
|
2113
|
+
const recommendations = [];
|
|
2114
|
+
if (noMatchCount > 0) {
|
|
2115
|
+
recommendations.push("Review NO_MATCH events for missing rulesets or event-name mismatches.");
|
|
2116
|
+
}
|
|
2117
|
+
if (pendingCount > 0) {
|
|
2118
|
+
recommendations.push("Check whether PENDING events have completed audit chains (status lag vs real blockage).");
|
|
2119
|
+
}
|
|
2120
|
+
if (failedCount > 0) {
|
|
2121
|
+
recommendations.push("Investigate FAILED events — check audit exceptions for stack traces and rule errors.");
|
|
2122
|
+
}
|
|
2123
|
+
if (webhookActions.some((w) => Number(w.responseCode) >= 400)) {
|
|
2124
|
+
recommendations.push("Investigate webhook endpoint/auth failures from Response code/Body fields.");
|
|
2125
|
+
}
|
|
2126
|
+
if (exceptionRawCount > 0) {
|
|
2127
|
+
recommendations.push("Rule exceptions detected — check exceptionClass and message for root cause.");
|
|
2128
|
+
}
|
|
2129
|
+
// --- Shared envelope fields ---
|
|
2130
|
+
const totals = {
|
|
2131
|
+
orchestrationCount: orchestrationEvents.length,
|
|
2132
|
+
auditCount: auditEvents.length,
|
|
2133
|
+
actionCount: actionEvents.length,
|
|
2134
|
+
mutationActionCount: mutationActions.length,
|
|
2135
|
+
webhookActionCount: webhookActions.length,
|
|
2136
|
+
sendEventActionCount: sendEventActions.length,
|
|
2137
|
+
scheduledCount: scheduledEvents.length,
|
|
2138
|
+
exceptionCount: exceptionRawCount,
|
|
2139
|
+
snapshotCount: snapshotRawCount,
|
|
2140
|
+
customLogCount: customLogRawCount,
|
|
2141
|
+
ruleEventCount: ruleEventRawCount,
|
|
2142
|
+
};
|
|
2143
|
+
const envelope = {
|
|
2144
|
+
ok: true,
|
|
2145
|
+
rootEntityRef: parsed.rootEntityRef,
|
|
2146
|
+
rootEntityType: parsed.rootEntityType ?? null,
|
|
2147
|
+
rootEntityId: parsed.rootEntityId ?? null,
|
|
2148
|
+
window: { from: parsed.from ?? null, to: parsed.to ?? null },
|
|
2149
|
+
paging: { maxPages: parsed.maxPages, pageSize: 500 },
|
|
2150
|
+
compact: parsed.compact,
|
|
2151
|
+
};
|
|
2152
|
+
// ========= COMPACT MODE =========
|
|
2153
|
+
if (parsed.compact) {
|
|
2154
|
+
// --- analysis.statusFlow: ordered unique statuses seen ---
|
|
2155
|
+
const statusFlow = [];
|
|
2156
|
+
const seenStatuses = new Set();
|
|
2157
|
+
for (const evt of orchestrationEvents) {
|
|
2158
|
+
const s = typeof evt.eventStatus === "string" ? evt.eventStatus : null;
|
|
2159
|
+
if (s && !seenStatuses.has(s)) {
|
|
2160
|
+
seenStatuses.add(s);
|
|
2161
|
+
statusFlow.push(s);
|
|
2162
|
+
}
|
|
2163
|
+
}
|
|
2164
|
+
// --- analysis.entityTypes ---
|
|
2165
|
+
const entityTypes = Object.keys(entityTypeCounts);
|
|
2166
|
+
// --- analysis.timespan ---
|
|
2167
|
+
const timestamps = orchestrationEvents
|
|
2168
|
+
.map((evt) => {
|
|
2169
|
+
const g = evt.generatedOn;
|
|
2170
|
+
return typeof g === "string" ? new Date(g).getTime() : NaN;
|
|
2171
|
+
})
|
|
2172
|
+
.filter((t) => !Number.isNaN(t));
|
|
2173
|
+
const timespan = timestamps.length > 0
|
|
2174
|
+
? {
|
|
2175
|
+
first: new Date(Math.min(...timestamps)).toISOString(),
|
|
2176
|
+
last: new Date(Math.max(...timestamps)).toISOString(),
|
|
2177
|
+
durationMs: Math.max(...timestamps) - Math.min(...timestamps),
|
|
2178
|
+
}
|
|
2179
|
+
: null;
|
|
2180
|
+
// --- analysis.failedWebhookEndpoints ---
|
|
2181
|
+
const failedWebhookEndpoints = webhookActions
|
|
2182
|
+
.filter((w) => Number(w.responseCode) >= 400)
|
|
2183
|
+
.map((w) => String(w.endpoint ?? "unknown"))
|
|
2184
|
+
.filter((v, i, a) => a.indexOf(v) === i);
|
|
2185
|
+
// --- analysis.slowestRulesets (top 3) ---
|
|
2186
|
+
const slowestRulesets = rulesetDurations
|
|
2187
|
+
.filter((r) => r.durationMs !== null)
|
|
2188
|
+
.slice(0, 3)
|
|
2189
|
+
.map((r) => ({ name: r.name ?? "unknown", durationMs: r.durationMs }));
|
|
2190
|
+
const findings = [];
|
|
2191
|
+
if (noMatchCount > 0) {
|
|
2192
|
+
findings.push({
|
|
2193
|
+
code: "NO_MATCH_EVENTS",
|
|
2194
|
+
severity: "CRITICAL",
|
|
2195
|
+
message: `${noMatchCount} NO_MATCH event(s) — missing rulesets or event-name mismatches`,
|
|
2196
|
+
});
|
|
2197
|
+
}
|
|
2198
|
+
if (failedCount > 0) {
|
|
2199
|
+
findings.push({
|
|
2200
|
+
code: "FAILED_EVENTS",
|
|
2201
|
+
severity: "HIGH",
|
|
2202
|
+
message: `${failedCount} FAILED event(s) — check audit exceptions for stack traces`,
|
|
2203
|
+
});
|
|
2204
|
+
}
|
|
2205
|
+
if (failedWebhookEndpoints.length > 0) {
|
|
2206
|
+
findings.push({
|
|
2207
|
+
code: "FAILED_WEBHOOKS",
|
|
2208
|
+
severity: "HIGH",
|
|
2209
|
+
message: `${failedWebhookEndpoints.length} webhook endpoint(s) returned errors: ${failedWebhookEndpoints.join(", ")}`,
|
|
2210
|
+
});
|
|
2211
|
+
}
|
|
2212
|
+
if (exceptionRawCount > 0) {
|
|
2213
|
+
findings.push({
|
|
2214
|
+
code: "EXCEPTIONS_FOUND",
|
|
2215
|
+
severity: "HIGH",
|
|
2216
|
+
message: `${exceptionRawCount} rule exception(s) detected`,
|
|
2217
|
+
});
|
|
2218
|
+
}
|
|
2219
|
+
if (pendingCount > 0) {
|
|
2220
|
+
findings.push({
|
|
2221
|
+
code: "PENDING_EVENTS",
|
|
2222
|
+
severity: "MEDIUM",
|
|
2223
|
+
message: `${pendingCount} PENDING event(s) — may indicate processing lag or real blockage`,
|
|
2224
|
+
});
|
|
2225
|
+
}
|
|
2226
|
+
const slowThreshold = 1000;
|
|
2227
|
+
const slowCount = rulesetDurations.filter((r) => r.durationMs !== null && r.durationMs > slowThreshold).length;
|
|
2228
|
+
if (slowCount > 0) {
|
|
2229
|
+
findings.push({
|
|
2230
|
+
code: "SLOW_RULESETS",
|
|
2231
|
+
severity: "MEDIUM",
|
|
2232
|
+
message: `${slowCount} ruleset(s) exceeded ${slowThreshold}ms`,
|
|
2233
|
+
});
|
|
2234
|
+
}
|
|
2235
|
+
// --- compact audit: only failures for webhooks (stripped), top 5 mutations by name ---
|
|
2236
|
+
const compactWebhookActions = webhookActions
|
|
2237
|
+
.filter((w) => Number(w.responseCode) >= 400)
|
|
2238
|
+
.map((w) => ({
|
|
2239
|
+
id: w.id,
|
|
2240
|
+
source: w.source,
|
|
2241
|
+
endpoint: w.endpoint,
|
|
2242
|
+
responseCode: w.responseCode,
|
|
2243
|
+
responseReason: w.responseReason,
|
|
2244
|
+
entityType: w.entityType,
|
|
2245
|
+
entityRef: w.entityRef,
|
|
2246
|
+
}));
|
|
2247
|
+
const mutationNameGroups = {};
|
|
2248
|
+
for (const m of mutationActions) {
|
|
2249
|
+
const key = m.queryName ?? "unknown";
|
|
2250
|
+
mutationNameGroups[key] = (mutationNameGroups[key] ?? 0) + 1;
|
|
2251
|
+
}
|
|
2252
|
+
const compactMutationActions = Object.entries(mutationNameGroups)
|
|
2253
|
+
.sort(([, a], [, b]) => b - a)
|
|
2254
|
+
.slice(0, 5)
|
|
2255
|
+
.map(([queryName, count]) => ({ queryName, count }));
|
|
2256
|
+
// --- compact diagnostics: summary only for inspected events ---
|
|
2257
|
+
const compactInspectedEvents = inspectedEvents.map((evt) => ({
|
|
2258
|
+
eventId: evt.eventId,
|
|
2259
|
+
name: evt.name,
|
|
2260
|
+
eventStatus: evt.eventStatus,
|
|
2261
|
+
closeMatchCount: Array.isArray(evt.closeMatches)
|
|
2262
|
+
? evt.closeMatches.length
|
|
2263
|
+
: 0,
|
|
2264
|
+
}));
|
|
2265
|
+
// --- compact crossEntity: summary without events array ---
|
|
2266
|
+
const compactCrossEntity = crossEntity?.map((section) => ({
|
|
2267
|
+
entityType: section.entityType,
|
|
2268
|
+
orchestrationCount: section.orchestrationCount,
|
|
2269
|
+
statusCounts: section.statusCounts,
|
|
2270
|
+
}));
|
|
2271
|
+
return json({
|
|
2272
|
+
...envelope,
|
|
2273
|
+
totals,
|
|
2274
|
+
orchestration: {
|
|
2275
|
+
statusCounts,
|
|
2276
|
+
entityTypeCounts,
|
|
2277
|
+
topNames: topCounts(orchestrationNameCounts, 10),
|
|
2278
|
+
},
|
|
2279
|
+
scheduled: scheduledEvents.length > 0
|
|
2280
|
+
? { count: scheduledEvents.length }
|
|
2281
|
+
: undefined,
|
|
2282
|
+
audit: parsed.includeAudit
|
|
2283
|
+
? {
|
|
2284
|
+
categoryCounts,
|
|
2285
|
+
webhookActions: compactWebhookActions.length > 0
|
|
2286
|
+
? compactWebhookActions
|
|
2287
|
+
: undefined,
|
|
2288
|
+
mutationActions: compactMutationActions.length > 0
|
|
2289
|
+
? compactMutationActions
|
|
2290
|
+
: undefined,
|
|
2291
|
+
sendEventActions: {
|
|
2292
|
+
count: sendEventActions.length,
|
|
2293
|
+
scheduledCount: scheduledActions.length,
|
|
2294
|
+
},
|
|
2295
|
+
rulesetDurations: rulesetDurations
|
|
2296
|
+
.filter((r) => r.durationMs !== null)
|
|
2297
|
+
.slice(0, 5)
|
|
2298
|
+
.map((r) => ({
|
|
2299
|
+
name: r.name ?? "unknown",
|
|
2300
|
+
durationMs: r.durationMs,
|
|
2301
|
+
})),
|
|
2302
|
+
exceptions,
|
|
2303
|
+
}
|
|
2304
|
+
: undefined,
|
|
2305
|
+
diagnostics: {
|
|
2306
|
+
noMatchCount,
|
|
2307
|
+
pendingCount,
|
|
2308
|
+
failedCount,
|
|
2309
|
+
scheduledCount: scheduledStatusCount,
|
|
2310
|
+
inspectStatuses: parsed.inspectStatuses,
|
|
2311
|
+
inspectedEvents: compactInspectedEvents,
|
|
2312
|
+
noMatchAuditDetails,
|
|
2313
|
+
},
|
|
2314
|
+
crossEntity: compactCrossEntity,
|
|
2315
|
+
analysis: {
|
|
2316
|
+
statusFlow,
|
|
2317
|
+
entityTypes,
|
|
2318
|
+
timespan,
|
|
2319
|
+
failedWebhookEndpoints,
|
|
2320
|
+
slowestRulesets,
|
|
2321
|
+
findings,
|
|
2322
|
+
},
|
|
2323
|
+
recommendations: recommendations.length > 0 ? recommendations : undefined,
|
|
2324
|
+
}, false, ctx.responseBudget);
|
|
2325
|
+
}
|
|
2326
|
+
// ========= FULL MODE (compact: false) =========
|
|
2327
|
+
return json({
|
|
2328
|
+
...envelope,
|
|
2329
|
+
totals,
|
|
2330
|
+
orchestration: {
|
|
2331
|
+
statusCounts,
|
|
2332
|
+
entityTypeCounts,
|
|
2333
|
+
topNames: topCounts(orchestrationNameCounts, 30),
|
|
2334
|
+
events: compactOrchestrationEvents,
|
|
2335
|
+
},
|
|
2336
|
+
scheduled: scheduledEvents.length > 0
|
|
2337
|
+
? scheduledEvents.map((evt) => {
|
|
2338
|
+
const ctxRec = asRecord(evt.context);
|
|
2339
|
+
return {
|
|
2340
|
+
id: evt.id ?? null,
|
|
2341
|
+
name: evt.name ?? null,
|
|
2342
|
+
eventStatus: evt.eventStatus ?? null,
|
|
2343
|
+
generatedOn: evt.generatedOn ?? null,
|
|
2344
|
+
entityType: ctxRec?.entityType ?? null,
|
|
2345
|
+
entityRef: ctxRec?.entityRef ?? null,
|
|
2346
|
+
};
|
|
2347
|
+
})
|
|
2348
|
+
: undefined,
|
|
2349
|
+
audit: parsed.includeAudit
|
|
2350
|
+
? {
|
|
2351
|
+
categoryCounts,
|
|
2352
|
+
topActionNames: topCounts(actionNameCounts, 30),
|
|
2353
|
+
webhookActions,
|
|
2354
|
+
mutationActions,
|
|
2355
|
+
sendEventActions,
|
|
2356
|
+
scheduledActions,
|
|
2357
|
+
rulesetDurations,
|
|
2358
|
+
exceptions,
|
|
2359
|
+
snapshots,
|
|
2360
|
+
customLogs,
|
|
2361
|
+
ruleEvents,
|
|
2362
|
+
actionEvents: compactAuditActionEvents,
|
|
2363
|
+
}
|
|
2364
|
+
: undefined,
|
|
2365
|
+
diagnostics: {
|
|
2366
|
+
noMatchCount,
|
|
2367
|
+
pendingCount,
|
|
2368
|
+
failedCount,
|
|
2369
|
+
scheduledCount: scheduledStatusCount,
|
|
2370
|
+
inspectStatuses: parsed.inspectStatuses,
|
|
2371
|
+
inspectedEvents,
|
|
2372
|
+
noMatchAuditDetails,
|
|
2373
|
+
},
|
|
2374
|
+
crossEntity,
|
|
2375
|
+
recommendations: recommendations.length > 0 ? recommendations : undefined,
|
|
2376
|
+
}, false, ctx.responseBudget);
|
|
2377
|
+
}
|
|
2378
|
+
// ------- metrics.query -------------------------------------------------
|
|
2379
|
+
if (toolName === "metrics.query") {
|
|
2380
|
+
const parsed = MetricsQueryInputSchema.parse(args);
|
|
2381
|
+
if (parsed.type === "range" && (!parsed.start || !parsed.end || !parsed.step)) {
|
|
2382
|
+
throw new ToolError("VALIDATION_ERROR", "Range queries require start, end, and step.");
|
|
2383
|
+
}
|
|
2384
|
+
const client = requireClient(ctx);
|
|
2385
|
+
const response = await client.queryPrometheus(parsed);
|
|
2386
|
+
return json({
|
|
2387
|
+
ok: response.status === "success",
|
|
2388
|
+
status: response.status,
|
|
2389
|
+
data: response.data,
|
|
2390
|
+
errorType: response.errorType,
|
|
2391
|
+
error: response.error,
|
|
2392
|
+
warnings: response.warnings,
|
|
2393
|
+
}, false, ctx.responseBudget);
|
|
2394
|
+
}
|
|
2395
|
+
// ------- workflow.transitions -------------------------------------------
|
|
2396
|
+
if (toolName === "workflow.transitions") {
|
|
2397
|
+
const parsed = TransitionActionsInputSchema.parse(args);
|
|
2398
|
+
const client = requireClient(ctx);
|
|
2399
|
+
const payload = resolveTransitionRequest(parsed, ctx.config.retailerId);
|
|
2400
|
+
const result = await client.getTransitions(payload);
|
|
2401
|
+
return json({ ok: true, response: result }, false, ctx.responseBudget);
|
|
2402
|
+
}
|
|
2403
|
+
// ------- plugin.list ---------------------------------------------------
|
|
2404
|
+
if (toolName === "plugin.list") {
|
|
2405
|
+
const client = requireClient(ctx);
|
|
2406
|
+
const parsed = PluginListInputSchema.parse(args);
|
|
2407
|
+
const allPlugins = await client.getPlugins();
|
|
2408
|
+
let plugins = typeof allPlugins === "object" && allPlugins !== null
|
|
2409
|
+
? allPlugins
|
|
2410
|
+
: {};
|
|
2411
|
+
// Name filter
|
|
2412
|
+
if (parsed.name) {
|
|
2413
|
+
const filter = parsed.name.toLowerCase();
|
|
2414
|
+
plugins = Object.fromEntries(Object.entries(plugins).filter(([key]) => key.toLowerCase().includes(filter)));
|
|
2415
|
+
}
|
|
2416
|
+
// Compact mode: strip parameters and eventAttributes, keep only ruleInfo
|
|
2417
|
+
if (parsed.compact) {
|
|
2418
|
+
plugins = Object.fromEntries(Object.entries(plugins).map(([key, val]) => {
|
|
2419
|
+
const entry = val;
|
|
2420
|
+
if (entry && typeof entry === "object") {
|
|
2421
|
+
return [key, { ruleInfo: entry.ruleInfo }];
|
|
2422
|
+
}
|
|
2423
|
+
return [key, val];
|
|
2424
|
+
}));
|
|
2425
|
+
}
|
|
2426
|
+
return json({
|
|
2427
|
+
ok: true,
|
|
2428
|
+
count: Object.keys(plugins).length,
|
|
2429
|
+
plugins,
|
|
2430
|
+
}, false, ctx.responseBudget);
|
|
2431
|
+
}
|
|
2432
|
+
// ------- graphql.query -------------------------------------------------
|
|
2433
|
+
if (toolName === "graphql.query") {
|
|
2434
|
+
const client = requireClient(ctx);
|
|
2435
|
+
const parsed = GraphQLQueryInputSchema.parse(args);
|
|
2436
|
+
const isMutation = isMutationOperation(parsed.query);
|
|
2437
|
+
const payload = {
|
|
2438
|
+
query: parsed.query,
|
|
2439
|
+
variables: toSdkVariables(parsed.variables),
|
|
2440
|
+
};
|
|
2441
|
+
const result = await client.graphql(payload, { retry: !isMutation });
|
|
2442
|
+
if (parsed.summarize) {
|
|
2443
|
+
const summary = summarizeConnection(result);
|
|
2444
|
+
if (summary) {
|
|
2445
|
+
return json({ ok: true, summarized: true, ...summary }, false, ctx.responseBudget);
|
|
2446
|
+
}
|
|
2447
|
+
}
|
|
2448
|
+
return json({ ok: true, response: result }, false, ctx.responseBudget);
|
|
2449
|
+
}
|
|
2450
|
+
// ------- batch.create --------------------------------------------------
|
|
2451
|
+
if (toolName === "batch.create") {
|
|
2452
|
+
const client = requireClient(ctx);
|
|
2453
|
+
const parsed = BatchCreateInputSchema.parse(args);
|
|
2454
|
+
const retailerId = parsed.retailerId ?? ctx.config.retailerId;
|
|
2455
|
+
if (!retailerId) {
|
|
2456
|
+
throw new ToolError("VALIDATION_ERROR", "retailerId is required. Set FLUENT_RETAILER_ID or pass retailerId.");
|
|
2457
|
+
}
|
|
2458
|
+
const payload = {
|
|
2459
|
+
name: parsed.name,
|
|
2460
|
+
retailerId,
|
|
2461
|
+
meta: {
|
|
2462
|
+
source: "fluent-mcp-extn",
|
|
2463
|
+
...(parsed.entityType ? { entityType: parsed.entityType } : {}),
|
|
2464
|
+
...(parsed.action ? { action: parsed.action } : {}),
|
|
2465
|
+
},
|
|
2466
|
+
};
|
|
2467
|
+
const result = await client.createJob(payload);
|
|
2468
|
+
return json({ ok: true, job: result });
|
|
2469
|
+
}
|
|
2470
|
+
// ------- batch.send ----------------------------------------------------
|
|
2471
|
+
if (toolName === "batch.send") {
|
|
2472
|
+
const client = requireClient(ctx);
|
|
2473
|
+
const parsed = BatchSendInputSchema.parse(args);
|
|
2474
|
+
// Batch payload shape varies by integration. We validate a baseline
|
|
2475
|
+
// contract here and pass through full payload details to the SDK.
|
|
2476
|
+
const result = await client.sendBatch(parsed.jobId, parsed.payload);
|
|
2477
|
+
return json({ ok: true, batch: result });
|
|
2478
|
+
}
|
|
2479
|
+
// ------- batch.status --------------------------------------------------
|
|
2480
|
+
if (toolName === "batch.status") {
|
|
2481
|
+
const client = requireClient(ctx);
|
|
2482
|
+
const parsed = BatchStatusInputSchema.parse(args);
|
|
2483
|
+
const result = await client.getJobStatus(parsed.jobId);
|
|
2484
|
+
return json({ ok: true, status: result });
|
|
2485
|
+
}
|
|
2486
|
+
// ------- batch.batchStatus ---------------------------------------------
|
|
2487
|
+
if (toolName === "batch.batchStatus") {
|
|
2488
|
+
const client = requireClient(ctx);
|
|
2489
|
+
const parsed = BatchGetBatchStatusInputSchema.parse(args);
|
|
2490
|
+
const result = await client.getBatchStatus(parsed.jobId, parsed.batchId);
|
|
2491
|
+
return json({ ok: true, batchStatus: result });
|
|
2492
|
+
}
|
|
2493
|
+
// ------- batch.results -------------------------------------------------
|
|
2494
|
+
if (toolName === "batch.results") {
|
|
2495
|
+
const client = requireClient(ctx);
|
|
2496
|
+
const parsed = BatchResultsInputSchema.parse(args);
|
|
2497
|
+
const result = await client.getJobResults(parsed.jobId);
|
|
2498
|
+
return json({ ok: true, results: result });
|
|
2499
|
+
}
|
|
2500
|
+
// ------- graphql.queryAll -----------------------------------------------
|
|
2501
|
+
if (toolName === "graphql.queryAll") {
|
|
2502
|
+
const client = requireClient(ctx);
|
|
2503
|
+
const parsed = GraphQLQueryAllInputSchema.parse(args);
|
|
2504
|
+
const payload = {
|
|
2505
|
+
query: parsed.query,
|
|
2506
|
+
variables: toSdkVariables(parsed.variables),
|
|
2507
|
+
pagination: {
|
|
2508
|
+
enabled: true,
|
|
2509
|
+
maxPages: parsed.maxPages,
|
|
2510
|
+
maxRecords: parsed.maxRecords,
|
|
2511
|
+
timeoutMs: parsed.timeoutMs,
|
|
2512
|
+
direction: parsed.direction,
|
|
2513
|
+
},
|
|
2514
|
+
errorHandling: parsed.errorHandling,
|
|
2515
|
+
};
|
|
2516
|
+
const result = await client.graphqlPaginated(payload);
|
|
2517
|
+
const pagination = result.extensions?.autoPagination ?? null;
|
|
2518
|
+
if (parsed.summarize) {
|
|
2519
|
+
const summary = summarizeConnection(result);
|
|
2520
|
+
if (summary) {
|
|
2521
|
+
return json({ ok: true, summarized: true, ...summary, pagination }, false, ctx.responseBudget);
|
|
2522
|
+
}
|
|
2523
|
+
}
|
|
2524
|
+
return json({ ok: true, response: result, pagination }, false, ctx.responseBudget);
|
|
2525
|
+
}
|
|
2526
|
+
// ------- graphql.batchMutate --------------------------------------------
|
|
2527
|
+
if (toolName === "graphql.batchMutate") {
|
|
2528
|
+
const client = requireClient(ctx);
|
|
2529
|
+
const parsed = GraphQLBatchMutateInputSchema.parse(args);
|
|
2530
|
+
const batchSize = parsed.inputs.length;
|
|
2531
|
+
const mutationConfig = {
|
|
2532
|
+
mutation: parsed.mutation,
|
|
2533
|
+
returnFields: parsed.returnFields ?? ["id", "ref"],
|
|
2534
|
+
operationName: parsed.operationName,
|
|
2535
|
+
};
|
|
2536
|
+
// Build the aliased mutation query
|
|
2537
|
+
const query = buildAliasedMutationQuery(mutationConfig, batchSize);
|
|
2538
|
+
// Build variables: { input1: inputs[0], input2: inputs[1], ... }
|
|
2539
|
+
const variables = {};
|
|
2540
|
+
for (let i = 0; i < batchSize; i++) {
|
|
2541
|
+
variables[`input${i + 1}`] = parsed.inputs[i];
|
|
2542
|
+
}
|
|
2543
|
+
const gqlPayload = {
|
|
2544
|
+
query,
|
|
2545
|
+
variables: toSdkVariables(variables),
|
|
2546
|
+
pagination: { enabled: false },
|
|
2547
|
+
};
|
|
2548
|
+
const response = await client.graphql(gqlPayload, { retry: false });
|
|
2549
|
+
const batchResult = parseAliasedMutationResponse(response, batchSize, parsed.mutation, parsed.inputs);
|
|
2550
|
+
return json({
|
|
2551
|
+
ok: batchResult.allSucceeded,
|
|
2552
|
+
summary: formatErrorSummary(batchResult),
|
|
2553
|
+
executed: batchResult.executed,
|
|
2554
|
+
failed: batchResult.failed,
|
|
2555
|
+
allSucceeded: batchResult.allSucceeded,
|
|
2556
|
+
allFailed: batchResult.allFailed,
|
|
2557
|
+
results: batchResult.results,
|
|
2558
|
+
errors: batchResult.errors.length > 0 ? batchResult.errors : undefined,
|
|
2559
|
+
query,
|
|
2560
|
+
});
|
|
2561
|
+
}
|
|
2562
|
+
// ------- graphql.introspect ---------------------------------------------
|
|
2563
|
+
if (toolName === "graphql.introspect") {
|
|
2564
|
+
const client = requireClient(ctx);
|
|
2565
|
+
const parsed = GraphQLIntrospectInputSchema.parse(args);
|
|
2566
|
+
const introspectionService = new GraphQLIntrospectionService(client.asGraphQLCapable());
|
|
2567
|
+
// Mode: list mutations
|
|
2568
|
+
if (parsed.listMutations) {
|
|
2569
|
+
const mutations = await introspectionService.getMutations();
|
|
2570
|
+
return json({ ok: true, mutations, count: mutations.length });
|
|
2571
|
+
}
|
|
2572
|
+
// Mode: list queries
|
|
2573
|
+
if (parsed.listQueries) {
|
|
2574
|
+
const schema = await introspectionService.getSchema();
|
|
2575
|
+
const queryType = schema.types.find((t) => t.name === schema.queryType?.name);
|
|
2576
|
+
const queries = queryType?.fields?.map((f) => f.name) ?? [];
|
|
2577
|
+
return json({ ok: true, queries, count: queries.length });
|
|
2578
|
+
}
|
|
2579
|
+
// Mode: inspect mutation
|
|
2580
|
+
if (parsed.mutation) {
|
|
2581
|
+
const mutation = await introspectionService.getMutation(parsed.mutation);
|
|
2582
|
+
if (!mutation) {
|
|
2583
|
+
throw new ToolError("VALIDATION_ERROR", `Mutation '${parsed.mutation}' not found in schema.`);
|
|
2584
|
+
}
|
|
2585
|
+
return json({ ok: true, mutation });
|
|
2586
|
+
}
|
|
2587
|
+
// Mode: inspect type
|
|
2588
|
+
if (parsed.type) {
|
|
2589
|
+
const inputType = await introspectionService.getInputType(parsed.type);
|
|
2590
|
+
if (!inputType) {
|
|
2591
|
+
throw new ToolError("VALIDATION_ERROR", `Input type '${parsed.type}' not found in schema.`);
|
|
2592
|
+
}
|
|
2593
|
+
return json({ ok: true, inputType });
|
|
2594
|
+
}
|
|
2595
|
+
throw new ToolError("VALIDATION_ERROR", "Specify one of: type, mutation, listMutations, or listQueries.");
|
|
2596
|
+
}
|
|
2597
|
+
// ------- connection.test ------------------------------------------------
|
|
2598
|
+
if (toolName === "connection.test") {
|
|
2599
|
+
const client = requireClient(ctx);
|
|
2600
|
+
// FluentConnectionTester expects a FluentClient with graphql() method.
|
|
2601
|
+
// Our adapter satisfies this duck-typed contract.
|
|
2602
|
+
const tester = new FluentConnectionTester(client.asGraphQLCapable(), {
|
|
2603
|
+
retailerId: ctx.config.retailerId ?? "unknown",
|
|
2604
|
+
logger: console,
|
|
2605
|
+
});
|
|
2606
|
+
const result = await tester.testConnection();
|
|
2607
|
+
return json({
|
|
2608
|
+
ok: result.success,
|
|
2609
|
+
duration: result.duration,
|
|
2610
|
+
...(result.success
|
|
2611
|
+
? { details: result.details }
|
|
2612
|
+
: { error: result.error }),
|
|
2613
|
+
});
|
|
2614
|
+
}
|
|
2615
|
+
// ------- webhook.validate -----------------------------------------------
// Validates a webhook payload in two tiers: (1) presence of the required
// name/id/retailerId fields, and (2) — only when both signature and
// publicKey are supplied — RSA signature verification via
// WebhookValidationService. Without signature inputs, only tier (1) runs.
if (toolName === "webhook.validate") {
    const parsed = WebhookValidateInputSchema.parse(args);
    const payload = parsed.payload;
    // Basic field validation: collect every missing required field so the
    // caller sees the full list, not just the first failure.
    const missingFields = [];
    if (!payload.name)
        missingFields.push("name");
    if (!payload.id)
        missingFields.push("id");
    if (!payload.retailerId)
        missingFields.push("retailerId");
    const basicValid = missingFields.length === 0;
    // Signature validation (if signature + publicKey provided)
    if (parsed.signature && parsed.publicKey) {
        // Map the wire algorithm names to SDK enum values; anything
        // unrecognized falls back to SHA512withRSA.
        const algorithmMap = {
            SHA512withRSA: SignatureAlgorithm.SHA512_WITH_RSA,
            MD5withRSA: SignatureAlgorithm.MD5_WITH_RSA,
        };
        const algorithm = algorithmMap[parsed.algorithm] ?? SignatureAlgorithm.SHA512_WITH_RSA;
        // Use raw body if provided (preserves exact bytes the signature
        // was computed over); fall back to re-serialized JSON otherwise.
        // NOTE(review): JSON.stringify re-serialization may differ from the
        // sender's original bytes (key order, whitespace), which can fail a
        // valid signature — rawBody is the reliable path.
        const hasRawBody = typeof parsed.rawBody === "string" && parsed.rawBody.length > 0;
        const rawPayload = hasRawBody
            ? parsed.rawBody
            : JSON.stringify(parsed.payload);
        try {
            // Silence the validator's internal logging; results are
            // reported through the tool response instead.
            const noopLogger = {
                debug: () => { },
                info: () => { },
                warn: () => { },
                error: () => { },
            };
            const validator = new WebhookValidationService({ algorithm, strictValidation: false }, noopLogger);
            const sigResult = await validator.validateWebhookSignature(rawPayload, parsed.signature, parsed.publicKey, algorithm);
            // Overall success requires BOTH tiers to pass.
            const isValid = sigResult.isValid && basicValid;
            if (isValid) {
                return json({
                    ok: true,
                    basicValidation: { valid: basicValid, missingFields },
                    signatureValidation: {
                        valid: sigResult.isValid,
                        algorithm: sigResult.algorithm,
                        payloadSource: hasRawBody ? "rawBody" : "json-stringify",
                    },
                });
            }
            // Failure: prefer the basic-validation message when fields are
            // missing; otherwise surface the signature error.
            const failureMessage = !basicValid
                ? "Basic validation failed: missing required fields."
                : sigResult.error || "Signature validation failed.";
            const failure = toToolFailure(new ToolError("VALIDATION_ERROR", failureMessage, {
                details: {
                    basicValidation: { valid: basicValid, missingFields },
                    signatureValidation: {
                        valid: sigResult.isValid,
                        algorithm: sigResult.algorithm,
                        payloadSource: hasRawBody ? "rawBody" : "json-stringify",
                        error: sigResult.error ?? undefined,
                    },
                },
            }));
            // The validation detail is duplicated at the top level of the
            // response (in addition to the failure envelope's details).
            return json({
                ...failure,
                basicValidation: { valid: basicValid, missingFields },
                signatureValidation: {
                    valid: sigResult.isValid,
                    algorithm: sigResult.algorithm,
                    payloadSource: hasRawBody ? "rawBody" : "json-stringify",
                    error: sigResult.error ?? undefined,
                },
            });
        }
        catch (e) {
            // Validator threw (e.g. malformed key/signature): report as a
            // validation failure rather than propagating the exception.
            const message = e instanceof Error ? e.message : String(e);
            const failure = toToolFailure(new ToolError("VALIDATION_ERROR", `Signature validation failed: ${message}`, {
                details: {
                    basicValidation: { valid: basicValid, missingFields },
                    signatureValidation: {
                        valid: false,
                        algorithm: parsed.algorithm,
                        payloadSource: hasRawBody ? "rawBody" : "json-stringify",
                        error: message,
                    },
                },
            }));
            return json({
                ...failure,
                basicValidation: { valid: basicValid, missingFields },
                signatureValidation: {
                    valid: false,
                    algorithm: parsed.algorithm,
                    payloadSource: hasRawBody ? "rawBody" : "json-stringify",
                    error: message,
                },
            });
        }
    }
    // Basic validation only
    return json({
        ok: basicValid,
        basicValidation: { valid: basicValid, missingFields },
        note: basicValid
            ? "Basic field validation passed. Provide signature + publicKey for signature verification."
            : "Basic validation failed: missing required fields.",
    });
}
|
|
2721
|
+
// ------- metrics.healthCheck -----------------------------------------------
// Quick orchestration health probe: aggregates event counts by status over a
// time window (Prometheus first, Event API fallback), applies threshold
// heuristics, and returns findings plus suggested next steps.
if (toolName === "metrics.healthCheck") {
    const parsed = MetricsHealthCheckInputSchema.parse(args);
    const client = requireClient(ctx);
    const window = parsed.window;
    const thresholds = parsed.thresholds;
    // Stays "prometheus" unless a metrics query throws, which flips the whole
    // computation to the Event API fallback in the catch below.
    let source = "prometheus";
    let statusBreakdown = {};
    let totalEvents = 0;
    let topEvents = [];
    try {
        // Query 1: status breakdown — per-status event counts over the window.
        const statusResponse = await client.queryPrometheus({
            query: `sum by (status) (increase(rubix_event_runtime_seconds_count[${window}]))`,
            type: "instant",
        });
        // Parse Prometheus vector response
        const statusResult = statusResponse.data?.result ?? [];
        for (const item of statusResult) {
            const status = item.metric?.status ?? "UNKNOWN";
            // item.value is [timestamp, stringValue]; rounded because
            // increase() can yield fractional (extrapolated) counts.
            const value = Math.round(parseFloat(item.value?.[1] ?? "0"));
            if (value > 0) {
                statusBreakdown[status] = value;
                totalEvents += value;
            }
        }
        // Query 2: top events by name+entityType+status (if requested)
        if (parsed.includeTopEvents && totalEvents > 0) {
            const topResponse = await client.queryPrometheus({
                query: `sort_desc(sum by (event_name, entity_type, status) (increase(rubix_event_runtime_seconds_count[${window}])))`,
                type: "instant",
            });
            const topResult = topResponse.data?.result ?? [];
            const parsedResults = [];
            for (const item of topResult) {
                const count = Math.round(parseFloat(item.value?.[1] ?? "0"));
                if (count > 0) {
                    parsedResults.push({
                        name: item.metric?.event_name ?? "unknown",
                        entityType: item.metric?.entity_type ?? "unknown",
                        status: item.metric?.status ?? "unknown",
                        count,
                    });
                }
            }
            // Keep the top N and attach a 1-based rank plus share-of-total,
            // rounded to one decimal place.
            topEvents = parsedResults.slice(0, parsed.topN).map((item, idx) => ({
                rank: idx + 1,
                name: item.name,
                entityType: item.entityType,
                status: item.status,
                count: item.count,
                percentage: totalEvents > 0
                    ? Math.round((item.count / totalEvents) * 1000) / 10
                    : 0,
            }));
        }
    }
    catch {
        // Prometheus unavailable — fall back to Event API
        source = "event_api";
        const windowMs = parseWindowMs(window);
        const now = new Date();
        const from = new Date(now.getTime() - windowMs).toISOString();
        const to = now.toISOString();
        // Fixed page cap keeps the fallback bounded; counts may therefore be
        // partial on very busy tenants.
        const agg = await aggregateEventsFromApi(client, {
            from,
            to,
            maxPages: 10,
            eventType: "ORCHESTRATION",
        });
        totalEvents = agg.totalEvents;
        statusBreakdown = agg.statusBreakdown;
        if (parsed.includeTopEvents) {
            topEvents = rankTopEvents(agg.groups, agg.totalEvents, parsed.topN);
        }
    }
    // Apply heuristic findings
    const findings = [];
    const failedCount = statusBreakdown["FAILED"] ?? 0;
    const noMatchCount = statusBreakdown["NO_MATCH"] ?? 0;
    const pendingCount = statusBreakdown["PENDING"] ?? 0;
    // Rates are percentages rounded to one decimal place; 0 when no events.
    const failureRate = totalEvents > 0
        ? Math.round((failedCount / totalEvents) * 1000) / 10
        : 0;
    const pendingRate = totalEvents > 0
        ? Math.round((pendingCount / totalEvents) * 1000) / 10
        : 0;
    // Check 1: Failure rate
    if (failureRate > thresholds.failureRate) {
        findings.push({
            severity: "HIGH",
            type: "HIGH_FAILURE_RATE",
            message: `Failure rate ${failureRate}% exceeds ${thresholds.failureRate}% threshold (${failedCount} of ${totalEvents} events)`,
            value: failureRate,
            threshold: thresholds.failureRate,
        });
    }
    // Check 2: NO_MATCH present — any NO_MATCH at all is treated as critical
    // (fixed threshold of 0, independent of parsed.thresholds).
    if (noMatchCount > 0) {
        findings.push({
            severity: "CRITICAL",
            type: "NO_MATCH_PRESENT",
            message: `${noMatchCount} NO_MATCH events — event names not matching any workflow ruleset`,
            value: noMatchCount,
            threshold: 0,
        });
    }
    // Check 3: PENDING queue
    if (pendingRate > thresholds.pendingRate) {
        findings.push({
            severity: "MEDIUM",
            type: "HIGH_PENDING_RATE",
            message: `Pending rate ${pendingRate}% exceeds ${thresholds.pendingRate}% threshold — async queue may be backed up`,
            value: pendingRate,
            threshold: thresholds.pendingRate,
        });
    }
    // Check 4: Single event dominance — inspects topEvents[0]; the Prometheus
    // path is sort_desc-ordered, and rankTopEvents presumably ranks by count
    // too (NOTE(review): confirm rankTopEvents ordering).
    if (topEvents.length > 0 && topEvents[0].percentage > thresholds.dominanceRate) {
        findings.push({
            severity: "MEDIUM",
            type: "EVENT_DOMINANCE",
            message: `"${topEvents[0].name}" (${topEvents[0].entityType}) accounts for ${topEvents[0].percentage}% of all events — possible runaway loop`,
            value: topEvents[0].percentage,
            threshold: thresholds.dominanceRate,
        });
    }
    // Build recommendations — one set of next steps per finding type present.
    const recommendations = [];
    if (findings.some((f) => f.type === "HIGH_FAILURE_RATE")) {
        recommendations.push("Run event.list({ eventStatus: \"FAILED\", from: \"<recent>\", count: 50 }) to identify failing events");
        recommendations.push("Hand off to /fluent-trace for root cause analysis");
    }
    if (findings.some((f) => f.type === "NO_MATCH_PRESENT")) {
        recommendations.push("Verify event names match workflow ruleset names using /fluent-workflow-analyzer");
        recommendations.push("Check entity subtype and workflow deployment status");
    }
    if (findings.some((f) => f.type === "HIGH_PENDING_RATE")) {
        recommendations.push("Check platform health — PENDING events should clear within seconds");
    }
    if (findings.some((f) => f.type === "EVENT_DOMINANCE")) {
        recommendations.push("Investigate if a SendEvent rule is creating a circular event chain");
    }
    const healthy = findings.length === 0;
    return json({
        ok: true,
        healthy,
        source,
        summary: {
            window,
            totalEvents,
            failureRate,
            pendingRate,
            statusBreakdown,
        },
        findings,
        // Optional sections are omitted (undefined) rather than sent empty.
        topEvents: parsed.includeTopEvents ? topEvents : undefined,
        recommendations: recommendations.length > 0 ? recommendations : undefined,
    });
}
|
|
2881
|
+
// ------- metrics.sloReport -----------------------------------------------
// SLO snapshot over a window: event totals, failure/no-match/pending rates and
// p95 latencies from Prometheus (Event API fallback for the counts), compared
// against caller-supplied thresholds to produce findings + recommendations.
if (toolName === "metrics.sloReport") {
    const parsed = MetricsSloReportInputSchema.parse(args);
    const client = requireClient(ctx);
    const window = parsed.window;
    const thresholds = parsed.thresholds;
    // One PromQL expression per SLO dimension. Echoed back in the response so
    // callers can re-run or adjust the queries directly.
    const promQueries = {
        totalEvents: `sum(increase(rubix_event_runtime_seconds_count[${window}]))`,
        failedEvents: `sum(increase(rubix_event_runtime_seconds_count{status="FAILED"}[${window}]))`,
        noMatchEvents: `sum(increase(rubix_event_runtime_seconds_count{status="NO_MATCH"}[${window}]))`,
        pendingEvents: `sum(increase(rubix_event_runtime_seconds_count{status="PENDING"}[${window}]))`,
        runtimeP95Seconds: `histogram_quantile(0.95, sum by (le) (increase(rubix_event_runtime_seconds_bucket[${window}])))`,
        inflightP95Seconds: `histogram_quantile(0.95, sum by (le) (increase(rubix_event_inflight_latency_seconds_bucket[${window}])))`,
    };
    // Absolute [from, to] bounds for the same window — used by the Event API
    // fallback and reported in the summary either way.
    const now = new Date();
    const from = new Date(now.getTime() - parseWindowMs(window)).toISOString();
    const to = now.toISOString();
    let source = "prometheus";
    let totalEvents = 0;
    let failedEvents = 0;
    let noMatchEvents = 0;
    let pendingEvents = 0;
    let runtimeP95Seconds = null;
    let inflightP95Seconds = null;
    try {
        // All six instant queries run in parallel; any rejection drops the
        // whole batch into the Event API fallback below.
        const [totalResponse, failedResponse, noMatchResponse, pendingResponse, runtimeP95Response, inflightP95Response,] = await Promise.all([
            client.queryPrometheus({
                query: promQueries.totalEvents,
                type: "instant",
            }),
            client.queryPrometheus({
                query: promQueries.failedEvents,
                type: "instant",
            }),
            client.queryPrometheus({
                query: promQueries.noMatchEvents,
                type: "instant",
            }),
            client.queryPrometheus({
                query: promQueries.pendingEvents,
                type: "instant",
            }),
            client.queryPrometheus({
                query: promQueries.runtimeP95Seconds,
                type: "instant",
            }),
            client.queryPrometheus({
                query: promQueries.inflightP95Seconds,
                type: "instant",
            }),
        ]);
        // Counts default to 0; the p95 quantiles default to null so "no data"
        // is distinguishable from a measured zero.
        totalEvents = Math.round(extractPrometheusNumber(totalResponse, 0) ?? 0);
        failedEvents = Math.round(extractPrometheusNumber(failedResponse, 0) ?? 0);
        noMatchEvents = Math.round(extractPrometheusNumber(noMatchResponse, 0) ?? 0);
        pendingEvents = Math.round(extractPrometheusNumber(pendingResponse, 0) ?? 0);
        runtimeP95Seconds = extractPrometheusNumber(runtimeP95Response, null);
        inflightP95Seconds = extractPrometheusNumber(inflightP95Response, null);
    }
    catch {
        // Event API fallback supplies the counts only — latency quantiles are
        // not available from it, so both p95s stay null.
        source = "event_api";
        const agg = await aggregateEventsFromApi(client, {
            from,
            to,
            maxPages: parsed.maxPages,
            eventType: "ORCHESTRATION",
        });
        totalEvents = agg.totalEvents;
        failedEvents = agg.statusBreakdown["FAILED"] ?? 0;
        noMatchEvents = agg.statusBreakdown["NO_MATCH"] ?? 0;
        pendingEvents = agg.statusBreakdown["PENDING"] ?? 0;
        runtimeP95Seconds = null;
        inflightP95Seconds = null;
    }
    // Optional drill-down: top failing events always come from the Event API
    // (even when Prometheus served the counts) and are best-effort — an empty
    // list on failure rather than aborting the report.
    let topFailingEvents;
    if (parsed.includeTopFailingEvents) {
        try {
            const failingAgg = await aggregateEventsFromApi(client, {
                from,
                to,
                maxPages: parsed.maxPages,
                eventStatus: "FAILED",
                eventType: "ORCHESTRATION",
            });
            topFailingEvents = rankTopEvents(failingAgg.groups, failingAgg.totalEvents, parsed.topN);
        }
        catch {
            topFailingEvents = [];
        }
    }
    const failureRate = percentage(failedEvents, totalEvents);
    const noMatchRate = percentage(noMatchEvents, totalEvents);
    const pendingRate = percentage(pendingEvents, totalEvents);
    // Threshold checks: each breach appends one typed finding.
    const findings = [];
    if (failureRate > thresholds.failureRate) {
        findings.push({
            severity: "HIGH",
            type: "HIGH_FAILURE_RATE",
            message: `Failure rate ${failureRate}% exceeds ${thresholds.failureRate}%`,
            value: failureRate,
            threshold: thresholds.failureRate,
        });
    }
    if (noMatchRate > thresholds.noMatchRate) {
        findings.push({
            severity: "CRITICAL",
            type: "HIGH_NO_MATCH_RATE",
            message: `NO_MATCH rate ${noMatchRate}% exceeds ${thresholds.noMatchRate}%`,
            value: noMatchRate,
            threshold: thresholds.noMatchRate,
        });
    }
    if (pendingRate > thresholds.pendingRate) {
        findings.push({
            severity: "MEDIUM",
            type: "HIGH_PENDING_RATE",
            message: `Pending rate ${pendingRate}% exceeds ${thresholds.pendingRate}%`,
            value: pendingRate,
            threshold: thresholds.pendingRate,
        });
    }
    // Latency checks are skipped entirely when the quantile is null (no data
    // or Event API fallback).
    if (runtimeP95Seconds !== null &&
        runtimeP95Seconds > thresholds.runtimeP95Seconds) {
        findings.push({
            severity: "MEDIUM",
            type: "HIGH_RUNTIME_P95",
            message: `Runtime p95 ${round3(runtimeP95Seconds)}s exceeds ${thresholds.runtimeP95Seconds}s`,
            value: round3(runtimeP95Seconds),
            threshold: thresholds.runtimeP95Seconds,
        });
    }
    if (inflightP95Seconds !== null &&
        inflightP95Seconds > thresholds.inflightP95Seconds) {
        findings.push({
            severity: "MEDIUM",
            type: "HIGH_INFLIGHT_P95",
            message: `Inflight p95 ${round3(inflightP95Seconds)}s exceeds ${thresholds.inflightP95Seconds}s`,
            value: round3(inflightP95Seconds),
            threshold: thresholds.inflightP95Seconds,
        });
    }
    // Next-step guidance keyed off the finding types that fired.
    const recommendations = [];
    if (findings.some((f) => f.type === "HIGH_FAILURE_RATE")) {
        recommendations.push("Run metrics.topEvents with eventStatus=FAILED and investigate top failures via /fluent-trace.");
    }
    if (findings.some((f) => f.type === "HIGH_NO_MATCH_RATE")) {
        recommendations.push("Validate event names against workflow rulesets using /fluent-workflow-analyzer.");
    }
    if (findings.some((f) => f.type === "HIGH_PENDING_RATE")) {
        recommendations.push("Check orchestration backlog and re-run metrics.sloReport over shorter windows (e.g., 15m).");
    }
    if (findings.some((f) => f.type === "HIGH_RUNTIME_P95" || f.type === "HIGH_INFLIGHT_P95")) {
        recommendations.push("Use metrics.query to break down latency by event_name/entity_type and isolate slow workflows.");
    }
    return json({
        ok: true,
        source,
        healthy: findings.length === 0,
        summary: {
            window,
            timeWindow: { from, to },
            totalEvents,
            failedEvents,
            noMatchEvents,
            pendingEvents,
            failureRate,
            noMatchRate,
            pendingRate,
            runtimeP95Seconds: runtimeP95Seconds !== null ? round3(runtimeP95Seconds) : null,
            inflightP95Seconds: inflightP95Seconds !== null ? round3(inflightP95Seconds) : null,
        },
        thresholds,
        findings,
        topFailingEvents: parsed.includeTopFailingEvents
            ? topFailingEvents
            : undefined,
        recommendations: recommendations.length > 0 ? recommendations : undefined,
        // Reproducibility aid: the exact PromQL used, or the Event API
        // parameters when the fallback served the data.
        queries: source === "prometheus"
            ? promQueries
            : {
                eventApiFallback: {
                    from,
                    to,
                    eventType: "ORCHESTRATION",
                    maxPages: parsed.maxPages,
                },
            },
    });
}
|
|
3069
|
+
// ------- metrics.labelCatalog --------------------------------------------
// Discovers which labels (and sample values) a metric actually carries by
// sampling live series, then merges in a static known-label mapping so the
// catalog is useful even when no live series exist in the window.
if (toolName === "metrics.labelCatalog") {
    const parsed = MetricsLabelCatalogInputSchema.parse(args);
    const client = requireClient(ctx);
    parseWindowMs(parsed.window); // validate format early
    // last_over_time keeps one sample per series that reported in the window,
    // giving us the full label set without aggregating labels away.
    const query = `last_over_time(${parsed.metric}[${parsed.window}])`;
    const response = await client.queryPrometheus({
        query,
        type: "instant",
    });
    const vectors = extractPrometheusVectors(response);
    // label name -> number of series carrying it
    const liveLabelPresence = new Map();
    // label name -> (label value -> occurrence count)
    const liveLabelValues = new Map();
    for (const vector of vectors) {
        const metric = vector.metric ?? {};
        for (const [rawKey, rawValue] of Object.entries(metric)) {
            // __name__ is the metric name itself, not a user label.
            if (rawKey === "__name__")
                continue;
            liveLabelPresence.set(rawKey, (liveLabelPresence.get(rawKey) ?? 0) + 1);
            if (!liveLabelValues.has(rawKey)) {
                liveLabelValues.set(rawKey, new Map());
            }
            const value = String(rawValue);
            const valueMap = liveLabelValues.get(rawKey);
            valueMap.set(value, (valueMap.get(value) ?? 0) + 1);
        }
    }
    // Static hints for this metric, if the caller wants them merged in.
    const knownLabels = parsed.includeKnownLabels
        ? KNOWN_METRIC_LABELS[parsed.metric] ?? []
        : [];
    const mergedLabels = new Set([
        ...Array.from(liveLabelPresence.keys()),
        ...knownLabels,
    ]);
    // One catalog entry per label, alphabetical, with the most frequent
    // values sampled first (capped at maxValuesPerLabel).
    const labels = Array.from(mergedLabels)
        .sort((a, b) => a.localeCompare(b))
        .map((name) => {
        const presenceCount = liveLabelPresence.get(name) ?? 0;
        const valuesMap = liveLabelValues.get(name) ?? new Map();
        const sampleValues = Array.from(valuesMap.entries())
            .sort((a, b) => b[1] - a[1])
            .slice(0, parsed.maxValuesPerLabel)
            .map(([value]) => value);
        // "both" = known mapping AND seen live; "known" = mapping only;
        // "live" = discovered from sampling only.
        const source = knownLabels.includes(name)
            ? presenceCount > 0
                ? "both"
                : "known"
            : "live";
        return {
            name,
            source,
            presentInSeries: presenceCount,
            presenceRate: vectors.length > 0 ? round1((presenceCount / vectors.length) * 100) : 0,
            distinctValues: valuesMap.size,
            sampleValues,
        };
    });
    // Surface degraded-result conditions without failing the tool call.
    const warnings = [];
    if (response.status !== "success") {
        warnings.push(`Live label sampling failed with status=${response.status}. Returning known-label hints where available.`);
    }
    else if (vectors.length === 0) {
        warnings.push("No live series returned for the selected metric/window. Label list may be incomplete.");
    }
    if (knownLabels.length === 0 && labels.length === 0) {
        warnings.push("No labels discovered from live sampling and no known-label mapping exists for this metric.");
    }
    return json({
        ok: true,
        metric: parsed.metric,
        window: parsed.window,
        query,
        liveSampling: {
            status: response.status,
            seriesSampled: vectors.length,
            errorType: response.errorType,
            error: response.error,
        },
        labels,
        knownLabelHintsApplied: knownLabels.length > 0,
        warnings: warnings.length > 0 ? warnings : undefined,
    });
}
|
|
3152
|
+
// ------- metrics.topEvents -----------------------------------------------
|
|
3153
|
+
if (toolName === "metrics.topEvents") {
|
|
3154
|
+
const parsed = MetricsTopEventsInputSchema.parse(args);
|
|
3155
|
+
const client = requireClient(ctx);
|
|
3156
|
+
const toTime = parsed.to ?? new Date().toISOString();
|
|
3157
|
+
const agg = await aggregateEventsFromApi(client, {
|
|
3158
|
+
from: parsed.from,
|
|
3159
|
+
to: toTime,
|
|
3160
|
+
maxPages: parsed.maxPages,
|
|
3161
|
+
entityType: parsed.entityType,
|
|
3162
|
+
eventStatus: parsed.eventStatus,
|
|
3163
|
+
eventType: parsed.eventType,
|
|
3164
|
+
});
|
|
3165
|
+
const topEvents = rankTopEvents(agg.groups, agg.totalEvents, parsed.topN);
|
|
3166
|
+
const failedCount = agg.statusBreakdown["FAILED"] ?? 0;
|
|
3167
|
+
return json({
|
|
3168
|
+
ok: true,
|
|
3169
|
+
analytics: {
|
|
3170
|
+
timeWindow: { from: parsed.from, to: toTime },
|
|
3171
|
+
totalEvents: agg.totalEvents,
|
|
3172
|
+
totalPages: agg.totalPages,
|
|
3173
|
+
uniqueEventNames: agg.uniqueNames.size,
|
|
3174
|
+
uniqueEntityTypes: agg.uniqueEntityTypes.size,
|
|
3175
|
+
failureRate: agg.totalEvents > 0
|
|
3176
|
+
? Math.round((failedCount / agg.totalEvents) * 1000) / 10
|
|
3177
|
+
: 0,
|
|
3178
|
+
statusBreakdown: agg.statusBreakdown,
|
|
3179
|
+
topEvents,
|
|
3180
|
+
},
|
|
3181
|
+
});
|
|
3182
|
+
}
|
|
3183
|
+
// ------- entity.create ------------------------------------------------
|
|
3184
|
+
if (toolName === "entity.create") {
|
|
3185
|
+
const result = await handleEntityCreate(args, ctx);
|
|
3186
|
+
return json(result, false, ctx.responseBudget);
|
|
3187
|
+
}
|
|
3188
|
+
// ------- entity.update ------------------------------------------------
|
|
3189
|
+
if (toolName === "entity.update") {
|
|
3190
|
+
const result = await handleEntityUpdate(args, ctx);
|
|
3191
|
+
return json(result, false, ctx.responseBudget);
|
|
3192
|
+
}
|
|
3193
|
+
// ------- entity.get ---------------------------------------------------
|
|
3194
|
+
if (toolName === "entity.get") {
|
|
3195
|
+
const result = await handleEntityGet(args, ctx);
|
|
3196
|
+
return json(result, false, ctx.responseBudget);
|
|
3197
|
+
}
|
|
3198
|
+
// ------- workflow.upload -----------------------------------------------
|
|
3199
|
+
if (toolName === "workflow.upload") {
|
|
3200
|
+
const result = await handleWorkflowUpload(args, ctx);
|
|
3201
|
+
return json(result, false, ctx.responseBudget);
|
|
3202
|
+
}
|
|
3203
|
+
// ------- workflow.diff -------------------------------------------------
|
|
3204
|
+
if (toolName === "workflow.diff") {
|
|
3205
|
+
const result = await handleWorkflowDiff(args, ctx);
|
|
3206
|
+
return json(result, false, ctx.responseBudget);
|
|
3207
|
+
}
|
|
3208
|
+
// ------- workflow.simulate ----------------------------------------------
|
|
3209
|
+
if (toolName === "workflow.simulate") {
|
|
3210
|
+
const result = await handleWorkflowSimulate(args, ctx);
|
|
3211
|
+
return json(result, false, ctx.responseBudget);
|
|
3212
|
+
}
|
|
3213
|
+
// ------- setting.upsert ------------------------------------------------
|
|
3214
|
+
if (toolName === "setting.upsert") {
|
|
3215
|
+
const result = await handleSettingUpsert(args, ctx);
|
|
3216
|
+
return json(result, false, ctx.responseBudget);
|
|
3217
|
+
}
|
|
3218
|
+
// ------- setting.bulkUpsert --------------------------------------------
|
|
3219
|
+
if (toolName === "setting.bulkUpsert") {
|
|
3220
|
+
const result = await handleSettingBulkUpsert(args, ctx);
|
|
3221
|
+
return json(result, false, ctx.responseBudget);
|
|
3222
|
+
}
|
|
3223
|
+
// ------- environment.discover ------------------------------------------
|
|
3224
|
+
if (toolName === "environment.discover") {
|
|
3225
|
+
const result = await handleEnvironmentDiscover(args, ctx);
|
|
3226
|
+
return json(result, false, ctx.responseBudget);
|
|
3227
|
+
}
|
|
3228
|
+
// ------- environment.validate ------------------------------------------
|
|
3229
|
+
if (toolName === "environment.validate") {
|
|
3230
|
+
const result = await handleEnvironmentValidate(args, ctx);
|
|
3231
|
+
return json(result, false, ctx.responseBudget);
|
|
3232
|
+
}
|
|
3233
|
+
// ------- test.assert ---------------------------------------------------
|
|
3234
|
+
if (toolName === "test.assert") {
|
|
3235
|
+
const result = await handleTestAssert(args, ctx);
|
|
3236
|
+
return json(result, false, ctx.responseBudget);
|
|
3237
|
+
}
|
|
3238
|
+
throw new ToolError("VALIDATION_ERROR", `Unknown tool: ${toolName}`);
|
|
3239
|
+
}
|
|
3240
|
+
catch (error) {
    // Schema failures become a structured VALIDATION_ERROR listing each bad
    // argument path + message, so MCP clients can correct the call without
    // parsing free-form text.
    if (error instanceof z.ZodError) {
        return json(toToolFailure(new ToolError("VALIDATION_ERROR", "Invalid tool arguments.", {
            details: {
                issues: error.issues.map((issue) => ({
                    path: issue.path.join("."),
                    message: issue.message,
                })),
            },
        })), true);
    }
    // Everything else (ToolError or unexpected) goes through the shared
    // failure shaper. NOTE(review): the second json() argument presumably
    // flags the response as an error — confirm against the json() helper.
    return json(toToolFailure(error), true);
}
|
|
3253
|
+
});
|
|
3254
|
+
}
|