ralph-hero-mcp-server 2.4.59 → 2.4.64
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/github-client.js +20 -1
- package/dist/index.js +17 -3
- package/dist/lib/debug-logger.js +171 -0
- package/dist/lib/pipeline-detection.js +16 -0
- package/dist/tools/debug-tools.js +338 -0
- package/dist/tools/project-management-tools.js +1 -1
- package/package.json +1 -1
package/dist/github-client.js
CHANGED
|
@@ -8,6 +8,7 @@
|
|
|
8
8
|
import { graphql } from "@octokit/graphql";
|
|
9
9
|
import { RateLimiter } from "./lib/rate-limiter.js";
|
|
10
10
|
import { SessionCache } from "./lib/cache.js";
|
|
11
|
+
import { extractOperationName, sanitize } from "./lib/debug-logger.js";
|
|
11
12
|
/**
|
|
12
13
|
* The rateLimit fragment to include in every query for proactive tracking.
|
|
13
14
|
*/
|
|
@@ -23,7 +24,7 @@ const RATE_LIMIT_FRAGMENT = `
|
|
|
23
24
|
/**
|
|
24
25
|
* Create an authenticated GitHub GraphQL client.
|
|
25
26
|
*/
|
|
26
|
-
export function createGitHubClient(clientConfig) {
|
|
27
|
+
export function createGitHubClient(clientConfig, debugLogger) {
|
|
27
28
|
const graphqlWithAuth = graphql.defaults({
|
|
28
29
|
headers: {
|
|
29
30
|
authorization: `token ${clientConfig.token}`,
|
|
@@ -61,6 +62,7 @@ export function createGitHubClient(clientConfig) {
|
|
|
61
62
|
fullQuery.slice(insertPos);
|
|
62
63
|
}
|
|
63
64
|
}
|
|
65
|
+
const t0 = Date.now();
|
|
64
66
|
try {
|
|
65
67
|
const response = await graphqlFn(fullQuery, variables || {});
|
|
66
68
|
// Update rate limit tracker from response
|
|
@@ -70,9 +72,26 @@ export function createGitHubClient(clientConfig) {
|
|
|
70
72
|
rateLimiter.update(rl);
|
|
71
73
|
}
|
|
72
74
|
}
|
|
75
|
+
debugLogger?.logGraphQL({
|
|
76
|
+
operation: extractOperationName(fullQuery),
|
|
77
|
+
variables: sanitize(variables),
|
|
78
|
+
durationMs: Date.now() - t0,
|
|
79
|
+
status: 200,
|
|
80
|
+
rateLimitRemaining: response.rateLimit?.remaining,
|
|
81
|
+
rateLimitCost: response.rateLimit?.cost,
|
|
82
|
+
});
|
|
73
83
|
return response;
|
|
74
84
|
}
|
|
75
85
|
catch (error) {
|
|
86
|
+
debugLogger?.logGraphQL({
|
|
87
|
+
operation: extractOperationName(fullQuery),
|
|
88
|
+
variables: sanitize(variables),
|
|
89
|
+
durationMs: Date.now() - t0,
|
|
90
|
+
status: error && typeof error === "object" && "status" in error
|
|
91
|
+
? error.status
|
|
92
|
+
: 500,
|
|
93
|
+
error: error instanceof Error ? error.message : String(error),
|
|
94
|
+
});
|
|
76
95
|
// Handle rate limit errors (403)
|
|
77
96
|
if (error &&
|
|
78
97
|
typeof error === "object" &&
|
package/dist/index.js
CHANGED
|
@@ -10,6 +10,7 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
|
10
10
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
11
11
|
import { createGitHubClient } from "./github-client.js";
|
|
12
12
|
import { FieldOptionCache } from "./lib/cache.js";
|
|
13
|
+
import { createDebugLogger, wrapServerToolWithLogging } from "./lib/debug-logger.js";
|
|
13
14
|
import { toolSuccess, resolveProjectOwner } from "./types.js";
|
|
14
15
|
import { resolveRepoFromProject } from "./lib/helpers.js";
|
|
15
16
|
import { registerProjectTools } from "./tools/project-tools.js";
|
|
@@ -22,6 +23,7 @@ import { registerProjectManagementTools } from "./tools/project-management-tools
|
|
|
22
23
|
import { registerHygieneTools } from "./tools/hygiene-tools.js";
|
|
23
24
|
import { registerRoutingTools } from "./tools/routing-tools.js";
|
|
24
25
|
import { registerSyncTools } from "./tools/sync-tools.js";
|
|
26
|
+
import { registerDebugTools } from "./tools/debug-tools.js";
|
|
25
27
|
/**
|
|
26
28
|
* Initialize the GitHub client from environment variables.
|
|
27
29
|
*/
|
|
@@ -32,7 +34,7 @@ function resolveEnv(name) {
|
|
|
32
34
|
return undefined;
|
|
33
35
|
return val;
|
|
34
36
|
}
|
|
35
|
-
function initGitHubClient() {
|
|
37
|
+
function initGitHubClient(debugLogger) {
|
|
36
38
|
// Repo token: for repository operations (issues, PRs, comments)
|
|
37
39
|
const repoToken = resolveEnv("RALPH_GH_REPO_TOKEN") || resolveEnv("RALPH_HERO_GITHUB_TOKEN");
|
|
38
40
|
// Project token: for Projects V2 operations (fields, workflow state)
|
|
@@ -97,7 +99,7 @@ function initGitHubClient() {
|
|
|
97
99
|
projectNumbers,
|
|
98
100
|
projectOwner: projectOwner || undefined,
|
|
99
101
|
templateProjectNumber,
|
|
100
|
-
});
|
|
102
|
+
}, debugLogger);
|
|
101
103
|
}
|
|
102
104
|
/**
|
|
103
105
|
* Register core tools on the MCP server.
|
|
@@ -238,7 +240,11 @@ function registerCoreTools(server, client) {
|
|
|
238
240
|
*/
|
|
239
241
|
async function main() {
|
|
240
242
|
console.error("[ralph-hero] Starting MCP server...");
|
|
241
|
-
const
|
|
243
|
+
const debugLogger = createDebugLogger();
|
|
244
|
+
if (debugLogger) {
|
|
245
|
+
console.error("[ralph-hero] Debug logging enabled (RALPH_DEBUG=true)");
|
|
246
|
+
}
|
|
247
|
+
const client = initGitHubClient(debugLogger);
|
|
242
248
|
// Attempt lazy repo inference from project (non-fatal)
|
|
243
249
|
try {
|
|
244
250
|
await resolveRepoFromProject(client);
|
|
@@ -255,6 +261,10 @@ async function main() {
|
|
|
255
261
|
});
|
|
256
262
|
// Shared field option cache for project field lookups
|
|
257
263
|
const fieldCache = new FieldOptionCache();
|
|
264
|
+
// Wrap server.tool with debug logging when RALPH_DEBUG=true
|
|
265
|
+
if (debugLogger) {
|
|
266
|
+
wrapServerToolWithLogging(server, debugLogger);
|
|
267
|
+
}
|
|
258
268
|
// Register core tools
|
|
259
269
|
registerCoreTools(server, client);
|
|
260
270
|
// Phase 2: Project and view management tools
|
|
@@ -276,6 +286,10 @@ async function main() {
|
|
|
276
286
|
registerRoutingTools(server, client, fieldCache);
|
|
277
287
|
// Cross-project sync tools
|
|
278
288
|
registerSyncTools(server, client, fieldCache);
|
|
289
|
+
// Debug tools (only when RALPH_DEBUG=true)
|
|
290
|
+
if (process.env.RALPH_DEBUG === 'true') {
|
|
291
|
+
registerDebugTools(server, client);
|
|
292
|
+
}
|
|
279
293
|
// Connect via stdio transport
|
|
280
294
|
const transport = new StdioServerTransport();
|
|
281
295
|
await server.connect(transport);
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Debug logger for Ralph Hero MCP server.
|
|
3
|
+
*
|
|
4
|
+
* Captures tool calls, GraphQL operations, and hook events as JSONL
|
|
5
|
+
* when RALPH_DEBUG=true. Returns null when disabled for zero overhead.
|
|
6
|
+
* Follows the RateLimiter pattern (constructor + factory function).
|
|
7
|
+
*/
|
|
8
|
+
import { writeFile, appendFile, mkdir } from "node:fs/promises";
|
|
9
|
+
import { join } from "node:path";
|
|
10
|
+
import { homedir } from "node:os";
|
|
11
|
+
import { randomBytes } from "node:crypto";
|
|
12
|
+
// ---------------------------------------------------------------------------
// Sanitization
// ---------------------------------------------------------------------------
const SENSITIVE_PATTERNS = /token|auth|secret|key|password|credential/i;
/**
 * Strip fields whose keys match sensitive patterns (token/auth/secret/
 * key/password/credential, case-insensitive).
 *
 * Recurses into nested plain objects and arrays so that secrets one level
 * down (e.g. `{ headers: { authorization } }`) are also redacted before the
 * value reaches a log file. Null/undefined and primitive inputs are
 * returned unchanged.
 */
export function sanitize(obj) {
  // Pass through null/undefined and primitives untouched (previously a
  // string input would be spread into a char-index map).
  if (!obj || typeof obj !== "object")
    return obj;
  // Arrays: sanitize each element, preserving order.
  if (Array.isArray(obj))
    return obj.map((item) => sanitize(item));
  const result = {};
  for (const [k, v] of Object.entries(obj)) {
    if (SENSITIVE_PATTERNS.test(k)) {
      result[k] = "[REDACTED]";
    }
    else if (v && typeof v === "object") {
      // Recurse so nested sensitive keys are redacted too.
      result[k] = sanitize(v);
    }
    else {
      result[k] = v;
    }
  }
  return result;
}
|
|
33
|
+
// ---------------------------------------------------------------------------
// DebugLogger
// ---------------------------------------------------------------------------
/**
 * Append-only JSONL session logger.
 *
 * A session log file is created lazily (on first write) under
 * `~/.ralph-hero/logs` or `options.logDir`. All writes are fire-and-forget
 * so logging never blocks a tool handler.
 */
export class DebugLogger {
  /** Absolute path of this session's log file; null until the file exists. */
  logPath = null;
  /** Memoized creation promise so concurrent callers share one file. */
  logPathPromise = null;
  logDir;
  constructor(options) {
    this.logDir =
      options?.logDir ?? join(homedir(), ".ralph-hero", "logs");
  }
  /**
   * Resolve (creating if necessary) the session log file path.
   *
   * The in-flight promise is memoized: without this, two concurrent
   * append() calls racing through first-time setup would each create
   * (and truncate) their own session file, splitting the session's
   * events across two logs.
   */
  getLogPath() {
    if (!this.logPathPromise) {
      this.logPathPromise = this.#createLogFile();
    }
    return this.logPathPromise;
  }
  /** One-time session file creation: mkdir -p, timestamped unique name. */
  async #createLogFile() {
    await mkdir(this.logDir, { recursive: true });
    const ts = new Date()
      .toISOString()
      .replace(/[:.]/g, "-")
      .replace("T", "-")
      .replace("Z", "");
    const rand = randomBytes(2).toString("hex");
    const path = join(this.logDir, `session-${ts}-${rand}.jsonl`);
    // Create the file up front so readers can see it immediately.
    await writeFile(path, "");
    this.logPath = path;
    return path;
  }
  append(event) {
    // Fire-and-forget — never block tool handlers.
    this.getLogPath()
      .then((path) => appendFile(path, JSON.stringify(event) + "\n"))
      .catch(console.error);
  }
  /** Record one GraphQL request outcome (success or failure). */
  logGraphQL(fields) {
    this.append({
      ts: new Date().toISOString(),
      cat: "graphql",
      operation: fields.operation,
      variables: sanitize(fields.variables),
      durationMs: fields.durationMs,
      status: fields.status,
      rateLimitRemaining: fields.rateLimitRemaining,
      rateLimitCost: fields.rateLimitCost,
      ...(fields.error ? { error: fields.error } : {}),
    });
  }
  /** Record one MCP tool invocation outcome. */
  logTool(fields) {
    this.append({
      ts: new Date().toISOString(),
      cat: "tool",
      tool: fields.tool,
      params: sanitize(fields.params) ?? {},
      durationMs: fields.durationMs,
      ok: fields.ok,
      ...(fields.error ? { error: fields.error } : {}),
    });
  }
  /** Get the current log file path (for testing). */
  getSessionLogPath() {
    return this.logPath;
  }
}
|
|
94
|
+
// ---------------------------------------------------------------------------
// Factory & Wrapper
// ---------------------------------------------------------------------------
/**
 * Create a DebugLogger if RALPH_DEBUG=true, otherwise null (zero overhead).
 */
export function createDebugLogger(options) {
  const enabled = process.env.RALPH_DEBUG === "true";
  return enabled ? new DebugLogger(options) : null;
}
|
|
105
|
+
/**
 * Extract a GraphQL operation name from a query string.
 * Returns undefined when no named `query`/`mutation` is present.
 */
export function extractOperationName(queryString) {
  const found = /(?:query|mutation)\s+(\w+)/.exec(queryString);
  return found ? found[1] : undefined;
}
|
|
112
|
+
/**
 * Wrap a tool handler with debug logging.
 * When logger is null, calls handler directly with zero overhead.
 */
export async function withLogging(logger, toolName, params, handler) {
  if (!logger)
    return handler();
  const startedAt = Date.now();
  let outcome;
  try {
    const result = await handler();
    outcome = { ok: true };
    return result;
  }
  catch (err) {
    outcome = {
      ok: false,
      error: err instanceof Error ? err.message : String(err),
    };
    throw err;
  }
  finally {
    // Single log site for both paths; runs before the return/throw
    // reaches the caller, matching log-then-return ordering.
    logger.logTool({
      tool: toolName,
      params,
      durationMs: Date.now() - startedAt,
      ...outcome,
    });
  }
}
|
|
141
|
+
/**
 * Monkey-patch McpServer.tool() to wrap all tool handlers with debug logging.
 * Called once at startup when RALPH_DEBUG=true. Zero overhead when disabled
 * (this function is never called).
 *
 * Uses `any` deliberately to handle McpServer's complex overloaded signatures.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function wrapServerToolWithLogging(server, logger) {
  const registerTool = server.tool.bind(server);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  server.tool = (...registration) => {
    // Overloads: (name, desc, schema, handler), (name, desc, handler),
    // (name, handler) — the handler, when present, is the trailing argument.
    const toolName = registration[0];
    const last = registration.length - 1;
    const handler = registration[last];
    if (typeof handler !== "function") {
      return registerTool(...registration);
    }
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const wrapped = async (...invocation) => {
      const first = invocation[0];
      const params = first && typeof first === "object" ? first : {};
      return withLogging(logger, toolName, params, () => handler(...invocation));
    };
    return registerTool(...registration.slice(0, last), wrapped);
  };
}
|
|
171
|
+
//# sourceMappingURL=debug-logger.js.map
|
|
@@ -167,6 +167,20 @@ export function detectStreamPipelinePositions(streams, issueStates) {
|
|
|
167
167
|
// ---------------------------------------------------------------------------
|
|
168
168
|
// Helpers
|
|
169
169
|
// ---------------------------------------------------------------------------
|
|
170
|
+
/**
 * Suggest an agent roster for the detected pipeline phase.
 * Analysts scale with the number of research-stage issues (and only in
 * pre-build phases); builders scale up when 5+ issues carry M/L/XL estimates.
 */
function computeSuggestedRoster(phase, issues) {
  const researchPhases = ['RESEARCH', 'SPLIT', 'TRIAGE'];
  const researchStates = ['Research Needed', 'Research in Progress'];
  const largeEstimates = ['M', 'L', 'XL'];
  // Phase-aware: past the research stage, no analysts are needed.
  let analyst = 0;
  if (researchPhases.includes(phase)) {
    const pending = issues.filter((i) => researchStates.includes(i.workflowState)).length;
    if (pending <= 1) {
      analyst = 1;
    }
    else if (pending <= 5) {
      analyst = 2;
    }
    else {
      analyst = 3;
    }
  }
  // Builder scaling: default 1; 2 if 5+ issues with M/L estimates
  const heavyCount = issues.filter((i) => i.estimate != null && largeEstimates.includes(i.estimate)).length;
  return {
    analyst,
    builder: heavyCount >= 5 ? 2 : 1,
    validator: 1,
    integrator: 1,
  };
}
|
|
170
184
|
function buildResult(phase, reason, issues, isGroup, groupPrimary, convergence) {
|
|
171
185
|
// Derive recommendation from convergence state
|
|
172
186
|
let recommendation;
|
|
@@ -179,6 +193,7 @@ function buildResult(phase, reason, issues, isGroup, groupPrimary, convergence)
|
|
|
179
193
|
else {
|
|
180
194
|
recommendation = "wait";
|
|
181
195
|
}
|
|
196
|
+
const suggestedRoster = computeSuggestedRoster(phase, issues);
|
|
182
197
|
return {
|
|
183
198
|
phase,
|
|
184
199
|
reason,
|
|
@@ -187,6 +202,7 @@ function buildResult(phase, reason, issues, isGroup, groupPrimary, convergence)
|
|
|
187
202
|
convergence: { ...convergence, recommendation },
|
|
188
203
|
isGroup,
|
|
189
204
|
groupPrimary,
|
|
205
|
+
suggestedRoster,
|
|
190
206
|
};
|
|
191
207
|
}
|
|
192
208
|
//# sourceMappingURL=pipeline-detection.js.map
|
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MCP tools for debug log collation and statistics.
|
|
3
|
+
*
|
|
4
|
+
* Provides `ralph_hero__collate_debug` (error grouping + GitHub issue creation)
|
|
5
|
+
* and `ralph_hero__debug_stats` (tool call aggregation metrics).
|
|
6
|
+
*
|
|
7
|
+
* Only registered when RALPH_DEBUG=true. Reads JSONL logs written by DebugLogger.
|
|
8
|
+
*/
|
|
9
|
+
import { readdir, readFile } from "node:fs/promises";
|
|
10
|
+
import { join } from "node:path";
|
|
11
|
+
import { homedir } from "node:os";
|
|
12
|
+
import { createHash } from "node:crypto";
|
|
13
|
+
import { z } from "zod";
|
|
14
|
+
import { toolSuccess, toolError } from "../types.js";
|
|
15
|
+
// ---------------------------------------------------------------------------
// JSONL Parsing
// ---------------------------------------------------------------------------
/**
 * Read and parse all JSONL log files matching the time window.
 * Returns the matching events plus the number of session files that
 * contributed at least one event. A missing or unreadable log directory
 * yields an empty result rather than throwing.
 */
export async function readLogEvents(logDir, since) {
  let entries;
  try {
    entries = await readdir(logDir);
  }
  catch {
    return { events: [], sessionsAnalyzed: 0 };
  }
  const sessionFiles = entries
    .filter((name) => name.startsWith("session-") && name.endsWith(".jsonl"))
    .sort();
  const events = [];
  let sessionsAnalyzed = 0;
  for (const name of sessionFiles) {
    const raw = await readFile(join(logDir, name), "utf-8");
    let matched = false;
    for (const line of raw.trim().split("\n")) {
      if (!line)
        continue;
      try {
        const event = JSON.parse(line);
        if (new Date(event.ts) >= since) {
          events.push(event);
          matched = true;
        }
      }
      catch {
        // Skip malformed lines
      }
    }
    if (matched)
      sessionsAnalyzed++;
  }
  return { events, sessionsAnalyzed };
}
|
|
55
|
+
// ---------------------------------------------------------------------------
// Error Collation
// ---------------------------------------------------------------------------
/** An event is an error if it failed, was blocked, or exited non-zero. */
function isErrorEvent(event) {
  if (event.ok === false)
    return true;
  if (event.blocked === true)
    return true;
  return event.exitCode !== undefined && event.exitCode !== 0;
}
/**
 * Normalize an error message for grouping: strip variable parts
 * (hex hashes, timestamps, numbers), collapse whitespace, cap at 200 chars.
 */
function normalizeErrorMessage(msg) {
  const collapsed = msg
    .replace(/\b[0-9a-f]{8,}\b/gi, "<HASH>")
    .replace(/\b\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}[.\dZ]*/g, "<TS>")
    .replace(/\b\d+\b/g, "<N>")
    .replace(/\s+/g, " ");
  return collapsed.trim().slice(0, 200);
}
/** Best available identifier for the event's origin. */
function getEventName(event) {
  return event.tool ?? event.operation ?? event.hook ?? "unknown";
}
/** Classify the failure mode for the signature. */
function getErrorType(event) {
  if (event.exitCode !== undefined)
    return `exit:${event.exitCode}`;
  return event.blocked ? "blocked" : "error";
}
/**
 * Build a signature string for grouping similar errors.
 */
export function buildSignature(event) {
  return `${event.cat}:${getEventName(event)}:${getErrorType(event)}:${normalizeErrorMessage(event.error ?? "")}`;
}
/**
 * Hash a signature to an 8-char dedup key.
 */
export function hashSignature(signature) {
  const digest = createHash("sha256").update(signature).digest("hex");
  return digest.slice(0, 8);
}
/**
 * Group error events by normalized signature.
 * Returns groups sorted by occurrence count, descending.
 */
export function groupErrors(events) {
  const groups = new Map();
  for (const event of events) {
    if (!isErrorEvent(event))
      continue;
    const signature = buildSignature(event);
    const hash = hashSignature(signature);
    const group = groups.get(hash);
    if (!group) {
      groups.set(hash, {
        signature,
        hash,
        count: 1,
        firstSeen: event.ts,
        lastSeen: event.ts,
        sample: event,
      });
    }
    else {
      group.count += 1;
      if (event.ts < group.firstSeen)
        group.firstSeen = event.ts;
      if (event.ts > group.lastSeen)
        group.lastSeen = event.ts;
    }
  }
  return [...groups.values()].sort((a, b) => b.count - a.count);
}
|
|
128
|
+
// ---------------------------------------------------------------------------
// Stats Aggregation
// ---------------------------------------------------------------------------
/**
 * Aggregate tool call statistics.
 * Considers only `cat === "tool"` events and reports call counts, error
 * counts/rates, and duration totals/averages per group (by tool, category,
 * or calendar day).
 */
export function aggregateStats(events, groupBy) {
  const toolEvents = events.filter((e) => e.cat === "tool");
  const totalToolCalls = toolEvents.length;
  const totalErrors = toolEvents.filter((e) => e.ok === false).length;
  const keyFor = (event) => {
    switch (groupBy) {
      case "tool":
        return event.tool ?? "unknown";
      case "category":
        return event.cat;
      case "day":
        return event.ts.slice(0, 10); // YYYY-MM-DD
    }
    return undefined;
  };
  const accumulators = new Map();
  for (const event of toolEvents) {
    const key = keyFor(event);
    const acc = accumulators.get(key) ?? { calls: 0, errors: 0, totalDuration: 0 };
    acc.calls += 1;
    if (event.ok === false)
      acc.errors += 1;
    acc.totalDuration += event.durationMs ?? 0;
    accumulators.set(key, acc);
  }
  const groups = {};
  for (const [key, acc] of accumulators) {
    groups[key] = {
      calls: acc.calls,
      errors: acc.errors,
      errorRate: acc.calls > 0 ? acc.errors / acc.calls : 0,
      avgDurationMs: acc.calls > 0 ? Math.round(acc.totalDuration / acc.calls) : 0,
      totalDurationMs: acc.totalDuration,
    };
  }
  return {
    totalToolCalls,
    totalErrors,
    errorRate: totalToolCalls > 0 ? totalErrors / totalToolCalls : 0,
    groups,
  };
}
|
|
176
|
+
// ---------------------------------------------------------------------------
// Register Debug Tools
// ---------------------------------------------------------------------------
/**
 * Register debug-log collation and statistics tools on the MCP server.
 *
 * `ralph_hero__collate_debug` groups logged errors and creates/updates
 * GitHub issues; `ralph_hero__debug_stats` aggregates tool-call metrics.
 * Only called when RALPH_DEBUG=true; reads JSONL logs written by DebugLogger.
 */
export function registerDebugTools(server, client) {
  const logDir = join(homedir(), ".ralph-hero", "logs");
  // -------------------------------------------------------------------------
  // ralph_hero__collate_debug
  // -------------------------------------------------------------------------
  server.tool("ralph_hero__collate_debug", "Collate debug log errors into GitHub issues. Reads JSONL logs, groups errors by normalized signature, deduplicates against existing `debug-auto` labeled issues, and creates/updates issues. Returns: summary of issues created, updated, and total occurrences.", {
    since: z
      .string()
      .optional()
      .describe("ISO date string. Only process events after this time (default: 24h ago)"),
    dryRun: z
      .boolean()
      .optional()
      .default(false)
      .describe("If true, report what would be created/updated without making changes"),
    projectNumber: z
      .number()
      .optional()
      .describe("Project number override (defaults to configured project)"),
  }, async (args) => {
    try {
      const sinceDate = args.since
        ? new Date(args.since)
        : new Date(Date.now() - 24 * 60 * 60 * 1000);
      const { events, sessionsAnalyzed } = await readLogEvents(logDir, sinceDate);
      const errorGroups = groupErrors(events);
      if (errorGroups.length === 0) {
        return toolSuccess({
          message: "No errors found in the specified time window.",
          sessionsAnalyzed,
          since: sinceDate.toISOString(),
        });
      }
      let issuesCreated = 0;
      let issuesUpdated = 0;
      let totalOccurrences = 0;
      const owner = client.config.owner;
      const repo = client.config.repo;
      // Validate config up front; dry runs never touch GitHub.
      if (!args.dryRun && (!owner || !repo)) {
        return toolError("RALPH_GH_OWNER and RALPH_GH_REPO must be set for issue creation");
      }
      // Resolve the repository node ID once: createIssue requires a real
      // GraphQL node ID. (Previously a "placeholder" string was sent, so
      // every creation failed silently while still being counted.)
      let repoId;
      if (!args.dryRun) {
        try {
          const repoResult = await client.query(`query GetRepoId($owner: String!, $name: String!) {
            repository(owner: $owner, name: $name) { id }
          }`, { owner, name: repo });
          repoId = repoResult.repository?.id;
        }
        catch {
          // Leave repoId undefined; creations below are skipped.
        }
      }
      for (const group of errorGroups) {
        totalOccurrences += group.count;
        if (args.dryRun)
          continue;
        // Search for existing issue with this hash
        const searchQuery = `repo:${owner}/${repo} is:issue is:open label:debug-auto "${group.hash}" in:body`;
        let existingIssue;
        try {
          const searchResult = await client.query(`query SearchDebugIssues($q: String!) {
            search(query: $q, type: ISSUE, first: 1) {
              nodes {
                ... on Issue { id number }
              }
            }
          }`, { q: searchQuery });
          existingIssue = searchResult.search.nodes[0];
        }
        catch {
          // Search failed, treat as no existing issue
        }
        if (existingIssue) {
          // Add an occurrence comment, addressed by the issue's GraphQL
          // node ID (addComment's subjectId is a node ID, not an issue
          // number — the previous `issue:${number}` string could never match).
          try {
            await client.mutate(`mutation AddComment($subjectId: ID!, $body: String!) {
              addComment(input: { subjectId: $subjectId, body: $body }) {
                commentEdge { node { id } }
              }
            }`, {
              subjectId: existingIssue.id,
              body: `## Occurrence Report\n\n- Count: ${group.count}\n- Period: ${group.firstSeen} — ${group.lastSeen}\n- Signature: \`${group.signature}\``,
            });
            // Count the update only once the comment actually landed.
            issuesUpdated++;
          }
          catch {
            // Best-effort comment
          }
        }
        else if (repoId) {
          // Create new issue.
          // NOTE(review): the dedup search filters on label:debug-auto, but
          // createIssue does not attach that label here — confirm labeling is
          // handled elsewhere or add labelIds to the input.
          try {
            await client.mutate(`mutation CreateIssue($repoId: ID!, $title: String!, $body: String!) {
              createIssue(input: { repositoryId: $repoId, title: $title, body: $body }) {
                issue { number }
              }
            }`, {
              repoId,
              title: `[debug-auto] ${getEventName(group.sample)} ${getErrorType(group.sample)}`,
              body: `## Debug Auto-Report\n\n**Hash**: \`${group.hash}\`\n**Signature**: \`${group.signature}\`\n**Occurrences**: ${group.count}\n**First seen**: ${group.firstSeen}\n**Last seen**: ${group.lastSeen}\n\n### Sample Error\n\n\`\`\`json\n${JSON.stringify(group.sample, null, 2)}\n\`\`\`\n\n---\n_Auto-generated by ralph_hero__collate_debug_`,
            });
            // Count the creation only on success (failures were previously
            // swallowed by an inner .catch yet still incremented the count).
            issuesCreated++;
          }
          catch {
            // Skip failed creations
          }
        }
      }
      return toolSuccess({
        since: sinceDate.toISOString(),
        sessionsAnalyzed,
        errorGroups: errorGroups.length,
        totalOccurrences,
        issuesCreated: args.dryRun ? 0 : issuesCreated,
        issuesUpdated: args.dryRun ? 0 : issuesUpdated,
        dryRun: args.dryRun,
        groups: errorGroups.map((g) => ({
          hash: g.hash,
          signature: g.signature,
          count: g.count,
          firstSeen: g.firstSeen,
          lastSeen: g.lastSeen,
        })),
      });
    }
    catch (error) {
      return toolError(`Failed to collate debug logs: ${error instanceof Error ? error.message : String(error)}`);
    }
  });
  // -------------------------------------------------------------------------
  // ralph_hero__debug_stats
  // -------------------------------------------------------------------------
  server.tool("ralph_hero__debug_stats", "Aggregate debug log statistics. Reads JSONL logs and returns tool call counts, error rates, and average durations grouped by tool, category, or day. Returns: totalToolCalls, totalErrors, errorRate, sessionsAnalyzed, per-group breakdown.", {
    since: z
      .string()
      .optional()
      .describe("ISO date string. Only process events after this time (default: 7 days ago)"),
    groupBy: z
      .enum(["tool", "category", "day"])
      .optional()
      .default("tool")
      .describe("How to group statistics (default: 'tool')"),
  }, async (args) => {
    try {
      const sinceDate = args.since
        ? new Date(args.since)
        : new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
      const { events, sessionsAnalyzed } = await readLogEvents(logDir, sinceDate);
      if (events.length === 0) {
        return toolSuccess({
          message: "No events found in the specified time window.",
          sessionsAnalyzed: 0,
          since: sinceDate.toISOString(),
        });
      }
      const stats = aggregateStats(events, args.groupBy);
      return toolSuccess({
        since: sinceDate.toISOString(),
        sessionsAnalyzed,
        totalToolCalls: stats.totalToolCalls,
        totalErrors: stats.totalErrors,
        errorRate: Math.round(stats.errorRate * 10000) / 100, // percentage with 2 decimals
        groupBy: args.groupBy,
        groups: stats.groups,
      });
    }
    catch (error) {
      return toolError(`Failed to compute debug stats: ${error instanceof Error ? error.message : String(error)}`);
    }
  });
}
|
|
338
|
+
//# sourceMappingURL=debug-tools.js.map
|