ralph-hero-mcp-server 2.5.139 → 2.5.142
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/debug-issue-shape.js +202 -0
- package/dist/lib/error-signature.js +194 -0
- package/dist/lib/langfuse-client.js +95 -0
- package/dist/tools/debug-tools.js +304 -96
- package/package.json +1 -1
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Issue body + comment body builders for `ralph_hero__collate_debug` Phase 3b.
|
|
3
|
+
*
|
|
4
|
+
* Each `SignatureGroup` returned by `groupSpansBySignature` becomes either:
|
|
5
|
+
* - a fresh GitHub issue (when no existing `debug-auto` issue carries the
|
|
6
|
+
* same hash within the dedup window), or
|
|
7
|
+
* - a comment on an existing issue (occurrence-update).
|
|
8
|
+
*
|
|
9
|
+
* The body MUST include a machine-parseable hash marker on its own line —
|
|
10
|
+
* `**Hash**: \`<8-char-hash>\`` — because dedup in Phase 3b matches on this
|
|
11
|
+
* exact line via GitHub's code-search index.
|
|
12
|
+
*
|
|
13
|
+
* Token-shaped values are scrubbed from every emitted field. The regex set
|
|
14
|
+
* mirrors `redactTokenAttributes` in `telemetry.ts`: GitHub tokens
|
|
15
|
+
* (`^gh[ps]_`), basic-auth headers, and any attribute key ending in
|
|
16
|
+
* `_TOKEN` are replaced with `[REDACTED]`.
|
|
17
|
+
*/
|
|
18
|
+
// ---------------------------------------------------------------------------
// Token redaction (kept local to avoid cross-module coupling at runtime; the
// shape mirrors telemetry.ts:redactTokenAttributes for consistency)
// ---------------------------------------------------------------------------
const GH_TOKEN_VALUE_RE = /\bgh[psour]_[A-Za-z0-9_]{16,}\b/g;
const TOKEN_KEY_RE = /(_TOKEN|authorization)$/i;
const BASIC_AUTH_RE = /\bBasic\s+[A-Za-z0-9+/=]{8,}\b/g;
/**
 * Scrub token-shaped substrings from a free-form string.
 *
 * GitHub token literals (`ghp_`/`ghs_`/`gho_`/`ghu_`/`ghr_` plus 16+ word
 * characters) become `[REDACTED]`; basic-auth payloads become
 * `Basic [REDACTED]`. Falsy inputs are returned unchanged.
 */
export function scrubTokensFromString(input) {
    if (!input) {
        return input;
    }
    const withoutGithubTokens = input.replace(GH_TOKEN_VALUE_RE, "[REDACTED]");
    return withoutGithubTokens.replace(BASIC_AUTH_RE, "Basic [REDACTED]");
}
/**
 * Scrub token-shaped values from a plain attribute bag.
 *
 * Keys ending in `_TOKEN` or `authorization` (case-insensitive) are replaced
 * wholesale with `[REDACTED]`; string values are run through
 * `scrubTokensFromString`; non-null objects are JSON-stringified before
 * scrubbing so the function stays flat. Returns a shallow copy — the input
 * bag is never mutated.
 */
export function scrubTokensFromAttrs(attrs) {
    const scrubbed = {};
    for (const [key, value] of Object.entries(attrs)) {
        if (TOKEN_KEY_RE.test(key)) {
            scrubbed[key] = "[REDACTED]";
        }
        else if (typeof value === "string") {
            scrubbed[key] = scrubTokensFromString(value);
        }
        else if (value !== null && typeof value === "object") {
            scrubbed[key] = scrubTokensFromString(JSON.stringify(value));
        }
        else {
            scrubbed[key] = value;
        }
    }
    return scrubbed;
}
|
|
62
|
+
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
const TITLE_MAX = 100;
const NORMALIZED_MAX = 60;
/**
 * Cap `s` at `max` characters; over-long input keeps `max - 1` chars and a
 * trailing ellipsis so the result never exceeds `max`.
 */
function truncate(s, max) {
    return s.length <= max ? s : `${s.slice(0, max - 1)}…`;
}
|
|
72
|
+
/**
 * Pull a short, human-readable error blurb out of a span. Falls back through
 * `message` (statusMessage) -> `metadata.exception.message` ->
 * `metadata.error` -> `metadata.message`, and finally to the empty string.
 */
function extractMessage(span) {
    if (!span) {
        return "";
    }
    if (span.message) {
        return span.message;
    }
    const attrs = span.metadata ?? {};
    const exc = attrs.exception;
    const hasExceptionMessage = Boolean(exc) &&
        typeof exc === "object" &&
        "message" in exc &&
        typeof exc.message === "string";
    if (hasExceptionMessage) {
        return exc.message;
    }
    for (const fallbackKey of ["error", "message"]) {
        if (typeof attrs[fallbackKey] === "string") {
            return attrs[fallbackKey];
        }
    }
    return "";
}
|
|
95
|
+
/**
 * Take everything after the second colon of a
 * `${spanName}:${errorType}:${normalized}` signature string. Signatures with
 * fewer than three segments are returned whole. Used to populate the issue
 * title when no sample span message is available.
 */
function normalizedFromSignature(signature) {
    const segments = signature.split(":");
    return segments.length >= 3 ? segments.slice(2).join(":") : signature;
}
|
|
106
|
+
// ---------------------------------------------------------------------------
// buildIssueBody
// ---------------------------------------------------------------------------
/**
 * Build the title + body for a freshly-filed `debug-auto` issue.
 *
 * The body layout is deliberately stable so Phase 3b's dedup regex
 * (`/^\*\*Hash\*\*: `([0-9a-f]{8})`/m`) keeps matching across versions.
 *
 * @param group - SignatureGroup (hash, signature, count, window bounds,
 *   sampleSpans, exampleTraceUrl) produced by `groupSpansBySignature`.
 * @param env - { mcpVersion, nodeVersion, os } stamps rendered under
 *   "## First seen".
 * @returns {{ title: string, body: string }} title capped at TITLE_MAX (100)
 *   chars; body's first line is the machine-parseable hash marker.
 */
export function buildIssueBody(group, env) {
    // Freshest example — assumes sampleSpans is most-recent-first (see
    // groupSpansBySignature's sort); may be undefined for an empty group.
    const sample = group.sampleSpans[0];
    // Fall back to the normalized segment of the signature when the sample
    // span carries no extractable message.
    const rawMessage = extractMessage(sample) || normalizedFromSignature(group.signature);
    const message = scrubTokensFromString(rawMessage);
    const spanName = sample?.name ?? "ralph_hero.error";
    // Inner truncate caps the message fragment at NORMALIZED_MAX (60);
    // outer truncate caps the whole title at TITLE_MAX (100).
    const title = truncate(`[Debug] ${spanName}: ${truncate(message, NORMALIZED_MAX)}`, TITLE_MAX);
    // Markdown table summarising occurrences within this collation window.
    const occurrenceRows = [
        `| Count | First seen | Last seen |`,
        `|---|---|---|`,
        `| ${group.count} | ${group.firstSeen} | ${group.lastSeen} |`,
    ].join("\n");
    // Sample attributes are always token-scrubbed before serialisation.
    const sampleAttrs = sample?.metadata
        ? scrubTokensFromAttrs(sample.metadata)
        : {};
    const errorDetails = Object.keys(sampleAttrs).length
        ? "```json\n" + JSON.stringify(sampleAttrs, null, 2) + "\n```"
        : "_(no attributes captured on sample span)_";
    // Minimal repro block: enough identifying fields to find the span again,
    // message scrubbed the same way as everything else.
    const reproduction = sample
        ? "```json\n" +
            JSON.stringify({
                spanName: sample.name,
                traceId: sample.traceId,
                startTime: sample.startTime,
                errorType: sample.errorType,
                message: scrubTokensFromString(extractMessage(sample)),
            }, null, 2) +
            "\n```"
        : "_(no sample span available)_";
    // NOTE: the `**Hash**: ...` line MUST stay first and unchanged — Phase 3b
    // dedup matches on this exact line.
    const body = [
        `**Hash**: \`${group.hash}\``,
        ``,
        `**Signature**: \`${scrubTokensFromString(group.signature)}\``,
        ``,
        `## First seen`,
        ``,
        `- mcp-server version: \`${env.mcpVersion}\``,
        `- node: \`${env.nodeVersion}\``,
        `- os: \`${env.os}\``,
        ``,
        `## Error details`,
        ``,
        errorDetails,
        ``,
        `## Reproduction (sample span)`,
        ``,
        reproduction,
        ``,
        `## Occurrences`,
        ``,
        occurrenceRows,
        ``,
        `## Langfuse trace`,
        ``,
        `[Open latest example trace](${group.exampleTraceUrl})`,
        ``,
        `---`,
        ``,
        `_Filed automatically by \`ralph_hero__collate_debug\` — Phase 3b (GH-1100). ` +
            `Re-running collation over the same window will append occurrence ` +
            `comments here instead of creating a duplicate issue._`,
    ].join("\n");
    return { title, body };
}
|
|
178
|
+
// ---------------------------------------------------------------------------
// buildCommentBody
// ---------------------------------------------------------------------------
/**
 * Build the occurrence-update comment body posted when an existing
 * `debug-auto` issue is matched by hash. `newCount` counts occurrences in the
 * *current* collation window only — not a cumulative total, because
 * historical comment counts are not read back without extra queries.
 */
export function buildCommentBody(group, newCount, latestTraceUrl) {
    const plural = newCount === 1 ? "" : "s";
    const lines = [
        `## Recurring occurrence`,
        ``,
        `Detected **${newCount}** new occurrence${plural} of this signature in the latest collation window.`,
        ``,
        `- Hash: \`${group.hash}\``,
        `- First seen (this window): ${group.firstSeen}`,
        `- Last seen (this window): ${group.lastSeen}`,
        `- [Latest example trace](${scrubTokensFromString(latestTraceUrl)})`,
        ``,
        `_Posted automatically by \`ralph_hero__collate_debug\` (Phase 3b)._`,
    ];
    return lines.join("\n");
}
|
|
202
|
+
//# sourceMappingURL=debug-issue-shape.js.map
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Error-signature normalization and grouping for Langfuse OTel spans.
|
|
3
|
+
*
|
|
4
|
+
* Used by `ralph_hero__collate_debug` to collapse noisy, near-identical
|
|
5
|
+
* error spans into a small set of "signatures." Each signature is hashed to
|
|
6
|
+
* an 8-char ID that survives across runs, so Phase 3b's GitHub dedup can
|
|
7
|
+
* match an incoming group to an existing issue body by the hash marker.
|
|
8
|
+
*
|
|
9
|
+
* The normalization rules deliberately strip *dynamic* details (issue
|
|
10
|
+
* numbers, timestamps, UUIDs, hashes, quoted paths/names) while preserving
|
|
11
|
+
* the *structural* shape of the message. Two errors that differ only in
|
|
12
|
+
* which issue number triggered them collapse to the same signature.
|
|
13
|
+
*/
|
|
14
|
+
import { createHash } from "node:crypto";
|
|
15
|
+
// ---------------------------------------------------------------------------
// Normalization
// ---------------------------------------------------------------------------
const ISO_TIMESTAMP_RE = /\b\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:?\d{2})?\b/g;
const UUID_RE = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi;
const HEX_HASH_RE = /\b[0-9a-f]{8,}\b/gi;
const ISSUE_NUMBER_RE = /#\d+/g;
// Match runs of digits anywhere — without word boundaries so embedded
// numbers like "60s" or "v2" collapse too. ISSUE_NUMBER_RE runs first so
// "#42" becomes "#N" before this fires.
const BARE_NUMBER_RE = /\d+/g;
// Match double-quoted or single-quoted strings (non-greedy).
const QUOTED_STRING_RE = /"[^"\n]*"|'[^'\n]*'/g;
/**
 * Normalize an error message into a comparable signature fragment.
 *
 * Replacement order matters and is preserved exactly:
 * quoted strings -> ISO timestamps -> UUIDs -> generic hex hashes ->
 * issue numbers (`#NNN`) -> bare digit runs; whitespace is then collapsed
 * and the result truncated to 200 chars. Falsy input yields "".
 */
export function normalizeErrorMessage(msg) {
    if (!msg) {
        return "";
    }
    const collapsed = msg
        .replace(QUOTED_STRING_RE, "<STR>")
        .replace(ISO_TIMESTAMP_RE, "<TS>")
        .replace(UUID_RE, "<ID>")
        .replace(HEX_HASH_RE, "<HASH>")
        .replace(ISSUE_NUMBER_RE, "#N")
        .replace(BARE_NUMBER_RE, "<N>")
        .replace(/\s+/g, " ")
        .trim();
    return collapsed.slice(0, 200);
}
|
|
53
|
+
/**
 * Build the signature key (pre-hash) in the canonical
 * `${spanName}:${errorType}:${normalizedMessage}` format.
 */
export function buildSignatureKey(spanName, errorType, normalizedMsg) {
    return [spanName, errorType, normalizedMsg].join(":");
}
|
|
60
|
+
/**
 * SHA256 hash of the signature key, truncated to the first 8 hex chars.
 * Stable across runs, suitable for dedup body markers like
 * `**Hash**: \`a1b2c3d4\``.
 */
export function hashSignature(key) {
    const digest = createHash("sha256").update(key).digest("hex");
    return digest.slice(0, 8);
}
|
|
67
|
+
// ---------------------------------------------------------------------------
// Span helpers
// ---------------------------------------------------------------------------
/**
 * Extract the `ralph_hero.error_type` attribute from a span's metadata, with
 * fallback to a hoisted `errorType` field, then the `error_type`/`errorType`
 * metadata keys. Returns `"unknown"` when nothing usable is present.
 */
export function getErrorType(span) {
    if (span.errorType) {
        return span.errorType;
    }
    const attrs = span.metadata ?? {};
    const candidate = attrs["ralph_hero.error_type"] ??
        attrs.error_type ??
        attrs.errorType;
    const isNonEmptyString = typeof candidate === "string" && candidate.length > 0;
    return isNonEmptyString ? candidate : "unknown";
}
|
|
86
|
+
/**
 * Extract the error message from a span. Prefers `message` (Langfuse
 * `statusMessage`), then `metadata.exception.message`, then `metadata.error`,
 * then `metadata.message`; returns "" when none are usable strings.
 */
export function getErrorMessage(span) {
    if (span.message) {
        return span.message;
    }
    const attrs = span.metadata ?? {};
    const exc = attrs.exception;
    const excMessage = Boolean(exc) &&
        typeof exc === "object" &&
        "message" in exc &&
        typeof exc.message === "string"
        ? exc.message
        : undefined;
    if (excMessage !== undefined) {
        return excMessage;
    }
    for (const key of ["error", "message"]) {
        if (typeof attrs[key] === "string") {
            return attrs[key];
        }
    }
    return "";
}
|
|
107
|
+
/**
 * Convert a `LangfuseObservation` to a `SignatureSpan`. Hoists the
 * `ralph_hero.error_type` metadata attribute up to the top-level `errorType`
 * field (only when it is a string) and maps `statusMessage` to `message`.
 */
export function observationToSpan(obs) {
    const attrs = obs.metadata ?? {};
    const rawType = attrs["ralph_hero.error_type"];
    return {
        name: obs.name,
        traceId: obs.traceId,
        startTime: obs.startTime,
        endTime: obs.endTime,
        metadata: attrs,
        errorType: typeof rawType === "string" ? rawType : undefined,
        message: obs.statusMessage,
        level: obs.level,
    };
}
|
|
127
|
+
// ---------------------------------------------------------------------------
// Grouping
// ---------------------------------------------------------------------------
/**
 * Build a Langfuse trace URL, defaulting host to http://localhost:3100
 * (trailing slashes stripped) and project to the `<defaultProjectId>`
 * placeholder when not supplied.
 */
function buildTraceUrl(langfuseHost, projectId, traceId) {
    const base = (langfuseHost ?? "http://localhost:3100").replace(/\/+$/, "");
    return [base, "project", projectId ?? "<defaultProjectId>", "traces", traceId].join("/");
}
|
|
135
|
+
/**
 * Group spans by signature. Returns groups sorted by `count` descending.
 *
 * Spans below `minOccurrences` (default 3) are filtered out. Each group's
 * `sampleSpans` contains up to 3 representative spans, most-recent first.
 *
 * @param spans - SignatureSpan[] to bucket.
 * @param opts - { minOccurrences?, langfuseHost?, projectId? }.
 * @returns SignatureGroup[] (count-descending).
 */
export function groupSpansBySignature(spans, opts = {}) {
    const minOccurrences = opts.minOccurrences ?? 3;
    // hash -> accumulating bucket; the hash is the stable 8-char digest of
    // `${name}:${errorType}:${normalizedMessage}`.
    const buckets = new Map();
    for (const span of spans) {
        const errorType = getErrorType(span);
        const normalized = normalizeErrorMessage(getErrorMessage(span));
        const signature = buildSignatureKey(span.name, errorType, normalized);
        const hash = hashSignature(signature);
        const existing = buckets.get(hash);
        if (existing) {
            existing.count += 1;
            // Track the window edges; latestTraceId follows lastSeen so the
            // issue's example-trace link points at the newest occurrence.
            if (span.startTime > existing.lastSeen) {
                existing.lastSeen = span.startTime;
                existing.latestTraceId = span.traceId;
            }
            if (span.startTime < existing.firstSeen) {
                existing.firstSeen = span.startTime;
            }
            existing.spans.push(span);
        }
        else {
            buckets.set(hash, {
                signature,
                hash,
                count: 1,
                firstSeen: span.startTime,
                lastSeen: span.startTime,
                latestTraceId: span.traceId,
                spans: [span],
            });
        }
    }
    const groups = [];
    for (const bucket of buckets.values()) {
        if (bucket.count < minOccurrences)
            continue;
        // Sort sample spans by startTime desc, keep up to 3.
        // FIX: return 0 on ties — the previous comparator returned -1 for
        // equal startTimes, an inconsistent comparator (a<b and b<a at once)
        // whose resulting order is engine-dependent per the sort contract.
        const sampleSpans = [...bucket.spans]
            .sort((a, b) => {
                if (a.startTime < b.startTime)
                    return 1;
                if (a.startTime > b.startTime)
                    return -1;
                return 0;
            })
            .slice(0, 3);
        groups.push({
            signature: bucket.signature,
            hash: bucket.hash,
            count: bucket.count,
            firstSeen: bucket.firstSeen,
            lastSeen: bucket.lastSeen,
            exampleTraceUrl: buildTraceUrl(opts.langfuseHost, opts.projectId, bucket.latestTraceId),
            sampleSpans,
        });
    }
    groups.sort((a, b) => b.count - a.count);
    return groups;
}
|
|
194
|
+
//# sourceMappingURL=error-signature.js.map
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Minimal Langfuse HTTP client for querying traces and observations.
|
|
3
|
+
*
|
|
4
|
+
* Used by `ralph_hero__collate_debug` to fetch error spans emitted by the
|
|
5
|
+
* MCP server's OTel pipeline (see `telemetry.ts`). Authenticates via HTTP
|
|
6
|
+
* basic auth with `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY`.
|
|
7
|
+
*
|
|
8
|
+
* No SDK dependency — uses Node's native `fetch` (Node 20+).
|
|
9
|
+
*
|
|
10
|
+
* Reference: https://langfuse.com/docs/api
|
|
11
|
+
*/
|
|
12
|
+
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
const DEFAULT_HOST = "http://localhost:3100";
/**
 * Encode `publicKey:secretKey` as an HTTP basic-auth header value.
 * Relies on the global `Buffer` available in Node 20+.
 */
function buildAuthHeader(publicKey, secretKey) {
    const encoded = Buffer.from(`${publicKey}:${secretKey}`, "utf-8").toString("base64");
    return `Basic ${encoded}`;
}
|
|
22
|
+
/**
 * Copy `params` into `url.searchParams`, stringifying each value and
 * skipping entries that are null/undefined. A missing `params` is a no-op.
 * Mutates `url` in place.
 */
function appendQueryParams(url, params) {
    if (!params) {
        return;
    }
    for (const [name, raw] of Object.entries(params)) {
        if (raw !== undefined && raw !== null) {
            url.searchParams.set(name, String(raw));
        }
    }
}
|
|
31
|
+
/**
 * Create a Langfuse HTTP client.
 *
 * Throws on construction if `publicKey` or `secretKey` are missing (in args
 * and in env), because every endpoint requires authentication.
 *
 * @param options - { host?, publicKey?, secretKey?, fetchImpl? }; each falls
 *   back to LANGFUSE_HOST / LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY env
 *   vars, and `fetchImpl` to the global `fetch` (Node 20+).
 * @returns { host, queryTraces, queryObservations, queryAllObservations }
 */
export function createLangfuseClient(options = {}) {
    // Strip trailing slashes so path concatenation below never doubles "/".
    const host = (options.host ?? process.env.LANGFUSE_HOST ?? DEFAULT_HOST)
        .replace(/\/+$/, "");
    const publicKey = options.publicKey ?? process.env.LANGFUSE_PUBLIC_KEY;
    const secretKey = options.secretKey ?? process.env.LANGFUSE_SECRET_KEY;
    // Injectable fetch for tests; defaults to Node's native fetch.
    const fetchImpl = options.fetchImpl ?? fetch;
    if (!publicKey || !secretKey) {
        throw new Error("Langfuse credentials missing: set LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY (or pass via options).");
    }
    // Computed once; every request reuses the same basic-auth header.
    const authHeader = buildAuthHeader(publicKey, secretKey);
    /**
     * GET `${host}${path}` with optional query params. Throws on any
     * non-2xx response, embedding up to 200 chars of the response body in
     * the error message (body read failures are ignored).
     */
    async function request(path, params) {
        const url = new URL(`${host}${path}`);
        appendQueryParams(url, params);
        const response = await fetchImpl(url.toString(), {
            method: "GET",
            headers: {
                Authorization: authHeader,
                Accept: "application/json",
            },
        });
        if (!response.ok) {
            // Best-effort body capture for diagnostics; never let the text()
            // read itself throw.
            const bodyText = await response.text().catch(() => "");
            throw new Error(`Langfuse request failed: ${response.status} ${response.statusText}` +
                (bodyText ? ` — ${bodyText.slice(0, 200)}` : ""));
        }
        return (await response.json());
    }
    // Single-page trace query (see https://langfuse.com/docs/api).
    async function queryTraces(params = {}) {
        return request("/api/public/traces", params);
    }
    // Single-page observation query.
    async function queryObservations(params = {}) {
        return request("/api/public/observations", params);
    }
    /**
     * Paginate observations, concatenating `data` across pages. Stops when:
     * maxPages is reached, a page comes back empty, `meta.totalPages` says
     * we are on the last page, or a short page (< limit) is returned.
     */
    async function queryAllObservations(params = {}, maxPages = 10) {
        const all = [];
        const limit = params.limit ?? 100;
        let page = params.page ?? 1;
        for (let i = 0; i < maxPages; i++) {
            const result = await queryObservations({ ...params, page, limit });
            if (!result.data || result.data.length === 0)
                break;
            all.push(...result.data);
            const totalPages = result.meta?.totalPages;
            if (totalPages !== undefined && page >= totalPages)
                break;
            if (result.data.length < limit)
                break;
            page += 1;
        }
        return all;
    }
    return {
        host,
        queryTraces,
        queryObservations,
        queryAllObservations,
    };
}
|
|
95
|
+
//# sourceMappingURL=langfuse-client.js.map
|
|
@@ -1,18 +1,30 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* MCP tools for debug log collation and statistics.
|
|
3
3
|
*
|
|
4
|
-
* Provides
|
|
5
|
-
*
|
|
4
|
+
* Provides:
|
|
5
|
+
* - `ralph_hero__collate_debug` (v2 — queries Langfuse for error spans,
|
|
6
|
+
* groups by normalized signature, dedupes against open `debug-auto`
|
|
7
|
+
* issues, and either creates new issues or appends occurrence comments
|
|
8
|
+
* when `dryRun=false`)
|
|
9
|
+
* - `ralph_hero__debug_stats` (v1 — aggregates JSONL logs; preserved for
|
|
10
|
+
* backward compat, not extended)
|
|
6
11
|
*
|
|
7
|
-
* Only registered when RALPH_DEBUG=true
|
|
12
|
+
* Only registered when `RALPH_DEBUG=true`. JSONL helpers below still back
|
|
13
|
+
* `debug_stats`; the new Langfuse path is fully separate.
|
|
8
14
|
*/
|
|
9
15
|
import { readdir, readFile } from "node:fs/promises";
|
|
10
|
-
import {
|
|
11
|
-
import {
|
|
16
|
+
import { readFileSync } from "node:fs";
|
|
17
|
+
import { join, resolve, dirname } from "node:path";
|
|
18
|
+
import { fileURLToPath } from "node:url";
|
|
19
|
+
import { homedir, platform, release } from "node:os";
|
|
12
20
|
import { createHash } from "node:crypto";
|
|
13
21
|
import { z } from "zod";
|
|
14
22
|
import { toolSuccess, toolError } from "../types.js";
|
|
15
23
|
import { zBoolish } from "../lib/zod-helpers.js";
|
|
24
|
+
import { createLangfuseClient, } from "../lib/langfuse-client.js";
|
|
25
|
+
import { groupSpansBySignature, observationToSpan, } from "../lib/error-signature.js";
|
|
26
|
+
import { buildIssueBody, buildCommentBody, } from "../lib/debug-issue-shape.js";
|
|
27
|
+
import { resolveConfig } from "../lib/helpers.js";
|
|
16
28
|
// ---------------------------------------------------------------------------
|
|
17
29
|
// JSONL Parsing
|
|
18
30
|
// ---------------------------------------------------------------------------
|
|
@@ -174,123 +186,319 @@ export function aggregateStats(events, groupBy) {
|
|
|
174
186
|
groups,
|
|
175
187
|
};
|
|
176
188
|
}
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
189
|
+
/**
 * Look for an open `debug-auto` issue whose body carries the given 8-char
 * hash on a `**Hash**: \`<hash>\`` line. Only issues updated within the last
 * `withinDays` are considered (default 7), matching the spec's "dedup window".
 *
 * Returns the first matching issue `{ number, id }` or `null` if no match.
 * Search failures (e.g. rate limits) are logged and swallowed — the caller
 * will create a duplicate, which the next run collapses via comment.
 */
export async function findExistingDebugIssue(client, owner, repo, hash, withinDays = 7) {
    const windowStart = new Date(Date.now() - withinDays * 24 * 60 * 60 * 1000);
    const sinceIso = windowStart.toISOString().slice(0, 10); // YYYY-MM-DD
    // The hash marker `**Hash**: ` is too punctuation-heavy for GitHub's text
    // search index — search on the bare 8-char hex; the marker line is
    // verified against the issue body below.
    const q = `repo:${owner}/${repo} is:issue is:open label:debug-auto ${hash} in:body updated:>=${sinceIso}`;
    try {
        const data = await client.query(`query DebugIssueSearch($q: String!) {
      search(query: $q, type: ISSUE, first: 10) {
        nodes {
          ... on Issue {
            number
            id
            body
          }
        }
      }
    }`, { q });
        const marker = new RegExp(`^\\*\\*Hash\\*\\*: \`${hash}\``, "m");
        const match = (data.search.nodes ?? []).find((node) => typeof node.number === "number" &&
            typeof node.id === "string" &&
            typeof node.body === "string" &&
            marker.test(node.body));
        return match ? { number: match.number, id: match.id } : null;
    }
    catch (error) {
        console.error(`[debug-tools] findExistingDebugIssue search failed (treating as no-match): ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
|
|
234
|
+
/**
 * Resolve the repository's GraphQL node ID, memoized in the client's
 * SessionCache for 1 hour. Throws when the repository cannot be found.
 */
async function resolveRepoNodeId(client, owner, repo) {
    const cacheKey = `repo-node-id:${owner}/${repo}`;
    const hit = client.getCache().get(cacheKey);
    if (hit) {
        return hit;
    }
    const result = await client.query(`query($owner: String!, $repo: String!) {
    repository(owner: $owner, name: $repo) { id }
  }`, { owner, repo });
    const nodeId = result.repository?.id;
    if (!nodeId) {
        throw new Error(`Repository ${owner}/${repo} not found`);
    }
    client.getCache().set(cacheKey, nodeId, 60 * 60 * 1000); // 1 hour
    return nodeId;
}
|
|
252
|
+
/**
 * Resolve repo-scoped label IDs for the given label names. The repo's first
 * 100 labels are cached for 5 minutes. Names that don't exist in the repo
 * are skipped silently — issue creation still proceeds, the label just
 * won't be applied. Returned IDs follow the order of `labelNames`.
 */
async function resolveLabelIds(client, owner, repo, labelNames) {
    const cacheKey = `repo-labels:${owner}/${repo}`;
    let labels = client.getCache().get(cacheKey);
    if (!labels) {
        const result = await client.query(`query($owner: String!, $repo: String!) {
    repository(owner: $owner, name: $repo) {
      labels(first: 100) { nodes { id name } }
    }
  }`, { owner, repo });
        labels = result.repository?.labels.nodes ?? [];
        client.getCache().set(cacheKey, labels, 5 * 60 * 1000);
    }
    const resolved = [];
    for (const wanted of labelNames) {
        // First match wins (mirrors Array.prototype.find semantics).
        const id = labels.find((label) => label.name === wanted)?.id;
        if (typeof id === "string") {
            resolved.push(id);
        }
    }
    return resolved;
}
|
|
273
|
+
/**
 * Create a fresh `debug-auto` issue for a new signature. Returns the new
 * issue `{ id, number, url }` and caches its node ID for 30 minutes.
 * Project board placement (Backlog state) is delegated to the existing
 * route-issues.yml workflow — we set labels and the body marker; the
 * workflow handles board routing.
 */
async function createDebugIssue(client, owner, repo, title, body) {
    const repoId = await resolveRepoNodeId(client, owner, repo);
    const labelIds = await resolveLabelIds(client, owner, repo, [
        "debug-auto",
        "ralph-self-report",
    ]);
    const variables = {
        repoId,
        title,
        body,
        // GraphQL treats null as "no labels"; an empty list is normalized.
        labelIds: labelIds.length ? labelIds : null,
    };
    const result = await client.mutate(`mutation($repoId: ID!, $title: String!, $body: String!, $labelIds: [ID!]) {
    createIssue(input: {
      repositoryId: $repoId,
      title: $title,
      body: $body,
      labelIds: $labelIds
    }) {
      issue { id number url }
    }
  }`, variables);
    const issue = result.createIssue.issue;
    const cacheKey = `issue-node-id:${owner}/${repo}#${issue.number}`;
    client.getCache().set(cacheKey, issue.id, 30 * 60 * 1000);
    return issue;
}
|
|
306
|
+
/**
 * Append an occurrence-update comment to an existing `debug-auto` issue.
 * Returns the new comment's node ID.
 */
async function commentOnDebugIssue(client, issueNodeId, body) {
    const mutation = `mutation($subjectId: ID!, $body: String!) {
    addComment(input: { subjectId: $subjectId, body: $body }) {
      commentEdge { node { id } }
    }
  }`;
    const result = await client.mutate(mutation, { subjectId: issueNodeId, body });
    return result.addComment.commentEdge.node.id;
}
|
|
317
|
+
/**
 * Read the MCP server semver from the package.json two directories above
 * this module. Falls back to `"unknown"` if the file is missing, unreadable,
 * or carries no `version` field. Mirrors the approach used in
 * `telemetry.ts:resolveServiceVersion` but kept local so the debug surface
 * has zero cross-dependency on telemetry init order.
 */
function readMcpServerVersion() {
    try {
        const moduleDir = dirname(fileURLToPath(import.meta.url));
        const raw = readFileSync(resolve(moduleDir, "..", "..", "package.json"), "utf8");
        return JSON.parse(raw).version ?? "unknown";
    }
    catch {
        return "unknown";
    }
}
|
|
335
|
+
/**
 * Default `IssueShapeEnv` builder — captures the MCP server version, the
 * Node version, and a short OS descriptor at call time. When the supplied
 * version is the `"unknown"` sentinel, it is resolved from package.json via
 * `readMcpServerVersion`. Exposed so tests can override (e.g. deterministic
 * version stamps).
 */
function defaultEnv(mcpVersion) {
    const resolvedVersion = mcpVersion === "unknown" ? readMcpServerVersion() : mcpVersion;
    return {
        mcpVersion: resolvedVersion,
        nodeVersion: process.version,
        os: `${platform()} ${release()}`,
    };
}
|
|
347
|
+
/**
 * Iterate signature groups, dedupe each against existing `debug-auto`
 * issues, and either create a new issue or post an occurrence comment.
 * Returns the counts the tool surfaces back to the caller. Per-group
 * failures are recorded and surfaced but do NOT abort the loop — partial
 * success is preferable to losing the whole run.
 *
 * @param client - GitHub GraphQL client.
 * @param owner - Repository owner login.
 * @param repo - Repository name.
 * @param groups - Signature groups from `groupSpansBySignature`.
 * @param env - `IssueShapeEnv` stamped into newly created issue bodies.
 * @returns `{ issuesCreated, issuesUpdated, results }`.
 */
export async function fileOrCommentForGroups(client, owner, repo, groups, env) {
    const outcomes = [];
    let createdCount = 0;
    let updatedCount = 0;
    for (const group of groups) {
        try {
            const match = await findExistingDebugIssue(client, owner, repo, group.hash);
            if (match) {
                // Known signature — append an occurrence update instead of filing a duplicate.
                const note = buildCommentBody(group, group.count, group.exampleTraceUrl);
                await commentOnDebugIssue(client, match.id, note);
                updatedCount += 1;
                outcomes.push({
                    hash: group.hash,
                    action: "commented",
                    issueNumber: match.number,
                });
            }
            else {
                // Fresh signature — file a new debug-auto issue.
                const { title, body } = buildIssueBody(group, env);
                const issue = await createDebugIssue(client, owner, repo, title, body);
                createdCount += 1;
                outcomes.push({
                    hash: group.hash,
                    action: "created",
                    issueNumber: issue.number,
                });
            }
        }
        catch (error) {
            // Record and continue — one bad group must not sink the batch.
            outcomes.push({
                hash: group.hash,
                action: "error",
                error: error instanceof Error ? error.message : String(error),
            });
        }
    }
    return { issuesCreated: createdCount, issuesUpdated: updatedCount, results: outcomes };
}
|
|
391
|
+
// Swappable factory for the Langfuse client so tests can inject fakes.
let langfuseClientFactory = () => createLangfuseClient();
/**
 * Override the Langfuse client factory. Returns a disposer that restores the
 * previous factory (used by tests).
 *
 * @param factory - Replacement zero-arg factory returning a Langfuse client.
 * @returns Disposer that reinstates whatever factory was active before.
 */
export function setLangfuseClientFactory(factory) {
    const previousFactory = langfuseClientFactory;
    langfuseClientFactory = factory;
    return function restorePreviousFactory() {
        langfuseClientFactory = previousFactory;
    };
}
|
|
403
|
+
/**
 * Register the debug-surface MCP tools on `server`.
 *
 * NOTE(review): only the `ralph_hero__collate_debug` registration is visible
 * in this chunk — the function body continues (and closes) further down the
 * file with additional tool registrations.
 *
 * @param server - MCP server exposing `tool(name, description, schema, handler)`.
 * @param client - GitHub GraphQL client used for issue dedup / creation.
 * @param mcpVersion - Version stamp for issue bodies; the default "unknown"
 *   makes `defaultEnv` fall back to a package.json lookup.
 */
export function registerDebugTools(server, client, mcpVersion = "unknown") {
    // Not used by collate_debug; presumably consumed by tool registrations
    // later in this function — confirm against the rest of the file.
    const logDir = join(homedir(), ".ralph-hero", "logs");
    // -------------------------------------------------------------------------
    // ralph_hero__collate_debug (v2 — Langfuse + GitHub dedup)
    // -------------------------------------------------------------------------
    server.tool("ralph_hero__collate_debug", "Query Langfuse for error spans in a time window, normalize messages, group by signature, then either return the grouped report (dryRun=true) or dedupe against open `debug-auto` issues and create / comment (dryRun=false, default). Returns: { since, errorGroups, totalOccurrences, dryRun, issuesCreated?, issuesUpdated?, groups[] }.", {
        since: z
            .string()
            .optional()
            .describe("ISO date string. Only spans whose startTime >= this value are considered (default: 24h ago)."),
        dryRun: zBoolish()
            .optional()
            .default(false)
            .describe("If true, return the grouped report without touching GitHub. Default false — creates / comments on `debug-auto` issues per signature."),
        minOccurrences: z
            .number()
            .int()
            .min(1)
            .optional()
            .default(3)
            .describe("Filter out signatures with fewer occurrences (default: 3)."),
        projectNumber: z
            .number()
            .optional()
            .describe("Project number override. Currently informational — issues land in the configured project via the existing route-issues workflow."),
    }, async (args) => {
        try {
            // `??` re-applies the zod defaults defensively in case the schema
            // layer is bypassed — presumably belt-and-braces; confirm whether
            // the MCP transport always runs the zod defaults.
            const dryRun = args.dryRun ?? false;
            const minOccurrences = args.minOccurrences ?? 3;
            // Default window: last 24 hours.
            const sinceDate = args.since
                ? new Date(args.since)
                : new Date(Date.now() - 24 * 60 * 60 * 1000);
            // new Date("garbage") yields an Invalid Date, not a throw —
            // reject it explicitly before querying.
            if (Number.isNaN(sinceDate.getTime())) {
                return toolError(`Invalid 'since' value: ${args.since}`);
            }
            let langfuse;
            try {
                langfuse = langfuseClientFactory();
            }
            catch (error) {
                // Typically missing Langfuse credentials/config — surface as a
                // tool error rather than an unhandled throw.
                return toolError(`Langfuse client unavailable: ${error instanceof Error ? error.message : String(error)}`);
            }
            const fromStartTime = sinceDate.toISOString();
            // Pull every ERROR-level span since the cutoff. `limit` is the
            // page size; queryAllObservations presumably paginates through all
            // pages — confirm in langfuse-client.js.
            const observations = await langfuse.queryAllObservations({
                type: "SPAN",
                level: "ERROR",
                fromStartTime,
                limit: 100,
            });
            const spans = observations.map(observationToSpan);
            const groups = groupSpansBySignature(spans, {
                minOccurrences,
                langfuseHost: langfuse.host,
            });
            const totalOccurrences = groups.reduce((sum, g) => sum + g.count, 0);
            // Compact projection shared by both the dryRun and live responses;
            // sample spans capped at 3 to keep payloads small.
            const summaryGroups = groups.map((g) => ({
                signature: g.signature,
                hash: g.hash,
                count: g.count,
                firstSeen: g.firstSeen,
                lastSeen: g.lastSeen,
                exampleTraceUrl: g.exampleTraceUrl,
                sampleSpans: g.sampleSpans.slice(0, 3),
            }));
            if (dryRun) {
                // Report-only path: no GitHub reads or writes at all.
                return toolSuccess({
                    since: fromStartTime,
                    errorGroups: groups.length,
                    totalOccurrences,
                    dryRun: true,
                    groups: summaryGroups,
                });
            }
            // dryRun=false — file or comment per signature.
            let owner;
            let repo;
            try {
                const resolved = resolveConfig(client, {});
                owner = resolved.owner;
                repo = resolved.repo;
            }
            catch (error) {
                return toolError(`Cannot resolve owner/repo for issue creation: ${error instanceof Error ? error.message : String(error)}`);
            }
            const env = defaultEnv(mcpVersion);
            const fileResult = await fileOrCommentForGroups(client, owner, repo, groups, env);
            return toolSuccess({
                since: fromStartTime,
                errorGroups: groups.length,
                totalOccurrences,
                dryRun: false,
                issuesCreated: fileResult.issuesCreated,
                issuesUpdated: fileResult.issuesUpdated,
                results: fileResult.results,
                groups: summaryGroups,
            });
        }
        catch (error) {
            // Catch-all so the tool always returns a structured error payload.
            return toolError(`Failed to collate debug spans: ${error instanceof Error ? error.message : String(error)}`);
        }
    });
|
|
296
504
|
// -------------------------------------------------------------------------
|