@myskyline_ai/ccdebug 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. package/LICENSE +201 -0
  2. package/README.md +129 -0
  3. package/dist/cli.d.ts +9 -0
  4. package/dist/cli.d.ts.map +1 -0
  5. package/dist/cli.js +674 -0
  6. package/dist/html-generator.d.ts +24 -0
  7. package/dist/html-generator.d.ts.map +1 -0
  8. package/dist/html-generator.js +141 -0
  9. package/dist/index-generator.d.ts +29 -0
  10. package/dist/index-generator.d.ts.map +1 -0
  11. package/dist/index-generator.js +271 -0
  12. package/dist/index.d.ts +7 -0
  13. package/dist/index.d.ts.map +1 -0
  14. package/dist/index.js +28 -0
  15. package/dist/interceptor-loader.js +59 -0
  16. package/dist/interceptor.d.ts +46 -0
  17. package/dist/interceptor.d.ts.map +1 -0
  18. package/dist/interceptor.js +555 -0
  19. package/dist/log-file-manager.d.ts +15 -0
  20. package/dist/log-file-manager.d.ts.map +1 -0
  21. package/dist/log-file-manager.js +41 -0
  22. package/dist/shared-conversation-processor.d.ts +114 -0
  23. package/dist/shared-conversation-processor.d.ts.map +1 -0
  24. package/dist/shared-conversation-processor.js +663 -0
  25. package/dist/token-extractor.js +28 -0
  26. package/dist/types.d.ts +95 -0
  27. package/dist/types.d.ts.map +1 -0
  28. package/dist/types.js +3 -0
  29. package/frontend/dist/index.global.js +1522 -0
  30. package/frontend/dist/styles.css +985 -0
  31. package/frontend/template.html +19 -0
  32. package/package.json +83 -0
  33. package/web/debug.html +14 -0
  34. package/web/dist/assets/index-BIP9r3RA.js +48 -0
  35. package/web/dist/assets/index-BIP9r3RA.js.map +1 -0
  36. package/web/dist/assets/index-De3gn-G-.css +1 -0
  37. package/web/dist/favicon.svg +4 -0
  38. package/web/dist/index.html +15 -0
  39. package/web/index.html +14 -0
  40. package/web/package.json +47 -0
  41. package/web/server/conversation-parser.d.ts +47 -0
  42. package/web/server/conversation-parser.d.ts.map +1 -0
  43. package/web/server/conversation-parser.js +564 -0
  44. package/web/server/conversation-parser.js.map +1 -0
  45. package/web/server/index.d.ts +16 -0
  46. package/web/server/index.d.ts.map +1 -0
  47. package/web/server/index.js +60 -0
  48. package/web/server/index.js.map +1 -0
  49. package/web/server/log-file-manager.d.ts +98 -0
  50. package/web/server/log-file-manager.d.ts.map +1 -0
  51. package/web/server/log-file-manager.js +512 -0
  52. package/web/server/log-file-manager.js.map +1 -0
  53. package/web/server/src/types/index.d.ts +68 -0
  54. package/web/server/src/types/index.d.ts.map +1 -0
  55. package/web/server/src/types/index.js +3 -0
  56. package/web/server/src/types/index.js.map +1 -0
  57. package/web/server/test-path.js +48 -0
  58. package/web/server/web-server.d.ts +41 -0
  59. package/web/server/web-server.d.ts.map +1 -0
  60. package/web/server/web-server.js +807 -0
  61. package/web/server/web-server.js.map +1 -0
@@ -0,0 +1,663 @@
"use strict";
// tsc-generated CommonJS prelude: mark this module as transpiled ESM and
// pre-declare the export binding (assigned at the bottom of the file).
Object.defineProperty(exports, "__esModule", { value: true });
exports.SharedConversationProcessor = void 0;
+ /**
5
+ * Shared conversation processing functionality for both frontend and backend
6
+ */
7
+ class SharedConversationProcessor {
8
+ /**
9
+ * Process raw JSONL pairs into ProcessedPairs
10
+ */
11
+ processRawPairs(rawPairs) {
12
+ if (!rawPairs || rawPairs.length === 0) {
13
+ return [];
14
+ }
15
+ const processedPairs = [];
16
+ for (let i = 0; i < rawPairs.length; i++) {
17
+ const pair = rawPairs[i];
18
+ if (!pair?.request || !pair?.response) {
19
+ continue;
20
+ }
21
+ try {
22
+ // Detect streaming
23
+ const isStreaming = !!pair.response.body_raw;
24
+ let response;
25
+ let streamFormat = null;
26
+ if (pair.response.body_raw) {
27
+ // Parse streaming response and detect format
28
+ streamFormat = this.isBedrockResponse(pair.response.body_raw) ? "bedrock" : "standard";
29
+ response = this.parseStreamingResponse(pair.response.body_raw);
30
+ }
31
+ else if (pair.response.body) {
32
+ response = pair.response.body;
33
+ }
34
+ else {
35
+ continue;
36
+ }
37
+ // Extract model from request headers or URL
38
+ const model = this.extractModel(pair);
39
+ processedPairs.push({
40
+ id: `${pair.request.timestamp || Date.now()}_${Math.random()}`,
41
+ timestamp: new Date((pair.request.timestamp || Date.now()) * 1000).toISOString(),
42
+ request: pair.request.body,
43
+ response,
44
+ model,
45
+ isStreaming,
46
+ rawStreamData: pair.response.body_raw,
47
+ streamFormat,
48
+ });
49
+ }
50
+ catch (error) {
51
+ console.warn(`Failed to process raw pair at index ${i}:`, error);
52
+ // Continue processing other pairs
53
+ }
54
+ }
55
+ return processedPairs;
56
+ }
57
+ /**
58
+ * Detect if the response is from Bedrock by checking for binary event stream format
59
+ */
60
+ isBedrockResponse(bodyRaw) {
61
+ // Check for AWS EventStream format with binary headers and base64 encoded events
62
+ return bodyRaw.startsWith("\u0000\u0000");
63
+ }
64
+ /**
65
+ * Parse Bedrock binary event stream and extract the standard message events
66
+ */
67
+ parseBedrockStreamingResponse(bodyRaw) {
68
+ if (!bodyRaw || bodyRaw.length === 0) {
69
+ throw new Error("Empty bodyRaw provided to parseBedrockStreamingResponse");
70
+ }
71
+ const events = [];
72
+ let bedrockMetrics = null;
73
+ try {
74
+ // Extract JSON payloads from AWS EventStream format
75
+ // The format contains binary headers followed by JSON payloads
76
+ const jsonChunks = this.extractJsonChunksFromEventStream(bodyRaw);
77
+ for (const jsonChunk of jsonChunks) {
78
+ try {
79
+ const eventPayload = JSON.parse(jsonChunk);
80
+ // Check if this is an event with base64-encoded bytes
81
+ if (eventPayload.bytes) {
82
+ const base64Data = eventPayload.bytes;
83
+ const decodedJson = this.decodeBase64ToUtf8(base64Data);
84
+ const event = JSON.parse(decodedJson);
85
+ events.push(event);
86
+ }
87
+ }
88
+ catch (chunkError) {
89
+ console.warn("Failed to parse JSON chunk:", jsonChunk, chunkError);
90
+ // Continue with other chunks
91
+ }
92
+ }
93
+ // Extract Bedrock metrics from the last event if present
94
+ bedrockMetrics = this.extractBedrockMetrics(bodyRaw);
95
+ }
96
+ catch (error) {
97
+ console.error("Failed to parse Bedrock streaming response:", error);
98
+ throw new Error(`Bedrock streaming response parsing failed: ${error}`);
99
+ }
100
+ return this.buildMessageFromEvents(events, bedrockMetrics);
101
+ }
102
+ /**
103
+ * Decode base64 string to UTF-8, compatible with both browser and Node.js environments
104
+ */
105
+ decodeBase64ToUtf8(base64Data) {
106
+ // Check if we're in a browser environment
107
+ if (typeof window !== "undefined" && typeof atob !== "undefined") {
108
+ // Browser environment - use atob()
109
+ return atob(base64Data);
110
+ }
111
+ else if (typeof Buffer !== "undefined") {
112
+ // Node.js environment - use Buffer
113
+ return Buffer.from(base64Data, "base64").toString("utf-8");
114
+ }
115
+ else {
116
+ // Fallback implementation for environments without either
117
+ throw new Error("Base64 decoding not supported in this environment");
118
+ }
119
+ }
120
+ /**
121
+ * Extract JSON chunks from AWS EventStream binary format
122
+ */
123
+ extractJsonChunksFromEventStream(bodyRaw) {
124
+ if (!bodyRaw || bodyRaw.length === 0) {
125
+ return [];
126
+ }
127
+ const jsonChunks = [];
128
+ const pattern = 'event{"bytes":';
129
+ let searchIndex = 0;
130
+ while (searchIndex < bodyRaw.length) {
131
+ // Find the next occurrence of the pattern
132
+ const patternIndex = bodyRaw.indexOf(pattern, searchIndex);
133
+ if (patternIndex === -1) {
134
+ break; // No more patterns found
135
+ }
136
+ // Start extracting JSON from the '{' after 'event'
137
+ const jsonStartIndex = patternIndex + 5; // Skip 'event' prefix
138
+ let braceCount = 0;
139
+ let jsonEndIndex = -1;
140
+ // Find the matching closing brace
141
+ for (let i = jsonStartIndex; i < bodyRaw.length; i++) {
142
+ const char = bodyRaw[i];
143
+ if (char === "{") {
144
+ braceCount++;
145
+ }
146
+ else if (char === "}") {
147
+ braceCount--;
148
+ if (braceCount === 0) {
149
+ jsonEndIndex = i;
150
+ break;
151
+ }
152
+ }
153
+ }
154
+ // Extract the JSON chunk if we found a complete object
155
+ if (jsonEndIndex !== -1) {
156
+ const jsonChunk = bodyRaw.substring(jsonStartIndex, jsonEndIndex + 1);
157
+ jsonChunks.push(jsonChunk);
158
+ searchIndex = jsonEndIndex + 1;
159
+ }
160
+ else {
161
+ // No matching brace found, move past this pattern
162
+ searchIndex = patternIndex + pattern.length;
163
+ }
164
+ }
165
+ return jsonChunks;
166
+ }
167
+ /**
168
+ * Extract Bedrock invocation metrics from the response
169
+ */
170
+ extractBedrockMetrics(bodyRaw) {
171
+ try {
172
+ // Look for the amazon-bedrock-invocationMetrics in the last decoded event
173
+ const metricsMatch = bodyRaw.match(/"amazon-bedrock-invocationMetrics":\s*(\{[^}]+\})/);
174
+ if (metricsMatch && metricsMatch[1]) {
175
+ return JSON.parse(metricsMatch[1]);
176
+ }
177
+ }
178
+ catch (e) {
179
+ // Skip invalid metrics
180
+ }
181
+ return null;
182
+ }
183
+ /**
184
+ * Parse streaming response from raw SSE data
185
+ */
186
+ parseStreamingResponse(bodyRaw) {
187
+ if (this.isBedrockResponse(bodyRaw)) {
188
+ return this.parseBedrockStreamingResponse(bodyRaw);
189
+ }
190
+ else {
191
+ return this.parseStandardStreamingResponse(bodyRaw);
192
+ }
193
+ }
194
+ /**
195
+ * Parse standard Anthropic API streaming response
196
+ */
197
+ parseStandardStreamingResponse(bodyRaw) {
198
+ if (!bodyRaw || bodyRaw.length === 0) {
199
+ throw new Error("Empty bodyRaw provided to parseStandardStreamingResponse");
200
+ }
201
+ const lines = bodyRaw.split("\n");
202
+ const events = [];
203
+ for (const line of lines) {
204
+ if (!line.startsWith("data: "))
205
+ continue;
206
+ const data = line.substring(6).trim();
207
+ if (data === "[DONE]")
208
+ break;
209
+ try {
210
+ const event = JSON.parse(data);
211
+ events.push(event);
212
+ }
213
+ catch (e) {
214
+ console.warn("Failed to parse SSE event:", data, e);
215
+ // Skip invalid JSON
216
+ }
217
+ }
218
+ return this.buildMessageFromEvents(events);
219
+ }
220
+ /**
221
+ * Build a Message object from a list of streaming events
222
+ */
223
+ buildMessageFromEvents(events, bedrockMetrics) {
224
+ // Initialize with defaults
225
+ let message = {
226
+ id: "",
227
+ type: "message",
228
+ role: "assistant",
229
+ content: [],
230
+ model: "",
231
+ stop_reason: null,
232
+ stop_sequence: null,
233
+ usage: {
234
+ input_tokens: 0,
235
+ output_tokens: 0,
236
+ cache_creation_input_tokens: null,
237
+ cache_read_input_tokens: null,
238
+ server_tool_use: null,
239
+ service_tier: null,
240
+ },
241
+ };
242
+ // Track content blocks being built
243
+ const contentBlocks = [];
244
+ let currentBlockIndex = -1;
245
+ for (const event of events) {
246
+ switch (event.type) {
247
+ case "message_start":
248
+ // Initialize message with base structure
249
+ message = { ...message, ...event.message };
250
+ break;
251
+ case "content_block_start":
252
+ // Start a new content block
253
+ currentBlockIndex = event.index;
254
+ contentBlocks[currentBlockIndex] = { ...event.content_block };
255
+ break;
256
+ case "content_block_delta":
257
+ // Update the current content block
258
+ if (currentBlockIndex >= 0 && contentBlocks[currentBlockIndex]) {
259
+ const block = contentBlocks[currentBlockIndex];
260
+ const delta = event.delta;
261
+ switch (delta.type) {
262
+ case "text_delta":
263
+ if (block.type === "text") {
264
+ block.text = (block.text || "") + delta.text;
265
+ }
266
+ break;
267
+ case "input_json_delta":
268
+ if (block.type === "tool_use") {
269
+ // Accumulate JSON string for tool_use blocks
270
+ const toolBlock = block;
271
+ if (typeof toolBlock.input === "string") {
272
+ toolBlock.input = toolBlock.input + delta.partial_json;
273
+ }
274
+ else {
275
+ // Initialize as string if not already
276
+ toolBlock.input = delta.partial_json;
277
+ }
278
+ }
279
+ break;
280
+ case "thinking_delta":
281
+ if (block.type === "thinking") {
282
+ block.thinking =
283
+ (block.thinking || "") + delta.thinking;
284
+ }
285
+ break;
286
+ case "signature_delta":
287
+ if (block.type === "thinking") {
288
+ block.signature =
289
+ (block.signature || "") + delta.signature;
290
+ }
291
+ break;
292
+ case "citations_delta":
293
+ // Handle citations delta if needed
294
+ break;
295
+ }
296
+ }
297
+ break;
298
+ case "content_block_stop":
299
+ // Finalize content block
300
+ if (currentBlockIndex >= 0 && contentBlocks[currentBlockIndex]) {
301
+ const block = contentBlocks[currentBlockIndex];
302
+ // Parse JSON input if it's a tool_use block
303
+ if (block.type === "tool_use") {
304
+ const toolBlock = block;
305
+ if (typeof toolBlock.input === "string") {
306
+ try {
307
+ toolBlock.input = JSON.parse(toolBlock.input);
308
+ }
309
+ catch (e) {
310
+ // Keep as string if JSON parsing fails
311
+ console.warn("Failed to parse tool input JSON:", toolBlock.input);
312
+ }
313
+ }
314
+ }
315
+ }
316
+ break;
317
+ case "message_delta":
318
+ // Update message-level fields
319
+ if (event.delta.stop_reason) {
320
+ message.stop_reason = event.delta.stop_reason;
321
+ }
322
+ if (event.delta.stop_sequence) {
323
+ message.stop_sequence = event.delta.stop_sequence;
324
+ }
325
+ if (event.usage) {
326
+ // Preserve existing input_tokens if not provided in this delta
327
+ // Input tokens are typically only sent once and shouldn't change
328
+ const currentInputTokens = message.usage?.input_tokens ?? 0;
329
+ message.usage = {
330
+ input_tokens: event.usage.input_tokens ?? currentInputTokens,
331
+ output_tokens: event.usage.output_tokens ?? message.usage?.output_tokens ?? 0,
332
+ cache_creation_input_tokens: event.usage.cache_creation_input_tokens ?? message.usage?.cache_creation_input_tokens ?? null,
333
+ cache_read_input_tokens: event.usage.cache_read_input_tokens ?? message.usage?.cache_read_input_tokens ?? null,
334
+ server_tool_use: event.usage.server_tool_use ?? message.usage?.server_tool_use ?? null,
335
+ service_tier: null, // MessageDeltaUsage doesn't have service_tier
336
+ };
337
+ }
338
+ break;
339
+ case "message_stop":
340
+ // Finalize message
341
+ break;
342
+ }
343
+ }
344
+ // Set the final content blocks
345
+ message.content = contentBlocks.filter((block) => block != null);
346
+ // If we have bedrock metrics, merge them into usage
347
+ if (bedrockMetrics && message.usage) {
348
+ message.usage.input_tokens = bedrockMetrics.inputTokenCount;
349
+ message.usage.output_tokens = bedrockMetrics.outputTokenCount;
350
+ }
351
+ return message;
352
+ }
353
+ /**
354
+ * Extract model name from the raw pair
355
+ */
356
+ extractModel(pair) {
357
+ // Try to extract from Bedrock URL
358
+ if (pair.request?.url && pair.request.url.includes("bedrock-runtime")) {
359
+ const urlMatch = pair.request.url.match(/\/model\/([^\/]+)/);
360
+ if (urlMatch && urlMatch[1]) {
361
+ return this.normalizeModelName(urlMatch[1]);
362
+ }
363
+ }
364
+ // Try to get model from request body
365
+ if (pair.request?.body && typeof pair.request.body === "object" && "model" in pair.request.body) {
366
+ return this.normalizeModelName(pair.request.body.model);
367
+ }
368
+ // Try to get from response
369
+ if (pair.response?.body && typeof pair.response.body === "object" && "model" in pair.response.body) {
370
+ return this.normalizeModelName(pair.response.body.model);
371
+ }
372
+ // Default
373
+ return "unknown";
374
+ }
375
+ /**
376
+ * Normalize model names from different formats to a consistent display format
377
+ */
378
+ normalizeModelName(modelName) {
379
+ if (!modelName)
380
+ return "unknown";
381
+ // Handle Bedrock model names
382
+ if (modelName.startsWith("us.anthropic.")) {
383
+ // Convert "us.anthropic.claude-3-5-sonnet-20241022-v1:0" to "claude-3-5-sonnet-20241022"
384
+ const match = modelName.match(/us\.anthropic\.([^:]+)/);
385
+ if (match && match[1]) {
386
+ return match[1];
387
+ }
388
+ }
389
+ // Return as-is for other formats
390
+ return modelName;
391
+ }
392
+ /**
393
+ * Group processed pairs into conversations
394
+ */
395
+ mergeConversations(pairs, options = {}) {
396
+ if (!pairs || pairs.length === 0)
397
+ return [];
398
+ // Group pairs by system instructions + model
399
+ const pairsBySystem = new Map();
400
+ for (const pair of pairs) {
401
+ const system = pair.request.system;
402
+ const model = pair.model;
403
+ const systemKey = JSON.stringify({ system, model });
404
+ if (!pairsBySystem.has(systemKey)) {
405
+ pairsBySystem.set(systemKey, []);
406
+ }
407
+ pairsBySystem.get(systemKey).push(pair);
408
+ }
409
+ const allConversations = [];
410
+ for (const [, systemPairs] of pairsBySystem) {
411
+ const sortedPairs = [...systemPairs].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
412
+ // Group pairs by conversation thread
413
+ const conversationThreads = new Map();
414
+ for (const pair of sortedPairs) {
415
+ const messages = pair.request.messages || [];
416
+ if (messages.length === 0)
417
+ continue;
418
+ const firstUserMessage = messages[0];
419
+ const normalizedFirstMessage = this.normalizeMessageForGrouping(firstUserMessage);
420
+ const conversationKey = JSON.stringify({ firstMessage: normalizedFirstMessage });
421
+ const keyHash = this.hashString(conversationKey);
422
+ if (!conversationThreads.has(keyHash)) {
423
+ conversationThreads.set(keyHash, []);
424
+ }
425
+ conversationThreads.get(keyHash).push(pair);
426
+ }
427
+ // For each conversation thread, keep the final pair
428
+ for (const [conversationKey, threadPairs] of conversationThreads) {
429
+ const sortedThreadPairs = [...threadPairs].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
430
+ const finalPair = sortedThreadPairs.reduce((longest, current) => {
431
+ const currentMessages = current.request.messages || [];
432
+ const longestMessages = longest.request.messages || [];
433
+ return currentMessages.length > longestMessages.length ? current : longest;
434
+ });
435
+ const modelsUsed = new Set(sortedThreadPairs.map((pair) => pair.model));
436
+ const enhancedMessages = this.processToolResults(finalPair.request.messages || []);
437
+ const conversation = {
438
+ id: this.hashString(conversationKey),
439
+ models: modelsUsed,
440
+ system: finalPair.request.system,
441
+ messages: enhancedMessages,
442
+ response: finalPair.response,
443
+ allPairs: sortedThreadPairs,
444
+ finalPair: finalPair,
445
+ metadata: {
446
+ startTime: sortedThreadPairs[0].timestamp,
447
+ endTime: finalPair.timestamp,
448
+ totalPairs: sortedThreadPairs.length,
449
+ inputTokens: finalPair.response.usage?.input_tokens || 0,
450
+ outputTokens: finalPair.response.usage?.output_tokens || 0,
451
+ totalTokens: (finalPair.response.usage?.input_tokens || 0) + (finalPair.response.usage?.output_tokens || 0),
452
+ },
453
+ };
454
+ allConversations.push(conversation);
455
+ }
456
+ }
457
+ // Apply compact conversation detection
458
+ const mergedConversations = this.detectAndMergeCompactConversations(allConversations);
459
+ // Filter out short conversations unless explicitly included
460
+ const filteredConversations = options.includeShortConversations
461
+ ? mergedConversations
462
+ : mergedConversations.filter((conv) => conv.messages.length > 2);
463
+ // Sort by start time
464
+ return filteredConversations.sort((a, b) => new Date(a.metadata.startTime).getTime() - new Date(b.metadata.startTime).getTime());
465
+ }
466
+ /**
467
+ * Process messages to pair tool_use with tool_result
468
+ */
469
+ processToolResults(messages) {
470
+ const enhancedMessages = [];
471
+ const pendingToolUses = {};
472
+ for (let i = 0; i < messages.length; i++) {
473
+ const message = messages[i];
474
+ const enhancedMessage = { ...message, toolResults: {}, hide: false };
475
+ if (Array.isArray(message.content)) {
476
+ let hasOnlyToolResults = true;
477
+ let hasTextContent = false;
478
+ for (let j = 0; j < message.content.length; j++) {
479
+ const block = message.content[j];
480
+ if (block.type === "tool_use" && "id" in block) {
481
+ const toolUse = block;
482
+ pendingToolUses[toolUse.id] = { messageIndex: i, toolIndex: j };
483
+ hasOnlyToolResults = false;
484
+ }
485
+ else if (block.type === "tool_result" && "tool_use_id" in block) {
486
+ const toolResult = block;
487
+ const toolUseId = toolResult.tool_use_id;
488
+ if (pendingToolUses[toolUseId]) {
489
+ const { messageIndex } = pendingToolUses[toolUseId];
490
+ if (!enhancedMessages[messageIndex]) {
491
+ enhancedMessages[messageIndex] = { ...messages[messageIndex], toolResults: {}, hide: false };
492
+ }
493
+ enhancedMessages[messageIndex].toolResults[toolUseId] = toolResult;
494
+ delete pendingToolUses[toolUseId];
495
+ }
496
+ }
497
+ else if (block.type === "text") {
498
+ hasTextContent = true;
499
+ hasOnlyToolResults = false;
500
+ }
501
+ else {
502
+ hasOnlyToolResults = false;
503
+ }
504
+ }
505
+ if (hasOnlyToolResults && !hasTextContent) {
506
+ enhancedMessage.hide = true;
507
+ }
508
+ }
509
+ enhancedMessages[i] = enhancedMessage;
510
+ }
511
+ return enhancedMessages;
512
+ }
513
+ /**
514
+ * Detect and merge compact conversations
515
+ */
516
+ detectAndMergeCompactConversations(conversations) {
517
+ if (conversations.length <= 1)
518
+ return conversations;
519
+ const sortedConversations = [...conversations].sort((a, b) => new Date(a.metadata.startTime).getTime() - new Date(b.metadata.startTime).getTime());
520
+ const usedConversations = new Set();
521
+ const mergedConversations = [];
522
+ for (let i = 0; i < sortedConversations.length; i++) {
523
+ const currentConv = sortedConversations[i];
524
+ if (usedConversations.has(i))
525
+ continue;
526
+ // Check if this is a compact conversation (1 pair with many messages)
527
+ if (currentConv.allPairs.length === 1 && currentConv.messages.length > 2) {
528
+ let originalConv = null;
529
+ let originalIndex = -1;
530
+ for (let j = 0; j < sortedConversations.length; j++) {
531
+ if (j === i || usedConversations.has(j))
532
+ continue;
533
+ const otherConv = sortedConversations[j];
534
+ // Check if other conversation has exactly 2 fewer messages
535
+ if (otherConv.messages.length === currentConv.messages.length - 2) {
536
+ // Check if messages match (simplified check)
537
+ let messagesMatch = true;
538
+ for (let k = 1; k < otherConv.messages.length; k++) {
539
+ if (!this.messagesRoughlyEqual(otherConv.messages[k], currentConv.messages[k])) {
540
+ messagesMatch = false;
541
+ break;
542
+ }
543
+ }
544
+ if (messagesMatch) {
545
+ originalConv = otherConv;
546
+ originalIndex = j;
547
+ break;
548
+ }
549
+ }
550
+ }
551
+ if (originalConv) {
552
+ const mergedConv = this.mergeCompactConversation(originalConv, currentConv);
553
+ mergedConversations.push(mergedConv);
554
+ usedConversations.add(i);
555
+ usedConversations.add(originalIndex);
556
+ }
557
+ else {
558
+ currentConv.compacted = true;
559
+ mergedConversations.push(currentConv);
560
+ usedConversations.add(i);
561
+ }
562
+ }
563
+ else {
564
+ mergedConversations.push(currentConv);
565
+ usedConversations.add(i);
566
+ }
567
+ }
568
+ // Add remaining conversations
569
+ for (let i = 0; i < sortedConversations.length; i++) {
570
+ if (!usedConversations.has(i)) {
571
+ mergedConversations.push(sortedConversations[i]);
572
+ }
573
+ }
574
+ return mergedConversations.sort((a, b) => new Date(a.metadata.startTime).getTime() - new Date(b.metadata.startTime).getTime());
575
+ }
576
+ /**
577
+ * Merge a compact conversation with its original counterpart
578
+ */
579
+ mergeCompactConversation(originalConv, compactConv) {
580
+ const originalMessages = originalConv.messages || [];
581
+ const compactMessages = compactConv.messages || [];
582
+ const mergedMessages = [...compactMessages];
583
+ if (originalMessages.length > 0 && mergedMessages.length > 0) {
584
+ mergedMessages[0] = originalMessages[0];
585
+ }
586
+ const allPairs = [...originalConv.allPairs, ...compactConv.allPairs].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
587
+ const allModels = new Set([...originalConv.models, ...compactConv.models]);
588
+ const startTime = allPairs[0].timestamp;
589
+ const endTime = allPairs[allPairs.length - 1].timestamp;
590
+ return {
591
+ id: compactConv.id,
592
+ models: allModels,
593
+ system: originalConv.system,
594
+ messages: mergedMessages,
595
+ response: compactConv.response,
596
+ allPairs: allPairs,
597
+ finalPair: compactConv.finalPair,
598
+ compacted: true,
599
+ metadata: {
600
+ startTime: startTime,
601
+ endTime: endTime,
602
+ totalPairs: allPairs.length,
603
+ inputTokens: (originalConv.metadata.inputTokens || 0) + (compactConv.metadata.inputTokens || 0),
604
+ outputTokens: (originalConv.metadata.outputTokens || 0) + (compactConv.metadata.outputTokens || 0),
605
+ totalTokens: (originalConv.metadata.totalTokens || 0) + (compactConv.metadata.totalTokens || 0),
606
+ },
607
+ };
608
+ }
609
+ /**
610
+ * Compare two messages to see if they're roughly equal
611
+ */
612
+ messagesRoughlyEqual(msg1, msg2) {
613
+ if (msg1.role !== msg2.role)
614
+ return false;
615
+ const content1 = msg1.content;
616
+ const content2 = msg2.content;
617
+ if (typeof content1 !== typeof content2)
618
+ return false;
619
+ if (Array.isArray(content1) !== Array.isArray(content2))
620
+ return false;
621
+ return true;
622
+ }
623
+ /**
624
+ * Normalize message for grouping (removes dynamic content)
625
+ */
626
+ normalizeMessageForGrouping(message) {
627
+ if (!message || !message.content)
628
+ return message;
629
+ let normalizedContent;
630
+ if (Array.isArray(message.content)) {
631
+ normalizedContent = message.content.map((block) => {
632
+ if (block.type === "text" && "text" in block) {
633
+ let text = block.text;
634
+ text = text.replace(/Generated \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/g, "Generated [TIMESTAMP]");
635
+ text = text.replace(/The user opened the file [^\s]+ in the IDE\./g, "The user opened file in IDE.");
636
+ text = text.replace(/<system-reminder>.*?<\/system-reminder>/gs, "[SYSTEM-REMINDER]");
637
+ return { type: "text", text: text };
638
+ }
639
+ return block;
640
+ });
641
+ }
642
+ else {
643
+ normalizedContent = message.content;
644
+ }
645
+ return {
646
+ role: message.role,
647
+ content: normalizedContent,
648
+ };
649
+ }
650
+ /**
651
+ * Generate hash string for conversation grouping
652
+ */
653
+ hashString(str) {
654
+ let hash = 0;
655
+ for (let i = 0; i < str.length; i++) {
656
+ const char = str.charCodeAt(i);
657
+ hash = (hash << 5) - hash + char;
658
+ hash = hash & hash;
659
+ }
660
+ return Math.abs(hash).toString();
661
+ }
662
+ }
// CommonJS export of the processor class.
exports.SharedConversationProcessor = SharedConversationProcessor;
@@ -0,0 +1,28 @@
// Token extractor: patches global.fetch to capture the OAuth bearer token
// from requests to Anthropic's Messages API and persist it to a temp file
// (path supplied by the parent process via CLAUDE_TRACE_TOKEN_FILE).
const fs = require("fs");
const path = require("path"); // NOTE(review): currently unused — kept for compatibility.
const originalFetch = global.fetch;

// Destination file for the captured token; capture is disabled when unset.
const TEMP_TOKEN_FILE = process.env.CLAUDE_TRACE_TOKEN_FILE;

// Optional custom API endpoint (e.g. a proxy). Fix: the original called
// .replace() directly on process.env.ANTHROPIC_BASE_URL, throwing a
// TypeError — and breaking EVERY fetch in the process — whenever the
// variable was unset. Default to "" and only match when non-empty
// (url.includes("") is always true, which would match every URL).
const ANTHROPIC_BASE_HOST = (process.env.ANTHROPIC_BASE_URL || "").replace(/^https?:\/\//, "");

global.fetch = async function (input, init = {}) {
	// Normalize the first fetch argument (string | URL | Request) to a URL string.
	const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;

	const matchesAnthropicHost =
		url.includes("api.anthropic.com") || (ANTHROPIC_BASE_HOST !== "" && url.includes(ANTHROPIC_BASE_HOST));

	if (matchesAnthropicHost && url.includes("/v1/messages")) {
		const headers = new Headers(init.headers || {});
		const authorization = headers.get("authorization");
		if (authorization && authorization.startsWith("Bearer ") && TEMP_TOKEN_FILE) {
			// Strip the "Bearer " prefix (7 chars) to get the raw token.
			const token = authorization.substring(7);
			try {
				fs.writeFileSync(TEMP_TOKEN_FILE, token);
			} catch (e) {
				// Best-effort capture: ignore write errors silently.
			}
		}
	}

	// Always forward to the real fetch, captured or not.
	return originalFetch(input, init);
};