trickle-observe 0.2.119 → 0.2.121

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,7 @@
11
11
  */
12
12
  export declare function patchOpenAI(openaiModule: any, debug: boolean): void;
13
13
  export declare function patchAnthropic(anthropicModule: any, debug: boolean): void;
14
+ export declare function patchGemini(geminiModule: any, debug: boolean): void;
14
15
  /**
15
16
  * Initialize the LLM observer — clears previous data file.
16
17
  */
@@ -46,6 +46,7 @@ var __importStar = (this && this.__importStar) || (function () {
46
46
  Object.defineProperty(exports, "__esModule", { value: true });
47
47
  exports.patchOpenAI = patchOpenAI;
48
48
  exports.patchAnthropic = patchAnthropic;
49
+ exports.patchGemini = patchGemini;
49
50
  exports.initLlmObserver = initLlmObserver;
50
51
  const fs = __importStar(require("fs"));
51
52
  const path = __importStar(require("path"));
@@ -65,6 +66,12 @@ const PRICING = {
65
66
  'claude-3-5-sonnet-20241022': { input: 3, output: 15 },
66
67
  'claude-3-5-haiku-20241022': { input: 0.8, output: 4 },
67
68
  'claude-3-haiku-20240307': { input: 0.25, output: 1.25 },
69
+ 'gemini-2.5-flash-lite': { input: 0.1, output: 0.4 },
70
+ 'gemini-2.5-flash': { input: 0.3, output: 2.5 },
71
+ 'gemini-2.5-pro': { input: 1.25, output: 10 },
72
+ 'gemini-2.0-flash': { input: 0.1, output: 0.4 },
73
+ 'gemini-1.5-flash': { input: 0.075, output: 0.3 },
74
+ 'gemini-1.5-pro': { input: 1.25, output: 5 },
68
75
  };
69
76
  function getLlmFile() {
70
77
  if (llmFile)
@@ -489,6 +496,223 @@ function captureAnthropicError(params, err, startTime, debug) {
489
496
  });
490
497
  }
491
498
  // ────────────────────────────────────────────────────
499
+ // Google Gemini SDK instrumentation (@google/genai)
500
+ // ────────────────────────────────────────────────────
501
+ function patchGemini(geminiModule, debug) {
502
+ if (!geminiModule || getattr(geminiModule, '_trickle_llm_patched'))
503
+ return;
504
+ setattr(geminiModule, '_trickle_llm_patched', true);
505
+ // @google/genai exports GoogleGenAI class
506
+ // Usage: const ai = new GoogleGenAI({ apiKey }); ai.models.generateContent({...})
507
+ const GoogleGenAI = geminiModule.GoogleGenAI || geminiModule.default?.GoogleGenAI;
508
+ if (typeof GoogleGenAI !== 'function') {
509
+ if (debug)
510
+ console.log('[trickle/llm] Gemini: GoogleGenAI class not found');
511
+ return;
512
+ }
513
+ try {
514
+ // GoogleGenAI creates models as own property in the constructor.
515
+ // Patch the GoogleGenAI constructor to wrap generateContent after creation.
516
+ const origGoogleGenAIInit = GoogleGenAI.prototype.constructor;
517
+ // Use a post-construction hook: after new GoogleGenAI() creates the instance
518
+ // with models.generateContent as an own property, wrap that method.
519
+ const tmpClient = new GoogleGenAI({ apiKey: 'trickle-probe' });
520
+ const ModelsClass = Object.getPrototypeOf(tmpClient.models)?.constructor;
521
+ if (ModelsClass) {
522
+ const origModelsInit = ModelsClass;
523
+ // Patch the Models constructor to wrap generateContent after instance creation
524
+ const origConstruct = ModelsClass.prototype.constructor;
525
+ // We can't replace the ES6 class constructor, so instead we use a
526
+ // post-construction approach: hook into GoogleGenAI's prototype to
527
+ // patch models on each new client instance.
528
+ const origGAIProto = GoogleGenAI.prototype;
529
+ const origInitDescriptors = Object.getOwnPropertyDescriptors(origGAIProto);
530
+ // Define a lazy wrapper: first time models.generateContent is called,
531
+ // install the instrumentation wrapper
532
+ function wrapModelsInstance(models) {
533
+ if (!models || models.__trickle_patched)
534
+ return;
535
+ models.__trickle_patched = true;
536
+ if (typeof models.generateContent === 'function') {
537
+ const origGenerate = models.generateContent.bind(models);
538
+ models.generateContent = function patchedGenerateContent(...args) {
539
+ const params = args[0] || {};
540
+ const startTime = performance.now();
541
+ const result = origGenerate(...args);
542
+ if (result && typeof result.then === 'function') {
543
+ return result.then((response) => {
544
+ captureGeminiResponse(params, response, startTime, false, debug);
545
+ return response;
546
+ }).catch((err) => {
547
+ captureGeminiError(params, err, startTime, debug);
548
+ throw err;
549
+ });
550
+ }
551
+ return result;
552
+ };
553
+ }
554
+ if (typeof models.generateContentStream === 'function') {
555
+ const origStream = models.generateContentStream.bind(models);
556
+ models.generateContentStream = function patchedStream(...args) {
557
+ const params = args[0] || {};
558
+ const startTime = performance.now();
559
+ const result = origStream(...args);
560
+ if (result && typeof result.then === 'function') {
561
+ return result.then((stream) => handleGeminiStream(stream, params, startTime, debug));
562
+ }
563
+ return result;
564
+ };
565
+ }
566
+ }
567
+ // Intercept the GoogleGenAI constructor to patch each instance's models
568
+ // Since we can't replace ES6 class constructors, we use a Proxy
569
+ const proxyHandler = {
570
+ construct(target, args, newTarget) {
571
+ const instance = Reflect.construct(target, args, newTarget);
572
+ if (instance.models)
573
+ wrapModelsInstance(instance.models);
574
+ return instance;
575
+ }
576
+ };
577
+ const ProxiedGoogleGenAI = new Proxy(GoogleGenAI, proxyHandler);
578
+ // Replace on the module (try both export styles)
579
+ try {
580
+ geminiModule.GoogleGenAI = ProxiedGoogleGenAI;
581
+ }
582
+ catch { }
583
+ try {
584
+ if (geminiModule.default?.GoogleGenAI)
585
+ geminiModule.default.GoogleGenAI = ProxiedGoogleGenAI;
586
+ }
587
+ catch { }
588
+ // Also patch the already-created probe instance (in case someone imported before us)
589
+ // This is a no-op since the probe is discarded.
590
+ if (debug)
591
+ console.log('[trickle/llm] Patched Gemini SDK');
592
+ }
593
+ }
594
+ catch (e) {
595
+ if (debug)
596
+ console.log('[trickle/llm] Gemini patch probe failed:', e.message);
597
+ }
598
+ }
599
/**
 * Record a completed (non-stream or aggregated) Gemini response as an
 * llm_call event: token usage, latency, estimated cost, finish reason, and
 * truncated input/output previews.
 */
function captureGeminiResponse(params, response, startTime, isStream, debug) {
    const model = params.model || 'gemini-unknown';
    const usage = response.usageMetadata || {};
    const inputTokens = usage.promptTokenCount || 0;
    const outputTokens = usage.candidatesTokenCount || 0;
    const totalTokens = usage.totalTokenCount || inputTokens + outputTokens;
    const candidates = response.candidates || [];
    // `response.text` is a getter in @google/genai and may throw; fall back
    // to the first candidate's first text part.
    let outputText = '';
    try {
        outputText = response.text || '';
    }
    catch {
        outputText = candidates[0]?.content?.parts?.[0]?.text || '';
    }
    const finishReason = candidates[0]?.finishReason || 'unknown';
    // Input preview: `contents` may be a plain string or an array of turns;
    // for arrays, preview the most recent turn.
    const contents = params.contents;
    let inputPreview = '';
    if (typeof contents === 'string') {
        inputPreview = truncate(contents);
    }
    else if (Array.isArray(contents)) {
        const lastTurn = contents[contents.length - 1];
        if (typeof lastTurn === 'string')
            inputPreview = truncate(lastTurn);
        else if (lastTurn?.parts?.[0]?.text)
            inputPreview = truncate(lastTurn.parts[0].text);
    }
    const cfg = params.config || {};
    const event = {
        kind: 'llm_call', provider: 'gemini', model,
        durationMs: round(performance.now() - startTime),
        inputTokens, outputTokens, totalTokens,
        estimatedCostUsd: estimateCost(model, inputTokens, outputTokens),
        stream: isStream, finishReason,
        temperature: cfg.temperature,
        maxTokens: cfg.maxOutputTokens,
        systemPrompt: typeof cfg.systemInstruction === 'string'
            ? truncate(cfg.systemInstruction, 200) : undefined,
        inputPreview, outputPreview: truncate(outputText),
        messageCount: Array.isArray(contents) ? contents.length : (contents ? 1 : 0),
        toolUse: !!(cfg.tools?.length || params.tools?.length),
        timestamp: Date.now(),
    };
    writeLlmEvent(event);
    if (debug)
        console.log(`[trickle/llm] Gemini: ${model} (${totalTokens} tokens, ${event.durationMs}ms)`);
}
651
/**
 * Record a failed Gemini call as an llm_call event: finishReason 'error',
 * zero token counts and cost, plus a truncated error message.
 */
function captureGeminiError(params, err, startTime, debug) {
    const cfg = params.config || {};
    const preview = typeof params.contents === 'string' ? truncate(params.contents) : '';
    const message = truncate(err?.message || String(err), 200);
    writeLlmEvent({
        kind: 'llm_call', provider: 'gemini',
        model: params.model || 'gemini-unknown',
        durationMs: round(performance.now() - startTime),
        inputTokens: 0, outputTokens: 0, totalTokens: 0, estimatedCostUsd: 0,
        stream: false, finishReason: 'error',
        temperature: cfg.temperature,
        maxTokens: cfg.maxOutputTokens,
        inputPreview: preview,
        outputPreview: '', messageCount: 0,
        toolUse: false, timestamp: Date.now(),
        error: message,
    });
}
666
/**
 * Wrap a Gemini streaming response so one aggregate llm_call event is
 * written when the consumer finishes iterating, without disturbing the
 * caller's own iteration of the stream.
 *
 * The stream's [Symbol.asyncIterator] is replaced with a version that
 * accumulates chunk text and the latest usageMetadata, then writes the
 * event when the underlying iterator reports done.
 *
 * NOTE(review): the event fires only when the stream is iterated to
 * completion — a consumer that breaks early (triggering `return`) never
 * logs. finishReason is hard-coded to 'stop'; confirm against SDK chunks.
 */
async function handleGeminiStream(stream, params, startTime, debug) {
    // Not an async iterable — return unchanged.
    if (!stream || !stream[Symbol.asyncIterator])
        return stream;
    const chunks = [];
    const origIterator = stream[Symbol.asyncIterator].bind(stream);
    // Gemini repeats usageMetadata on chunks; the last one seen is the
    // authoritative total.
    let lastUsage = null;
    stream[Symbol.asyncIterator] = function () {
        const iter = origIterator();
        return {
            async next() {
                const result = await iter.next();
                if (!result.done) {
                    const chunk = result.value;
                    // chunk.text is a getter in @google/genai and may throw.
                    try {
                        if (chunk.text)
                            chunks.push(chunk.text);
                    }
                    catch { }
                    if (chunk.usageMetadata)
                        lastUsage = chunk.usageMetadata;
                }
                else {
                    // Stream finished
                    const model = params.model || 'gemini-unknown';
                    const inputTokens = lastUsage?.promptTokenCount || 0;
                    const outputTokens = lastUsage?.candidatesTokenCount || 0;
                    writeLlmEvent({
                        kind: 'llm_call', provider: 'gemini', model,
                        durationMs: round(performance.now() - startTime),
                        inputTokens, outputTokens, totalTokens: inputTokens + outputTokens,
                        estimatedCostUsd: estimateCost(model, inputTokens, outputTokens),
                        stream: true, finishReason: 'stop',
                        temperature: params.config?.temperature,
                        maxTokens: params.config?.maxOutputTokens,
                        inputPreview: typeof params.contents === 'string' ? truncate(params.contents) : '',
                        outputPreview: truncate(chunks.join('')),
                        messageCount: 0, toolUse: false, timestamp: Date.now(),
                    });
                    if (debug)
                        console.log(`[trickle/llm] Gemini stream: ${model} (${outputTokens} tokens)`);
                }
                return result;
            },
            // Forward early-exit and error paths to the real iterator when
            // it implements them.
            return: iter.return?.bind(iter),
            throw: iter.throw?.bind(iter),
        };
    };
    return stream;
}
715
+ // ────────────────────────────────────────────────────
492
716
  // Helpers
493
717
  // ────────────────────────────────────────────────────
494
718
  function round(n) {
@@ -0,0 +1,13 @@
1
+ /**
2
+ * MCP tool call observer — auto-instruments MCP client and server SDKs
3
+ * to capture tool invocations, arguments, responses, latency, and errors.
4
+ *
5
+ * Writes to .trickle/mcp.jsonl as:
6
+ * { "kind": "mcp_tool_call", "tool": "fetch", "direction": "outgoing",
7
+ * "durationMs": 234.5, "args": {...}, "result": "...", ... }
8
+ *
9
+ * Zero code changes needed — intercepted via Module._load hook.
10
+ */
11
+ export declare function patchMcpClient(mcpModule: any, debug: boolean): void;
12
+ export declare function patchMcpServer(mcpModule: any, debug: boolean): void;
13
+ export declare function initMcpObserver(): void;
@@ -0,0 +1,298 @@
1
+ "use strict";
2
+ /**
3
+ * MCP tool call observer — auto-instruments MCP client and server SDKs
4
+ * to capture tool invocations, arguments, responses, latency, and errors.
5
+ *
6
+ * Writes to .trickle/mcp.jsonl as:
7
+ * { "kind": "mcp_tool_call", "tool": "fetch", "direction": "outgoing",
8
+ * "durationMs": 234.5, "args": {...}, "result": "...", ... }
9
+ *
10
+ * Zero code changes needed — intercepted via Module._load hook.
11
+ */
12
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
13
+ if (k2 === undefined) k2 = k;
14
+ var desc = Object.getOwnPropertyDescriptor(m, k);
15
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
16
+ desc = { enumerable: true, get: function() { return m[k]; } };
17
+ }
18
+ Object.defineProperty(o, k2, desc);
19
+ }) : (function(o, m, k, k2) {
20
+ if (k2 === undefined) k2 = k;
21
+ o[k2] = m[k];
22
+ }));
23
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
24
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
25
+ }) : function(o, v) {
26
+ o["default"] = v;
27
+ });
28
+ var __importStar = (this && this.__importStar) || (function () {
29
+ var ownKeys = function(o) {
30
+ ownKeys = Object.getOwnPropertyNames || function (o) {
31
+ var ar = [];
32
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
33
+ return ar;
34
+ };
35
+ return ownKeys(o);
36
+ };
37
+ return function (mod) {
38
+ if (mod && mod.__esModule) return mod;
39
+ var result = {};
40
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
41
+ __setModuleDefault(result, mod);
42
+ return result;
43
+ };
44
+ })();
45
+ Object.defineProperty(exports, "__esModule", { value: true });
46
+ exports.patchMcpClient = patchMcpClient;
47
+ exports.patchMcpServer = patchMcpServer;
48
+ exports.initMcpObserver = initMcpObserver;
49
+ const fs = __importStar(require("fs"));
50
+ const path = __importStar(require("path"));
51
+ let mcpFile = null;
52
+ let eventCount = 0;
53
+ const MAX_MCP_EVENTS = 1000;
54
+ const TRUNCATE_LEN = 500;
55
/**
 * Lazily resolve (and cache in module-level `mcpFile`) the path of the MCP
 * event log: <TRICKLE_LOCAL_DIR or ./.trickle>/mcp.jsonl. Creates the
 * directory on first use.
 */
function getMcpFile() {
    if (!mcpFile) {
        const baseDir = process.env.TRICKLE_LOCAL_DIR || path.join(process.cwd(), '.trickle');
        try {
            fs.mkdirSync(baseDir, { recursive: true });
        }
        catch {
            // best effort — appendFileSync will surface any real problem later
        }
        mcpFile = path.join(baseDir, 'mcp.jsonl');
    }
    return mcpFile;
}
66
/**
 * Append one MCP event as a JSONL line, capped at MAX_MCP_EVENTS per run.
 * Write failures are swallowed: observability must never crash the host.
 */
function writeMcpEvent(event) {
    if (eventCount >= MAX_MCP_EVENTS)
        return;
    eventCount += 1;
    try {
        const line = JSON.stringify(event) + '\n';
        fs.appendFileSync(getMcpFile(), line);
    }
    catch {
        // ignore — logging is best-effort
    }
}
75
/**
 * Clamp a string to `len` characters, appending '...' when cut.
 * Falsy input (null/undefined/'') yields ''.
 */
function truncate(s, len = TRUNCATE_LEN) {
    if (!s)
        return '';
    if (s.length <= len)
        return s;
    return s.substring(0, len) + '...';
}
80
/**
 * Make tool-call arguments safe to embed in a JSONL event.
 *
 * Returns null for null/undefined; the original value when its JSON form
 * is small (<= 1000 chars); a truncated JSON string for oversized
 * payloads; and a stringified fallback when the value cannot be
 * JSON-serialized (circular refs, BigInt, ...).
 *
 * BUG FIX: the original attempted `JSON.parse(s.substring(0, 1000) + '"}')`
 * for large payloads. Slicing JSON at an arbitrary offset almost never
 * yields valid JSON after appending '"}', so the parse threw and every
 * oversized object degenerated to String(args) => "[object Object]". We
 * now keep the truncated JSON string itself, which is always serializable
 * and preserves the visible prefix of the arguments.
 */
function sanitizeArgs(args) {
    if (args === null || args === undefined)
        return null;
    try {
        const s = JSON.stringify(args);
        return s.length > 1000 ? s.substring(0, 1000) + '...' : args;
    }
    catch {
        // Unserializable value — fall back to a short string form.
        return String(args).substring(0, 200);
    }
}
91
/**
 * Produce a short human-readable preview of an MCP tool result.
 * CallToolResult objects get their 'text' content parts joined with
 * newlines; plain strings are truncated directly; anything else falls back
 * to truncated JSON (or '' if unserializable).
 */
function extractResultPreview(result) {
    if (!result)
        return '';
    // MCP CallToolResult carries a `content` array of typed parts.
    if (result.content && Array.isArray(result.content)) {
        const pieces = [];
        for (const part of result.content) {
            if (part.type === 'text' && part.text)
                pieces.push(part.text);
        }
        return truncate(pieces.join('\n'));
    }
    if (typeof result === 'string')
        return truncate(result);
    try {
        return truncate(JSON.stringify(result));
    }
    catch {
        return '';
    }
}
110
+ // ────────────────────────────────────────────────────
111
+ // Client-side: patch Client.callTool
112
+ // ────────────────────────────────────────────────────
113
/**
 * Instrument the MCP SDK's Client prototype so every outgoing tool call
 * (Client.callTool) and tool listing (Client.listTools) is written to
 * mcp.jsonl with duration, sanitized args, result preview, and error info.
 *
 * Patching is idempotent at two levels: a flag on the module object and a
 * flag on each patched prototype method.
 *
 * @param mcpModule exports object of @modelcontextprotocol/sdk
 * @param debug when true, log patch/call activity to the console
 */
function patchMcpClient(mcpModule, debug) {
    if (!mcpModule || mcpModule.__trickle_mcp_patched)
        return;
    mcpModule.__trickle_mcp_patched = true;
    // @modelcontextprotocol/sdk exports Client class
    const ClientClass = mcpModule.Client;
    if (!ClientClass) {
        if (debug)
            console.log('[trickle/mcp] Client class not found in module');
        return;
    }
    // Patch callTool on the prototype
    const proto = ClientClass.prototype;
    if (proto.callTool && !proto.callTool.__trickle_patched) {
        const origCallTool = proto.callTool;
        proto.callTool = async function patchedCallTool(...args) {
            const params = args[0] || {};
            // Supports both call shapes: callTool({ name, arguments }) and
            // a positional callTool(name, args) form.
            const toolName = typeof params === 'string' ? params : (params.name || 'unknown');
            const toolArgs = typeof params === 'string' ? args[1] : (params.arguments || params.args);
            const startTime = performance.now();
            try {
                const result = await origCallTool.apply(this, args);
                // Duration rounded to 2 decimal places (ms).
                const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                writeMcpEvent({
                    kind: 'mcp_tool_call', tool: toolName, direction: 'outgoing',
                    durationMs, args: sanitizeArgs(toolArgs),
                    resultPreview: extractResultPreview(result),
                    // MCP reports tool-level failures via result.isError
                    // rather than by throwing.
                    isError: result?.isError || false,
                    errorMessage: result?.isError ? extractResultPreview(result) : undefined,
                    timestamp: Date.now(),
                });
                if (debug)
                    console.log(`[trickle/mcp] callTool: ${toolName} (${durationMs}ms)`);
                return result;
            }
            catch (err) {
                // Transport/protocol failure: record it, then rethrow so the
                // caller sees the original error.
                const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                writeMcpEvent({
                    kind: 'mcp_tool_call', tool: toolName, direction: 'outgoing',
                    durationMs, args: sanitizeArgs(toolArgs),
                    resultPreview: '', isError: true,
                    errorMessage: truncate(err?.message || String(err), 200),
                    timestamp: Date.now(),
                });
                throw err;
            }
        };
        proto.callTool.__trickle_patched = true;
        if (debug)
            console.log('[trickle/mcp] Patched Client.callTool');
    }
    // Also patch listTools for discovery
    // NOTE(review): unlike callTool, this wrapper has no catch — a failed
    // listTools propagates without writing an event. Presumably intentional
    // (discovery noise); confirm.
    if (proto.listTools && !proto.listTools.__trickle_patched) {
        const origListTools = proto.listTools;
        proto.listTools = async function patchedListTools(...args) {
            const startTime = performance.now();
            const result = await origListTools.apply(this, args);
            const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
            const toolCount = result?.tools?.length || 0;
            writeMcpEvent({
                kind: 'mcp_tool_call', tool: '__list_tools', direction: 'outgoing',
                durationMs, args: null,
                resultPreview: `${toolCount} tools available`,
                isError: false, timestamp: Date.now(),
            });
            return result;
        };
        proto.listTools.__trickle_patched = true;
    }
}
183
+ // ────────────────────────────────────────────────────
184
+ // Server-side: patch Server to wrap tool handlers
185
+ // ────────────────────────────────────────────────────
186
/**
 * Instrument the MCP SDK's server side so incoming tool invocations are
 * written to mcp.jsonl. Two interception points are patched:
 *  - Server.tool(...): the handler passed at registration is wrapped;
 *  - Server.setRequestHandler(...): the low-level handler is wrapped and
 *    only 'tools/call' requests are logged.
 *
 * NOTE(review): there is no null guard on mcpModule (unlike
 * patchMcpClient) — callers are expected to wrap this in try/catch.
 * Also, the dedup flag is set on whichever class was found first
 * (Server || McpServer); if a module exports both, only one is patched.
 *
 * @param mcpModule exports object of @modelcontextprotocol/sdk
 * @param debug when true, log patch/call activity to the console
 */
function patchMcpServer(mcpModule, debug) {
    // Server is exported from @modelcontextprotocol/sdk
    const ServerClass = mcpModule.Server || mcpModule.McpServer;
    if (!ServerClass || ServerClass.__trickle_mcp_server_patched)
        return;
    ServerClass.__trickle_mcp_server_patched = true;
    const proto = ServerClass.prototype;
    // Patch the .tool() registration method to wrap handlers
    if (proto.tool && !proto.tool.__trickle_patched) {
        const origTool = proto.tool;
        proto.tool = function patchedTool(...args) {
            // tool(name, schema, handler) or tool(name, handler)
            const toolName = typeof args[0] === 'string' ? args[0] : 'unknown';
            // The handler is always the last argument, whichever overload.
            const lastArg = args[args.length - 1];
            if (typeof lastArg === 'function') {
                const originalHandler = lastArg;
                // Swap in a wrapper that times the handler and records the
                // outcome, preserving `this` and all handler arguments.
                args[args.length - 1] = async function wrappedHandler(...handlerArgs) {
                    const startTime = performance.now();
                    try {
                        const result = await originalHandler.apply(this, handlerArgs);
                        const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                        writeMcpEvent({
                            kind: 'mcp_tool_call', tool: toolName, direction: 'incoming',
                            // handlerArgs[0] is presumably the tool's input
                            // object — confirm against the SDK's handler
                            // signature.
                            durationMs, args: sanitizeArgs(handlerArgs[0]),
                            resultPreview: extractResultPreview(result),
                            isError: result?.isError || false,
                            timestamp: Date.now(),
                        });
                        if (debug)
                            console.log(`[trickle/mcp] tool handler: ${toolName} (${durationMs}ms)`);
                        return result;
                    }
                    catch (err) {
                        // Record the failure, then rethrow unchanged.
                        const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                        writeMcpEvent({
                            kind: 'mcp_tool_call', tool: toolName, direction: 'incoming',
                            durationMs, args: sanitizeArgs(handlerArgs[0]),
                            resultPreview: '', isError: true,
                            errorMessage: truncate(err?.message || String(err), 200),
                            timestamp: Date.now(),
                        });
                        throw err;
                    }
                };
            }
            // Register with the (possibly wrapped) handler.
            return origTool.apply(this, args);
        };
        proto.tool.__trickle_patched = true;
        if (debug)
            console.log('[trickle/mcp] Patched Server.tool');
    }
    // Patch setRequestHandler for lower-level interception
    if (proto.setRequestHandler && !proto.setRequestHandler.__trickle_patched) {
        const origSetHandler = proto.setRequestHandler;
        proto.setRequestHandler = function patchedSetHandler(schema, handler) {
            if (typeof handler === 'function') {
                const origHandler = handler;
                // NOTE(review): redundant alias of origHandler — harmless.
                const capturedHandler = origHandler;
                handler = async function wrappedHandler(...args) {
                    const request = args[0];
                    const method = request?.method || schema?.method || 'unknown';
                    // Only tool invocations are logged; all other request
                    // types pass straight through below.
                    if (method === 'tools/call') {
                        const startTime = performance.now();
                        try {
                            const result = await capturedHandler.apply(this, args);
                            const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                            writeMcpEvent({
                                kind: 'mcp_tool_call', tool: request?.params?.name || 'unknown',
                                direction: 'incoming', durationMs,
                                args: sanitizeArgs(request?.params?.arguments),
                                resultPreview: extractResultPreview(result),
                                isError: result?.isError || false,
                                timestamp: Date.now(),
                            });
                            return result;
                        }
                        catch (err) {
                            const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
                            writeMcpEvent({
                                kind: 'mcp_tool_call', tool: request?.params?.name || 'unknown',
                                direction: 'incoming', durationMs,
                                args: sanitizeArgs(request?.params?.arguments),
                                resultPreview: '', isError: true,
                                errorMessage: truncate(err?.message || String(err), 200),
                                timestamp: Date.now(),
                            });
                            throw err;
                        }
                    }
                    // Non-tool request: delegate untimed.
                    return capturedHandler.apply(this, args);
                };
            }
            return origSetHandler.call(this, schema, handler);
        };
        proto.setRequestHandler.__trickle_patched = true;
    }
}
283
+ // ────────────────────────────────────────────────────
284
+ // Initialization
285
+ // ────────────────────────────────────────────────────
286
/**
 * Reset MCP observation for a fresh run: (re)create the .trickle
 * directory, truncate mcp.jsonl, and zero the per-run event counter.
 */
function initMcpObserver() {
    eventCount = 0;
    const dir = process.env.TRICKLE_LOCAL_DIR || path.join(process.cwd(), '.trickle');
    try {
        fs.mkdirSync(dir, { recursive: true });
    }
    catch {
        // directory may already exist or be uncreatable — best effort
    }
    mcpFile = path.join(dir, 'mcp.jsonl');
    try {
        fs.writeFileSync(mcpFile, '');
    }
    catch {
        // leave any stale file in place; appends will still be attempted
    }
}
@@ -42,6 +42,7 @@ const express_1 = require("./express");
42
42
  const trace_var_1 = require("./trace-var");
43
43
  const call_trace_1 = require("./call-trace");
44
44
  const llm_observer_1 = require("./llm-observer");
45
+ const mcp_observer_1 = require("./mcp-observer");
45
46
  const vite_plugin_1 = require("./vite-plugin");
46
47
  // ── Source map support ──
47
48
  // Lightweight VLQ decoder for mapping compiled JS lines back to original TS lines
@@ -1242,6 +1243,8 @@ if (enabled) {
1242
1243
  (0, call_trace_1.initCallTrace)();
1243
1244
  // ── Hook 0b3: Initialize LLM observer ──
1244
1245
  (0, llm_observer_1.initLlmObserver)();
1246
+ // ── Hook 0b4: Initialize MCP observer ──
1247
+ (0, mcp_observer_1.initMcpObserver)();
1245
1248
  // ── Hook 0c: Capture environment snapshot ──
1246
1249
  try {
1247
1250
  const envDir = process.env.TRICKLE_LOCAL_DIR || path_1.default.join(process.cwd(), '.trickle');
@@ -1552,6 +1555,32 @@ if (enabled) {
1552
1555
  }
1553
1556
  catch { /* not critical */ }
1554
1557
  }
1558
+ // MCP SDK (client + server) — match any subpath import
1559
+ if (request.includes('@modelcontextprotocol/sdk') && !expressPatched.has('mcp-client') && exports.Client) {
1560
+ expressPatched.add('mcp-client');
1561
+ try {
1562
+ const { patchMcpClient } = require(path_1.default.join(__dirname, 'mcp-observer.js'));
1563
+ patchMcpClient(exports, debug);
1564
+ }
1565
+ catch { /* not critical */ }
1566
+ }
1567
+ if (request.includes('@modelcontextprotocol/sdk') && !expressPatched.has('mcp-server') && (exports.Server || exports.McpServer)) {
1568
+ expressPatched.add('mcp-server');
1569
+ try {
1570
+ const { patchMcpServer } = require(path_1.default.join(__dirname, 'mcp-observer.js'));
1571
+ patchMcpServer(exports, debug);
1572
+ }
1573
+ catch { /* not critical */ }
1574
+ }
1575
+ // Google Gemini SDK
1576
+ if (request === '@google/genai' && !expressPatched.has('@google/genai')) {
1577
+ expressPatched.add('@google/genai');
1578
+ try {
1579
+ const { patchGemini } = require(path_1.default.join(__dirname, 'llm-observer.js'));
1580
+ patchGemini(exports, debug);
1581
+ }
1582
+ catch { /* not critical */ }
1583
+ }
1555
1584
  // Resolve to absolute path for dedup — do this FIRST since bundlers like
1556
1585
  // tsx/esbuild may use path aliases (e.g., @config/env) that don't start
1557
1586
  // with './' or '/'. We need the resolved path to decide if it's user code.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "trickle-observe",
3
- "version": "0.2.119",
3
+ "version": "0.2.121",
4
4
  "description": "Runtime type observability for JavaScript applications",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -30,6 +30,12 @@ const PRICING: Record<string, { input: number; output: number }> = {
30
30
  'claude-3-5-sonnet-20241022': { input: 3, output: 15 },
31
31
  'claude-3-5-haiku-20241022': { input: 0.8, output: 4 },
32
32
  'claude-3-haiku-20240307': { input: 0.25, output: 1.25 },
33
+ 'gemini-2.5-flash-lite': { input: 0.1, output: 0.4 },
34
+ 'gemini-2.5-flash': { input: 0.3, output: 2.5 },
35
+ 'gemini-2.5-pro': { input: 1.25, output: 10 },
36
+ 'gemini-2.0-flash': { input: 0.1, output: 0.4 },
37
+ 'gemini-1.5-flash': { input: 0.075, output: 0.3 },
38
+ 'gemini-1.5-pro': { input: 1.25, output: 5 },
33
39
  };
34
40
 
35
41
  function getLlmFile(): string {
@@ -480,6 +486,217 @@ function captureAnthropicError(params: any, err: any, startTime: number, debug:
480
486
  });
481
487
  }
482
488
 
489
+ // ────────────────────────────────────────────────────
490
+ // Google Gemini SDK instrumentation (@google/genai)
491
+ // ────────────────────────────────────────────────────
492
+
493
+ export function patchGemini(geminiModule: any, debug: boolean): void {
494
+ if (!geminiModule || getattr(geminiModule, '_trickle_llm_patched')) return;
495
+ setattr(geminiModule, '_trickle_llm_patched', true);
496
+
497
+ // @google/genai exports GoogleGenAI class
498
+ // Usage: const ai = new GoogleGenAI({ apiKey }); ai.models.generateContent({...})
499
+ const GoogleGenAI = geminiModule.GoogleGenAI || geminiModule.default?.GoogleGenAI;
500
+ if (typeof GoogleGenAI !== 'function') {
501
+ if (debug) console.log('[trickle/llm] Gemini: GoogleGenAI class not found');
502
+ return;
503
+ }
504
+
505
+ try {
506
+ // GoogleGenAI creates models as own property in the constructor.
507
+ // Patch the GoogleGenAI constructor to wrap generateContent after creation.
508
+ const origGoogleGenAIInit = GoogleGenAI.prototype.constructor;
509
+
510
+ // Use a post-construction hook: after new GoogleGenAI() creates the instance
511
+ // with models.generateContent as an own property, wrap that method.
512
+ const tmpClient = new GoogleGenAI({ apiKey: 'trickle-probe' });
513
+ const ModelsClass = Object.getPrototypeOf(tmpClient.models)?.constructor;
514
+
515
+ if (ModelsClass) {
516
+ const origModelsInit = ModelsClass;
517
+ // Patch the Models constructor to wrap generateContent after instance creation
518
+ const origConstruct = ModelsClass.prototype.constructor;
519
+
520
+ // We can't replace the ES6 class constructor, so instead we use a
521
+ // post-construction approach: hook into GoogleGenAI's prototype to
522
+ // patch models on each new client instance.
523
+ const origGAIProto = GoogleGenAI.prototype;
524
+ const origInitDescriptors = Object.getOwnPropertyDescriptors(origGAIProto);
525
+
526
+ // Define a lazy wrapper: first time models.generateContent is called,
527
+ // install the instrumentation wrapper
528
+ function wrapModelsInstance(models: any): void {
529
+ if (!models || models.__trickle_patched) return;
530
+ models.__trickle_patched = true;
531
+
532
+ if (typeof models.generateContent === 'function') {
533
+ const origGenerate = models.generateContent.bind(models);
534
+ models.generateContent = function patchedGenerateContent(...args: any[]) {
535
+ const params = args[0] || {};
536
+ const startTime = performance.now();
537
+ const result = origGenerate(...args);
538
+ if (result && typeof result.then === 'function') {
539
+ return result.then((response: any) => {
540
+ captureGeminiResponse(params, response, startTime, false, debug);
541
+ return response;
542
+ }).catch((err: any) => {
543
+ captureGeminiError(params, err, startTime, debug);
544
+ throw err;
545
+ });
546
+ }
547
+ return result;
548
+ };
549
+ }
550
+
551
+ if (typeof models.generateContentStream === 'function') {
552
+ const origStream = models.generateContentStream.bind(models);
553
+ models.generateContentStream = function patchedStream(...args: any[]) {
554
+ const params = args[0] || {};
555
+ const startTime = performance.now();
556
+ const result = origStream(...args);
557
+ if (result && typeof result.then === 'function') {
558
+ return result.then((stream: any) => handleGeminiStream(stream, params, startTime, debug));
559
+ }
560
+ return result;
561
+ };
562
+ }
563
+ }
564
+
565
+ // Intercept the GoogleGenAI constructor to patch each instance's models
566
+ // Since we can't replace ES6 class constructors, we use a Proxy
567
+ const proxyHandler: ProxyHandler<any> = {
568
+ construct(target: any, args: any[], newTarget: any): object {
569
+ const instance = Reflect.construct(target, args, newTarget) as any;
570
+ if (instance.models) wrapModelsInstance(instance.models);
571
+ return instance as object;
572
+ }
573
+ };
574
+ const ProxiedGoogleGenAI = new Proxy(GoogleGenAI, proxyHandler);
575
+
576
+ // Replace on the module (try both export styles)
577
+ try { geminiModule.GoogleGenAI = ProxiedGoogleGenAI; } catch {}
578
+ try { if (geminiModule.default?.GoogleGenAI) geminiModule.default.GoogleGenAI = ProxiedGoogleGenAI; } catch {}
579
+
580
+ // Also patch the already-created probe instance (in case someone imported before us)
581
+ // This is a no-op since the probe is discarded.
582
+
583
+ if (debug) console.log('[trickle/llm] Patched Gemini SDK');
584
+ }
585
+ } catch (e: any) {
586
+ if (debug) console.log('[trickle/llm] Gemini patch probe failed:', e.message);
587
+ }
588
+ }
589
+
590
+ function captureGeminiResponse(params: any, response: any, startTime: number, isStream: boolean, debug: boolean): void {
591
+ const usage = response.usageMetadata || {};
592
+ const model = params.model || 'gemini-unknown';
593
+ const inputTokens = usage.promptTokenCount || 0;
594
+ const outputTokens = usage.candidatesTokenCount || 0;
595
+ const totalTokens = usage.totalTokenCount || inputTokens + outputTokens;
596
+
597
+ let outputText = '';
598
+ let finishReason = 'unknown';
599
+ try {
600
+ outputText = response.text || '';
601
+ } catch {
602
+ const candidates = response.candidates || [];
603
+ if (candidates[0]?.content?.parts?.[0]?.text) {
604
+ outputText = candidates[0].content.parts[0].text;
605
+ }
606
+ }
607
+ const candidates = response.candidates || [];
608
+ if (candidates[0]?.finishReason) finishReason = candidates[0].finishReason;
609
+
610
+ // Extract input preview from contents
611
+ let inputPreview = '';
612
+ const contents = params.contents;
613
+ if (typeof contents === 'string') {
614
+ inputPreview = truncate(contents);
615
+ } else if (Array.isArray(contents)) {
616
+ const last = contents[contents.length - 1];
617
+ if (typeof last === 'string') inputPreview = truncate(last);
618
+ else if (last?.parts?.[0]?.text) inputPreview = truncate(last.parts[0].text);
619
+ }
620
+
621
+ const event: LlmEvent = {
622
+ kind: 'llm_call', provider: 'gemini', model,
623
+ durationMs: round(performance.now() - startTime),
624
+ inputTokens, outputTokens, totalTokens,
625
+ estimatedCostUsd: estimateCost(model, inputTokens, outputTokens),
626
+ stream: isStream, finishReason,
627
+ temperature: params.config?.temperature,
628
+ maxTokens: params.config?.maxOutputTokens,
629
+ systemPrompt: typeof params.config?.systemInstruction === 'string'
630
+ ? truncate(params.config.systemInstruction, 200) : undefined,
631
+ inputPreview, outputPreview: truncate(outputText),
632
+ messageCount: Array.isArray(contents) ? contents.length : (contents ? 1 : 0),
633
+ toolUse: !!(params.config?.tools?.length || params.tools?.length),
634
+ timestamp: Date.now(),
635
+ };
636
+ writeLlmEvent(event);
637
+ if (debug) console.log(`[trickle/llm] Gemini: ${model} (${totalTokens} tokens, ${event.durationMs}ms)`);
638
+ }
639
+
640
+ function captureGeminiError(params: any, err: any, startTime: number, debug: boolean): void {
641
+ const model = params.model || 'gemini-unknown';
642
+ writeLlmEvent({
643
+ kind: 'llm_call', provider: 'gemini', model,
644
+ durationMs: round(performance.now() - startTime),
645
+ inputTokens: 0, outputTokens: 0, totalTokens: 0, estimatedCostUsd: 0,
646
+ stream: false, finishReason: 'error',
647
+ temperature: params.config?.temperature,
648
+ maxTokens: params.config?.maxOutputTokens,
649
+ inputPreview: typeof params.contents === 'string' ? truncate(params.contents) : '',
650
+ outputPreview: '', messageCount: 0,
651
+ toolUse: false, timestamp: Date.now(),
652
+ error: truncate(err?.message || String(err), 200),
653
+ });
654
+ }
655
+
656
+ async function handleGeminiStream(stream: any, params: any, startTime: number, debug: boolean): Promise<any> {
657
+ if (!stream || !stream[Symbol.asyncIterator]) return stream;
658
+
659
+ const chunks: string[] = [];
660
+ const origIterator = stream[Symbol.asyncIterator].bind(stream);
661
+ let lastUsage: any = null;
662
+
663
+ stream[Symbol.asyncIterator] = function () {
664
+ const iter = origIterator();
665
+ return {
666
+ async next() {
667
+ const result = await iter.next();
668
+ if (!result.done) {
669
+ const chunk = result.value;
670
+ try { if (chunk.text) chunks.push(chunk.text); } catch {}
671
+ if (chunk.usageMetadata) lastUsage = chunk.usageMetadata;
672
+ } else {
673
+ // Stream finished
674
+ const model = params.model || 'gemini-unknown';
675
+ const inputTokens = lastUsage?.promptTokenCount || 0;
676
+ const outputTokens = lastUsage?.candidatesTokenCount || 0;
677
+ writeLlmEvent({
678
+ kind: 'llm_call', provider: 'gemini', model,
679
+ durationMs: round(performance.now() - startTime),
680
+ inputTokens, outputTokens, totalTokens: inputTokens + outputTokens,
681
+ estimatedCostUsd: estimateCost(model, inputTokens, outputTokens),
682
+ stream: true, finishReason: 'stop',
683
+ temperature: params.config?.temperature,
684
+ maxTokens: params.config?.maxOutputTokens,
685
+ inputPreview: typeof params.contents === 'string' ? truncate(params.contents) : '',
686
+ outputPreview: truncate(chunks.join('')),
687
+ messageCount: 0, toolUse: false, timestamp: Date.now(),
688
+ });
689
+ if (debug) console.log(`[trickle/llm] Gemini stream: ${model} (${outputTokens} tokens)`);
690
+ }
691
+ return result;
692
+ },
693
+ return: iter.return?.bind(iter),
694
+ throw: iter.throw?.bind(iter),
695
+ };
696
+ };
697
+ return stream;
698
+ }
699
+
483
700
  // ────────────────────────────────────────────────────
484
701
  // Helpers
485
702
  // ────────────────────────────────────────────────────
@@ -0,0 +1,260 @@
1
+ /**
2
+ * MCP tool call observer — auto-instruments MCP client and server SDKs
3
+ * to capture tool invocations, arguments, responses, latency, and errors.
4
+ *
5
+ * Writes to .trickle/mcp.jsonl as:
6
+ * { "kind": "mcp_tool_call", "tool": "fetch", "direction": "outgoing",
7
+ * "durationMs": 234.5, "args": {...}, "result": "...", ... }
8
+ *
9
+ * Zero code changes needed — intercepted via Module._load hook.
10
+ */
11
+
12
+ import * as fs from 'fs';
13
+ import * as path from 'path';
14
+
15
// Lazily-resolved absolute path of the mcp.jsonl output file
// (set by getMcpFile() on first use, or reset by initMcpObserver()).
let mcpFile: string | null = null;
// Number of events written so far in this process.
let eventCount = 0;
// Hard cap on events per run, to bound file growth.
const MAX_MCP_EVENTS = 1000;
// Default maximum length for textual previews (see truncate()).
const TRUNCATE_LEN = 500;
19
+
20
+ function getMcpFile(): string {
21
+ if (mcpFile) return mcpFile;
22
+ const dir = process.env.TRICKLE_LOCAL_DIR || path.join(process.cwd(), '.trickle');
23
+ try { fs.mkdirSync(dir, { recursive: true }); } catch {}
24
+ mcpFile = path.join(dir, 'mcp.jsonl');
25
+ return mcpFile;
26
+ }
27
+
28
/** One captured MCP tool invocation, serialized as a line of mcp.jsonl. */
interface McpEvent {
    kind: 'mcp_tool_call';
    /** Tool name, or a synthetic marker such as '__list_tools'. */
    tool: string;
    /** 'outgoing' = this process called a tool via Client; 'incoming' = a handler we serve was invoked. */
    direction: 'outgoing' | 'incoming';
    /** Wall-clock duration in milliseconds, rounded to 2 decimal places. */
    durationMs: number;
    /** Sanitized/size-bounded invocation arguments (see sanitizeArgs). */
    args: unknown;
    /** Truncated textual preview of the result (see extractResultPreview). */
    resultPreview: string;
    isError: boolean;
    /** Present only when isError is true or the call threw. */
    errorMessage?: string;
    /** Epoch milliseconds when the event was recorded. */
    timestamp: number;
}
39
+
40
+ function writeMcpEvent(event: McpEvent): void {
41
+ if (eventCount >= MAX_MCP_EVENTS) return;
42
+ eventCount++;
43
+ try {
44
+ fs.appendFileSync(getMcpFile(), JSON.stringify(event) + '\n');
45
+ } catch {}
46
+ }
47
+
48
+ function truncate(s: string, len = TRUNCATE_LEN): string {
49
+ if (!s) return '';
50
+ return s.length > len ? s.substring(0, len) + '...' : s;
51
+ }
52
+
53
+ function sanitizeArgs(args: unknown): unknown {
54
+ if (args === null || args === undefined) return null;
55
+ try {
56
+ const s = JSON.stringify(args);
57
+ return s.length > 1000 ? JSON.parse(s.substring(0, 1000) + '"}') : args;
58
+ } catch { return String(args).substring(0, 200); }
59
+ }
60
+
61
+ function extractResultPreview(result: any): string {
62
+ if (!result) return '';
63
+ // MCP CallToolResult has .content array
64
+ if (result.content && Array.isArray(result.content)) {
65
+ const texts = result.content
66
+ .filter((c: any) => c.type === 'text' && c.text)
67
+ .map((c: any) => c.text);
68
+ return truncate(texts.join('\n'));
69
+ }
70
+ if (typeof result === 'string') return truncate(result);
71
+ try { return truncate(JSON.stringify(result)); } catch { return ''; }
72
+ }
73
+
74
+ // ────────────────────────────────────────────────────
75
+ // Client-side: patch Client.callTool
76
+ // ────────────────────────────────────────────────────
77
+
78
+ export function patchMcpClient(mcpModule: any, debug: boolean): void {
79
+ if (!mcpModule || mcpModule.__trickle_mcp_patched) return;
80
+ mcpModule.__trickle_mcp_patched = true;
81
+
82
+ // @modelcontextprotocol/sdk exports Client class
83
+ const ClientClass = mcpModule.Client;
84
+ if (!ClientClass) {
85
+ if (debug) console.log('[trickle/mcp] Client class not found in module');
86
+ return;
87
+ }
88
+
89
+ // Patch callTool on the prototype
90
+ const proto = ClientClass.prototype;
91
+ if (proto.callTool && !proto.callTool.__trickle_patched) {
92
+ const origCallTool = proto.callTool;
93
+ proto.callTool = async function patchedCallTool(this: any, ...args: any[]) {
94
+ const params = args[0] || {};
95
+ const toolName = typeof params === 'string' ? params : (params.name || 'unknown');
96
+ const toolArgs = typeof params === 'string' ? args[1] : (params.arguments || params.args);
97
+ const startTime = performance.now();
98
+
99
+ try {
100
+ const result = await origCallTool.apply(this, args);
101
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
102
+ writeMcpEvent({
103
+ kind: 'mcp_tool_call', tool: toolName, direction: 'outgoing',
104
+ durationMs, args: sanitizeArgs(toolArgs),
105
+ resultPreview: extractResultPreview(result),
106
+ isError: result?.isError || false,
107
+ errorMessage: result?.isError ? extractResultPreview(result) : undefined,
108
+ timestamp: Date.now(),
109
+ });
110
+ if (debug) console.log(`[trickle/mcp] callTool: ${toolName} (${durationMs}ms)`);
111
+ return result;
112
+ } catch (err: any) {
113
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
114
+ writeMcpEvent({
115
+ kind: 'mcp_tool_call', tool: toolName, direction: 'outgoing',
116
+ durationMs, args: sanitizeArgs(toolArgs),
117
+ resultPreview: '', isError: true,
118
+ errorMessage: truncate(err?.message || String(err), 200),
119
+ timestamp: Date.now(),
120
+ });
121
+ throw err;
122
+ }
123
+ };
124
+ proto.callTool.__trickle_patched = true;
125
+ if (debug) console.log('[trickle/mcp] Patched Client.callTool');
126
+ }
127
+
128
+ // Also patch listTools for discovery
129
+ if (proto.listTools && !proto.listTools.__trickle_patched) {
130
+ const origListTools = proto.listTools;
131
+ proto.listTools = async function patchedListTools(this: any, ...args: any[]) {
132
+ const startTime = performance.now();
133
+ const result = await origListTools.apply(this, args);
134
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
135
+ const toolCount = result?.tools?.length || 0;
136
+ writeMcpEvent({
137
+ kind: 'mcp_tool_call', tool: '__list_tools', direction: 'outgoing',
138
+ durationMs, args: null,
139
+ resultPreview: `${toolCount} tools available`,
140
+ isError: false, timestamp: Date.now(),
141
+ });
142
+ return result;
143
+ };
144
+ proto.listTools.__trickle_patched = true;
145
+ }
146
+ }
147
+
148
+ // ────────────────────────────────────────────────────
149
+ // Server-side: patch Server to wrap tool handlers
150
+ // ────────────────────────────────────────────────────
151
+
152
+ export function patchMcpServer(mcpModule: any, debug: boolean): void {
153
+ // Server is exported from @modelcontextprotocol/sdk
154
+ const ServerClass = mcpModule.Server || mcpModule.McpServer;
155
+ if (!ServerClass || ServerClass.__trickle_mcp_server_patched) return;
156
+ ServerClass.__trickle_mcp_server_patched = true;
157
+
158
+ const proto = ServerClass.prototype;
159
+
160
+ // Patch the .tool() registration method to wrap handlers
161
+ if (proto.tool && !proto.tool.__trickle_patched) {
162
+ const origTool = proto.tool;
163
+ proto.tool = function patchedTool(this: any, ...args: any[]) {
164
+ // tool(name, schema, handler) or tool(name, handler)
165
+ const toolName = typeof args[0] === 'string' ? args[0] : 'unknown';
166
+ const lastArg = args[args.length - 1];
167
+
168
+ if (typeof lastArg === 'function') {
169
+ const originalHandler = lastArg;
170
+ args[args.length - 1] = async function wrappedHandler(...handlerArgs: any[]) {
171
+ const startTime = performance.now();
172
+ try {
173
+ const result = await originalHandler.apply(this, handlerArgs);
174
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
175
+ writeMcpEvent({
176
+ kind: 'mcp_tool_call', tool: toolName, direction: 'incoming',
177
+ durationMs, args: sanitizeArgs(handlerArgs[0]),
178
+ resultPreview: extractResultPreview(result),
179
+ isError: result?.isError || false,
180
+ timestamp: Date.now(),
181
+ });
182
+ if (debug) console.log(`[trickle/mcp] tool handler: ${toolName} (${durationMs}ms)`);
183
+ return result;
184
+ } catch (err: any) {
185
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
186
+ writeMcpEvent({
187
+ kind: 'mcp_tool_call', tool: toolName, direction: 'incoming',
188
+ durationMs, args: sanitizeArgs(handlerArgs[0]),
189
+ resultPreview: '', isError: true,
190
+ errorMessage: truncate(err?.message || String(err), 200),
191
+ timestamp: Date.now(),
192
+ });
193
+ throw err;
194
+ }
195
+ };
196
+ }
197
+
198
+ return origTool.apply(this, args);
199
+ };
200
+ proto.tool.__trickle_patched = true;
201
+ if (debug) console.log('[trickle/mcp] Patched Server.tool');
202
+ }
203
+
204
+ // Patch setRequestHandler for lower-level interception
205
+ if (proto.setRequestHandler && !proto.setRequestHandler.__trickle_patched) {
206
+ const origSetHandler = proto.setRequestHandler;
207
+ proto.setRequestHandler = function patchedSetHandler(this: any, schema: any, handler: any) {
208
+ if (typeof handler === 'function') {
209
+ const origHandler = handler;
210
+ const capturedHandler = origHandler;
211
+ handler = async function wrappedHandler(this: any, ...args: any[]) {
212
+ const request = args[0];
213
+ const method = request?.method || schema?.method || 'unknown';
214
+ if (method === 'tools/call') {
215
+ const startTime = performance.now();
216
+ try {
217
+ const result = await capturedHandler.apply(this, args);
218
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
219
+ writeMcpEvent({
220
+ kind: 'mcp_tool_call', tool: request?.params?.name || 'unknown',
221
+ direction: 'incoming', durationMs,
222
+ args: sanitizeArgs(request?.params?.arguments),
223
+ resultPreview: extractResultPreview(result),
224
+ isError: result?.isError || false,
225
+ timestamp: Date.now(),
226
+ });
227
+ return result;
228
+ } catch (err: any) {
229
+ const durationMs = Math.round((performance.now() - startTime) * 100) / 100;
230
+ writeMcpEvent({
231
+ kind: 'mcp_tool_call', tool: request?.params?.name || 'unknown',
232
+ direction: 'incoming', durationMs,
233
+ args: sanitizeArgs(request?.params?.arguments),
234
+ resultPreview: '', isError: true,
235
+ errorMessage: truncate(err?.message || String(err), 200),
236
+ timestamp: Date.now(),
237
+ });
238
+ throw err;
239
+ }
240
+ }
241
+ return capturedHandler.apply(this, args);
242
+ };
243
+ }
244
+ return origSetHandler.call(this, schema, handler);
245
+ };
246
+ proto.setRequestHandler.__trickle_patched = true;
247
+ }
248
+ }
249
+
250
+ // ────────────────────────────────────────────────────
251
+ // Initialization
252
+ // ────────────────────────────────────────────────────
253
+
254
+ export function initMcpObserver(): void {
255
+ const dir = process.env.TRICKLE_LOCAL_DIR || path.join(process.cwd(), '.trickle');
256
+ try { fs.mkdirSync(dir, { recursive: true }); } catch {}
257
+ mcpFile = path.join(dir, 'mcp.jsonl');
258
+ try { fs.writeFileSync(mcpFile, ''); } catch {}
259
+ eventCount = 0;
260
+ }
@@ -39,6 +39,7 @@ import { instrumentExpress, trickleMiddleware } from './express';
39
39
  import { initVarTracer, traceVar } from './trace-var';
40
40
  import { initCallTrace } from './call-trace';
41
41
  import { initLlmObserver } from './llm-observer';
42
+ import { initMcpObserver } from './mcp-observer';
42
43
  import {
43
44
  findReassignments,
44
45
  findForLoopVars,
@@ -1231,6 +1232,9 @@ if (enabled) {
1231
1232
  // ── Hook 0b3: Initialize LLM observer ──
1232
1233
  initLlmObserver();
1233
1234
 
1235
+ // ── Hook 0b4: Initialize MCP observer ──
1236
+ initMcpObserver();
1237
+
1234
1238
  // ── Hook 0c: Capture environment snapshot ──
1235
1239
  try {
1236
1240
  const envDir = process.env.TRICKLE_LOCAL_DIR || path.join(process.cwd(), '.trickle');
@@ -1537,6 +1541,31 @@ if (enabled) {
1537
1541
  } catch { /* not critical */ }
1538
1542
  }
1539
1543
 
1544
+ // MCP SDK (client + server) — match any subpath import
1545
+ if (request.includes('@modelcontextprotocol/sdk') && !expressPatched.has('mcp-client') && exports.Client) {
1546
+ expressPatched.add('mcp-client');
1547
+ try {
1548
+ const { patchMcpClient } = require(path.join(__dirname, 'mcp-observer.js'));
1549
+ patchMcpClient(exports, debug);
1550
+ } catch { /* not critical */ }
1551
+ }
1552
+ if (request.includes('@modelcontextprotocol/sdk') && !expressPatched.has('mcp-server') && (exports.Server || exports.McpServer)) {
1553
+ expressPatched.add('mcp-server');
1554
+ try {
1555
+ const { patchMcpServer } = require(path.join(__dirname, 'mcp-observer.js'));
1556
+ patchMcpServer(exports, debug);
1557
+ } catch { /* not critical */ }
1558
+ }
1559
+
1560
+ // Google Gemini SDK
1561
+ if (request === '@google/genai' && !expressPatched.has('@google/genai')) {
1562
+ expressPatched.add('@google/genai');
1563
+ try {
1564
+ const { patchGemini } = require(path.join(__dirname, 'llm-observer.js'));
1565
+ patchGemini(exports, debug);
1566
+ } catch { /* not critical */ }
1567
+ }
1568
+
1540
1569
  // Resolve to absolute path for dedup — do this FIRST since bundlers like
1541
1570
  // tsx/esbuild may use path aliases (e.g., @config/env) that don't start
1542
1571
  // with './' or '/'. We need the resolved path to decide if it's user code.