@mynitorai/sdk 0.1.7 → 0.1.9

This diff shows the changes between publicly released versions of this package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -4,6 +4,7 @@
4
4
  */
5
5
  export interface MyNitorConfig {
6
6
  apiKey: string;
7
+ environment?: string;
7
8
  endpoint?: string;
8
9
  }
9
10
  export declare class MyNitor {
@@ -15,7 +16,7 @@ export declare class MyNitor {
15
16
  private setupAutoFlush;
16
17
  static init(config: MyNitorConfig): MyNitor;
17
18
  /**
18
- * Automatically detect and wrap AI libraries like OpenAI
19
+ * Automatically detect and wrap AI libraries: OpenAI, Anthropic, and Google Gemini
19
20
  */
20
21
  instrument(): void;
21
22
  /**
@@ -27,5 +28,7 @@ export declare class MyNitor {
27
28
  private getCallSite;
28
29
  private sendEvent;
29
30
  private wrapOpenAI;
31
+ private wrapAnthropic;
32
+ private wrapGemini;
30
33
  }
31
34
  export declare const init: typeof MyNitor.init;
package/dist/index.js CHANGED
@@ -5,11 +5,14 @@
5
5
  */
6
6
  Object.defineProperty(exports, "__esModule", { value: true });
7
7
  exports.init = exports.MyNitor = void 0;
8
+ // Symbol to prevent double-patching (Idempotency)
9
+ const WRAPPED_MARKER = Symbol('mynitor_wrapped');
8
10
  class MyNitor {
9
11
  constructor(config) {
10
12
  this.isInstrumented = false;
11
13
  this.pendingPromises = new Set();
12
14
  this.config = {
15
+ environment: 'production',
13
16
  endpoint: 'https://app.mynitor.ai/api/v1/events',
14
17
  ...config
15
18
  };
@@ -21,7 +24,6 @@ class MyNitor {
21
24
  process.env.NETLIFY ||
22
25
  process.env.FUNCTIONS_WORKER_RUNTIME);
23
26
  if (!isServerless && typeof process !== 'undefined' && typeof process.on === 'function') {
24
- // Local script or long-running process
25
27
  process.on('beforeExit', async () => {
26
28
  await this.flush();
27
29
  });
@@ -37,15 +39,16 @@ class MyNitor {
37
39
  return MyNitor.instance;
38
40
  }
39
41
  /**
40
- * Automatically detect and wrap AI libraries like OpenAI
42
+ * Automatically detect and wrap AI libraries: OpenAI, Anthropic, and Google Gemini
41
43
  */
42
44
  instrument() {
43
- // We do NOT check this.isInstrumented here.
44
- // We must check the actual OpenAI object every time,
45
- // because test runners may have reloaded the module.
45
+ if (this.isInstrumented)
46
+ return;
46
47
  this.wrapOpenAI();
48
+ this.wrapAnthropic();
49
+ this.wrapGemini();
47
50
  this.isInstrumented = true;
48
- console.log('🚀 MyNitor: Auto-instrumentation active.');
51
+ console.log('🚀 MyNitor: Universal Auto-instrumentation active.');
49
52
  }
50
53
  /**
51
54
  * Waits for all pending network requests to complete.
@@ -64,12 +67,8 @@ class MyNitor {
64
67
  try {
65
68
  const err = new Error();
66
69
  const stack = err.stack?.split('\n') || [];
67
- // Look for the frame that called the LLM method
68
- // Stack usually: Error -> getCallSite -> wrapOpenAI wrapper -> USER CODE
69
- // We iterate to find the first frame NOT in MyNitor SDK
70
70
  for (const line of stack) {
71
71
  if (!line.includes('mynitor') && !line.includes('Error') && line.includes('/')) {
72
- // Typical format: " at Object.myFunction (/path/to/file.ts:10:5)"
73
72
  const match = line.match(/at\s+(?:(.+?)\s+\()?(.*?):(\d+):(\d+)\)?/);
74
73
  if (match) {
75
74
  const func = match[1] || 'anonymous';
@@ -85,15 +84,11 @@ class MyNitor {
85
84
  }
86
85
  }
87
86
  }
88
- catch (e) {
89
- // fail safe
90
- }
87
+ catch (e) { }
91
88
  return { file: 'unknown', line: 0, functionName: 'unknown', workflowGuess: 'default-workflow' };
92
89
  }
93
90
  async sendEvent(payload) {
94
91
  try {
95
- // Fire and forget
96
- // Fire and forget (but track)
97
92
  const promise = fetch(this.config.endpoint, {
98
93
  method: 'POST',
99
94
  headers: {
@@ -102,6 +97,7 @@ class MyNitor {
102
97
  },
103
98
  body: JSON.stringify({
104
99
  ...payload,
100
+ environment: this.config.environment,
105
101
  event_version: '1.0',
106
102
  timestamp: new Date().toISOString()
107
103
  })
@@ -117,22 +113,22 @@ class MyNitor {
117
113
  }
118
114
  wrapOpenAI() {
119
115
  try {
120
- const OpenAI = require('openai');
116
+ let OpenAI = require('openai');
117
+ if (OpenAI && OpenAI.default)
118
+ OpenAI = OpenAI.default;
121
119
  if (!OpenAI || !OpenAI.OpenAI)
122
120
  return;
123
- // Idempotency: Check if already patched on THIS specific instance of the module
124
- // This prevents "zombie singleton" issues in test runners that reload modules
125
- if (OpenAI.OpenAI.Chat.Completions.prototype.create._isMynitorWrapped) {
121
+ const target = OpenAI.OpenAI.Chat.Completions.prototype;
122
+ if (target[WRAPPED_MARKER])
126
123
  return;
127
- }
128
124
  const self = this;
129
- const originalChatCreate = OpenAI.OpenAI.Chat.Completions.prototype.create;
130
- const wrapped = async function (...args) {
125
+ const originalCreate = target.create;
126
+ target.create = async function (...args) {
131
127
  const start = Date.now();
132
128
  const body = args[0];
133
129
  const callsite = self.getCallSite();
134
130
  try {
135
- const result = await originalChatCreate.apply(this, args);
131
+ const result = await originalCreate.apply(this, args);
136
132
  const end = Date.now();
137
133
  self.sendEvent({
138
134
  request_id: result.id || `req_${Date.now()}`,
@@ -167,9 +163,119 @@ class MyNitor {
167
163
  throw error;
168
164
  }
169
165
  };
170
- // Mark it so we don't wrap it twice on the same module instance
171
- wrapped._isMynitorWrapped = true;
172
- OpenAI.OpenAI.Chat.Completions.prototype.create = wrapped;
166
+ target[WRAPPED_MARKER] = true;
167
+ }
168
+ catch (e) { }
169
+ }
170
+ wrapAnthropic() {
171
+ try {
172
+ let Anthropic = require('@anthropic-ai/sdk');
173
+ if (Anthropic && Anthropic.default)
174
+ Anthropic = Anthropic.default;
175
+ if (!Anthropic || !Anthropic.Messages)
176
+ return;
177
+ const target = Anthropic.Messages.prototype;
178
+ if (target[WRAPPED_MARKER])
179
+ return;
180
+ const self = this;
181
+ const originalCreate = target.create;
182
+ target.create = async function (...args) {
183
+ const start = Date.now();
184
+ const body = args[0];
185
+ const callsite = self.getCallSite();
186
+ try {
187
+ const result = await originalCreate.apply(this, args);
188
+ const end = Date.now();
189
+ self.sendEvent({
190
+ request_id: result.id || `ant_${Date.now()}`,
191
+ model: result.model || body.model,
192
+ provider: 'anthropic',
193
+ agent: 'default-agent',
194
+ workflow: callsite.workflowGuess,
195
+ file: callsite.file,
196
+ function_name: callsite.functionName,
197
+ line_number: callsite.line,
198
+ input_tokens: result.usage?.input_tokens || 0,
199
+ output_tokens: result.usage?.output_tokens || 0,
200
+ latency_ms: end - start,
201
+ status: 'success'
202
+ });
203
+ return result;
204
+ }
205
+ catch (error) {
206
+ const end = Date.now();
207
+ self.sendEvent({
208
+ request_id: `err_ant_${Date.now()}`,
209
+ model: body?.model || 'unknown',
210
+ provider: 'anthropic',
211
+ agent: 'default-agent',
212
+ workflow: callsite.workflowGuess,
213
+ file: callsite.file,
214
+ function_name: callsite.functionName,
215
+ latency_ms: end - start,
216
+ status: 'error',
217
+ error_type: error?.constructor?.name || 'Error'
218
+ });
219
+ throw error;
220
+ }
221
+ };
222
+ target[WRAPPED_MARKER] = true;
223
+ }
224
+ catch (e) { }
225
+ }
226
+ wrapGemini() {
227
+ try {
228
+ let GoogleGenAI = require('@google/generative-ai');
229
+ if (GoogleGenAI && GoogleGenAI.default)
230
+ GoogleGenAI = GoogleGenAI.default;
231
+ if (!GoogleGenAI || !GoogleGenAI.GenerativeModel)
232
+ return;
233
+ const target = GoogleGenAI.GenerativeModel.prototype;
234
+ if (target[WRAPPED_MARKER])
235
+ return;
236
+ const self = this;
237
+ const originalGenerate = target.generateContent;
238
+ target.generateContent = async function (...args) {
239
+ const start = Date.now();
240
+ const callsite = self.getCallSite();
241
+ try {
242
+ const result = await originalGenerate.apply(this, args);
243
+ const end = Date.now();
244
+ const metadata = result.response?.usageMetadata;
245
+ self.sendEvent({
246
+ request_id: `gem_${Date.now()}`,
247
+ model: this.model || 'gemini',
248
+ provider: 'google',
249
+ agent: 'default-agent',
250
+ workflow: callsite.workflowGuess,
251
+ file: callsite.file,
252
+ function_name: callsite.functionName,
253
+ line_number: callsite.line,
254
+ input_tokens: metadata?.promptTokenCount || 0,
255
+ output_tokens: metadata?.candidatesTokenCount || 0,
256
+ latency_ms: end - start,
257
+ status: 'success'
258
+ });
259
+ return result;
260
+ }
261
+ catch (error) {
262
+ const end = Date.now();
263
+ self.sendEvent({
264
+ request_id: `err_gem_${Date.now()}`,
265
+ model: this.model || 'gemini',
266
+ provider: 'google',
267
+ agent: 'default-agent',
268
+ workflow: callsite.workflowGuess,
269
+ file: callsite.file,
270
+ function_name: callsite.functionName,
271
+ latency_ms: end - start,
272
+ status: 'error',
273
+ error_type: error?.constructor?.name || 'Error'
274
+ });
275
+ throw error;
276
+ }
277
+ };
278
+ target[WRAPPED_MARKER] = true;
173
279
  }
174
280
  catch (e) { }
175
281
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mynitorai/sdk",
3
- "version": "0.1.7",
3
+ "version": "0.1.9",
4
4
  "description": "Production safety and observability for AI systems.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -30,4 +30,4 @@
30
30
  "openai": "^4.0.0",
31
31
  "@types/node": "^20.0.0"
32
32
  }
33
- }
33
+ }