observa-sdk 0.0.8 → 0.0.9
This diff shows the content of publicly available package versions as released to their public registries; it is provided for informational purposes only.
- package/README.md +300 -11
- package/dist/index.cjs +283 -7
- package/dist/index.d.cts +181 -3
- package/dist/index.d.ts +181 -3
- package/dist/index.js +283 -7
- package/package.json +1 -1
package/dist/index.d.ts
CHANGED
```diff
@@ -63,7 +63,64 @@ declare class Observa {
         userId?: string;
     }): string;
     /**
-     * Track
+     * Track an LLM call with full OTEL support
+     * CRITICAL: This is the primary method for tracking LLM calls with all SOTA parameters
+     */
+    trackLLMCall(options: {
+        model: string;
+        input?: string | null;
+        output?: string | null;
+        inputTokens?: number | null;
+        outputTokens?: number | null;
+        totalTokens?: number | null;
+        latencyMs: number;
+        timeToFirstTokenMs?: number | null;
+        streamingDurationMs?: number | null;
+        finishReason?: string | null;
+        responseId?: string | null;
+        systemFingerprint?: string | null;
+        cost?: number | null;
+        temperature?: number | null;
+        maxTokens?: number | null;
+        operationName?: "chat" | "text_completion" | "generate_content" | string | null;
+        providerName?: string | null;
+        responseModel?: string | null;
+        topK?: number | null;
+        topP?: number | null;
+        frequencyPenalty?: number | null;
+        presencePenalty?: number | null;
+        stopSequences?: string[] | null;
+        seed?: number | null;
+        inputCost?: number | null;
+        outputCost?: number | null;
+        inputMessages?: Array<{
+            role: string;
+            content?: string | any;
+            parts?: Array<{
+                type: string;
+                content: any;
+            }>;
+        }> | null;
+        outputMessages?: Array<{
+            role: string;
+            content?: string | any;
+            parts?: Array<{
+                type: string;
+                content: any;
+            }>;
+            finish_reason?: string;
+        }> | null;
+        systemInstructions?: Array<{
+            type: string;
+            content: string | any;
+        }> | null;
+        serverAddress?: string | null;
+        serverPort?: number | null;
+        conversationIdOtel?: string | null;
+        choiceCount?: number | null;
+    }): string;
+    /**
+     * Track a tool call with OTEL standardization
      */
     trackToolCall(options: {
         toolName: string;
```
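For orientation, here is a minimal usage sketch of the new `trackLLMCall` declaration above. The `observa` instance, model name, and metric values are illustrative assumptions, not part of the diff.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

const spanId: string = observa.trackLLMCall({
  model: "gpt-4o-mini",        // required
  latencyMs: 812,              // required
  inputTokens: 420,
  outputTokens: 96,
  temperature: 0.2,
  operationName: "chat",       // OTEL operation name; the implementation defaults to "chat"
  inputMessages: [{ role: "user", content: "Summarize this ticket." }],
  outputMessages: [{ role: "assistant", content: "Summary text", finish_reason: "stop" }],
});
// Returns the span id of the recorded llm_call event.
```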
```diff
@@ -72,9 +129,15 @@ declare class Observa {
         resultStatus: "success" | "error" | "timeout";
         latencyMs: number;
         errorMessage?: string;
+        operationName?: "execute_tool" | string | null;
+        toolType?: "function" | "extension" | "datastore" | string | null;
+        toolDescription?: string | null;
+        toolCallId?: string | null;
+        errorType?: string | null;
+        errorCategory?: string | null;
     }): string;
     /**
-     * Track a retrieval operation
+     * Track a retrieval operation with vector metadata enrichment
      */
     trackRetrieval(options: {
         contextIds?: string[];
```
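A hedged sketch of `trackToolCall` with the OTEL fields added in this release; names and values below are illustrative.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

observa.trackToolCall({
  toolName: "search_docs",
  resultStatus: "success",
  latencyMs: 143,
  // New in 0.0.9 (OTEL tool standardization):
  operationName: "execute_tool",
  toolType: "function",
  toolCallId: "call_abc123",   // illustrative provider-issued id
});
```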
```diff
@@ -82,9 +145,17 @@ declare class Observa {
         k?: number;
         similarityScores?: number[];
         latencyMs: number;
+        retrievalContext?: string | null;
+        embeddingModel?: string | null;
+        embeddingDimensions?: number | null;
+        vectorMetric?: "cosine" | "euclidean" | "dot_product" | string | null;
+        rerankScore?: number | null;
+        fusionMethod?: string | null;
+        deduplicationRemovedCount?: number | null;
+        qualityScore?: number | null;
     }): string;
     /**
-     * Track an error with
+     * Track an error with structured error classification
      */
     trackError(options: {
         errorType: string;
```
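A sketch of `trackRetrieval` using the vector-metadata fields added above; the embedding model and scores are illustrative.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

observa.trackRetrieval({
  latencyMs: 58,
  k: 5,
  similarityScores: [0.91, 0.87, 0.83, 0.8, 0.74],
  // New in 0.0.9 (retrieval enrichment):
  embeddingModel: "text-embedding-3-small",   // illustrative
  embeddingDimensions: 1536,
  vectorMetric: "cosine",
  rerankScore: 0.88,
});
```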
```diff
@@ -92,6 +163,8 @@ declare class Observa {
         stackTrace?: string;
         context?: Record<string, any>;
         error?: Error;
+        errorCategory?: string | null;
+        errorCode?: string | null;
     }): string;
     /**
      * Track user feedback
```
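A sketch of `trackError` with the structured classification fields; the category and code taxonomy shown here is assumed, not defined by the SDK.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

try {
  throw new Error("rate limit exceeded");
} catch (err) {
  observa.trackError({
    errorType: "RateLimitError",
    errorMessage: (err as Error).message,
    error: err as Error,
    // New in 0.0.9 (structured error classification):
    errorCategory: "provider",   // assumed taxonomy
    errorCode: "429",
  });
}
```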
```diff
@@ -119,6 +192,63 @@ declare class Observa {
         finalOutput?: string;
         outputLength?: number;
     }): string;
+    /**
+     * Track an embedding operation (TIER 1: Critical)
+     */
+    trackEmbedding(options: {
+        model: string;
+        dimensionCount?: number | null;
+        encodingFormats?: string[] | null;
+        inputTokens?: number | null;
+        outputTokens?: number | null;
+        latencyMs: number;
+        cost?: number | null;
+        inputText?: string | null;
+        inputHash?: string | null;
+        embeddings?: number[][] | null;
+        embeddingsHash?: string | null;
+        operationName?: "embeddings" | string | null;
+        providerName?: string | null;
+    }): string;
+    /**
+     * Track a vector database operation (TIER 3)
+     */
+    trackVectorDbOperation(options: {
+        operationType: "vector_search" | "index_upsert" | "delete" | string;
+        indexName?: string | null;
+        indexVersion?: string | null;
+        vectorDimensions?: number | null;
+        vectorMetric?: "cosine" | "euclidean" | "dot_product" | string | null;
+        resultsCount?: number | null;
+        scores?: number[] | null;
+        latencyMs: number;
+        cost?: number | null;
+        apiVersion?: string | null;
+        providerName?: string | null;
+    }): string;
+    /**
+     * Track a cache operation (TIER 3)
+     */
+    trackCacheOperation(options: {
+        cacheBackend?: "redis" | "in_memory" | "memcached" | string | null;
+        cacheKey?: string | null;
+        cacheNamespace?: string | null;
+        hitStatus: "hit" | "miss";
+        latencyMs: number;
+        savedCost?: number | null;
+        ttl?: number | null;
+        evictionInfo?: Record<string, any> | null;
+    }): string;
+    /**
+     * Track agent creation (TIER 3)
+     */
+    trackAgentCreate(options: {
+        agentName: string;
+        agentConfig?: Record<string, any> | null;
+        toolsBound?: string[] | null;
+        modelConfig?: Record<string, any> | null;
+        operationName?: "create_agent" | string | null;
+    }): string;
     /**
      * Execute a function within a span context (for nested operations)
      * This allows tool calls to be nested under LLM calls, etc.
```
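A sketch of the new tracking surfaces above, pairing a `trackEmbedding` call with the cache lookup that preceded it; backend names, keys, and numbers are illustrative.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

// TIER 3: record the cache check made before the embedding call.
observa.trackCacheOperation({
  cacheBackend: "redis",
  cacheKey: "emb:docs:1234",   // illustrative key
  hitStatus: "miss",
  latencyMs: 2,
});

// TIER 1: record the embedding call itself.
observa.trackEmbedding({
  model: "text-embedding-3-small",   // illustrative
  dimensionCount: 1536,
  inputTokens: 42,
  latencyMs: 35,
});
```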
````diff
@@ -150,6 +280,54 @@ declare class Observa {
      * Cleanup (call when shutting down)
      */
     end(): Promise<void>;
+    /**
+     * Observe OpenAI client - wraps client with automatic tracing
+     *
+     * @param client - OpenAI client instance
+     * @param options - Observation options (name, tags, userId, sessionId, redact)
+     * @returns Wrapped OpenAI client
+     *
+     * @example
+     * ```typescript
+     * import OpenAI from 'openai';
+     * const openai = new OpenAI({ apiKey: '...' });
+     * const wrapped = observa.observeOpenAI(openai, {
+     *   name: 'my-app',
+     *   redact: (data) => ({ ...data, messages: '[REDACTED]' })
+     * });
+     * ```
+     */
+    observeOpenAI(client: any, options?: {
+        name?: string;
+        tags?: string[];
+        userId?: string;
+        sessionId?: string;
+        redact?: (data: any) => any;
+    }): any;
+    /**
+     * Observe Anthropic client - wraps client with automatic tracing
+     *
+     * @param client - Anthropic client instance
+     * @param options - Observation options (name, tags, userId, sessionId, redact)
+     * @returns Wrapped Anthropic client
+     *
+     * @example
+     * ```typescript
+     * import Anthropic from '@anthropic-ai/sdk';
+     * const anthropic = new Anthropic({ apiKey: '...' });
+     * const wrapped = observa.observeAnthropic(anthropic, {
+     *   name: 'my-app',
+     *   redact: (data) => ({ ...data, messages: '[REDACTED]' })
+     * });
+     * ```
+     */
+    observeAnthropic(client: any, options?: {
+        name?: string;
+        tags?: string[];
+        userId?: string;
+        sessionId?: string;
+        redact?: (data: any) => any;
+    }): any;
     track(event: TrackEventInput, action: () => Promise<Response>, options?: {
         trackBlocking?: boolean;
     }): Promise<any>;
````
package/dist/index.js
CHANGED
```diff
@@ -383,7 +383,78 @@ var Observa = class {
     return this.currentTraceId;
   }
   /**
-   * Track
+   * Track an LLM call with full OTEL support
+   * CRITICAL: This is the primary method for tracking LLM calls with all SOTA parameters
+   */
+  trackLLMCall(options) {
+    const spanId = crypto.randomUUID();
+    let providerName = options.providerName;
+    if (!providerName && options.model) {
+      const modelLower = options.model.toLowerCase();
+      if (modelLower.includes("gpt") || modelLower.includes("openai")) {
+        providerName = "openai";
+      } else if (modelLower.includes("claude") || modelLower.includes("anthropic")) {
+        providerName = "anthropic";
+      } else if (modelLower.includes("gemini") || modelLower.includes("google")) {
+        providerName = "google";
+      } else if (modelLower.includes("vertex")) {
+        providerName = "gcp.vertex_ai";
+      } else if (modelLower.includes("bedrock") || modelLower.includes("aws")) {
+        providerName = "aws.bedrock";
+      }
+    }
+    const operationName = options.operationName || "chat";
+    this.addEvent({
+      event_type: "llm_call",
+      span_id: spanId,
+      attributes: {
+        llm_call: {
+          model: options.model,
+          input: options.input || null,
+          output: options.output || null,
+          input_tokens: options.inputTokens || null,
+          output_tokens: options.outputTokens || null,
+          total_tokens: options.totalTokens || null,
+          latency_ms: options.latencyMs,
+          time_to_first_token_ms: options.timeToFirstTokenMs || null,
+          streaming_duration_ms: options.streamingDurationMs || null,
+          finish_reason: options.finishReason || null,
+          response_id: options.responseId || null,
+          system_fingerprint: options.systemFingerprint || null,
+          cost: options.cost || null,
+          temperature: options.temperature || null,
+          max_tokens: options.maxTokens || null,
+          // TIER 1: OTEL Semantic Conventions
+          operation_name: operationName,
+          provider_name: providerName || null,
+          response_model: options.responseModel || null,
+          // TIER 2: Sampling parameters
+          top_k: options.topK || null,
+          top_p: options.topP || null,
+          frequency_penalty: options.frequencyPenalty || null,
+          presence_penalty: options.presencePenalty || null,
+          stop_sequences: options.stopSequences || null,
+          seed: options.seed || null,
+          // TIER 2: Structured cost tracking
+          input_cost: options.inputCost || null,
+          output_cost: options.outputCost || null,
+          // TIER 1: Structured message objects
+          input_messages: options.inputMessages || null,
+          output_messages: options.outputMessages || null,
+          system_instructions: options.systemInstructions || null,
+          // TIER 2: Server metadata
+          server_address: options.serverAddress || null,
+          server_port: options.serverPort || null,
+          // TIER 2: Conversation grouping
+          conversation_id_otel: options.conversationIdOtel || null,
+          choice_count: options.choiceCount || null
+        }
+      }
+    });
+    return spanId;
+  }
+  /**
+   * Track a tool call with OTEL standardization
    */
   trackToolCall(options) {
     const spanId = crypto.randomUUID();
```
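To make the substring-based provider inference above concrete, a short sketch of what gets recorded when `providerName` is omitted; the instance and model strings are illustrative.

```typescript
// Assumption: an initialized Observa client from this SDK is in scope.
declare const observa: any;

observa.trackLLMCall({ model: "claude-3-5-sonnet", latencyMs: 640 }); // provider_name: "anthropic"
observa.trackLLMCall({ model: "gemini-1.5-pro", latencyMs: 500 });    // provider_name: "google"
observa.trackLLMCall({ model: "custom-local-model", latencyMs: 90 }); // provider_name: null (no match)
```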
```diff
@@ -397,14 +468,21 @@ var Observa = class {
           result: options.result || null,
           result_status: options.resultStatus,
           latency_ms: options.latencyMs,
-          error_message: options.errorMessage || null
+          error_message: options.errorMessage || null,
+          // TIER 2: OTEL Tool Standardization
+          operation_name: options.operationName || "execute_tool",
+          tool_type: options.toolType || null,
+          tool_description: options.toolDescription || null,
+          tool_call_id: options.toolCallId || null,
+          error_type: options.errorType || null,
+          error_category: options.errorCategory || null
         }
       }
     });
     return spanId;
   }
   /**
-   * Track a retrieval operation
+   * Track a retrieval operation with vector metadata enrichment
    */
   trackRetrieval(options) {
     const spanId = crypto.randomUUID();
@@ -418,14 +496,23 @@ var Observa = class {
           k: options.k || null,
           top_k: options.k || null,
           similarity_scores: options.similarityScores || null,
-          latency_ms: options.latencyMs
+          latency_ms: options.latencyMs,
+          // TIER 2: Retrieval enrichment
+          retrieval_context: options.retrievalContext || null,
+          embedding_model: options.embeddingModel || null,
+          embedding_dimensions: options.embeddingDimensions || null,
+          vector_metric: options.vectorMetric || null,
+          rerank_score: options.rerankScore || null,
+          fusion_method: options.fusionMethod || null,
+          deduplication_removed_count: options.deduplicationRemovedCount || null,
+          quality_score: options.qualityScore || null
         }
       }
     });
     return spanId;
   }
   /**
-   * Track an error with
+   * Track an error with structured error classification
    */
   trackError(options) {
     const spanId = crypto.randomUUID();
@@ -441,7 +528,10 @@ var Observa = class {
           error_type: options.errorType,
           error_message: options.errorMessage,
           stack_trace: stackTrace || null,
-          context: options.context || null
+          context: options.context || null,
+          // TIER 2: Structured error classification
+          error_category: options.errorCategory || null,
+          error_code: options.errorCode || null
         }
       }
     });
```
```diff
@@ -495,6 +585,114 @@ var Observa = class {
     });
     return spanId;
   }
+  /**
+   * Track an embedding operation (TIER 1: Critical)
+   */
+  trackEmbedding(options) {
+    const spanId = crypto.randomUUID();
+    let providerName = options.providerName;
+    if (!providerName && options.model) {
+      const modelLower = options.model.toLowerCase();
+      if (modelLower.includes("text-embedding") || modelLower.includes("openai")) {
+        providerName = "openai";
+      } else if (modelLower.includes("textembedding") || modelLower.includes("google")) {
+        providerName = "google";
+      } else if (modelLower.includes("vertex")) {
+        providerName = "gcp.vertex_ai";
+      }
+    }
+    this.addEvent({
+      event_type: "embedding",
+      span_id: spanId,
+      attributes: {
+        embedding: {
+          model: options.model,
+          dimension_count: options.dimensionCount || null,
+          encoding_formats: options.encodingFormats || null,
+          input_tokens: options.inputTokens || null,
+          output_tokens: options.outputTokens || null,
+          latency_ms: options.latencyMs,
+          cost: options.cost || null,
+          input_text: options.inputText || null,
+          input_hash: options.inputHash || null,
+          embeddings: options.embeddings || null,
+          embeddings_hash: options.embeddingsHash || null,
+          operation_name: options.operationName || "embeddings",
+          provider_name: providerName || null
+        }
+      }
+    });
+    return spanId;
+  }
+  /**
+   * Track a vector database operation (TIER 3)
+   */
+  trackVectorDbOperation(options) {
+    const spanId = crypto.randomUUID();
+    this.addEvent({
+      event_type: "vector_db_operation",
+      span_id: spanId,
+      attributes: {
+        vector_db_operation: {
+          operation_type: options.operationType,
+          index_name: options.indexName || null,
+          index_version: options.indexVersion || null,
+          vector_dimensions: options.vectorDimensions || null,
+          vector_metric: options.vectorMetric || null,
+          results_count: options.resultsCount || null,
+          scores: options.scores || null,
+          latency_ms: options.latencyMs,
+          cost: options.cost || null,
+          api_version: options.apiVersion || null,
+          provider_name: options.providerName || null
+        }
+      }
+    });
+    return spanId;
+  }
+  /**
+   * Track a cache operation (TIER 3)
+   */
+  trackCacheOperation(options) {
+    const spanId = crypto.randomUUID();
+    this.addEvent({
+      event_type: "cache_operation",
+      span_id: spanId,
+      attributes: {
+        cache_operation: {
+          cache_backend: options.cacheBackend || null,
+          cache_key: options.cacheKey || null,
+          cache_namespace: options.cacheNamespace || null,
+          hit_status: options.hitStatus,
+          latency_ms: options.latencyMs,
+          saved_cost: options.savedCost || null,
+          ttl: options.ttl || null,
+          eviction_info: options.evictionInfo || null
+        }
+      }
+    });
+    return spanId;
+  }
+  /**
+   * Track agent creation (TIER 3)
+   */
+  trackAgentCreate(options) {
+    const spanId = crypto.randomUUID();
+    this.addEvent({
+      event_type: "agent_create",
+      span_id: spanId,
+      attributes: {
+        agent_create: {
+          agent_name: options.agentName,
+          agent_config: options.agentConfig || null,
+          tools_bound: options.toolsBound || null,
+          model_config: options.modelConfig || null,
+          operation_name: options.operationName || "create_agent"
+        }
+      }
+    });
+    return spanId;
+  }
   /**
    * Execute a function within a span context (for nested operations)
    * This allows tool calls to be nested under LLM calls, etc.
```
````diff
@@ -595,6 +793,21 @@ var Observa = class {
     });
     if (trace.model) {
       const llmSpanId = crypto.randomUUID();
+      let providerName = null;
+      if (trace.model) {
+        const modelLower = trace.model.toLowerCase();
+        if (modelLower.includes("gpt") || modelLower.includes("openai")) {
+          providerName = "openai";
+        } else if (modelLower.includes("claude") || modelLower.includes("anthropic")) {
+          providerName = "anthropic";
+        } else if (modelLower.includes("gemini") || modelLower.includes("google")) {
+          providerName = "google";
+        } else if (modelLower.includes("vertex")) {
+          providerName = "gcp.vertex_ai";
+        } else if (modelLower.includes("bedrock") || modelLower.includes("aws")) {
+          providerName = "aws.bedrock";
+        }
+      }
       events.push({
         ...baseEvent,
         span_id: llmSpanId,
@@ -615,8 +828,13 @@ var Observa = class {
           finish_reason: trace.finishReason || null,
           response_id: trace.responseId || null,
           system_fingerprint: trace.systemFingerprint || null,
-          cost: null
+          cost: null,
           // Cost calculation handled by backend
+          // TIER 1: OTEL Semantic Conventions (auto-inferred)
+          operation_name: "chat",
+          // Default for legacy track() method
+          provider_name: providerName
+          // Other OTEL fields can be added via trackLLMCall() method
         }
       }
     });
@@ -708,6 +926,64 @@ var Observa = class {
     }
     await this.flush();
   }
+  /**
+   * Observe OpenAI client - wraps client with automatic tracing
+   *
+   * @param client - OpenAI client instance
+   * @param options - Observation options (name, tags, userId, sessionId, redact)
+   * @returns Wrapped OpenAI client
+   *
+   * @example
+   * ```typescript
+   * import OpenAI from 'openai';
+   * const openai = new OpenAI({ apiKey: '...' });
+   * const wrapped = observa.observeOpenAI(openai, {
+   *   name: 'my-app',
+   *   redact: (data) => ({ ...data, messages: '[REDACTED]' })
+   * });
+   * ```
+   */
+  observeOpenAI(client, options) {
+    try {
+      const requireFn = globalThis.require || ((module) => {
+        throw new Error("require is not available");
+      });
+      const { observeOpenAI: observeOpenAIFn } = requireFn("./instrumentation/openai");
+      return observeOpenAIFn(client, { ...options, observa: this });
+    } catch (error) {
+      console.error("[Observa] Failed to load OpenAI wrapper:", error);
+      return client;
+    }
+  }
+  /**
+   * Observe Anthropic client - wraps client with automatic tracing
+   *
+   * @param client - Anthropic client instance
+   * @param options - Observation options (name, tags, userId, sessionId, redact)
+   * @returns Wrapped Anthropic client
+   *
+   * @example
+   * ```typescript
+   * import Anthropic from '@anthropic-ai/sdk';
+   * const anthropic = new Anthropic({ apiKey: '...' });
+   * const wrapped = observa.observeAnthropic(anthropic, {
+   *   name: 'my-app',
+   *   redact: (data) => ({ ...data, messages: '[REDACTED]' })
+   * });
+   * ```
+   */
+  observeAnthropic(client, options) {
+    try {
+      const requireFn = globalThis.require || ((module) => {
+        throw new Error("require is not available");
+      });
+      const { observeAnthropic: observeAnthropicFn } = requireFn("./instrumentation/anthropic");
+      return observeAnthropicFn(client, { ...options, observa: this });
+    } catch (error) {
+      console.error("[Observa] Failed to load Anthropic wrapper:", error);
+      return client;
+    }
+  }
   async track(event, action, options) {
     if (this.sampleRate < 1 && Math.random() > this.sampleRate) {
       return action();
````
package/package.json
CHANGED