openlit 1.10.0 → 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -1
- package/dist/config.d.ts +12 -4
- package/dist/config.js +7 -17
- package/dist/config.js.map +1 -1
- package/dist/evals/llm/anthropic.js +10 -6
- package/dist/evals/llm/anthropic.js.map +1 -1
- package/dist/evals/llm/openai.js +9 -5
- package/dist/evals/llm/openai.js.map +1 -1
- package/dist/features/__tests__/rule-engine.test.d.ts +1 -0
- package/dist/features/__tests__/rule-engine.test.js +146 -0
- package/dist/features/__tests__/rule-engine.test.js.map +1 -0
- package/dist/features/base.d.ts +2 -0
- package/dist/features/base.js +2 -0
- package/dist/features/base.js.map +1 -1
- package/dist/features/rule-engine.d.ts +6 -0
- package/dist/features/rule-engine.js +60 -0
- package/dist/features/rule-engine.js.map +1 -0
- package/dist/features/vault.js +1 -1
- package/dist/features/vault.js.map +1 -1
- package/dist/helpers.d.ts +93 -1
- package/dist/helpers.js +270 -8
- package/dist/helpers.js.map +1 -1
- package/dist/index.d.ts +6 -5
- package/dist/index.js +95 -50
- package/dist/index.js.map +1 -1
- package/dist/instrumentation/__tests__/anthropic-wrapper.test.js +215 -27
- package/dist/instrumentation/__tests__/anthropic-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/base-wrapper.test.js +19 -23
- package/dist/instrumentation/__tests__/base-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/bedrock-trace-comparison.test.d.ts +1 -0
- package/dist/instrumentation/__tests__/bedrock-trace-comparison.test.js +422 -0
- package/dist/instrumentation/__tests__/bedrock-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/__tests__/chroma-trace-comparison.test.js +1 -1
- package/dist/instrumentation/__tests__/chroma-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/cohere-wrapper.test.js +150 -25
- package/dist/instrumentation/__tests__/cohere-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/google-ai-trace-comparison.test.js +152 -33
- package/dist/instrumentation/__tests__/google-ai-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/groq-trace-comparison.test.js +391 -45
- package/dist/instrumentation/__tests__/groq-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/huggingface-trace-comparison.test.d.ts +2 -2
- package/dist/instrumentation/__tests__/huggingface-trace-comparison.test.js +323 -31
- package/dist/instrumentation/__tests__/huggingface-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/langchain-wrapper.test.d.ts +1 -0
- package/dist/instrumentation/__tests__/langchain-wrapper.test.js +282 -0
- package/dist/instrumentation/__tests__/langchain-wrapper.test.js.map +1 -0
- package/dist/instrumentation/__tests__/milvus-trace-comparison.test.js +1 -1
- package/dist/instrumentation/__tests__/milvus-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.d.ts +0 -3
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.js +275 -68
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/openai-wrapper.test.js +7 -9
- package/dist/instrumentation/__tests__/openai-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/qdrant-trace-comparison.test.js +1 -1
- package/dist/instrumentation/__tests__/qdrant-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/replicate-trace-comparison.test.d.ts +2 -1
- package/dist/instrumentation/__tests__/replicate-trace-comparison.test.js +209 -21
- package/dist/instrumentation/__tests__/replicate-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/together-trace-comparison.test.js +231 -51
- package/dist/instrumentation/__tests__/together-trace-comparison.test.js.map +1 -1
- package/dist/instrumentation/__tests__/vercel-ai-trace-comparison.test.d.ts +8 -0
- package/dist/instrumentation/__tests__/vercel-ai-trace-comparison.test.js +446 -0
- package/dist/instrumentation/__tests__/vercel-ai-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/anthropic/index.d.ts +2 -3
- package/dist/instrumentation/anthropic/index.js.map +1 -1
- package/dist/instrumentation/anthropic/wrapper.d.ts +1 -3
- package/dist/instrumentation/anthropic/wrapper.js +211 -91
- package/dist/instrumentation/anthropic/wrapper.js.map +1 -1
- package/dist/instrumentation/azure-ai-inference/index.d.ts +11 -0
- package/dist/instrumentation/azure-ai-inference/index.js +76 -0
- package/dist/instrumentation/azure-ai-inference/index.js.map +1 -0
- package/dist/instrumentation/azure-ai-inference/wrapper.d.ts +42 -0
- package/dist/instrumentation/azure-ai-inference/wrapper.js +515 -0
- package/dist/instrumentation/azure-ai-inference/wrapper.js.map +1 -0
- package/dist/instrumentation/base-wrapper.d.ts +2 -1
- package/dist/instrumentation/base-wrapper.js +35 -23
- package/dist/instrumentation/base-wrapper.js.map +1 -1
- package/dist/instrumentation/bedrock/wrapper.d.ts +21 -3
- package/dist/instrumentation/bedrock/wrapper.js +318 -265
- package/dist/instrumentation/bedrock/wrapper.js.map +1 -1
- package/dist/instrumentation/chroma/wrapper.js +1 -1
- package/dist/instrumentation/chroma/wrapper.js.map +1 -1
- package/dist/instrumentation/claude-agent-sdk/index.d.ts +23 -0
- package/dist/instrumentation/claude-agent-sdk/index.js +83 -0
- package/dist/instrumentation/claude-agent-sdk/index.js.map +1 -0
- package/dist/instrumentation/claude-agent-sdk/wrapper.d.ts +13 -0
- package/dist/instrumentation/claude-agent-sdk/wrapper.js +1031 -0
- package/dist/instrumentation/claude-agent-sdk/wrapper.js.map +1 -0
- package/dist/instrumentation/cohere/index.d.ts +2 -3
- package/dist/instrumentation/cohere/index.js.map +1 -1
- package/dist/instrumentation/cohere/wrapper.d.ts +1 -1
- package/dist/instrumentation/cohere/wrapper.js +215 -56
- package/dist/instrumentation/cohere/wrapper.js.map +1 -1
- package/dist/instrumentation/google-adk/index.d.ts +57 -0
- package/dist/instrumentation/google-adk/index.js +371 -0
- package/dist/instrumentation/google-adk/index.js.map +1 -0
- package/dist/instrumentation/google-adk/utils.d.ts +45 -0
- package/dist/instrumentation/google-adk/utils.js +663 -0
- package/dist/instrumentation/google-adk/utils.js.map +1 -0
- package/dist/instrumentation/google-adk/wrapper.d.ts +11 -0
- package/dist/instrumentation/google-adk/wrapper.js +391 -0
- package/dist/instrumentation/google-adk/wrapper.js.map +1 -0
- package/dist/instrumentation/google-ai/wrapper.d.ts +7 -4
- package/dist/instrumentation/google-ai/wrapper.js +197 -61
- package/dist/instrumentation/google-ai/wrapper.js.map +1 -1
- package/dist/instrumentation/groq/wrapper.js +137 -65
- package/dist/instrumentation/groq/wrapper.js.map +1 -1
- package/dist/instrumentation/huggingface/wrapper.js +241 -39
- package/dist/instrumentation/huggingface/wrapper.js.map +1 -1
- package/dist/instrumentation/index.d.ts +2 -2
- package/dist/instrumentation/index.js +64 -6
- package/dist/instrumentation/index.js.map +1 -1
- package/dist/instrumentation/langchain/index.d.ts +0 -7
- package/dist/instrumentation/langchain/index.js +2 -20
- package/dist/instrumentation/langchain/index.js.map +1 -1
- package/dist/instrumentation/langchain/wrapper.d.ts +35 -0
- package/dist/instrumentation/langchain/wrapper.js +1098 -184
- package/dist/instrumentation/langchain/wrapper.js.map +1 -1
- package/dist/instrumentation/langgraph/index.d.ts +12 -0
- package/dist/instrumentation/langgraph/index.js +99 -0
- package/dist/instrumentation/langgraph/index.js.map +1 -0
- package/dist/instrumentation/langgraph/wrapper.d.ts +20 -0
- package/dist/instrumentation/langgraph/wrapper.js +619 -0
- package/dist/instrumentation/langgraph/wrapper.js.map +1 -0
- package/dist/instrumentation/llamaindex/index.d.ts +31 -6
- package/dist/instrumentation/llamaindex/index.js +180 -61
- package/dist/instrumentation/llamaindex/index.js.map +1 -1
- package/dist/instrumentation/llamaindex/wrapper.d.ts +15 -3
- package/dist/instrumentation/llamaindex/wrapper.js +670 -179
- package/dist/instrumentation/llamaindex/wrapper.js.map +1 -1
- package/dist/instrumentation/milvus/wrapper.js +1 -1
- package/dist/instrumentation/milvus/wrapper.js.map +1 -1
- package/dist/instrumentation/mistral/wrapper.js +154 -79
- package/dist/instrumentation/mistral/wrapper.js.map +1 -1
- package/dist/instrumentation/ollama/index.js +33 -4
- package/dist/instrumentation/ollama/index.js.map +1 -1
- package/dist/instrumentation/ollama/wrapper.d.ts +28 -2
- package/dist/instrumentation/ollama/wrapper.js +432 -48
- package/dist/instrumentation/ollama/wrapper.js.map +1 -1
- package/dist/instrumentation/openai/index.d.ts +2 -3
- package/dist/instrumentation/openai/index.js.map +1 -1
- package/dist/instrumentation/openai/wrapper.js +293 -194
- package/dist/instrumentation/openai/wrapper.js.map +1 -1
- package/dist/instrumentation/openai-agents/index.d.ts +20 -0
- package/dist/instrumentation/openai-agents/index.js +174 -0
- package/dist/instrumentation/openai-agents/index.js.map +1 -0
- package/dist/instrumentation/openai-agents/processor.d.ts +35 -0
- package/dist/instrumentation/openai-agents/processor.js +249 -0
- package/dist/instrumentation/openai-agents/processor.js.map +1 -0
- package/dist/instrumentation/openai-agents/utils.d.ts +20 -0
- package/dist/instrumentation/openai-agents/utils.js +624 -0
- package/dist/instrumentation/openai-agents/utils.js.map +1 -0
- package/dist/instrumentation/pinecone/wrapper.js +2 -2
- package/dist/instrumentation/pinecone/wrapper.js.map +1 -1
- package/dist/instrumentation/qdrant/wrapper.js +1 -1
- package/dist/instrumentation/qdrant/wrapper.js.map +1 -1
- package/dist/instrumentation/replicate/wrapper.js +103 -21
- package/dist/instrumentation/replicate/wrapper.js.map +1 -1
- package/dist/instrumentation/strands/index.d.ts +21 -0
- package/dist/instrumentation/strands/index.js +83 -0
- package/dist/instrumentation/strands/index.js.map +1 -0
- package/dist/instrumentation/strands/processor.d.ts +45 -0
- package/dist/instrumentation/strands/processor.js +545 -0
- package/dist/instrumentation/strands/processor.js.map +1 -0
- package/dist/instrumentation/strands/utils.d.ts +24 -0
- package/dist/instrumentation/strands/utils.js +360 -0
- package/dist/instrumentation/strands/utils.js.map +1 -0
- package/dist/instrumentation/together/wrapper.js +125 -51
- package/dist/instrumentation/together/wrapper.js.map +1 -1
- package/dist/instrumentation/vercel-ai/wrapper.d.ts +28 -2
- package/dist/instrumentation/vercel-ai/wrapper.js +314 -164
- package/dist/instrumentation/vercel-ai/wrapper.js.map +1 -1
- package/dist/llm/anthropic.js +10 -6
- package/dist/llm/anthropic.js.map +1 -1
- package/dist/llm/openai.js +9 -5
- package/dist/llm/openai.js.map +1 -1
- package/dist/otel/__tests__/metrics.test.js +16 -27
- package/dist/otel/__tests__/metrics.test.js.map +1 -1
- package/dist/otel/events.d.ts +11 -0
- package/dist/otel/events.js +74 -0
- package/dist/otel/events.js.map +1 -0
- package/dist/otel/metrics.d.ts +5 -6
- package/dist/otel/metrics.js +66 -48
- package/dist/otel/metrics.js.map +1 -1
- package/dist/otel/tracing.d.ts +6 -2
- package/dist/otel/tracing.js +71 -24
- package/dist/otel/tracing.js.map +1 -1
- package/dist/otel/utils.d.ts +11 -0
- package/dist/otel/utils.js +34 -0
- package/dist/otel/utils.js.map +1 -0
- package/dist/semantic-convention.d.ts +44 -5
- package/dist/semantic-convention.js +51 -8
- package/dist/semantic-convention.js.map +1 -1
- package/dist/types.d.ts +74 -22
- package/package.json +41 -9
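The largest single diff in this range is `package/dist/instrumentation/vercel-ai/wrapper.js` (+314 -164), reproduced below. The rewritten wrapper drops the old per-provider model sniffing and applies one pattern to `generateText`, `streamText`, `generateObject`, and `embed`: start a CLIENT span named `<operation> <modelId>`, run the original SDK call inside that span's context, then set attributes, record metrics, and end the span on both the success and failure paths. A minimal TypeScript sketch of that pattern using only `@opentelemetry/api`; the function name, tracer name, and attribute key here are illustrative assumptions, not openlit exports:

```typescript
import { context, trace, SpanKind, SpanStatusCode } from '@opentelemetry/api';

// Illustrative sketch: wrap an async SDK method (e.g. the Vercel AI SDK's
// generateText) in a CLIENT span, mirroring the pattern used in the diff below.
export function wrapWithClientSpan<T>(
  original: (...args: any[]) => Promise<T>,
  spanName: string,
): (...args: any[]) => Promise<T> {
  const tracer = trace.getTracer('openlit-example'); // tracer name is an assumption
  return async function (this: unknown, ...args: any[]): Promise<T> {
    const span = tracer.startSpan(spanName, { kind: SpanKind.CLIENT });
    try {
      // Run the original call with the span active so nested spans parent to it.
      const response = await context.with(
        trace.setSpan(context.active(), span),
        () => original.apply(this, args),
      );
      span.setAttribute('gen_ai.request.model', args[0]?.model?.modelId ?? 'unknown');
      return response;
    } catch (e) {
      span.recordException(e as Error);
      span.setStatus({ code: SpanStatusCode.ERROR });
      throw e;
    } finally {
      // The span always ends, whether the call resolved or rejected.
      span.end();
    }
  };
}
```

The promise chain (`then`/`catch`) in the actual wrapper plays the same role as the `try`/`catch`/`finally` above: attributes, metrics, and `span.end()` run on every exit path.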
@@ -8,106 +8,71 @@ const config_1 = __importDefault(require("../../config"));
 const helpers_1 = __importDefault(require("../../helpers"));
 const semantic_convention_1 = __importDefault(require("../../semantic-convention"));
 const base_wrapper_1 = __importDefault(require("../base-wrapper"));
+function spanCreationAttrs(operationName, requestModel) {
+    return {
+        [semantic_convention_1.default.GEN_AI_OPERATION]: operationName,
+        [semantic_convention_1.default.GEN_AI_PROVIDER_NAME_OTEL]: VercelAIWrapper.aiSystem,
+        [semantic_convention_1.default.GEN_AI_REQUEST_MODEL]: requestModel,
+        [semantic_convention_1.default.SERVER_ADDRESS]: VercelAIWrapper.serverAddress,
+        [semantic_convention_1.default.SERVER_PORT]: VercelAIWrapper.serverPort,
+    };
+}
 class VercelAIWrapper extends base_wrapper_1.default {
-    static _getProviderFromModel(model) {
-        if (!model)
-            return VercelAIWrapper.aiSystem;
-        const provider = model.provider || '';
-        if (provider.startsWith('openai'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_OPENAI;
-        if (provider.startsWith('anthropic'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_ANTHROPIC;
-        if (provider.startsWith('google'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_VERTEXAI;
-        if (provider.startsWith('mistral'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_MISTRAL;
-        if (provider.startsWith('cohere'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_COHERE;
-        if (provider.startsWith('amazon') || provider.startsWith('aws'))
-            return semantic_convention_1.default.GEN_AI_SYSTEM_AWS_BEDROCK;
-        return VercelAIWrapper.aiSystem;
-    }
     static _patchGenerateText(tracer) {
-        const genAIEndpoint = '
+        const genAIEndpoint = 'vercel_ai.generateText';
         return (originalMethod) => {
             return async function (...args) {
-                const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                span
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    if (config_1.default.traceContent) {
-                        const messages = params.messages || (params.prompt ? [{ role: 'user', content: params.prompt }] : []);
-                        span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(messages));
-                        span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_MESSAGES, helpers_1.default.buildOutputMessages(response.text || '', response.finishReason || 'stop', response.toolCalls));
-                    }
-                    metricParams = { genAIEndpoint, model: modelId, cost, aiSystem };
-                    return response;
-                }
-                catch (e) {
-                    helpers_1.default.handleException(span, e);
-                    throw e;
-                }
-                finally {
-                    span.end();
-                    if (metricParams)
-                        base_wrapper_1.default.recordMetrics(span, metricParams);
-                }
+                const params = args[0] || {};
+                const modelId = params.model?.modelId || 'unknown';
+                const spanName = `${semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT} ${modelId}`;
+                const span = tracer.startSpan(spanName, {
+                    kind: api_1.SpanKind.CLIENT,
+                    attributes: spanCreationAttrs(semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT, modelId),
+                });
+                return api_1.context
+                    .with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
+                        return originalMethod.apply(this, args);
+                    })
+                    .then((response) => {
+                        return VercelAIWrapper._chatComplete({
+                            args,
+                            genAIEndpoint,
+                            response,
+                            span,
+                            outputType: semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_TEXT,
+                        });
+                    })
+                    .catch((e) => {
+                        helpers_1.default.handleException(span, e);
+                        base_wrapper_1.default.recordMetrics(span, {
+                            genAIEndpoint,
+                            model: modelId,
+                            aiSystem: VercelAIWrapper.aiSystem,
+                            serverAddress: VercelAIWrapper.serverAddress,
+                            serverPort: VercelAIWrapper.serverPort,
+                            errorType: e?.constructor?.name || '_OTHER',
+                        });
+                        span.end();
+                        throw e;
                 });
             };
         };
     }
     static _patchStreamText(tracer) {
-        const genAIEndpoint = '
+        const genAIEndpoint = 'vercel_ai.streamText';
         return (originalMethod) => {
             return async function (...args) {
-                const
+                const params = args[0] || {};
+                const modelId = params.model?.modelId || 'unknown';
+                const spanName = `${semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT} ${modelId}`;
+                const span = tracer.startSpan(spanName, {
+                    kind: api_1.SpanKind.CLIENT,
+                    attributes: spanCreationAttrs(semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT, modelId),
+                });
                 const startTime = Date.now();
                 const chunkTimestamps = [];
                 try {
-                    const response = await originalMethod.apply(this, args);
-                    const params = args[0] || {};
-                    const model = params.model;
-                    const modelId = model?.modelId || 'unknown';
-                    const aiSystem = VercelAIWrapper._getProviderFromModel(model);
-                    // Set request attributes immediately
-                    span.setAttribute(semantic_convention_1.default.GEN_AI_OPERATION, semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT);
-                    span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, true);
-                    span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_MAX_TOKENS, params.maxTokens || -1);
-                    span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TEMPERATURE, params.temperature ?? 1);
-                    span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_P, params.topP ?? 1);
-                    if (config_1.default.traceContent) {
-                        const messages = params.messages || (params.prompt ? [{ role: 'user', content: params.prompt }] : []);
-                        span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(messages));
-                    }
-                    // Intercept textStream to capture per-chunk timestamps for TTFT/TBT
+                    const response = await api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => originalMethod.apply(this, args));
                     try {
                         const originalTextStream = response.textStream;
                         if (originalTextStream && typeof originalTextStream.getReader === 'function') {
@@ -135,11 +100,11 @@ class VercelAIWrapper extends base_wrapper_1.default {
                         }
                     }
                     catch (_) {
-                        // Stream interception
+                        // Stream interception is best-effort; TTFT/TBT won't be captured
                     }
-                    // Observe stream completion via usage promise
                     Promise.resolve(response.usage)
                         .then(async (usage) => {
+                        let metricParams;
                         try {
                             const ttft = chunkTimestamps.length > 0 ? (chunkTimestamps[0] - startTime) / 1000 : 0;
                             let tbt = 0;
@@ -147,42 +112,65 @@
                                 const timeDiffs = chunkTimestamps.slice(1).map((t, i) => t - chunkTimestamps[i]);
                                 tbt = timeDiffs.reduce((a, b) => a + b, 0) / timeDiffs.length / 1000;
                             }
-                            const pricingInfo = await config_1.default.updatePricingJson(config_1.default.pricing_json);
-                            const cost = helpers_1.default.getChatModelCost(modelId, pricingInfo, usage?.promptTokens || 0, usage?.completionTokens || 0);
-                            VercelAIWrapper.setBaseSpanAttributes(span, { genAIEndpoint, model: modelId, cost, aiSystem });
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_MODEL, modelId);
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, usage?.promptTokens || 0);
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_OUTPUT_TOKENS, usage?.completionTokens || 0);
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_TOTAL_TOKENS, usage?.totalTokens || 0);
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_CLIENT_TOKEN_USAGE, usage?.totalTokens || 0);
-                            span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_TYPE, semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_TEXT);
-                            if (ttft > 0)
-                                span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TTFT, ttft);
-                            if (tbt > 0)
-                                span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TBT, tbt);
                             const finishReason = await Promise.resolve(response.finishReason).catch(() => 'stop');
-
-
-
-
-
-
+                            const text = await Promise.resolve(response.text).catch(() => '');
+                            const toolCalls = await Promise.resolve(response.toolCalls).catch(() => undefined);
+                            const responseDetails = await Promise.resolve(response.response).catch(() => undefined);
+                            const result = {
+                                usage: {
+                                    promptTokens: usage?.promptTokens || 0,
+                                    completionTokens: usage?.completionTokens || 0,
+                                },
+                                finishReason: finishReason || 'stop',
+                                text: text || '',
+                                toolCalls,
+                                response: responseDetails,
+                            };
+                            metricParams = await VercelAIWrapper._chatCommonSetter({
+                                args,
+                                genAIEndpoint,
+                                result,
+                                span,
+                                isStream: true,
+                                outputType: semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_TEXT,
+                                ttft,
+                                tbt,
+                            });
                         }
                         catch (e) {
                             helpers_1.default.handleException(span, e);
                         }
                         finally {
                             span.end();
+                            if (metricParams) {
+                                base_wrapper_1.default.recordMetrics(span, metricParams);
+                            }
                         }
                     })
                         .catch((e) => {
                             helpers_1.default.handleException(span, e);
+                            base_wrapper_1.default.recordMetrics(span, {
+                                genAIEndpoint,
+                                model: modelId,
+                                aiSystem: VercelAIWrapper.aiSystem,
+                                serverAddress: VercelAIWrapper.serverAddress,
+                                serverPort: VercelAIWrapper.serverPort,
+                                errorType: e?.constructor?.name || '_OTHER',
+                            });
                             span.end();
                     });
                     return response;
                 }
                 catch (e) {
                     helpers_1.default.handleException(span, e);
+                    base_wrapper_1.default.recordMetrics(span, {
+                        genAIEndpoint,
+                        model: modelId,
+                        aiSystem: VercelAIWrapper.aiSystem,
+                        serverAddress: VercelAIWrapper.serverAddress,
+                        serverPort: VercelAIWrapper.serverPort,
+                        errorType: e?.constructor?.name || '_OTHER',
+                    });
                     span.end();
                     throw e;
                 }
@@ -190,83 +178,103 @@
            };
        };
    }
    static _patchGenerateObject(tracer) {
-        const genAIEndpoint = '
+        const genAIEndpoint = 'vercel_ai.generateObject';
        return (originalMethod) => {
            return async function (...args) {
-                const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                span
-
-
-
-
-
-
-
-
-
-
-
-
-
-                }
-
-
-                    if (metricParams)
-                        base_wrapper_1.default.recordMetrics(span, metricParams);
-                }
+                const params = args[0] || {};
+                const modelId = params.model?.modelId || 'unknown';
+                const spanName = `${semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT} ${modelId}`;
+                const span = tracer.startSpan(spanName, {
+                    kind: api_1.SpanKind.CLIENT,
+                    attributes: spanCreationAttrs(semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT, modelId),
+                });
+                return api_1.context
+                    .with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
+                        return originalMethod.apply(this, args);
+                    })
+                    .then((response) => {
+                        const result = {
+                            ...response,
+                            text: JSON.stringify(response.object || {}),
+                        };
+                        return VercelAIWrapper._chatComplete({
+                            args,
+                            genAIEndpoint,
+                            response,
+                            span,
+                            outputType: semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_JSON,
+                            resultOverride: result,
+                        });
+                    })
+                    .catch((e) => {
+                        helpers_1.default.handleException(span, e);
+                        base_wrapper_1.default.recordMetrics(span, {
+                            genAIEndpoint,
+                            model: modelId,
+                            aiSystem: VercelAIWrapper.aiSystem,
+                            serverAddress: VercelAIWrapper.serverAddress,
+                            serverPort: VercelAIWrapper.serverPort,
+                            errorType: e?.constructor?.name || '_OTHER',
+                        });
+                        span.end();
+                        throw e;
                });
            };
        };
    }
    static _patchEmbed(tracer) {
-        const genAIEndpoint = '
+        const genAIEndpoint = 'vercel_ai.embed';
        return (originalMethod) => {
            return async function (...args) {
-                const
+                const params = args[0] || {};
+                const modelId = params.model?.modelId || 'unknown';
+                const spanName = `${semantic_convention_1.default.GEN_AI_OPERATION_TYPE_EMBEDDING} ${modelId}`;
+                const span = tracer.startSpan(spanName, {
+                    kind: api_1.SpanKind.CLIENT,
+                    attributes: spanCreationAttrs(semantic_convention_1.default.GEN_AI_OPERATION_TYPE_EMBEDDING, modelId),
+                });
                return api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
+                    const captureContent = config_1.default.captureMessageContent;
                    let metricParams;
                    try {
                        const response = await originalMethod.apply(this, args);
-                        const
-                        const
-                        const
-
-
-
-
-
+                        const pricingInfo = config_1.default.pricingInfo || {};
+                        const inputTokens = response.usage?.tokens || 0;
+                        const cost = helpers_1.default.getEmbedModelCost(modelId, pricingInfo, inputTokens);
+                        VercelAIWrapper.setBaseSpanAttributes(span, {
+                            genAIEndpoint,
+                            model: modelId,
+                            cost,
+                            aiSystem: VercelAIWrapper.aiSystem,
+                            serverAddress: VercelAIWrapper.serverAddress,
+                            serverPort: VercelAIWrapper.serverPort,
+                        });
                        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, false);
-                        span.setAttribute(semantic_convention_1.default.
-
-                        span.setAttribute(semantic_convention_1.default.GEN_AI_CLIENT_TOKEN_USAGE, response.usage?.tokens || 0);
-                        if (config_1.default.traceContent && params.value !== undefined) {
+                        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, inputTokens);
+                        if (captureContent && params.value !== undefined) {
                            const inputStr = typeof params.value === 'string' ? params.value : JSON.stringify(params.value);
                            span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, inputStr);
                        }
-                        metricParams = {
+                        metricParams = {
+                            genAIEndpoint,
+                            model: modelId,
+                            cost,
+                            aiSystem: VercelAIWrapper.aiSystem,
+                            serverAddress: VercelAIWrapper.serverAddress,
+                            serverPort: VercelAIWrapper.serverPort,
+                        };
                        return response;
                    }
                    catch (e) {
                        helpers_1.default.handleException(span, e);
+                        metricParams = {
+                            genAIEndpoint,
+                            model: modelId,
+                            aiSystem: VercelAIWrapper.aiSystem,
+                            serverAddress: VercelAIWrapper.serverAddress,
+                            serverPort: VercelAIWrapper.serverPort,
+                            errorType: e?.constructor?.name || '_OTHER',
+                        };
                        throw e;
                    }
                    finally {
@@ -278,7 +286,149 @@
            };
        };
    }
+    static async _chatComplete({ args, genAIEndpoint, response, span, outputType, resultOverride, }) {
+        let metricParams;
+        try {
+            metricParams = await VercelAIWrapper._chatCommonSetter({
+                args,
+                genAIEndpoint,
+                result: resultOverride || response,
+                span,
+                isStream: false,
+                outputType,
+            });
+            return response;
+        }
+        catch (e) {
+            helpers_1.default.handleException(span, e);
+            throw e;
+        }
+        finally {
+            span.end();
+            if (metricParams) {
+                base_wrapper_1.default.recordMetrics(span, metricParams);
+            }
+        }
+    }
+    static async _chatCommonSetter({ args, genAIEndpoint, result, span, isStream, outputType, ttft = 0, tbt = 0, }) {
+        const captureContent = config_1.default.captureMessageContent;
+        const params = args[0] || {};
+        const modelId = params.model?.modelId || 'unknown';
+        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TEMPERATURE, params.temperature ?? 1);
+        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_P, params.topP ?? 1);
+        if (params.maxTokens != null) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_MAX_TOKENS, params.maxTokens);
+        }
+        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, isStream);
+        if (params.seed != null) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_SEED, Number(params.seed));
+        }
+        if (params.frequencyPenalty) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_FREQUENCY_PENALTY, params.frequencyPenalty);
+        }
+        if (params.presencePenalty) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_PRESENCE_PENALTY, params.presencePenalty);
+        }
+        if (params.stopSequences) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_STOP_SEQUENCES, Array.isArray(params.stopSequences) ? params.stopSequences : [params.stopSequences]);
+        }
+        if (params.topK != null) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_K, params.topK);
+        }
+        const messages = params.messages || (params.prompt ? [{ role: 'user', content: params.prompt }] : []);
+        if (captureContent) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(messages, params.system));
+        }
+        const responseId = result.response?.id;
+        const responseModel = result.response?.modelId || modelId;
+        const inputTokens = result.usage?.promptTokens || 0;
+        const outputTokens = result.usage?.completionTokens || 0;
+        const pricingInfo = config_1.default.pricingInfo || {};
+        const cost = helpers_1.default.getChatModelCost(modelId, pricingInfo, inputTokens, outputTokens);
+        VercelAIWrapper.setBaseSpanAttributes(span, {
+            genAIEndpoint,
+            model: modelId,
+            cost,
+            aiSystem: VercelAIWrapper.aiSystem,
+            serverAddress: VercelAIWrapper.serverAddress,
+            serverPort: VercelAIWrapper.serverPort,
+        });
+        span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_MODEL, responseModel);
+        if (responseId) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_ID, responseId);
+        }
+        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, inputTokens);
+        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_OUTPUT_TOKENS, outputTokens);
+        const finishReason = result.finishReason || 'stop';
+        span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON, [finishReason]);
+        span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_TYPE, outputType);
+        if (ttft > 0) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TTFT, ttft);
+        }
+        if (tbt > 0) {
+            span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TBT, tbt);
+        }
+        if (result.toolCalls?.length > 0) {
+            const toolNames = result.toolCalls.map((t) => t.toolName || '').filter(Boolean);
+            const toolIds = result.toolCalls.map((t) => t.toolCallId || '').filter(Boolean);
+            const toolArgs = result.toolCalls.map((t) => JSON.stringify(t.args || {})).filter(Boolean);
+            if (toolNames.length > 0) {
+                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_NAME, toolNames.join(', '));
+            }
+            if (toolIds.length > 0) {
+                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_CALL_ID, toolIds.join(', '));
+            }
+            if (toolArgs.length > 0) {
+                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_ARGS, toolArgs.join(', '));
+            }
+        }
+        const normalizedToolCalls = result.toolCalls?.map((t) => ({
+            id: t.toolCallId || '',
+            name: t.toolName || '',
+            arguments: t.args || {},
+        }));
+        let inputMessagesJson;
+        let outputMessagesJson;
+        if (captureContent) {
+            outputMessagesJson = helpers_1.default.buildOutputMessages(result.text || '', finishReason, normalizedToolCalls);
+            span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_MESSAGES, outputMessagesJson);
+            inputMessagesJson = helpers_1.default.buildInputMessages(messages, params.system);
+        }
+        if (!config_1.default.disableEvents) {
+            const eventAttrs = {
+                [semantic_convention_1.default.GEN_AI_OPERATION]: semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT,
+                [semantic_convention_1.default.GEN_AI_REQUEST_MODEL]: modelId,
+                [semantic_convention_1.default.GEN_AI_RESPONSE_MODEL]: responseModel,
+                [semantic_convention_1.default.SERVER_ADDRESS]: VercelAIWrapper.serverAddress,
+                [semantic_convention_1.default.SERVER_PORT]: VercelAIWrapper.serverPort,
+                [semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON]: [finishReason],
+                [semantic_convention_1.default.GEN_AI_OUTPUT_TYPE]: outputType,
+                [semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS]: inputTokens,
+                [semantic_convention_1.default.GEN_AI_USAGE_OUTPUT_TOKENS]: outputTokens,
+            };
+            if (responseId) {
+                eventAttrs[semantic_convention_1.default.GEN_AI_RESPONSE_ID] = responseId;
+            }
+            if (captureContent) {
+                if (inputMessagesJson)
+                    eventAttrs[semantic_convention_1.default.GEN_AI_INPUT_MESSAGES] = inputMessagesJson;
+                if (outputMessagesJson)
+                    eventAttrs[semantic_convention_1.default.GEN_AI_OUTPUT_MESSAGES] = outputMessagesJson;
+            }
+            helpers_1.default.emitInferenceEvent(span, eventAttrs);
+        }
+        return {
+            genAIEndpoint,
+            model: modelId,
+            cost,
+            aiSystem: VercelAIWrapper.aiSystem,
+            serverAddress: VercelAIWrapper.serverAddress,
+            serverPort: VercelAIWrapper.serverPort,
+        };
+    }
 }
-VercelAIWrapper.aiSystem =
+VercelAIWrapper.aiSystem = semantic_convention_1.default.GEN_AI_SYSTEM_VERCEL_AI;
+VercelAIWrapper.serverAddress = 'vercel.ai';
+VercelAIWrapper.serverPort = 443;
 exports.default = VercelAIWrapper;
 //# sourceMappingURL=wrapper.js.map