openlit 1.7.1 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -10
- package/dist/helpers.d.ts +10 -0
- package/dist/helpers.js +106 -3
- package/dist/helpers.js.map +1 -1
- package/dist/instrumentation/__tests__/anthropic-wrapper.test.js +1 -1
- package/dist/instrumentation/__tests__/anthropic-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/base-wrapper.test.js +1 -1
- package/dist/instrumentation/__tests__/base-wrapper.test.js.map +1 -1
- package/dist/instrumentation/__tests__/google-ai-trace-comparison.test.d.ts +4 -0
- package/dist/instrumentation/__tests__/google-ai-trace-comparison.test.js +99 -0
- package/dist/instrumentation/__tests__/google-ai-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/__tests__/groq-trace-comparison.test.d.ts +7 -0
- package/dist/instrumentation/__tests__/groq-trace-comparison.test.js +180 -0
- package/dist/instrumentation/__tests__/groq-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.d.ts +4 -0
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.js +127 -0
- package/dist/instrumentation/__tests__/mistral-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/__tests__/together-trace-comparison.test.d.ts +4 -0
- package/dist/instrumentation/__tests__/together-trace-comparison.test.js +98 -0
- package/dist/instrumentation/__tests__/together-trace-comparison.test.js.map +1 -0
- package/dist/instrumentation/__tests__/trace-comparison-utils.d.ts +66 -0
- package/dist/instrumentation/__tests__/trace-comparison-utils.js +245 -0
- package/dist/instrumentation/__tests__/trace-comparison-utils.js.map +1 -0
- package/dist/instrumentation/anthropic/index.js +6 -4
- package/dist/instrumentation/anthropic/index.js.map +1 -1
- package/dist/instrumentation/anthropic/wrapper.js +12 -30
- package/dist/instrumentation/anthropic/wrapper.js.map +1 -1
- package/dist/instrumentation/base-wrapper.js +2 -2
- package/dist/instrumentation/base-wrapper.js.map +1 -1
- package/dist/instrumentation/bedrock/index.d.ts +11 -0
- package/dist/instrumentation/bedrock/index.js +52 -0
- package/dist/instrumentation/bedrock/index.js.map +1 -0
- package/dist/instrumentation/bedrock/wrapper.d.ts +10 -0
- package/dist/instrumentation/bedrock/wrapper.js +345 -0
- package/dist/instrumentation/bedrock/wrapper.js.map +1 -0
- package/dist/instrumentation/cohere/wrapper.js +8 -10
- package/dist/instrumentation/cohere/wrapper.js.map +1 -1
- package/dist/instrumentation/google-ai/index.d.ts +11 -0
- package/dist/instrumentation/google-ai/index.js +48 -0
- package/dist/instrumentation/google-ai/index.js.map +1 -0
- package/dist/instrumentation/google-ai/wrapper.d.ts +34 -0
- package/dist/instrumentation/google-ai/wrapper.js +241 -0
- package/dist/instrumentation/google-ai/wrapper.js.map +1 -0
- package/dist/instrumentation/groq/index.d.ts +11 -0
- package/dist/instrumentation/groq/index.js +43 -0
- package/dist/instrumentation/groq/index.js.map +1 -0
- package/dist/instrumentation/groq/wrapper.d.ts +33 -0
- package/dist/instrumentation/groq/wrapper.js +289 -0
- package/dist/instrumentation/groq/wrapper.js.map +1 -0
- package/dist/instrumentation/index.js +20 -0
- package/dist/instrumentation/index.js.map +1 -1
- package/dist/instrumentation/langchain/index.d.ts +23 -0
- package/dist/instrumentation/langchain/index.js +109 -0
- package/dist/instrumentation/langchain/index.js.map +1 -0
- package/dist/instrumentation/langchain/wrapper.d.ts +6 -0
- package/dist/instrumentation/langchain/wrapper.js +318 -0
- package/dist/instrumentation/langchain/wrapper.js.map +1 -0
- package/dist/instrumentation/llamaindex/index.d.ts +11 -0
- package/dist/instrumentation/llamaindex/index.js +102 -0
- package/dist/instrumentation/llamaindex/index.js.map +1 -0
- package/dist/instrumentation/llamaindex/wrapper.d.ts +11 -0
- package/dist/instrumentation/llamaindex/wrapper.js +252 -0
- package/dist/instrumentation/llamaindex/wrapper.js.map +1 -0
- package/dist/instrumentation/mistral/index.d.ts +11 -0
- package/dist/instrumentation/mistral/index.js +66 -0
- package/dist/instrumentation/mistral/index.js.map +1 -0
- package/dist/instrumentation/mistral/wrapper.d.ts +34 -0
- package/dist/instrumentation/mistral/wrapper.js +340 -0
- package/dist/instrumentation/mistral/wrapper.js.map +1 -0
- package/dist/instrumentation/ollama/wrapper.d.ts +11 -3
- package/dist/instrumentation/ollama/wrapper.js +60 -95
- package/dist/instrumentation/ollama/wrapper.js.map +1 -1
- package/dist/instrumentation/openai/index.js +1 -1
- package/dist/instrumentation/openai/index.js.map +1 -1
- package/dist/instrumentation/openai/wrapper.js +14 -84
- package/dist/instrumentation/openai/wrapper.js.map +1 -1
- package/dist/instrumentation/pinecone/index.d.ts +11 -0
- package/dist/instrumentation/pinecone/index.js +79 -0
- package/dist/instrumentation/pinecone/index.js.map +1 -0
- package/dist/instrumentation/pinecone/wrapper.d.ts +14 -0
- package/dist/instrumentation/pinecone/wrapper.js +198 -0
- package/dist/instrumentation/pinecone/wrapper.js.map +1 -0
- package/dist/instrumentation/together/index.d.ts +11 -0
- package/dist/instrumentation/together/index.js +43 -0
- package/dist/instrumentation/together/index.js.map +1 -0
- package/dist/instrumentation/together/wrapper.d.ts +33 -0
- package/dist/instrumentation/together/wrapper.js +271 -0
- package/dist/instrumentation/together/wrapper.js.map +1 -0
- package/dist/instrumentation/vercel-ai/index.d.ts +10 -0
- package/dist/instrumentation/vercel-ai/index.js +59 -0
- package/dist/instrumentation/vercel-ai/index.js.map +1 -0
- package/dist/instrumentation/vercel-ai/wrapper.d.ts +11 -0
- package/dist/instrumentation/vercel-ai/wrapper.js +248 -0
- package/dist/instrumentation/vercel-ai/wrapper.js.map +1 -0
- package/dist/otel/__tests__/metrics.test.js +5 -5
- package/dist/otel/__tests__/metrics.test.js.map +1 -1
- package/dist/semantic-convention.d.ts +14 -3
- package/dist/semantic-convention.js +19 -8
- package/dist/semantic-convention.js.map +1 -1
- package/dist/types.d.ts +1 -1
- package/package.json +1 -1
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const api_1 = require("@opentelemetry/api");
|
|
7
|
+
const config_1 = __importDefault(require("../../config"));
|
|
8
|
+
const helpers_1 = __importDefault(require("../../helpers"));
|
|
9
|
+
const semantic_convention_1 = __importDefault(require("../../semantic-convention"));
|
|
10
|
+
const base_wrapper_1 = __importDefault(require("../base-wrapper"));
|
|
11
|
+
/**
 * OpenTelemetry instrumentation wrapper for the Mistral AI SDK.
 *
 * Patches the SDK's chat-completion and embedding entry points so each call
 * runs inside a CLIENT span, and records GenAI semantic-convention attributes
 * (model, token usage, cost, TTFT/TBT, tool calls) plus metrics via the shared
 * BaseWrapper. Supports both the old snake_case and new camelCase SDK shapes.
 */
class MistralWrapper extends base_wrapper_1.default {
    /**
     * Builds a method patcher for `chat.completions`-style calls.
     *
     * @param tracer - OpenTelemetry tracer used to start the CLIENT span.
     * @returns A function that takes the original SDK method and returns the
     *   instrumented replacement. The replacement starts a span, invokes the
     *   original inside that span's context, then routes the result either to
     *   the streaming generator (wrapped in a stream proxy) or to the
     *   non-streaming handler. Errors are recorded on the span and rethrown.
     */
    static _patchChatCompletionCreate(tracer) {
        const genAIEndpoint = 'mistral.chat.completions';
        return (originalMethod) => {
            // NOTE: plain `function` (not arrow) so `this` is the SDK client instance.
            return async function (...args) {
                const span = tracer.startSpan(genAIEndpoint, { kind: api_1.SpanKind.CLIENT });
                return api_1.context
                    .with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
                    return originalMethod.apply(this, args);
                })
                    .then((response) => {
                    // Detect streaming: new Mistral SDK's chat.stream() returns an async iterable directly
                    const isStream = args[0]?.stream === true || typeof response[Symbol.asyncIterator] === 'function';
                    if (isStream) {
                        // Streaming: hand chunks through a proxy so the caller still
                        // iterates the raw stream while the generator accumulates telemetry.
                        return helpers_1.default.createStreamProxy(response, MistralWrapper._chatCompletionGenerator({
                            args,
                            genAIEndpoint,
                            response,
                            span,
                        }));
                    }
                    return MistralWrapper._chatCompletion({ args, genAIEndpoint, response, span });
                })
                    .catch((e) => {
                    // Record the failure and close the span here; the streaming/non-streaming
                    // handlers never run on this path.
                    helpers_1.default.handleException(span, e);
                    span.end();
                    throw e;
                });
            };
        };
    }
    /**
     * Non-streaming chat-completion handler: sets span attributes from the
     * finished response, ends the span, records metrics, and returns the
     * response unchanged. Exceptions are recorded on the span and rethrown.
     *
     * @param args - Original call arguments (args[0] is the request payload).
     * @param genAIEndpoint - Logical endpoint name used as the span name.
     * @param response - The SDK's completed response object.
     * @param span - The active CLIENT span to annotate and end.
     * @returns The untouched SDK response.
     */
    static async _chatCompletion({ args, genAIEndpoint, response, span, }) {
        let metricParams;
        try {
            metricParams = await MistralWrapper._chatCompletionCommonSetter({
                args,
                genAIEndpoint,
                result: response,
                span,
            });
            return response;
        }
        catch (e) {
            helpers_1.default.handleException(span, e);
            throw e;
        }
        finally {
            // Span always ends; metrics only when attribute-setting succeeded.
            span.end();
            if (metricParams) {
                base_wrapper_1.default.recordMetrics(span, metricParams);
            }
        }
    }
    /**
     * Streaming chat-completion handler. Async generator that re-yields every
     * chunk to the caller while accumulating a synthetic non-streaming `result`
     * (content, finish reason, tool calls), estimating token usage, computing
     * TTFT (time to first chunk) and TBT (mean inter-chunk gap), then setting
     * span attributes and metrics once the stream is exhausted.
     *
     * @param args - Original call arguments (args[0] is the request payload).
     * @param genAIEndpoint - Logical endpoint name.
     * @param response - The SDK's async-iterable stream of chunks.
     * @param span - The active CLIENT span; ended in `finally`.
     */
    static async *_chatCompletionGenerator({ args, genAIEndpoint, response, span, }) {
        let metricParams;
        const timestamps = []; // wall-clock arrival time of each chunk (ms)
        const startTime = Date.now();
        try {
            const { messages } = args[0];
            let { tools } = args[0];
            // Synthetic "completed response" built up from the streamed deltas.
            const result = {
                id: '0',
                created: -1,
                model: '',
                choices: [
                    {
                        index: 0,
                        finish_reason: 'stop',
                        message: { role: 'assistant', content: '' },
                    },
                ],
                usage: {
                    prompt_tokens: 0,
                    completion_tokens: 0,
                    total_tokens: 0,
                },
            };
            let toolCalls = [];
            for await (const chunk of response) {
                timestamps.push(Date.now());
                // New Mistral SDK wraps each SSE event in { data: { ... } }
                const chunkData = chunk.data ?? chunk;
                result.id = chunkData.id || result.id;
                result.created = chunkData.created || result.created;
                result.model = chunkData.model || result.model;
                if (chunkData.choices && chunkData.choices[0]) {
                    if (chunkData.choices[0].finish_reason) {
                        result.choices[0].finish_reason = chunkData.choices[0].finish_reason;
                    }
                    if (chunkData.choices[0].delta?.content) {
                        result.choices[0].message.content += chunkData.choices[0].delta.content;
                    }
                    // Handle tool calls for streaming
                    if (chunkData.choices[0].delta?.tool_calls) {
                        const deltaTools = chunkData.choices[0].delta.tool_calls;
                        for (const tool of deltaTools) {
                            const idx = tool.index || 0;
                            // Grow the accumulator so toolCalls[idx] exists.
                            while (toolCalls.length <= idx) {
                                toolCalls.push({
                                    id: '',
                                    type: 'function',
                                    function: { name: '', arguments: '' }
                                });
                            }
                            if (tool.id) {
                                // First delta for this call carries id/name; arguments may
                                // start here too.
                                toolCalls[idx].id = tool.id;
                                toolCalls[idx].type = tool.type || 'function';
                                if (tool.function?.name) {
                                    toolCalls[idx].function.name = tool.function.name;
                                }
                                if (tool.function?.arguments) {
                                    toolCalls[idx].function.arguments = tool.function.arguments;
                                }
                            }
                            else if (tool.function?.arguments) {
                                // Subsequent deltas append argument fragments.
                                toolCalls[idx].function.arguments += tool.function.arguments;
                            }
                        }
                        tools = true;
                    }
                }
                // Pass the raw chunk through to the consumer untouched.
                yield chunk;
            }
            if (toolCalls.length > 0) {
                result.choices[0].message = {
                    ...result.choices[0].message,
                    tool_calls: toolCalls
                };
            }
            // Estimate token usage if not provided
            let promptTokens = 0;
            for (const message of messages || []) {
                promptTokens += helpers_1.default.openaiTokens(message.content, result.model) ?? 0;
            }
            const completionTokens = helpers_1.default.openaiTokens(result.choices[0].message.content ?? '', result.model);
            if (completionTokens) {
                result.usage = {
                    prompt_tokens: promptTokens,
                    completion_tokens: completionTokens,
                    total_tokens: promptTokens + completionTokens,
                };
            }
            // NOTE(review): this mutates the caller's request object, and when any
            // tool-call delta was seen `tools` has been reassigned to boolean `true`,
            // replacing the original tools array — looks intentional per the shipped
            // source map, but verify downstream consumers of args[0].tools.
            args[0].tools = tools;
            // Calculate TTFT and TBT
            const ttft = timestamps.length > 0 ? (timestamps[0] - startTime) / 1000 : 0;
            let tbt = 0;
            if (timestamps.length > 1) {
                // Mean gap between consecutive chunks, in seconds.
                const timeDiffs = timestamps.slice(1).map((t, i) => t - timestamps[i]);
                tbt = timeDiffs.reduce((a, b) => a + b, 0) / timeDiffs.length / 1000;
            }
            metricParams = await MistralWrapper._chatCompletionCommonSetter({
                args,
                genAIEndpoint,
                result,
                span,
                ttft,
                tbt,
            });
            return result;
        }
        catch (e) {
            helpers_1.default.handleException(span, e);
            throw e;
        }
        finally {
            span.end();
            if (metricParams) {
                base_wrapper_1.default.recordMetrics(span, metricParams);
            }
        }
    }
    /**
     * Shared attribute setter for both streaming and non-streaming chat
     * completions. Reads request params from args[0], usage/finish/tool-call
     * data from `result` (accepting both camelCase and snake_case usage
     * fields), looks up pricing to compute cost, and writes all GenAI
     * semantic-convention attributes onto the span.
     *
     * @param args - Original call arguments (args[0] is the request payload).
     * @param genAIEndpoint - Logical endpoint name.
     * @param result - Completed (or synthesized-from-stream) response.
     * @param span - Span to annotate.
     * @param ttft - Time to first token in seconds; 0 means "not measured" and
     *   the attribute is skipped.
     * @param tbt - Mean time between tokens in seconds; 0 skips the attribute.
     * @returns The params object later passed to BaseWrapper.recordMetrics.
     */
    static async _chatCompletionCommonSetter({ args, genAIEndpoint, result, span, ttft = 0, tbt = 0, }) {
        const traceContent = config_1.default.traceContent;
        const { messages, max_tokens = null, temperature = 0.7, top_p, user, stream = false, safe_prompt = false, } = args[0];
        // Request Params attributes
        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_P, top_p || 1);
        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_MAX_TOKENS, max_tokens || -1);
        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TEMPERATURE, temperature);
        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, stream);
        if (user) {
            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_USER, user);
        }
        // Mistral-specific flag; no shared semantic-convention constant exists,
        // hence the literal attribute key.
        if (safe_prompt !== undefined) {
            span.setAttribute('gen_ai.request.safe_prompt', safe_prompt);
        }
        if (traceContent) {
            span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(messages || []));
        }
        span.setAttribute(semantic_convention_1.default.GEN_AI_OPERATION, semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT);
        span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_ID, result.id);
        // Fall back to a default model name when the response omits one.
        const model = result.model || 'mistral-small-latest';
        const responseModel = result.model || model;
        const pricingInfo = await config_1.default.updatePricingJson(config_1.default.pricing_json);
        // Support both camelCase (new SDK) and snake_case (old SDK) usage fields
        const promptTokens = result.usage?.promptTokens ?? result.usage?.prompt_tokens ?? 0;
        const completionTokens = result.usage?.completionTokens ?? result.usage?.completion_tokens ?? 0;
        const totalTokens = result.usage?.totalTokens ?? result.usage?.total_tokens ?? 0;
        // Calculate cost of the operation
        const cost = helpers_1.default.getChatModelCost(model, pricingInfo, promptTokens, completionTokens);
        MistralWrapper.setBaseSpanAttributes(span, {
            genAIEndpoint,
            model,
            user,
            cost,
            aiSystem: MistralWrapper.aiSystem,
        });
        // Response model
        span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_MODEL, responseModel);
        // Token usage
        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, promptTokens);
        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_OUTPUT_TOKENS, completionTokens);
        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_TOTAL_TOKENS, totalTokens);
        span.setAttribute(semantic_convention_1.default.GEN_AI_CLIENT_TOKEN_USAGE, totalTokens);
        // TTFT and TBT metrics
        if (ttft > 0) {
            span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TTFT, ttft);
        }
        if (tbt > 0) {
            span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TBT, tbt);
        }
        // Finish reason
        if (result.choices[0].finish_reason) {
            span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON, [result.choices[0].finish_reason]);
        }
        // Output type
        const outputType = typeof result.choices[0].message.content === 'string'
            ? semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_TEXT
            : semantic_convention_1.default.GEN_AI_OUTPUT_TYPE_JSON;
        span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_TYPE, outputType);
        // Tool calls handling
        if (result.choices[0].message.tool_calls) {
            const toolCalls = result.choices[0].message.tool_calls;
            // Collect each field across calls; empty entries are dropped, so the
            // joined lists may have different lengths and are not index-aligned.
            const toolNames = toolCalls.map((t) => t.function?.name || '').filter(Boolean);
            const toolIds = toolCalls.map((t) => t.id || '').filter(Boolean);
            const toolArgs = toolCalls.map((t) => t.function?.arguments || '').filter(Boolean);
            const toolTypes = toolCalls.map((t) => t.type || '').filter(Boolean);
            if (toolNames.length > 0) {
                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_NAME, toolNames.join(', '));
            }
            if (toolIds.length > 0) {
                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_CALL_ID, toolIds.join(', '));
            }
            if (toolArgs.length > 0) {
                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_CALL_ARGUMENTS, toolArgs);
            }
            if (toolTypes.length > 0) {
                span.setAttribute(semantic_convention_1.default.GEN_AI_TOOL_TYPE, toolTypes.join(', '));
            }
        }
        // Content
        if (traceContent) {
            const completionContent = result.choices[0].message.content || '';
            const toolCalls = result.choices[0].message.tool_calls;
            span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_MESSAGES, helpers_1.default.buildOutputMessages(completionContent, result.choices[0].finish_reason || 'stop', toolCalls));
        }
        return {
            genAIEndpoint,
            model,
            user,
            cost,
            aiSystem: MistralWrapper.aiSystem,
        };
    }
    /**
     * Builds a method patcher for the embeddings endpoint. Unlike chat, all
     * work happens inline (no streaming): run the original call, then set
     * operation/usage/cost attributes and record metrics.
     *
     * @param tracer - OpenTelemetry tracer used to start the CLIENT span.
     * @returns A function mapping the original SDK method to its
     *   instrumented replacement.
     */
    static _patchEmbedding(tracer) {
        const genAIEndpoint = 'mistral.embeddings';
        const traceContent = config_1.default.traceContent;
        return (originalMethod) => {
            // Plain `function` so `this` is the SDK client instance.
            return async function (...args) {
                const span = tracer.startSpan(genAIEndpoint, { kind: api_1.SpanKind.CLIENT });
                return api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
                    let metricParams;
                    try {
                        const response = await originalMethod.apply(this, args);
                        const model = args[0].model || 'mistral-embed';
                        const pricingInfo = await config_1.default.updatePricingJson(config_1.default.pricing_json);
                        // Mistral SDK returns camelCase usage fields
                        const promptTokens = response.usage?.promptTokens ?? response.usage?.prompt_tokens ?? 0;
                        const totalTokens = response.usage?.totalTokens ?? response.usage?.total_tokens ?? 0;
                        const cost = helpers_1.default.getEmbedModelCost(model, pricingInfo, promptTokens);
                        span.setAttribute(semantic_convention_1.default.GEN_AI_OPERATION, semantic_convention_1.default.GEN_AI_OPERATION_TYPE_EMBEDDING);
                        // Accept both request shapes: `input` (old SDK) and `inputs` (new SDK)
                        // — presumably; confirm against the SDK versions supported.
                        const { input, inputs, user, encoding_format = 'float' } = args[0];
                        const embeddingInput = input ?? inputs;
                        MistralWrapper.setBaseSpanAttributes(span, {
                            genAIEndpoint,
                            model,
                            user,
                            cost,
                            aiSystem: MistralWrapper.aiSystem,
                        });
                        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_ENCODING_FORMATS, [encoding_format]);
                        span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, false);
                        // Embeddings are single-shot: no meaningful TTFT/TBT, recorded as 0.
                        span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TBT, 0);
                        span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TTFT, 0);
                        if (user) {
                            span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_USER, user);
                        }
                        if (traceContent && embeddingInput) {
                            // Normalize to an array and record each input as a user message.
                            const inputArr = Array.isArray(embeddingInput) ? embeddingInput : [embeddingInput];
                            span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(inputArr.map((c) => ({ role: 'user', content: c }))));
                        }
                        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, promptTokens);
                        span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_TOTAL_TOKENS, totalTokens);
                        span.setAttribute(semantic_convention_1.default.GEN_AI_CLIENT_TOKEN_USAGE, promptTokens);
                        metricParams = {
                            genAIEndpoint,
                            model,
                            user,
                            cost,
                            aiSystem: MistralWrapper.aiSystem,
                        };
                        return response;
                    }
                    catch (e) {
                        helpers_1.default.handleException(span, e);
                        throw e;
                    }
                    finally {
                        span.end();
                        if (metricParams) {
                            base_wrapper_1.default.recordMetrics(span, metricParams);
                        }
                    }
                });
            };
        };
    }
}
// GenAI system identifier attached to every span/metric from this wrapper.
MistralWrapper.aiSystem = 'mistral';
exports.default = MistralWrapper;
//# sourceMappingURL=wrapper.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"wrapper.js","sourceRoot":"","sources":["../../../src/instrumentation/mistral/wrapper.ts"],"names":[],"mappings":";;;;;AAAA,4CAA4E;AAC5E,0DAAyC;AACzC,4DAA0C;AAC1C,oFAA2D;AAC3D,mEAAkE;AAElE,MAAM,cAAe,SAAQ,sBAAW;IAGtC,MAAM,CAAC,0BAA0B,CAAC,MAAc;QAC9C,MAAM,aAAa,GAAG,0BAA0B,CAAC;QACjD,OAAO,CAAC,cAAuC,EAAE,EAAE;YACjD,OAAO,KAAK,WAAsB,GAAG,IAAW;gBAC9C,MAAM,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,EAAE,IAAI,EAAE,cAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;gBACxE,OAAO,aAAO;qBACX,IAAI,CAAC,WAAK,CAAC,OAAO,CAAC,aAAO,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,EAAE,KAAK,IAAI,EAAE;oBACtD,OAAO,cAAc,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBAC1C,CAAC,CAAC;qBACD,IAAI,CAAC,CAAC,QAAa,EAAE,EAAE;oBACtB,uFAAuF;oBACvF,MAAM,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,EAAE,MAAM,KAAK,IAAI,IAAI,OAAO,QAAQ,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,UAAU,CAAC;oBAElG,IAAI,QAAQ,EAAE,CAAC;wBACb,OAAO,iBAAa,CAAC,iBAAiB,CACpC,QAAQ,EACR,cAAc,CAAC,wBAAwB,CAAC;4BACtC,IAAI;4BACJ,aAAa;4BACb,QAAQ;4BACR,IAAI;yBACL,CAAC,CACH,CAAC;oBACJ,CAAC;oBAED,OAAO,cAAc,CAAC,eAAe,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;gBACjF,CAAC,CAAC;qBACD,KAAK,CAAC,CAAC,CAAM,EAAE,EAAE;oBAChB,iBAAa,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;oBACvC,IAAI,CAAC,GAAG,EAAE,CAAC;oBACX,MAAM,CAAC,CAAC;gBACV,CAAC,CAAC,CAAC;YACP,CAAC,CAAC;QACJ,CAAC,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,eAAe,CAAC,EAC3B,IAAI,EACJ,aAAa,EACb,QAAQ,EACR,IAAI,GAML;QACC,IAAI,YAAY,CAAC;QACjB,IAAI,CAAC;YACH,YAAY,GAAG,MAAM,cAAc,CAAC,2BAA2B,CAAC;gBAC9D,IAAI;gBACJ,aAAa;gBACb,MAAM,EAAE,QAAQ;gBAChB,IAAI;aACL,CAAC,CAAC;YACH,OAAO,QAAQ,CAAC;QAClB,CAAC;QAAC,OAAO,CAAM,EAAE,CAAC;YAChB,iBAAa,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;YACvC,MAAM,CAAC,CAAC;QACV,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,IAAI,YAAY,EAAE,CAAC;gBACjB,sBAAW,CAAC,aAAa,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;YAChD,CAAC;QACH,CAAC;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,CAAC,wBAAwB,CAAC,EACrC,IAAI,EACJ,aAAa,EACb,QAAQ,EACR,IAAI,GAML;QACC,IAAI,YAAY,CAAC;QACjB,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAE7B,IAAI,CA
AC;YACH,MAAM,EAAE,QAAQ,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;YAC7B,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;YACxB,MAAM,MAAM,GAAG;gBACb,EAAE,EAAE,GAAG;gBACP,OAAO,EAAE,CAAC,CAAC;gBACX,KAAK,EAAE,EAAE;gBACT,OAAO,EAAE;oBACP;wBACE,KAAK,EAAE,CAAC;wBACR,aAAa,EAAE,MAAM;wBACrB,OAAO,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,EAAE,EAAE;qBAC5C;iBACF;gBACD,KAAK,EAAE;oBACL,aAAa,EAAE,CAAC;oBAChB,iBAAiB,EAAE,CAAC;oBACpB,YAAY,EAAE,CAAC;iBAChB;aACF,CAAC;YAEF,IAAI,SAAS,GAAU,EAAE,CAAC;YAE1B,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE,CAAC;gBACnC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC;gBAE5B,4DAA4D;gBAC5D,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC;gBAEtC,MAAM,CAAC,EAAE,GAAG,SAAS,CAAC,EAAE,IAAI,MAAM,CAAC,EAAE,CAAC;gBACtC,MAAM,CAAC,OAAO,GAAG,SAAS,CAAC,OAAO,IAAI,MAAM,CAAC,OAAO,CAAC;gBACrD,MAAM,CAAC,KAAK,GAAG,SAAS,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC;gBAE/C,IAAI,SAAS,CAAC,OAAO,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;oBAC9C,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC;wBACvC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC;oBACvE,CAAC;oBACD,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,EAAE,CAAC;wBACxC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;oBAC1E,CAAC;oBAED,kCAAkC;oBAClC,IAAI,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,UAAU,EAAE,CAAC;wBAC3C,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;wBAEzD,KAAK,MAAM,IAAI,IAAI,UAAU,EAAE,CAAC;4BAC9B,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC;4BAE5B,OAAO,SAAS,CAAC,MAAM,IAAI,GAAG,EAAE,CAAC;gCAC/B,SAAS,CAAC,IAAI,CAAC;oCACb,EAAE,EAAE,EAAE;oCACN,IAAI,EAAE,UAAU;oCAChB,QAAQ,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,EAAE,EAAE;iCACtC,CAAC,CAAC;4BACL,CAAC;4BAED,IAAI,IAAI,CAAC,EAAE,EAAE,CAAC;gCACZ,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,EAAE,CAAC;gCAC5B,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,UAAU,CAAC;gCAC9C,IAAI,IAAI,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;oCACxB,SAAS,CAAC,GAAG,CAAC,CAAC,
QAAQ,CAAC,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC;gCACpD,CAAC;gCACD,IAAI,IAAI,CAAC,QAAQ,EAAE,SAAS,EAAE,CAAC;oCAC7B,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC;gCAC9D,CAAC;4BACH,CAAC;iCAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,SAAS,EAAE,CAAC;gCACpC,SAAS,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,SAAS,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC;4BAC/D,CAAC;wBACH,CAAC;wBAED,KAAK,GAAG,IAAI,CAAC;oBACf,CAAC;gBACH,CAAC;gBAED,MAAM,KAAK,CAAC;YACd,CAAC;YAED,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACzB,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,GAAG;oBAC1B,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO;oBAC5B,UAAU,EAAE,SAAS;iBACf,CAAC;YACX,CAAC;YAED,uCAAuC;YACvC,IAAI,YAAY,GAAG,CAAC,CAAC;YACrB,KAAK,MAAM,OAAO,IAAI,QAAQ,IAAI,EAAE,EAAE,CAAC;gBACrC,YAAY,IAAI,iBAAa,CAAC,YAAY,CAAC,OAAO,CAAC,OAAiB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC3F,CAAC;YAED,MAAM,gBAAgB,GAAG,iBAAa,CAAC,YAAY,CACjD,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,EACvC,MAAM,CAAC,KAAK,CACb,CAAC;YAEF,IAAI,gBAAgB,EAAE,CAAC;gBACrB,MAAM,CAAC,KAAK,GAAG;oBACb,aAAa,EAAE,YAAY;oBAC3B,iBAAiB,EAAE,gBAAgB;oBACnC,YAAY,EAAE,YAAY,GAAG,gBAAgB;iBAC9C,CAAC;YACJ,CAAC;YAED,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,KAAK,CAAC;YAEtB,yBAAyB;YACzB,MAAM,IAAI,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5E,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC1B,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;gBACvE,GAAG,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,MAAM,GAAG,IAAI,CAAC;YACvE,CAAC;YAED,YAAY,GAAG,MAAM,cAAc,CAAC,2BAA2B,CAAC;gBAC9D,IAAI;gBACJ,aAAa;gBACb,MAAM;gBACN,IAAI;gBACJ,IAAI;gBACJ,GAAG;aACJ,CAAC,CAAC;YAEH,OAAO,MAAM,CAAC;QAChB,CAAC;QAAC,OAAO,CAAM,EAAE,CAAC;YAChB,iBAAa,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;YACvC,MAAM,CAAC,CAAC;QACV,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,GAAG,EAAE,CAAC;YACX,
IAAI,YAAY,EAAE,CAAC;gBACjB,sBAAW,CAAC,aAAa,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;YAChD,CAAC;QACH,CAAC;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,EACvC,IAAI,EACJ,aAAa,EACb,MAAM,EACN,IAAI,EACJ,IAAI,GAAG,CAAC,EACR,GAAG,GAAG,CAAC,GAQR;QACC,MAAM,YAAY,GAAG,gBAAa,CAAC,YAAY,CAAC;QAChD,MAAM,EACJ,QAAQ,EACR,UAAU,GAAG,IAAI,EACjB,WAAW,GAAG,GAAG,EACjB,KAAK,EACL,IAAI,EACJ,MAAM,GAAG,KAAK,EACd,WAAW,GAAG,KAAK,GACpB,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QAEZ,4BAA4B;QAC5B,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,oBAAoB,EAAE,KAAK,IAAI,CAAC,CAAC,CAAC;QACvE,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,UAAU,IAAI,CAAC,CAAC,CAAC,CAAC;QAClF,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,0BAA0B,EAAE,WAAW,CAAC,CAAC;QAC9E,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC;QAEvE,IAAI,IAAI,EAAE,CAAC;YACT,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,mBAAmB,EAAE,IAAI,CAAC,CAAC;QAClE,CAAC;QACD,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;YAC9B,IAAI,CAAC,YAAY,CAAC,4BAA4B,EAAE,WAAW,CAAC,CAAC;QAC/D,CAAC;QAED,IAAI,YAAY,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,qBAAqB,EAAE,iBAAa,CAAC,kBAAkB,CAAC,QAAQ,IAAI,EAAE,CAAC,CAAC,CAAC;QAChH,CAAC;QAED,IAAI,CAAC,YAAY,CACf,6BAAkB,CAAC,gBAAgB,EACnC,6BAAkB,CAAC,0BAA0B,CAC9C,CAAC;QAEF,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,kBAAkB,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;QAEpE,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,sBAAsB,CAAC;QACrD,MAAM,aAAa,GAAG,MAAM,CAAC,KAAK,IAAI,KAAK,CAAC;QAE5C,MAAM,WAAW,GAAG,MAAM,gBAAa,CAAC,iBAAiB,CAAC,gBAAa,CAAC,YAAY,CAAC,CAAC;QAEtF,yEAAyE;QACzE,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,EAAE,YAAY,IAAI,MAAM,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;QACpF,MAAM,gBAAgB,GAAG,MAAM,CAAC,KAAK,EAAE,gBAAgB,IAAI,MAAM,CAAC,KAAK,EAAE,iBAAiB,IAAI,CAAC,CAAC;QAChG,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,EAAE,WAAW,IAAI,MAAM,CAAC,KAAK,EAAE,YAAY,IAAI,CAAC,CAAC;QAEjF,kCAAkC;QAClC,MAAM,IAAI,GAAG,iBAAa,CAAC,gBAAgB,CAAC,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;QAEhG,cAAc,CAAC,qBAAqB,CAAC,IAAI,EAAE;YACzC,aAAa;YACb,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,QAAQ,EAAE,cAAc,CAAC,QAAQ;SAClC,CAAC,CAAC;QAEH,iBAAiB;QACjB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,qBAAqB,EAAE,aAAa,CAAC,CAAC;QAE3E,
cAAc;QACd,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,YAAY,CAAC,CAAC;QAC9E,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,0BAA0B,EAAE,gBAAgB,CAAC,CAAC;QACnF,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAC;QAC7E,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAC;QAE7E,uBAAuB;QACvB,IAAI,IAAI,GAAG,CAAC,EAAE,CAAC;YACb,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,kBAAkB,EAAE,IAAI,CAAC,CAAC;QACjE,CAAC;QACD,IAAI,GAAG,GAAG,CAAC,EAAE,CAAC;YACZ,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC;QAC/D,CAAC;QAED,gBAAgB;QAChB,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC;YACpC,IAAI,CAAC,YAAY,CACf,6BAAkB,CAAC,6BAA6B,EAChD,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAClC,CAAC;QACJ,CAAC;QAED,cAAc;QACd,MAAM,UAAU,GAAG,OAAO,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,KAAK,QAAQ;YACtE,CAAC,CAAC,6BAAkB,CAAC,uBAAuB;YAC5C,CAAC,CAAC,6BAAkB,CAAC,uBAAuB,CAAC;QAC/C,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAC;QAErE,sBAAsB;QACtB,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;YACzC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC;YACvD,MAAM,SAAS,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YACpF,MAAM,OAAO,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YACtE,MAAM,QAAQ,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YACxF,MAAM,SAAS,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAE1E,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACzB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,gBAAgB,EAAE,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;YAC/E,CAAC;YACD,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,mBAAmB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;YAChF,CAAC;YACD,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACxB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,0BAA0B,
EAAE,QAAQ,CAAC,CAAC;YAC7E,CAAC;YACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACzB,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,gBAAgB,EAAE,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;YAC/E,CAAC;QACH,CAAC;QAED,UAAU;QACV,IAAI,YAAY,EAAE,CAAC;YACjB,MAAM,iBAAiB,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,IAAI,EAAE,CAAC;YAClE,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC;YACvD,IAAI,CAAC,YAAY,CACf,6BAAkB,CAAC,sBAAsB,EACzC,iBAAa,CAAC,mBAAmB,CAAC,iBAAiB,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,aAAa,IAAI,MAAM,EAAE,SAAS,CAAC,CAC3G,CAAC;QACJ,CAAC;QAED,OAAO;YACL,aAAa;YACb,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,QAAQ,EAAE,cAAc,CAAC,QAAQ;SAClC,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,eAAe,CAAC,MAAc;QACnC,MAAM,aAAa,GAAG,oBAAoB,CAAC;QAC3C,MAAM,YAAY,GAAG,gBAAa,CAAC,YAAY,CAAC;QAEhD,OAAO,CAAC,cAAuC,EAAE,EAAE;YACjD,OAAO,KAAK,WAAsB,GAAG,IAAW;gBAC9C,MAAM,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,EAAE,IAAI,EAAE,cAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;gBACxE,OAAO,aAAO,CAAC,IAAI,CAAC,WAAK,CAAC,OAAO,CAAC,aAAO,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,EAAE,KAAK,IAAI,EAAE;oBACpE,IAAI,YAA4C,CAAC;oBACjD,IAAI,CAAC;wBACH,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;wBAExD,MAAM,KAAK,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,IAAI,eAAe,CAAC;wBAC/C,MAAM,WAAW,GAAG,MAAM,gBAAa,CAAC,iBAAiB,CAAC,gBAAa,CAAC,YAAY,CAAC,CAAC;wBACtF,6CAA6C;wBAC7C,MAAM,YAAY,GAAG,QAAQ,CAAC,KAAK,EAAE,YAAY,IAAI,QAAQ,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;wBACxF,MAAM,WAAW,GAAG,QAAQ,CAAC,KAAK,EAAE,WAAW,IAAI,QAAQ,CAAC,KAAK,EAAE,YAAY,IAAI,CAAC,CAAC;wBACrF,MAAM,IAAI,GAAG,iBAAa,CAAC,iBAAiB,CAAC,KAAK,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;wBAE/E,IAAI,CAAC,YAAY,CACf,6BAAkB,CAAC,gBAAgB,EACnC,6BAAkB,CAAC,+BAA+B,CACnD,CAAC;wBAEF,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,eAAe,GAAG,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;wBACnE,MAAM,cAAc,GAAG,KAAK,IAAI,MAAM,CAAC;wBAEvC,cAAc,CAAC,qBAAqB,CAAC,IAAI,EAAE;4BACzC,aAAa;4BACb,KAAK;4BACL,IAAI;4BACJ,IAAI;4BACJ,QAAQ,EAAE,cAAc,CAAC,QAAQ;yBAClC,CAAC,CAAC;wBAEH,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,+BAA+B,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC;wBACzF,I
AAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,wBAAwB,EAAE,KAAK,CAAC,CAAC;wBACtE,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAC;wBAC3D,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC;wBAE5D,IAAI,IAAI,EAAE,CAAC;4BACT,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,mBAAmB,EAAE,IAAI,CAAC,CAAC;wBAClE,CAAC;wBACD,IAAI,YAAY,IAAI,cAAc,EAAE,CAAC;4BACnC,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC;4BACnF,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,qBAAqB,EAAE,iBAAa,CAAC,kBAAkB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;wBAC7J,CAAC;wBAED,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,YAAY,CAAC,CAAC;wBAC9E,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAC;wBAC7E,IAAI,CAAC,YAAY,CAAC,6BAAkB,CAAC,yBAAyB,EAAE,YAAY,CAAC,CAAC;wBAE9E,YAAY,GAAG;4BACb,aAAa;4BACb,KAAK;4BACL,IAAI;4BACJ,IAAI;4BACJ,QAAQ,EAAE,cAAc,CAAC,QAAQ;yBAClC,CAAC;wBAEF,OAAO,QAAQ,CAAC;oBAClB,CAAC;oBAAC,OAAO,CAAM,EAAE,CAAC;wBAChB,iBAAa,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;wBACvC,MAAM,CAAC,CAAC;oBACV,CAAC;4BAAS,CAAC;wBACT,IAAI,CAAC,GAAG,EAAE,CAAC;wBACX,IAAI,YAAY,EAAE,CAAC;4BACjB,sBAAW,CAAC,aAAa,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;wBAChD,CAAC;oBACH,CAAC;gBACH,CAAC,CAAC,CAAC;YACL,CAAC,CAAC;QACJ,CAAC,CAAC;IACJ,CAAC;;AAlbM,uBAAQ,GAAG,SAAS,CAAC;AAqb9B,kBAAe,cAAc,CAAC"}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { Span, Tracer } from '@opentelemetry/api';
|
|
2
2
|
import BaseWrapper from '../base-wrapper';
|
|
3
3
|
export default class OllamaWrapper extends BaseWrapper {
|
|
4
|
-
static
|
|
4
|
+
static aiSystem: string;
|
|
5
5
|
static _patchChat(tracer: Tracer): any;
|
|
6
6
|
static _chat({ args, genAIEndpoint, response, span, }: {
|
|
7
7
|
args: any[];
|
|
@@ -15,10 +15,18 @@ export default class OllamaWrapper extends BaseWrapper {
|
|
|
15
15
|
response: any;
|
|
16
16
|
span: Span;
|
|
17
17
|
}): AsyncGenerator<any, any, unknown>;
|
|
18
|
-
static _chatCommonSetter({ args, genAIEndpoint, result, span, }: {
|
|
18
|
+
static _chatCommonSetter({ args, genAIEndpoint, result, span, ttft, tbt, }: {
|
|
19
19
|
args: any[];
|
|
20
20
|
genAIEndpoint: string;
|
|
21
21
|
result: any;
|
|
22
22
|
span: Span;
|
|
23
|
-
|
|
23
|
+
ttft?: number;
|
|
24
|
+
tbt?: number;
|
|
25
|
+
}): Promise<{
|
|
26
|
+
genAIEndpoint: string;
|
|
27
|
+
model: any;
|
|
28
|
+
user: any;
|
|
29
|
+
cost: number;
|
|
30
|
+
aiSystem: string;
|
|
31
|
+
}>;
|
|
24
32
|
}
|
|
@@ -7,24 +7,8 @@ const api_1 = require("@opentelemetry/api");
|
|
|
7
7
|
const config_1 = __importDefault(require("../../config"));
|
|
8
8
|
const helpers_1 = __importDefault(require("../../helpers"));
|
|
9
9
|
const semantic_convention_1 = __importDefault(require("../../semantic-convention"));
|
|
10
|
-
const constant_1 = require("../../constant");
|
|
11
10
|
const base_wrapper_1 = __importDefault(require("../base-wrapper"));
|
|
12
11
|
class OllamaWrapper extends base_wrapper_1.default {
|
|
13
|
-
static setBaseSpanAttributes(span, { genAIEndpoint, model, user, cost, environment, applicationName }) {
|
|
14
|
-
span.setAttributes({
|
|
15
|
-
[constant_1.TELEMETRY_SDK_NAME]: constant_1.SDK_NAME,
|
|
16
|
-
});
|
|
17
|
-
span.setAttribute(constant_1.TELEMETRY_SDK_NAME, constant_1.SDK_NAME);
|
|
18
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_SYSTEM, semantic_convention_1.default.GEN_AI_SYSTEM_ANTHROPIC);
|
|
19
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_ENDPOINT, genAIEndpoint);
|
|
20
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_ENVIRONMENT, environment);
|
|
21
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_APPLICATION_NAME, applicationName);
|
|
22
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_MODEL, model);
|
|
23
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_USER, user);
|
|
24
|
-
if (cost !== undefined)
|
|
25
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_COST, cost);
|
|
26
|
-
span.setStatus({ code: api_1.SpanStatusCode.OK });
|
|
27
|
-
}
|
|
28
12
|
static _patchChat(tracer) {
|
|
29
13
|
const genAIEndpoint = 'ollama.chat';
|
|
30
14
|
return (originalMethod) => {
|
|
@@ -55,8 +39,9 @@ class OllamaWrapper extends base_wrapper_1.default {
|
|
|
55
39
|
};
|
|
56
40
|
}
|
|
57
41
|
static async _chat({ args, genAIEndpoint, response, span, }) {
|
|
42
|
+
let metricParams;
|
|
58
43
|
try {
|
|
59
|
-
await OllamaWrapper._chatCommonSetter({
|
|
44
|
+
metricParams = await OllamaWrapper._chatCommonSetter({
|
|
60
45
|
args,
|
|
61
46
|
genAIEndpoint,
|
|
62
47
|
result: response,
|
|
@@ -69,61 +54,50 @@ class OllamaWrapper extends base_wrapper_1.default {
|
|
|
69
54
|
}
|
|
70
55
|
finally {
|
|
71
56
|
span.end();
|
|
57
|
+
if (metricParams) {
|
|
58
|
+
base_wrapper_1.default.recordMetrics(span, metricParams);
|
|
59
|
+
}
|
|
72
60
|
}
|
|
73
61
|
}
|
|
74
62
|
static async *_chatGenerator({ args, genAIEndpoint, response, span, }) {
|
|
63
|
+
let metricParams;
|
|
64
|
+
const timestamps = [];
|
|
65
|
+
const startTime = Date.now();
|
|
75
66
|
try {
|
|
76
67
|
const result = {
|
|
77
|
-
id: '0',
|
|
78
68
|
model: '',
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
role: '',
|
|
84
|
-
},
|
|
85
|
-
],
|
|
86
|
-
usage: {
|
|
87
|
-
input_tokens: 0,
|
|
88
|
-
output_tokens: 0,
|
|
89
|
-
total_tokens: 0,
|
|
90
|
-
},
|
|
69
|
+
message: { role: 'assistant', content: '' },
|
|
70
|
+
done_reason: '',
|
|
71
|
+
prompt_eval_count: 0,
|
|
72
|
+
eval_count: 0,
|
|
91
73
|
};
|
|
92
74
|
for await (const chunk of response) {
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
result.id = chunk.message.id;
|
|
104
|
-
result.model = chunk.message.model;
|
|
105
|
-
result.content[0].role = chunk.message.role;
|
|
106
|
-
result.usage.input_tokens += Number(chunk.message.usage?.input_tokens) ?? 0;
|
|
107
|
-
result.usage.output_tokens += Number(chunk.message.usage?.output_tokens) ?? 0;
|
|
108
|
-
result.stop_reason = chunk.message?.stop_reason ?? '';
|
|
109
|
-
}
|
|
110
|
-
break;
|
|
111
|
-
case 'content_block_start':
|
|
112
|
-
result.content[0].text = chunk.content_block?.text ?? '';
|
|
113
|
-
break;
|
|
114
|
-
case 'message_delta':
|
|
115
|
-
result.stop_reason = chunk.delta?.stop_reason ?? '';
|
|
116
|
-
result.usage.output_tokens += Number(chunk.usage?.output_tokens) ?? 0;
|
|
117
|
-
break;
|
|
75
|
+
timestamps.push(Date.now());
|
|
76
|
+
result.model = chunk.model || result.model;
|
|
77
|
+
if (chunk.message?.content) {
|
|
78
|
+
result.message.content += chunk.message.content;
|
|
79
|
+
result.message.role = chunk.message.role || result.message.role;
|
|
80
|
+
}
|
|
81
|
+
if (chunk.done) {
|
|
82
|
+
result.done_reason = chunk.done_reason || '';
|
|
83
|
+
result.prompt_eval_count = chunk.prompt_eval_count || 0;
|
|
84
|
+
result.eval_count = chunk.eval_count || 0;
|
|
118
85
|
}
|
|
119
86
|
yield chunk;
|
|
120
87
|
}
|
|
121
|
-
|
|
122
|
-
|
|
88
|
+
const ttft = timestamps.length > 0 ? (timestamps[0] - startTime) / 1000 : 0;
|
|
89
|
+
let tbt = 0;
|
|
90
|
+
if (timestamps.length > 1) {
|
|
91
|
+
const timeDiffs = timestamps.slice(1).map((t, i) => t - timestamps[i]);
|
|
92
|
+
tbt = timeDiffs.reduce((a, b) => a + b, 0) / timeDiffs.length / 1000;
|
|
93
|
+
}
|
|
94
|
+
metricParams = await OllamaWrapper._chatCommonSetter({
|
|
123
95
|
args,
|
|
124
96
|
genAIEndpoint,
|
|
125
97
|
result,
|
|
126
98
|
span,
|
|
99
|
+
ttft,
|
|
100
|
+
tbt,
|
|
127
101
|
});
|
|
128
102
|
return response;
|
|
129
103
|
}
|
|
@@ -132,43 +106,20 @@ class OllamaWrapper extends base_wrapper_1.default {
|
|
|
132
106
|
}
|
|
133
107
|
finally {
|
|
134
108
|
span.end();
|
|
109
|
+
if (metricParams) {
|
|
110
|
+
base_wrapper_1.default.recordMetrics(span, metricParams);
|
|
111
|
+
}
|
|
135
112
|
}
|
|
136
113
|
}
|
|
137
|
-
static async _chatCommonSetter({ args, genAIEndpoint, result, span, }) {
|
|
138
|
-
const applicationName = config_1.default.applicationName;
|
|
139
|
-
const environment = config_1.default.environment;
|
|
114
|
+
static async _chatCommonSetter({ args, genAIEndpoint, result, span, ttft = 0, tbt = 0, }) {
|
|
140
115
|
const traceContent = config_1.default.traceContent;
|
|
141
|
-
const { messages, max_tokens = null, seed = null, temperature = 1, top_p, top_k, user, stream = false,
|
|
142
|
-
// Format 'messages' into a single string
|
|
143
|
-
const messagePrompt = messages || '';
|
|
144
|
-
const formattedMessages = [];
|
|
145
|
-
for (const message of messagePrompt) {
|
|
146
|
-
const role = message.role;
|
|
147
|
-
const content = message.content;
|
|
148
|
-
if (Array.isArray(content)) {
|
|
149
|
-
const contentStr = content
|
|
150
|
-
.map((item) => {
|
|
151
|
-
if ('type' in item) {
|
|
152
|
-
return `${item.type}: ${item.text ? item.text : item.image_url}`;
|
|
153
|
-
}
|
|
154
|
-
else {
|
|
155
|
-
return `text: ${item.text}`;
|
|
156
|
-
}
|
|
157
|
-
})
|
|
158
|
-
.join(', ');
|
|
159
|
-
formattedMessages.push(`${role}: ${contentStr}`);
|
|
160
|
-
}
|
|
161
|
-
else {
|
|
162
|
-
formattedMessages.push(`${role}: ${content}`);
|
|
163
|
-
}
|
|
164
|
-
}
|
|
165
|
-
const prompt = formattedMessages.join('\n');
|
|
116
|
+
const { messages, max_tokens = null, seed = null, temperature = 1, top_p, top_k, user, stream = false, } = args[0];
|
|
166
117
|
span.setAttribute(semantic_convention_1.default.GEN_AI_OPERATION, semantic_convention_1.default.GEN_AI_OPERATION_TYPE_CHAT);
|
|
167
118
|
span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_ID, result.id);
|
|
168
119
|
const model = result.model || args[0].model;
|
|
169
120
|
const pricingInfo = await config_1.default.updatePricingJson(config_1.default.pricing_json);
|
|
170
|
-
const promptTokens = result.prompt_eval_count;
|
|
171
|
-
const completionTokens = result.eval_count;
|
|
121
|
+
const promptTokens = result.prompt_eval_count || 0;
|
|
122
|
+
const completionTokens = result.eval_count || 0;
|
|
172
123
|
const totalTokens = promptTokens + completionTokens;
|
|
173
124
|
// Calculate cost of the operation
|
|
174
125
|
const cost = helpers_1.default.getChatModelCost(model, pricingInfo, promptTokens, completionTokens);
|
|
@@ -177,34 +128,48 @@ class OllamaWrapper extends base_wrapper_1.default {
|
|
|
177
128
|
model,
|
|
178
129
|
user,
|
|
179
130
|
cost,
|
|
180
|
-
|
|
181
|
-
environment,
|
|
131
|
+
aiSystem: OllamaWrapper.aiSystem,
|
|
182
132
|
});
|
|
133
|
+
// Response model
|
|
134
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_MODEL, model);
|
|
183
135
|
// Request Params attributes : Start
|
|
184
136
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_P, top_p);
|
|
185
137
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TOP_K, top_k);
|
|
186
138
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_MAX_TOKENS, max_tokens);
|
|
187
139
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_TEMPERATURE, temperature);
|
|
188
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON, stop_reason);
|
|
189
140
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_IS_STREAM, stream);
|
|
190
141
|
span.setAttribute(semantic_convention_1.default.GEN_AI_REQUEST_SEED, seed);
|
|
191
142
|
if (traceContent) {
|
|
192
|
-
span.setAttribute(semantic_convention_1.default.
|
|
143
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_INPUT_MESSAGES, helpers_1.default.buildInputMessages(messages || []));
|
|
193
144
|
}
|
|
194
145
|
// Request Params attributes : End
|
|
195
146
|
span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_INPUT_TOKENS, promptTokens);
|
|
196
147
|
span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_OUTPUT_TOKENS, completionTokens);
|
|
197
148
|
span.setAttribute(semantic_convention_1.default.GEN_AI_USAGE_TOTAL_TOKENS, totalTokens);
|
|
149
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_CLIENT_TOKEN_USAGE, totalTokens);
|
|
198
150
|
if (result.done_reason) {
|
|
199
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON, result.done_reason);
|
|
151
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_RESPONSE_FINISH_REASON, [result.done_reason]);
|
|
152
|
+
}
|
|
153
|
+
// TTFT and TBT metrics
|
|
154
|
+
if (ttft > 0) {
|
|
155
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TTFT, ttft);
|
|
156
|
+
}
|
|
157
|
+
if (tbt > 0) {
|
|
158
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_SERVER_TBT, tbt);
|
|
200
159
|
}
|
|
201
160
|
if (traceContent) {
|
|
202
|
-
// Format 'messages' into a single string
|
|
203
161
|
const { message = {} } = result;
|
|
204
|
-
|
|
205
|
-
span.setAttribute(semantic_convention_1.default.GEN_AI_CONTENT_COMPLETION, messageString);
|
|
162
|
+
span.setAttribute(semantic_convention_1.default.GEN_AI_OUTPUT_MESSAGES, helpers_1.default.buildOutputMessages(message.content || '', result.done_reason || 'stop'));
|
|
206
163
|
}
|
|
164
|
+
return {
|
|
165
|
+
genAIEndpoint,
|
|
166
|
+
model,
|
|
167
|
+
user,
|
|
168
|
+
cost,
|
|
169
|
+
aiSystem: OllamaWrapper.aiSystem,
|
|
170
|
+
};
|
|
207
171
|
}
|
|
208
172
|
}
|
|
173
|
+
OllamaWrapper.aiSystem = 'ollama';
|
|
209
174
|
exports.default = OllamaWrapper;
|
|
210
175
|
//# sourceMappingURL=wrapper.js.map
|