@traceloop/instrumentation-bedrock 0.16.0 → 0.17.0
This diff shows the content of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
- package/dist/index.js +59 -16
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +59 -16
- package/package.json +3 -3
package/dist/index.js
CHANGED
@@ -5,7 +5,7 @@ var api = require('@opentelemetry/api');
 var instrumentation = require('@opentelemetry/instrumentation');
 var aiSemanticConventions = require('@traceloop/ai-semantic-conventions');
 
-var version = "0.16.0";
+var version = "0.17.0";
 
 class BedrockInstrumentation extends instrumentation.InstrumentationBase {
     constructor(config = {}) {
@@ -191,21 +191,42 @@ class BedrockInstrumentation extends instrumentation.InstrumentationBase {
                 : {}));
         }
         case "anthropic": {
-
-
-
-
+            const baseAttributes = {
+                [aiSemanticConventions.SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
+                [aiSemanticConventions.SpanAttributes.LLM_TOP_K]: requestBody["top_k"],
+                [aiSemanticConventions.SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
+                [aiSemanticConventions.SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens_to_sample"] || requestBody["max_tokens"],
+            };
+            if (!this._shouldSendPrompts()) {
+                return baseAttributes;
+            }
+            // Handle new messages API format (used by langchain)
+            if (requestBody["messages"]) {
+                const promptAttributes = {};
+                requestBody["messages"].forEach((message, index) => {
+                    promptAttributes[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+                        message.role;
+                    promptAttributes[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                        typeof message.content === "string"
+                            ? message.content
+                            : JSON.stringify(message.content);
+                });
+                return Object.assign(Object.assign({}, baseAttributes), promptAttributes);
+            }
+            // Handle legacy prompt format
+            if (requestBody["prompt"]) {
+                return Object.assign(Object.assign({}, baseAttributes), { [`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.0.role`]: "user", [`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["prompt"]
                     // The format is removing when we are setting span attribute
                     .replace("\n\nHuman:", "")
-                    .replace("\n\nAssistant:", "")
-
-
+                    .replace("\n\nAssistant:", "") });
+            }
+            return baseAttributes;
         }
         case "cohere": {
             return Object.assign({ [aiSemanticConventions.SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["p"], [aiSemanticConventions.SpanAttributes.LLM_TOP_K]: requestBody["k"], [aiSemanticConventions.SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"], [aiSemanticConventions.SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens"] }, (this._shouldSendPrompts()
                 ? {
                     [`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.0.role`]: "user",
-                    [`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["prompt"],
+                    [`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["message"] || requestBody["prompt"],
                 }
                 : {}));
         }
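The rewritten "anthropic" branch now builds the sampling attributes first (note the new fallback from max_tokens_to_sample to max_tokens) and only then attaches prompt content, accepting either the newer messages array or the legacy prompt string. A minimal standalone sketch of that mapping, with a placeholder key standing in for the real SpanAttributes.LLM_PROMPTS constant (illustrative only, not the package's code):

// Placeholder for aiSemanticConventions.SpanAttributes.LLM_PROMPTS (name assumed for this sketch).
const LLM_PROMPTS = "llm.prompts";

function anthropicPromptAttributes(requestBody) {
    // New messages API shape (what langchain sends to Anthropic models on Bedrock).
    if (requestBody["messages"]) {
        const attrs = {};
        requestBody["messages"].forEach((message, index) => {
            attrs[`${LLM_PROMPTS}.${index}.role`] = message.role;
            attrs[`${LLM_PROMPTS}.${index}.content`] =
                typeof message.content === "string"
                    ? message.content
                    : JSON.stringify(message.content);
        });
        return attrs;
    }
    // Legacy text-completion shape: strip the Human:/Assistant: markers.
    if (requestBody["prompt"]) {
        return {
            [`${LLM_PROMPTS}.0.role`]: "user",
            [`${LLM_PROMPTS}.0.content`]: requestBody["prompt"]
                .replace("\n\nHuman:", "")
                .replace("\n\nAssistant:", ""),
        };
    }
    return {};
}

// { "llm.prompts.0.role": "user", "llm.prompts.0.content": "Hello" }
console.log(anthropicPromptAttributes({ messages: [{ role: "user", content: "Hello" }], max_tokens: 200 }));
// { "llm.prompts.0.role": "user", "llm.prompts.0.content": " Hello" }
console.log(anthropicPromptAttributes({ prompt: "\n\nHuman: Hello\n\nAssistant:", max_tokens_to_sample: 200 }));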
@@ -222,6 +243,7 @@ class BedrockInstrumentation extends instrumentation.InstrumentationBase {
         }
     }
     _setResponseAttributes(vendor, response, isStream = false) {
+        var _a, _b, _c, _d;
         switch (vendor) {
             case "ai21": {
                 return Object.assign({ [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["completions"][0]["finishReason"]["reason"], [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant" }, (this._shouldSendPrompts()
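The only addition in this hunk is the hoisted var _a, _b, _c, _d; declaration. These are the temporaries TypeScript emits when optional chaining is compiled for older targets, and the reworked cohere branch below is what uses them. A small illustration (not package code) of what response["generations"]?.[0]?.["finish_reason"] downlevels to:

function finishReason(response) {
    var _a, _b;
    // Compiled form of: response["generations"]?.[0]?.["finish_reason"]
    return (_b = (_a = response["generations"]) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0
        ? void 0
        : _b["finish_reason"];
}

console.log(finishReason({ generations: [{ finish_reason: "COMPLETE" }] })); // "COMPLETE"
console.log(finishReason({})); // undefined rather than a TypeError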
@@ -248,18 +270,39 @@ class BedrockInstrumentation extends instrumentation.InstrumentationBase {
                 : {}));
         }
         case "anthropic": {
-
-
-
-
-
+            const baseAttributes = {
+                [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["stop_reason"],
+                [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant",
+            };
+            if (!this._shouldSendPrompts()) {
+                return baseAttributes;
+            }
+            // Handle new messages API format response
+            if (response["content"]) {
+                const content = Array.isArray(response["content"])
+                    ? response["content"].map((c) => c.text || c).join("")
+                    : response["content"];
+                return Object.assign(Object.assign({}, baseAttributes), { [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.content`]: content });
+            }
+            // Handle legacy completion format
+            if (response["completion"]) {
+                return Object.assign(Object.assign({}, baseAttributes), { [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.content`]: response["completion"] });
+            }
+            return baseAttributes;
         }
         case "cohere": {
-
+            const baseAttributes = Object.assign({ [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: (_b = (_a = response["generations"]) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b["finish_reason"], [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant" }, (this._shouldSendPrompts()
                 ? {
-                    [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.content`]: response["generations"][0]["text"],
+                    [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.content`]: (_d = (_c = response["generations"]) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d["text"],
                 }
                 : {}));
+            // Add token usage if available
+            if (response["meta"] && response["meta"]["billed_units"]) {
+                const billedUnits = response["meta"]["billed_units"];
+                return Object.assign(Object.assign({}, baseAttributes), { [aiSemanticConventions.SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: billedUnits["input_tokens"], [aiSemanticConventions.SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: billedUnits["output_tokens"], [aiSemanticConventions.SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: (billedUnits["input_tokens"] || 0) +
+                    (billedUnits["output_tokens"] || 0) });
+            }
+            return baseAttributes;
         }
         case "meta": {
             return Object.assign({ [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["stop_reason"], [`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant", [aiSemanticConventions.SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: response["prompt_token_count"], [aiSemanticConventions.SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: response["generation_token_count"], [aiSemanticConventions.SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: response["prompt_token_count"] + response["generation_token_count"] }, (this._shouldSendPrompts()
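On the response side, the anthropic branch now recognizes messages-API responses, whose content is an array of text blocks joined into a single completion string, while still falling back to the legacy completion field; the cohere branch guards generations with the downleveled optional chaining shown above and, when meta.billed_units is present, reports prompt, completion, and total token usage. A self-contained sketch of those two mappings, with plain property names standing in for the semantic-convention attribute keys:

// Anthropic: join messages-API content blocks, or fall back to the legacy field.
function anthropicCompletionText(response) {
    if (response["content"]) {
        return Array.isArray(response["content"])
            ? response["content"].map((c) => c.text || c).join("")
            : response["content"];
    }
    return response["completion"];
}

// Cohere: derive token usage from meta.billed_units when the API returns it.
function cohereUsage(response) {
    const billed = response["meta"] && response["meta"]["billed_units"];
    if (!billed) {
        return {};
    }
    return {
        promptTokens: billed["input_tokens"],
        completionTokens: billed["output_tokens"],
        totalTokens: (billed["input_tokens"] || 0) + (billed["output_tokens"] || 0),
    };
}

// "Hi there"
console.log(anthropicCompletionText({ content: [{ type: "text", text: "Hi " }, { type: "text", text: "there" }] }));
// { promptTokens: 12, completionTokens: 34, totalTokens: 46 }
console.log(cohereUsage({ meta: { billed_units: { input_tokens: 12, output_tokens: 34 } } }));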
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sources":["../../src/instrumentation.ts"],"sourcesContent":["/*\n * Copyright Traceloop\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n Span,\n Attributes,\n SpanKind,\n SpanStatusCode,\n context,\n trace,\n} from \"@opentelemetry/api\";\nimport {\n InstrumentationBase,\n InstrumentationModuleDefinition,\n InstrumentationNodeModuleDefinition,\n safeExecuteInTheMiddle,\n} from \"@opentelemetry/instrumentation\";\nimport { BedrockInstrumentationConfig } from \"./types\";\nimport type * as bedrock from \"@aws-sdk/client-bedrock-runtime\";\nimport {\n CONTEXT_KEY_ALLOW_TRACE_CONTENT,\n LLMRequestTypeValues,\n SpanAttributes,\n} from \"@traceloop/ai-semantic-conventions\";\nimport { version } from \"../package.json\";\n\nexport class BedrockInstrumentation extends InstrumentationBase {\n declare protected _config: BedrockInstrumentationConfig;\n\n constructor(config: BedrockInstrumentationConfig = {}) {\n super(\"@traceloop/instrumentation-bedrock\", version, config);\n }\n\n public override setConfig(config: BedrockInstrumentationConfig = {}) {\n super.setConfig(config);\n }\n\n protected init(): InstrumentationModuleDefinition {\n const module = new InstrumentationNodeModuleDefinition(\n \"@aws-sdk/client-bedrock-runtime\",\n [\">=3.499.0\"],\n this.wrap.bind(this),\n this.unwrap.bind(this),\n );\n\n return module;\n }\n\n public manuallyInstrument(module: typeof bedrock) {\n this._diag.debug(`Patching @aws-sdk/client-bedrock-runtime manually`);\n\n this._wrap(\n module.BedrockRuntimeClient.prototype,\n \"send\",\n this.wrapperMethod(),\n );\n }\n\n private wrap(module: typeof bedrock, moduleVersion?: string) {\n this._diag.debug(\n `Patching @aws-sdk/client-bedrock-runtime@${moduleVersion}`,\n );\n\n this._wrap(\n module.BedrockRuntimeClient.prototype,\n \"send\",\n this.wrapperMethod(),\n );\n\n return module;\n }\n\n private unwrap(module: typeof bedrock, moduleVersion?: string) {\n this._diag.debug(\n `Unpatching @aws-sdk/client-bedrock-runtime@${moduleVersion}`,\n );\n\n this._unwrap(module.BedrockRuntimeClient.prototype, \"send\");\n }\n\n private wrapperMethod() {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const plugin = this;\n // eslint-disable-next-line\n return (original: Function) => {\n return function method(this: any, ...args: any) {\n const span = plugin._startSpan({\n params: args[0],\n });\n const execContext = trace.setSpan(context.active(), span);\n const execPromise = safeExecuteInTheMiddle(\n () => {\n return context.with(execContext, () => {\n return original.apply(this, args);\n });\n },\n (e) => {\n if (e) {\n plugin._diag.error(`Error in bedrock instrumentation`, e);\n }\n },\n );\n const wrappedPromise = plugin._wrapPromise(span, execPromise);\n return context.bind(execContext, wrappedPromise);\n };\n };\n }\n private _wrapPromise<T>(span: Span, promise: Promise<T>): Promise<T> {\n return promise\n .then(async (result) => {\n await this._endSpan({\n span,\n result: result as\n | 
bedrock.InvokeModelCommandOutput\n | bedrock.InvokeModelWithResponseStreamCommandOutput,\n });\n\n return new Promise<T>((resolve) => resolve(result));\n })\n .catch((error: Error) => {\n return new Promise<T>((_, reject) => {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: error.message,\n });\n span.recordException(error);\n span.end();\n\n reject(error);\n });\n });\n }\n\n private _startSpan({\n params,\n }: {\n params: Parameters<bedrock.BedrockRuntimeClient[\"send\"]>[0];\n }): Span {\n let attributes: Attributes = {};\n\n try {\n const input = params.input as bedrock.InvokeModelCommandInput;\n const { modelVendor, model } = this._extractVendorAndModel(\n input.modelId || \"\",\n );\n\n attributes = {\n [SpanAttributes.LLM_SYSTEM]: \"AWS\",\n [SpanAttributes.LLM_REQUEST_MODEL]: model,\n [SpanAttributes.LLM_RESPONSE_MODEL]: input.modelId,\n [SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,\n };\n\n if (typeof input.body === \"string\") {\n const requestBody = JSON.parse(input.body);\n\n attributes = {\n ...attributes,\n ...this._setRequestAttributes(modelVendor, requestBody),\n };\n }\n } catch (e) {\n this._diag.debug(e);\n this._config.exceptionLogger?.(e);\n }\n\n return this.tracer.startSpan(`bedrock.completion`, {\n kind: SpanKind.CLIENT,\n attributes,\n });\n }\n\n private async _endSpan({\n span,\n result,\n }: {\n span: Span;\n result:\n | bedrock.InvokeModelCommandOutput\n | bedrock.InvokeModelWithResponseStreamCommandOutput;\n }) {\n try {\n if (\"body\" in result) {\n const attributes =\n \"attributes\" in span\n ? (span[\"attributes\"] as Record<string, any>)\n : {};\n\n if (SpanAttributes.LLM_SYSTEM in attributes) {\n const modelId = attributes[\n SpanAttributes.LLM_RESPONSE_MODEL\n ] as string;\n const { modelVendor, model } = this._extractVendorAndModel(modelId);\n\n span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, model);\n\n if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {\n const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;\n\n let streamedContent = \"\";\n for await (const value of rawRes) {\n // Convert it to a JSON String\n const jsonString = new TextDecoder().decode(value.chunk?.bytes);\n // Parse the JSON string\n const parsedResponse = JSON.parse(jsonString);\n\n if (\"amazon-bedrock-invocationMetrics\" in parsedResponse) {\n span.setAttribute(\n SpanAttributes.LLM_USAGE_PROMPT_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"inputTokenCount\"\n ],\n );\n span.setAttribute(\n SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"outputTokenCount\"\n ],\n );\n\n span.setAttribute(\n SpanAttributes.LLM_USAGE_TOTAL_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"inputTokenCount\"\n ] +\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"outputTokenCount\"\n ],\n );\n }\n\n let responseAttributes = this._setResponseAttributes(\n modelVendor,\n parsedResponse,\n true,\n );\n\n // ! 
NOTE: This make sure the content always have all streamed chunks\n if (this._shouldSendPrompts()) {\n // Update local value with attribute value that was set by _setResponseAttributes\n streamedContent +=\n responseAttributes[\n `${SpanAttributes.LLM_COMPLETIONS}.0.content`\n ];\n // re-assign the new value to responseAttributes\n responseAttributes = {\n ...responseAttributes,\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n streamedContent,\n };\n }\n\n span.setAttributes(responseAttributes);\n }\n } else if (result.body instanceof Object.getPrototypeOf(Uint8Array)) {\n // Convert it to a JSON String\n const jsonString = new TextDecoder().decode(\n result.body as Uint8Array,\n );\n // Parse the JSON string\n const parsedResponse = JSON.parse(jsonString);\n\n const responseAttributes = this._setResponseAttributes(\n modelVendor,\n parsedResponse,\n );\n\n span.setAttributes(responseAttributes);\n }\n }\n }\n } catch (e) {\n this._diag.debug(e);\n this._config.exceptionLogger?.(e);\n }\n\n span.setStatus({ code: SpanStatusCode.OK });\n span.end();\n }\n\n private _setRequestAttributes(\n vendor: string,\n requestBody: Record<string, any>,\n ) {\n switch (vendor) {\n case \"ai21\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"topP\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"maxTokens\"],\n [SpanAttributes.LLM_PRESENCE_PENALTY]:\n requestBody[\"presencePenalty\"][\"scale\"],\n [SpanAttributes.LLM_FREQUENCY_PENALTY]:\n requestBody[\"frequencyPenalty\"][\"scale\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n case \"amazon\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]:\n requestBody[\"textGenerationConfig\"][\"topP\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]:\n requestBody[\"textGenerationConfig\"][\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]:\n requestBody[\"textGenerationConfig\"][\"maxTokenCount\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"inputText\"],\n }\n : {}),\n };\n }\n case \"anthropic\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"top_p\"],\n [SpanAttributes.LLM_TOP_K]: requestBody[\"top_k\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]:\n requestBody[\"max_tokens_to_sample\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody[\n \"prompt\"\n ]\n // The format is removing when we are setting span attribute\n .replace(\"\\n\\nHuman:\", \"\")\n .replace(\"\\n\\nAssistant:\", \"\"),\n }\n : {}),\n };\n }\n case \"cohere\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"p\"],\n [SpanAttributes.LLM_TOP_K]: requestBody[\"k\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"max_tokens\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? 
{\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n case \"meta\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"top_p\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"max_gen_len\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n default:\n return {};\n }\n }\n\n private _setResponseAttributes(\n vendor: string,\n response: Record<string, any>,\n isStream = false,\n ) {\n switch (vendor) {\n case \"ai21\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"completions\"][0][\"finishReason\"][\"reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"completions\"][0][\"data\"][\"text\"],\n }\n : {}),\n };\n }\n case \"amazon\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: isStream\n ? response[\"completionReason\"]\n : response[\"results\"][0][\"completionReason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]:\n response[\"inputTextTokenCount\"],\n [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: isStream\n ? response[\"totalOutputTextTokenCount\"]\n : response[\"results\"][0][\"tokenCount\"],\n [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: isStream\n ? response[\"inputTextTokenCount\"] +\n response[\"totalOutputTextTokenCount\"]\n : response[\"inputTextTokenCount\"] +\n response[\"results\"][0][\"tokenCount\"],\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: isStream\n ? response[\"outputText\"]\n : response[\"results\"][0][\"outputText\"],\n }\n : {}),\n };\n }\n case \"anthropic\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"stop_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"completion\"],\n }\n : {}),\n };\n }\n case \"cohere\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"generations\"][0][\"finish_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"generations\"][0][\"text\"],\n }\n : {}),\n };\n }\n case \"meta\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"stop_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]:\n response[\"prompt_token_count\"],\n [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]:\n response[\"generation_token_count\"],\n [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]:\n response[\"prompt_token_count\"] + response[\"generation_token_count\"],\n ...(this._shouldSendPrompts()\n ? 
{\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"generation\"],\n }\n : {}),\n };\n }\n default:\n return {};\n }\n }\n\n private _shouldSendPrompts() {\n const contextShouldSendPrompts = context\n .active()\n .getValue(CONTEXT_KEY_ALLOW_TRACE_CONTENT);\n\n if (contextShouldSendPrompts !== undefined) {\n return contextShouldSendPrompts;\n }\n\n return this._config.traceContent !== undefined\n ? this._config.traceContent\n : true;\n }\n\n private _extractVendorAndModel(modelId: string): {\n modelVendor: string;\n model: string;\n } {\n if (!modelId) {\n return { modelVendor: \"\", model: \"\" };\n }\n\n const parts = modelId.split(\".\");\n return {\n modelVendor: parts[0] || \"\",\n model: parts[1] || \"\",\n };\n }\n}\n"],"names":["InstrumentationBase","InstrumentationNodeModuleDefinition","trace","context","safeExecuteInTheMiddle","__awaiter","SpanStatusCode","SpanAttributes","LLMRequestTypeValues","SpanKind","__asyncValues","CONTEXT_KEY_ALLOW_TRACE_CONTENT"],"mappings":";;;;;;;;;AAsCM,MAAO,sBAAuB,SAAQA,mCAAmB,CAAA;AAG7D,IAAA,WAAA,CAAY,SAAuC,EAAE,EAAA;AACnD,QAAA,KAAK,CAAC,oCAAoC,EAAE,OAAO,EAAE,MAAM,CAAC;IAC9D;IAEgB,SAAS,CAAC,SAAuC,EAAE,EAAA;AACjE,QAAA,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC;IACzB;IAEU,IAAI,GAAA;AACZ,QAAA,MAAM,MAAM,GAAG,IAAIC,mDAAmC,CACpD,iCAAiC,EACjC,CAAC,WAAW,CAAC,EACb,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EACpB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CACvB;AAED,QAAA,OAAO,MAAM;IACf;AAEO,IAAA,kBAAkB,CAAC,MAAsB,EAAA;AAC9C,QAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA,iDAAA,CAAmD,CAAC;AAErE,QAAA,IAAI,CAAC,KAAK,CACR,MAAM,CAAC,oBAAoB,CAAC,SAAS,EACrC,MAAM,EACN,IAAI,CAAC,aAAa,EAAE,CACrB;IACH;IAEQ,IAAI,CAAC,MAAsB,EAAE,aAAsB,EAAA;QACzD,IAAI,CAAC,KAAK,CAAC,KAAK,CACd,CAAA,yCAAA,EAA4C,aAAa,CAAA,CAAE,CAC5D;AAED,QAAA,IAAI,CAAC,KAAK,CACR,MAAM,CAAC,oBAAoB,CAAC,SAAS,EACrC,MAAM,EACN,IAAI,CAAC,aAAa,EAAE,CACrB;AAED,QAAA,OAAO,MAAM;IACf;IAEQ,MAAM,CAAC,MAAsB,EAAE,aAAsB,EAAA;QAC3D,IAAI,CAAC,KAAK,CAAC,KAAK,CACd,CAAA,2CAAA,EAA8C,aAAa,CAAA,CAAE,CAC9D;QAED,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,oBAAoB,CAAC,SAAS,EAAE,MAAM,CAAC;IAC7D;IAEQ,aAAa,GAAA;;QAEnB,MAAM,MAAM,GAAG,IAAI;;QAEnB,OAAO,CAAC,QAAkB,KAAI;AAC5B,YAAA,OAAO,SAAS,MAAM,CAAY,GAAG,IAAS,EAAA;AAC5C,gBAAA,MAAM,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC;AAC7B,oBAAA,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;AAChB,iBAAA,CAAC;AACF,gBAAA,MAAM,WAAW,GAAGC,SAAK,CAAC,OAAO,CAACC,WAAO,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC;AACzD,gBAAA,MAAM,WAAW,GAAGC,sCAAsB,CACxC,MAAK;AACH,oBAAA,OAAOD,WAAO,CAAC,IAAI,CAAC,WAAW,EAAE,MAAK;wBACpC,OAAO,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC;AACnC,oBAAA,CAAC,CAAC;AACJ,gBAAA,CAAC,EACD,CAAC,CAAC,KAAI;oBACJ,IAAI,CAAC,EAAE;wBACL,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA,gCAAA,CAAkC,EAAE,CAAC,CAAC;oBAC3D;AACF,gBAAA,CAAC,CACF;gBACD,MAAM,cAAc,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,WAAW,CAAC;gBAC7D,OAAOA,WAAO,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC;AAClD,YAAA,CAAC;AACH,QAAA,CAAC;IACH;IACQ,YAAY,CAAI,IAAU,EAAE,OAAmB,EAAA;AACrD,QAAA,OAAO;AACJ,aAAA,IAAI,CAAC,CAAO,MAAM,KAAIE,eAAA,CAAA,IAAA,EAAA,MAAA,EAAA,MAAA,EAAA,aAAA;YACrB,MAAM,IAAI,CAAC,QAAQ,CAAC;gBAClB,IAAI;AACJ,gBAAA,MAAM,EAAE,MAE8C;AACvD,aAAA,CAAC;AAEF,YAAA,OAAO,IAAI,OAAO,CAAI,CAAC,OAAO,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;AACrD,QAAA,CAAC,CAAA;AACA,aAAA,KAAK,CAAC,CAAC,KAAY,KAAI;YACtB,OAAO,IAAI,OAAO,CAAI,CAAC,CAAC,EAAE,MAAM,KAAI;gBAClC,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEC,kBAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,KAAK,CAAC,OAAO;AACvB,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC;gBAC3B,IAAI,CAAC,GAAG,EAAE;gBAEV,MAAM,CAAC,KAAK,CAAC;AACf,YAAA,CAAC,CAAC;AACJ,QAAA,CAAC,CAAC;IACN;IAEQ,UAAU,CAAC,EACjB,MAAM,GAGP,EAAA;;QACC,IAAI,UAAU,GAAe,EAAE;AAE/B,QAAA,IAAI
;AACF,YAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAwC;AAC7D,YAAA,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,sBAAsB,CACxD,KAAK,CAAC,OAAO,IAAI,EAAE,CACpB;AAED,YAAA,UAAU,GAAG;AACX,gBAAA,CAACC,oCAAc,CAAC,UAAU,GAAG,KAAK;AAClC,gBAAA,CAACA,oCAAc,CAAC,iBAAiB,GAAG,KAAK;AACzC,gBAAA,CAACA,oCAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,OAAO;AAClD,gBAAA,CAACA,oCAAc,CAAC,gBAAgB,GAAGC,0CAAoB,CAAC,UAAU;aACnE;AAED,YAAA,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAClC,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC;AAE1C,gBAAA,UAAU,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACL,UAAU,CAAA,EACV,IAAI,CAAC,qBAAqB,CAAC,WAAW,EAAE,WAAW,CAAC,CACxD;YACH;QACF;QAAE,OAAO,CAAC,EAAE;AACV,YAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;YACnB,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,IAAI,CAAC,OAAO,EAAC,eAAe,MAAA,IAAA,IAAA,EAAA,KAAA,MAAA,GAAA,MAAA,GAAA,EAAA,CAAA,IAAA,CAAA,EAAA,EAAG,CAAC,CAAC;QACnC;AAEA,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,oBAAoB,EAAE;YACjD,IAAI,EAAEC,YAAQ,CAAC,MAAM;YACrB,UAAU;AACX,SAAA,CAAC;IACJ;IAEc,QAAQ,CAAA,EAAA,EAAA;mEAAC,EACrB,IAAI,EACJ,MAAM,GAMP,EAAA;;;AACC,YAAA,IAAI;AACF,gBAAA,IAAI,MAAM,IAAI,MAAM,EAAE;AACpB,oBAAA,MAAM,UAAU,GACd,YAAY,IAAI;AACd,0BAAG,IAAI,CAAC,YAAY;0BAClB,EAAE;AAER,oBAAA,IAAIF,oCAAc,CAAC,UAAU,IAAI,UAAU,EAAE;wBAC3C,MAAM,OAAO,GAAG,UAAU,CACxBA,oCAAc,CAAC,kBAAkB,CACxB;AACX,wBAAA,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC;wBAEnE,IAAI,CAAC,YAAY,CAACA,oCAAc,CAAC,kBAAkB,EAAE,KAAK,CAAC;AAE3D,wBAAA,IAAI,EAAE,MAAM,CAAC,IAAI,YAAY,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,EAAE;AAC/D,4BAAA,MAAM,MAAM,GAAG,MAAM,CAAC,IAA6C;4BAEnE,IAAI,eAAe,GAAG,EAAE;;AACxB,gCAAA,KAA0B,eAAA,QAAA,GAAAG,mBAAA,CAAA,MAAM,CAAA,EAAA,UAAA,4EAAE;oCAAR,EAAA,GAAA,UAAA,CAAA,KAAA;oCAAA,EAAA,GAAA,KAAA;oCAAf,MAAM,KAAK,KAAA;;AAEpB,oCAAA,MAAM,UAAU,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,CAAA,EAAA,GAAA,KAAK,CAAC,KAAK,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,KAAK,CAAC;;oCAE/D,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC;AAE7C,oCAAA,IAAI,kCAAkC,IAAI,cAAc,EAAE;AACxD,wCAAA,IAAI,CAAC,YAAY,CACfH,oCAAc,CAAC,uBAAuB,EACtC,cAAc,CAAC,kCAAkC,CAAC,CAChD,iBAAiB,CAClB,CACF;AACD,wCAAA,IAAI,CAAC,YAAY,CACfA,oCAAc,CAAC,2BAA2B,EAC1C,cAAc,CAAC,kCAAkC,CAAC,CAChD,kBAAkB,CACnB,CACF;AAED,wCAAA,IAAI,CAAC,YAAY,CACfA,oCAAc,CAAC,sBAAsB,EACrC,cAAc,CAAC,kCAAkC,CAAC,CAChD,iBAAiB,CAClB;AACC,4CAAA,cAAc,CAAC,kCAAkC,CAAC,CAChD,kBAAkB,CACnB,CACJ;oCACH;AAEA,oCAAA,IAAI,kBAAkB,GAAG,IAAI,CAAC,sBAAsB,CAClD,WAAW,EACX,cAAc,EACd,IAAI,CACL;;AAGD,oCAAA,IAAI,IAAI,CAAC,kBAAkB,EAAE,EAAE;;wCAE7B,eAAe;AACb,4CAAA,kBAAkB,CAChB,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,CAC9C;;AAEH,wCAAA,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACb,kBAAkB,CAAA,EAAA,EACrB,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,eAAe,GAClB;oCACH;AAEA,oCAAA,IAAI,CAAC,aAAa,CAAC,kBAAkB,CAAC;gCACxC;;;;;;;;;wBACF;6BAAO,IAAI,MAAM,CAAC,IAAI,YAAY,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,EAAE;;AAEnE,4BAAA,MAAM,UAAU,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CACzC,MAAM,CAAC,IAAkB,CAC1B;;4BAED,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC;4BAE7C,MAAM,kBAAkB,GAAG,IAAI,CAAC,sBAAsB,CACpD,WAAW,EACX,cAAc,CACf;AAED,4BAAA,IAAI,CAAC,aAAa,CAAC,kBAAkB,CAAC;wBACxC;oBACF;gBACF;YACF;YAAE,OAAO,CAAC,EAAE;AACV,gBAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;gBACnB,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,IAAI,CAAC,OAAO,EAAC,eAAe,MAAA,IAAA,IAAA,EAAA,KAAA,MAAA,GAAA,MAAA,GAAA,EAAA,CAAA,IAAA,CAAA,EAAA,EAAG,CAAC,CAAC;YACnC;YAEA,IAAI,CAAC,SAAS,CAAC,EAAE,IAAI,EAAED,kBAAc,CAAC,EAAE,EAAE,CAAC;YAC3C,IAAI,CAAC,GAAG,EAAE;QACZ,CAAC,CAAA;AAAA,IAAA;IAEO,qBAAqB,CAC3B,MAAc,EACd,WAAgC,EAAA;QAEhC,QAAQ,MAAM;YACZ,KAAK,MAAM,EAAE;AACX,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,
CAACC,oCAAc,CAAC,iBAAiB,GAAG,WAAW,CAAC,MAAM,CAAC,EACvD,CAACA,oCAAc,CAAC,uBAAuB,GAAG,WAAW,CAAC,aAAa,CAAC,EACpE,CAACA,oCAAc,CAAC,sBAAsB,GAAG,WAAW,CAAC,WAAW,CAAC,EACjE,CAACA,oCAAc,CAAC,oBAAoB,GAClC,WAAW,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,EACzC,CAACA,oCAAc,CAAC,qBAAqB,GACnC,WAAW,CAAC,kBAAkB,CAAC,CAAC,OAAO,CAAC,EAAA,GAGtC,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,CAAC,CAAA,EAAGA,oCAAc,CAAC,WAAW,CAAA,UAAA,CAAY,GACxC,WAAW,CAAC,QAAQ,CAAC;AACxB;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,QAAQ,EAAE;gBACb,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAACA,oCAAc,CAAC,iBAAiB,GAC/B,WAAW,CAAC,sBAAsB,CAAC,CAAC,MAAM,CAAC,EAC7C,CAACA,oCAAc,CAAC,uBAAuB,GACrC,WAAW,CAAC,sBAAsB,CAAC,CAAC,aAAa,CAAC,EACpD,CAACA,oCAAc,CAAC,sBAAsB,GACpC,WAAW,CAAC,sBAAsB,CAAC,CAAC,eAAe,CAAC,KAGlD,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,CAAC,CAAA,EAAGA,oCAAc,CAAC,WAAW,CAAA,UAAA,CAAY,GACxC,WAAW,CAAC,WAAW,CAAC;AAC3B;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,WAAW,EAAE;gBAChB,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAACA,oCAAc,CAAC,iBAAiB,GAAG,WAAW,CAAC,OAAO,CAAC,EACxD,CAACA,oCAAc,CAAC,SAAS,GAAG,WAAW,CAAC,OAAO,CAAC,EAChD,CAACA,oCAAc,CAAC,uBAAuB,GAAG,WAAW,CAAC,aAAa,CAAC,EACpE,CAACA,oCAAc,CAAC,sBAAsB,GACpC,WAAW,CAAC,sBAAsB,CAAC,KAGjC,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,CAAC,CAAA,EAAGA,oCAAc,CAAC,WAAW,CAAA,UAAA,CAAY,GAAG,WAAW,CACtD,QAAQ;;AAGP,6BAAA,OAAO,CAAC,YAAY,EAAE,EAAE;AACxB,6BAAA,OAAO,CAAC,gBAAgB,EAAE,EAAE,CAAC;AACjC;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,QAAQ,EAAE;gBACb,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAACA,oCAAc,CAAC,iBAAiB,GAAG,WAAW,CAAC,GAAG,CAAC,EACpD,CAACA,oCAAc,CAAC,SAAS,GAAG,WAAW,CAAC,GAAG,CAAC,EAC5C,CAACA,oCAAc,CAAC,uBAAuB,GAAG,WAAW,CAAC,aAAa,CAAC,EACpE,CAACA,oCAAc,CAAC,sBAAsB,GAAG,WAAW,CAAC,YAAY,CAAC,KAG9D,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,CAAC,CAAA,EAAGA,oCAAc,CAAC,WAAW,CAAA,UAAA,CAAY,GACxC,WAAW,CAAC,QAAQ,CAAC;AACxB;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,MAAM,EAAE;AACX,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAACA,oCAAc,CAAC,iBAAiB,GAAG,WAAW,CAAC,OAAO,CAAC,EACxD,CAACA,oCAAc,CAAC,uBAAuB,GAAG,WAAW,CAAC,aAAa,CAAC,EACpE,CAACA,oCAAc,CAAC,sBAAsB,GAAG,WAAW,CAAC,aAAa,CAAC,EAAA,GAG/D,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,CAAC,CAAA,EAAGA,oCAAc,CAAC,WAAW,CAAA,UAAA,CAAY,GACxC,WAAW,CAAC,QAAQ,CAAC;AACxB;sBACD,EAAE,EAAC;YAEX;AACA,YAAA;AACE,gBAAA,OAAO,EAAE;;IAEf;AAEQ,IAAA,sBAAsB,CAC5B,MAAc,EACd,QAA6B,EAC7B,QAAQ,GAAG,KAAK,EAAA;QAEhB,QAAQ,MAAM;YACZ,KAAK,MAAM,EAAE;AACX,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,kBAAkB,GAClD,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,QAAQ,CAAC,EACtD,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,OAAA,CAAS,GAAG,WAAW,EAAA,GACrD,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC;AAC7C;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,QAAQ,EAAE;gBACb,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAAC,GAAGA,oCAAc,CAAC,eAAe,CAAA,gBAAA,CAAkB,GAAG;AACrD,0BAAE,QAAQ,CAAC,kBAAkB;AAC7B,0BAAE,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,kBAAkB,CAAC,EAC9C,CAAC,GAAGA,oCAAc,CAAC,eAAe,CAAA,OAAA,CAAS,GAAG,WAAW,EACzD,CAACA,oCAAc,CAAC,uBAAuB,GACrC,QAAQ,CAAC,qBAAqB,CAAC,EACjC,CAACA,oCAAc,CAAC,2BAA2B,GAAG;AAC5C,0BAAE,QAAQ,CAAC,2BAA2B;AACtC,0BAAE,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,EACxC,CAACA,oCAAc,CAAC,sBAAsB,GAAG;AACvC,0BAAE,QAAQ,CAAC,qBAAqB,CAAC;4BAC/B,QAAQ,CAAC,2BAA2B;AACtC,0BAAE,QAAQ,CAAC,qBAAqB,CAAC;
AAC/B,4BAAA,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,EAAA,GACpC,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAAG;AAC/C,8BAAE,QAAQ,CAAC,YAAY;8BACrB,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;AACzC;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,WAAW,EAAE;gBAChB,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,gBAAA,CAAkB,GAClD,QAAQ,CAAC,aAAa,CAAC,EACzB,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,OAAA,CAAS,GAAG,WAAW,EAAA,GACrD,IAAI,CAAC,kBAAkB;AACzB,sBAAE;wBACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,QAAQ,CAAC,YAAY,CAAC;AACzB;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,QAAQ,EAAE;AACb,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,gBAAA,CAAkB,GAClD,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,EAC7C,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,OAAA,CAAS,GAAG,WAAW,EAAA,GACrD,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AACrC;sBACD,EAAE,EAAC;YAEX;YACA,KAAK,MAAM,EAAE;AACX,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,gBAAA,CAAkB,GAClD,QAAQ,CAAC,aAAa,CAAC,EACzB,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,OAAA,CAAS,GAAG,WAAW,EACzD,CAACA,oCAAc,CAAC,uBAAuB,GACrC,QAAQ,CAAC,oBAAoB,CAAC,EAChC,CAACA,oCAAc,CAAC,2BAA2B,GACzC,QAAQ,CAAC,wBAAwB,CAAC,EACpC,CAACA,oCAAc,CAAC,sBAAsB,GACpC,QAAQ,CAAC,oBAAoB,CAAC,GAAG,QAAQ,CAAC,wBAAwB,CAAC,EAAA,GACjE,IAAI,CAAC,kBAAkB;AACzB,sBAAE;wBACE,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,QAAQ,CAAC,YAAY,CAAC;AACzB;sBACD,EAAE,EAAC;YAEX;AACA,YAAA;AACE,gBAAA,OAAO,EAAE;;IAEf;IAEQ,kBAAkB,GAAA;QACxB,MAAM,wBAAwB,GAAGJ;AAC9B,aAAA,MAAM;aACN,QAAQ,CAACQ,qDAA+B,CAAC;AAE5C,QAAA,IAAI,wBAAwB,KAAK,SAAS,EAAE;AAC1C,YAAA,OAAO,wBAAwB;QACjC;AAEA,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,YAAY,KAAK;AACnC,cAAE,IAAI,CAAC,OAAO,CAAC;cACb,IAAI;IACV;AAEQ,IAAA,sBAAsB,CAAC,OAAe,EAAA;QAI5C,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,EAAE,WAAW,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;QACvC;QAEA,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC;QAChC,OAAO;AACL,YAAA,WAAW,EAAE,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE;AAC3B,YAAA,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE;SACtB;IACH;AACD;;;;"}
+
{"version":3,"file":"index.js","sources":["../../src/instrumentation.ts"],"sourcesContent":["/*\n * Copyright Traceloop\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n Span,\n Attributes,\n SpanKind,\n SpanStatusCode,\n context,\n trace,\n} from \"@opentelemetry/api\";\nimport {\n InstrumentationBase,\n InstrumentationModuleDefinition,\n InstrumentationNodeModuleDefinition,\n safeExecuteInTheMiddle,\n} from \"@opentelemetry/instrumentation\";\nimport { BedrockInstrumentationConfig } from \"./types\";\nimport type * as bedrock from \"@aws-sdk/client-bedrock-runtime\";\nimport {\n CONTEXT_KEY_ALLOW_TRACE_CONTENT,\n LLMRequestTypeValues,\n SpanAttributes,\n} from \"@traceloop/ai-semantic-conventions\";\nimport { version } from \"../package.json\";\n\nexport class BedrockInstrumentation extends InstrumentationBase {\n declare protected _config: BedrockInstrumentationConfig;\n\n constructor(config: BedrockInstrumentationConfig = {}) {\n super(\"@traceloop/instrumentation-bedrock\", version, config);\n }\n\n public override setConfig(config: BedrockInstrumentationConfig = {}) {\n super.setConfig(config);\n }\n\n protected init(): InstrumentationModuleDefinition {\n const module = new InstrumentationNodeModuleDefinition(\n \"@aws-sdk/client-bedrock-runtime\",\n [\">=3.499.0\"],\n this.wrap.bind(this),\n this.unwrap.bind(this),\n );\n\n return module;\n }\n\n public manuallyInstrument(module: typeof bedrock) {\n this._diag.debug(`Patching @aws-sdk/client-bedrock-runtime manually`);\n\n this._wrap(\n module.BedrockRuntimeClient.prototype,\n \"send\",\n this.wrapperMethod(),\n );\n }\n\n private wrap(module: typeof bedrock, moduleVersion?: string) {\n this._diag.debug(\n `Patching @aws-sdk/client-bedrock-runtime@${moduleVersion}`,\n );\n\n this._wrap(\n module.BedrockRuntimeClient.prototype,\n \"send\",\n this.wrapperMethod(),\n );\n\n return module;\n }\n\n private unwrap(module: typeof bedrock, moduleVersion?: string) {\n this._diag.debug(\n `Unpatching @aws-sdk/client-bedrock-runtime@${moduleVersion}`,\n );\n\n this._unwrap(module.BedrockRuntimeClient.prototype, \"send\");\n }\n\n private wrapperMethod() {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const plugin = this;\n // eslint-disable-next-line\n return (original: Function) => {\n return function method(this: any, ...args: any) {\n const span = plugin._startSpan({\n params: args[0],\n });\n const execContext = trace.setSpan(context.active(), span);\n const execPromise = safeExecuteInTheMiddle(\n () => {\n return context.with(execContext, () => {\n return original.apply(this, args);\n });\n },\n (e) => {\n if (e) {\n plugin._diag.error(`Error in bedrock instrumentation`, e);\n }\n },\n );\n const wrappedPromise = plugin._wrapPromise(span, execPromise);\n return context.bind(execContext, wrappedPromise);\n };\n };\n }\n private _wrapPromise<T>(span: Span, promise: Promise<T>): Promise<T> {\n return promise\n .then(async (result) => {\n await this._endSpan({\n span,\n result: result as\n | 
bedrock.InvokeModelCommandOutput\n | bedrock.InvokeModelWithResponseStreamCommandOutput,\n });\n\n return new Promise<T>((resolve) => resolve(result));\n })\n .catch((error: Error) => {\n return new Promise<T>((_, reject) => {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: error.message,\n });\n span.recordException(error);\n span.end();\n\n reject(error);\n });\n });\n }\n\n private _startSpan({\n params,\n }: {\n params: Parameters<bedrock.BedrockRuntimeClient[\"send\"]>[0];\n }): Span {\n let attributes: Attributes = {};\n\n try {\n const input = params.input as bedrock.InvokeModelCommandInput;\n const { modelVendor, model } = this._extractVendorAndModel(\n input.modelId || \"\",\n );\n\n attributes = {\n [SpanAttributes.LLM_SYSTEM]: \"AWS\",\n [SpanAttributes.LLM_REQUEST_MODEL]: model,\n [SpanAttributes.LLM_RESPONSE_MODEL]: input.modelId,\n [SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,\n };\n\n if (typeof input.body === \"string\") {\n const requestBody = JSON.parse(input.body);\n\n attributes = {\n ...attributes,\n ...this._setRequestAttributes(modelVendor, requestBody),\n };\n }\n } catch (e) {\n this._diag.debug(e);\n this._config.exceptionLogger?.(e);\n }\n\n return this.tracer.startSpan(`bedrock.completion`, {\n kind: SpanKind.CLIENT,\n attributes,\n });\n }\n\n private async _endSpan({\n span,\n result,\n }: {\n span: Span;\n result:\n | bedrock.InvokeModelCommandOutput\n | bedrock.InvokeModelWithResponseStreamCommandOutput;\n }) {\n try {\n if (\"body\" in result) {\n const attributes =\n \"attributes\" in span\n ? (span[\"attributes\"] as Record<string, any>)\n : {};\n\n if (SpanAttributes.LLM_SYSTEM in attributes) {\n const modelId = attributes[\n SpanAttributes.LLM_RESPONSE_MODEL\n ] as string;\n const { modelVendor, model } = this._extractVendorAndModel(modelId);\n\n span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, model);\n\n if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {\n const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;\n\n let streamedContent = \"\";\n for await (const value of rawRes) {\n // Convert it to a JSON String\n const jsonString = new TextDecoder().decode(value.chunk?.bytes);\n // Parse the JSON string\n const parsedResponse = JSON.parse(jsonString);\n\n if (\"amazon-bedrock-invocationMetrics\" in parsedResponse) {\n span.setAttribute(\n SpanAttributes.LLM_USAGE_PROMPT_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"inputTokenCount\"\n ],\n );\n span.setAttribute(\n SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"outputTokenCount\"\n ],\n );\n\n span.setAttribute(\n SpanAttributes.LLM_USAGE_TOTAL_TOKENS,\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"inputTokenCount\"\n ] +\n parsedResponse[\"amazon-bedrock-invocationMetrics\"][\n \"outputTokenCount\"\n ],\n );\n }\n\n let responseAttributes = this._setResponseAttributes(\n modelVendor,\n parsedResponse,\n true,\n );\n\n // ! 
NOTE: This make sure the content always have all streamed chunks\n if (this._shouldSendPrompts()) {\n // Update local value with attribute value that was set by _setResponseAttributes\n streamedContent +=\n responseAttributes[\n `${SpanAttributes.LLM_COMPLETIONS}.0.content`\n ];\n // re-assign the new value to responseAttributes\n responseAttributes = {\n ...responseAttributes,\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n streamedContent,\n };\n }\n\n span.setAttributes(responseAttributes);\n }\n } else if (result.body instanceof Object.getPrototypeOf(Uint8Array)) {\n // Convert it to a JSON String\n const jsonString = new TextDecoder().decode(\n result.body as Uint8Array,\n );\n // Parse the JSON string\n const parsedResponse = JSON.parse(jsonString);\n\n const responseAttributes = this._setResponseAttributes(\n modelVendor,\n parsedResponse,\n );\n\n span.setAttributes(responseAttributes);\n }\n }\n }\n } catch (e) {\n this._diag.debug(e);\n this._config.exceptionLogger?.(e);\n }\n\n span.setStatus({ code: SpanStatusCode.OK });\n span.end();\n }\n\n private _setRequestAttributes(\n vendor: string,\n requestBody: Record<string, any>,\n ) {\n switch (vendor) {\n case \"ai21\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"topP\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"maxTokens\"],\n [SpanAttributes.LLM_PRESENCE_PENALTY]:\n requestBody[\"presencePenalty\"][\"scale\"],\n [SpanAttributes.LLM_FREQUENCY_PENALTY]:\n requestBody[\"frequencyPenalty\"][\"scale\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n case \"amazon\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]:\n requestBody[\"textGenerationConfig\"][\"topP\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]:\n requestBody[\"textGenerationConfig\"][\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]:\n requestBody[\"textGenerationConfig\"][\"maxTokenCount\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"inputText\"],\n }\n : {}),\n };\n }\n case \"anthropic\": {\n const baseAttributes = {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"top_p\"],\n [SpanAttributes.LLM_TOP_K]: requestBody[\"top_k\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]:\n requestBody[\"max_tokens_to_sample\"] || requestBody[\"max_tokens\"],\n };\n\n if (!this._shouldSendPrompts()) {\n return baseAttributes;\n }\n\n // Handle new messages API format (used by langchain)\n if (requestBody[\"messages\"]) {\n const promptAttributes: Record<string, any> = {};\n requestBody[\"messages\"].forEach((message: any, index: number) => {\n promptAttributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =\n message.role;\n promptAttributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =\n typeof message.content === \"string\"\n ? 
message.content\n : JSON.stringify(message.content);\n });\n return { ...baseAttributes, ...promptAttributes };\n }\n\n // Handle legacy prompt format\n if (requestBody[\"prompt\"]) {\n return {\n ...baseAttributes,\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody[\"prompt\"]\n // The format is removing when we are setting span attribute\n .replace(\"\\n\\nHuman:\", \"\")\n .replace(\"\\n\\nAssistant:\", \"\"),\n };\n }\n\n return baseAttributes;\n }\n case \"cohere\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"p\"],\n [SpanAttributes.LLM_TOP_K]: requestBody[\"k\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"max_tokens\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"message\"] || requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n case \"meta\": {\n return {\n [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody[\"top_p\"],\n [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody[\"temperature\"],\n [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody[\"max_gen_len\"],\n\n // Prompt & Role\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_PROMPTS}.0.role`]: \"user\",\n [`${SpanAttributes.LLM_PROMPTS}.0.content`]:\n requestBody[\"prompt\"],\n }\n : {}),\n };\n }\n default:\n return {};\n }\n }\n\n private _setResponseAttributes(\n vendor: string,\n response: Record<string, any>,\n isStream = false,\n ) {\n switch (vendor) {\n case \"ai21\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"completions\"][0][\"finishReason\"][\"reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"completions\"][0][\"data\"][\"text\"],\n }\n : {}),\n };\n }\n case \"amazon\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: isStream\n ? response[\"completionReason\"]\n : response[\"results\"][0][\"completionReason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]:\n response[\"inputTextTokenCount\"],\n [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: isStream\n ? response[\"totalOutputTextTokenCount\"]\n : response[\"results\"][0][\"tokenCount\"],\n [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: isStream\n ? response[\"inputTextTokenCount\"] +\n response[\"totalOutputTextTokenCount\"]\n : response[\"inputTextTokenCount\"] +\n response[\"results\"][0][\"tokenCount\"],\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: isStream\n ? response[\"outputText\"]\n : response[\"results\"][0][\"outputText\"],\n }\n : {}),\n };\n }\n case \"anthropic\": {\n const baseAttributes = {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"stop_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n };\n\n if (!this._shouldSendPrompts()) {\n return baseAttributes;\n }\n\n // Handle new messages API format response\n if (response[\"content\"]) {\n const content = Array.isArray(response[\"content\"])\n ? 
response[\"content\"].map((c: any) => c.text || c).join(\"\")\n : response[\"content\"];\n return {\n ...baseAttributes,\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: content,\n };\n }\n\n // Handle legacy completion format\n if (response[\"completion\"]) {\n return {\n ...baseAttributes,\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"completion\"],\n };\n }\n\n return baseAttributes;\n }\n case \"cohere\": {\n const baseAttributes = {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"generations\"]?.[0]?.[\"finish_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"generations\"]?.[0]?.[\"text\"],\n }\n : {}),\n };\n\n // Add token usage if available\n if (response[\"meta\"] && response[\"meta\"][\"billed_units\"]) {\n const billedUnits = response[\"meta\"][\"billed_units\"];\n return {\n ...baseAttributes,\n [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]:\n billedUnits[\"input_tokens\"],\n [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]:\n billedUnits[\"output_tokens\"],\n [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]:\n (billedUnits[\"input_tokens\"] || 0) +\n (billedUnits[\"output_tokens\"] || 0),\n };\n }\n\n return baseAttributes;\n }\n case \"meta\": {\n return {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]:\n response[\"stop_reason\"],\n [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: \"assistant\",\n [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]:\n response[\"prompt_token_count\"],\n [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]:\n response[\"generation_token_count\"],\n [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]:\n response[\"prompt_token_count\"] + response[\"generation_token_count\"],\n ...(this._shouldSendPrompts()\n ? {\n [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:\n response[\"generation\"],\n }\n : {}),\n };\n }\n default:\n return {};\n }\n }\n\n private _shouldSendPrompts() {\n const contextShouldSendPrompts = context\n .active()\n .getValue(CONTEXT_KEY_ALLOW_TRACE_CONTENT);\n\n if (contextShouldSendPrompts !== undefined) {\n return contextShouldSendPrompts;\n }\n\n return this._config.traceContent !== undefined\n ? 
this._config.traceContent\n : true;\n }\n\n private _extractVendorAndModel(modelId: string): {\n modelVendor: string;\n model: string;\n } {\n if (!modelId) {\n return { modelVendor: \"\", model: \"\" };\n }\n\n const parts = modelId.split(\".\");\n return {\n modelVendor: parts[0] || \"\",\n model: parts[1] || \"\",\n };\n }\n}\n"],"names":["InstrumentationBase","InstrumentationNodeModuleDefinition","trace","context","safeExecuteInTheMiddle","__awaiter","SpanStatusCode","SpanAttributes","LLMRequestTypeValues","SpanKind","__asyncValues","CONTEXT_KEY_ALLOW_TRACE_CONTENT"],"mappings":";;;;;;;;;AAsCM,MAAO,sBAAuB,SAAQA,mCAAmB,CAAA;AAG7D,IAAA,WAAA,CAAY,SAAuC,EAAE,EAAA;AACnD,QAAA,KAAK,CAAC,oCAAoC,EAAE,OAAO,EAAE,MAAM,CAAC;IAC9D;IAEgB,SAAS,CAAC,SAAuC,EAAE,EAAA;AACjE,QAAA,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC;IACzB;IAEU,IAAI,GAAA;AACZ,QAAA,MAAM,MAAM,GAAG,IAAIC,mDAAmC,CACpD,iCAAiC,EACjC,CAAC,WAAW,CAAC,EACb,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EACpB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CACvB;AAED,QAAA,OAAO,MAAM;IACf;AAEO,IAAA,kBAAkB,CAAC,MAAsB,EAAA;AAC9C,QAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA,iDAAA,CAAmD,CAAC;AAErE,QAAA,IAAI,CAAC,KAAK,CACR,MAAM,CAAC,oBAAoB,CAAC,SAAS,EACrC,MAAM,EACN,IAAI,CAAC,aAAa,EAAE,CACrB;IACH;IAEQ,IAAI,CAAC,MAAsB,EAAE,aAAsB,EAAA;QACzD,IAAI,CAAC,KAAK,CAAC,KAAK,CACd,CAAA,yCAAA,EAA4C,aAAa,CAAA,CAAE,CAC5D;AAED,QAAA,IAAI,CAAC,KAAK,CACR,MAAM,CAAC,oBAAoB,CAAC,SAAS,EACrC,MAAM,EACN,IAAI,CAAC,aAAa,EAAE,CACrB;AAED,QAAA,OAAO,MAAM;IACf;IAEQ,MAAM,CAAC,MAAsB,EAAE,aAAsB,EAAA;QAC3D,IAAI,CAAC,KAAK,CAAC,KAAK,CACd,CAAA,2CAAA,EAA8C,aAAa,CAAA,CAAE,CAC9D;QAED,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,oBAAoB,CAAC,SAAS,EAAE,MAAM,CAAC;IAC7D;IAEQ,aAAa,GAAA;;QAEnB,MAAM,MAAM,GAAG,IAAI;;QAEnB,OAAO,CAAC,QAAkB,KAAI;AAC5B,YAAA,OAAO,SAAS,MAAM,CAAY,GAAG,IAAS,EAAA;AAC5C,gBAAA,MAAM,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC;AAC7B,oBAAA,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC;AAChB,iBAAA,CAAC;AACF,gBAAA,MAAM,WAAW,GAAGC,SAAK,CAAC,OAAO,CAACC,WAAO,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC;AACzD,gBAAA,MAAM,WAAW,GAAGC,sCAAsB,CACxC,MAAK;AACH,oBAAA,OAAOD,WAAO,CAAC,IAAI,CAAC,WAAW,EAAE,MAAK;wBACpC,OAAO,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC;AACnC,oBAAA,CAAC,CAAC;AACJ,gBAAA,CAAC,EACD,CAAC,CAAC,KAAI;oBACJ,IAAI,CAAC,EAAE;wBACL,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA,gCAAA,CAAkC,EAAE,CAAC,CAAC;oBAC3D;AACF,gBAAA,CAAC,CACF;gBACD,MAAM,cAAc,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,WAAW,CAAC;gBAC7D,OAAOA,WAAO,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC;AAClD,YAAA,CAAC;AACH,QAAA,CAAC;IACH;IACQ,YAAY,CAAI,IAAU,EAAE,OAAmB,EAAA;AACrD,QAAA,OAAO;AACJ,aAAA,IAAI,CAAC,CAAO,MAAM,KAAIE,eAAA,CAAA,IAAA,EAAA,MAAA,EAAA,MAAA,EAAA,aAAA;YACrB,MAAM,IAAI,CAAC,QAAQ,CAAC;gBAClB,IAAI;AACJ,gBAAA,MAAM,EAAE,MAE8C;AACvD,aAAA,CAAC;AAEF,YAAA,OAAO,IAAI,OAAO,CAAI,CAAC,OAAO,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;AACrD,QAAA,CAAC,CAAA;AACA,aAAA,KAAK,CAAC,CAAC,KAAY,KAAI;YACtB,OAAO,IAAI,OAAO,CAAI,CAAC,CAAC,EAAE,MAAM,KAAI;gBAClC,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEC,kBAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,KAAK,CAAC,OAAO;AACvB,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC;gBAC3B,IAAI,CAAC,GAAG,EAAE;gBAEV,MAAM,CAAC,KAAK,CAAC;AACf,YAAA,CAAC,CAAC;AACJ,QAAA,CAAC,CAAC;IACN;IAEQ,UAAU,CAAC,EACjB,MAAM,GAGP,EAAA;;QACC,IAAI,UAAU,GAAe,EAAE;AAE/B,QAAA,IAAI;AACF,YAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAwC;AAC7D,YAAA,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,sBAAsB,CACxD,KAAK,CAAC,OAAO,IAAI,EAAE,CACpB;AAED,YAAA,UAAU,GAAG;AACX,gBAAA,CAACC,oCAAc,CAAC,UAAU,GAAG,KAAK;AAClC,gBAAA,CAACA,oCAAc,CAAC,iBAAiB,GAAG,KAAK;AACzC,gBAAA,CAACA,oCAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,OAAO;AAClD,gBAAA,CAACA,oCAAc,CAAC,gBAAgB,GAAGC,0CAAoB,CAAC,UAAU;aACnE;AAED,YAAA,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAA
Q,EAAE;gBAClC,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC;AAE1C,gBAAA,UAAU,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACL,UAAU,CAAA,EACV,IAAI,CAAC,qBAAqB,CAAC,WAAW,EAAE,WAAW,CAAC,CACxD;YACH;QACF;QAAE,OAAO,CAAC,EAAE;AACV,YAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;YACnB,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,IAAI,CAAC,OAAO,EAAC,eAAe,MAAA,IAAA,IAAA,EAAA,KAAA,MAAA,GAAA,MAAA,GAAA,EAAA,CAAA,IAAA,CAAA,EAAA,EAAG,CAAC,CAAC;QACnC;AAEA,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,oBAAoB,EAAE;YACjD,IAAI,EAAEC,YAAQ,CAAC,MAAM;YACrB,UAAU;AACX,SAAA,CAAC;IACJ;IAEc,QAAQ,CAAA,EAAA,EAAA;mEAAC,EACrB,IAAI,EACJ,MAAM,GAMP,EAAA;;;AACC,YAAA,IAAI;AACF,gBAAA,IAAI,MAAM,IAAI,MAAM,EAAE;AACpB,oBAAA,MAAM,UAAU,GACd,YAAY,IAAI;AACd,0BAAG,IAAI,CAAC,YAAY;0BAClB,EAAE;AAER,oBAAA,IAAIF,oCAAc,CAAC,UAAU,IAAI,UAAU,EAAE;wBAC3C,MAAM,OAAO,GAAG,UAAU,CACxBA,oCAAc,CAAC,kBAAkB,CACxB;AACX,wBAAA,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC;wBAEnE,IAAI,CAAC,YAAY,CAACA,oCAAc,CAAC,kBAAkB,EAAE,KAAK,CAAC;AAE3D,wBAAA,IAAI,EAAE,MAAM,CAAC,IAAI,YAAY,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,EAAE;AAC/D,4BAAA,MAAM,MAAM,GAAG,MAAM,CAAC,IAA6C;4BAEnE,IAAI,eAAe,GAAG,EAAE;;AACxB,gCAAA,KAA0B,eAAA,QAAA,GAAAG,mBAAA,CAAA,MAAM,CAAA,EAAA,UAAA,4EAAE;oCAAR,EAAA,GAAA,UAAA,CAAA,KAAA;oCAAA,EAAA,GAAA,KAAA;oCAAf,MAAM,KAAK,KAAA;;AAEpB,oCAAA,MAAM,UAAU,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,CAAA,EAAA,GAAA,KAAK,CAAC,KAAK,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,KAAK,CAAC;;oCAE/D,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC;AAE7C,oCAAA,IAAI,kCAAkC,IAAI,cAAc,EAAE;AACxD,wCAAA,IAAI,CAAC,YAAY,CACfH,oCAAc,CAAC,uBAAuB,EACtC,cAAc,CAAC,kCAAkC,CAAC,CAChD,iBAAiB,CAClB,CACF;AACD,wCAAA,IAAI,CAAC,YAAY,CACfA,oCAAc,CAAC,2BAA2B,EAC1C,cAAc,CAAC,kCAAkC,CAAC,CAChD,kBAAkB,CACnB,CACF;AAED,wCAAA,IAAI,CAAC,YAAY,CACfA,oCAAc,CAAC,sBAAsB,EACrC,cAAc,CAAC,kCAAkC,CAAC,CAChD,iBAAiB,CAClB;AACC,4CAAA,cAAc,CAAC,kCAAkC,CAAC,CAChD,kBAAkB,CACnB,CACJ;oCACH;AAEA,oCAAA,IAAI,kBAAkB,GAAG,IAAI,CAAC,sBAAsB,CAClD,WAAW,EACX,cAAc,EACd,IAAI,CACL;;AAGD,oCAAA,IAAI,IAAI,CAAC,kBAAkB,EAAE,EAAE;;wCAE7B,eAAe;AACb,4CAAA,kBAAkB,CAChB,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,CAC9C;;AAEH,wCAAA,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACb,kBAAkB,CAAA,EAAA,EACrB,CAAC,CAAA,EAAGA,oCAAc,CAAC,eAAe,CAAA,UAAA,CAAY,GAC5C,eAAe,GAClB;oCACH;AAEA,oCAAA,IAAI,CAAC,aAAa,CAAC,kBAAkB,CAAC;gCACxC;;;;;;;;;wBACF;6BAAO,IAAI,MAAM,CAAC,IAAI,YAAY,MAAM,CAAC,cAAc,CAAC,UAAU,CAAC,EAAE;;AAEnE,4BAAA,MAAM,UAAU,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CACzC,MAAM,CAAC,IAAkB,CAC1B;;4BAED,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC;4BAE7C,MAAM,kBAAkB,GAAG,IAAI,CAAC,sBAAsB,CACpD,WAAW,EACX,cAAc,CACf;AAED,4BAAA,IAAI,CAAC,aAAa,CAAC,kBAAkB,CAAC;wBACxC;oBACF;gBACF;YACF;YAAE,OAAO,CAAC,EAAE;AACV,gBAAA,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;gBACnB,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,IAAI,CAAC,OAAO,EAAC,eAAe,MAAA,IAAA,IAAA,EAAA,KAAA,MAAA,GAAA,MAAA,GAAA,EAAA,CAAA,IAAA,CAAA,EAAA,EAAG,CAAC,CAAC;YACnC;YAEA,IAAI,CAAC,SAAS,CAAC,EAAE,IAAI,EAAED,kBAAc,CAAC,EAAE,EAAE,CAAC;YAC3C,IAAI,CAAC,GAAG,EAAE;QACZ,CAAC,CAAA;AAAA,IAAA;IAEO,qBAAqB,CAC3B,MAAc,EACd,WAAgC,EAAA;QAEhC,QAAQ,MAAM;YACZ,KAAK,MAAM,EAAE;AACX,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,EACE,CAACC,oCAAc,CAAC,iBAAiB,GAAG,WAAW,CAAC,MAAM,CAAC,EACvD,CAACA,oCAAc,CAAC,uBAAuB,GAAG,WAAW,CAAC,aAAa,CAAC,EACpE,CAACA,oCAAc,CAAC,sBAAsB,GAAG,WAAW,CAAC,WAAW,CAAC,EACjE,CAACA,oCAAc,CAAC,oBAAoB,GAClC,WAAW,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,EACzC,CAACA,oCAAc,CAAC,qBAAqB,GACnC,WAAW,CAAC,kBAAkB,CAAC,CAAC,OAAO,CAAC,EAAA,GAGtC,IAAI,CAAC,kBAAkB;AACzB,sBAAE;AACE,wBAAA,CAAC,GAAGA,oCAAc,CAAC,WAAW,CAAA,OAAA,CAAS,GAAG,MAAM;wBAChD,
… (remainder of package/dist/index.js.map — the regenerated source-map "mappings" string for the rebuilt bundle; minified VLQ data, not human-readable)
package/dist/index.mjs
CHANGED

@@ -3,7 +3,7 @@ import { trace, context, SpanStatusCode, SpanKind } from '@opentelemetry/api';
  3   3   import { InstrumentationBase, InstrumentationNodeModuleDefinition, safeExecuteInTheMiddle } from '@opentelemetry/instrumentation';
  4   4   import { LLMRequestTypeValues, SpanAttributes, CONTEXT_KEY_ALLOW_TRACE_CONTENT } from '@traceloop/ai-semantic-conventions';
  5   5
  6     - var version = "0.
      6 + var version = "0.17.0";
  7   7
  8   8   class BedrockInstrumentation extends InstrumentationBase {
  9   9   constructor(config = {}) {
@@ -189,21 +189,42 @@ class BedrockInstrumentation extends InstrumentationBase {
189 189   : {}));
190 190   }
191 191   case "anthropic": {
192     -
193     -
194     -
195     -
    192 + const baseAttributes = {
    193 + [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["top_p"],
    194 + [SpanAttributes.LLM_TOP_K]: requestBody["top_k"],
    195 + [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"],
    196 + [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens_to_sample"] || requestBody["max_tokens"],
    197 + };
    198 + if (!this._shouldSendPrompts()) {
    199 + return baseAttributes;
    200 + }
    201 + // Handle new messages API format (used by langchain)
    202 + if (requestBody["messages"]) {
    203 + const promptAttributes = {};
    204 + requestBody["messages"].forEach((message, index) => {
    205 + promptAttributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
    206 + message.role;
    207 + promptAttributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
    208 + typeof message.content === "string"
    209 + ? message.content
    210 + : JSON.stringify(message.content);
    211 + });
    212 + return Object.assign(Object.assign({}, baseAttributes), promptAttributes);
    213 + }
    214 + // Handle legacy prompt format
    215 + if (requestBody["prompt"]) {
    216 + return Object.assign(Object.assign({}, baseAttributes), { [`${SpanAttributes.LLM_PROMPTS}.0.role`]: "user", [`${SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["prompt"]
196 217   // The format is removing when we are setting span attribute
197 218   .replace("\n\nHuman:", "")
198     - .replace("\n\nAssistant:", "")
199     -
200     -
    219 + .replace("\n\nAssistant:", "") });
    220 + }
    221 + return baseAttributes;
201 222   }
202 223   case "cohere": {
203 224   return Object.assign({ [SpanAttributes.LLM_REQUEST_TOP_P]: requestBody["p"], [SpanAttributes.LLM_TOP_K]: requestBody["k"], [SpanAttributes.LLM_REQUEST_TEMPERATURE]: requestBody["temperature"], [SpanAttributes.LLM_REQUEST_MAX_TOKENS]: requestBody["max_tokens"] }, (this._shouldSendPrompts()
204 225   ? {
205 226   [`${SpanAttributes.LLM_PROMPTS}.0.role`]: "user",
206     - [`${SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["prompt"],
    227 + [`${SpanAttributes.LLM_PROMPTS}.0.content`]: requestBody["message"] || requestBody["prompt"],
207 228   }
208 229   : {}));
209 230   }
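The functional change in this hunk is that the Anthropic branch of the request-attribute handler now accepts both the Messages API body (`messages: [...]`, as sent by LangChain) and the legacy `prompt` string, and only records prompt content when prompt tracing is enabled. The Cohere branch gains a similar `message` fallback. The snippet below is a minimal, self-contained sketch of that prompt-to-attribute mapping, not the package's exported API: `promptAttributesFor` is a hypothetical helper, and the `gen_ai.prompt` prefix is an assumed stand-in for whatever `SpanAttributes.LLM_PROMPTS` resolves to.

// Illustrative sketch only — mirrors the mapping added above under stated assumptions.
type AnthropicMessage = { role: string; content: string | unknown[] };

const LLM_PROMPTS = "gen_ai.prompt"; // assumption; the real prefix comes from SpanAttributes.LLM_PROMPTS

function promptAttributesFor(body: {
  messages?: AnthropicMessage[];
  prompt?: string;
}): Record<string, string> {
  const attrs: Record<string, string> = {};
  if (body.messages) {
    // Messages API: one role/content attribute pair per message index.
    body.messages.forEach((m, i) => {
      attrs[`${LLM_PROMPTS}.${i}.role`] = m.role;
      attrs[`${LLM_PROMPTS}.${i}.content`] =
        typeof m.content === "string" ? m.content : JSON.stringify(m.content);
    });
  } else if (body.prompt) {
    // Legacy text-completion API: single "user" prompt with the
    // "\n\nHuman:" / "\n\nAssistant:" markers stripped.
    attrs[`${LLM_PROMPTS}.0.role`] = "user";
    attrs[`${LLM_PROMPTS}.0.content`] = body.prompt
      .replace("\n\nHuman:", "")
      .replace("\n\nAssistant:", "");
  }
  return attrs;
}

// A two-message chat body yields gen_ai.prompt.0.* and gen_ai.prompt.1.* attributes.
console.log(
  promptAttributesFor({
    messages: [
      { role: "user", content: "What is Amazon Bedrock?" },
      { role: "assistant", content: [{ type: "text", text: "A managed service." }] },
    ],
  }),
);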
@@ -220,6 +241,7 @@ class BedrockInstrumentation extends InstrumentationBase {
220 241   }
221 242   }
222 243   _setResponseAttributes(vendor, response, isStream = false) {
    244 + var _a, _b, _c, _d;
223 245   switch (vendor) {
224 246   case "ai21": {
225 247   return Object.assign({ [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["completions"][0]["finishReason"]["reason"], [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant" }, (this._shouldSendPrompts()
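The added `var _a, _b, _c, _d;` is not hand-written state: it is the kind of hoisted temporaries the TypeScript compiler emits when optional chaining is downleveled for older targets, and it is what produces the long `=== null || _a === void 0` expressions in the next hunk. A hedged before/after sketch (the source shown is assumed — the package's .ts files are not part of this diff, and the exact temporary names depend on the compiler):

// Hypothetical response shape, only to make the snippet self-contained.
const response: { generations?: Array<Record<string, string>> } = {
  generations: [{ finish_reason: "COMPLETE", text: "..." }],
};

// Optional chaining as it would plausibly look in the TypeScript source:
const finishReason = response.generations?.[0]?.["finish_reason"];

// When tsc targets a runtime without native `?.`, it hoists temporaries
// (`var _a, _b, ...`) and emits roughly the pattern seen in the compiled bundle:
//   (_b = (_a = response["generations"]) === null || _a === void 0 ? void 0 : _a[0])
//     === null || _b === void 0 ? void 0 : _b["finish_reason"]
console.log(finishReason); // "COMPLETE"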
@@ -246,18 +268,39 @@ class BedrockInstrumentation extends InstrumentationBase {
246 268   : {}));
247 269   }
248 270   case "anthropic": {
249     -
250     -
251     -
252     -
253     -
    271 + const baseAttributes = {
    272 + [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["stop_reason"],
    273 + [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant",
    274 + };
    275 + if (!this._shouldSendPrompts()) {
    276 + return baseAttributes;
    277 + }
    278 + // Handle new messages API format response
    279 + if (response["content"]) {
    280 + const content = Array.isArray(response["content"])
    281 + ? response["content"].map((c) => c.text || c).join("")
    282 + : response["content"];
    283 + return Object.assign(Object.assign({}, baseAttributes), { [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: content });
    284 + }
    285 + // Handle legacy completion format
    286 + if (response["completion"]) {
    287 + return Object.assign(Object.assign({}, baseAttributes), { [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: response["completion"] });
    288 + }
    289 + return baseAttributes;
254 290   }
255 291   case "cohere": {
256     -
    292 + const baseAttributes = Object.assign({ [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: (_b = (_a = response["generations"]) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b["finish_reason"], [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant" }, (this._shouldSendPrompts()
257 293   ? {
258     - [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: response["generations"][0]["text"],
    294 + [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]: (_d = (_c = response["generations"]) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d["text"],
259 295   }
260 296   : {}));
    297 + // Add token usage if available
    298 + if (response["meta"] && response["meta"]["billed_units"]) {
    299 + const billedUnits = response["meta"]["billed_units"];
    300 + return Object.assign(Object.assign({}, baseAttributes), { [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: billedUnits["input_tokens"], [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: billedUnits["output_tokens"], [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: (billedUnits["input_tokens"] || 0) +
    301 + (billedUnits["output_tokens"] || 0) });
    302 + }
    303 + return baseAttributes;
261 304   }
262 305   case "meta": {
263 306   return Object.assign({ [`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`]: response["stop_reason"], [`${SpanAttributes.LLM_COMPLETIONS}.0.role`]: "assistant", [SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: response["prompt_token_count"], [SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: response["generation_token_count"], [SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: response["prompt_token_count"] + response["generation_token_count"] }, (this._shouldSendPrompts()
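On the response side, the Anthropic branch now handles both the Messages API shape (`content` as an array of blocks whose text is joined) and the legacy `completion` string, and the Cohere branch records token usage whenever `meta.billed_units` is present, deriving the total with missing counts treated as zero. The sketch below illustrates both mappings under the same assumptions as the earlier example: hypothetical helpers, with assumed attribute keys standing in for the `SpanAttributes` constants.

// Illustrative sketch only — not the package API.
const COMPLETIONS = "gen_ai.completion";                      // assumed
const PROMPT_TOKENS = "gen_ai.usage.prompt_tokens";           // assumed
const COMPLETION_TOKENS = "gen_ai.usage.completion_tokens";   // assumed
const TOTAL_TOKENS = "llm.usage.total_tokens";                // assumed

type AttrValue = string | number | undefined;

function anthropicCompletionAttrs(response: {
  stop_reason?: string;
  content?: Array<{ text?: string }> | string;
  completion?: string;
}): Record<string, AttrValue> {
  const attrs: Record<string, AttrValue> = {
    [`${COMPLETIONS}.0.finish_reason`]: response.stop_reason,
    [`${COMPLETIONS}.0.role`]: "assistant",
  };
  if (response.content) {
    // Messages API: content is an array of blocks; join their text into one string.
    attrs[`${COMPLETIONS}.0.content`] = Array.isArray(response.content)
      ? response.content.map((c) => c.text ?? String(c)).join("")
      : response.content;
  } else if (response.completion) {
    // Legacy text-completion API.
    attrs[`${COMPLETIONS}.0.content`] = response.completion;
  }
  return attrs;
}

function cohereUsageAttrs(response: {
  meta?: { billed_units?: { input_tokens?: number; output_tokens?: number } };
}): Record<string, AttrValue> {
  const billed = response.meta?.billed_units;
  if (!billed) return {};
  // Total tokens are derived, with missing counts treated as 0 (the `|| 0` fallbacks above).
  return {
    [PROMPT_TOKENS]: billed.input_tokens,
    [COMPLETION_TOKENS]: billed.output_tokens,
    [TOTAL_TOKENS]: (billed.input_tokens || 0) + (billed.output_tokens || 0),
  };
}

console.log(
  anthropicCompletionAttrs({ stop_reason: "end_turn", content: [{ text: "Hi!" }] }),
  cohereUsageAttrs({ meta: { billed_units: { input_tokens: 12, output_tokens: 30 } } }),
);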
package/package.json
CHANGED

@@ -1,6 +1,6 @@
  1   1   {
  2   2   "name": "@traceloop/instrumentation-bedrock",
  3     - "version": "0.
      3 + "version": "0.17.0",
  4   4   "description": "Amazon Bedrock Instrumentation",
  5   5   "main": "dist/index.js",
  6   6   "module": "dist/index.mjs",
@@ -42,7 +42,7 @@
 42  42   "@opentelemetry/core": "^2.0.1",
 43  43   "@opentelemetry/instrumentation": "^0.203.0",
 44  44   "@opentelemetry/semantic-conventions": "^1.36.0",
 45     - "@traceloop/ai-semantic-conventions": "0.
     45 + "@traceloop/ai-semantic-conventions": "0.17.0",
 46  46   "tslib": "^2.8.1"
 47  47   },
 48  48   "devDependencies": {
@@ -57,5 +57,5 @@
 57  57   "ts-mocha": "^11.1.0"
 58  58   },
 59  59   "homepage": "https://github.com/traceloop/openllmetry-js/tree/main/packages/instrumentation-openai",
 60     - "gitHead": "
     60 + "gitHead": "f09a480dc8af115f5acbce5539c07a5ed3bdb2db"
 61  61   }