@elizaos/server 1.6.4-alpha.2 → 1.6.4-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +954 -102
- package/package.json +5 -5
package/dist/index.js
CHANGED
@@ -30173,7 +30173,7 @@ import express30 from "express";
 // package.json
 var package_default = {
 name: "@elizaos/server",
-version: "1.6.4-alpha.2",
+version: "1.6.4-alpha.4",
 description: "ElizaOS Server - Core server infrastructure for ElizaOS agents",
 publishConfig: {
 access: "public",
@@ -31732,7 +31732,7 @@ var DEBUG_BUILD = typeof __SENTRY_DEBUG__ === "undefined" || __SENTRY_DEBUG__;
 var GLOBAL_OBJ = globalThis;
 
 // ../../node_modules/@sentry/core/build/esm/utils/version.js
-var SDK_VERSION = "10.
+var SDK_VERSION = "10.22.0";
 
 // ../../node_modules/@sentry/core/build/esm/carrier.js
 function getMainCarrier() {
@@ -35163,6 +35163,14 @@ function captureException(exception, hint) {
 function captureEvent(event, hint) {
 return getCurrentScope().captureEvent(event, hint);
 }
+async function flush(timeout) {
+const client = getClient();
+if (client) {
+return client.flush(timeout);
+}
+DEBUG_BUILD && debug.warn("Cannot flush events. No client defined.");
+return Promise.resolve(false);
+}
 function isEnabled2() {
 const client = getClient();
 return client?.getOptions().enabled !== false && !!client?.getTransport();
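The hunk above adds a top-level async flush(timeout) helper that drains the active client's buffered events and resolves false when no client is configured. A minimal usage sketch, assuming the helper is consumed through the public @sentry/node re-export as in the upstream SDK:

    // Drain pending Sentry events before the process exits.
    import * as Sentry from "@sentry/node";

    process.on("SIGTERM", async () => {
      // Resolves true if the transport drained within 2000 ms,
      // false if no client is set up or the timeout elapsed first.
      const drained = await Sentry.flush(2000);
      console.log("Sentry events drained:", drained);
      process.exit(0);
    });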
@@ -36383,7 +36391,7 @@ function updateRateLimits(limits, { statusCode, headers }, now = Date.now()) {
 var DEFAULT_TRANSPORT_BUFFER_SIZE = 64;
 function createTransport(options, makeRequest, buffer = makePromiseBuffer(options.bufferSize || DEFAULT_TRANSPORT_BUFFER_SIZE)) {
 let rateLimits = {};
-const
+const flush2 = (timeout) => buffer.drain(timeout);
 function send(envelope) {
 const filteredEnvelopeItems = [];
 forEachEnvelopeItem(envelope, (item, type) => {
@@ -36426,7 +36434,7 @@ function createTransport(options, makeRequest, buffer = makePromiseBuffer(option
 }
 return {
 send,
-flush
+flush: flush2
 };
 }
 // ../../node_modules/@sentry/core/build/esm/utils/url.js
@@ -36551,7 +36559,7 @@ function debounce(func, wait, options) {
 maxTimerId !== undefined && clearTimeout(maxTimerId);
 timerId = maxTimerId = undefined;
 }
-function
+function flush2() {
 if (timerId !== undefined || maxTimerId !== undefined) {
 return invokeFunc();
 }
@@ -36568,7 +36576,7 @@ function debounce(func, wait, options) {
 return callbackReturnValue;
 }
 debounced.cancel = cancelTimers;
-debounced.flush =
+debounced.flush = flush2;
 return debounced;
 }
 // ../../node_modules/@sentry/core/build/esm/utils/request.js
@@ -37166,6 +37174,7 @@ var GEN_AI_REQUEST_TOP_K_ATTRIBUTE = "gen_ai.request.top_k";
 var GEN_AI_RESPONSE_FINISH_REASONS_ATTRIBUTE = "gen_ai.response.finish_reasons";
 var GEN_AI_RESPONSE_MODEL_ATTRIBUTE = "gen_ai.response.model";
 var GEN_AI_RESPONSE_ID_ATTRIBUTE = "gen_ai.response.id";
+var GEN_AI_RESPONSE_STOP_REASON_ATTRIBUTE = "gen_ai.response.stop_reason";
 var GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE = "gen_ai.usage.input_tokens";
 var GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE = "gen_ai.usage.output_tokens";
 var GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE = "gen_ai.usage.total_tokens";
@@ -37175,6 +37184,8 @@ var GEN_AI_RESPONSE_TEXT_ATTRIBUTE = "gen_ai.response.text";
 var GEN_AI_REQUEST_AVAILABLE_TOOLS_ATTRIBUTE = "gen_ai.request.available_tools";
 var GEN_AI_RESPONSE_STREAMING_ATTRIBUTE = "gen_ai.response.streaming";
 var GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE = "gen_ai.response.tool_calls";
+var GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS_ATTRIBUTE = "gen_ai.usage.cache_creation_input_tokens";
+var GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS_ATTRIBUTE = "gen_ai.usage.cache_read_input_tokens";
 var GEN_AI_USAGE_INPUT_TOKENS_CACHE_WRITE_ATTRIBUTE = "gen_ai.usage.input_tokens.cache_write";
 var GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE = "gen_ai.usage.input_tokens.cached";
 var OPENAI_RESPONSE_ID_ATTRIBUTE = "openai.response.id";
@@ -37188,6 +37199,183 @@ var OPENAI_OPERATIONS = {
 };
 var ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE = "anthropic.response.timestamp";
 
+// ../../node_modules/@sentry/core/build/esm/utils/ai/messageTruncation.js
+var DEFAULT_GEN_AI_MESSAGES_BYTE_LIMIT = 20000;
+var utf8Bytes = (text) => {
+return new TextEncoder().encode(text).length;
+};
+var jsonBytes = (value) => {
+return utf8Bytes(JSON.stringify(value));
+};
+function truncateTextByBytes(text, maxBytes) {
+if (utf8Bytes(text) <= maxBytes) {
+return text;
+}
+let low = 0;
+let high = text.length;
+let bestFit = "";
+while (low <= high) {
+const mid = Math.floor((low + high) / 2);
+const candidate = text.slice(0, mid);
+const byteSize = utf8Bytes(candidate);
+if (byteSize <= maxBytes) {
+bestFit = candidate;
+low = mid + 1;
+} else {
+high = mid - 1;
+}
+}
+return bestFit;
+}
+function getPartText(part) {
+if (typeof part === "string") {
+return part;
+}
+return part.text;
+}
+function withPartText(part, text) {
+if (typeof part === "string") {
+return text;
+}
+return { ...part, text };
+}
+function isContentMessage(message) {
+return message !== null && typeof message === "object" && "content" in message && typeof message.content === "string";
+}
+function isPartsMessage(message) {
+return message !== null && typeof message === "object" && "parts" in message && Array.isArray(message.parts) && message.parts.length > 0;
+}
+function truncateContentMessage(message, maxBytes) {
+const emptyMessage = { ...message, content: "" };
+const overhead = jsonBytes(emptyMessage);
+const availableForContent = maxBytes - overhead;
+if (availableForContent <= 0) {
+return [];
+}
+const truncatedContent = truncateTextByBytes(message.content, availableForContent);
+return [{ ...message, content: truncatedContent }];
+}
+function truncatePartsMessage(message, maxBytes) {
+const { parts } = message;
+const emptyParts = parts.map((part) => withPartText(part, ""));
+const overhead = jsonBytes({ ...message, parts: emptyParts });
+let remainingBytes = maxBytes - overhead;
+if (remainingBytes <= 0) {
+return [];
+}
+const includedParts = [];
+for (const part of parts) {
+const text = getPartText(part);
+const textSize = utf8Bytes(text);
+if (textSize <= remainingBytes) {
+includedParts.push(part);
+remainingBytes -= textSize;
+} else if (includedParts.length === 0) {
+const truncated = truncateTextByBytes(text, remainingBytes);
+if (truncated) {
+includedParts.push(withPartText(part, truncated));
+}
+break;
+} else {
+break;
+}
+}
+return includedParts.length > 0 ? [{ ...message, parts: includedParts }] : [];
+}
+function truncateSingleMessage(message, maxBytes) {
+if (!message || typeof message !== "object") {
+return [];
+}
+if (isContentMessage(message)) {
+return truncateContentMessage(message, maxBytes);
+}
+if (isPartsMessage(message)) {
+return truncatePartsMessage(message, maxBytes);
+}
+return [];
+}
+function truncateMessagesByBytes(messages, maxBytes) {
+if (!Array.isArray(messages) || messages.length === 0) {
+return messages;
+}
+const totalBytes = jsonBytes(messages);
+if (totalBytes <= maxBytes) {
+return messages;
+}
+const messageSizes = messages.map(jsonBytes);
+let bytesUsed = 0;
+let startIndex = messages.length;
+for (let i = messages.length - 1;i >= 0; i--) {
+const messageSize = messageSizes[i];
+if (messageSize && bytesUsed + messageSize > maxBytes) {
+break;
+}
+if (messageSize) {
+bytesUsed += messageSize;
+}
+startIndex = i;
+}
+if (startIndex === messages.length) {
+const newestMessage = messages[messages.length - 1];
+return truncateSingleMessage(newestMessage, maxBytes);
+}
+return messages.slice(startIndex);
+}
+function truncateGenAiMessages(messages) {
+return truncateMessagesByBytes(messages, DEFAULT_GEN_AI_MESSAGES_BYTE_LIMIT);
+}
+
+// ../../node_modules/@sentry/core/build/esm/utils/ai/utils.js
+function getFinalOperationName(methodPath) {
+if (methodPath.includes("messages")) {
+return "messages";
+}
+if (methodPath.includes("completions")) {
+return "completions";
+}
+if (methodPath.includes("models")) {
+return "models";
+}
+if (methodPath.includes("chat")) {
+return "chat";
+}
+return methodPath.split(".").pop() || "unknown";
+}
+function getSpanOperation(methodPath) {
+return `gen_ai.${getFinalOperationName(methodPath)}`;
+}
+function buildMethodPath(currentPath, prop) {
+return currentPath ? `${currentPath}.${prop}` : prop;
+}
+function setTokenUsageAttributes(span, promptTokens, completionTokens, cachedInputTokens, cachedOutputTokens) {
+if (promptTokens !== undefined) {
+span.setAttributes({
+[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: promptTokens
+});
+}
+if (completionTokens !== undefined) {
+span.setAttributes({
+[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: completionTokens
+});
+}
+if (promptTokens !== undefined || completionTokens !== undefined || cachedInputTokens !== undefined || cachedOutputTokens !== undefined) {
+const totalTokens = (promptTokens ?? 0) + (completionTokens ?? 0) + (cachedInputTokens ?? 0) + (cachedOutputTokens ?? 0);
+span.setAttributes({
+[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: totalTokens
+});
+}
+}
+function getTruncatedJsonString(value) {
+if (typeof value === "string") {
+return value;
+}
+if (Array.isArray(value)) {
+const truncatedMessages = truncateGenAiMessages(value);
+return JSON.stringify(truncatedMessages);
+}
+return JSON.stringify(value);
+}
+
 // ../../node_modules/@sentry/core/build/esm/utils/vercel-ai/constants.js
 var toolCallSpanMap = new Map;
 
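The messageTruncation helpers added above cap serialized gen_ai message payloads at roughly 20 kB (DEFAULT_GEN_AI_MESSAGES_BYTE_LIMIT), keeping the newest messages intact and, when even the newest message is too large on its own, binary-searching a UTF-8-safe cut inside it. A simplified, self-contained sketch of the "keep the newest messages within a byte budget" policy (it omits the single-message truncation fallback):

    const jsonBytes = (value) => new TextEncoder().encode(JSON.stringify(value)).length;

    function keepNewestWithinBudget(messages, maxBytes) {
      let used = 0;
      let start = messages.length;
      // Walk from the newest message backwards, accumulating serialized byte sizes.
      for (let i = messages.length - 1; i >= 0; i--) {
        const size = jsonBytes(messages[i]);
        if (used + size > maxBytes) break;
        used += size;
        start = i;
      }
      return messages.slice(start);
    }

    const big = "x".repeat(15000);
    const history = [
      { role: "system", content: big },
      { role: "user", content: big },
      { role: "assistant", content: "short reply" },
    ];
    console.log(keepNewestWithinBudget(history, 20000).length); // 2: the oldest message is dropped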
@@ -37355,7 +37543,8 @@ function processGenerateSpan(span, name, attributes) {
 span.setAttribute("gen_ai.function_id", functionId);
 }
 if (attributes[AI_PROMPT_ATTRIBUTE]) {
-
+const truncatedPrompt = getTruncatedJsonString(attributes[AI_PROMPT_ATTRIBUTE]);
+span.setAttribute("gen_ai.prompt", truncatedPrompt);
 }
 if (attributes[AI_MODEL_ID_ATTRIBUTE] && !attributes[GEN_AI_RESPONSE_MODEL_ATTRIBUTE2]) {
 span.setAttribute(GEN_AI_RESPONSE_MODEL_ATTRIBUTE2, attributes[AI_MODEL_ID_ATTRIBUTE]);
@@ -37488,13 +37677,13 @@ function getOperationName(methodPath) {
 }
 return methodPath.split(".").pop() || "unknown";
 }
-function
+function getSpanOperation2(methodPath) {
 return `gen_ai.${getOperationName(methodPath)}`;
 }
 function shouldInstrument(methodPath) {
 return INSTRUMENTED_METHODS.includes(methodPath);
 }
-function
+function buildMethodPath2(currentPath, prop) {
 return currentPath ? `${currentPath}.${prop}` : prop;
 }
 function isChatCompletionResponse(response) {
@@ -37509,7 +37698,7 @@ function isResponsesApiStreamEvent(event) {
 function isChatCompletionChunk(event) {
 return event !== null && typeof event === "object" && "object" in event && event.object === "chat.completion.chunk";
 }
-function
+function setTokenUsageAttributes2(span, promptTokens, completionTokens, totalTokens) {
 if (promptTokens !== undefined) {
 span.setAttributes({
 [OPENAI_USAGE_PROMPT_TOKENS_ATTRIBUTE]: promptTokens,
@@ -37661,7 +37850,7 @@ async function* instrumentStream(stream, span, recordOutputs) {
 }
 } finally {
 setCommonResponseAttributes(span, state.responseId, state.responseModel, state.responseTimestamp);
-
+setTokenUsageAttributes2(span, state.promptTokens, state.completionTokens, state.totalTokens);
 span.setAttributes({
 [GEN_AI_RESPONSE_STREAMING_ATTRIBUTE]: true
 });
@@ -37724,7 +37913,7 @@ function extractRequestAttributes(args, methodPath) {
 function addChatCompletionAttributes(span, response, recordOutputs) {
 setCommonResponseAttributes(span, response.id, response.model, response.created);
 if (response.usage) {
-
+setTokenUsageAttributes2(span, response.usage.prompt_tokens, response.usage.completion_tokens, response.usage.total_tokens);
 }
 if (Array.isArray(response.choices)) {
 const finishReasons = response.choices.map((choice) => choice.finish_reason).filter((reason) => reason !== null);
@@ -37751,7 +37940,7 @@ function addResponsesApiAttributes(span, response, recordOutputs) {
 });
 }
 if (response.usage) {
-
+setTokenUsageAttributes2(span, response.usage.input_tokens, response.usage.output_tokens, response.usage.total_tokens);
 }
 if (recordOutputs) {
 const responseWithOutput = response;
@@ -37784,10 +37973,12 @@ function addResponseAttributes(span, result, recordOutputs) {
 }
 function addRequestAttributes(span, params) {
 if ("messages" in params) {
-
+const truncatedMessages = getTruncatedJsonString(params.messages);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedMessages });
 }
 if ("input" in params) {
-
+const truncatedInput = getTruncatedJsonString(params.input);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedInput });
 }
 }
 function getOptionsFromIntegration() {
@@ -37811,7 +38002,7 @@ function instrumentMethod(originalMethod, methodPath, context, options) {
 if (isStreamRequested) {
 return startSpanManual({
 name: `${operationName} ${model} stream-response`,
-op:
+op: getSpanOperation2(methodPath),
 attributes: requestAttributes
 }, async (span) => {
 try {
@@ -37838,7 +38029,7 @@ function instrumentMethod(originalMethod, methodPath, context, options) {
 } else {
 return startSpan({
 name: `${operationName} ${model}`,
-op:
+op: getSpanOperation2(methodPath),
 attributes: requestAttributes
 }, async (span) => {
 try {
@@ -37868,7 +38059,7 @@ function createDeepProxy(target, currentPath = "", options) {
 return new Proxy(target, {
 get(obj, prop) {
 const value = obj[prop];
-const methodPath =
+const methodPath = buildMethodPath2(currentPath, String(prop));
 if (typeof value === "function" && shouldInstrument(methodPath)) {
 return instrumentMethod(value, methodPath, obj, options);
 }
@@ -37885,47 +38076,6 @@ function createDeepProxy(target, currentPath = "", options) {
 function instrumentOpenAiClient(client, options) {
 return createDeepProxy(client, "", options);
 }
-// ../../node_modules/@sentry/core/build/esm/utils/ai/utils.js
-function getFinalOperationName(methodPath) {
-if (methodPath.includes("messages")) {
-return "messages";
-}
-if (methodPath.includes("completions")) {
-return "completions";
-}
-if (methodPath.includes("models")) {
-return "models";
-}
-if (methodPath.includes("chat")) {
-return "chat";
-}
-return methodPath.split(".").pop() || "unknown";
-}
-function getSpanOperation2(methodPath) {
-return `gen_ai.${getFinalOperationName(methodPath)}`;
-}
-function buildMethodPath2(currentPath, prop) {
-return currentPath ? `${currentPath}.${prop}` : prop;
-}
-function setTokenUsageAttributes2(span, promptTokens, completionTokens, cachedInputTokens, cachedOutputTokens) {
-if (promptTokens !== undefined) {
-span.setAttributes({
-[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: promptTokens
-});
-}
-if (completionTokens !== undefined) {
-span.setAttributes({
-[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: completionTokens
-});
-}
-if (promptTokens !== undefined || completionTokens !== undefined || cachedInputTokens !== undefined || cachedOutputTokens !== undefined) {
-const totalTokens = (promptTokens ?? 0) + (completionTokens ?? 0) + (cachedInputTokens ?? 0) + (cachedOutputTokens ?? 0);
-span.setAttributes({
-[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: totalTokens
-});
-}
-}
-
 // ../../node_modules/@sentry/core/build/esm/utils/anthropic-ai/streaming.js
 function isErrorEvent3(event, span) {
 if ("type" in event && typeof event.type === "string") {
@@ -38037,7 +38187,7 @@ function finalizeStreamSpan(state, span, recordOutputs) {
 [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: state.responseModel
 });
 }
-
+setTokenUsageAttributes(span, state.promptTokens, state.completionTokens, state.cacheCreationInputTokens, state.cacheReadInputTokens);
 span.setAttributes({
 [GEN_AI_RESPONSE_STREAMING_ATTRIBUTE]: true
 });
@@ -38087,7 +38237,7 @@ async function* instrumentAsyncIterableStream(stream, span, recordOutputs) {
 [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: state.responseModel
 });
 }
-
+setTokenUsageAttributes(span, state.promptTokens, state.completionTokens, state.cacheCreationInputTokens, state.cacheReadInputTokens);
 span.setAttributes({
 [GEN_AI_RESPONSE_STREAMING_ATTRIBUTE]: true
 });
@@ -38159,6 +38309,17 @@ var ANTHROPIC_AI_INSTRUMENTED_METHODS = [
 function shouldInstrument2(methodPath) {
 return ANTHROPIC_AI_INSTRUMENTED_METHODS.includes(methodPath);
 }
+function handleResponseError(span, response) {
+if (response.error) {
+span.setStatus({ code: SPAN_STATUS_ERROR, message: response.error.type || "unknown_error" });
+captureException(response.error, {
+mechanism: {
+handled: false,
+type: "auto.ai.anthropic.anthropic_error"
+}
+});
+}
+}
 
 // ../../node_modules/@sentry/core/build/esm/utils/anthropic-ai/index.js
 function extractRequestAttributes2(args, methodPath) {
@@ -38196,26 +38357,17 @@ function extractRequestAttributes2(args, methodPath) {
 }
 function addPrivateRequestAttributes(span, params) {
 if ("messages" in params) {
-
+const truncatedMessages = getTruncatedJsonString(params.messages);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedMessages });
 }
 if ("input" in params) {
-
+const truncatedInput = getTruncatedJsonString(params.input);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedInput });
 }
 if ("prompt" in params) {
 span.setAttributes({ [GEN_AI_PROMPT_ATTRIBUTE]: JSON.stringify(params.prompt) });
 }
 }
-function handleResponseError(span, response) {
-if (response.error) {
-span.setStatus({ code: SPAN_STATUS_ERROR, message: response.error.type || "unknown_error" });
-captureException(response.error, {
-mechanism: {
-handled: false,
-type: "auto.ai.anthropic.anthropic_error"
-}
-});
-}
-}
 function addContentAttributes(span, response) {
 if ("content" in response) {
 if (Array.isArray(response.content)) {
@@ -38257,7 +38409,7 @@ function addMetadataAttributes(span, response) {
 });
 }
 if ("usage" in response && response.usage) {
-
+setTokenUsageAttributes(span, response.usage.input_tokens, response.usage.output_tokens, response.usage.cache_creation_input_tokens, response.usage.cache_read_input_tokens);
 }
 }
 }
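With the cache-aware setTokenUsageAttributes introduced earlier in this diff, gen_ai.usage.total_tokens now sums prompt, completion, cache-creation, and cache-read counts. A small worked example of that arithmetic with illustrative values:

    // Illustrative Anthropic-style usage object; the real values come from response.usage.
    const usage = {
      input_tokens: 1200,
      output_tokens: 250,
      cache_creation_input_tokens: 300,
      cache_read_input_tokens: 4000,
    };
    const total = (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0) +
      (usage.cache_creation_input_tokens ?? 0) + (usage.cache_read_input_tokens ?? 0);
    console.log(total); // 1200 + 250 + 300 + 4000 = 5750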
@@ -38283,14 +38435,14 @@ function handleStreamingError(error2, span, methodPath) {
 }
 throw error2;
 }
-function handleStreamingRequest(originalMethod, target, context, args, requestAttributes, operationName, methodPath, params, options, isStreamRequested) {
+function handleStreamingRequest(originalMethod, target, context, args, requestAttributes, operationName, methodPath, params, options, isStreamRequested, isStreamingMethod) {
 const model = requestAttributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] ?? "unknown";
 const spanConfig = {
 name: `${operationName} ${model} stream-response`,
-op:
+op: getSpanOperation(methodPath),
 attributes: requestAttributes
 };
-if (isStreamRequested) {
+if (isStreamRequested && !isStreamingMethod) {
 return startSpanManual(spanConfig, async (span) => {
 try {
 if (options.recordInputs && params) {
@@ -38326,11 +38478,11 @@ function instrumentMethod2(originalMethod, methodPath, context, options) {
 const isStreamRequested = Boolean(params?.stream);
 const isStreamingMethod = methodPath === "messages.stream";
 if (isStreamRequested || isStreamingMethod) {
-return handleStreamingRequest(originalMethod, target, context, args, requestAttributes, operationName, methodPath, params, options, isStreamRequested);
+return handleStreamingRequest(originalMethod, target, context, args, requestAttributes, operationName, methodPath, params, options, isStreamRequested, isStreamingMethod);
 }
 return startSpan({
 name: `${operationName} ${model}`,
-op:
+op: getSpanOperation(methodPath),
 attributes: requestAttributes
 }, (span) => {
 if (options.recordInputs && params) {
@@ -38355,7 +38507,7 @@ function createDeepProxy2(target, currentPath = "", options) {
 return new Proxy(target, {
 get(obj, prop) {
 const value = obj[prop];
-const methodPath =
+const methodPath = buildMethodPath(currentPath, String(prop));
 if (typeof value === "function" && shouldInstrument2(methodPath)) {
 return instrumentMethod2(value, methodPath, obj, options);
 }
@@ -38559,13 +38711,19 @@ function extractRequestAttributes3(methodPath, params, context) {
 }
 function addPrivateRequestAttributes2(span, params) {
 if ("contents" in params) {
-
+const contents = params.contents;
+const truncatedContents = getTruncatedJsonString(contents);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedContents });
 }
 if ("message" in params) {
-
+const message = params.message;
+const truncatedMessage = getTruncatedJsonString(message);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedMessage });
 }
 if ("history" in params) {
-
+const history = params.history;
+const truncatedHistory = getTruncatedJsonString(history);
+span.setAttributes({ [GEN_AI_REQUEST_MESSAGES_ATTRIBUTE]: truncatedHistory });
 }
 }
 function addResponseAttributes3(span, response, recordOutputs) {
@@ -38622,7 +38780,7 @@ function instrumentMethod3(originalMethod, methodPath, context, options) {
 if (isStreamingMethod(methodPath)) {
 return startSpanManual({
 name: `${operationName} ${model} stream-response`,
-op:
+op: getSpanOperation(methodPath),
 attributes: requestAttributes
 }, async (span) => {
 try {
@@ -38647,7 +38805,7 @@ function instrumentMethod3(originalMethod, methodPath, context, options) {
 }
 return startSpan({
 name: isSyncCreate ? `${operationName} ${model} create` : `${operationName} ${model}`,
-op:
+op: getSpanOperation(methodPath),
 attributes: requestAttributes
 }, (span) => {
 if (options.recordInputs && params) {
@@ -38670,7 +38828,7 @@ function createDeepProxy3(target, currentPath = "", options) {
 return new Proxy(target, {
 get: (t, prop, receiver) => {
 const value = Reflect.get(t, prop, receiver);
-const methodPath =
+const methodPath = buildMethodPath(currentPath, String(prop));
 if (typeof value === "function" && shouldInstrument3(methodPath)) {
 if (methodPath === CHATS_CREATE_METHOD) {
 const instrumentedMethod = instrumentMethod3(value, methodPath, t, options);
@@ -38703,6 +38861,418 @@ function instrumentGoogleGenAIClient(client, options) {
 };
 return createDeepProxy3(client, "", _options);
 }
+// ../../node_modules/@sentry/core/build/esm/utils/langchain/constants.js
+var LANGCHAIN_INTEGRATION_NAME = "LangChain";
+var LANGCHAIN_ORIGIN = "auto.ai.langchain";
+var ROLE_MAP = {
+human: "user",
+ai: "assistant",
+assistant: "assistant",
+system: "system",
+function: "function",
+tool: "tool"
+};
+
+// ../../node_modules/@sentry/core/build/esm/utils/langchain/utils.js
+var setIfDefined = (target, key, value) => {
+if (value != null)
+target[key] = value;
+};
+var setNumberIfDefined = (target, key, value) => {
+const n = Number(value);
+if (!Number.isNaN(n))
+target[key] = n;
+};
+function asString(v) {
+if (typeof v === "string")
+return v;
+try {
+return JSON.stringify(v);
+} catch {
+return String(v);
+}
+}
+function normalizeMessageRole(role) {
+const normalized = role.toLowerCase();
+return ROLE_MAP[normalized] ?? normalized;
+}
+function normalizeRoleNameFromCtor(name) {
+if (name.includes("System"))
+return "system";
+if (name.includes("Human"))
+return "user";
+if (name.includes("AI") || name.includes("Assistant"))
+return "assistant";
+if (name.includes("Function"))
+return "function";
+if (name.includes("Tool"))
+return "tool";
+return "user";
+}
+function getInvocationParams(tags) {
+if (!tags || Array.isArray(tags))
+return;
+return tags.invocation_params;
+}
+function normalizeLangChainMessages(messages) {
+return messages.map((message) => {
+const maybeGetType = message._getType;
+if (typeof maybeGetType === "function") {
+const messageType = maybeGetType.call(message);
+return {
+role: normalizeMessageRole(messageType),
+content: asString(message.content)
+};
+}
+const ctor = message.constructor?.name;
+if (ctor) {
+return {
+role: normalizeMessageRole(normalizeRoleNameFromCtor(ctor)),
+content: asString(message.content)
+};
+}
+if (message.type) {
+const role = String(message.type).toLowerCase();
+return {
+role: normalizeMessageRole(role),
+content: asString(message.content)
+};
+}
+if (message.role) {
+return {
+role: normalizeMessageRole(String(message.role)),
+content: asString(message.content)
+};
+}
+if (message.lc === 1 && message.kwargs) {
+const id = message.id;
+const messageType = Array.isArray(id) && id.length > 0 ? id[id.length - 1] : "";
+const role = typeof messageType === "string" ? normalizeRoleNameFromCtor(messageType) : "user";
+return {
+role: normalizeMessageRole(role),
+content: asString(message.kwargs?.content)
+};
+}
+return {
+role: "user",
+content: asString(message.content)
+};
+});
+}
+function extractCommonRequestAttributes(serialized, invocationParams, langSmithMetadata) {
+const attrs = {};
+const kwargs = "kwargs" in serialized ? serialized.kwargs : undefined;
+const temperature = invocationParams?.temperature ?? langSmithMetadata?.ls_temperature ?? kwargs?.temperature;
+setNumberIfDefined(attrs, GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE, temperature);
+const maxTokens = invocationParams?.max_tokens ?? langSmithMetadata?.ls_max_tokens ?? kwargs?.max_tokens;
+setNumberIfDefined(attrs, GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE, maxTokens);
+const topP = invocationParams?.top_p ?? kwargs?.top_p;
+setNumberIfDefined(attrs, GEN_AI_REQUEST_TOP_P_ATTRIBUTE, topP);
+const frequencyPenalty = invocationParams?.frequency_penalty;
+setNumberIfDefined(attrs, GEN_AI_REQUEST_FREQUENCY_PENALTY_ATTRIBUTE, frequencyPenalty);
+const presencePenalty = invocationParams?.presence_penalty;
+setNumberIfDefined(attrs, GEN_AI_REQUEST_PRESENCE_PENALTY_ATTRIBUTE, presencePenalty);
+if (invocationParams && "stream" in invocationParams) {
+setIfDefined(attrs, GEN_AI_REQUEST_STREAM_ATTRIBUTE, Boolean(invocationParams.stream));
+}
+return attrs;
+}
+function baseRequestAttributes(system, modelName, operation, serialized, invocationParams, langSmithMetadata) {
+return {
+[GEN_AI_SYSTEM_ATTRIBUTE]: asString(system ?? "langchain"),
+[GEN_AI_OPERATION_NAME_ATTRIBUTE]: operation,
+[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: asString(modelName),
+[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: LANGCHAIN_ORIGIN,
+...extractCommonRequestAttributes(serialized, invocationParams, langSmithMetadata)
+};
+}
+function extractLLMRequestAttributes(llm, prompts, recordInputs, invocationParams, langSmithMetadata) {
+const system = langSmithMetadata?.ls_provider;
+const modelName = invocationParams?.model ?? langSmithMetadata?.ls_model_name ?? "unknown";
+const attrs = baseRequestAttributes(system, modelName, "pipeline", llm, invocationParams, langSmithMetadata);
+if (recordInputs && Array.isArray(prompts) && prompts.length > 0) {
+const messages = prompts.map((p) => ({ role: "user", content: p }));
+setIfDefined(attrs, GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, asString(messages));
+}
+return attrs;
+}
+function extractChatModelRequestAttributes(llm, langChainMessages, recordInputs, invocationParams, langSmithMetadata) {
+const system = langSmithMetadata?.ls_provider ?? llm.id?.[2];
+const modelName = invocationParams?.model ?? langSmithMetadata?.ls_model_name ?? "unknown";
+const attrs = baseRequestAttributes(system, modelName, "chat", llm, invocationParams, langSmithMetadata);
+if (recordInputs && Array.isArray(langChainMessages) && langChainMessages.length > 0) {
+const normalized = normalizeLangChainMessages(langChainMessages.flat());
+setIfDefined(attrs, GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, asString(normalized));
+}
+return attrs;
+}
+function addToolCallsAttributes(generations, attrs) {
+const toolCalls = [];
+const flatGenerations = generations.flat();
+for (const gen of flatGenerations) {
+const content = gen.message?.content;
+if (Array.isArray(content)) {
+for (const item of content) {
+const t = item;
+if (t.type === "tool_use")
+toolCalls.push(t);
+}
+}
+}
+if (toolCalls.length > 0) {
+setIfDefined(attrs, GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE, asString(toolCalls));
+}
+}
+function addTokenUsageAttributes(llmOutput, attrs) {
+if (!llmOutput)
+return;
+const tokenUsage = llmOutput.tokenUsage;
+const anthropicUsage = llmOutput.usage;
+if (tokenUsage) {
+setNumberIfDefined(attrs, GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE, tokenUsage.promptTokens);
+setNumberIfDefined(attrs, GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE, tokenUsage.completionTokens);
+setNumberIfDefined(attrs, GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE, tokenUsage.totalTokens);
+} else if (anthropicUsage) {
+setNumberIfDefined(attrs, GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE, anthropicUsage.input_tokens);
+setNumberIfDefined(attrs, GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE, anthropicUsage.output_tokens);
+const input = Number(anthropicUsage.input_tokens);
+const output = Number(anthropicUsage.output_tokens);
+const total = (Number.isNaN(input) ? 0 : input) + (Number.isNaN(output) ? 0 : output);
+if (total > 0)
+setNumberIfDefined(attrs, GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE, total);
+if (anthropicUsage.cache_creation_input_tokens !== undefined)
+setNumberIfDefined(attrs, GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS_ATTRIBUTE, anthropicUsage.cache_creation_input_tokens);
+if (anthropicUsage.cache_read_input_tokens !== undefined)
+setNumberIfDefined(attrs, GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS_ATTRIBUTE, anthropicUsage.cache_read_input_tokens);
+}
+}
+function extractLlmResponseAttributes(llmResult, recordOutputs) {
+if (!llmResult)
+return;
+const attrs = {};
+if (Array.isArray(llmResult.generations)) {
+const finishReasons = llmResult.generations.flat().map((g) => g.generation_info?.finish_reason).filter((r) => typeof r === "string");
+if (finishReasons.length > 0) {
+setIfDefined(attrs, GEN_AI_RESPONSE_FINISH_REASONS_ATTRIBUTE, asString(finishReasons));
+}
+addToolCallsAttributes(llmResult.generations, attrs);
+if (recordOutputs) {
+const texts = llmResult.generations.flat().map((gen) => gen.text ?? gen.message?.content).filter((t) => typeof t === "string");
+if (texts.length > 0) {
+setIfDefined(attrs, GEN_AI_RESPONSE_TEXT_ATTRIBUTE, asString(texts));
+}
+}
+}
+addTokenUsageAttributes(llmResult.llmOutput, attrs);
+const llmOutput = llmResult.llmOutput;
+const modelName = llmOutput?.model_name ?? llmOutput?.model;
+if (modelName)
+setIfDefined(attrs, GEN_AI_RESPONSE_MODEL_ATTRIBUTE, modelName);
+if (llmOutput?.id) {
+setIfDefined(attrs, GEN_AI_RESPONSE_ID_ATTRIBUTE, llmOutput.id);
+}
+if (llmOutput?.stop_reason) {
+setIfDefined(attrs, GEN_AI_RESPONSE_STOP_REASON_ATTRIBUTE, asString(llmOutput.stop_reason));
+}
+return attrs;
+}
+
+// ../../node_modules/@sentry/core/build/esm/utils/langchain/index.js
+function createLangChainCallbackHandler(options = {}) {
+const recordInputs = options.recordInputs ?? false;
+const recordOutputs = options.recordOutputs ?? false;
+const spanMap = new Map;
+const exitSpan = (runId) => {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+span.end();
+spanMap.delete(runId);
+}
+};
+const handler = {
+lc_serializable: false,
+lc_namespace: ["langchain_core", "callbacks", "sentry"],
+lc_secrets: undefined,
+lc_attributes: undefined,
+lc_aliases: undefined,
+lc_serializable_keys: undefined,
+lc_id: ["langchain_core", "callbacks", "sentry"],
+lc_kwargs: {},
+name: "SentryCallbackHandler",
+ignoreLLM: false,
+ignoreChain: false,
+ignoreAgent: false,
+ignoreRetriever: false,
+ignoreCustomEvent: false,
+raiseError: false,
+awaitHandlers: true,
+handleLLMStart(llm, prompts, runId, _parentRunId, _extraParams, tags, metadata, _runName) {
+const invocationParams = getInvocationParams(tags);
+const attributes = extractLLMRequestAttributes(llm, prompts, recordInputs, invocationParams, metadata);
+const modelName = attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE];
+const operationName = attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE];
+startSpanManual({
+name: `${operationName} ${modelName}`,
+op: "gen_ai.pipeline",
+attributes: {
+...attributes,
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: "gen_ai.pipeline"
+}
+}, (span) => {
+spanMap.set(runId, span);
+return span;
+});
+},
+handleChatModelStart(llm, messages, runId, _parentRunId, _extraParams, tags, metadata, _runName) {
+const invocationParams = getInvocationParams(tags);
+const attributes = extractChatModelRequestAttributes(llm, messages, recordInputs, invocationParams, metadata);
+const modelName = attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE];
+const operationName = attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE];
+startSpanManual({
+name: `${operationName} ${modelName}`,
+op: "gen_ai.chat",
+attributes: {
+...attributes,
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: "gen_ai.chat"
+}
+}, (span) => {
+spanMap.set(runId, span);
+return span;
+});
+},
+handleLLMEnd(output, runId, _parentRunId, _tags, _extraParams) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+const attributes = extractLlmResponseAttributes(output, recordOutputs);
+if (attributes) {
+span.setAttributes(attributes);
+}
+exitSpan(runId);
+}
+},
+handleLLMError(error2, runId) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+span.setStatus({ code: SPAN_STATUS_ERROR, message: "llm_error" });
+exitSpan(runId);
+}
+captureException(error2, {
+mechanism: {
+handled: false,
+type: `${LANGCHAIN_ORIGIN}.llm_error_handler`
+}
+});
+},
+handleChainStart(chain, inputs, runId, _parentRunId) {
+const chainName = chain.name || "unknown_chain";
+const attributes = {
+[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: "auto.ai.langchain",
+"langchain.chain.name": chainName
+};
+if (recordInputs) {
+attributes["langchain.chain.inputs"] = JSON.stringify(inputs);
+}
+startSpanManual({
+name: `chain ${chainName}`,
+op: "gen_ai.invoke_agent",
+attributes: {
+...attributes,
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: "gen_ai.invoke_agent"
+}
+}, (span) => {
+spanMap.set(runId, span);
+return span;
+});
+},
+handleChainEnd(outputs, runId) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+if (recordOutputs) {
+span.setAttributes({
+"langchain.chain.outputs": JSON.stringify(outputs)
+});
+}
+exitSpan(runId);
+}
+},
+handleChainError(error2, runId) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+span.setStatus({ code: SPAN_STATUS_ERROR, message: "chain_error" });
+exitSpan(runId);
+}
+captureException(error2, {
+mechanism: {
+handled: false,
+type: `${LANGCHAIN_ORIGIN}.chain_error_handler`
+}
+});
+},
+handleToolStart(tool, input, runId, _parentRunId) {
+const toolName = tool.name || "unknown_tool";
+const attributes = {
+[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: LANGCHAIN_ORIGIN,
+"gen_ai.tool.name": toolName
+};
+if (recordInputs) {
+attributes["gen_ai.tool.input"] = input;
+}
+startSpanManual({
+name: `execute_tool ${toolName}`,
+op: "gen_ai.execute_tool",
+attributes: {
+...attributes,
+[SEMANTIC_ATTRIBUTE_SENTRY_OP]: "gen_ai.execute_tool"
+}
+}, (span) => {
+spanMap.set(runId, span);
+return span;
+});
+},
+handleToolEnd(output, runId) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+if (recordOutputs) {
+span.setAttributes({
+"gen_ai.tool.output": JSON.stringify(output)
+});
+}
+exitSpan(runId);
+}
+},
+handleToolError(error2, runId) {
+const span = spanMap.get(runId);
+if (span?.isRecording()) {
+span.setStatus({ code: SPAN_STATUS_ERROR, message: "tool_error" });
+exitSpan(runId);
+}
+captureException(error2, {
+mechanism: {
+handled: false,
+type: `${LANGCHAIN_ORIGIN}.tool_error_handler`
+}
+});
+},
+copy() {
+return handler;
+},
+toJSON() {
+return {
+lc: 1,
+type: "not_implemented",
+id: handler.lc_id
+};
+},
+toJSONNotImplemented() {
+return {
+lc: 1,
+type: "not_implemented",
+id: handler.lc_id
+};
+}
+};
+return handler;
+}
 // ../../node_modules/@sentry/core/build/esm/utils/breadcrumb-log-level.js
 function getBreadcrumbLogLevelFromHttpStatusCode(statusCode) {
 if (statusCode === undefined) {
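The callback handler built above maps LangChain lifecycle events (LLM/chat start and end, chain, tool, and error callbacks) onto Sentry spans keyed by runId. The factory itself is internal to this bundle, but the LangChain side of the contract is the standard per-call callbacks option: any object implementing the handle* methods receives these events. A hedged sketch using a plain logging handler in place of the Sentry one (assumes the @langchain/openai provider package, an OPENAI_API_KEY in the environment, and an illustrative model name):

    import { ChatOpenAI } from "@langchain/openai";

    // Minimal stand-in for the Sentry handler: same callback surface, console output only.
    const loggingHandler = {
      name: "LoggingCallbackHandler",
      handleChatModelStart(_llm, _messages, runId) {
        console.log("chat start", runId);
      },
      handleLLMEnd(output, runId) {
        console.log("chat end", runId, output.llmOutput?.tokenUsage);
      },
      handleLLMError(err, runId) {
        console.error("chat error", runId, err);
      },
    };

    const model = new ChatOpenAI({ model: "gpt-4o-mini" });
    // Per-call callbacks are exactly what the wrapped invoke/stream/batch methods augment.
    const reply = await model.invoke("Say hello", { callbacks: [loggingHandler] });
    console.log(reply.content);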
@@ -41666,7 +42236,7 @@ function functionNamesMatch(a, b) {
 }
 
 // ../../node_modules/@sentry/node-core/build/esm/integrations/local-variables/local-variables-async.js
-var base64WorkerScript = "
+var base64WorkerScript = "LyohIEBzZW50cnkvbm9kZS1jb3JlIDEwLjIyLjAgKGM2Yjk5YzApIHwgaHR0cHM6Ly9naXRodWIuY29tL2dldHNlbnRyeS9zZW50cnktamF2YXNjcmlwdCAqLwppbXBvcnR7U2Vzc2lvbiBhcyBlfWZyb20ibm9kZTppbnNwZWN0b3IvcHJvbWlzZXMiO2ltcG9ydHt3b3JrZXJEYXRhIGFzIHR9ZnJvbSJub2RlOndvcmtlcl90aHJlYWRzIjtjb25zdCBuPWdsb2JhbFRoaXMsaT17fTtjb25zdCBvPSJfX1NFTlRSWV9FUlJPUl9MT0NBTF9WQVJJQUJMRVNfXyI7Y29uc3QgYT10O2Z1bmN0aW9uIHMoLi4uZSl7YS5kZWJ1ZyYmZnVuY3Rpb24oZSl7aWYoISgiY29uc29sZSJpbiBuKSlyZXR1cm4gZSgpO2NvbnN0IHQ9bi5jb25zb2xlLG89e30sYT1PYmplY3Qua2V5cyhpKTthLmZvckVhY2goZT0+e2NvbnN0IG49aVtlXTtvW2VdPXRbZV0sdFtlXT1ufSk7dHJ5e3JldHVybiBlKCl9ZmluYWxseXthLmZvckVhY2goZT0+e3RbZV09b1tlXX0pfX0oKCk9PmNvbnNvbGUubG9nKCJbTG9jYWxWYXJpYWJsZXMgV29ya2VyXSIsLi4uZSkpfWFzeW5jIGZ1bmN0aW9uIGMoZSx0LG4saSl7Y29uc3Qgbz1hd2FpdCBlLnBvc3QoIlJ1bnRpbWUuZ2V0UHJvcGVydGllcyIse29iamVjdElkOnQsb3duUHJvcGVydGllczohMH0pO2lbbl09by5yZXN1bHQuZmlsdGVyKGU9PiJsZW5ndGgiIT09ZS5uYW1lJiYhaXNOYU4ocGFyc2VJbnQoZS5uYW1lLDEwKSkpLnNvcnQoKGUsdCk9PnBhcnNlSW50KGUubmFtZSwxMCktcGFyc2VJbnQodC5uYW1lLDEwKSkubWFwKGU9PmUudmFsdWU/LnZhbHVlKX1hc3luYyBmdW5jdGlvbiByKGUsdCxuLGkpe2NvbnN0IG89YXdhaXQgZS5wb3N0KCJSdW50aW1lLmdldFByb3BlcnRpZXMiLHtvYmplY3RJZDp0LG93blByb3BlcnRpZXM6ITB9KTtpW25dPW8ucmVzdWx0Lm1hcChlPT5bZS5uYW1lLGUudmFsdWU/LnZhbHVlXSkucmVkdWNlKChlLFt0LG5dKT0+KGVbdF09bixlKSx7fSl9ZnVuY3Rpb24gdShlLHQpe2UudmFsdWUmJigidmFsdWUiaW4gZS52YWx1ZT92b2lkIDA9PT1lLnZhbHVlLnZhbHVlfHxudWxsPT1lLnZhbHVlLnZhbHVlP3RbZS5uYW1lXT1gPCR7ZS52YWx1ZS52YWx1ZX0+YDp0W2UubmFtZV09ZS52YWx1ZS52YWx1ZToiZGVzY3JpcHRpb24iaW4gZS52YWx1ZSYmImZ1bmN0aW9uIiE9PWUudmFsdWUudHlwZT90W2UubmFtZV09YDwke2UudmFsdWUuZGVzY3JpcHRpb259PmA6InVuZGVmaW5lZCI9PT1lLnZhbHVlLnR5cGUmJih0W2UubmFtZV09Ijx1bmRlZmluZWQ+Iil9YXN5bmMgZnVuY3Rpb24gbChlLHQpe2NvbnN0IG49YXdhaXQgZS5wb3N0KCJSdW50aW1lLmdldFByb3BlcnRpZXMiLHtvYmplY3RJZDp0LG93blByb3BlcnRpZXM6ITB9KSxpPXt9O2Zvcihjb25zdCB0IG9mIG4ucmVzdWx0KWlmKHQudmFsdWU/Lm9iamVjdElkJiYiQXJyYXkiPT09dC52YWx1ZS5jbGFzc05hbWUpe2NvbnN0IG49dC52YWx1ZS5vYmplY3RJZDthd2FpdCBjKGUsbix0Lm5hbWUsaSl9ZWxzZSBpZih0LnZhbHVlPy5vYmplY3RJZCYmIk9iamVjdCI9PT10LnZhbHVlLmNsYXNzTmFtZSl7Y29uc3Qgbj10LnZhbHVlLm9iamVjdElkO2F3YWl0IHIoZSxuLHQubmFtZSxpKX1lbHNlIHQudmFsdWUmJnUodCxpKTtyZXR1cm4gaX1sZXQgZjsoYXN5bmMgZnVuY3Rpb24oKXtjb25zdCB0PW5ldyBlO3QuY29ubmVjdFRvTWFpblRocmVhZCgpLHMoIkNvbm5lY3RlZCB0byBtYWluIHRocmVhZCIpO2xldCBuPSExO3Qub24oIkRlYnVnZ2VyLnJlc3VtZWQiLCgpPT57bj0hMX0pLHQub24oIkRlYnVnZ2VyLnBhdXNlZCIsZT0+e249ITAsYXN5bmMgZnVuY3Rpb24oZSx7cmVhc29uOnQsZGF0YTp7b2JqZWN0SWQ6bn0sY2FsbEZyYW1lczppfSl7aWYoImV4Y2VwdGlvbiIhPT10JiYicHJvbWlzZVJlamVjdGlvbiIhPT10KXJldHVybjtpZihmPy4oKSxudWxsPT1uKXJldHVybjtjb25zdCBhPVtdO2ZvcihsZXQgdD0wO3Q8aS5sZW5ndGg7dCsrKXtjb25zdHtzY29wZUNoYWluOm4sZnVuY3Rpb25OYW1lOm8sdGhpczpzfT1pW3RdLGM9bi5maW5kKGU9PiJsb2NhbCI9PT1lLnR5cGUpLHI9Imdsb2JhbCIhPT1zLmNsYXNzTmFtZSYmcy5jbGFzc05hbWU/YCR7cy5jbGFzc05hbWV9LiR7b31gOm87aWYodm9pZCAwPT09Yz8ub2JqZWN0Lm9iamVjdElkKWFbdF09e2Z1bmN0aW9uOnJ9O2Vsc2V7Y29uc3Qgbj1hd2FpdCBsKGUsYy5vYmplY3Qub2JqZWN0SWQpO2FbdF09e2Z1bmN0aW9uOnIsdmFyczpufX19YXdhaXQgZS5wb3N0KCJSdW50aW1lLmNhbGxGdW5jdGlvbk9uIix7ZnVuY3Rpb25EZWNsYXJhdGlvbjpgZnVuY3Rpb24oKSB7IHRoaXMuJHtvfSA9IHRoaXMuJHtvfSB8fCAke0pTT04uc3RyaW5naWZ5KGEpfTsgfWAsc2lsZW50OiEwLG9iamVjdElkOm59KSxhd2FpdCBlLnBvc3QoIlJ1bnRpbWUucmVsZWFzZU9iamVjdCIse29iamVjdElkOm59KX0odCxlLnBhcmFtcykudGhlbihhc3luYygpPT57biYmYXdhaXQgdC5wb3N0KCJEZWJ1Z2dlci5yZXN1bWUiKX0sYXN5bmMgZT0+e24mJmF3YWl0IHQucG9zdCgiRGVidWdnZXIucmVzdW1lIil9KX0pLGF3YWl0IHQucG9zdCgiRGVidWdnZXIuZW5hYmxlIik7Y29uc3QgaT0hMSE9PWEuY2FwdHVyZUFsbEV4Y2VwdGlvbnM7aWYoYXdhaXQgdC5wb3N0KCJEZWJ1Z2dlci5zZXRQYXVzZU9uRXhjZXB0aW9ucyIse3N0YXRlOmk/ImFsbCI6InVuY2F1Z2hQifSksaSl7Y29uc3QgZT1hLm1heEV4Y2VwdGlvbnNQZXJTZWNvbmR8fDUwO2Y9ZnVuY3Rpb24oZSx0LG4pe2xldCBpPTAsbz01LGE9MDtyZXR1cm4gc2V0SW50ZXJ2YWwoKCk9PnswPT09YT9pPmUmJihvKj0yLG4obyksbz44NjQwMCYmKG89ODY0MDApLGE9byk6KGEtPTEsMD09PWEmJnQoKSksaT0wfSwxZTMpLnVucmVmKCksKCk9PntpKz0xfX0oZSxhc3luYygpPT57cygiUmF0ZS1saW1pdCBsaWZ0ZWQuIiksYXdhaXQgdC5wb3N0KCJEZWJ1Z2dlci5zZXRQYXVzZU9uRXhjZXB0aW9ucyIse3N0YXRlOiJhbGwifSl9LGFzeW5jIGU9PntzKGBSYXRlLWxpbWl0IGV4Y2VlZGVkLiBEaXNhYmxpbmcgY2FwdHVyaW5nIG9mIGNhdWdodCBleGNlcHRpb25zIGZvciAke2V9IHNlY29uZHMuYCksYXdhaXQgdC5wb3N0KCJEZWJ1Z2dlci5zZXRQYXVzZU9uRXhjZXB0aW9ucyIse3N0YXRlOiJ1bmNhdWdodCJ9KX0pfX0pKCkuY2F0Y2goZT0+e3MoIkZhaWxlZCB0byBzdGFydCBkZWJ1Z2dlciIsZSl9KSxzZXRJbnRlcnZhbCgoKT0+e30sMWU0KTs=";
 function log2(...args) {
 debug.log("[LocalVariables]", ...args);
 }
@@ -43311,7 +43881,9 @@ function getConfigWithDefaults2(options = {}) {
 return {
 [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: "auto.http.otel.node_fetch"
 };
-}
+},
+requestHook: options.requestHook,
+responseHook: options.responseHook
 };
 return instrumentationConfig;
 }
@@ -46886,12 +47458,122 @@ var _googleGenAIIntegration = (options = {}) => {
 };
 var googleGenAIIntegration = defineIntegration(_googleGenAIIntegration);

+// ../../node_modules/@sentry/node/build/esm/integrations/tracing/langchain/instrumentation.js
+var import_instrumentation21 = __toESM(require_src6(), 1);
+var supportedVersions4 = [">=0.1.0 <1.0.0"];
+function augmentCallbackHandlers(handlers2, sentryHandler) {
+if (!handlers2) {
+return [sentryHandler];
+}
+if (Array.isArray(handlers2)) {
+if (handlers2.includes(sentryHandler)) {
+return handlers2;
+}
+return [...handlers2, sentryHandler];
+}
+if (typeof handlers2 === "object") {
+return [handlers2, sentryHandler];
+}
+return handlers2;
+}
+function wrapRunnableMethod(originalMethod, sentryHandler, _methodName) {
+return new Proxy(originalMethod, {
+apply(target, thisArg, args) {
+const optionsIndex = 1;
+let options = args[optionsIndex];
+if (!options || typeof options !== "object" || Array.isArray(options)) {
+options = {};
+args[optionsIndex] = options;
+}
+const existingCallbacks = options.callbacks;
+const augmentedCallbacks = augmentCallbackHandlers(existingCallbacks, sentryHandler);
+options.callbacks = augmentedCallbacks;
+return Reflect.apply(target, thisArg, args);
+}
+});
+}
+
+class SentryLangChainInstrumentation extends import_instrumentation21.InstrumentationBase {
+constructor(config2 = {}) {
+super("@sentry/instrumentation-langchain", SDK_VERSION, config2);
+}
+init() {
+const modules = [];
+const providerPackages = [
+"@langchain/anthropic",
+"@langchain/openai",
+"@langchain/google-genai",
+"@langchain/mistralai",
+"@langchain/google-vertexai",
+"@langchain/groq"
+];
+for (const packageName of providerPackages) {
+modules.push(new import_instrumentation21.InstrumentationNodeModuleDefinition(packageName, supportedVersions4, this._patch.bind(this), (exports) => exports, [
+new import_instrumentation21.InstrumentationNodeModuleFile(`${packageName}/dist/index.cjs`, supportedVersions4, this._patch.bind(this), (exports) => exports)
+]));
+}
+return modules;
+}
+_patch(exports) {
+const client = getClient();
+const defaultPii = Boolean(client?.getOptions().sendDefaultPii);
+const config2 = this.getConfig();
+const recordInputs = config2?.recordInputs ?? defaultPii;
+const recordOutputs = config2?.recordOutputs ?? defaultPii;
+const sentryHandler = createLangChainCallbackHandler({
+recordInputs,
+recordOutputs
+});
+this._patchRunnableMethods(exports, sentryHandler);
+return exports;
+}
+_patchRunnableMethods(exports, sentryHandler) {
+const knownChatModelNames = [
+"ChatAnthropic",
+"ChatOpenAI",
+"ChatGoogleGenerativeAI",
+"ChatMistralAI",
+"ChatVertexAI",
+"ChatGroq"
+];
+const chatModelClass = Object.values(exports).find((exp) => {
+if (typeof exp !== "function") {
+return false;
+}
+return knownChatModelNames.includes(exp.name);
+});
+if (!chatModelClass) {
+return;
+}
+const targetProto = chatModelClass.prototype;
+const methodsToPatch = ["invoke", "stream", "batch"];
+for (const methodName of methodsToPatch) {
+const method = targetProto[methodName];
+if (typeof method === "function") {
+targetProto[methodName] = wrapRunnableMethod(method, sentryHandler);
+}
+}
+}
+}
+
+// ../../node_modules/@sentry/node/build/esm/integrations/tracing/langchain/index.js
+var instrumentLangChain = generateInstrumentOnce(LANGCHAIN_INTEGRATION_NAME, (options) => new SentryLangChainInstrumentation(options));
+var _langChainIntegration = (options = {}) => {
+return {
+name: LANGCHAIN_INTEGRATION_NAME,
+setupOnce() {
+instrumentLangChain(options);
+}
+};
+};
+var langChainIntegration = defineIntegration(_langChainIntegration);
+
 // ../../node_modules/@sentry/node/build/esm/integrations/tracing/firebase/otel/firebaseInstrumentation.js
-var
+var import_instrumentation25 = __toESM(require_src6(), 1);

 // ../../node_modules/@sentry/node/build/esm/integrations/tracing/firebase/otel/patches/firestore.js
 var import_api20 = __toESM(require_src(), 1);
-var
+var import_instrumentation23 = __toESM(require_src6(), 1);
 var import_semantic_conventions8 = __toESM(require_src2(), 1);
 import * as net2 from "node:net";
 function patchFirestore(tracer, firestoreSupportedVersions, wrap, unwrap, config2) {
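The LangChain instrumentation added above patches `invoke`, `stream` and `batch` on the detected chat-model class with a Proxy that merges a Sentry callback handler into each call's `options.callbacks`. A self-contained sketch of that injection pattern, with `FakeChatModel` and `loggingHandler` as illustrative stand-ins rather than real LangChain or Sentry objects:

    // Sketch of the callback-injection technique: wrap a method in a Proxy so
    // every call gets an extra callback handler merged into its options.
    function augmentHandlers(existing, extra) {
      if (!existing) return [extra];
      if (Array.isArray(existing)) {
        return existing.includes(extra) ? existing : [...existing, extra];
      }
      return [existing, extra]; // a single handler object becomes a two-element list
    }

    function wrapWithHandler(method, extraHandler) {
      return new Proxy(method, {
        apply(target, thisArg, args) {
          const options = (args[1] && typeof args[1] === "object" && !Array.isArray(args[1])) ? args[1] : {};
          args[1] = options;
          options.callbacks = augmentHandlers(options.callbacks, extraHandler);
          return Reflect.apply(target, thisArg, args);
        },
      });
    }

    class FakeChatModel {
      async invoke(input, options = {}) {
        for (const cb of options.callbacks ?? []) cb.onStart?.(input);
        return `echo: ${input}`;
      }
    }

    const loggingHandler = { onStart: (input) => console.log("model called with", input) };
    FakeChatModel.prototype.invoke = wrapWithHandler(FakeChatModel.prototype.invoke, loggingHandler);

    new FakeChatModel().invoke("hello").then(console.log);

The merge rules mirror `augmentCallbackHandlers` above: a missing value becomes a one-element array, an array gains the handler at most once, and a single handler object becomes a two-element array.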
@@ -46900,7 +47582,7 @@ function patchFirestore(tracer, firestoreSupportedVersions, wrap, unwrap, config
 const configFirestoreSpanCreationHook = config2.firestoreSpanCreationHook;
 if (typeof configFirestoreSpanCreationHook === "function") {
 firestoreSpanCreationHook = (span) => {
-
+import_instrumentation23.safeExecuteInTheMiddle(() => configFirestoreSpanCreationHook(span), (error2) => {
 if (!error2) {
 return;
 }
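Here, as in the functions patch further down, user-supplied hooks are routed through `safeExecuteInTheMiddle` with the swallow-error flag set, so a throwing hook is reported through the diagnostics channel instead of breaking the instrumented call. A small sketch of that isolation idea, using a local `runHookSafely` helper rather than the actual @opentelemetry/instrumentation export:

    // Sketch: run a user hook, report its error, but never let it propagate.
    function runHookSafely(hook, onError) {
      try {
        return hook();
      } catch (err) {
        onError(err); // log or diagnose, then swallow so instrumentation keeps working
        return undefined;
      }
    }

    const span = { setAttribute: () => {} }; // stand-in span
    runHookSafely(
      () => { throw new Error("user hook blew up"); },
      (err) => console.error("span hook failed:", err.message)
    );
    span.setAttribute("still.instrumented", true); // instrumentation continues regardless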
@@ -46908,7 +47590,7 @@ function patchFirestore(tracer, firestoreSupportedVersions, wrap, unwrap, config
 }, true);
 };
 }
-const moduleFirestoreCJS = new
+const moduleFirestoreCJS = new import_instrumentation23.InstrumentationNodeModuleDefinition("@firebase/firestore", firestoreSupportedVersions, (moduleExports) => wrapMethods(moduleExports, wrap, unwrap, tracer, firestoreSpanCreationHook));
 const files = [
 "@firebase/firestore/dist/lite/index.node.cjs.js",
 "@firebase/firestore/dist/lite/index.node.mjs.js",
@@ -46916,7 +47598,7 @@ function patchFirestore(tracer, firestoreSupportedVersions, wrap, unwrap, config
 "@firebase/firestore/dist/lite/index.cjs.js"
 ];
 for (const file of files) {
-moduleFirestoreCJS.files.push(new
+moduleFirestoreCJS.files.push(new import_instrumentation23.InstrumentationNodeModuleFile(file, firestoreSupportedVersions, (moduleExports) => wrapMethods(moduleExports, wrap, unwrap, tracer, firestoreSpanCreationHook), (moduleExports) => unwrapMethods(moduleExports, unwrap)));
 }
 return moduleFirestoreCJS;
 }
@@ -46930,7 +47612,7 @@ function wrapMethods(moduleExports, wrap, unwrap, tracer, firestoreSpanCreationH
 }
 function unwrapMethods(moduleExports, unwrap) {
 for (const method of ["addDoc", "getDocs", "setDoc", "deleteDoc"]) {
-if (
+if (import_instrumentation23.isWrapped(moduleExports[method])) {
 unwrap(moduleExports, method);
 }
 }
@@ -46982,7 +47664,7 @@ function patchSetDoc(tracer, firestoreSpanCreationHook) {
 }
 function executeContextWithSpan(span, callback) {
 return import_api20.context.with(import_api20.trace.setSpan(import_api20.context.active(), span), () => {
-return
+return import_instrumentation23.safeExecuteInTheMiddle(() => {
 return callback();
 }, (err) => {
 if (err) {
@@ -47056,11 +47738,162 @@ function addAttributes(span, reference) {
 span.setAttributes(attributes);
 }

+// ../../node_modules/@sentry/node/build/esm/integrations/tracing/firebase/otel/patches/functions.js
+var import_api21 = __toESM(require_src(), 1);
+var import_instrumentation24 = __toESM(require_src6(), 1);
+function patchFunctions(tracer, functionsSupportedVersions, wrap, unwrap, config2) {
+let requestHook2 = () => {};
+let responseHook = () => {};
+const errorHook = config2.functions?.errorHook;
+const configRequestHook = config2.functions?.requestHook;
+const configResponseHook = config2.functions?.responseHook;
+if (typeof configResponseHook === "function") {
+responseHook = (span, err) => {
+import_instrumentation24.safeExecuteInTheMiddle(() => configResponseHook(span, err), (error2) => {
+if (!error2) {
+return;
+}
+import_api21.diag.error(error2?.message);
+}, true);
+};
+}
+if (typeof configRequestHook === "function") {
+requestHook2 = (span) => {
+import_instrumentation24.safeExecuteInTheMiddle(() => configRequestHook(span), (error2) => {
+if (!error2) {
+return;
+}
+import_api21.diag.error(error2?.message);
+}, true);
+};
+}
+const moduleFunctionsCJS = new import_instrumentation24.InstrumentationNodeModuleDefinition("firebase-functions", functionsSupportedVersions);
+const modulesToInstrument = [
+{ name: "firebase-functions/lib/v2/providers/https.js", triggerType: "function" },
+{ name: "firebase-functions/lib/v2/providers/firestore.js", triggerType: "firestore" },
+{ name: "firebase-functions/lib/v2/providers/scheduler.js", triggerType: "scheduler" },
+{ name: "firebase-functions/lib/v2/storage.js", triggerType: "storage" }
+];
+modulesToInstrument.forEach(({ name, triggerType }) => {
+moduleFunctionsCJS.files.push(new import_instrumentation24.InstrumentationNodeModuleFile(name, functionsSupportedVersions, (moduleExports) => wrapCommonFunctions(moduleExports, wrap, unwrap, tracer, { requestHook: requestHook2, responseHook, errorHook }, triggerType), (moduleExports) => unwrapCommonFunctions(moduleExports, unwrap)));
+});
+return moduleFunctionsCJS;
+}
+function patchV2Functions(tracer, functionsConfig, triggerType) {
+return function v2FunctionsWrapper(original) {
+return function(...args) {
+const handler = typeof args[0] === "function" ? args[0] : args[1];
+const documentOrOptions = typeof args[0] === "function" ? undefined : args[0];
+if (!handler) {
+return original.call(this, ...args);
+}
+const wrappedHandler = async function(...handlerArgs) {
+const functionName = process.env.FUNCTION_TARGET || process.env.K_SERVICE || "unknown";
+const span = tracer.startSpan(`firebase.function.${triggerType}`, {
+kind: import_api21.SpanKind.SERVER
+});
+const attributes = {
+"faas.name": functionName,
+"faas.trigger": triggerType,
+"faas.provider": "firebase"
+};
+if (process.env.GCLOUD_PROJECT) {
+attributes["cloud.project_id"] = process.env.GCLOUD_PROJECT;
+}
+if (process.env.EVENTARC_CLOUD_EVENT_SOURCE) {
+attributes["cloud.event_source"] = process.env.EVENTARC_CLOUD_EVENT_SOURCE;
+}
+span.setAttributes(attributes);
+functionsConfig?.requestHook?.(span);
+return import_api21.context.with(import_api21.trace.setSpan(import_api21.context.active(), span), async () => {
+let error2;
+let result;
+try {
+result = await handler.apply(this, handlerArgs);
+} catch (e) {
+error2 = e;
+}
+functionsConfig?.responseHook?.(span, error2);
+if (error2) {
+span.recordException(error2);
+}
+span.end();
+if (error2) {
+await functionsConfig?.errorHook?.(span, error2);
+throw error2;
+}
+return result;
+});
+};
+if (documentOrOptions) {
+return original.call(this, documentOrOptions, wrappedHandler);
+} else {
+return original.call(this, wrappedHandler);
+}
+};
+};
+}
+function wrapCommonFunctions(moduleExports, wrap, unwrap, tracer, functionsConfig, triggerType) {
+unwrapCommonFunctions(moduleExports, unwrap);
+switch (triggerType) {
+case "function":
+wrap(moduleExports, "onRequest", patchV2Functions(tracer, functionsConfig, "http.request"));
+wrap(moduleExports, "onCall", patchV2Functions(tracer, functionsConfig, "http.call"));
+break;
+case "firestore":
+wrap(moduleExports, "onDocumentCreated", patchV2Functions(tracer, functionsConfig, "firestore.document.created"));
+wrap(moduleExports, "onDocumentUpdated", patchV2Functions(tracer, functionsConfig, "firestore.document.updated"));
+wrap(moduleExports, "onDocumentDeleted", patchV2Functions(tracer, functionsConfig, "firestore.document.deleted"));
+wrap(moduleExports, "onDocumentWritten", patchV2Functions(tracer, functionsConfig, "firestore.document.written"));
+wrap(moduleExports, "onDocumentCreatedWithAuthContext", patchV2Functions(tracer, functionsConfig, "firestore.document.created"));
+wrap(moduleExports, "onDocumentUpdatedWithAuthContext", patchV2Functions(tracer, functionsConfig, "firestore.document.updated"));
+wrap(moduleExports, "onDocumentDeletedWithAuthContext", patchV2Functions(tracer, functionsConfig, "firestore.document.deleted"));
+wrap(moduleExports, "onDocumentWrittenWithAuthContext", patchV2Functions(tracer, functionsConfig, "firestore.document.written"));
+break;
+case "scheduler":
+wrap(moduleExports, "onSchedule", patchV2Functions(tracer, functionsConfig, "scheduler.scheduled"));
+break;
+case "storage":
+wrap(moduleExports, "onObjectFinalized", patchV2Functions(tracer, functionsConfig, "storage.object.finalized"));
+wrap(moduleExports, "onObjectArchived", patchV2Functions(tracer, functionsConfig, "storage.object.archived"));
+wrap(moduleExports, "onObjectDeleted", patchV2Functions(tracer, functionsConfig, "storage.object.deleted"));
+wrap(moduleExports, "onObjectMetadataUpdated", patchV2Functions(tracer, functionsConfig, "storage.object.metadataUpdated"));
+break;
+}
+return moduleExports;
+}
+function unwrapCommonFunctions(moduleExports, unwrap) {
+const methods = [
+"onSchedule",
+"onRequest",
+"onCall",
+"onObjectFinalized",
+"onObjectArchived",
+"onObjectDeleted",
+"onObjectMetadataUpdated",
+"onDocumentCreated",
+"onDocumentUpdated",
+"onDocumentDeleted",
+"onDocumentWritten",
+"onDocumentCreatedWithAuthContext",
+"onDocumentUpdatedWithAuthContext",
+"onDocumentDeletedWithAuthContext",
+"onDocumentWrittenWithAuthContext"
+];
+for (const method of methods) {
+if (import_instrumentation24.isWrapped(moduleExports[method])) {
+unwrap(moduleExports, method);
+}
+}
+return moduleExports;
+}
+
 // ../../node_modules/@sentry/node/build/esm/integrations/tracing/firebase/otel/firebaseInstrumentation.js
 var DefaultFirebaseInstrumentationConfig = {};
 var firestoreSupportedVersions = [">=3.0.0 <5"];
+var functionsSupportedVersions = [">=6.0.0 <7"];

-class FirebaseInstrumentation extends
+class FirebaseInstrumentation extends import_instrumentation25.InstrumentationBase {
 constructor(config2 = DefaultFirebaseInstrumentationConfig) {
 super("@sentry/instrumentation-firebase", SDK_VERSION, config2);
 }
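The `wrappedHandler` added above opens a SERVER span per invocation, runs the original handler inside that span's context, records any error on the span, ends the span, and rethrows. A sketch of the same span-per-invocation flow for a plain async handler, written directly against @opentelemetry/api; the tracer name, attributes and `withFunctionSpan` helper are illustrative, not the values used above:

    // Sketch: run a function handler inside a SERVER span, mirroring the
    // control flow of the wrappedHandler in the hunk above.
    const { trace, context, SpanKind } = require("@opentelemetry/api");

    function withFunctionSpan(triggerType, handler) {
      const tracer = trace.getTracer("example-firebase-functions");
      return async function wrapped(...args) {
        const span = tracer.startSpan(`firebase.function.${triggerType}`, { kind: SpanKind.SERVER });
        span.setAttributes({ "faas.trigger": triggerType, "faas.provider": "firebase" });
        return context.with(trace.setSpan(context.active(), span), async () => {
          try {
            return await handler.apply(this, args);
          } catch (err) {
            span.recordException(err); // keep the failure visible on the span
            throw err;                 // the platform still sees the error
          } finally {
            span.end();
          }
        });
      };
    }

    // Usage (hypothetical): onRequest(withFunctionSpan("http.request", async (req, res) => { /* ... */ }));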
@@ -47070,6 +47903,7 @@ class FirebaseInstrumentation extends import_instrumentation22.InstrumentationBa
 init() {
 const modules = [];
 modules.push(patchFirestore(this.tracer, firestoreSupportedVersions, this._wrap, this._unwrap, this.getConfig()));
+modules.push(patchFunctions(this.tracer, functionsSupportedVersions, this._wrap, this._unwrap, this.getConfig()));
 return modules;
 }
 }
@@ -47080,6 +47914,23 @@ var config2 = {
 firestoreSpanCreationHook: (span) => {
 addOriginToSpan2(span, "auto.firebase.otel.firestore");
 span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, "db.query");
+},
+functions: {
+requestHook: (span) => {
+addOriginToSpan2(span, "auto.firebase.otel.functions");
+span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, "http.request");
+},
+errorHook: async (_, error2) => {
+if (error2) {
+captureException(error2, {
+mechanism: {
+type: "auto.firebase.otel.functions",
+handled: false
+}
+});
+await flush(2000);
+}
+}
 }
 };
 var instrumentFirebase = generateInstrumentOnce(INTEGRATION_NAME43, () => new FirebaseInstrumentation(config2));
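The new `errorHook` reports the exception as unhandled and then awaits `flush(2000)` so the event is delivered before a short-lived Cloud Functions instance is frozen. The same capture-then-flush pattern can be applied by hand in any serverless handler; a minimal sketch with @sentry/node, where `doWork` is a placeholder and the 2000 ms budget simply mirrors the value above:

    // Sketch: report an error and give the transport up to 2s to deliver it
    // before the serverless runtime suspends the process.
    const Sentry = require("@sentry/node");

    Sentry.init({ dsn: process.env.SENTRY_DSN });

    async function handleEvent(event) {
      try {
        return await doWork(event); // stand-in for the real handler body
      } catch (err) {
        Sentry.captureException(err);
        // flush() resolves false if the buffer could not be drained within the timeout.
        await Sentry.flush(2000);
        throw err; // let the platform record the failure as well
      }
    }

    async function doWork() {
      throw new Error("boom");
    }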
@@ -47120,12 +47971,13 @@ function getAutoPerformanceIntegrations() {
 postgresJsIntegration(),
 firebaseIntegration(),
 anthropicAIIntegration(),
-googleGenAIIntegration()
+googleGenAIIntegration(),
+langChainIntegration()
 ];
 }

 // ../../node_modules/@sentry/node/build/esm/sdk/initOtel.js
-var
+var import_api22 = __toESM(require_src(), 1);
 var import_resources = __toESM(require_src9(), 1);
 var import_sdk_trace_base2 = __toESM(require_src10(), 1);
 var import_semantic_conventions9 = __toESM(require_src2(), 1);
@@ -47153,9 +48005,9 @@ function setupOtel(client, options = {}) {
 ...options.spanProcessors || []
 ]
 });
-
-
-
+import_api22.trace.setGlobalTracerProvider(provider);
+import_api22.propagation.setGlobalPropagator(new SentryPropagator);
+import_api22.context.setGlobalContextManager(new SentryContextManager);
 return provider;
 }
 function _clampSpanProcessorTimeout(maxSpanWaitDuration) {