@elasticdash/langchain 0.0.1 → 0.0.2

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
package/dist/index.mjs ADDED
@@ -0,0 +1,538 @@
+ // src/CallbackHandler.ts
+ import { getGlobalLogger } from "@elasticdash/core";
+ import {
+ startObservation
+ } from "@elasticdash/tracing";
+ import { BaseCallbackHandler } from "@langchain/core/callbacks/base";
+ import {
+ AIMessage,
+ AIMessageChunk,
+ BaseMessage
+ } from "@langchain/core/messages";
+ var LANGSMITH_HIDDEN_TAG = "langsmith:hidden";
+ var CallbackHandler = class extends BaseCallbackHandler {
+ constructor(params) {
+ var _a;
+ super();
+ this.name = "LangfuseCallbackHandler";
+ this.completionStartTimes = {};
+ this.runMap = /* @__PURE__ */ new Map();
+ this.last_trace_id = null;
+ this.sessionId = params == null ? void 0 : params.sessionId;
+ this.userId = params == null ? void 0 : params.userId;
+ this.tags = (_a = params == null ? void 0 : params.tags) != null ? _a : [];
+ this.traceMetadata = params == null ? void 0 : params.traceMetadata;
+ this.version = params == null ? void 0 : params.version;
+ this.promptToParentRunMap = /* @__PURE__ */ new Map();
+ }
+ get logger() {
+ return getGlobalLogger();
+ }
+ async handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, _fields) {
+ if (runId && !(runId in this.completionStartTimes)) {
+ this.logger.debug(`LLM first streaming token: ${runId}`);
+ this.completionStartTimes[runId] = /* @__PURE__ */ new Date();
+ }
+ }
+ async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) {
+ var _a, _b;
+ try {
+ this.logger.debug(`Chain start with Id: ${runId}`);
+ const runName = (_b = name != null ? name : (_a = chain.id.at(-1)) == null ? void 0 : _a.toString()) != null ? _b : "Langchain Run";
+ this.registerLangfusePrompt(parentRunId, metadata);
+ let finalInput = inputs;
+ if (typeof inputs === "object" && "input" in inputs && Array.isArray(inputs["input"]) && inputs["input"].every((m) => m instanceof BaseMessage)) {
+ finalInput = inputs["input"].map(
+ (m) => this.extractChatMessageContent(m)
+ );
+ } else if (typeof inputs === "object" && "messages" in inputs && Array.isArray(inputs["messages"]) && inputs["messages"].every((m) => m instanceof BaseMessage)) {
+ finalInput = inputs["messages"].map(
+ (m) => this.extractChatMessageContent(m)
+ );
+ } else if (typeof inputs === "object" && "content" in inputs && typeof inputs["content"] === "string") {
+ finalInput = inputs["content"];
+ }
+ const span = this.startAndRegisterOtelSpan({
+ runName,
+ parentRunId,
+ runId,
+ tags,
+ metadata,
+ attributes: {
+ input: finalInput
+ }
+ });
+ const traceTags = [.../* @__PURE__ */ new Set([...tags != null ? tags : [], ...this.tags])];
+ if (!parentRunId) {
+ span.updateTrace({
+ tags: traceTags,
+ userId: metadata && "langfuseUserId" in metadata && typeof metadata["langfuseUserId"] === "string" ? metadata["langfuseUserId"] : this.userId,
+ sessionId: metadata && "langfuseSessionId" in metadata && typeof metadata["langfuseSessionId"] === "string" ? metadata["langfuseSessionId"] : this.sessionId,
+ metadata: this.traceMetadata,
+ version: this.version
+ });
+ }
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleAgentAction(action, runId, parentRunId) {
+ try {
+ this.logger.debug(`Agent action ${action.tool} with ID: ${runId}`);
+ this.startAndRegisterOtelSpan({
+ runId,
+ parentRunId,
+ runName: action.tool,
+ attributes: {
+ input: action
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleAgentEnd(action, runId, parentRunId) {
+ try {
+ this.logger.debug(`Agent finish with ID: ${runId}`);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: { output: action }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleChainError(err, runId, parentRunId) {
+ try {
+ this.logger.debug(`Chain error: ${err} with ID: ${runId}`);
+ const azureRefusalError = this.parseAzureRefusalError(err);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ level: "ERROR",
+ statusMessage: err.toString() + azureRefusalError
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleGenerationStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name) {
+ var _a, _b;
+ this.logger.debug(
+ `Generation start with ID: ${runId} and parentRunId ${parentRunId}`
+ );
+ const runName = (_b = name != null ? name : (_a = llm.id.at(-1)) == null ? void 0 : _a.toString()) != null ? _b : "Langchain Generation";
+ const modelParameters = {};
+ const invocationParams = extraParams == null ? void 0 : extraParams["invocation_params"];
+ for (const [key, value] of Object.entries({
+ temperature: invocationParams == null ? void 0 : invocationParams.temperature,
+ max_tokens: invocationParams == null ? void 0 : invocationParams.max_tokens,
+ top_p: invocationParams == null ? void 0 : invocationParams.top_p,
+ frequency_penalty: invocationParams == null ? void 0 : invocationParams.frequency_penalty,
+ presence_penalty: invocationParams == null ? void 0 : invocationParams.presence_penalty,
+ request_timeout: invocationParams == null ? void 0 : invocationParams.request_timeout
+ })) {
+ if (value !== void 0 && value !== null) {
+ modelParameters[key] = value;
+ }
+ }
+ let extractedModelName;
+ if (extraParams) {
+ const invocationParamsModelName = extraParams.invocation_params.model;
+ const metadataModelName = metadata && "ls_model_name" in metadata ? metadata["ls_model_name"] : void 0;
+ extractedModelName = invocationParamsModelName != null ? invocationParamsModelName : metadataModelName;
+ }
+ const registeredPrompt = this.promptToParentRunMap.get(
+ parentRunId != null ? parentRunId : "root"
+ );
+ if (registeredPrompt && parentRunId) {
+ this.deregisterLangfusePrompt(parentRunId);
+ }
+ this.startAndRegisterOtelSpan({
+ type: "generation",
+ runId,
+ parentRunId,
+ metadata,
+ tags,
+ runName,
+ attributes: {
+ input: messages,
+ model: extractedModelName,
+ modelParameters,
+ prompt: registeredPrompt
+ }
+ });
+ }
+ async handleChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name) {
+ try {
+ this.logger.debug(`Chat model start with ID: ${runId}`);
+ const prompts = messages.flatMap(
+ (message) => message.map((m) => this.extractChatMessageContent(m))
+ );
+ this.handleGenerationStart(
+ llm,
+ prompts,
+ runId,
+ parentRunId,
+ extraParams,
+ tags,
+ metadata,
+ name
+ );
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleChainEnd(outputs, runId, parentRunId) {
+ try {
+ this.logger.debug(`Chain end with ID: ${runId}`);
+ let finalOutput = outputs;
+ if (typeof outputs === "object" && "output" in outputs && typeof outputs["output"] === "string") {
+ finalOutput = outputs["output"];
+ } else if (typeof outputs === "object" && "messages" in outputs && Array.isArray(outputs["messages"]) && outputs["messages"].every((m) => m instanceof BaseMessage)) {
+ finalOutput = {
+ messages: outputs.messages.map(
+ (message) => this.extractChatMessageContent(message)
+ )
+ };
+ }
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ output: finalOutput
+ }
+ });
+ this.deregisterLangfusePrompt(runId);
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata, name) {
+ try {
+ this.logger.debug(`LLM start with ID: ${runId}`);
+ this.handleGenerationStart(
+ llm,
+ prompts,
+ runId,
+ parentRunId,
+ extraParams,
+ tags,
+ metadata,
+ name
+ );
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleToolStart(tool, input, runId, parentRunId, tags, metadata, name) {
+ var _a, _b;
+ try {
+ this.logger.debug(`Tool start with ID: ${runId}`);
+ this.startAndRegisterOtelSpan({
+ runId,
+ parentRunId,
+ runName: (_b = name != null ? name : (_a = tool.id.at(-1)) == null ? void 0 : _a.toString()) != null ? _b : "Tool execution",
+ attributes: {
+ input
+ },
+ metadata,
+ tags
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleRetrieverStart(retriever, query, runId, parentRunId, tags, metadata, name) {
+ var _a, _b;
+ try {
+ this.logger.debug(`Retriever start with ID: ${runId}`);
+ this.startAndRegisterOtelSpan({
+ runId,
+ parentRunId,
+ runName: (_b = name != null ? name : (_a = retriever.id.at(-1)) == null ? void 0 : _a.toString()) != null ? _b : "Retriever",
+ attributes: {
+ input: query
+ },
+ tags,
+ metadata
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleRetrieverEnd(documents, runId, parentRunId) {
+ try {
+ this.logger.debug(`Retriever end with ID: ${runId}`);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ output: documents
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleRetrieverError(err, runId, parentRunId) {
+ try {
+ this.logger.debug(`Retriever error: ${err} with ID: ${runId}`);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ level: "ERROR",
+ statusMessage: err.toString()
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleToolEnd(output, runId, parentRunId) {
+ try {
+ this.logger.debug(`Tool end with ID: ${runId}`);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: { output }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleToolError(err, runId, parentRunId) {
+ try {
+ this.logger.debug(`Tool error ${err} with ID: ${runId}`);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ level: "ERROR",
+ statusMessage: err.toString()
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleLLMEnd(output, runId, parentRunId) {
+ var _a, _b, _c, _d, _e, _f, _g;
+ try {
+ this.logger.debug(`LLM end with ID: ${runId}`);
+ const lastResponse = output.generations[output.generations.length - 1][output.generations[output.generations.length - 1].length - 1];
+ const llmUsage = (_b = this.extractUsageMetadata(lastResponse)) != null ? _b : (_a = output.llmOutput) == null ? void 0 : _a["tokenUsage"];
+ const modelName = this.extractModelNameFromMetadata(lastResponse);
+ const usageDetails = {
+ input: (_c = llmUsage == null ? void 0 : llmUsage.input_tokens) != null ? _c : "promptTokens" in llmUsage ? llmUsage == null ? void 0 : llmUsage.promptTokens : void 0,
+ output: (_d = llmUsage == null ? void 0 : llmUsage.output_tokens) != null ? _d : "completionTokens" in llmUsage ? llmUsage == null ? void 0 : llmUsage.completionTokens : void 0,
+ total: (_e = llmUsage == null ? void 0 : llmUsage.total_tokens) != null ? _e : "totalTokens" in llmUsage ? llmUsage == null ? void 0 : llmUsage.totalTokens : void 0
+ };
+ if (llmUsage && "input_token_details" in llmUsage) {
+ for (const [key, val] of Object.entries(
+ (_f = llmUsage["input_token_details"]) != null ? _f : {}
+ )) {
+ usageDetails[`input_${key}`] = val;
+ if ("input" in usageDetails && typeof val === "number") {
+ usageDetails["input"] = Math.max(0, usageDetails["input"] - val);
+ }
+ }
+ }
+ if (llmUsage && "output_token_details" in llmUsage) {
+ for (const [key, val] of Object.entries(
+ (_g = llmUsage["output_token_details"]) != null ? _g : {}
+ )) {
+ usageDetails[`output_${key}`] = val;
+ if ("output" in usageDetails && typeof val === "number") {
+ usageDetails["output"] = Math.max(0, usageDetails["output"] - val);
+ }
+ }
+ }
+ const extractedOutput = "message" in lastResponse ? this.extractChatMessageContent(
+ lastResponse["message"]
+ ) : lastResponse.text;
+ this.handleOtelSpanEnd({
+ runId,
+ type: "generation",
+ attributes: {
+ model: modelName,
+ output: extractedOutput,
+ completionStartTime: runId in this.completionStartTimes ? this.completionStartTimes[runId] : void 0,
+ usageDetails
+ }
+ });
+ if (runId in this.completionStartTimes) {
+ delete this.completionStartTimes[runId];
+ }
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ async handleLLMError(err, runId, parentRunId) {
+ try {
+ this.logger.debug(`LLM error ${err} with ID: ${runId}`);
+ const azureRefusalError = this.parseAzureRefusalError(err);
+ this.handleOtelSpanEnd({
+ runId,
+ attributes: {
+ level: "ERROR",
+ statusMessage: err.toString() + azureRefusalError
+ }
+ });
+ } catch (e) {
+ this.logger.debug(e instanceof Error ? e.message : String(e));
+ }
+ }
+ registerLangfusePrompt(parentRunId, metadata) {
+ if (metadata && "langfusePrompt" in metadata && parentRunId) {
+ this.promptToParentRunMap.set(
+ parentRunId,
+ metadata.langfusePrompt
+ );
+ }
+ }
+ deregisterLangfusePrompt(runId) {
+ this.promptToParentRunMap.delete(runId);
+ }
+ startAndRegisterOtelSpan(params) {
+ var _a, _b;
+ const { type, runName, runId, parentRunId, attributes, metadata, tags } = params;
+ const observation = type === "generation" ? startObservation(
+ runName,
+ {
+ version: this.version,
+ metadata: this.joinTagsAndMetaData(tags, metadata),
+ level: tags && tags.includes(LANGSMITH_HIDDEN_TAG) ? "DEBUG" : void 0,
+ ...attributes
+ },
+ {
+ asType: "generation",
+ parentSpanContext: parentRunId ? (_a = this.runMap.get(parentRunId)) == null ? void 0 : _a.otelSpan.spanContext() : void 0
+ }
+ ) : startObservation(
+ runName,
+ {
+ version: this.version,
+ metadata: this.joinTagsAndMetaData(tags, metadata),
+ level: tags && tags.includes(LANGSMITH_HIDDEN_TAG) ? "DEBUG" : void 0,
+ ...attributes
+ },
+ {
+ parentSpanContext: parentRunId ? (_b = this.runMap.get(parentRunId)) == null ? void 0 : _b.otelSpan.spanContext() : void 0
+ }
+ );
+ this.runMap.set(runId, observation);
+ return observation;
+ }
+ handleOtelSpanEnd(params) {
+ const { runId, attributes = {} } = params;
+ const span = this.runMap.get(runId);
+ if (!span) {
+ this.logger.warn("Span not found in runMap. Skipping operation");
+ return;
+ }
+ span.update(attributes).end();
+ this.last_trace_id = span.traceId;
+ this.runMap.delete(runId);
+ }
+ parseAzureRefusalError(err) {
+ let azureRefusalError = "";
+ if (typeof err == "object" && "error" in err) {
+ try {
+ azureRefusalError = "\n\nError details:\n" + JSON.stringify(err["error"], null, 2);
+ } catch {
+ }
+ }
+ return azureRefusalError;
+ }
+ joinTagsAndMetaData(tags, metadata1, metadata2) {
+ const finalDict = {};
+ if (tags && tags.length > 0) {
+ finalDict.tags = tags;
+ }
+ if (metadata1) {
+ Object.assign(finalDict, metadata1);
+ }
+ if (metadata2) {
+ Object.assign(finalDict, metadata2);
+ }
+ return this.stripLangfuseKeysFromMetadata(finalDict);
+ }
+ stripLangfuseKeysFromMetadata(metadata) {
+ if (!metadata) {
+ return;
+ }
+ const langfuseKeys = [
+ "langfusePrompt",
+ "langfuseUserId",
+ "langfuseSessionId"
+ ];
+ return Object.fromEntries(
+ Object.entries(metadata).filter(
+ ([key, _]) => !langfuseKeys.includes(key)
+ )
+ );
+ }
+ /** Not all models supports tokenUsage in llmOutput, can use AIMessage.usage_metadata instead */
+ extractUsageMetadata(generation) {
+ try {
+ const usageMetadata = "message" in generation && (generation["message"] instanceof AIMessage || generation["message"] instanceof AIMessageChunk) ? generation["message"].usage_metadata : void 0;
+ return usageMetadata;
+ } catch (err) {
+ this.logger.debug(`Error extracting usage metadata: ${err}`);
+ return;
+ }
+ }
+ extractModelNameFromMetadata(generation) {
+ try {
+ return "message" in generation && (generation["message"] instanceof AIMessage || generation["message"] instanceof AIMessageChunk) ? generation["message"].response_metadata.model_name : void 0;
+ } catch {
+ }
+ }
+ extractChatMessageContent(message) {
+ var _a, _b;
+ let response = void 0;
+ if (message.getType() === "human") {
+ response = { content: message.content, role: "user" };
+ } else if (message.getType() === "generic") {
+ response = {
+ content: message.content,
+ role: "human"
+ };
+ } else if (message.getType() === "ai") {
+ response = { content: message.content, role: "assistant" };
+ if ("tool_calls" in message && Array.isArray(message.tool_calls) && ((_b = (_a = message.tool_calls) == null ? void 0 : _a.length) != null ? _b : 0) > 0) {
+ response["tool_calls"] = message["tool_calls"];
+ }
+ if ("additional_kwargs" in message && "tool_calls" in message["additional_kwargs"]) {
+ response["tool_calls"] = message["additional_kwargs"]["tool_calls"];
+ }
+ } else if (message.getType() === "system") {
+ response = { content: message.content, role: "system" };
+ } else if (message.getType() === "function") {
+ response = {
+ content: message.content,
+ additional_kwargs: message.additional_kwargs,
+ role: message.name
+ };
+ } else if (message.getType() === "tool") {
+ response = {
+ content: message.content,
+ additional_kwargs: message.additional_kwargs,
+ role: message.name
+ };
+ } else if (!message.name) {
+ response = { content: message.content };
+ } else {
+ response = {
+ role: message.name,
+ content: message.content
+ };
+ }
+ if ((message.additional_kwargs.function_call || message.additional_kwargs.tool_calls) && response["tool_calls"] === void 0) {
+ return { ...response, additional_kwargs: message.additional_kwargs };
+ }
+ return response;
+ }
+ };
+ export {
+ CallbackHandler
+ };
+ //# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/CallbackHandler.ts"],"sourcesContent":["import { getGlobalLogger } from \"@elasticdash/core\";\nimport {\n startObservation,\n LangfuseGeneration,\n LangfuseSpan,\n LangfuseGenerationAttributes,\n LangfuseSpanAttributes,\n} from \"@elasticdash/tracing\";\nimport type { AgentAction, AgentFinish } from \"@langchain/core/agents\";\nimport { BaseCallbackHandler } from \"@langchain/core/callbacks/base\";\nimport type { Document } from \"@langchain/core/documents\";\nimport type { Serialized } from \"@langchain/core/load/serializable\";\nimport {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n type UsageMetadata,\n type BaseMessageFields,\n type MessageContent,\n} from \"@langchain/core/messages\";\nimport type { Generation, LLMResult } from \"@langchain/core/outputs\";\nimport type { ChainValues } from \"@langchain/core/utils/types\";\n\nconst LANGSMITH_HIDDEN_TAG = \"langsmith:hidden\";\n\ntype LangfusePrompt = {\n name: string;\n version: number;\n isFallback: boolean;\n};\n\nexport type LlmMessage = {\n role: string;\n content: BaseMessageFields[\"content\"];\n additional_kwargs?: BaseMessageFields[\"additional_kwargs\"];\n};\n\nexport type AnonymousLlmMessage = {\n content: BaseMessageFields[\"content\"];\n additional_kwargs?: BaseMessageFields[\"additional_kwargs\"];\n};\n\ntype ConstructorParams = {\n userId?: string;\n sessionId?: string;\n tags?: string[];\n version?: string; // added to all traces and observations\n traceMetadata?: Record<string, unknown>; // added to all traces\n};\n\nexport class CallbackHandler extends BaseCallbackHandler {\n name = \"LangfuseCallbackHandler\";\n\n private userId?: string;\n private version?: string;\n private sessionId?: string;\n private tags: string[];\n private traceMetadata?: Record<string, unknown>;\n\n private completionStartTimes: Record<string, Date> = {};\n private promptToParentRunMap;\n private runMap: Map<string, LangfuseSpan | LangfuseGeneration> = new Map();\n\n public last_trace_id: string | null = null;\n\n constructor(params?: ConstructorParams) {\n super();\n\n this.sessionId = params?.sessionId;\n this.userId = params?.userId;\n this.tags = params?.tags ?? [];\n this.traceMetadata = params?.traceMetadata;\n this.version = params?.version;\n\n this.promptToParentRunMap = new Map<string, LangfusePrompt>();\n }\n\n get logger() {\n return getGlobalLogger();\n }\n\n async handleLLMNewToken(\n token: string,\n _idx: any,\n runId: string,\n _parentRunId?: string,\n _tags?: string[],\n _fields?: any,\n ): Promise<void> {\n // if this is the first token, add it to completionStartTimes\n if (runId && !(runId in this.completionStartTimes)) {\n this.logger.debug(`LLM first streaming token: ${runId}`);\n this.completionStartTimes[runId] = new Date();\n }\n }\n\n async handleChainStart(\n chain: Serialized,\n inputs: ChainValues,\n runId: string,\n parentRunId?: string | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n runType?: string,\n name?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Chain start with Id: ${runId}`);\n\n const runName = name ?? chain.id.at(-1)?.toString() ?? 
\"Langchain Run\";\n\n this.registerLangfusePrompt(parentRunId, metadata);\n\n // In chains, inputs can be a string or an array of BaseMessage\n let finalInput: string | ChainValues = inputs;\n if (\n typeof inputs === \"object\" &&\n \"input\" in inputs &&\n Array.isArray(inputs[\"input\"]) &&\n inputs[\"input\"].every((m: unknown) => m instanceof BaseMessage)\n ) {\n finalInput = inputs[\"input\"].map((m: BaseMessage) =>\n this.extractChatMessageContent(m),\n );\n } else if (\n typeof inputs === \"object\" &&\n \"messages\" in inputs &&\n Array.isArray(inputs[\"messages\"]) &&\n inputs[\"messages\"].every((m: unknown) => m instanceof BaseMessage)\n ) {\n finalInput = inputs[\"messages\"].map((m: BaseMessage) =>\n this.extractChatMessageContent(m),\n );\n } else if (\n typeof inputs === \"object\" &&\n \"content\" in inputs &&\n typeof inputs[\"content\"] === \"string\"\n ) {\n finalInput = inputs[\"content\"];\n }\n\n const span = this.startAndRegisterOtelSpan({\n runName,\n parentRunId,\n runId,\n tags,\n metadata,\n attributes: {\n input: finalInput,\n },\n });\n\n // If there's no parent run, this is a top-level chain execution\n // and we need to store trace attributes on the span\n const traceTags = [...new Set([...(tags ?? []), ...this.tags])];\n\n if (!parentRunId) {\n span.updateTrace({\n tags: traceTags,\n userId:\n metadata &&\n \"langfuseUserId\" in metadata &&\n typeof metadata[\"langfuseUserId\"] === \"string\"\n ? metadata[\"langfuseUserId\"]\n : this.userId,\n sessionId:\n metadata &&\n \"langfuseSessionId\" in metadata &&\n typeof metadata[\"langfuseSessionId\"] === \"string\"\n ? metadata[\"langfuseSessionId\"]\n : this.sessionId,\n metadata: this.traceMetadata,\n version: this.version,\n });\n }\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleAgentAction(\n action: AgentAction,\n runId: string,\n parentRunId?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Agent action ${action.tool} with ID: ${runId}`);\n this.startAndRegisterOtelSpan({\n runId,\n parentRunId,\n runName: action.tool,\n attributes: {\n input: action,\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleAgentEnd?(\n action: AgentFinish,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Agent finish with ID: ${runId}`);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: { output: action },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleChainError(\n err: any,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Chain error: ${err} with ID: ${runId}`);\n\n const azureRefusalError = this.parseAzureRefusalError(err);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n level: \"ERROR\",\n statusMessage: err.toString() + azureRefusalError,\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? 
e.message : String(e));\n }\n }\n\n async handleGenerationStart(\n llm: Serialized,\n messages: (LlmMessage | MessageContent | AnonymousLlmMessage)[],\n runId: string,\n parentRunId?: string | undefined,\n extraParams?: Record<string, unknown> | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n name?: string,\n ): Promise<void> {\n this.logger.debug(\n `Generation start with ID: ${runId} and parentRunId ${parentRunId}`,\n );\n\n const runName = name ?? llm.id.at(-1)?.toString() ?? \"Langchain Generation\";\n\n const modelParameters: Record<string, any> = {};\n const invocationParams = extraParams?.[\"invocation_params\"];\n\n for (const [key, value] of Object.entries({\n temperature: (invocationParams as any)?.temperature,\n max_tokens: (invocationParams as any)?.max_tokens,\n top_p: (invocationParams as any)?.top_p,\n frequency_penalty: (invocationParams as any)?.frequency_penalty,\n presence_penalty: (invocationParams as any)?.presence_penalty,\n request_timeout: (invocationParams as any)?.request_timeout,\n })) {\n if (value !== undefined && value !== null) {\n modelParameters[key] = value;\n }\n }\n\n interface InvocationParams {\n _type?: string;\n model?: string;\n model_name?: string;\n repo_id?: string;\n }\n\n let extractedModelName: string | undefined;\n if (extraParams) {\n const invocationParamsModelName = (\n extraParams.invocation_params as InvocationParams\n ).model;\n const metadataModelName =\n metadata && \"ls_model_name\" in metadata\n ? (metadata[\"ls_model_name\"] as string)\n : undefined;\n\n extractedModelName = invocationParamsModelName ?? metadataModelName;\n }\n\n const registeredPrompt = this.promptToParentRunMap.get(\n parentRunId ?? \"root\",\n );\n if (registeredPrompt && parentRunId) {\n this.deregisterLangfusePrompt(parentRunId);\n }\n\n this.startAndRegisterOtelSpan({\n type: \"generation\",\n runId,\n parentRunId,\n metadata,\n tags,\n runName,\n attributes: {\n input: messages,\n model: extractedModelName,\n modelParameters: modelParameters,\n prompt: registeredPrompt,\n },\n });\n }\n\n async handleChatModelStart(\n llm: Serialized,\n messages: BaseMessage[][],\n runId: string,\n parentRunId?: string | undefined,\n extraParams?: Record<string, unknown> | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n name?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Chat model start with ID: ${runId}`);\n\n const prompts = messages.flatMap((message) =>\n message.map((m) => this.extractChatMessageContent(m)),\n );\n\n this.handleGenerationStart(\n llm,\n prompts,\n runId,\n parentRunId,\n extraParams,\n tags,\n metadata,\n name,\n );\n } catch (e) {\n this.logger.debug(e instanceof Error ? 
e.message : String(e));\n }\n }\n\n async handleChainEnd(\n outputs: ChainValues,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Chain end with ID: ${runId}`);\n\n let finalOutput: ChainValues | string = outputs;\n if (\n typeof outputs === \"object\" &&\n \"output\" in outputs &&\n typeof outputs[\"output\"] === \"string\"\n ) {\n finalOutput = outputs[\"output\"];\n } else if (\n typeof outputs === \"object\" &&\n \"messages\" in outputs &&\n Array.isArray(outputs[\"messages\"]) &&\n outputs[\"messages\"].every((m: unknown) => m instanceof BaseMessage)\n ) {\n finalOutput = {\n messages: outputs.messages.map((message: BaseMessage) =>\n this.extractChatMessageContent(message),\n ),\n };\n }\n\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n output: finalOutput,\n },\n });\n this.deregisterLangfusePrompt(runId);\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleLLMStart(\n llm: Serialized,\n prompts: string[],\n runId: string,\n parentRunId?: string | undefined,\n extraParams?: Record<string, unknown> | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n name?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`LLM start with ID: ${runId}`);\n\n this.handleGenerationStart(\n llm,\n prompts,\n runId,\n parentRunId,\n extraParams,\n tags,\n metadata,\n name,\n );\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleToolStart(\n tool: Serialized,\n input: string,\n runId: string,\n parentRunId?: string | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n name?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Tool start with ID: ${runId}`);\n\n this.startAndRegisterOtelSpan({\n runId,\n parentRunId,\n runName: name ?? tool.id.at(-1)?.toString() ?? \"Tool execution\",\n attributes: {\n input,\n },\n metadata,\n tags,\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleRetrieverStart(\n retriever: Serialized,\n query: string,\n runId: string,\n parentRunId?: string | undefined,\n tags?: string[] | undefined,\n metadata?: Record<string, unknown> | undefined,\n name?: string,\n ): Promise<void> {\n try {\n this.logger.debug(`Retriever start with ID: ${runId}`);\n\n this.startAndRegisterOtelSpan({\n runId,\n parentRunId,\n runName: name ?? retriever.id.at(-1)?.toString() ?? \"Retriever\",\n attributes: {\n input: query,\n },\n tags,\n metadata,\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleRetrieverEnd(\n documents: Document<Record<string, any>>[],\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Retriever end with ID: ${runId}`);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n output: documents,\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? 
e.message : String(e));\n }\n }\n\n async handleRetrieverError(\n err: any,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Retriever error: ${err} with ID: ${runId}`);\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n level: \"ERROR\",\n statusMessage: err.toString(),\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n async handleToolEnd(\n output: string,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Tool end with ID: ${runId}`);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: { output },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleToolError(\n err: any,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`Tool error ${err} with ID: ${runId}`);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n level: \"ERROR\",\n statusMessage: err.toString(),\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleLLMEnd(\n output: LLMResult,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`LLM end with ID: ${runId}`);\n\n const lastResponse =\n output.generations[output.generations.length - 1][\n output.generations[output.generations.length - 1].length - 1\n ];\n const llmUsage =\n this.extractUsageMetadata(lastResponse) ??\n output.llmOutput?.[\"tokenUsage\"];\n const modelName = this.extractModelNameFromMetadata(lastResponse);\n\n const usageDetails: Record<string, any> = {\n input:\n llmUsage?.input_tokens ??\n (\"promptTokens\" in llmUsage ? llmUsage?.promptTokens : undefined),\n output:\n llmUsage?.output_tokens ??\n (\"completionTokens\" in llmUsage\n ? llmUsage?.completionTokens\n : undefined),\n total:\n llmUsage?.total_tokens ??\n (\"totalTokens\" in llmUsage ? llmUsage?.totalTokens : undefined),\n };\n\n if (llmUsage && \"input_token_details\" in llmUsage) {\n for (const [key, val] of Object.entries(\n llmUsage[\"input_token_details\"] ?? {},\n )) {\n usageDetails[`input_${key}`] = val;\n\n if (\"input\" in usageDetails && typeof val === \"number\") {\n usageDetails[\"input\"] = Math.max(0, usageDetails[\"input\"] - val);\n }\n }\n }\n\n if (llmUsage && \"output_token_details\" in llmUsage) {\n for (const [key, val] of Object.entries(\n llmUsage[\"output_token_details\"] ?? {},\n )) {\n usageDetails[`output_${key}`] = val;\n\n if (\"output\" in usageDetails && typeof val === \"number\") {\n usageDetails[\"output\"] = Math.max(0, usageDetails[\"output\"] - val);\n }\n }\n }\n\n const extractedOutput =\n \"message\" in lastResponse\n ? this.extractChatMessageContent(\n lastResponse[\"message\"] as BaseMessage,\n )\n : lastResponse.text;\n\n this.handleOtelSpanEnd({\n runId,\n type: \"generation\",\n attributes: {\n model: modelName,\n output: extractedOutput,\n completionStartTime:\n runId in this.completionStartTimes\n ? 
this.completionStartTimes[runId]\n : undefined,\n usageDetails: usageDetails,\n },\n });\n\n if (runId in this.completionStartTimes) {\n delete this.completionStartTimes[runId];\n }\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n async handleLLMError(\n err: any,\n runId: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n parentRunId?: string | undefined,\n ): Promise<void> {\n try {\n this.logger.debug(`LLM error ${err} with ID: ${runId}`);\n\n // Azure has the refusal status for harmful messages in the error property\n // This would not be logged as the error message is only a generic message\n // that there has been a refusal\n const azureRefusalError = this.parseAzureRefusalError(err);\n\n this.handleOtelSpanEnd({\n runId,\n attributes: {\n level: \"ERROR\",\n statusMessage: err.toString() + azureRefusalError,\n },\n });\n } catch (e) {\n this.logger.debug(e instanceof Error ? e.message : String(e));\n }\n }\n\n private registerLangfusePrompt(\n parentRunId?: string,\n metadata?: Record<string, unknown>,\n ): void {\n /*\n Register a prompt for linking to a generation with the same parentRunId.\n\n `parentRunId` must exist when we want to do any prompt linking to a generation. If it does not exist, it means the execution is solely a Prompt template formatting without any following LLM invocation, so no generation will be created to link to.\n For the simplest chain, a parent run is always created to wrap the individual runs consisting of prompt template formatting and LLM invocation.\n So, we do not need to register any prompt for linking if parentRunId is missing.\n */\n if (metadata && \"langfusePrompt\" in metadata && parentRunId) {\n this.promptToParentRunMap.set(\n parentRunId,\n metadata.langfusePrompt as LangfusePrompt,\n );\n }\n }\n\n private deregisterLangfusePrompt(runId: string): void {\n this.promptToParentRunMap.delete(runId);\n }\n\n private startAndRegisterOtelSpan(params: {\n type?: \"span\";\n runName: string;\n runId: string;\n parentRunId?: string;\n attributes: LangfuseGenerationAttributes;\n metadata?: Record<string, unknown>;\n tags?: string[];\n }): LangfuseSpan;\n private startAndRegisterOtelSpan(params: {\n type: \"generation\";\n runName: string;\n runId: string;\n parentRunId?: string;\n attributes: LangfuseGenerationAttributes;\n metadata?: Record<string, unknown>;\n tags?: string[];\n }): LangfuseGeneration;\n private startAndRegisterOtelSpan(params: {\n type?: \"span\" | \"generation\";\n runName: string;\n runId: string;\n parentRunId?: string;\n attributes: LangfuseGenerationAttributes;\n metadata?: Record<string, unknown>;\n tags?: string[];\n }): LangfuseSpan | LangfuseGeneration {\n const { type, runName, runId, parentRunId, attributes, metadata, tags } =\n params;\n\n const observation =\n type === \"generation\"\n ? startObservation(\n runName,\n {\n version: this.version,\n metadata: this.joinTagsAndMetaData(tags, metadata),\n level:\n tags && tags.includes(LANGSMITH_HIDDEN_TAG)\n ? \"DEBUG\"\n : undefined,\n ...attributes,\n },\n {\n asType: \"generation\",\n parentSpanContext: parentRunId\n ? this.runMap.get(parentRunId)?.otelSpan.spanContext()\n : undefined,\n },\n )\n : startObservation(\n runName,\n {\n version: this.version,\n metadata: this.joinTagsAndMetaData(tags, metadata),\n level:\n tags && tags.includes(LANGSMITH_HIDDEN_TAG)\n ? \"DEBUG\"\n : undefined,\n ...attributes,\n },\n {\n parentSpanContext: parentRunId\n ? 
this.runMap.get(parentRunId)?.otelSpan.spanContext()\n : undefined,\n },\n );\n this.runMap.set(runId, observation);\n\n return observation;\n }\n\n private handleOtelSpanEnd(params: {\n runId: string;\n attributes?: LangfuseSpanAttributes;\n type?: \"span\";\n }): void;\n private handleOtelSpanEnd(params: {\n runId: string;\n attributes?: LangfuseGenerationAttributes;\n type: \"generation\";\n }): void;\n private handleOtelSpanEnd(params: {\n runId: string;\n attributes?: LangfuseGenerationAttributes | LangfuseSpanAttributes;\n type?: \"span\" | \"generation\";\n }) {\n const { runId, attributes = {} } = params;\n\n const span = this.runMap.get(runId);\n if (!span) {\n this.logger.warn(\"Span not found in runMap. Skipping operation\");\n\n return;\n }\n\n span.update(attributes).end();\n\n this.last_trace_id = span.traceId;\n this.runMap.delete(runId);\n }\n private parseAzureRefusalError(err: any): string {\n // Azure has the refusal status for harmful messages in the error property\n // This would not be logged as the error message is only a generic message\n // that there has been a refusal\n let azureRefusalError = \"\";\n if (typeof err == \"object\" && \"error\" in err) {\n try {\n azureRefusalError =\n \"\\n\\nError details:\\n\" + JSON.stringify(err[\"error\"], null, 2);\n } catch {}\n }\n\n return azureRefusalError;\n }\n\n private joinTagsAndMetaData(\n tags?: string[] | undefined,\n metadata1?: Record<string, unknown> | undefined,\n metadata2?: Record<string, unknown> | undefined,\n ): Record<string, unknown> | undefined {\n const finalDict: Record<string, unknown> = {};\n if (tags && tags.length > 0) {\n finalDict.tags = tags;\n }\n if (metadata1) {\n Object.assign(finalDict, metadata1);\n }\n if (metadata2) {\n Object.assign(finalDict, metadata2);\n }\n return this.stripLangfuseKeysFromMetadata(finalDict);\n }\n\n private stripLangfuseKeysFromMetadata(\n metadata?: Record<string, unknown>,\n ): Record<string, unknown> | undefined {\n if (!metadata) {\n return;\n }\n\n const langfuseKeys = [\n \"langfusePrompt\",\n \"langfuseUserId\",\n \"langfuseSessionId\",\n ];\n\n return Object.fromEntries(\n Object.entries(metadata).filter(\n ([key, _]) => !langfuseKeys.includes(key),\n ),\n );\n }\n\n /** Not all models supports tokenUsage in llmOutput, can use AIMessage.usage_metadata instead */\n private extractUsageMetadata(\n generation: Generation,\n ): UsageMetadata | undefined {\n try {\n const usageMetadata =\n \"message\" in generation &&\n (generation[\"message\"] instanceof AIMessage ||\n generation[\"message\"] instanceof AIMessageChunk)\n ? generation[\"message\"].usage_metadata\n : undefined;\n\n return usageMetadata;\n } catch (err) {\n this.logger.debug(`Error extracting usage metadata: ${err}`);\n\n return;\n }\n }\n\n private extractModelNameFromMetadata(generation: any): string | undefined {\n try {\n return \"message\" in generation &&\n (generation[\"message\"] instanceof AIMessage ||\n generation[\"message\"] instanceof AIMessageChunk)\n ? 
generation[\"message\"].response_metadata.model_name\n : undefined;\n } catch {}\n }\n\n private extractChatMessageContent(\n message: BaseMessage,\n ): LlmMessage | AnonymousLlmMessage | MessageContent {\n let response = undefined;\n\n if (message.getType() === \"human\") {\n response = { content: message.content, role: \"user\" };\n } else if (message.getType() === \"generic\") {\n response = {\n content: message.content,\n role: \"human\",\n };\n } else if (message.getType() === \"ai\") {\n response = { content: message.content, role: \"assistant\" };\n\n if (\n \"tool_calls\" in message &&\n Array.isArray(message.tool_calls) &&\n (message.tool_calls?.length ?? 0) > 0\n ) {\n (response as any)[\"tool_calls\"] = message[\"tool_calls\"];\n }\n if (\n \"additional_kwargs\" in message &&\n \"tool_calls\" in message[\"additional_kwargs\"]\n ) {\n (response as any)[\"tool_calls\"] =\n message[\"additional_kwargs\"][\"tool_calls\"];\n }\n } else if (message.getType() === \"system\") {\n response = { content: message.content, role: \"system\" };\n } else if (message.getType() === \"function\") {\n response = {\n content: message.content,\n additional_kwargs: message.additional_kwargs,\n role: message.name,\n };\n } else if (message.getType() === \"tool\") {\n response = {\n content: message.content,\n additional_kwargs: message.additional_kwargs,\n role: message.name,\n };\n } else if (!message.name) {\n response = { content: message.content };\n } else {\n response = {\n role: message.name,\n content: message.content,\n };\n }\n\n if (\n (message.additional_kwargs.function_call ||\n message.additional_kwargs.tool_calls) &&\n (response as any)[\"tool_calls\"] === undefined\n ) {\n return { ...response, additional_kwargs: message.additional_kwargs };\n }\n\n return response;\n 
}\n}\n"],"mappings":";AAAA,SAAS,uBAAuB;AAChC;AAAA,EACE;AAAA,OAKK;AAEP,SAAS,2BAA2B;AAGpC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAIK;AAIP,IAAM,uBAAuB;AA2BtB,IAAM,kBAAN,cAA8B,oBAAoB;AAAA,EAevD,YAAY,QAA4B;AAjE1C;AAkEI,UAAM;AAfR,gBAAO;AAQP,SAAQ,uBAA6C,CAAC;AAEtD,SAAQ,SAAyD,oBAAI,IAAI;AAEzE,SAAO,gBAA+B;AAKpC,SAAK,YAAY,iCAAQ;AACzB,SAAK,SAAS,iCAAQ;AACtB,SAAK,QAAO,sCAAQ,SAAR,YAAgB,CAAC;AAC7B,SAAK,gBAAgB,iCAAQ;AAC7B,SAAK,UAAU,iCAAQ;AAEvB,SAAK,uBAAuB,oBAAI,IAA4B;AAAA,EAC9D;AAAA,EAEA,IAAI,SAAS;AACX,WAAO,gBAAgB;AAAA,EACzB;AAAA,EAEA,MAAM,kBACJ,OACA,MACA,OACA,cACA,OACA,SACe;AAEf,QAAI,SAAS,EAAE,SAAS,KAAK,uBAAuB;AAClD,WAAK,OAAO,MAAM,8BAA8B,KAAK,EAAE;AACvD,WAAK,qBAAqB,KAAK,IAAI,oBAAI,KAAK;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,MAAM,iBACJ,OACA,QACA,OACA,aACA,MACA,UACA,SACA,MACe;AAzGnB;AA0GI,QAAI;AACF,WAAK,OAAO,MAAM,wBAAwB,KAAK,EAAE;AAEjD,YAAM,WAAU,4BAAQ,WAAM,GAAG,GAAG,EAAE,MAAd,mBAAiB,eAAzB,YAAuC;AAEvD,WAAK,uBAAuB,aAAa,QAAQ;AAGjD,UAAI,aAAmC;AACvC,UACE,OAAO,WAAW,YAClB,WAAW,UACX,MAAM,QAAQ,OAAO,OAAO,CAAC,KAC7B,OAAO,OAAO,EAAE,MAAM,CAAC,MAAe,aAAa,WAAW,GAC9D;AACA,qBAAa,OAAO,OAAO,EAAE;AAAA,UAAI,CAAC,MAChC,KAAK,0BAA0B,CAAC;AAAA,QAClC;AAAA,MACF,WACE,OAAO,WAAW,YAClB,cAAc,UACd,MAAM,QAAQ,OAAO,UAAU,CAAC,KAChC,OAAO,UAAU,EAAE,MAAM,CAAC,MAAe,aAAa,WAAW,GACjE;AACA,qBAAa,OAAO,UAAU,EAAE;AAAA,UAAI,CAAC,MACnC,KAAK,0BAA0B,CAAC;AAAA,QAClC;AAAA,MACF,WACE,OAAO,WAAW,YAClB,aAAa,UACb,OAAO,OAAO,SAAS,MAAM,UAC7B;AACA,qBAAa,OAAO,SAAS;AAAA,MAC/B;AAEA,YAAM,OAAO,KAAK,yBAAyB;AAAA,QACzC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,QACT;AAAA,MACF,CAAC;AAID,YAAM,YAAY,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAI,sBAAQ,CAAC,GAAI,GAAG,KAAK,IAAI,CAAC,CAAC;AAE9D,UAAI,CAAC,aAAa;AAChB,aAAK,YAAY;AAAA,UACf,MAAM;AAAA,UACN,QACE,YACA,oBAAoB,YACpB,OAAO,SAAS,gBAAgB,MAAM,WAClC,SAAS,gBAAgB,IACzB,KAAK;AAAA,UACX,WACE,YACA,uBAAuB,YACvB,OAAO,SAAS,mBAAmB,MAAM,WACrC,SAAS,mBAAmB,IAC5B,KAAK;AAAA,UACX,UAAU,KAAK;AAAA,UACf,SAAS,KAAK;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,kBACJ,QACA,OACA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,gBAAgB,OAAO,IAAI,aAAa,KAAK,EAAE;AACjE,WAAK,yBAAyB;AAAA,QAC5B;AAAA,QACA;AAAA,QACA,SAAS,OAAO;AAAA,QAChB,YAAY;AAAA,UACV,OAAO;AAAA,QACT;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,QACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,yBAAyB,KAAK,EAAE;AAElD,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY,EAAE,QAAQ,OAAO;AAAA,MAC/B,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,iBACJ,KACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,gBAAgB,GAAG,aAAa,KAAK,EAAE;AAEzD,YAAM,oBAAoB,KAAK,uBAAuB,GAAG;AAEzD,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,UACP,eAAe,IAAI,SAAS,IAAI;AAAA,QAClC;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,sBACJ,KACA,UACA,OACA,aACA,aACA,MACA,UACA,MACe;AA1PnB;AA2PI,SAAK,OAAO;AAAA,MACV,6BAA6B,KAAK,oBAAoB,WAAW;AAAA,IACnE;AAEA,UAAM,WAAU,4BAAQ,SAAI,GAAG,GAAG,EAAE,MAAZ,mBAAe,eAAvB,YAAqC;AAErD,UAAM,kBAAuC,CAAC;AAC9C,UAAM,mBAAmB,2CAAc;AAEvC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ;AAAA,MACxC,aAAc,qDAA0B;AAAA,MACxC,YAAa,qDAA0B;AAAA,MACvC,OAAQ,qDAA0B;AAAA,MAClC,mBAAoB,qDAA0B;AAAA,MAC9C,kBAAmB,qDAA0B;AAAA,MAC7C,iBAAkB,qDAA0B;AAAA,IAC9C,CAAC,GAAG;AACF,UAAI,UAAU,UAAa,UAAU,MAAM;AACzC,wBAAgB,GAAG,IAAI;AAAA,MACzB;AAAA,IACF;AASA,QAAI;AACJ,QAAI,aAAa;AACf,YAAM,4BACJ,YAAY,kBACZ;AACF,YAAM,oBACJ,YAAY,mBAAmB,WAC1B,SAAS,eAAe,IACzB;AAEN,2BAAqB,gEAA6B;AAAA,
IACpD;AAEA,UAAM,mBAAmB,KAAK,qBAAqB;AAAA,MACjD,oCAAe;AAAA,IACjB;AACA,QAAI,oBAAoB,aAAa;AACnC,WAAK,yBAAyB,WAAW;AAAA,IAC3C;AAEA,SAAK,yBAAyB;AAAA,MAC5B,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA,QACV,OAAO;AAAA,QACP,OAAO;AAAA,QACP;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,qBACJ,KACA,UACA,OACA,aACA,aACA,MACA,UACA,MACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,6BAA6B,KAAK,EAAE;AAEtD,YAAM,UAAU,SAAS;AAAA,QAAQ,CAAC,YAChC,QAAQ,IAAI,CAAC,MAAM,KAAK,0BAA0B,CAAC,CAAC;AAAA,MACtD;AAEA,WAAK;AAAA,QACH;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,SACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,sBAAsB,KAAK,EAAE;AAE/C,UAAI,cAAoC;AACxC,UACE,OAAO,YAAY,YACnB,YAAY,WACZ,OAAO,QAAQ,QAAQ,MAAM,UAC7B;AACA,sBAAc,QAAQ,QAAQ;AAAA,MAChC,WACE,OAAO,YAAY,YACnB,cAAc,WACd,MAAM,QAAQ,QAAQ,UAAU,CAAC,KACjC,QAAQ,UAAU,EAAE,MAAM,CAAC,MAAe,aAAa,WAAW,GAClE;AACA,sBAAc;AAAA,UACZ,UAAU,QAAQ,SAAS;AAAA,YAAI,CAAC,YAC9B,KAAK,0BAA0B,OAAO;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAEA,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,QAAQ;AAAA,QACV;AAAA,MACF,CAAC;AACD,WAAK,yBAAyB,KAAK;AAAA,IACrC,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,KACA,SACA,OACA,aACA,aACA,MACA,UACA,MACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,sBAAsB,KAAK,EAAE;AAE/C,WAAK;AAAA,QACH;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,gBACJ,MACA,OACA,OACA,aACA,MACA,UACA,MACe;AAzanB;AA0aI,QAAI;AACF,WAAK,OAAO,MAAM,uBAAuB,KAAK,EAAE;AAEhD,WAAK,yBAAyB;AAAA,QAC5B;AAAA,QACA;AAAA,QACA,UAAS,4BAAQ,UAAK,GAAG,GAAG,EAAE,MAAb,mBAAgB,eAAxB,YAAsC;AAAA,QAC/C,YAAY;AAAA,UACV;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,qBACJ,WACA,OACA,OACA,aACA,MACA,UACA,MACe;AApcnB;AAqcI,QAAI;AACF,WAAK,OAAO,MAAM,4BAA4B,KAAK,EAAE;AAErD,WAAK,yBAAyB;AAAA,QAC5B;AAAA,QACA;AAAA,QACA,UAAS,4BAAQ,eAAU,GAAG,GAAG,EAAE,MAAlB,mBAAqB,eAA7B,YAA2C;AAAA,QACpD,YAAY;AAAA,UACV,OAAO;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,mBACJ,WACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,0BAA0B,KAAK,EAAE;AAEnD,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,QAAQ;AAAA,QACV;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,qBACJ,KACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,oBAAoB,GAAG,aAAa,KAAK,EAAE;AAC7D,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,UACP,eAAe,IAAI,SAAS;AAAA,QAC9B;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EACA,MAAM,cACJ,QACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,qBAAqB,KAAK,EAAE;AAE9C,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY,EAAE,OAAO;AAAA,MACvB,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,gBACJ,KACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,cAAc,GAAG,aAAa,KAAK,EAAE;AAEvD,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,UACP,eAAe,IAAI,SAAS;AAAA,QAC9B;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,aACJ,QACA,OAEA,aACe;AA1iBnB;AA2iBI,QAAI;AACF
,WAAK,OAAO,MAAM,oBAAoB,KAAK,EAAE;AAE7C,YAAM,eACJ,OAAO,YAAY,OAAO,YAAY,SAAS,CAAC,EAC9C,OAAO,YAAY,OAAO,YAAY,SAAS,CAAC,EAAE,SAAS,CAC7D;AACF,YAAM,YACJ,UAAK,qBAAqB,YAAY,MAAtC,aACA,YAAO,cAAP,mBAAmB;AACrB,YAAM,YAAY,KAAK,6BAA6B,YAAY;AAEhE,YAAM,eAAoC;AAAA,QACxC,QACE,0CAAU,iBAAV,YACC,kBAAkB,WAAW,qCAAU,eAAe;AAAA,QACzD,SACE,0CAAU,kBAAV,YACC,sBAAsB,WACnB,qCAAU,mBACV;AAAA,QACN,QACE,0CAAU,iBAAV,YACC,iBAAiB,WAAW,qCAAU,cAAc;AAAA,MACzD;AAEA,UAAI,YAAY,yBAAyB,UAAU;AACjD,mBAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAAA,WAC9B,cAAS,qBAAqB,MAA9B,YAAmC,CAAC;AAAA,QACtC,GAAG;AACD,uBAAa,SAAS,GAAG,EAAE,IAAI;AAE/B,cAAI,WAAW,gBAAgB,OAAO,QAAQ,UAAU;AACtD,yBAAa,OAAO,IAAI,KAAK,IAAI,GAAG,aAAa,OAAO,IAAI,GAAG;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAEA,UAAI,YAAY,0BAA0B,UAAU;AAClD,mBAAW,CAAC,KAAK,GAAG,KAAK,OAAO;AAAA,WAC9B,cAAS,sBAAsB,MAA/B,YAAoC,CAAC;AAAA,QACvC,GAAG;AACD,uBAAa,UAAU,GAAG,EAAE,IAAI;AAEhC,cAAI,YAAY,gBAAgB,OAAO,QAAQ,UAAU;AACvD,yBAAa,QAAQ,IAAI,KAAK,IAAI,GAAG,aAAa,QAAQ,IAAI,GAAG;AAAA,UACnE;AAAA,QACF;AAAA,MACF;AAEA,YAAM,kBACJ,aAAa,eACT,KAAK;AAAA,QACH,aAAa,SAAS;AAAA,MACxB,IACA,aAAa;AAEnB,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,MAAM;AAAA,QACN,YAAY;AAAA,UACV,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,qBACE,SAAS,KAAK,uBACV,KAAK,qBAAqB,KAAK,IAC/B;AAAA,UACN;AAAA,QACF;AAAA,MACF,CAAC;AAED,UAAI,SAAS,KAAK,sBAAsB;AACtC,eAAO,KAAK,qBAAqB,KAAK;AAAA,MACxC;AAAA,IACF,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,KACA,OAEA,aACe;AACf,QAAI;AACF,WAAK,OAAO,MAAM,aAAa,GAAG,aAAa,KAAK,EAAE;AAKtD,YAAM,oBAAoB,KAAK,uBAAuB,GAAG;AAEzD,WAAK,kBAAkB;AAAA,QACrB;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,UACP,eAAe,IAAI,SAAS,IAAI;AAAA,QAClC;AAAA,MACF,CAAC;AAAA,IACH,SAAS,GAAG;AACV,WAAK,OAAO,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA,EAEQ,uBACN,aACA,UACM;AAQN,QAAI,YAAY,oBAAoB,YAAY,aAAa;AAC3D,WAAK,qBAAqB;AAAA,QACxB;AAAA,QACA,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,yBAAyB,OAAqB;AACpD,SAAK,qBAAqB,OAAO,KAAK;AAAA,EACxC;AAAA,EAoBQ,yBAAyB,QAQK;AArsBxC;AAssBI,UAAM,EAAE,MAAM,SAAS,OAAO,aAAa,YAAY,UAAU,KAAK,IACpE;AAEF,UAAM,cACJ,SAAS,eACL;AAAA,MACE;AAAA,MACA;AAAA,QACE,SAAS,KAAK;AAAA,QACd,UAAU,KAAK,oBAAoB,MAAM,QAAQ;AAAA,QACjD,OACE,QAAQ,KAAK,SAAS,oBAAoB,IACtC,UACA;AAAA,QACN,GAAG;AAAA,MACL;AAAA,MACA;AAAA,QACE,QAAQ;AAAA,QACR,mBAAmB,eACf,UAAK,OAAO,IAAI,WAAW,MAA3B,mBAA8B,SAAS,gBACvC;AAAA,MACN;AAAA,IACF,IACA;AAAA,MACE;AAAA,MACA;AAAA,QACE,SAAS,KAAK;AAAA,QACd,UAAU,KAAK,oBAAoB,MAAM,QAAQ;AAAA,QACjD,OACE,QAAQ,KAAK,SAAS,oBAAoB,IACtC,UACA;AAAA,QACN,GAAG;AAAA,MACL;AAAA,MACA;AAAA,QACE,mBAAmB,eACf,UAAK,OAAO,IAAI,WAAW,MAA3B,mBAA8B,SAAS,gBACvC;AAAA,MACN;AAAA,IACF;AACN,SAAK,OAAO,IAAI,OAAO,WAAW;AAElC,WAAO;AAAA,EACT;AAAA,EAYQ,kBAAkB,QAIvB;AACD,UAAM,EAAE,OAAO,aAAa,CAAC,EAAE,IAAI;AAEnC,UAAM,OAAO,KAAK,OAAO,IAAI,KAAK;AAClC,QAAI,CAAC,MAAM;AACT,WAAK,OAAO,KAAK,8CAA8C;AAE/D;AAAA,IACF;AAEA,SAAK,OAAO,UAAU,EAAE,IAAI;AAE5B,SAAK,gBAAgB,KAAK;AAC1B,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA,EACQ,uBAAuB,KAAkB;AAI/C,QAAI,oBAAoB;AACxB,QAAI,OAAO,OAAO,YAAY,WAAW,KAAK;AAC5C,UAAI;AACF,4BACE,yBAAyB,KAAK,UAAU,IAAI,OAAO,GAAG,MAAM,CAAC;AAAA,MACjE,QAAQ;AAAA,MAAC;AAAA,IACX;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,oBACN,MACA,WACA,WACqC;AACrC,UAAM,YAAqC,CAAC;AAC5C,QAAI,QAAQ,KAAK,SAAS,GAAG;AAC3B,gBAAU,OAAO;AAAA,IACnB;AACA,QAAI,WAAW;AACb,aAAO,OAAO,WAAW,SAAS;AAAA,IACpC;AACA,QAAI,WAAW;AACb,aAAO,OAAO,WAAW,SAAS;AAAA,IACpC;AACA,WAAO,KAAK,8BAA8B,SAAS;AAAA,EACrD;AAAA,EAEQ,8BACN,UACqC;AACrC,QAAI,CAAC,UAAU;AACb;AAAA,IACF;AAEA,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,WAAO,OAAO;AAAA,MACZ,OAAO,QAAQ,QAAQ,EAAE;AAAA,QACvB,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,aAAa,SAAS,GAAG;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AAAA;AA
AA,EAGQ,qBACN,YAC2B;AAC3B,QAAI;AACF,YAAM,gBACJ,aAAa,eACZ,WAAW,SAAS,aAAa,aAChC,WAAW,SAAS,aAAa,kBAC/B,WAAW,SAAS,EAAE,iBACtB;AAEN,aAAO;AAAA,IACT,SAAS,KAAK;AACZ,WAAK,OAAO,MAAM,oCAAoC,GAAG,EAAE;AAE3D;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,6BAA6B,YAAqC;AACxE,QAAI;AACF,aAAO,aAAa,eACjB,WAAW,SAAS,aAAa,aAChC,WAAW,SAAS,aAAa,kBACjC,WAAW,SAAS,EAAE,kBAAkB,aACxC;AAAA,IACN,QAAQ;AAAA,IAAC;AAAA,EACX;AAAA,EAEQ,0BACN,SACmD;AAr2BvD;AAs2BI,QAAI,WAAW;AAEf,QAAI,QAAQ,QAAQ,MAAM,SAAS;AACjC,iBAAW,EAAE,SAAS,QAAQ,SAAS,MAAM,OAAO;AAAA,IACtD,WAAW,QAAQ,QAAQ,MAAM,WAAW;AAC1C,iBAAW;AAAA,QACT,SAAS,QAAQ;AAAA,QACjB,MAAM;AAAA,MACR;AAAA,IACF,WAAW,QAAQ,QAAQ,MAAM,MAAM;AACrC,iBAAW,EAAE,SAAS,QAAQ,SAAS,MAAM,YAAY;AAEzD,UACE,gBAAgB,WAChB,MAAM,QAAQ,QAAQ,UAAU,OAC/B,mBAAQ,eAAR,mBAAoB,WAApB,YAA8B,KAAK,GACpC;AACA,QAAC,SAAiB,YAAY,IAAI,QAAQ,YAAY;AAAA,MACxD;AACA,UACE,uBAAuB,WACvB,gBAAgB,QAAQ,mBAAmB,GAC3C;AACA,QAAC,SAAiB,YAAY,IAC5B,QAAQ,mBAAmB,EAAE,YAAY;AAAA,MAC7C;AAAA,IACF,WAAW,QAAQ,QAAQ,MAAM,UAAU;AACzC,iBAAW,EAAE,SAAS,QAAQ,SAAS,MAAM,SAAS;AAAA,IACxD,WAAW,QAAQ,QAAQ,MAAM,YAAY;AAC3C,iBAAW;AAAA,QACT,SAAS,QAAQ;AAAA,QACjB,mBAAmB,QAAQ;AAAA,QAC3B,MAAM,QAAQ;AAAA,MAChB;AAAA,IACF,WAAW,QAAQ,QAAQ,MAAM,QAAQ;AACvC,iBAAW;AAAA,QACT,SAAS,QAAQ;AAAA,QACjB,mBAAmB,QAAQ;AAAA,QAC3B,MAAM,QAAQ;AAAA,MAChB;AAAA,IACF,WAAW,CAAC,QAAQ,MAAM;AACxB,iBAAW,EAAE,SAAS,QAAQ,QAAQ;AAAA,IACxC,OAAO;AACL,iBAAW;AAAA,QACT,MAAM,QAAQ;AAAA,QACd,SAAS,QAAQ;AAAA,MACnB;AAAA,IACF;AAEA,SACG,QAAQ,kBAAkB,iBACzB,QAAQ,kBAAkB,eAC3B,SAAiB,YAAY,MAAM,QACpC;AACA,aAAO,EAAE,GAAG,UAAU,mBAAmB,QAAQ,kBAAkB;AAAA,IACrE;AAEA,WAAO;AAAA,EACT;AACF;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@elasticdash/langchain",
- "version": "0.0.1",
+ "version": "0.0.2",
  "description": "Langfuse integration for LangChain",
  "type": "module",
  "sideEffects": false,
@@ -14,14 +14,6 @@
  "require": "./dist/index.cjs"
  }
  },
- "scripts": {
- "build": "tsup",
- "test": "vitest run",
- "test:watch": "vitest",
- "format": "prettier --write \"src/**/*.ts\"",
- "format:check": "prettier --check \"src/**/*.ts\"",
- "clean": "rm -rf dist"
- },
  "author": "Langfuse",
  "license": "MIT",
  "repository": {
@@ -33,11 +25,19 @@
  "dist"
  ],
  "dependencies": {
- "@elasticdash/core": "workspace:^",
- "@elasticdash/tracing": "workspace:^"
+ "@elasticdash/core": "^0.0.2",
+ "@elasticdash/tracing": "^0.0.2"
  },
  "peerDependencies": {
  "@langchain/core": ">=0.3.8",
  "@opentelemetry/api": "^1.9.0"
+ },
+ "scripts": {
+ "build": "tsup",
+ "test": "vitest run",
+ "test:watch": "vitest",
+ "format": "prettier --write \"src/**/*.ts\"",
+ "format:check": "prettier --check \"src/**/*.ts\"",
+ "clean": "rm -rf dist"
  }
- }
+ }
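
For context on the API added in this release, below is a minimal usage sketch of the exported CallbackHandler. It is illustrative only: the ChatOpenAI model, the prompt, and prior initialization of the @elasticdash tracing/export setup are assumptions, not part of this package.

import { CallbackHandler } from "@elasticdash/langchain";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { ChatOpenAI } from "@langchain/openai"; // hypothetical model provider, not a dependency of this package

// Constructor params mirror the ConstructorParams type in dist/index.mjs above.
const handler = new CallbackHandler({
  sessionId: "session-123",        // stored on the trace of top-level chain runs
  userId: "user-456",              // overridden by metadata.langfuseUserId when present
  tags: ["example"],               // merged with per-run tags on the trace
  version: "my-app@1.0.0",         // attached to every observation via startObservation
  traceMetadata: { env: "dev" },   // attached to the trace of top-level chain runs
});

// The handler extends BaseCallbackHandler, so it can be passed as a standard
// LangChain callback via RunnableConfig.
const model = new ChatOpenAI({ model: "gpt-4o-mini" }); // assumed model choice
const chain = ChatPromptTemplate.fromTemplate("Say hello to {name}").pipe(model);

const result = await chain.invoke({ name: "Ada" }, { callbacks: [handler] });
console.log(handler.last_trace_id); // trace id of the last span/generation the handler ended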