@traccia2/sdk 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,810 @@
1
+ import type { AgentAction, AgentFinish } from "@langchain/core/agents";
2
+ import { BaseCallbackHandler } from "@langchain/core/callbacks/base";
3
+ import type { Document } from "@langchain/core/documents";
4
+ import type { Serialized } from "@langchain/core/load/serializable";
5
+ import {
6
+ AIMessage,
7
+ AIMessageChunk,
8
+ BaseMessage,
9
+ type UsageMetadata,
10
+ type BaseMessageFields,
11
+ type MessageContent,
12
+ } from "@langchain/core/messages";
13
+ import type { Generation, LLMResult } from "@langchain/core/outputs";
14
+ import type { ChainValues } from "@langchain/core/utils/types";
15
+ import { getTracer } from '../auto';
16
+ import { ISpan } from '../types';
17
+
18
/**
 * Minimal shape of a Langfuse prompt reference attached to run metadata
 * (read from `metadata.langfusePrompt` in `registerLangfusePrompt`).
 */
type LangfusePrompt = {
  name: string;
  version: number;
  isFallback: boolean;
};
23
+
24
/**
 * A chat message with an explicit role, as produced by
 * `extractChatMessageContent` for typed messages (user/assistant/system/…).
 */
export type LlmMessage = {
  role: string;
  content: BaseMessageFields["content"];
  additional_kwargs?: BaseMessageFields["additional_kwargs"];
};
29
+
30
/**
 * A chat message without a role, used when the source `BaseMessage`
 * has no recognized type and no `name`.
 */
export type AnonymousLlmMessage = {
  content: BaseMessageFields["content"];
  additional_kwargs?: BaseMessageFields["additional_kwargs"];
};
34
+
35
/**
 * Options accepted by the handler constructor.
 *
 * NOTE(review): the constructor currently consumes only `tags`;
 * `userId`, `sessionId`, `version` and `traceMetadata` are accepted
 * but not yet applied anywhere in this handler — confirm intent.
 */
type ConstructorParams = {
  userId?: string;
  sessionId?: string;
  tags?: string[];
  version?: string; // added to all traces and observations
  traceMetadata?: Record<string, unknown>; // added to all traces
};
42
+
43
+ export class TracciaCallbackHandlerNew extends BaseCallbackHandler {
44
+ name = "TracciaCallbackHandler";
45
+ private tracer = getTracer('langchain');
46
+
47
+ private tags: string[] | undefined = [];
48
+
49
+ private completionStartTimes: Record<string, Date> = {};
50
+ private promptToParentRunMap;
51
+ private runMap: Map<string, ISpan> = new Map();
52
+
53
+ public last_trace_id: string | null = null;
54
+
55
+ constructor(params?: ConstructorParams) {
56
+ super();
57
+
58
+ this.tags = params?.tags ?? [];
59
+
60
+ this.promptToParentRunMap = new Map<string, LangfusePrompt>();
61
+ }
62
+
63
+ async handleLLMNewToken(
64
+ _token: string,
65
+ _idx: any,
66
+ runId: string,
67
+ _parentRunId?: string,
68
+ _tags?: string[],
69
+ _fields?: any,
70
+ ): Promise<void> {
71
+ // if this is the first token, add it to completionStartTimes
72
+ if (runId && !(runId in this.completionStartTimes)) {
73
+ console.debug(`LLM first streaming token: ${runId}`);
74
+ this.completionStartTimes[runId] = new Date();
75
+ }
76
+ }
77
+
78
+ async handleChainStart(
79
+ chain: Serialized,
80
+ inputs: ChainValues,
81
+ runId: string,
82
+ parentRunId?: string | undefined,
83
+ tags?: string[] | undefined,
84
+ metadata?: Record<string, unknown> | undefined,
85
+ _runType?: string,
86
+ name?: string,
87
+ ): Promise<void> {
88
+ try {
89
+ console.debug(`Chain start with Id: ${runId}`);
90
+ this.tags;
91
+
92
+ const runName = name ?? chain.id.at(-1)?.toString() ?? "Langchain Run";
93
+
94
+ this.registerLangfusePrompt(parentRunId, metadata);
95
+
96
+ // In chains, inputs can be a string or an array of BaseMessage
97
+ let finalInput: string | ChainValues = inputs;
98
+ if (
99
+ typeof inputs === "object" &&
100
+ "input" in inputs &&
101
+ Array.isArray(inputs["input"]) &&
102
+ inputs["input"].every((m: unknown) => m instanceof BaseMessage)
103
+ ) {
104
+ finalInput = inputs["input"].map((m: BaseMessage) =>
105
+ this.extractChatMessageContent(m),
106
+ );
107
+ } else if (
108
+ typeof inputs === "object" &&
109
+ "messages" in inputs &&
110
+ Array.isArray(inputs["messages"]) &&
111
+ inputs["messages"].every((m: unknown) => m instanceof BaseMessage)
112
+ ) {
113
+ finalInput = inputs["messages"].map((m: BaseMessage) =>
114
+ this.extractChatMessageContent(m),
115
+ );
116
+ } else if (
117
+ typeof inputs === "object" &&
118
+ "content" in inputs &&
119
+ typeof inputs["content"] === "string"
120
+ ) {
121
+ finalInput = inputs["content"];
122
+ }
123
+
124
+ this.startAndRegisterSpan({
125
+ runName,
126
+ parentRunId,
127
+ runId,
128
+ tags,
129
+ metadata,
130
+ attributes: {
131
+ input: finalInput,
132
+ },
133
+ });
134
+
135
+ if (!parentRunId) {
136
+ // Here we would update the trace, but traccia sdk doesn't support it yet
137
+ }
138
+ } catch (e) {
139
+ console.debug(e instanceof Error ? e.message : String(e));
140
+ }
141
+ }
142
+
143
+ async handleAgentAction(
144
+ action: AgentAction,
145
+ runId: string,
146
+ parentRunId?: string,
147
+ ): Promise<void> {
148
+ try {
149
+ console.debug(`Agent action ${action.tool} with ID: ${runId}`);
150
+ this.startAndRegisterSpan({
151
+ runId,
152
+ parentRunId,
153
+ runName: action.tool,
154
+ attributes: {
155
+ input: action,
156
+ },
157
+ });
158
+ } catch (e) {
159
+ console.debug(e instanceof Error ? e.message : String(e));
160
+ }
161
+ }
162
+
163
+ async handleAgentEnd?(
164
+ action: AgentFinish,
165
+ runId: string,
166
+ _parentRunId?: string,
167
+ ): Promise<void> {
168
+ try {
169
+ console.debug(`Agent finish with ID: ${runId}`);
170
+
171
+ this.handleSpanEnd({
172
+ runId,
173
+ attributes: { output: action },
174
+ });
175
+ } catch (e) {
176
+ console.debug(e instanceof Error ? e.message : String(e));
177
+ }
178
+ }
179
+
180
+ async handleChainError(
181
+ err: any,
182
+ runId: string,
183
+ _parentRunId?: string | undefined,
184
+ ): Promise<void> {
185
+ try {
186
+ console.debug(`Chain error: ${err} with ID: ${runId}`);
187
+
188
+ const azureRefusalError = this.parseAzureRefusalError(err);
189
+
190
+ this.handleSpanEnd({
191
+ runId,
192
+ attributes: {
193
+ level: "ERROR",
194
+ statusMessage: err.toString() + azureRefusalError,
195
+ },
196
+ });
197
+ } catch (e) {
198
+ console.debug(e instanceof Error ? e.message : String(e));
199
+ }
200
+ }
201
+
202
+ async handleGenerationStart(
203
+ llm: Serialized,
204
+ messages: (LlmMessage | MessageContent | AnonymousLlmMessage)[],
205
+ runId: string,
206
+ parentRunId?: string | undefined,
207
+ extraParams?: Record<string, unknown> | undefined,
208
+ tags?: string[] | undefined,
209
+ metadata?: Record<string, unknown> | undefined,
210
+ name?: string,
211
+ ): Promise<void> {
212
+ console.debug(
213
+ `Generation start with ID: ${runId} and parentRunId ${parentRunId}`,
214
+ );
215
+
216
+ const runName = name ?? llm.id.at(-1)?.toString() ?? "Langchain Generation";
217
+
218
+ const modelParameters: Record<string, any> = {};
219
+ const invocationParams = extraParams?.["invocation_params"];
220
+
221
+ for (const [key, value] of Object.entries({
222
+ temperature: (invocationParams as any)?.temperature,
223
+ max_tokens: (invocationParams as any)?.max_tokens,
224
+ top_p: (invocationParams as any)?.top_p,
225
+ frequency_penalty: (invocationParams as any)?.frequency_penalty,
226
+ presence_penalty: (invocationParams as any)?.presence_penalty,
227
+ request_timeout: (invocationParams as any)?.request_timeout,
228
+ })) {
229
+ if (value !== undefined && value !== null) {
230
+ modelParameters[key] = value;
231
+ }
232
+ }
233
+
234
+ interface InvocationParams {
235
+ _type?: string;
236
+ model?: string;
237
+ model_name?: string;
238
+ repo_id?: string;
239
+ }
240
+
241
+ let extractedModelName: string | undefined;
242
+ if (extraParams) {
243
+ const invocationParamsModelName = (
244
+ extraParams.invocation_params as InvocationParams
245
+ ).model;
246
+ const metadataModelName =
247
+ metadata && "ls_model_name" in metadata
248
+ ? (metadata["ls_model_name"] as string)
249
+ : undefined;
250
+
251
+ extractedModelName = invocationParamsModelName ?? metadataModelName;
252
+ }
253
+
254
+ const registeredPrompt = this.promptToParentRunMap.get(
255
+ parentRunId ?? "root",
256
+ );
257
+ if (registeredPrompt && parentRunId) {
258
+ this.deregisterLangfusePrompt(parentRunId);
259
+ }
260
+
261
+ this.startAndRegisterSpan({
262
+ runId,
263
+ parentRunId,
264
+ metadata,
265
+ tags,
266
+ runName,
267
+ attributes: {
268
+ input: messages,
269
+ model: extractedModelName,
270
+ modelParameters: modelParameters,
271
+ prompt: registeredPrompt,
272
+ },
273
+ });
274
+ }
275
+
276
+ async handleChatModelStart(
277
+ llm: Serialized,
278
+ messages: BaseMessage[][],
279
+ runId: string,
280
+ parentRunId?: string | undefined,
281
+ extraParams?: Record<string, unknown> | undefined,
282
+ tags?: string[] | undefined,
283
+ metadata?: Record<string, unknown> | undefined,
284
+ name?: string,
285
+ ): Promise<void> {
286
+ try {
287
+ console.debug(`Chat model start with ID: ${runId}`);
288
+
289
+ const prompts = messages.flatMap((message) =>
290
+ message.map((m) => this.extractChatMessageContent(m)),
291
+ );
292
+
293
+ this.handleGenerationStart(
294
+ llm,
295
+ prompts,
296
+ runId,
297
+ parentRunId,
298
+ extraParams,
299
+ tags,
300
+ metadata,
301
+ name,
302
+ );
303
+ } catch (e) {
304
+ console.debug(e instanceof Error ? e.message : String(e));
305
+ }
306
+ }
307
+
308
+ async handleChainEnd(
309
+ outputs: ChainValues,
310
+ runId: string,
311
+ _parentRunId?: string | undefined,
312
+ ): Promise<void> {
313
+ try {
314
+ console.debug(`Chain end with ID: ${runId}`);
315
+
316
+ let finalOutput: ChainValues | string = outputs;
317
+ if (
318
+ typeof outputs === "object" &&
319
+ "output" in outputs &&
320
+ typeof outputs["output"] === "string"
321
+ ) {
322
+ finalOutput = outputs["output"];
323
+ } else if (
324
+ typeof outputs === "object" &&
325
+ "messages" in outputs &&
326
+ Array.isArray(outputs["messages"]) &&
327
+ outputs["messages"].every((m: unknown) => m instanceof BaseMessage)
328
+ ) {
329
+ finalOutput = {
330
+ messages: outputs.messages.map((message: BaseMessage) =>
331
+ this.extractChatMessageContent(message),
332
+ ),
333
+ };
334
+ }
335
+
336
+ this.handleSpanEnd({
337
+ runId,
338
+ attributes: {
339
+ output: finalOutput,
340
+ },
341
+ });
342
+ this.deregisterLangfusePrompt(runId);
343
+ } catch (e) {
344
+ console.debug(e instanceof Error ? e.message : String(e));
345
+ }
346
+ }
347
+
348
+ async handleLLMStart(
349
+ llm: Serialized,
350
+ prompts: string[],
351
+ runId: string,
352
+ parentRunId?: string | undefined,
353
+ extraParams?: Record<string, unknown> | undefined,
354
+ tags?: string[] | undefined,
355
+ metadata?: Record<string, unknown> | undefined,
356
+ name?: string,
357
+ ): Promise<void> {
358
+ try {
359
+ console.debug(`LLM start with ID: ${runId}`);
360
+
361
+ this.handleGenerationStart(
362
+ llm,
363
+ prompts,
364
+ runId,
365
+ parentRunId,
366
+ extraParams,
367
+ tags,
368
+ metadata,
369
+ name,
370
+ );
371
+ } catch (e) {
372
+ console.debug(e instanceof Error ? e.message : String(e));
373
+ }
374
+ }
375
+
376
+ async handleToolStart(
377
+ tool: Serialized,
378
+ input: string,
379
+ runId: string,
380
+ parentRunId?: string | undefined,
381
+ tags?: string[] | undefined,
382
+ metadata?: Record<string, unknown> | undefined,
383
+ name?: string,
384
+ ): Promise<void> {
385
+ try {
386
+ console.debug(`Tool start with ID: ${runId}`);
387
+
388
+ this.startAndRegisterSpan({
389
+ runId,
390
+ parentRunId,
391
+ runName: name ?? tool.id.at(-1)?.toString() ?? "Tool execution",
392
+ attributes: {
393
+ input,
394
+ },
395
+ metadata,
396
+ tags,
397
+ });
398
+ } catch (e) {
399
+ console.debug(e instanceof Error ? e.message : String(e));
400
+ }
401
+ }
402
+
403
+ async handleRetrieverStart(
404
+ retriever: Serialized,
405
+ query: string,
406
+ runId: string,
407
+ parentRunId?: string | undefined,
408
+ tags?: string[] | undefined,
409
+ metadata?: Record<string, unknown> | undefined,
410
+ name?: string,
411
+ ): Promise<void> {
412
+ try {
413
+ console.debug(`Retriever start with ID: ${runId}`);
414
+
415
+ this.startAndRegisterSpan({
416
+ runId,
417
+ parentRunId,
418
+ runName: name ?? retriever.id.at(-1)?.toString() ?? "Retriever",
419
+ attributes: {
420
+ input: query,
421
+ },
422
+ tags,
423
+ metadata,
424
+ });
425
+ } catch (e) {
426
+ console.debug(e instanceof Error ? e.message : String(e));
427
+ }
428
+ }
429
+
430
+ async handleRetrieverEnd(
431
+ documents: Document<Record<string, any>>[],
432
+ runId: string,
433
+ _parentRunId?: string | undefined,
434
+ ): Promise<void> {
435
+ try {
436
+ console.debug(`Retriever end with ID: ${runId}`);
437
+
438
+ this.handleSpanEnd({
439
+ runId,
440
+ attributes: {
441
+ output: documents,
442
+ },
443
+ });
444
+ } catch (e) {
445
+ console.debug(e instanceof Error ? e.message : String(e));
446
+ }
447
+ }
448
+
449
+ async handleRetrieverError(
450
+ err: any,
451
+ runId: string,
452
+ _parentRunId?: string | undefined,
453
+ ): Promise<void> {
454
+ try {
455
+ console.debug(`Retriever error: ${err} with ID: ${runId}`);
456
+ this.handleSpanEnd({
457
+ runId,
458
+ attributes: {
459
+ level: "ERROR",
460
+ statusMessage: err.toString(),
461
+ },
462
+ });
463
+ } catch (e) {
464
+ console.debug(e instanceof Error ? e.message : String(e));
465
+ }
466
+ }
467
+ async handleToolEnd(
468
+ output: string,
469
+ runId: string,
470
+ _parentRunId?: string | undefined,
471
+ ): Promise<void> {
472
+ try {
473
+ console.debug(`Tool end with ID: ${runId}`);
474
+
475
+ this.handleSpanEnd({
476
+ runId,
477
+ attributes: { output },
478
+ });
479
+ } catch (e) {
480
+ console.debug(e instanceof Error ? e.message : String(e));
481
+ }
482
+ }
483
+
484
+ async handleToolError(
485
+ err: any,
486
+ runId: string,
487
+ _parentRunId?: string | undefined,
488
+ ): Promise<void> {
489
+ try {
490
+ console.debug(`Tool error ${err} with ID: ${runId}`);
491
+
492
+ this.handleSpanEnd({
493
+ runId,
494
+ attributes: {
495
+ level: "ERROR",
496
+ statusMessage: err.toString(),
497
+ },
498
+ });
499
+ } catch (e) {
500
+ console.debug(e instanceof Error ? e.message : String(e));
501
+ }
502
+ }
503
+
504
+ async handleLLMEnd(
505
+ output: LLMResult,
506
+ runId: string,
507
+ _parentRunId?: string | undefined,
508
+ ): Promise<void> {
509
+ try {
510
+ console.debug(`LLM end with ID: ${runId}`);
511
+
512
+ const lastResponse =
513
+ output.generations[output.generations.length - 1][
514
+ output.generations[output.generations.length - 1].length - 1
515
+ ];
516
+ const llmUsage =
517
+ this.extractUsageMetadata(lastResponse) ??
518
+ output.llmOutput?.["tokenUsage"];
519
+ const modelName = this.extractModelNameFromMetadata(lastResponse);
520
+
521
+ const usageDetails: Record<string, any> = {
522
+ input:
523
+ llmUsage?.input_tokens ??
524
+ ("promptTokens" in llmUsage ? llmUsage?.promptTokens : undefined),
525
+ output:
526
+ llmUsage?.output_tokens ??
527
+ ("completionTokens" in llmUsage
528
+ ? llmUsage?.completionTokens
529
+ : undefined),
530
+ total:
531
+ llmUsage?.total_tokens ??
532
+ ("totalTokens" in llmUsage ? llmUsage?.totalTokens : undefined),
533
+ };
534
+
535
+ if (llmUsage && "input_token_details" in llmUsage) {
536
+ for (const [key, val] of Object.entries(
537
+ llmUsage["input_token_details"] ?? {},
538
+ )) {
539
+ usageDetails[`input_${key}`] = val;
540
+
541
+ if ("input" in usageDetails && typeof val === "number") {
542
+ usageDetails["input"] = Math.max(0, usageDetails["input"] - val);
543
+ }
544
+ }
545
+ }
546
+
547
+ if (llmUsage && "output_token_details" in llmUsage) {
548
+ for (const [key, val] of Object.entries(
549
+ llmUsage["output_token_details"] ?? {},
550
+ )) {
551
+ usageDetails[`output_${key}`] = val;
552
+
553
+ if ("output" in usageDetails && typeof val === "number") {
554
+ usageDetails["output"] = Math.max(0, usageDetails["output"] - val);
555
+ }
556
+ }
557
+ }
558
+
559
+ const extractedOutput =
560
+ "message" in lastResponse
561
+ ? this.extractChatMessageContent(
562
+ lastResponse["message"] as BaseMessage,
563
+ )
564
+ : lastResponse.text;
565
+
566
+ this.handleSpanEnd({
567
+ runId,
568
+ attributes: {
569
+ model: modelName,
570
+ output: extractedOutput,
571
+ completionStartTime:
572
+ runId in this.completionStartTimes
573
+ ? this.completionStartTimes[runId]
574
+ : undefined,
575
+ usageDetails: usageDetails,
576
+ },
577
+ });
578
+
579
+ if (runId in this.completionStartTimes) {
580
+ delete this.completionStartTimes[runId];
581
+ }
582
+ } catch (e) {
583
+ console.debug(e instanceof Error ? e.message : String(e));
584
+ }
585
+ }
586
+
587
+ async handleLLMError(
588
+ err: any,
589
+ runId: string,
590
+ _parentRunId?: string | undefined,
591
+ ): Promise<void> {
592
+ try {
593
+ console.debug(`LLM error ${err} with ID: ${runId}`);
594
+
595
+ const azureRefusalError = this.parseAzureRefusalError(err);
596
+
597
+ this.handleSpanEnd({
598
+ runId,
599
+ attributes: {
600
+ level: "ERROR",
601
+ statusMessage: err.toString() + azureRefusalError,
602
+ },
603
+ });
604
+ } catch (e) {
605
+ console.debug(e instanceof Error ? e.message : String(e));
606
+ }
607
+ }
608
+
609
+ private registerLangfusePrompt(
610
+ parentRunId?: string,
611
+ metadata?: Record<string, unknown>,
612
+ ): void {
613
+ if (metadata && "langfusePrompt" in metadata && parentRunId) {
614
+ this.promptToParentRunMap.set(
615
+ parentRunId,
616
+ metadata.langfusePrompt as LangfusePrompt,
617
+ );
618
+ }
619
+ }
620
+
621
+ private deregisterLangfusePrompt(runId: string): void {
622
+ this.promptToParentRunMap.delete(runId);
623
+ }
624
+
625
+ private startAndRegisterSpan(params: {
626
+ runName: string;
627
+ runId: string;
628
+ parentRunId?: string;
629
+ attributes: Record<string, any>;
630
+ metadata?: Record<string, unknown>;
631
+ tags?: string[];
632
+ }): ISpan {
633
+ const { runName, runId, attributes, metadata, tags } =
634
+ params;
635
+
636
+ const span = this.tracer.startSpan(runName, {
637
+ attributes: {
638
+ ...attributes,
639
+ ...this.joinTagsAndMetaData(tags, metadata)
640
+ }
641
+ });
642
+ this.runMap.set(runId, span);
643
+
644
+ return span;
645
+ }
646
+
647
+ private handleSpanEnd(params: {
648
+ runId: string;
649
+ attributes?: Record<string, any>;
650
+ }) {
651
+ const { runId, attributes = {} } = params;
652
+
653
+ const span = this.runMap.get(runId);
654
+ if (!span) {
655
+ console.warn("Span not found in runMap. Skipping operation");
656
+
657
+ return;
658
+ }
659
+
660
+ for (const [key, value] of Object.entries(attributes)) {
661
+ span.setAttribute(key, value);
662
+ }
663
+ span.end();
664
+
665
+ this.runMap.delete(runId);
666
+ }
667
+ private parseAzureRefusalError(err: any): string {
668
+ let azureRefusalError = "";
669
+ if (typeof err == "object" && "error" in err) {
670
+ try {
671
+ azureRefusalError =
672
+ "\n\nError details:\n" + JSON.stringify(err["error"], null, 2);
673
+ } catch {}
674
+ }
675
+
676
+ return azureRefusalError;
677
+ }
678
+
679
+ private joinTagsAndMetaData(
680
+ tags?: string[] | undefined,
681
+ metadata1?: Record<string, unknown> | undefined,
682
+ metadata2?: Record<string, unknown> | undefined,
683
+ ): Record<string, unknown> | undefined {
684
+ const finalDict: Record<string, unknown> = {};
685
+ if (tags && tags.length > 0) {
686
+ finalDict.tags = tags;
687
+ }
688
+ if (metadata1) {
689
+ Object.assign(finalDict, metadata1);
690
+ }
691
+ if (metadata2) {
692
+ Object.assign(finalDict, metadata2);
693
+ }
694
+ return this.stripLangfuseKeysFromMetadata(finalDict);
695
+ }
696
+
697
+ private stripLangfuseKeysFromMetadata(
698
+ metadata?: Record<string, unknown>,
699
+ ): Record<string, unknown> | undefined {
700
+ if (!metadata) {
701
+ return;
702
+ }
703
+
704
+ const langfuseKeys = [
705
+ "langfusePrompt",
706
+ "langfuseUserId",
707
+ "langfuseSessionId",
708
+ ];
709
+
710
+ return Object.fromEntries(
711
+ Object.entries(metadata).filter(
712
+ ([key, _]) => !langfuseKeys.includes(key),
713
+ ),
714
+ );
715
+ }
716
+
717
+ private extractUsageMetadata(
718
+ generation: Generation,
719
+ ): UsageMetadata | undefined {
720
+ try {
721
+ const usageMetadata =
722
+ "message" in generation &&
723
+ (generation["message"] instanceof AIMessage ||
724
+ generation["message"] instanceof AIMessageChunk)
725
+ ? generation["message"].usage_metadata
726
+ : undefined;
727
+
728
+ return usageMetadata;
729
+ } catch (err) {
730
+ console.debug(`Error extracting usage metadata: ${err}`);
731
+
732
+ return;
733
+ }
734
+ }
735
+
736
+ private extractModelNameFromMetadata(generation: any): string | undefined {
737
+ try {
738
+ return "message" in generation &&
739
+ (generation["message"] instanceof AIMessage ||
740
+ generation["message"] instanceof AIMessageChunk)
741
+ ? generation["message"].response_metadata.model_name
742
+ : undefined;
743
+ } catch {
744
+ return undefined;
745
+ }
746
+ }
747
+
748
+ private extractChatMessageContent(
749
+ message: BaseMessage,
750
+ ): LlmMessage | AnonymousLlmMessage | MessageContent {
751
+ let response = undefined;
752
+
753
+ if (message.getType() === "human") {
754
+ response = { content: message.content, role: "user" };
755
+ } else if (message.getType() === "generic") {
756
+ response = {
757
+ content: message.content,
758
+ role: "human",
759
+ };
760
+ } else if (message.getType() === "ai") {
761
+ response = { content: message.content, role: "assistant" };
762
+
763
+ if (
764
+ "tool_calls" in message &&
765
+ Array.isArray(message.tool_calls) &&
766
+ (message.tool_calls?.length ?? 0) > 0
767
+ ) {
768
+ (response as any)[`tool_calls`] = message[`tool_calls`];
769
+ }
770
+ if (
771
+ "additional_kwargs" in message &&
772
+ "tool_calls" in message["additional_kwargs"]
773
+ ) {
774
+ (response as any)[`tool_calls`] =
775
+ message["additional_kwargs"]["tool_calls"];
776
+ }
777
+ } else if (message.getType() === "system") {
778
+ response = { content: message.content, role: "system" };
779
+ } else if (message.getType() === "function") {
780
+ response = {
781
+ content: message.content,
782
+ additional_kwargs: message.additional_kwargs,
783
+ role: message.name,
784
+ };
785
+ } else if (message.getType() === "tool") {
786
+ response = {
787
+ content: message.content,
788
+ additional_kwargs: message.additional_kwargs,
789
+ role: message.name,
790
+ };
791
+ } else if (!message.name) {
792
+ response = { content: message.content };
793
+ } else {
794
+ response = {
795
+ role: message.name,
796
+ content: message.content,
797
+ };
798
+ }
799
+
800
+ if (
801
+ (message.additional_kwargs.function_call ||
802
+ message.additional_kwargs.tool_calls) &&
803
+ (response as any)[`tool_calls`] === undefined
804
+ ) {
805
+ return { ...response, additional_kwargs: message.additional_kwargs };
806
+ }
807
+
808
+ return response;
809
+ }
810
+ }