@cloudbase/agent-observability 1.0.1-alpha.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,893 @@
1
+ /**
2
+ * LangChain Callback Handler for AG-Kit Observability
3
+ *
4
+ * Converts LangChain callback events into AG-Kit observations with OpenInference semantics.
5
+ */
6
+
7
+ import type { AgentAction, AgentFinish } from "@langchain/core/agents";
8
+ import { BaseCallbackHandler } from "@langchain/core/callbacks/base";
9
+ import type { Document } from "@langchain/core/documents";
10
+ import type { Serialized } from "@langchain/core/load/serializable";
11
+ import {
12
+ AIMessage,
13
+ AIMessageChunk,
14
+ BaseMessage,
15
+ type UsageMetadata,
16
+ type BaseMessageFields,
17
+ type MessageContent,
18
+ } from "@langchain/core/messages";
19
+ import type { Generation, LLMResult } from "@langchain/core/outputs";
20
+ import type { ChainValues } from "@langchain/core/utils/types";
21
+
22
+ import {
23
+ startObservation,
24
+ type ObservationLLM,
25
+ type ObservationSpan,
26
+ type ObservationTool,
27
+ type Observation,
28
+ type ObservationAttributes,
29
+ } from "../index.js";
30
+ import type { SpanContext } from "@opentelemetry/api";
31
+
32
+ /**
33
+ * Constructor parameters for CallbackHandler.
34
+ *
35
+ * @public
36
+ */
37
+ type ConstructorParams = {
38
+ userId?: string;
39
+ sessionId?: string;
40
+ tags?: string[];
41
+ version?: string;
42
+ traceMetadata?: Record<string, unknown>;
43
+ adapterName?: string; // e.g., "LangGraph" or "LangChain"
44
+ };
45
+
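A minimal usage sketch for these options (the runnable `graph`, its `input`, and the identifiers below are placeholders, not part of this package):

const handler = new CallbackHandler({
  userId: "user-123",        // placeholder identifier
  sessionId: "session-456",  // placeholder identifier
  tags: ["production"],
  adapterName: "LangGraph",  // root spans are renamed to `Adapter.${adapterName}`
});

// The handler travels through LangChain's standard callbacks option.
await graph.invoke(input, { callbacks: [handler] });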
46
+ /**
47
+ * Message format for LLM input/output.
48
+ *
49
+ * @public
50
+ */
51
+ export type LlmMessage = {
52
+ role: string;
53
+ content: BaseMessageFields["content"];
54
+ additional_kwargs?: BaseMessageFields["additional_kwargs"];
55
+ };
56
+
57
+ /**
58
+ * Anonymous message format (without role).
59
+ *
60
+ * @public
61
+ */
62
+ export type AnonymousLlmMessage = {
63
+ content: BaseMessageFields["content"];
64
+ additional_kwargs?: BaseMessageFields["additional_kwargs"];
65
+ };
66
+
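Illustrative literals for the two message shapes (values are made up):

const userMessage: LlmMessage = {
  role: "user",
  content: "What's the weather in Paris?",
};

const roleLessMessage: AnonymousLlmMessage = {
  content: "A message captured without an explicit role",
};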
67
+ /**
68
+ * Prompt information for linking to generations.
69
+ *
70
+ * @public
71
+ */
72
+ type PromptInfo = {
73
+ name: string;
74
+ version: number;
75
+ isFallback: boolean;
76
+ };
77
+
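registerPromptInfo (below) picks this up from run metadata; a hedged sketch of how calling code might supply it (`chain`, `input`, and `handler` are placeholders):

await chain.invoke(input, {
  callbacks: [handler],
  metadata: {
    // Read by registerPromptInfo and removed from the recorded metadata
    // by stripObservabilityKeysFromMetadata.
    promptInfo: { name: "qa-prompt", version: 3, isFallback: false },
  },
});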
78
+ /**
79
+ * LangChain Callback Handler for AG-Kit Observability.
80
+ *
81
+ * This handler intercepts LangChain callbacks and converts them into
82
+ * AG-Kit observations following OpenInference semantic conventions.
83
+ *
84
+ * @public
85
+ */
86
+ export class CallbackHandler extends BaseCallbackHandler {
87
+ name = "ObservabilityCallbackHandler";
88
+
89
+ private userId?: string;
90
+ private version?: string;
91
+ private sessionId?: string;
92
+ private tags: string[];
93
+ private traceMetadata?: Record<string, unknown>;
94
+
95
+ private completionStartTimes: Record<string, Date> = {};
96
+ private promptToParentRunMap: Map<string, PromptInfo>;
97
+ private runMap: Map<string, Observation> = new Map();
98
+
99
+ public last_trace_id: string | null = null;
100
+
101
+ // External parent context from AG-UI.Server span
102
+ private externalParentSpanContext?: SpanContext;
103
+
104
+ // Adapter name for ROOT span prefix
105
+ private adapterName?: string;
106
+
107
+ constructor(params?: ConstructorParams) {
108
+ super();
109
+
110
+ this.sessionId = params?.sessionId;
111
+ this.userId = params?.userId;
112
+ this.tags = params?.tags ?? [];
113
+ this.traceMetadata = params?.traceMetadata;
114
+ this.version = params?.version;
115
+ this.adapterName = params?.adapterName;
116
+
117
+ this.promptToParentRunMap = new Map<string, PromptInfo>();
118
+ }
119
+
120
+ get logger() {
121
+ return console;
122
+ }
123
+
124
+ /**
125
+ * Set external parent SpanContext from AG-UI.Server span.
126
+ * This allows the CallbackHandler to link LangChain/LangGraph spans
127
+ * to the server-level span, creating a unified trace hierarchy.
128
+ *
129
+ * @param spanContext - SpanContext from the AG-UI.Server span
130
+ * @public
131
+ */
132
+ setExternalParentContext(spanContext: SpanContext): void {
133
+ this.externalParentSpanContext = spanContext;
134
+ }
135
+
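For example, a host server could hand the currently active AG-UI.Server span to the handler before running the agent (a sketch; the tracing setup itself lives in the caller):

import { trace } from "@opentelemetry/api";

const serverSpan = trace.getActiveSpan(); // e.g. the AG-UI.Server span
if (serverSpan) {
  handler.setExternalParentContext(serverSpan.spanContext());
}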
136
+ async handleLLMNewToken(
137
+ token: string,
138
+ _idx: any,
139
+ runId: string,
140
+ _parentRunId?: string,
141
+ _tags?: string[],
142
+ _fields?: any
143
+ ): Promise<void> {
144
+ if (runId && !(runId in this.completionStartTimes)) {
145
+ this.logger.debug(`LLM first streaming token: ${runId}`);
146
+ this.completionStartTimes[runId] = new Date();
147
+ }
148
+ }
149
+
150
+ async handleChainStart(
151
+ chain: Serialized,
152
+ inputs: ChainValues,
153
+ runId: string,
154
+ parentRunId?: string | undefined,
155
+ tags?: string[] | undefined,
156
+ metadata?: Record<string, unknown> | undefined,
157
+ runType?: string,
158
+ name?: string
159
+ ): Promise<void> {
160
+ try {
161
+ this.logger.debug(`Chain start with Id: ${runId}`);
162
+
163
+ const runName = name ?? chain.id.at(-1)?.toString() ?? "LangChain Run";
164
+
165
+ this.registerPromptInfo(parentRunId, metadata);
166
+
167
+ let finalInput: string | ChainValues = inputs;
168
+ if (
169
+ typeof inputs === "object" &&
170
+ "input" in inputs &&
171
+ Array.isArray(inputs["input"]) &&
172
+ inputs["input"].every((m: unknown) => m instanceof BaseMessage)
173
+ ) {
174
+ finalInput = inputs["input"].map((m: BaseMessage) =>
175
+ this.extractChatMessageContent(m)
176
+ );
177
+ } else if (
178
+ typeof inputs === "object" &&
179
+ "messages" in inputs &&
180
+ Array.isArray(inputs["messages"]) &&
181
+ inputs["messages"].every((m: unknown) => m instanceof BaseMessage)
182
+ ) {
183
+ finalInput = inputs["messages"].map((m: BaseMessage) =>
184
+ this.extractChatMessageContent(m)
185
+ );
186
+ } else if (
187
+ typeof inputs === "object" &&
188
+ "content" in inputs &&
189
+ typeof inputs["content"] === "string"
190
+ ) {
191
+ finalInput = inputs["content"];
192
+ }
193
+
194
+ const observation = this.startAndRegisterObservation({
195
+ runName,
196
+ parentRunId,
197
+ runId,
198
+ tags,
199
+ metadata,
200
+ attributes: {
201
+ input: finalInput,
202
+ },
203
+ asType: "span",
204
+ });
205
+
206
+ const traceTags = [...new Set([...(tags ?? []), ...this.tags])];
207
+
208
+ if (!parentRunId) {
209
+ observation.updateTrace({
210
+ tags: traceTags,
211
+ userId:
212
+ metadata &&
213
+ "userId" in metadata &&
214
+ typeof metadata["userId"] === "string"
215
+ ? metadata["userId"]
216
+ : this.userId,
217
+ sessionId:
218
+ metadata &&
219
+ "sessionId" in metadata &&
220
+ typeof metadata["sessionId"] === "string"
221
+ ? metadata["sessionId"]
222
+ : this.sessionId,
223
+ metadata: this.traceMetadata,
224
+ version: this.version,
225
+ });
226
+ }
227
+ } catch (e) {
228
+ this.logger.debug(e instanceof Error ? e.message : String(e));
229
+ }
230
+ }
231
+
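The normalization above means chat inputs are recorded in their extracted form rather than as raw BaseMessage instances; for instance (a sketch, with `chain` and `handler` as placeholders):

import { HumanMessage } from "@langchain/core/messages";

await chain.invoke(
  { messages: [new HumanMessage("Hi there")] },
  { callbacks: [handler] }
);
// The chain span's input is recorded as [{ role: "user", content: "Hi there" }].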
232
+ async handleAgentAction(
233
+ action: AgentAction,
234
+ runId: string,
235
+ parentRunId?: string
236
+ ): Promise<void> {
237
+ try {
238
+ this.logger.debug(`Agent action ${action.tool} with ID: ${runId}`);
239
+ this.startAndRegisterObservation({
240
+ runId,
241
+ parentRunId,
242
+ runName: action.tool,
243
+ attributes: {
244
+ input: action,
245
+ },
246
+ asType: "tool",
247
+ });
248
+ } catch (e) {
249
+ this.logger.debug(e instanceof Error ? e.message : String(e));
250
+ }
251
+ }
252
+
253
+ async handleAgentEnd?(
254
+ action: AgentFinish,
255
+ runId: string,
256
+ _parentRunId?: string
257
+ ): Promise<void> {
258
+ try {
259
+ this.logger.debug(`Agent finish with ID: ${runId}`);
260
+ this.handleObservationEnd({
261
+ runId,
262
+ attributes: { output: action },
263
+ });
264
+ } catch (e) {
265
+ this.logger.debug(e instanceof Error ? e.message : String(e));
266
+ }
267
+ }
268
+
269
+ async handleChainError(
270
+ err: any,
271
+ runId: string,
272
+ _parentRunId?: string | undefined
273
+ ): Promise<void> {
274
+ try {
275
+ this.logger.debug(`Chain error: ${err} with ID: ${runId}`);
276
+ this.handleObservationEnd({
277
+ runId,
278
+ attributes: {
279
+ level: "ERROR",
280
+ statusMessage: err.toString(),
281
+ },
282
+ });
283
+ } catch (e) {
284
+ this.logger.debug(e instanceof Error ? e.message : String(e));
285
+ }
286
+ }
287
+
288
+ async handleGenerationStart(
289
+ llm: Serialized,
290
+ messages: (LlmMessage | MessageContent | AnonymousLlmMessage)[],
291
+ runId: string,
292
+ parentRunId?: string | undefined,
293
+ extraParams?: Record<string, unknown> | undefined,
294
+ tags?: string[] | undefined,
295
+ metadata?: Record<string, unknown> | undefined,
296
+ name?: string
297
+ ): Promise<void> {
298
+ this.logger.debug(
299
+ `Generation start with ID: ${runId} and parentRunId ${parentRunId}`
300
+ );
301
+
302
+ const runName = name ?? llm.id.at(-1)?.toString() ?? "LangChain Generation";
303
+
304
+ const modelParameters: Record<string, any> = {};
305
+ const invocationParams = extraParams?.["invocation_params"];
306
+
307
+ for (const [key, value] of Object.entries({
308
+ temperature: (invocationParams as any)?.temperature,
309
+ max_tokens: (invocationParams as any)?.max_tokens,
310
+ top_p: (invocationParams as any)?.top_p,
311
+ frequency_penalty: (invocationParams as any)?.frequency_penalty,
312
+ presence_penalty: (invocationParams as any)?.presence_penalty,
313
+ request_timeout: (invocationParams as any)?.request_timeout,
314
+ })) {
315
+ if (value !== undefined && value !== null) {
316
+ modelParameters[key] = value;
317
+ }
318
+ }
319
+
320
+ interface InvocationParams {
321
+ _type?: string;
322
+ model?: string;
323
+ model_name?: string;
324
+ repo_id?: string;
325
+ }
326
+
327
+ let extractedModelName: string | undefined;
328
+ if (extraParams) {
329
+ const invocationParamsModelName = (
330
+ extraParams.invocation_params as InvocationParams | undefined
331
+ )?.model;
332
+ const metadataModelName =
333
+ metadata && "ls_model_name" in metadata
334
+ ? (metadata["ls_model_name"] as string)
335
+ : undefined;
336
+
337
+ extractedModelName = invocationParamsModelName ?? metadataModelName;
338
+ }
339
+
340
+ const registeredPrompt = this.promptToParentRunMap.get(
341
+ parentRunId ?? "root"
342
+ );
343
+ if (registeredPrompt && parentRunId) {
344
+ this.deregisterPromptInfo(parentRunId);
345
+ }
346
+
347
+ this.startAndRegisterObservation({
348
+ runId,
349
+ parentRunId,
350
+ metadata,
351
+ tags,
352
+ runName,
353
+ attributes: {
354
+ input: messages,
355
+ model: extractedModelName,
356
+ modelParameters: modelParameters,
357
+ },
358
+ asType: "llm",
359
+ });
360
+ }
361
+
362
+ async handleChatModelStart(
363
+ llm: Serialized,
364
+ messages: BaseMessage[][],
365
+ runId: string,
366
+ parentRunId?: string | undefined,
367
+ extraParams?: Record<string, unknown> | undefined,
368
+ tags?: string[] | undefined,
369
+ metadata?: Record<string, unknown> | undefined,
370
+ name?: string
371
+ ): Promise<void> {
372
+ try {
373
+ this.logger.debug(`Chat model start with ID: ${runId}`);
374
+
375
+ const prompts = messages.flatMap((message) =>
376
+ message.map((m) => this.extractChatMessageContent(m))
377
+ );
378
+
379
+ await this.handleGenerationStart(
380
+ llm,
381
+ prompts,
382
+ runId,
383
+ parentRunId,
384
+ extraParams,
385
+ tags,
386
+ metadata,
387
+ name
388
+ );
389
+ } catch (e) {
390
+ this.logger.debug(e instanceof Error ? e.message : String(e));
391
+ }
392
+ }
393
+
394
+ async handleChainEnd(
395
+ outputs: ChainValues,
396
+ runId: string,
397
+ _parentRunId?: string | undefined
398
+ ): Promise<void> {
399
+ try {
400
+ this.logger.debug(`Chain end with ID: ${runId}`);
401
+
402
+ let finalOutput: ChainValues | string = outputs;
403
+ if (
404
+ typeof outputs === "object" &&
405
+ "output" in outputs &&
406
+ typeof outputs["output"] === "string"
407
+ ) {
408
+ finalOutput = outputs["output"];
409
+ } else if (
410
+ typeof outputs === "object" &&
411
+ "messages" in outputs &&
412
+ Array.isArray(outputs["messages"]) &&
413
+ outputs["messages"].every((m: unknown) => m instanceof BaseMessage)
414
+ ) {
415
+ finalOutput = {
416
+ messages: outputs.messages.map((message: BaseMessage) =>
417
+ this.extractChatMessageContent(message)
418
+ ),
419
+ };
420
+ }
421
+
422
+ this.handleObservationEnd({
423
+ runId,
424
+ attributes: {
425
+ output: finalOutput,
426
+ },
427
+ });
428
+ this.deregisterPromptInfo(runId);
429
+ } catch (e) {
430
+ this.logger.debug(e instanceof Error ? e.message : String(e));
431
+ }
432
+ }
433
+
434
+ async handleLLMStart(
435
+ llm: Serialized,
436
+ prompts: string[],
437
+ runId: string,
438
+ parentRunId?: string | undefined,
439
+ extraParams?: Record<string, unknown> | undefined,
440
+ tags?: string[] | undefined,
441
+ metadata?: Record<string, unknown> | undefined,
442
+ name?: string
443
+ ): Promise<void> {
444
+ try {
445
+ this.logger.debug(`LLM start with ID: ${runId}`);
446
+ await this.handleGenerationStart(
447
+ llm,
448
+ prompts,
449
+ runId,
450
+ parentRunId,
451
+ extraParams,
452
+ tags,
453
+ metadata,
454
+ name
455
+ );
456
+ } catch (e) {
457
+ this.logger.debug(e instanceof Error ? e.message : String(e));
458
+ }
459
+ }
460
+
461
+ async handleToolStart(
462
+ tool: Serialized,
463
+ input: string,
464
+ runId: string,
465
+ parentRunId?: string | undefined,
466
+ tags?: string[] | undefined,
467
+ metadata?: Record<string, unknown> | undefined,
468
+ name?: string
469
+ ): Promise<void> {
470
+ try {
471
+ this.logger.debug(`Tool start with ID: ${runId}`);
472
+ this.startAndRegisterObservation({
473
+ runId,
474
+ parentRunId,
475
+ runName: name ?? tool.id.at(-1)?.toString() ?? "Tool execution",
476
+ attributes: {
477
+ input,
478
+ },
479
+ metadata,
480
+ tags,
481
+ asType: "tool",
482
+ });
483
+ } catch (e) {
484
+ this.logger.debug(e instanceof Error ? e.message : String(e));
485
+ }
486
+ }
487
+
488
+ async handleRetrieverStart(
489
+ retriever: Serialized,
490
+ query: string,
491
+ runId: string,
492
+ parentRunId?: string | undefined,
493
+ tags?: string[] | undefined,
494
+ metadata?: Record<string, unknown> | undefined,
495
+ name?: string
496
+ ): Promise<void> {
497
+ try {
498
+ this.logger.debug(`Retriever start with ID: ${runId}`);
499
+ this.startAndRegisterObservation({
500
+ runId,
501
+ parentRunId,
502
+ runName: name ?? retriever.id.at(-1)?.toString() ?? "Retriever",
503
+ attributes: {
504
+ input: query,
505
+ },
506
+ tags,
507
+ metadata,
508
+ asType: "span",
509
+ });
510
+ } catch (e) {
511
+ this.logger.debug(e instanceof Error ? e.message : String(e));
512
+ }
513
+ }
514
+
515
+ async handleRetrieverEnd(
516
+ documents: Document<Record<string, any>>[],
517
+ runId: string,
518
+ _parentRunId?: string | undefined
519
+ ): Promise<void> {
520
+ try {
521
+ this.logger.debug(`Retriever end with ID: ${runId}`);
522
+ this.handleObservationEnd({
523
+ runId,
524
+ attributes: {
525
+ output: documents,
526
+ },
527
+ });
528
+ } catch (e) {
529
+ this.logger.debug(e instanceof Error ? e.message : String(e));
530
+ }
531
+ }
532
+
533
+ async handleRetrieverError(
534
+ err: any,
535
+ runId: string,
536
+ _parentRunId?: string | undefined
537
+ ): Promise<void> {
538
+ try {
539
+ this.logger.debug(`Retriever error: ${err} with ID: ${runId}`);
540
+ this.handleObservationEnd({
541
+ runId,
542
+ attributes: {
543
+ level: "ERROR",
544
+ statusMessage: err.toString(),
545
+ },
546
+ });
547
+ } catch (e) {
548
+ this.logger.debug(e instanceof Error ? e.message : String(e));
549
+ }
550
+ }
551
+
552
+ async handleToolEnd(
553
+ output: string,
554
+ runId: string,
555
+ _parentRunId?: string | undefined
556
+ ): Promise<void> {
557
+ try {
558
+ this.logger.debug(`Tool end with ID: ${runId}`);
559
+ this.handleObservationEnd({
560
+ runId,
561
+ attributes: { output },
562
+ });
563
+ } catch (e) {
564
+ this.logger.debug(e instanceof Error ? e.message : String(e));
565
+ }
566
+ }
567
+
568
+ async handleToolError(
569
+ err: any,
570
+ runId: string,
571
+ _parentRunId?: string | undefined
572
+ ): Promise<void> {
573
+ try {
574
+ this.logger.debug(`Tool error ${err} with ID: ${runId}`);
575
+ this.handleObservationEnd({
576
+ runId,
577
+ attributes: {
578
+ level: "ERROR",
579
+ statusMessage: err.toString(),
580
+ },
581
+ });
582
+ } catch (e) {
583
+ this.logger.debug(e instanceof Error ? e.message : String(e));
584
+ }
585
+ }
586
+
587
+ async handleLLMEnd(
588
+ output: LLMResult,
589
+ runId: string,
590
+ _parentRunId?: string | undefined
591
+ ): Promise<void> {
592
+ try {
593
+ this.logger.debug(`LLM end with ID: ${runId}`);
594
+
595
+ const lastResponse =
596
+ output.generations[output.generations.length - 1][
597
+ output.generations[output.generations.length - 1].length - 1
598
+ ];
599
+ const llmUsage =
600
+ this.extractUsageMetadata(lastResponse) ??
601
+ output.llmOutput?.["tokenUsage"];
602
+ const modelName = this.extractModelNameFromMetadata(lastResponse);
603
+
604
+ const usageDetails: Record<string, any> = {
605
+ input:
606
+ llmUsage?.input_tokens ??
607
+ ("promptTokens" in llmUsage ? llmUsage?.promptTokens : undefined),
608
+ output:
609
+ llmUsage?.output_tokens ??
610
+ ("completionTokens" in llmUsage
611
+ ? llmUsage?.completionTokens
612
+ : undefined),
613
+ total:
614
+ llmUsage?.total_tokens ??
615
+ ("totalTokens" in llmUsage ? llmUsage?.totalTokens : undefined),
616
+ };
617
+
618
+ if (llmUsage && "input_token_details" in llmUsage) {
619
+ for (const [key, val] of Object.entries(
620
+ llmUsage["input_token_details"] ?? {}
621
+ )) {
622
+ usageDetails[`input_${key}`] = val;
623
+ if ("input" in usageDetails && typeof val === "number") {
624
+ usageDetails["input"] = Math.max(0, usageDetails["input"] - val);
625
+ }
626
+ }
627
+ }
628
+
629
+ if (llmUsage && "output_token_details" in llmUsage) {
630
+ for (const [key, val] of Object.entries(
631
+ llmUsage["output_token_details"] ?? {}
632
+ )) {
633
+ usageDetails[`output_${key}`] = val;
634
+ if ("output" in usageDetails && typeof val === "number") {
635
+ usageDetails["output"] = Math.max(0, usageDetails["output"] - val);
636
+ }
637
+ }
638
+ }
639
+
640
+ const extractedOutput =
641
+ "message" in lastResponse
642
+ ? this.extractChatMessageContent(
643
+ lastResponse["message"] as BaseMessage
644
+ )
645
+ : lastResponse.text;
646
+
647
+ this.handleObservationEnd({
648
+ runId,
649
+ attributes: {
650
+ model: modelName,
651
+ output: extractedOutput,
652
+ completionStartTime:
653
+ runId in this.completionStartTimes
654
+ ? this.completionStartTimes[runId]
655
+ : undefined,
656
+ usageDetails: usageDetails,
657
+ },
658
+ });
659
+
660
+ if (runId in this.completionStartTimes) {
661
+ delete this.completionStartTimes[runId];
662
+ }
663
+ } catch (e) {
664
+ this.logger.debug(e instanceof Error ? e.message : String(e));
665
+ }
666
+ }
667
+
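As a worked example of the usage normalization above (numbers are illustrative): for a usage_metadata of { input_tokens: 100, output_tokens: 20, total_tokens: 120, input_token_details: { cache_read: 30 } }, the resulting usageDetails is:

const usageDetails = {
  input: 70,            // 100 input tokens minus the 30 broken out as cache reads
  output: 20,
  total: 120,
  input_cache_read: 30, // copied from input_token_details
};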
668
+ async handleLLMError(
669
+ err: any,
670
+ runId: string,
671
+ _parentRunId?: string | undefined
672
+ ): Promise<void> {
673
+ try {
674
+ this.logger.debug(`LLM error ${err} with ID: ${runId}`);
675
+ this.handleObservationEnd({
676
+ runId,
677
+ attributes: {
678
+ level: "ERROR",
679
+ statusMessage: err.toString(),
680
+ },
681
+ });
682
+ } catch (e) {
683
+ this.logger.debug(e instanceof Error ? e.message : String(e));
684
+ }
685
+ }
686
+
687
+ private registerPromptInfo(
688
+ parentRunId?: string,
689
+ metadata?: Record<string, unknown>
690
+ ): void {
691
+ if (metadata && "promptInfo" in metadata && parentRunId) {
692
+ this.promptToParentRunMap.set(
693
+ parentRunId,
694
+ metadata.promptInfo as PromptInfo
695
+ );
696
+ }
697
+ }
698
+
699
+ private deregisterPromptInfo(runId: string): void {
700
+ this.promptToParentRunMap.delete(runId);
701
+ }
702
+
703
+ private startAndRegisterObservation(params: {
704
+ runName: string;
705
+ runId: string;
706
+ parentRunId?: string;
707
+ attributes: Record<string, unknown>;
708
+ metadata?: Record<string, unknown>;
709
+ tags?: string[];
710
+ asType?: "span" | "llm" | "tool";
711
+ }): Observation {
712
+ const { runName, runId, parentRunId, attributes, metadata, tags, asType } =
713
+ params;
714
+
715
+ // Determine parent context:
716
+ // 1. If parentRunId exists, use the parent span from runMap (internal LangChain/LangGraph hierarchy)
717
+ // 2. If no parentRunId (ROOT span) but externalParentSpanContext exists, use it (link to AG-UI.Server)
718
+ // 3. Otherwise, create a new root span
719
+ let parentSpanContext: SpanContext | undefined;
720
+
721
+ if (parentRunId) {
722
+ // Internal parent from LangChain/LangGraph
723
+ parentSpanContext = this.runMap.get(parentRunId)?.otelSpan.spanContext();
724
+ } else if (this.externalParentSpanContext) {
725
+ // External parent from AG-UI.Server
726
+ parentSpanContext = this.externalParentSpanContext;
727
+ }
728
+
729
+ // Add adapter name prefix to ROOT span
730
+ let finalRunName = runName;
731
+ if (!parentRunId && this.adapterName) {
732
+ // ROOT span: add Adapter.LangGraph or Adapter.LangChain prefix
733
+ finalRunName = `Adapter.${this.adapterName}`;
734
+ }
735
+
736
+ const observation = startObservation(
737
+ finalRunName,
738
+ {
739
+ version: this.version,
740
+ metadata: this.joinTagsAndMetaData(tags, metadata),
741
+ ...attributes,
742
+ },
743
+ {
744
+ asType: asType ?? "span",
745
+ parentSpanContext,
746
+ }
747
+ );
748
+ this.runMap.set(runId, observation);
749
+
750
+ return observation;
751
+ }
752
+
753
+ private handleObservationEnd(params: {
754
+ runId: string;
755
+ attributes?: Record<string, unknown>;
756
+ }) {
757
+ const { runId, attributes = {} } = params;
758
+
759
+ const observation = this.runMap.get(runId);
760
+ if (!observation) {
761
+ this.logger.warn("Observation not found in runMap. Skipping operation.");
762
+ return;
763
+ }
764
+
765
+ // Type-safe update: cast to ObservationAttributes, which is the union of all observation attribute types
766
+ observation.update(attributes as ObservationAttributes).end();
767
+
768
+ this.last_trace_id = observation.traceId;
769
+ this.runMap.delete(runId);
770
+ }
771
+
772
+ private joinTagsAndMetaData(
773
+ tags?: string[] | undefined,
774
+ metadata1?: Record<string, unknown> | undefined,
775
+ metadata2?: Record<string, unknown> | undefined
776
+ ): Record<string, unknown> | undefined {
777
+ const finalDict: Record<string, unknown> = {};
778
+ if (tags && tags.length > 0) {
779
+ finalDict.tags = tags;
780
+ }
781
+ if (metadata1) {
782
+ Object.assign(finalDict, metadata1);
783
+ }
784
+ if (metadata2) {
785
+ Object.assign(finalDict, metadata2);
786
+ }
787
+ return this.stripObservabilityKeysFromMetadata(finalDict);
788
+ }
789
+
790
+ private stripObservabilityKeysFromMetadata(
791
+ metadata?: Record<string, unknown>
792
+ ): Record<string, unknown> | undefined {
793
+ if (!metadata) {
794
+ return;
795
+ }
796
+
797
+ const reservedKeys = ["promptInfo", "userId", "sessionId"];
798
+
799
+ return Object.fromEntries(
800
+ Object.entries(metadata).filter(([key, _]) => !reservedKeys.includes(key))
801
+ );
802
+ }
803
+
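A small illustration of the merge-and-strip behaviour above (made-up values): merging tags ["prod"] with metadata { userId: "u-1", env: "staging" } produces

const merged = {
  tags: ["prod"],
  env: "staging",
  // "userId" was present in the incoming metadata but is a reserved key and is stripped.
};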
804
+ private extractUsageMetadata(
805
+ generation: Generation
806
+ ): UsageMetadata | undefined {
807
+ try {
808
+ const usageMetadata =
809
+ "message" in generation &&
810
+ (AIMessage.isInstance(generation["message"]) ||
811
+ AIMessageChunk.isInstance(generation["message"]))
812
+ ? generation["message"].usage_metadata
813
+ : undefined;
814
+ return usageMetadata;
815
+ } catch (err) {
816
+ this.logger.debug(`Error extracting usage metadata: ${err}`);
817
+ return;
818
+ }
819
+ }
820
+
821
+ private extractModelNameFromMetadata(generation: any): string | undefined {
822
+ try {
823
+ return "message" in generation &&
824
+ (AIMessage.isInstance(generation["message"]) ||
825
+ AIMessageChunk.isInstance(generation["message"]))
826
+ ? generation["message"].response_metadata.model_name
827
+ : undefined;
828
+ } catch { /* model name is best-effort; swallow extraction errors */ }
829
+ }
830
+
831
+ private extractChatMessageContent(
832
+ message: BaseMessage
833
+ ): LlmMessage | AnonymousLlmMessage | MessageContent {
834
+ let response = undefined;
835
+
836
+ if (message.getType() === "human") {
837
+ response = { content: message.content, role: "user" };
838
+ } else if (message.getType() === "generic") {
839
+ response = {
840
+ content: message.content,
841
+ role: "human",
842
+ };
843
+ } else if (message.getType() === "ai") {
844
+ response = { content: message.content, role: "assistant" };
845
+
846
+ if (
847
+ "tool_calls" in message &&
848
+ Array.isArray(message.tool_calls) &&
849
+ (message.tool_calls?.length ?? 0) > 0
850
+ ) {
851
+ (response as any)["tool_calls"] = message["tool_calls"];
852
+ }
853
+ if (
854
+ "additional_kwargs" in message &&
855
+ "tool_calls" in message["additional_kwargs"]
856
+ ) {
857
+ (response as any)["tool_calls"] =
858
+ message["additional_kwargs"]["tool_calls"];
859
+ }
860
+ } else if (message.getType() === "system") {
861
+ response = { content: message.content, role: "system" };
862
+ } else if (message.getType() === "function") {
863
+ response = {
864
+ content: message.content,
865
+ additional_kwargs: message.additional_kwargs,
866
+ role: message.name,
867
+ };
868
+ } else if (message.getType() === "tool") {
869
+ response = {
870
+ content: message.content,
871
+ additional_kwargs: message.additional_kwargs,
872
+ role: message.name,
873
+ };
874
+ } else if (!message.name) {
875
+ response = { content: message.content };
876
+ } else {
877
+ response = {
878
+ role: message.name,
879
+ content: message.content,
880
+ };
881
+ }
882
+
883
+ if (
884
+ (message.additional_kwargs.function_call ||
885
+ message.additional_kwargs.tool_calls) &&
886
+ (response as any)["tool_calls"] === undefined
887
+ ) {
888
+ return { ...response, additional_kwargs: message.additional_kwargs };
889
+ }
890
+
891
+ return response;
892
+ }
893
+ }
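Putting it together, a hedged end-to-end sketch (the agent, its input, and any exporter setup are application-specific placeholders):

const handler = new CallbackHandler({ adapterName: "LangGraph", sessionId: "session-456" });

const result = await agent.invoke(input, { callbacks: [handler] });

// The trace id of the most recently completed observation is exposed for correlation.
console.log("trace id:", handler.last_trace_id);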