langsmith 0.3.39 → 0.3.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.AI_SDK_TOOL_OPERATIONS = exports.AI_SDK_LLM_OPERATIONS = exports.GEN_AI_CHOICE = exports.GEN_AI_ASSISTANT_MESSAGE = exports.GEN_AI_USER_MESSAGE = exports.GEN_AI_SYSTEM_MESSAGE = exports.LANGSMITH_PARENT_RUN_ID = exports.LANGSMITH_DOTTED_ORDER = exports.LANGSMITH_TRACE_ID = exports.LANGSMITH_RUN_ID = exports.LANGSMITH_REQUEST_HEADERS = exports.LANGSMITH_REQUEST_STREAMING = exports.LANGSMITH_RUNTIME = exports.LANGSMITH_TAGS = exports.LANGSMITH_METADATA = exports.LANGSMITH_NAME = exports.LANGSMITH_RUN_TYPE = exports.LANGSMITH_SESSION_NAME = exports.LANGSMITH_SESSION_ID = exports.GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = exports.GEN_AI_USAGE_INPUT_TOKEN_DETAILS = exports.GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = exports.GEN_AI_RESPONSE_SERVICE_TIER = exports.GEN_AI_RESPONSE_ID = exports.GEN_AI_SERIALIZED_DOC = exports.GEN_AI_SERIALIZED_SIGNATURE = exports.GEN_AI_SERIALIZED_NAME = exports.GEN_AI_REQUEST_EXTRA_BODY = exports.GEN_AI_REQUEST_EXTRA_QUERY = exports.GENAI_COMPLETION = exports.GENAI_PROMPT = exports.GEN_AI_RESPONSE_FINISH_REASONS = exports.GEN_AI_REQUEST_PRESENCE_PENALTY = exports.GEN_AI_REQUEST_FREQUENCY_PENALTY = exports.GEN_AI_REQUEST_TOP_P = exports.GEN_AI_REQUEST_TEMPERATURE = exports.GEN_AI_REQUEST_MAX_TOKENS = exports.GEN_AI_USAGE_TOTAL_TOKENS = exports.GEN_AI_USAGE_OUTPUT_TOKENS = exports.GEN_AI_USAGE_INPUT_TOKENS = exports.GEN_AI_RESPONSE_MODEL = exports.GEN_AI_REQUEST_MODEL = exports.GEN_AI_SYSTEM = exports.GEN_AI_OPERATION_NAME = void 0;
3
+ exports.AI_SDK_TOOL_OPERATIONS = exports.AI_SDK_LLM_OPERATIONS = exports.GEN_AI_CHOICE = exports.GEN_AI_ASSISTANT_MESSAGE = exports.GEN_AI_USER_MESSAGE = exports.GEN_AI_SYSTEM_MESSAGE = exports.LANGSMITH_USAGE_METADATA = exports.LANGSMITH_PARENT_RUN_ID = exports.LANGSMITH_DOTTED_ORDER = exports.LANGSMITH_TRACE_ID = exports.LANGSMITH_RUN_ID = exports.LANGSMITH_REQUEST_HEADERS = exports.LANGSMITH_REQUEST_STREAMING = exports.LANGSMITH_RUNTIME = exports.LANGSMITH_TAGS = exports.LANGSMITH_METADATA = exports.LANGSMITH_NAME = exports.LANGSMITH_RUN_TYPE = exports.LANGSMITH_SESSION_NAME = exports.LANGSMITH_SESSION_ID = exports.GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = exports.GEN_AI_USAGE_INPUT_TOKEN_DETAILS = exports.GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = exports.GEN_AI_RESPONSE_SERVICE_TIER = exports.GEN_AI_RESPONSE_ID = exports.GEN_AI_SERIALIZED_DOC = exports.GEN_AI_SERIALIZED_SIGNATURE = exports.GEN_AI_SERIALIZED_NAME = exports.GEN_AI_REQUEST_EXTRA_BODY = exports.GEN_AI_REQUEST_EXTRA_QUERY = exports.GENAI_COMPLETION = exports.GENAI_PROMPT = exports.GEN_AI_RESPONSE_FINISH_REASONS = exports.GEN_AI_REQUEST_PRESENCE_PENALTY = exports.GEN_AI_REQUEST_FREQUENCY_PENALTY = exports.GEN_AI_REQUEST_TOP_P = exports.GEN_AI_REQUEST_TEMPERATURE = exports.GEN_AI_REQUEST_MAX_TOKENS = exports.GEN_AI_USAGE_TOTAL_TOKENS = exports.GEN_AI_USAGE_OUTPUT_TOKENS = exports.GEN_AI_USAGE_INPUT_TOKENS = exports.GEN_AI_RESPONSE_MODEL = exports.GEN_AI_REQUEST_MODEL = exports.GEN_AI_SYSTEM = exports.GEN_AI_OPERATION_NAME = void 0;
4
4
  // OpenTelemetry GenAI semantic convention attribute names
5
5
  exports.GEN_AI_OPERATION_NAME = "gen_ai.operation.name";
6
6
  exports.GEN_AI_SYSTEM = "gen_ai.system";
@@ -41,6 +41,7 @@ exports.LANGSMITH_RUN_ID = "langsmith.span.id";
41
41
  exports.LANGSMITH_TRACE_ID = "langsmith.trace.id";
42
42
  exports.LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
43
43
  exports.LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
44
+ exports.LANGSMITH_USAGE_METADATA = "langsmith.usage_metadata";
44
45
  // GenAI event names
45
46
  exports.GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
46
47
  exports.GEN_AI_USER_MESSAGE = "gen_ai.user.message";
@@ -36,6 +36,7 @@ export declare const LANGSMITH_RUN_ID = "langsmith.span.id";
36
36
  export declare const LANGSMITH_TRACE_ID = "langsmith.trace.id";
37
37
  export declare const LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
38
38
  export declare const LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
39
+ export declare const LANGSMITH_USAGE_METADATA = "langsmith.usage_metadata";
39
40
  export declare const GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
40
41
  export declare const GEN_AI_USER_MESSAGE = "gen_ai.user.message";
41
42
  export declare const GEN_AI_ASSISTANT_MESSAGE = "gen_ai.assistant.message";
@@ -38,6 +38,7 @@ export const LANGSMITH_RUN_ID = "langsmith.span.id";
38
38
  export const LANGSMITH_TRACE_ID = "langsmith.trace.id";
39
39
  export const LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
40
40
  export const LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
41
+ export const LANGSMITH_USAGE_METADATA = "langsmith.usage_metadata";
41
42
  // GenAI event names
42
43
  export const GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
43
44
  export const GEN_AI_USER_MESSAGE = "gen_ai.user.message";
@@ -38,6 +38,7 @@ const exporter_trace_otlp_proto_1 = require("@opentelemetry/exporter-trace-otlp-
38
38
  const constants = __importStar(require("./constants.cjs"));
39
39
  const env_js_1 = require("../../env.cjs");
40
40
  const env_js_2 = require("../../utils/env.cjs");
41
+ const vercel_js_1 = require("../../utils/vercel.cjs");
41
42
  /**
42
43
  * Convert headers string in format "name=value,name2=value2" to object
43
44
  */
@@ -168,6 +169,9 @@ class LangSmithOTLPTraceExporter extends exporter_trace_otlp_proto_1.OTLPTraceEx
168
169
  if (typeof span.attributes["ai.operationId"] === "string" &&
169
170
  constants.AI_SDK_LLM_OPERATIONS.includes(span.attributes["ai.operationId"])) {
170
171
  span.attributes[constants.LANGSMITH_RUN_TYPE] = "llm";
172
+ const usageMetadata = (0, vercel_js_1.extractUsageMetadata)(span);
173
+ span.attributes[constants.LANGSMITH_USAGE_METADATA] =
174
+ JSON.stringify(usageMetadata);
171
175
  }
172
176
  else if (typeof span.attributes["ai.operationId"] === "string" &&
173
177
  constants.AI_SDK_TOOL_OPERATIONS.includes(span.attributes["ai.operationId"])) {
@@ -2,6 +2,7 @@ import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
2
2
  import * as constants from "./constants.js";
3
3
  import { isTracingEnabled } from "../../env.js";
4
4
  import { getEnvironmentVariable, getLangSmithEnvironmentVariable, } from "../../utils/env.js";
5
+ import { extractUsageMetadata } from "../../utils/vercel.js";
5
6
  /**
6
7
  * Convert headers string in format "name=value,name2=value2" to object
7
8
  */
@@ -132,6 +133,9 @@ export class LangSmithOTLPTraceExporter extends OTLPTraceExporter {
132
133
  if (typeof span.attributes["ai.operationId"] === "string" &&
133
134
  constants.AI_SDK_LLM_OPERATIONS.includes(span.attributes["ai.operationId"])) {
134
135
  span.attributes[constants.LANGSMITH_RUN_TYPE] = "llm";
136
+ const usageMetadata = extractUsageMetadata(span);
137
+ span.attributes[constants.LANGSMITH_USAGE_METADATA] =
138
+ JSON.stringify(usageMetadata);
135
139
  }
136
140
  else if (typeof span.attributes["ai.operationId"] === "string" &&
137
141
  constants.AI_SDK_TOOL_OPERATIONS.includes(span.attributes["ai.operationId"])) {
package/dist/index.cjs CHANGED
@@ -10,4 +10,4 @@ Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true
10
10
  var project_js_1 = require("./utils/project.cjs");
11
11
  Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get: function () { return project_js_1.getDefaultProjectName; } });
12
12
  // Update using yarn bump-version
13
- exports.__version__ = "0.3.39";
13
+ exports.__version__ = "0.3.40";
package/dist/index.d.ts CHANGED
@@ -3,4 +3,4 @@ export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, }
3
3
  export { RunTree, type RunTreeConfig } from "./run_trees.js";
4
4
  export { overrideFetchImplementation } from "./singletons/fetch.js";
5
5
  export { getDefaultProjectName } from "./utils/project.js";
6
- export declare const __version__ = "0.3.39";
6
+ export declare const __version__ = "0.3.40";
package/dist/index.js CHANGED
@@ -3,4 +3,4 @@ export { RunTree } from "./run_trees.js";
3
3
  export { overrideFetchImplementation } from "./singletons/fetch.js";
4
4
  export { getDefaultProjectName } from "./utils/project.js";
5
5
  // Update using yarn bump-version
6
- export const __version__ = "0.3.39";
6
+ export const __version__ = "0.3.40";
@@ -0,0 +1,71 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.extractUsageMetadata = extractUsageMetadata;
/**
 * Map provider-specific cache-token counts from the AI SDK's parsed
 * `providerMetadata` payload onto LangSmith `input_token_details` keys.
 * Anthropic metadata takes precedence; otherwise OpenAI is consulted.
 * Returns an empty object when neither provider reports cache tokens.
 * @param {Record<string, unknown>} providerMetadata parsed provider metadata
 * @returns {Record<string, number>} cache-token details
 */
function extractInputTokenDetails(providerMetadata) {
    const details = {};
    const anthropic = providerMetadata.anthropic;
    if (anthropic != null && typeof anthropic === "object") {
        if (typeof anthropic.cacheReadInputTokens === "number") {
            details.cache_read = anthropic.cacheReadInputTokens;
        }
        if (typeof anthropic.cacheCreationInputTokens === "number") {
            details.ephemeral_5m_input_tokens = anthropic.cacheCreationInputTokens;
        }
        return details;
    }
    const openai = providerMetadata.openai;
    if (openai != null &&
        typeof openai === "object" &&
        typeof openai.cachedPromptTokens === "number") {
        details.cache_read = openai.cachedPromptTokens;
    }
    return details;
}
/**
 * Build LangSmith usage metadata (input/output/total token counts plus
 * provider cache-token details) from an AI SDK OpenTelemetry span.
 * Errored spans (status code 2) and missing spans/attributes yield
 * all-zero counts.
 * @param {{ status?: { code: number }, attributes?: Record<string, unknown> }} [span]
 * @returns {Record<string, unknown>} usage metadata
 */
function extractUsageMetadata(span) {
    if (span?.status?.code === 2 || !span || !span.attributes) {
        return { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
    }
    const attrs = span.attributes;
    const usage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
    if (typeof attrs["ai.usage.promptTokens"] === "number") {
        usage.input_tokens = attrs["ai.usage.promptTokens"];
    }
    if (typeof attrs["ai.usage.completionTokens"] === "number") {
        usage.output_tokens = attrs["ai.usage.completionTokens"];
    }
    const rawMetadata = attrs["ai.response.providerMetadata"];
    if (typeof rawMetadata === "string") {
        try {
            const providerMetadata = JSON.parse(rawMetadata);
            usage.input_token_details = extractInputTokenDetails(providerMetadata);
            if (providerMetadata.anthropic != null &&
                typeof providerMetadata.anthropic === "object") {
                // The AI SDK does not count Anthropic cache tokens in its
                // reported input token total, so fold them in manually.
                for (const count of Object.values(usage.input_token_details)) {
                    usage.input_tokens += count;
                }
            }
        }
        catch {
            // Unparseable provider metadata: keep the base counts.
        }
    }
    usage.total_tokens = usage.input_tokens + usage.output_tokens;
    return usage;
}
@@ -0,0 +1,7 @@
1
import { KVMap } from "../schemas.js";
/**
 * Extract LangSmith-style usage metadata (input/output/total token counts,
 * plus provider-specific cached-token details) from an AI SDK
 * OpenTelemetry span. Spans with error status (code 2), or missing
 * spans/attributes, yield all-zero counts.
 */
export declare function extractUsageMetadata(span?: {
    status?: {
        code: number;
    };
    attributes?: Record<string, unknown>;
}): KVMap;
@@ -0,0 +1,68 @@
1
/**
 * Map provider-specific cache-token counts from the AI SDK's parsed
 * `providerMetadata` payload onto LangSmith `input_token_details` keys.
 * Anthropic metadata takes precedence; otherwise OpenAI is consulted.
 * Returns an empty object when neither provider reports cache tokens.
 * @param {Record<string, unknown>} providerMetadata parsed provider metadata
 * @returns {Record<string, number>} cache-token details
 */
function extractInputTokenDetails(providerMetadata) {
    const details = {};
    const anthropic = providerMetadata.anthropic;
    if (anthropic != null && typeof anthropic === "object") {
        if (typeof anthropic.cacheReadInputTokens === "number") {
            details.cache_read = anthropic.cacheReadInputTokens;
        }
        if (typeof anthropic.cacheCreationInputTokens === "number") {
            details.ephemeral_5m_input_tokens = anthropic.cacheCreationInputTokens;
        }
        return details;
    }
    const openai = providerMetadata.openai;
    if (openai != null &&
        typeof openai === "object" &&
        typeof openai.cachedPromptTokens === "number") {
        details.cache_read = openai.cachedPromptTokens;
    }
    return details;
}
/**
 * Build LangSmith usage metadata (input/output/total token counts plus
 * provider cache-token details) from an AI SDK OpenTelemetry span.
 * Errored spans (status code 2) and missing spans/attributes yield
 * all-zero counts.
 * @param {{ status?: { code: number }, attributes?: Record<string, unknown> }} [span]
 * @returns {Record<string, unknown>} usage metadata
 */
export function extractUsageMetadata(span) {
    if (span?.status?.code === 2 || !span || !span.attributes) {
        return { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
    }
    const attrs = span.attributes;
    const usage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
    if (typeof attrs["ai.usage.promptTokens"] === "number") {
        usage.input_tokens = attrs["ai.usage.promptTokens"];
    }
    if (typeof attrs["ai.usage.completionTokens"] === "number") {
        usage.output_tokens = attrs["ai.usage.completionTokens"];
    }
    const rawMetadata = attrs["ai.response.providerMetadata"];
    if (typeof rawMetadata === "string") {
        try {
            const providerMetadata = JSON.parse(rawMetadata);
            usage.input_token_details = extractInputTokenDetails(providerMetadata);
            if (providerMetadata.anthropic != null &&
                typeof providerMetadata.anthropic === "object") {
                // The AI SDK does not count Anthropic cache tokens in its
                // reported input token total, so fold them in manually.
                for (const count of Object.values(usage.input_token_details)) {
                    usage.input_tokens += count;
                }
            }
        }
        catch {
            // Unparseable provider metadata: keep the base counts.
        }
    }
    usage.total_tokens = usage.input_tokens + usage.output_tokens;
    return usage;
}
package/dist/vercel.cjs CHANGED
@@ -1,6 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.AISDKExporter = void 0;
4
+ const vercel_js_1 = require("./utils/vercel.cjs");
4
5
  const index_js_1 = require("./index.cjs");
5
6
  const uuid_1 = require("uuid");
6
7
  const traceable_js_1 = require("./singletons/traceable.cjs");
@@ -473,20 +474,6 @@ class AISDKExporter {
473
474
  }),
474
475
  };
475
476
  }
476
- if (span.attributes["ai.usage.completionTokens"]) {
477
- result ??= {};
478
- result.llm_output ??= {};
479
- result.llm_output.token_usage ??= {};
480
- result.llm_output.token_usage["completion_tokens"] =
481
- span.attributes["ai.usage.completionTokens"];
482
- }
483
- if (span.attributes["ai.usage.promptTokens"]) {
484
- result ??= {};
485
- result.llm_output ??= {};
486
- result.llm_output.token_usage ??= {};
487
- result.llm_output.token_usage["prompt_tokens"] =
488
- span.attributes["ai.usage.promptTokens"];
489
- }
490
477
  return result;
491
478
  })();
492
479
  const invocationParams = (() => {
@@ -517,6 +504,7 @@ class AISDKExporter {
517
504
  ? "chain"
518
505
  : "llm";
519
506
  const error = span.status?.code === 2 ? span.status.message : undefined;
507
+ const usageMetadata = (0, vercel_js_1.extractUsageMetadata)(span);
520
508
  // TODO: add first_token_time
521
509
  return asRunCreate({
522
510
  run_type: runType,
@@ -529,15 +517,7 @@ class AISDKExporter {
529
517
  invocation_params: invocationParams,
530
518
  batch_size: 1,
531
519
  metadata: {
532
- ...(error
533
- ? {
534
- usage_metadata: {
535
- input_tokens: 0,
536
- output_tokens: 0,
537
- total_tokens: 0,
538
- },
539
- }
540
- : undefined),
520
+ usage_metadata: usageMetadata,
541
521
  ls_provider: span.attributes["ai.model.provider"]
542
522
  .split(".")
543
523
  .at(0),
@@ -602,20 +582,6 @@ class AISDKExporter {
602
582
  output: tryJson(span.attributes["ai.response.object"]),
603
583
  };
604
584
  }
605
- if (span.attributes["ai.usage.completionTokens"]) {
606
- result ??= {};
607
- result.llm_output ??= {};
608
- result.llm_output.token_usage ??= {};
609
- result.llm_output.token_usage["completion_tokens"] =
610
- span.attributes["ai.usage.completionTokens"];
611
- }
612
- if (span.attributes["ai.usage.promptTokens"]) {
613
- result ??= {};
614
- result.llm_output ??= {};
615
- result.llm_output.token_usage ??= {};
616
- result.llm_output.token_usage["prompt_tokens"] =
617
- +span.attributes["ai.usage.promptTokens"];
618
- }
619
585
  return result;
620
586
  })();
621
587
  const events = [];
@@ -630,6 +596,7 @@ class AISDKExporter {
630
596
  ? "chain"
631
597
  : "llm";
632
598
  const error = span.status?.code === 2 ? span.status.message : undefined;
599
+ const usageMetadata = (0, vercel_js_1.extractUsageMetadata)(span);
633
600
  return asRunCreate({
634
601
  run_type: runType,
635
602
  name: span.attributes["ai.model.provider"],
@@ -640,15 +607,7 @@ class AISDKExporter {
640
607
  extra: {
641
608
  batch_size: 1,
642
609
  metadata: {
643
- ...(error
644
- ? {
645
- usage_metadata: {
646
- input_tokens: 0,
647
- output_tokens: 0,
648
- total_tokens: 0,
649
- },
650
- }
651
- : undefined),
610
+ usage_metadata: usageMetadata,
652
611
  ls_provider: span.attributes["ai.model.provider"]
653
612
  .split(".")
654
613
  .at(0),
package/dist/vercel.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { extractUsageMetadata } from "./utils/vercel.js";
1
2
  import { Client, RunTree } from "./index.js";
2
3
  import { v5 as uuid5 } from "uuid";
3
4
  import { getCurrentRunTree } from "./singletons/traceable.js";
@@ -470,20 +471,6 @@ export class AISDKExporter {
470
471
  }),
471
472
  };
472
473
  }
473
- if (span.attributes["ai.usage.completionTokens"]) {
474
- result ??= {};
475
- result.llm_output ??= {};
476
- result.llm_output.token_usage ??= {};
477
- result.llm_output.token_usage["completion_tokens"] =
478
- span.attributes["ai.usage.completionTokens"];
479
- }
480
- if (span.attributes["ai.usage.promptTokens"]) {
481
- result ??= {};
482
- result.llm_output ??= {};
483
- result.llm_output.token_usage ??= {};
484
- result.llm_output.token_usage["prompt_tokens"] =
485
- span.attributes["ai.usage.promptTokens"];
486
- }
487
474
  return result;
488
475
  })();
489
476
  const invocationParams = (() => {
@@ -514,6 +501,7 @@ export class AISDKExporter {
514
501
  ? "chain"
515
502
  : "llm";
516
503
  const error = span.status?.code === 2 ? span.status.message : undefined;
504
+ const usageMetadata = extractUsageMetadata(span);
517
505
  // TODO: add first_token_time
518
506
  return asRunCreate({
519
507
  run_type: runType,
@@ -526,15 +514,7 @@ export class AISDKExporter {
526
514
  invocation_params: invocationParams,
527
515
  batch_size: 1,
528
516
  metadata: {
529
- ...(error
530
- ? {
531
- usage_metadata: {
532
- input_tokens: 0,
533
- output_tokens: 0,
534
- total_tokens: 0,
535
- },
536
- }
537
- : undefined),
517
+ usage_metadata: usageMetadata,
538
518
  ls_provider: span.attributes["ai.model.provider"]
539
519
  .split(".")
540
520
  .at(0),
@@ -599,20 +579,6 @@ export class AISDKExporter {
599
579
  output: tryJson(span.attributes["ai.response.object"]),
600
580
  };
601
581
  }
602
- if (span.attributes["ai.usage.completionTokens"]) {
603
- result ??= {};
604
- result.llm_output ??= {};
605
- result.llm_output.token_usage ??= {};
606
- result.llm_output.token_usage["completion_tokens"] =
607
- span.attributes["ai.usage.completionTokens"];
608
- }
609
- if (span.attributes["ai.usage.promptTokens"]) {
610
- result ??= {};
611
- result.llm_output ??= {};
612
- result.llm_output.token_usage ??= {};
613
- result.llm_output.token_usage["prompt_tokens"] =
614
- +span.attributes["ai.usage.promptTokens"];
615
- }
616
582
  return result;
617
583
  })();
618
584
  const events = [];
@@ -627,6 +593,7 @@ export class AISDKExporter {
627
593
  ? "chain"
628
594
  : "llm";
629
595
  const error = span.status?.code === 2 ? span.status.message : undefined;
596
+ const usageMetadata = extractUsageMetadata(span);
630
597
  return asRunCreate({
631
598
  run_type: runType,
632
599
  name: span.attributes["ai.model.provider"],
@@ -637,15 +604,7 @@ export class AISDKExporter {
637
604
  extra: {
638
605
  batch_size: 1,
639
606
  metadata: {
640
- ...(error
641
- ? {
642
- usage_metadata: {
643
- input_tokens: 0,
644
- output_tokens: 0,
645
- total_tokens: 0,
646
- },
647
- }
648
- : undefined),
607
+ usage_metadata: usageMetadata,
649
608
  ls_provider: span.attributes["ai.model.provider"]
650
609
  .split(".")
651
610
  .at(0),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "langsmith",
3
- "version": "0.3.39",
3
+ "version": "0.3.40",
4
4
  "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
5
5
  "packageManager": "yarn@1.22.19",
6
6
  "files": [
@@ -140,6 +140,7 @@
140
140
  "uuid": "^10.0.0"
141
141
  },
142
142
  "devDependencies": {
143
+ "@ai-sdk/anthropic": "^1.2.12",
143
144
  "@ai-sdk/openai": "^1.3.20",
144
145
  "@babel/preset-env": "^7.22.4",
145
146
  "@faker-js/faker": "^8.4.1",
@@ -158,7 +159,7 @@
158
159
  "@types/node-fetch": "^2.6.12",
159
160
  "@typescript-eslint/eslint-plugin": "^5.59.8",
160
161
  "@typescript-eslint/parser": "^5.59.8",
161
- "ai": "^4.3.10",
162
+ "ai": "^4.3.17",
162
163
  "babel-jest": "^29.5.0",
163
164
  "cross-env": "^7.0.3",
164
165
  "dotenv": "^16.1.3",
File without changes