langsmith 0.3.49-rc.3 → 0.3.49
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/experimental/otel/processor.cjs +6 -6
- package/dist/experimental/otel/processor.d.ts +1 -2
- package/dist/experimental/otel/processor.js +6 -6
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/utils/vercel.cjs +16 -7
- package/dist/utils/vercel.js +16 -7
- package/package.json +1 -1
|
@@ -20,8 +20,8 @@ function getParentSpanId(span) {
|
|
|
20
20
|
* usually should not be traced.
|
|
21
21
|
*/
|
|
22
22
|
class LangSmithOTLPSpanProcessor extends sdk_trace_base_1.BatchSpanProcessor {
|
|
23
|
-
constructor(...args) {
|
|
24
|
-
super(...args);
|
|
23
|
+
constructor() {
|
|
24
|
+
super(...arguments);
|
|
25
25
|
Object.defineProperty(this, "traceMap", {
|
|
26
26
|
enumerable: true,
|
|
27
27
|
configurable: true,
|
|
@@ -29,10 +29,6 @@ class LangSmithOTLPSpanProcessor extends sdk_trace_base_1.BatchSpanProcessor {
|
|
|
29
29
|
value: {}
|
|
30
30
|
});
|
|
31
31
|
}
|
|
32
|
-
async forceFlush() {
|
|
33
|
-
await run_trees_js_1.RunTree.getSharedClient().awaitPendingTraceBatches();
|
|
34
|
-
await super.forceFlush();
|
|
35
|
-
}
|
|
36
32
|
onStart(span, parentContext) {
|
|
37
33
|
if (!this.traceMap[span.spanContext().traceId]) {
|
|
38
34
|
this.traceMap[span.spanContext().traceId] = {
|
|
@@ -84,5 +80,9 @@ class LangSmithOTLPSpanProcessor extends sdk_trace_base_1.BatchSpanProcessor {
|
|
|
84
80
|
super.onEnd(span);
|
|
85
81
|
}
|
|
86
82
|
}
|
|
83
|
+
async shutdown() {
|
|
84
|
+
await run_trees_js_1.RunTree.getSharedClient().awaitPendingTraceBatches();
|
|
85
|
+
await super.shutdown();
|
|
86
|
+
}
|
|
87
87
|
}
|
|
88
88
|
exports.LangSmithOTLPSpanProcessor = LangSmithOTLPSpanProcessor;
|
|
@@ -7,8 +7,7 @@ export declare function isTraceableSpan(span: ReadableSpan): boolean;
|
|
|
7
7
|
*/
|
|
8
8
|
export declare class LangSmithOTLPSpanProcessor extends BatchSpanProcessor {
|
|
9
9
|
private traceMap;
|
|
10
|
-
constructor(...args: ConstructorParameters<typeof BatchSpanProcessor>);
|
|
11
|
-
forceFlush(): Promise<void>;
|
|
12
10
|
onStart(span: Span, parentContext: Context): void;
|
|
13
11
|
onEnd(span: ReadableSpan): void;
|
|
12
|
+
shutdown(): Promise<void>;
|
|
14
13
|
}
|
|
@@ -16,8 +16,8 @@ function getParentSpanId(span) {
|
|
|
16
16
|
* usually should not be traced.
|
|
17
17
|
*/
|
|
18
18
|
export class LangSmithOTLPSpanProcessor extends BatchSpanProcessor {
|
|
19
|
-
constructor(...args) {
|
|
20
|
-
super(...args);
|
|
19
|
+
constructor() {
|
|
20
|
+
super(...arguments);
|
|
21
21
|
Object.defineProperty(this, "traceMap", {
|
|
22
22
|
enumerable: true,
|
|
23
23
|
configurable: true,
|
|
@@ -25,10 +25,6 @@ export class LangSmithOTLPSpanProcessor extends BatchSpanProcessor {
|
|
|
25
25
|
value: {}
|
|
26
26
|
});
|
|
27
27
|
}
|
|
28
|
-
async forceFlush() {
|
|
29
|
-
await RunTree.getSharedClient().awaitPendingTraceBatches();
|
|
30
|
-
await super.forceFlush();
|
|
31
|
-
}
|
|
32
28
|
onStart(span, parentContext) {
|
|
33
29
|
if (!this.traceMap[span.spanContext().traceId]) {
|
|
34
30
|
this.traceMap[span.spanContext().traceId] = {
|
|
@@ -80,4 +76,8 @@ export class LangSmithOTLPSpanProcessor extends BatchSpanProcessor {
|
|
|
80
76
|
super.onEnd(span);
|
|
81
77
|
}
|
|
82
78
|
}
|
|
79
|
+
async shutdown() {
|
|
80
|
+
await RunTree.getSharedClient().awaitPendingTraceBatches();
|
|
81
|
+
await super.shutdown();
|
|
82
|
+
}
|
|
83
83
|
}
|
package/dist/index.cjs
CHANGED
|
@@ -10,4 +10,4 @@ Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true
|
|
|
10
10
|
var project_js_1 = require("./utils/project.cjs");
|
|
11
11
|
Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get: function () { return project_js_1.getDefaultProjectName; } });
|
|
12
12
|
// Update using yarn bump-version
|
|
13
|
-
exports.__version__ = "0.3.49-rc.3";
|
|
13
|
+
exports.__version__ = "0.3.49";
|
package/dist/index.d.ts
CHANGED
|
@@ -3,4 +3,4 @@ export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, }
|
|
|
3
3
|
export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
5
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
6
|
-
export declare const __version__ = "0.3.49-rc.3";
|
|
6
|
+
export declare const __version__ = "0.3.49";
|
package/dist/index.js
CHANGED
|
@@ -3,4 +3,4 @@ export { RunTree } from "./run_trees.js";
|
|
|
3
3
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
4
4
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
5
5
|
// Update using yarn bump-version
|
|
6
|
-
export const __version__ = "0.3.49-rc.3";
|
|
6
|
+
export const __version__ = "0.3.49";
|
package/dist/utils/vercel.cjs
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.extractUsageMetadata = extractUsageMetadata;
|
|
4
|
-
function extractInputTokenDetails(providerMetadata) {
|
|
4
|
+
function extractInputTokenDetails(providerMetadata, spanAttributes) {
|
|
5
5
|
const inputTokenDetails = {};
|
|
6
6
|
if (providerMetadata.anthropic != null &&
|
|
7
7
|
typeof providerMetadata.anthropic === "object") {
|
|
@@ -24,6 +24,10 @@ function extractInputTokenDetails(providerMetadata) {
|
|
|
24
24
|
typeof openai.cachedPromptTokens === "number") {
|
|
25
25
|
inputTokenDetails.cache_read = openai.cachedPromptTokens;
|
|
26
26
|
}
|
|
27
|
+
else if (typeof spanAttributes?.["ai.usage.cachedInputTokens"] === "number") {
|
|
28
|
+
inputTokenDetails.cache_read =
|
|
29
|
+
spanAttributes["ai.usage.cachedInputTokens"];
|
|
30
|
+
}
|
|
27
31
|
}
|
|
28
32
|
return inputTokenDetails;
|
|
29
33
|
}
|
|
@@ -41,17 +45,22 @@ function extractUsageMetadata(span) {
|
|
|
41
45
|
output_tokens: 0,
|
|
42
46
|
total_tokens: 0,
|
|
43
47
|
};
|
|
44
|
-
if (typeof span.attributes["ai.usage.promptTokens"] === "number"
|
|
45
|
-
|
|
48
|
+
if (typeof span.attributes["ai.usage.promptTokens"] === "number" ||
|
|
49
|
+
typeof span.attributes["ai.usage.inputTokens"] === "number") {
|
|
50
|
+
usageMetadata.input_tokens =
|
|
51
|
+
span.attributes["ai.usage.promptTokens"] ??
|
|
52
|
+
span.attributes["ai.usage.inputTokens"];
|
|
46
53
|
}
|
|
47
|
-
if (typeof span.attributes["ai.usage.completionTokens"] === "number"
|
|
48
|
-
|
|
54
|
+
if (typeof span.attributes["ai.usage.completionTokens"] === "number" ||
|
|
55
|
+
typeof span.attributes["ai.usage.outputTokens"] === "number") {
|
|
56
|
+
usageMetadata.output_tokens =
|
|
57
|
+
span.attributes["ai.usage.completionTokens"] ??
|
|
58
|
+
span.attributes["ai.usage.outputTokens"];
|
|
49
59
|
}
|
|
50
60
|
if (typeof span.attributes["ai.response.providerMetadata"] === "string") {
|
|
51
61
|
try {
|
|
52
62
|
const providerMetadata = JSON.parse(span.attributes["ai.response.providerMetadata"]);
|
|
53
|
-
usageMetadata.input_token_details =
|
|
54
|
-
extractInputTokenDetails(providerMetadata);
|
|
63
|
+
usageMetadata.input_token_details = extractInputTokenDetails(providerMetadata, span.attributes);
|
|
55
64
|
if (providerMetadata.anthropic != null &&
|
|
56
65
|
typeof providerMetadata.anthropic === "object") {
|
|
57
66
|
// AI SDK does not include Anthropic cache tokens in their stated input token
|
package/dist/utils/vercel.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
function extractInputTokenDetails(providerMetadata) {
|
|
1
|
+
function extractInputTokenDetails(providerMetadata, spanAttributes) {
|
|
2
2
|
const inputTokenDetails = {};
|
|
3
3
|
if (providerMetadata.anthropic != null &&
|
|
4
4
|
typeof providerMetadata.anthropic === "object") {
|
|
@@ -21,6 +21,10 @@ function extractInputTokenDetails(providerMetadata) {
|
|
|
21
21
|
typeof openai.cachedPromptTokens === "number") {
|
|
22
22
|
inputTokenDetails.cache_read = openai.cachedPromptTokens;
|
|
23
23
|
}
|
|
24
|
+
else if (typeof spanAttributes?.["ai.usage.cachedInputTokens"] === "number") {
|
|
25
|
+
inputTokenDetails.cache_read =
|
|
26
|
+
spanAttributes["ai.usage.cachedInputTokens"];
|
|
27
|
+
}
|
|
24
28
|
}
|
|
25
29
|
return inputTokenDetails;
|
|
26
30
|
}
|
|
@@ -38,17 +42,22 @@ export function extractUsageMetadata(span) {
|
|
|
38
42
|
output_tokens: 0,
|
|
39
43
|
total_tokens: 0,
|
|
40
44
|
};
|
|
41
|
-
if (typeof span.attributes["ai.usage.promptTokens"] === "number"
|
|
42
|
-
|
|
45
|
+
if (typeof span.attributes["ai.usage.promptTokens"] === "number" ||
|
|
46
|
+
typeof span.attributes["ai.usage.inputTokens"] === "number") {
|
|
47
|
+
usageMetadata.input_tokens =
|
|
48
|
+
span.attributes["ai.usage.promptTokens"] ??
|
|
49
|
+
span.attributes["ai.usage.inputTokens"];
|
|
43
50
|
}
|
|
44
|
-
if (typeof span.attributes["ai.usage.completionTokens"] === "number"
|
|
45
|
-
|
|
51
|
+
if (typeof span.attributes["ai.usage.completionTokens"] === "number" ||
|
|
52
|
+
typeof span.attributes["ai.usage.outputTokens"] === "number") {
|
|
53
|
+
usageMetadata.output_tokens =
|
|
54
|
+
span.attributes["ai.usage.completionTokens"] ??
|
|
55
|
+
span.attributes["ai.usage.outputTokens"];
|
|
46
56
|
}
|
|
47
57
|
if (typeof span.attributes["ai.response.providerMetadata"] === "string") {
|
|
48
58
|
try {
|
|
49
59
|
const providerMetadata = JSON.parse(span.attributes["ai.response.providerMetadata"]);
|
|
50
|
-
usageMetadata.input_token_details =
|
|
51
|
-
extractInputTokenDetails(providerMetadata);
|
|
60
|
+
usageMetadata.input_token_details = extractInputTokenDetails(providerMetadata, span.attributes);
|
|
52
61
|
if (providerMetadata.anthropic != null &&
|
|
53
62
|
typeof providerMetadata.anthropic === "object") {
|
|
54
63
|
// AI SDK does not include Anthropic cache tokens in their stated input token
|