langsmith 0.3.65 → 0.3.67
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/client.cjs +4 -1
- package/dist/client.d.ts +2 -1
- package/dist/client.js +4 -1
- package/dist/experimental/vercel/middleware.cjs +2 -2
- package/dist/experimental/vercel/middleware.js +2 -2
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/run_trees.d.ts +1 -1
- package/dist/schemas.d.ts +2 -2
- package/dist/traceable.cjs +1 -20
- package/dist/traceable.js +1 -20
- package/dist/utils/jestlike/index.cjs +19 -4
- package/dist/utils/jestlike/index.d.ts +1 -1
- package/dist/utils/jestlike/index.js +19 -4
- package/dist/utils/jestlike/types.d.ts +4 -4
- package/dist/utils/vercel.cjs +54 -17
- package/dist/utils/vercel.d.ts +3 -2
- package/dist/utils/vercel.js +54 -17
- package/dist/vercel.cjs +1 -0
- package/dist/vercel.d.ts +3 -0
- package/dist/vercel.js +1 -0
- package/dist/wrappers/openai.cjs +21 -3
- package/dist/wrappers/openai.js +21 -3
- package/package.json +2 -2
package/README.md
CHANGED
@@ -55,6 +55,7 @@ process.env.LANGSMITH_ENDPOINT = "https://api.smith.langchain.com";
 // process.env.LANGSMITH_ENDPOINT = "https://eu.api.smith.langchain.com"; // If signed up in the EU region
 process.env.LANGSMITH_API_KEY = "<YOUR-LANGSMITH-API-KEY>";
 // process.env.LANGSMITH_PROJECT = "My Project Name"; // Optional: "default" is used if not set
+// process.env.LANGSMITH_WORKSPACE_ID = "<YOUR-WORKSPACE-ID>"; // Required for org-scoped API keys
 ```
 
 > **Tip:** Projects are groups of traces. All runs are logged to a project. If not specified, the project is set to `default`.
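A minimal sketch of the configuration the README documents, including the new `LANGSMITH_WORKSPACE_ID` variable; the values are placeholders and the client construction is an assumed usage pattern, not part of the package diff.

```ts
import { Client } from "langsmith";

// Placeholders; LANGSMITH_WORKSPACE_ID is only required for org-scoped API keys.
process.env.LANGSMITH_API_KEY = "<YOUR-LANGSMITH-API-KEY>";
process.env.LANGSMITH_WORKSPACE_ID = "<YOUR-WORKSPACE-ID>";

// The client reads these environment variables when it is constructed.
const client = new Client();
```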
package/dist/client.cjs
CHANGED
@@ -1987,7 +1987,7 @@ class Client {
 }
 throw new Error("No projects found to resolve tenant.");
 }
-async *listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, datasetVersion, referenceFree, metadata, } = {}) {
+async *listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, includeStats, datasetVersion, referenceFree, metadata, } = {}) {
 const params = new URLSearchParams();
 if (projectIds !== undefined) {
 for (const projectId of projectIds) {
@@ -2009,6 +2009,9 @@ class Client {
 });
 params.append("reference_dataset", dataset.id);
 }
+if (includeStats !== undefined) {
+params.append("include_stats", includeStats.toString());
+}
 if (datasetVersion !== undefined) {
 params.append("dataset_version", datasetVersion);
 }
package/dist/client.d.ts
CHANGED
@@ -556,12 +556,13 @@ export declare class Client implements LangSmithTracingClientInterface {
 datasetName?: string;
 }): Promise<string>;
 private _getTenantId;
-listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, datasetVersion, referenceFree, metadata, }?: {
+listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, includeStats, datasetVersion, referenceFree, metadata, }?: {
 projectIds?: string[];
 name?: string;
 nameContains?: string;
 referenceDatasetId?: string;
 referenceDatasetName?: string;
+includeStats?: boolean;
 datasetVersion?: string;
 referenceFree?: boolean;
 metadata?: RecordStringAny;
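A minimal sketch (assumed usage) of the new `includeStats` option: `listProjects` is an async generator, so the flag rides along with the existing filters and the results are consumed with `for await`.

```ts
import { Client } from "langsmith";

const client = new Client();

// includeStats is forwarded as the `include_stats` query parameter (see client.cjs above).
for await (const project of client.listProjects({ includeStats: true })) {
  console.log(project.name);
}
```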
package/dist/client.js
CHANGED
@@ -1949,7 +1949,7 @@ export class Client {
 }
 throw new Error("No projects found to resolve tenant.");
 }
-async *listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, datasetVersion, referenceFree, metadata, } = {}) {
+async *listProjects({ projectIds, name, nameContains, referenceDatasetId, referenceDatasetName, includeStats, datasetVersion, referenceFree, metadata, } = {}) {
 const params = new URLSearchParams();
 if (projectIds !== undefined) {
 for (const projectId of projectIds) {
@@ -1971,6 +1971,9 @@ export class Client {
 });
 params.append("reference_dataset", dataset.id);
 }
+if (includeStats !== undefined) {
+params.append("include_stats", includeStats.toString());
+}
 if (datasetVersion !== undefined) {
 params.append("dataset_version", datasetVersion);
 }
package/dist/experimental/vercel/middleware.cjs
CHANGED
@@ -31,8 +31,8 @@ const setUsageMetadataOnRunTree = (result, runTree) => {
 output_tokens: result.usage?.outputTokens,
 total_tokens: result.usage?.totalTokens,
 };
-const inputTokenDetails = (0, vercel_js_1.extractInputTokenDetails)(result.
-const outputTokenDetails = (0, vercel_js_1.extractOutputTokenDetails)(result.usage
+const inputTokenDetails = (0, vercel_js_1.extractInputTokenDetails)(result.usage, result.providerMetadata);
+const outputTokenDetails = (0, vercel_js_1.extractOutputTokenDetails)(result.usage, result.providerMetadata);
 runTree.extra = {
 ...runTree.extra,
 metadata: {
package/dist/experimental/vercel/middleware.js
CHANGED
@@ -28,8 +28,8 @@ const setUsageMetadataOnRunTree = (result, runTree) => {
 output_tokens: result.usage?.outputTokens,
 total_tokens: result.usage?.totalTokens,
 };
-const inputTokenDetails = extractInputTokenDetails(result.
-const outputTokenDetails = extractOutputTokenDetails(result.usage
+const inputTokenDetails = extractInputTokenDetails(result.usage, result.providerMetadata);
+const outputTokenDetails = extractOutputTokenDetails(result.usage, result.providerMetadata);
 runTree.extra = {
 ...runTree.extra,
 metadata: {
package/dist/index.cjs
CHANGED
@@ -10,4 +10,4 @@ Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true
 var project_js_1 = require("./utils/project.cjs");
 Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get: function () { return project_js_1.getDefaultProjectName; } });
 // Update using yarn bump-version
-exports.__version__ = "0.3.65";
+exports.__version__ = "0.3.67";
package/dist/index.d.ts
CHANGED
@@ -3,4 +3,4 @@ export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, }
 export { RunTree, type RunTreeConfig } from "./run_trees.js";
 export { overrideFetchImplementation } from "./singletons/fetch.js";
 export { getDefaultProjectName } from "./utils/project.js";
-export declare const __version__ = "0.3.65";
+export declare const __version__ = "0.3.67";
package/dist/index.js
CHANGED
@@ -3,4 +3,4 @@ export { RunTree } from "./run_trees.js";
 export { overrideFetchImplementation } from "./singletons/fetch.js";
 export { getDefaultProjectName } from "./utils/project.js";
 // Update using yarn bump-version
-export const __version__ = "0.3.65";
+export const __version__ = "0.3.67";
package/dist/run_trees.d.ts
CHANGED
@@ -121,7 +121,7 @@ export declare class RunTree implements BaseRun {
 addEvent(event: RunEvent | string): void;
 static fromRunnableConfig(parentConfig: RunnableConfigLike, props: RunTreeConfig): RunTree;
 static fromDottedOrder(dottedOrder: string): RunTree | undefined;
-static fromHeaders(headers: Record<string, string | string[]> | HeadersLike, inheritArgs?: RunTreeConfig): RunTree | undefined;
+static fromHeaders(headers: Record<string, string | string[]> | HeadersLike, inheritArgs?: Partial<RunTreeConfig>): RunTree | undefined;
 toHeaders(headers?: HeadersLike): {
 "langsmith-trace": string;
 baggage: string;
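A minimal sketch of why the looser `Partial<RunTreeConfig>` matters: callers reconstructing a run tree from incoming headers can now pass only the fields they want to override (the header value below is a placeholder).

```ts
import { RunTree } from "langsmith";

// Headers as emitted by RunTree.toHeaders() on the calling service; value is a placeholder.
const headers = { "langsmith-trace": "<dotted-order-from-upstream>" };

// inheritArgs is now Partial<RunTreeConfig>, so a full config (including `name`) is not required.
const runTree = RunTree.fromHeaders(headers, { metadata: { source: "http" } });
```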
package/dist/schemas.d.ts
CHANGED
@@ -485,7 +485,7 @@ export type InputTokenDetails = {
 * Since there was a cache miss, the cache was created from these tokens.
 */
 cache_creation?: number;
-}
+} & Record<string, number>;
 /**
 * Breakdown of output token counts.
 *
@@ -503,7 +503,7 @@ export type OutputTokenDetails = {
 * OpenAI's o1 models) that are not returned as part of model output.
 */
 reasoning?: number;
-}
+} & Record<string, number>;
 /**
 * Usage metadata for a message, such as token counts.
 */
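Illustrative only (the entrypoint and values are assumptions): with the added `& Record<string, number>` intersection, token-detail objects may carry extra numeric keys, such as the service-tier-prefixed keys written by the OpenAI wrapper further down, without tripping the runtime validation removed from `traceable` in this release.

```ts
import type { InputTokenDetails, OutputTokenDetails } from "langsmith/schemas";

// Hypothetical values: documented keys and tier-specific extras are both accepted now.
const inputDetails: InputTokenDetails = {
  cache_read: 200,
  flex_cache_read: 200, // extra numeric key permitted by the widened type
};

const outputDetails: OutputTokenDetails = {
  reasoning: 64,
  flex: 436,
};
```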
package/dist/traceable.cjs
CHANGED
@@ -105,25 +105,6 @@ const _extractUsage = (runData) => {
 .usage_metadata;
 return runData.outputs?.usage_metadata ?? usageMetadataFromMetadata;
 };
-function validateExtractedUsageMetadata(data) {
-const allowedKeys = new Set([
-"input_tokens",
-"output_tokens",
-"total_tokens",
-"input_token_details",
-"output_token_details",
-"input_cost",
-"output_cost",
-"total_cost",
-"input_cost_details",
-"output_cost_details",
-]);
-const extraKeys = Object.keys(data).filter((key) => !allowedKeys.has(key));
-if (extraKeys.length > 0) {
-throw new Error(`Unexpected keys in usage metadata: ${extraKeys.join(", ")}`);
-}
-return data;
-}
 async function handleEnd(params) {
 const { runTree, on_end, postRunPromise, excludeInputs } = params;
 const onEnd = on_end;
@@ -152,7 +133,7 @@ const _populateUsageMetadata = (processedOutputs, runTree) => {
 if (usageMetadata !== undefined) {
 runTree.extra.metadata = {
 ...runTree.extra.metadata,
-usage_metadata:
+usage_metadata: usageMetadata,
 };
 processedOutputs.usage_metadata = usageMetadata;
 }
package/dist/traceable.js
CHANGED
@@ -101,25 +101,6 @@ const _extractUsage = (runData) => {
 .usage_metadata;
 return runData.outputs?.usage_metadata ?? usageMetadataFromMetadata;
 };
-function validateExtractedUsageMetadata(data) {
-const allowedKeys = new Set([
-"input_tokens",
-"output_tokens",
-"total_tokens",
-"input_token_details",
-"output_token_details",
-"input_cost",
-"output_cost",
-"total_cost",
-"input_cost_details",
-"output_cost_details",
-]);
-const extraKeys = Object.keys(data).filter((key) => !allowedKeys.has(key));
-if (extraKeys.length > 0) {
-throw new Error(`Unexpected keys in usage metadata: ${extraKeys.join(", ")}`);
-}
-return data;
-}
 async function handleEnd(params) {
 const { runTree, on_end, postRunPromise, excludeInputs } = params;
 const onEnd = on_end;
@@ -148,7 +129,7 @@ const _populateUsageMetadata = (processedOutputs, runTree) => {
 if (usageMetadata !== undefined) {
 runTree.extra.metadata = {
 ...runTree.extra.metadata,
-usage_metadata:
+usage_metadata: usageMetadata,
 };
 processedOutputs.usage_metadata = usageMetadata;
 }
package/dist/utils/jestlike/index.cjs
CHANGED
@@ -161,17 +161,29 @@ function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 return (0, uuid_1.v5)(identifier, constants_js_1.UUID5_NAMESPACE);
 }
 async function syncExample(params) {
-const { client, exampleId, inputs, outputs, metadata, createdAt, datasetId, } = params;
+const { client, exampleId, inputs, outputs, metadata, split, createdAt, datasetId, } = params;
 let example;
 try {
 example = await client.readExample(exampleId);
+const normalizedSplit = split
+? typeof split === "string"
+? [split]
+: split
+: undefined;
+const { dataset_split, ...restMetadata } = example.metadata ?? {};
 if (_objectHash(example.inputs) !== _objectHash(inputs) ||
 _objectHash(example.outputs ?? {}) !== _objectHash(outputs ?? {}) ||
-example.dataset_id !== datasetId
+example.dataset_id !== datasetId ||
+(normalizedSplit !== undefined &&
+_objectHash(dataset_split ?? []) !==
+_objectHash(normalizedSplit ?? [])) ||
+(metadata !== undefined &&
+_objectHash(restMetadata ?? {}) !== _objectHash(metadata ?? {}))) {
 await client.updateExample(exampleId, {
 inputs,
 outputs,
 metadata,
+split,
 dataset_id: datasetId,
 });
 }
@@ -182,6 +194,7 @@ function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 exampleId,
 datasetId,
 createdAt: new Date(createdAt ?? new Date()),
+split,
 metadata,
 });
 }
@@ -398,7 +411,7 @@ function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 lsParams.config?.enableTestTracking !== undefined) {
 context.enableTestTracking = lsParams.config.enableTestTracking;
 }
-const { id, config, inputs, ...rest } = lsParams;
+const { id, config, inputs, split, metadata, ...rest } = lsParams;
 let referenceOutputs = rest.referenceOutputs;
 if (!referenceOutputs && "outputs" in rest) {
 referenceOutputs = rest.outputs;
@@ -471,6 +484,7 @@ function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 datasetId: dataset?.id,
 testTrackingEnabled: (0, globals_js_1.trackingEnabled)(testContext),
 repetition: i,
+split,
 },
 }));
 (0, globals_js_1._logTestFeedback)({
@@ -536,7 +550,8 @@ function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 datasetId: dataset.id,
 inputs,
 outputs: referenceOutputs ?? {},
-metadata
+metadata,
+split,
 createdAt,
 }));
 }
package/dist/utils/jestlike/index.d.ts
CHANGED
@@ -6,7 +6,7 @@ export declare function logFeedback(feedback: SimpleEvaluationResult, config?: {
 sourceRunId?: string;
 }): void;
 export declare function logOutputs(output: Record<string, unknown>): void;
-export declare function _objectHash(obj: KVMap, depth?: number): string;
+export declare function _objectHash(obj: KVMap | unknown[], depth?: number): string;
 export declare function generateWrapperFromJestlikeMethods(methods: Record<string, any>, testRunnerName: string): {
 test: (<I extends Record<string, any> = Record<string, any>, O extends Record<string, any> = Record<string, any>>(name: string, lsParams: LangSmithJestlikeWrapperParams<I, O>, testFn: LangSmithJestlikeTestFunction<I, O>, timeout?: number) => void) & {
 only: (<I extends Record<string, any> = Record<string, any>, O extends Record<string, any> = Record<string, any>>(name: string, lsParams: LangSmithJestlikeWrapperParams<I, O>, testFn: LangSmithJestlikeTestFunction<I, O>, timeout?: number) => void) & {
package/dist/utils/jestlike/index.js
CHANGED
@@ -114,17 +114,29 @@ export function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 return v5(identifier, UUID5_NAMESPACE);
 }
 async function syncExample(params) {
-const { client, exampleId, inputs, outputs, metadata, createdAt, datasetId, } = params;
+const { client, exampleId, inputs, outputs, metadata, split, createdAt, datasetId, } = params;
 let example;
 try {
 example = await client.readExample(exampleId);
+const normalizedSplit = split
+? typeof split === "string"
+? [split]
+: split
+: undefined;
+const { dataset_split, ...restMetadata } = example.metadata ?? {};
 if (_objectHash(example.inputs) !== _objectHash(inputs) ||
 _objectHash(example.outputs ?? {}) !== _objectHash(outputs ?? {}) ||
-example.dataset_id !== datasetId
+example.dataset_id !== datasetId ||
+(normalizedSplit !== undefined &&
+_objectHash(dataset_split ?? []) !==
+_objectHash(normalizedSplit ?? [])) ||
+(metadata !== undefined &&
+_objectHash(restMetadata ?? {}) !== _objectHash(metadata ?? {}))) {
 await client.updateExample(exampleId, {
 inputs,
 outputs,
 metadata,
+split,
 dataset_id: datasetId,
 });
 }
@@ -135,6 +147,7 @@ export function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 exampleId,
 datasetId,
 createdAt: new Date(createdAt ?? new Date()),
+split,
 metadata,
 });
 }
@@ -351,7 +364,7 @@ export function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 lsParams.config?.enableTestTracking !== undefined) {
 context.enableTestTracking = lsParams.config.enableTestTracking;
 }
-const { id, config, inputs, ...rest } = lsParams;
+const { id, config, inputs, split, metadata, ...rest } = lsParams;
 let referenceOutputs = rest.referenceOutputs;
 if (!referenceOutputs && "outputs" in rest) {
 referenceOutputs = rest.outputs;
@@ -424,6 +437,7 @@ export function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 datasetId: dataset?.id,
 testTrackingEnabled: trackingEnabled(testContext),
 repetition: i,
+split,
 },
 }));
 _logTestFeedback({
@@ -489,7 +503,8 @@ export function generateWrapperFromJestlikeMethods(methods, testRunnerName) {
 datasetId: dataset.id,
 inputs,
 outputs: referenceOutputs ?? {},
-metadata
+metadata,
+split,
 createdAt,
 }));
 }
package/dist/utils/jestlike/types.d.ts
CHANGED
@@ -1,6 +1,5 @@
-import { CreateProjectParams } from "../../client.js";
-import { EvaluationResult } from "../../evaluation/evaluator.js";
-import { Client } from "../../index.js";
+import type { CreateProjectParams, CreateExampleOptions, Client } from "../../client.js";
+import type { EvaluationResult } from "../../evaluation/evaluator.js";
 import type { RunTreeConfig } from "../../run_trees.js";
 import type { SimpleEvaluator } from "./vendor/evaluatedBy.js";
 export { type SimpleEvaluator };
@@ -15,7 +14,7 @@ export type LangSmithJestlikeWrapperParams<I, O> = {
 inputs: I;
 referenceOutputs?: O;
 config?: LangSmithJestlikeWrapperConfig;
-}
+} & Pick<CreateExampleOptions, "split" | "metadata">;
 export type LangSmithJestlikeDescribeWrapperConfig = {
 client?: Client;
 enableTestTracking?: boolean;
@@ -35,6 +34,7 @@ export type LangSmithJestlikeTestMetadata = {
 datasetId?: string;
 testTrackingEnabled: boolean;
 repetition: number;
+split?: string | string[];
 };
 export type LangSmithJestlikeTestFunction<I, O> = (data: {
 inputs: I;
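A minimal sketch (assuming the `langsmith/jest` entrypoint that wraps these jestlike helpers) of the widened `LangSmithJestlikeWrapperParams`: `split` and `metadata` are now accepted next to `inputs`/`referenceOutputs` and synced onto the backing dataset example.

```ts
import * as ls from "langsmith/jest";

ls.describe("my dataset", () => {
  ls.test(
    "handles a simple question",
    {
      inputs: { question: "What is 2 + 2?" },
      referenceOutputs: { answer: "4" },
      split: "smoke", // new: string or string[], stored as the example's dataset split
      metadata: { difficulty: "easy" }, // new: synced onto the example's metadata
    },
    async ({ inputs, referenceOutputs }) => {
      // run the system under test and log outputs / feedback here
    }
  );
});
```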
package/dist/utils/vercel.cjs
CHANGED
@@ -3,17 +3,41 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.extractOutputTokenDetails = extractOutputTokenDetails;
 exports.extractInputTokenDetails = extractInputTokenDetails;
 exports.extractUsageMetadata = extractUsageMetadata;
-function
+function extractTraceableServiceTier(providerMetadata) {
+if (providerMetadata?.openai != null &&
+typeof providerMetadata.openai === "object") {
+const openai = providerMetadata.openai;
+if (openai.serviceTier != null &&
+typeof openai.serviceTier === "string" &&
+["priority", "flex"].includes(openai.serviceTier)) {
+return openai.serviceTier;
+}
+}
+return undefined;
+}
+function extractOutputTokenDetails(usage, providerMetadata) {
+const openAIServiceTier = extractTraceableServiceTier(providerMetadata ?? {});
+const outputTokenDetailsKeyPrefix = openAIServiceTier
+? `${openAIServiceTier}_`
+: "";
 const outputTokenDetails = {};
-if (typeof reasoningTokens === "number") {
-outputTokenDetails
+if (typeof usage?.reasoningTokens === "number") {
+outputTokenDetails[`${outputTokenDetailsKeyPrefix}reasoning`] =
+usage.reasoningTokens;
+}
+if (openAIServiceTier && typeof usage?.outputTokens === "number") {
+// Avoid counting reasoning tokens towards the output token count
+// since service tier tokens are already priced differently
+outputTokenDetails[openAIServiceTier] =
+usage.outputTokens -
+(outputTokenDetails[`${outputTokenDetailsKeyPrefix}reasoning`] ?? 0);
 }
 return outputTokenDetails;
 }
-function extractInputTokenDetails(
+function extractInputTokenDetails(usage, providerMetadata) {
 const inputTokenDetails = {};
-if (providerMetadata
-typeof providerMetadata
+if (providerMetadata?.anthropic != null &&
+typeof providerMetadata?.anthropic === "object") {
 const anthropic = providerMetadata.anthropic;
 if (anthropic.usage != null && typeof anthropic.usage === "object") {
 // Raw usage from Anthropic returned in AI SDK 5
@@ -54,15 +78,28 @@ function extractInputTokenDetails(providerMetadata, cachedTokenUsage) {
 }
 return inputTokenDetails;
 }
-else if (providerMetadata
-typeof providerMetadata
-const
-
-
-
+else if (providerMetadata?.openai != null &&
+typeof providerMetadata?.openai === "object") {
+const openAIServiceTier = extractTraceableServiceTier(providerMetadata ?? {});
+const outputTokenDetailsKeyPrefix = openAIServiceTier
+? `${openAIServiceTier}_`
+: "";
+if (typeof usage?.cachedInputTokens === "number") {
+inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] =
+usage.cachedInputTokens;
+}
+else if ("cachedPromptTokens" in providerMetadata.openai &&
+providerMetadata.openai.cachedPromptTokens != null &&
+typeof providerMetadata.openai.cachedPromptTokens === "number") {
+inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] =
+providerMetadata.openai.cachedPromptTokens;
 }
-
-
+if (openAIServiceTier && typeof usage?.inputTokens === "number") {
+// Avoid counting cached input tokens towards the input token count
+// since service tier tokens are already priced differently
+inputTokenDetails[openAIServiceTier] =
+usage.inputTokens -
+(inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] ?? 0);
 }
 }
 return inputTokenDetails;
@@ -96,9 +133,9 @@ function extractUsageMetadata(span) {
 if (typeof span.attributes["ai.response.providerMetadata"] === "string") {
 try {
 const providerMetadata = JSON.parse(span.attributes["ai.response.providerMetadata"]);
-usageMetadata.input_token_details = extractInputTokenDetails(
-? span.attributes["ai.usage.cachedInputTokens"]
-: undefined);
+usageMetadata.input_token_details = extractInputTokenDetails(typeof span.attributes["ai.usage.cachedInputTokens"] === "number"
+? { cachedInputTokens: span.attributes["ai.usage.cachedInputTokens"] }
+: undefined, providerMetadata);
 if (providerMetadata.anthropic != null &&
 typeof providerMetadata.anthropic === "object") {
 // AI SDK does not include Anthropic cache tokens in their stated input token
package/dist/utils/vercel.d.ts
CHANGED
@@ -1,6 +1,7 @@
+import type { LanguageModelV2Usage } from "@ai-sdk/provider";
 import { KVMap } from "../schemas.js";
-export declare function extractOutputTokenDetails(
-export declare function extractInputTokenDetails(providerMetadata
+export declare function extractOutputTokenDetails(usage?: Partial<LanguageModelV2Usage>, providerMetadata?: Record<string, unknown>): Record<string, number>;
+export declare function extractInputTokenDetails(usage?: Partial<LanguageModelV2Usage>, providerMetadata?: Record<string, unknown>): Record<string, number>;
 export declare function extractUsageMetadata(span?: {
 status?: {
 code: number;
package/dist/utils/vercel.js
CHANGED
@@ -1,14 +1,38 @@
-
+function extractTraceableServiceTier(providerMetadata) {
+if (providerMetadata?.openai != null &&
+typeof providerMetadata.openai === "object") {
+const openai = providerMetadata.openai;
+if (openai.serviceTier != null &&
+typeof openai.serviceTier === "string" &&
+["priority", "flex"].includes(openai.serviceTier)) {
+return openai.serviceTier;
+}
+}
+return undefined;
+}
+export function extractOutputTokenDetails(usage, providerMetadata) {
+const openAIServiceTier = extractTraceableServiceTier(providerMetadata ?? {});
+const outputTokenDetailsKeyPrefix = openAIServiceTier
+? `${openAIServiceTier}_`
+: "";
 const outputTokenDetails = {};
-if (typeof reasoningTokens === "number") {
-outputTokenDetails
+if (typeof usage?.reasoningTokens === "number") {
+outputTokenDetails[`${outputTokenDetailsKeyPrefix}reasoning`] =
+usage.reasoningTokens;
+}
+if (openAIServiceTier && typeof usage?.outputTokens === "number") {
+// Avoid counting reasoning tokens towards the output token count
+// since service tier tokens are already priced differently
+outputTokenDetails[openAIServiceTier] =
+usage.outputTokens -
+(outputTokenDetails[`${outputTokenDetailsKeyPrefix}reasoning`] ?? 0);
 }
 return outputTokenDetails;
 }
-export function extractInputTokenDetails(
+export function extractInputTokenDetails(usage, providerMetadata) {
 const inputTokenDetails = {};
-if (providerMetadata
-typeof providerMetadata
+if (providerMetadata?.anthropic != null &&
+typeof providerMetadata?.anthropic === "object") {
 const anthropic = providerMetadata.anthropic;
 if (anthropic.usage != null && typeof anthropic.usage === "object") {
 // Raw usage from Anthropic returned in AI SDK 5
@@ -49,15 +73,28 @@ export function extractInputTokenDetails(providerMetadata, cachedTokenUsage) {
 }
 return inputTokenDetails;
 }
-else if (providerMetadata
-typeof providerMetadata
-const
-
-
-
+else if (providerMetadata?.openai != null &&
+typeof providerMetadata?.openai === "object") {
+const openAIServiceTier = extractTraceableServiceTier(providerMetadata ?? {});
+const outputTokenDetailsKeyPrefix = openAIServiceTier
+? `${openAIServiceTier}_`
+: "";
+if (typeof usage?.cachedInputTokens === "number") {
+inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] =
+usage.cachedInputTokens;
+}
+else if ("cachedPromptTokens" in providerMetadata.openai &&
+providerMetadata.openai.cachedPromptTokens != null &&
+typeof providerMetadata.openai.cachedPromptTokens === "number") {
+inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] =
+providerMetadata.openai.cachedPromptTokens;
 }
-
-
+if (openAIServiceTier && typeof usage?.inputTokens === "number") {
+// Avoid counting cached input tokens towards the input token count
+// since service tier tokens are already priced differently
+inputTokenDetails[openAIServiceTier] =
+usage.inputTokens -
+(inputTokenDetails[`${outputTokenDetailsKeyPrefix}cache_read`] ?? 0);
 }
 }
 return inputTokenDetails;
@@ -91,9 +128,9 @@ export function extractUsageMetadata(span) {
 if (typeof span.attributes["ai.response.providerMetadata"] === "string") {
 try {
 const providerMetadata = JSON.parse(span.attributes["ai.response.providerMetadata"]);
-usageMetadata.input_token_details = extractInputTokenDetails(
-? span.attributes["ai.usage.cachedInputTokens"]
-: undefined);
+usageMetadata.input_token_details = extractInputTokenDetails(typeof span.attributes["ai.usage.cachedInputTokens"] === "number"
+? { cachedInputTokens: span.attributes["ai.usage.cachedInputTokens"] }
+: undefined, providerMetadata);
 if (providerMetadata.anthropic != null &&
 typeof providerMetadata.anthropic === "object") {
 // AI SDK does not include Anthropic cache tokens in their stated input token
package/dist/vercel.cjs
CHANGED
@@ -265,6 +265,7 @@ function getParentSpanId(span) {
 return (span.parentSpanId ?? span.parentSpanContext?.spanId ?? undefined);
 }
 /**
+* @deprecated Use `wrapAISDK` from `langsmith/experimental/vercel` instead.
 * OpenTelemetry trace exporter for Vercel AI SDK.
 *
 * @example
package/dist/vercel.d.ts
CHANGED
@@ -1,6 +1,8 @@
 import type { generateText } from "ai";
 import { Client } from "./index.js";
+/** @deprecated Use `wrapAISDK` from `langsmith/experimental/vercel` instead. */
 export type AITelemetrySettings = Exclude<Parameters<typeof generateText>[0]["experimental_telemetry"], undefined>;
+/** @deprecated Use `wrapAISDK` from `langsmith/experimental/vercel` instead. */
 export interface TelemetrySettings extends AITelemetrySettings {
 /** ID of the run sent to LangSmith */
 runId?: string;
@@ -9,6 +11,7 @@ export interface TelemetrySettings extends AITelemetrySettings {
 }
 export declare const parseStrippedIsoTime: (stripped: string) => string;
 /**
+* @deprecated Use `wrapAISDK` from `langsmith/experimental/vercel` instead.
 * OpenTelemetry trace exporter for Vercel AI SDK.
 *
 * @example
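The new deprecation notices point at the experimental wrapper; a rough sketch of the suggested replacement, assuming `wrapAISDK` wraps the AI SDK module's entry points as referenced by the `langsmith/experimental/vercel` entrypoint above:

```ts
import * as ai from "ai";
import { openai } from "@ai-sdk/openai";
import { wrapAISDK } from "langsmith/experimental/vercel";

// Wrap once, then call the wrapped functions as usual; traces are sent to LangSmith
// without configuring the OpenTelemetry exporter deprecated above.
const { generateText } = wrapAISDK(ai);

const { text } = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "Say hello.",
});
console.log(text);
```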
package/dist/vercel.js
CHANGED
@@ -261,6 +261,7 @@ function getParentSpanId(span) {
 return (span.parentSpanId ?? span.parentSpanContext?.spanId ?? undefined);
 }
 /**
+* @deprecated Use `wrapAISDK` from `langsmith/experimental/vercel` instead.
 * OpenTelemetry trace exporter for Vercel AI SDK.
 *
 * @example
package/dist/wrappers/openai.cjs
CHANGED
@@ -72,7 +72,8 @@ function _combineChatCompletionChoices(choices
 }
 return {
 index: choices[0].index,
-finish_reason: reversedChoices.find((c) => c.finish_reason) || null
+finish_reason: (reversedChoices.find((c) => c.finish_reason) || null)
+?.finish_reason,
 message: message,
 };
 }
@@ -128,6 +129,12 @@ const textAggregator = (allChunks
 };
 function processChatCompletion(outputs) {
 const chatCompletion = outputs;
+const recognizedServiceTier = ["priority", "flex"].includes(chatCompletion.service_tier ?? "")
+? chatCompletion.service_tier
+: undefined;
+const serviceTierPrefix = recognizedServiceTier
+? `${recognizedServiceTier}_`
+: "";
 // copy the original object, minus usage
 const result = { ...chatCompletion };
 const usage = chatCompletion.usage;
@@ -137,7 +144,7 @@ function processChatCompletion(outputs) {
 audio: usage.prompt_tokens_details?.audio_tokens,
 }),
 ...(usage.prompt_tokens_details?.cached_tokens !== null && {
-cache_read: usage.prompt_tokens_details?.cached_tokens,
+[`${serviceTierPrefix}cache_read`]: usage.prompt_tokens_details?.cached_tokens,
 }),
 };
 const outputTokenDetails = {
@@ -145,9 +152,20 @@ function processChatCompletion(outputs) {
 audio: usage.completion_tokens_details?.audio_tokens,
 }),
 ...(usage.completion_tokens_details?.reasoning_tokens !== null && {
-reasoning: usage.completion_tokens_details?.reasoning_tokens,
+[`${serviceTierPrefix}reasoning`]: usage.completion_tokens_details?.reasoning_tokens,
 }),
 };
+if (recognizedServiceTier) {
+// Avoid counting cache read and reasoning tokens towards the
+// service tier token count since service tier tokens are already
+// priced differently
+inputTokenDetails[recognizedServiceTier] =
+usage.prompt_tokens -
+(inputTokenDetails[`${serviceTierPrefix}cache_read`] ?? 0);
+outputTokenDetails[recognizedServiceTier] =
+usage.completion_tokens -
+(outputTokenDetails[`${serviceTierPrefix}reasoning`] ?? 0);
+}
 result.usage_metadata = {
 input_tokens: usage.prompt_tokens ?? 0,
 output_tokens: usage.completion_tokens ?? 0,
package/dist/wrappers/openai.js
CHANGED
@@ -69,7 +69,8 @@ function _combineChatCompletionChoices(choices
 }
 return {
 index: choices[0].index,
-finish_reason: reversedChoices.find((c) => c.finish_reason) || null
+finish_reason: (reversedChoices.find((c) => c.finish_reason) || null)
+?.finish_reason,
 message: message,
 };
 }
@@ -125,6 +126,12 @@ const textAggregator = (allChunks
 };
 function processChatCompletion(outputs) {
 const chatCompletion = outputs;
+const recognizedServiceTier = ["priority", "flex"].includes(chatCompletion.service_tier ?? "")
+? chatCompletion.service_tier
+: undefined;
+const serviceTierPrefix = recognizedServiceTier
+? `${recognizedServiceTier}_`
+: "";
 // copy the original object, minus usage
 const result = { ...chatCompletion };
 const usage = chatCompletion.usage;
@@ -134,7 +141,7 @@ function processChatCompletion(outputs) {
 audio: usage.prompt_tokens_details?.audio_tokens,
 }),
 ...(usage.prompt_tokens_details?.cached_tokens !== null && {
-cache_read: usage.prompt_tokens_details?.cached_tokens,
+[`${serviceTierPrefix}cache_read`]: usage.prompt_tokens_details?.cached_tokens,
 }),
 };
 const outputTokenDetails = {
@@ -142,9 +149,20 @@ function processChatCompletion(outputs) {
 audio: usage.completion_tokens_details?.audio_tokens,
 }),
 ...(usage.completion_tokens_details?.reasoning_tokens !== null && {
-reasoning: usage.completion_tokens_details?.reasoning_tokens,
+[`${serviceTierPrefix}reasoning`]: usage.completion_tokens_details?.reasoning_tokens,
 }),
 };
+if (recognizedServiceTier) {
+// Avoid counting cache read and reasoning tokens towards the
+// service tier token count since service tier tokens are already
+// priced differently
+inputTokenDetails[recognizedServiceTier] =
+usage.prompt_tokens -
+(inputTokenDetails[`${serviceTierPrefix}cache_read`] ?? 0);
+outputTokenDetails[recognizedServiceTier] =
+usage.completion_tokens -
+(outputTokenDetails[`${serviceTierPrefix}reasoning`] ?? 0);
+}
 result.usage_metadata = {
 input_tokens: usage.prompt_tokens ?? 0,
 output_tokens: usage.completion_tokens ?? 0,
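A worked example of the accounting above (all numbers hypothetical): for a `service_tier: "flex"` response with `prompt_tokens: 1000`, `cached_tokens: 200`, `completion_tokens: 500`, and `reasoning_tokens: 100`, the wrapper's token details come out roughly as:

```ts
// Approximate token details produced by processChatCompletion for the response above.
const input_token_details = {
  flex_cache_read: 200, // cached prompt tokens, keyed under the recognized service tier
  flex: 800,            // prompt_tokens minus flex_cache_read
};
const output_token_details = {
  flex_reasoning: 100,  // reasoning tokens, keyed under the recognized service tier
  flex: 400,            // completion_tokens minus flex_reasoning
};
```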
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "langsmith",
-"version": "0.3.65",
+"version": "0.3.67",
 "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
 "packageManager": "yarn@1.22.19",
 "files": [
@@ -149,7 +149,7 @@
 },
 "devDependencies": {
 "@ai-sdk/anthropic": "^2.0.1",
-"@ai-sdk/openai": "^2.0.
+"@ai-sdk/openai": "^2.0.23",
 "@babel/preset-env": "^7.22.4",
 "@faker-js/faker": "^8.4.1",
 "@jest/globals": "^29.5.0",