langsmith 0.1.62 → 0.1.64
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/singletons/traceable.cjs +2 -1
- package/dist/singletons/traceable.js +2 -1
- package/dist/traceable.cjs +1 -1
- package/dist/traceable.js +1 -1
- package/dist/wrappers/openai.cjs +96 -46
- package/dist/wrappers/openai.d.ts +7 -0
- package/dist/wrappers/openai.js +96 -46
- package/package.json +2 -2
package/dist/index.cjs
CHANGED
|
@@ -8,4 +8,4 @@ Object.defineProperty(exports, "RunTree", { enumerable: true, get: function () {
|
|
|
8
8
|
var fetch_js_1 = require("./singletons/fetch.cjs");
|
|
9
9
|
Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true, get: function () { return fetch_js_1.overrideFetchImplementation; } });
|
|
10
10
|
// Update using yarn bump-version
|
|
11
|
-
exports.__version__ = "0.1.62";
|
|
11
|
+
exports.__version__ = "0.1.64";
|
package/dist/index.d.ts
CHANGED
|
@@ -2,4 +2,4 @@ export { Client, type ClientConfig } from "./client.js";
|
|
|
2
2
|
export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, } from "./schemas.js";
|
|
3
3
|
export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
|
-
export declare const __version__ = "0.1.62";
|
|
5
|
+
export declare const __version__ = "0.1.64";
|
package/dist/singletons/traceable.cjs (and .js)
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.isTraceableFunction = exports.ROOT = exports.withRunTree = exports.getCurrentRunTree = exports.AsyncLocalStorageProviderSingleton = void 0;
|
|
4
|
+
const run_trees_js_1 = require("../run_trees.cjs");
|
|
4
5
|
class MockAsyncLocalStorage {
|
|
5
6
|
getStore() {
|
|
6
7
|
return undefined;
|
|
@@ -33,7 +34,7 @@ exports.AsyncLocalStorageProviderSingleton = new AsyncLocalStorageProvider();
|
|
|
33
34
|
*/
|
|
34
35
|
const getCurrentRunTree = () => {
|
|
35
36
|
const runTree = exports.AsyncLocalStorageProviderSingleton.getInstance().getStore();
|
|
36
|
-
if (runTree === undefined) {
|
|
37
|
+
if (!(0, run_trees_js_1.isRunTree)(runTree)) {
|
|
37
38
|
throw new Error([
|
|
38
39
|
"Could not get the current run tree.",
|
|
39
40
|
"",
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { isRunTree } from "../run_trees.js";
|
|
1
2
|
class MockAsyncLocalStorage {
|
|
2
3
|
getStore() {
|
|
3
4
|
return undefined;
|
|
@@ -30,7 +31,7 @@ export const AsyncLocalStorageProviderSingleton = new AsyncLocalStorageProvider(
|
|
|
30
31
|
*/
|
|
31
32
|
export const getCurrentRunTree = () => {
|
|
32
33
|
const runTree = AsyncLocalStorageProviderSingleton.getInstance().getStore();
|
|
33
|
-
if (runTree === undefined) {
|
|
34
|
+
if (!isRunTree(runTree)) {
|
|
34
35
|
throw new Error([
|
|
35
36
|
"Could not get the current run tree.",
|
|
36
37
|
"",
|
package/dist/traceable.cjs
CHANGED
|
@@ -272,7 +272,7 @@ function traceable(wrappedFunc, config) {
|
|
|
272
272
|
// Node.JS uses AsyncLocalStorage (ALS) and AsyncResource
|
|
273
273
|
// to allow storing context
|
|
274
274
|
const prevRunFromStore = asyncLocalStorage.getStore();
|
|
275
|
-
if (prevRunFromStore) {
|
|
275
|
+
if ((0, run_trees_js_1.isRunTree)(prevRunFromStore)) {
|
|
276
276
|
return [
|
|
277
277
|
getTracingRunTree(prevRunFromStore.createChild(ensuredConfig), processedArgs, config?.getInvocationParams),
|
|
278
278
|
processedArgs,
|
package/dist/traceable.js
CHANGED
|
@@ -269,7 +269,7 @@ export function traceable(wrappedFunc, config) {
|
|
|
269
269
|
// Node.JS uses AsyncLocalStorage (ALS) and AsyncResource
|
|
270
270
|
// to allow storing context
|
|
271
271
|
const prevRunFromStore = asyncLocalStorage.getStore();
|
|
272
|
-
if (prevRunFromStore) {
|
|
272
|
+
if (isRunTree(prevRunFromStore)) {
|
|
273
273
|
return [
|
|
274
274
|
getTracingRunTree(prevRunFromStore.createChild(ensuredConfig), processedArgs, config?.getInvocationParams),
|
|
275
275
|
processedArgs,
|
package/dist/wrappers/openai.cjs
CHANGED
|
@@ -142,52 +142,102 @@ const wrapOpenAI = (openai, options) => {
|
|
|
142
142
|
(0, traceable_js_1.isTraceableFunction)(openai.completions.create)) {
|
|
143
143
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
144
144
|
}
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
145
|
+
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
146
|
+
// OpenAI methods.
|
|
147
|
+
const tracedOpenAIClient = { ...openai };
|
|
148
|
+
if (openai.beta &&
|
|
149
|
+
openai.beta.chat &&
|
|
150
|
+
openai.beta.chat.completions &&
|
|
151
|
+
typeof openai.beta.chat.completions.parse === "function") {
|
|
152
|
+
tracedOpenAIClient.beta = {
|
|
153
|
+
...openai.beta,
|
|
154
|
+
chat: {
|
|
155
|
+
...openai.beta.chat,
|
|
156
|
+
completions: {
|
|
157
|
+
...openai.beta.chat.completions,
|
|
158
|
+
parse: (0, traceable_js_1.traceable)(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
|
|
159
|
+
name: "ChatOpenAI",
|
|
160
|
+
run_type: "llm",
|
|
161
|
+
aggregator: chatAggregator,
|
|
162
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
163
|
+
getInvocationParams: (payload) => {
|
|
164
|
+
if (typeof payload !== "object" || payload == null)
|
|
165
|
+
return undefined;
|
|
166
|
+
// we can safely do so, as the types are not exported in TSC
|
|
167
|
+
const params = payload;
|
|
168
|
+
const ls_stop = (typeof params.stop === "string"
|
|
169
|
+
? [params.stop]
|
|
170
|
+
: params.stop) ?? undefined;
|
|
171
|
+
return {
|
|
172
|
+
ls_provider: "openai",
|
|
173
|
+
ls_model_type: "chat",
|
|
174
|
+
ls_model_name: params.model,
|
|
175
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
176
|
+
ls_temperature: params.temperature ?? undefined,
|
|
177
|
+
ls_stop,
|
|
178
|
+
};
|
|
179
|
+
},
|
|
180
|
+
...options,
|
|
181
|
+
}),
|
|
182
|
+
},
|
|
183
|
+
},
|
|
184
|
+
};
|
|
185
|
+
}
|
|
186
|
+
tracedOpenAIClient.chat = {
|
|
187
|
+
...openai.chat,
|
|
188
|
+
completions: {
|
|
189
|
+
...openai.chat.completions,
|
|
190
|
+
create: (0, traceable_js_1.traceable)(openai.chat.completions.create.bind(openai.chat.completions), {
|
|
191
|
+
name: "ChatOpenAI",
|
|
192
|
+
run_type: "llm",
|
|
193
|
+
aggregator: chatAggregator,
|
|
194
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
195
|
+
getInvocationParams: (payload) => {
|
|
196
|
+
if (typeof payload !== "object" || payload == null)
|
|
197
|
+
return undefined;
|
|
198
|
+
// we can safely do so, as the types are not exported in TSC
|
|
199
|
+
const params = payload;
|
|
200
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
201
|
+
undefined;
|
|
202
|
+
return {
|
|
203
|
+
ls_provider: "openai",
|
|
204
|
+
ls_model_type: "chat",
|
|
205
|
+
ls_model_name: params.model,
|
|
206
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
207
|
+
ls_temperature: params.temperature ?? undefined,
|
|
208
|
+
ls_stop,
|
|
209
|
+
};
|
|
210
|
+
},
|
|
211
|
+
...options,
|
|
212
|
+
}),
|
|
188
213
|
},
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
214
|
+
};
|
|
215
|
+
tracedOpenAIClient.completions = {
|
|
216
|
+
...openai.completions,
|
|
217
|
+
create: (0, traceable_js_1.traceable)(openai.completions.create.bind(openai.completions), {
|
|
218
|
+
name: "OpenAI",
|
|
219
|
+
run_type: "llm",
|
|
220
|
+
aggregator: textAggregator,
|
|
221
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
222
|
+
getInvocationParams: (payload) => {
|
|
223
|
+
if (typeof payload !== "object" || payload == null)
|
|
224
|
+
return undefined;
|
|
225
|
+
// we can safely do so, as the types are not exported in TSC
|
|
226
|
+
const params = payload;
|
|
227
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
228
|
+
undefined;
|
|
229
|
+
return {
|
|
230
|
+
ls_provider: "openai",
|
|
231
|
+
ls_model_type: "llm",
|
|
232
|
+
ls_model_name: params.model,
|
|
233
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
234
|
+
ls_temperature: params.temperature ?? undefined,
|
|
235
|
+
ls_stop,
|
|
236
|
+
};
|
|
237
|
+
},
|
|
238
|
+
...options,
|
|
239
|
+
}),
|
|
240
|
+
};
|
|
241
|
+
return tracedOpenAIClient;
|
|
192
242
|
};
|
|
193
243
|
exports.wrapOpenAI = wrapOpenAI;
|
|
@@ -2,6 +2,13 @@ import { OpenAI } from "openai";
|
|
|
2
2
|
import type { APIPromise } from "openai/core";
|
|
3
3
|
import type { RunTreeConfig } from "../index.js";
|
|
4
4
|
type OpenAIType = {
|
|
5
|
+
beta?: {
|
|
6
|
+
chat?: {
|
|
7
|
+
completions?: {
|
|
8
|
+
parse?: (...args: any[]) => any;
|
|
9
|
+
};
|
|
10
|
+
};
|
|
11
|
+
};
|
|
5
12
|
chat: {
|
|
6
13
|
completions: {
|
|
7
14
|
create: (...args: any[]) => any;
|
package/dist/wrappers/openai.js
CHANGED
|
@@ -139,51 +139,101 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
139
139
|
isTraceableFunction(openai.completions.create)) {
|
|
140
140
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
141
141
|
}
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
142
|
+
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
143
|
+
// OpenAI methods.
|
|
144
|
+
const tracedOpenAIClient = { ...openai };
|
|
145
|
+
if (openai.beta &&
|
|
146
|
+
openai.beta.chat &&
|
|
147
|
+
openai.beta.chat.completions &&
|
|
148
|
+
typeof openai.beta.chat.completions.parse === "function") {
|
|
149
|
+
tracedOpenAIClient.beta = {
|
|
150
|
+
...openai.beta,
|
|
151
|
+
chat: {
|
|
152
|
+
...openai.beta.chat,
|
|
153
|
+
completions: {
|
|
154
|
+
...openai.beta.chat.completions,
|
|
155
|
+
parse: traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
|
|
156
|
+
name: "ChatOpenAI",
|
|
157
|
+
run_type: "llm",
|
|
158
|
+
aggregator: chatAggregator,
|
|
159
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
160
|
+
getInvocationParams: (payload) => {
|
|
161
|
+
if (typeof payload !== "object" || payload == null)
|
|
162
|
+
return undefined;
|
|
163
|
+
// we can safely do so, as the types are not exported in TSC
|
|
164
|
+
const params = payload;
|
|
165
|
+
const ls_stop = (typeof params.stop === "string"
|
|
166
|
+
? [params.stop]
|
|
167
|
+
: params.stop) ?? undefined;
|
|
168
|
+
return {
|
|
169
|
+
ls_provider: "openai",
|
|
170
|
+
ls_model_type: "chat",
|
|
171
|
+
ls_model_name: params.model,
|
|
172
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
173
|
+
ls_temperature: params.temperature ?? undefined,
|
|
174
|
+
ls_stop,
|
|
175
|
+
};
|
|
176
|
+
},
|
|
177
|
+
...options,
|
|
178
|
+
}),
|
|
179
|
+
},
|
|
180
|
+
},
|
|
181
|
+
};
|
|
182
|
+
}
|
|
183
|
+
tracedOpenAIClient.chat = {
|
|
184
|
+
...openai.chat,
|
|
185
|
+
completions: {
|
|
186
|
+
...openai.chat.completions,
|
|
187
|
+
create: traceable(openai.chat.completions.create.bind(openai.chat.completions), {
|
|
188
|
+
name: "ChatOpenAI",
|
|
189
|
+
run_type: "llm",
|
|
190
|
+
aggregator: chatAggregator,
|
|
191
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
192
|
+
getInvocationParams: (payload) => {
|
|
193
|
+
if (typeof payload !== "object" || payload == null)
|
|
194
|
+
return undefined;
|
|
195
|
+
// we can safely do so, as the types are not exported in TSC
|
|
196
|
+
const params = payload;
|
|
197
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
198
|
+
undefined;
|
|
199
|
+
return {
|
|
200
|
+
ls_provider: "openai",
|
|
201
|
+
ls_model_type: "chat",
|
|
202
|
+
ls_model_name: params.model,
|
|
203
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
204
|
+
ls_temperature: params.temperature ?? undefined,
|
|
205
|
+
ls_stop,
|
|
206
|
+
};
|
|
207
|
+
},
|
|
208
|
+
...options,
|
|
209
|
+
}),
|
|
185
210
|
},
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
211
|
+
};
|
|
212
|
+
tracedOpenAIClient.completions = {
|
|
213
|
+
...openai.completions,
|
|
214
|
+
create: traceable(openai.completions.create.bind(openai.completions), {
|
|
215
|
+
name: "OpenAI",
|
|
216
|
+
run_type: "llm",
|
|
217
|
+
aggregator: textAggregator,
|
|
218
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
219
|
+
getInvocationParams: (payload) => {
|
|
220
|
+
if (typeof payload !== "object" || payload == null)
|
|
221
|
+
return undefined;
|
|
222
|
+
// we can safely do so, as the types are not exported in TSC
|
|
223
|
+
const params = payload;
|
|
224
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
225
|
+
undefined;
|
|
226
|
+
return {
|
|
227
|
+
ls_provider: "openai",
|
|
228
|
+
ls_model_type: "llm",
|
|
229
|
+
ls_model_name: params.model,
|
|
230
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
231
|
+
ls_temperature: params.temperature ?? undefined,
|
|
232
|
+
ls_stop,
|
|
233
|
+
};
|
|
234
|
+
},
|
|
235
|
+
...options,
|
|
236
|
+
}),
|
|
237
|
+
};
|
|
238
|
+
return tracedOpenAIClient;
|
|
189
239
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "langsmith",
|
|
3
|
-
"version": "0.1.62",
|
|
3
|
+
"version": "0.1.64",
|
|
4
4
|
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
|
|
5
5
|
"packageManager": "yarn@1.22.19",
|
|
6
6
|
"files": [
|
|
@@ -127,7 +127,7 @@
|
|
|
127
127
|
"eslint-plugin-prettier": "^4.2.1",
|
|
128
128
|
"jest": "^29.5.0",
|
|
129
129
|
"langchain": "^0.3.2",
|
|
130
|
-
"openai": "^4.
|
|
130
|
+
"openai": "^4.67.3",
|
|
131
131
|
"prettier": "^2.8.8",
|
|
132
132
|
"ts-jest": "^29.1.0",
|
|
133
133
|
"ts-node": "^10.9.1",
|