langsmith 0.1.62 → 0.1.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/wrappers/openai.cjs +96 -46
- package/dist/wrappers/openai.d.ts +7 -0
- package/dist/wrappers/openai.js +96 -46
- package/package.json +2 -2
package/dist/index.cjs
CHANGED
|
@@ -8,4 +8,4 @@ Object.defineProperty(exports, "RunTree", { enumerable: true, get: function () {
|
|
|
8
8
|
var fetch_js_1 = require("./singletons/fetch.cjs");
|
|
9
9
|
Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true, get: function () { return fetch_js_1.overrideFetchImplementation; } });
|
|
10
10
|
// Update using yarn bump-version
|
|
11
|
-
exports.__version__ = "0.1.62";
|
|
11
|
+
exports.__version__ = "0.1.63";
|
package/dist/index.d.ts
CHANGED
|
@@ -2,4 +2,4 @@ export { Client, type ClientConfig } from "./client.js";
|
|
|
2
2
|
export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, } from "./schemas.js";
|
|
3
3
|
export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
|
-
export declare const __version__ = "0.1.62";
|
|
5
|
+
export declare const __version__ = "0.1.63";
|
package/dist/index.js
CHANGED
package/dist/wrappers/openai.cjs
CHANGED
|
@@ -142,52 +142,102 @@ const wrapOpenAI = (openai, options) => {
|
|
|
142
142
|
(0, traceable_js_1.isTraceableFunction)(openai.completions.create)) {
|
|
143
143
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
144
144
|
}
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
145
|
+
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
146
|
+
// OpenAI methods.
|
|
147
|
+
const tracedOpenAIClient = { ...openai };
|
|
148
|
+
if (openai.beta &&
|
|
149
|
+
openai.beta.chat &&
|
|
150
|
+
openai.beta.chat.completions &&
|
|
151
|
+
typeof openai.beta.chat.completions.parse === "function") {
|
|
152
|
+
tracedOpenAIClient.beta = {
|
|
153
|
+
...openai.beta,
|
|
154
|
+
chat: {
|
|
155
|
+
...openai.beta.chat,
|
|
156
|
+
completions: {
|
|
157
|
+
...openai.beta.chat.completions,
|
|
158
|
+
parse: (0, traceable_js_1.traceable)(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
|
|
159
|
+
name: "ChatOpenAI",
|
|
160
|
+
run_type: "llm",
|
|
161
|
+
aggregator: chatAggregator,
|
|
162
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
163
|
+
getInvocationParams: (payload) => {
|
|
164
|
+
if (typeof payload !== "object" || payload == null)
|
|
165
|
+
return undefined;
|
|
166
|
+
// we can safely do so, as the types are not exported in TSC
|
|
167
|
+
const params = payload;
|
|
168
|
+
const ls_stop = (typeof params.stop === "string"
|
|
169
|
+
? [params.stop]
|
|
170
|
+
: params.stop) ?? undefined;
|
|
171
|
+
return {
|
|
172
|
+
ls_provider: "openai",
|
|
173
|
+
ls_model_type: "chat",
|
|
174
|
+
ls_model_name: params.model,
|
|
175
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
176
|
+
ls_temperature: params.temperature ?? undefined,
|
|
177
|
+
ls_stop,
|
|
178
|
+
};
|
|
179
|
+
},
|
|
180
|
+
...options,
|
|
181
|
+
}),
|
|
182
|
+
},
|
|
183
|
+
},
|
|
184
|
+
};
|
|
185
|
+
}
|
|
186
|
+
tracedOpenAIClient.chat = {
|
|
187
|
+
...openai.chat,
|
|
188
|
+
completions: {
|
|
189
|
+
...openai.chat.completions,
|
|
190
|
+
create: (0, traceable_js_1.traceable)(openai.chat.completions.create.bind(openai.chat.completions), {
|
|
191
|
+
name: "ChatOpenAI",
|
|
192
|
+
run_type: "llm",
|
|
193
|
+
aggregator: chatAggregator,
|
|
194
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
195
|
+
getInvocationParams: (payload) => {
|
|
196
|
+
if (typeof payload !== "object" || payload == null)
|
|
197
|
+
return undefined;
|
|
198
|
+
// we can safely do so, as the types are not exported in TSC
|
|
199
|
+
const params = payload;
|
|
200
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
201
|
+
undefined;
|
|
202
|
+
return {
|
|
203
|
+
ls_provider: "openai",
|
|
204
|
+
ls_model_type: "chat",
|
|
205
|
+
ls_model_name: params.model,
|
|
206
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
207
|
+
ls_temperature: params.temperature ?? undefined,
|
|
208
|
+
ls_stop,
|
|
209
|
+
};
|
|
210
|
+
},
|
|
211
|
+
...options,
|
|
212
|
+
}),
|
|
188
213
|
},
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
214
|
+
};
|
|
215
|
+
tracedOpenAIClient.completions = {
|
|
216
|
+
...openai.completions,
|
|
217
|
+
create: (0, traceable_js_1.traceable)(openai.completions.create.bind(openai.completions), {
|
|
218
|
+
name: "OpenAI",
|
|
219
|
+
run_type: "llm",
|
|
220
|
+
aggregator: textAggregator,
|
|
221
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
222
|
+
getInvocationParams: (payload) => {
|
|
223
|
+
if (typeof payload !== "object" || payload == null)
|
|
224
|
+
return undefined;
|
|
225
|
+
// we can safely do so, as the types are not exported in TSC
|
|
226
|
+
const params = payload;
|
|
227
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
228
|
+
undefined;
|
|
229
|
+
return {
|
|
230
|
+
ls_provider: "openai",
|
|
231
|
+
ls_model_type: "llm",
|
|
232
|
+
ls_model_name: params.model,
|
|
233
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
234
|
+
ls_temperature: params.temperature ?? undefined,
|
|
235
|
+
ls_stop,
|
|
236
|
+
};
|
|
237
|
+
},
|
|
238
|
+
...options,
|
|
239
|
+
}),
|
|
240
|
+
};
|
|
241
|
+
return tracedOpenAIClient;
|
|
192
242
|
};
|
|
193
243
|
exports.wrapOpenAI = wrapOpenAI;
|
|
@@ -2,6 +2,13 @@ import { OpenAI } from "openai";
|
|
|
2
2
|
import type { APIPromise } from "openai/core";
|
|
3
3
|
import type { RunTreeConfig } from "../index.js";
|
|
4
4
|
type OpenAIType = {
|
|
5
|
+
beta?: {
|
|
6
|
+
chat?: {
|
|
7
|
+
completions?: {
|
|
8
|
+
parse?: (...args: any[]) => any;
|
|
9
|
+
};
|
|
10
|
+
};
|
|
11
|
+
};
|
|
5
12
|
chat: {
|
|
6
13
|
completions: {
|
|
7
14
|
create: (...args: any[]) => any;
|
package/dist/wrappers/openai.js
CHANGED
|
@@ -139,51 +139,101 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
139
139
|
isTraceableFunction(openai.completions.create)) {
|
|
140
140
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
141
141
|
}
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
142
|
+
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
143
|
+
// OpenAI methods.
|
|
144
|
+
const tracedOpenAIClient = { ...openai };
|
|
145
|
+
if (openai.beta &&
|
|
146
|
+
openai.beta.chat &&
|
|
147
|
+
openai.beta.chat.completions &&
|
|
148
|
+
typeof openai.beta.chat.completions.parse === "function") {
|
|
149
|
+
tracedOpenAIClient.beta = {
|
|
150
|
+
...openai.beta,
|
|
151
|
+
chat: {
|
|
152
|
+
...openai.beta.chat,
|
|
153
|
+
completions: {
|
|
154
|
+
...openai.beta.chat.completions,
|
|
155
|
+
parse: traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
|
|
156
|
+
name: "ChatOpenAI",
|
|
157
|
+
run_type: "llm",
|
|
158
|
+
aggregator: chatAggregator,
|
|
159
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
160
|
+
getInvocationParams: (payload) => {
|
|
161
|
+
if (typeof payload !== "object" || payload == null)
|
|
162
|
+
return undefined;
|
|
163
|
+
// we can safely do so, as the types are not exported in TSC
|
|
164
|
+
const params = payload;
|
|
165
|
+
const ls_stop = (typeof params.stop === "string"
|
|
166
|
+
? [params.stop]
|
|
167
|
+
: params.stop) ?? undefined;
|
|
168
|
+
return {
|
|
169
|
+
ls_provider: "openai",
|
|
170
|
+
ls_model_type: "chat",
|
|
171
|
+
ls_model_name: params.model,
|
|
172
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
173
|
+
ls_temperature: params.temperature ?? undefined,
|
|
174
|
+
ls_stop,
|
|
175
|
+
};
|
|
176
|
+
},
|
|
177
|
+
...options,
|
|
178
|
+
}),
|
|
179
|
+
},
|
|
180
|
+
},
|
|
181
|
+
};
|
|
182
|
+
}
|
|
183
|
+
tracedOpenAIClient.chat = {
|
|
184
|
+
...openai.chat,
|
|
185
|
+
completions: {
|
|
186
|
+
...openai.chat.completions,
|
|
187
|
+
create: traceable(openai.chat.completions.create.bind(openai.chat.completions), {
|
|
188
|
+
name: "ChatOpenAI",
|
|
189
|
+
run_type: "llm",
|
|
190
|
+
aggregator: chatAggregator,
|
|
191
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
192
|
+
getInvocationParams: (payload) => {
|
|
193
|
+
if (typeof payload !== "object" || payload == null)
|
|
194
|
+
return undefined;
|
|
195
|
+
// we can safely do so, as the types are not exported in TSC
|
|
196
|
+
const params = payload;
|
|
197
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
198
|
+
undefined;
|
|
199
|
+
return {
|
|
200
|
+
ls_provider: "openai",
|
|
201
|
+
ls_model_type: "chat",
|
|
202
|
+
ls_model_name: params.model,
|
|
203
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
204
|
+
ls_temperature: params.temperature ?? undefined,
|
|
205
|
+
ls_stop,
|
|
206
|
+
};
|
|
207
|
+
},
|
|
208
|
+
...options,
|
|
209
|
+
}),
|
|
185
210
|
},
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
211
|
+
};
|
|
212
|
+
tracedOpenAIClient.completions = {
|
|
213
|
+
...openai.completions,
|
|
214
|
+
create: traceable(openai.completions.create.bind(openai.completions), {
|
|
215
|
+
name: "OpenAI",
|
|
216
|
+
run_type: "llm",
|
|
217
|
+
aggregator: textAggregator,
|
|
218
|
+
argsConfigPath: [1, "langsmithExtra"],
|
|
219
|
+
getInvocationParams: (payload) => {
|
|
220
|
+
if (typeof payload !== "object" || payload == null)
|
|
221
|
+
return undefined;
|
|
222
|
+
// we can safely do so, as the types are not exported in TSC
|
|
223
|
+
const params = payload;
|
|
224
|
+
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
225
|
+
undefined;
|
|
226
|
+
return {
|
|
227
|
+
ls_provider: "openai",
|
|
228
|
+
ls_model_type: "llm",
|
|
229
|
+
ls_model_name: params.model,
|
|
230
|
+
ls_max_tokens: params.max_tokens ?? undefined,
|
|
231
|
+
ls_temperature: params.temperature ?? undefined,
|
|
232
|
+
ls_stop,
|
|
233
|
+
};
|
|
234
|
+
},
|
|
235
|
+
...options,
|
|
236
|
+
}),
|
|
237
|
+
};
|
|
238
|
+
return tracedOpenAIClient;
|
|
189
239
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "langsmith",
|
|
3
|
-
"version": "0.1.62",
|
|
3
|
+
"version": "0.1.63",
|
|
4
4
|
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
|
|
5
5
|
"packageManager": "yarn@1.22.19",
|
|
6
6
|
"files": [
|
|
@@ -127,7 +127,7 @@
|
|
|
127
127
|
"eslint-plugin-prettier": "^4.2.1",
|
|
128
128
|
"jest": "^29.5.0",
|
|
129
129
|
"langchain": "^0.3.2",
|
|
130
|
-
"openai": "^4.
|
|
130
|
+
"openai": "^4.67.3",
|
|
131
131
|
"prettier": "^2.8.8",
|
|
132
132
|
"ts-jest": "^29.1.0",
|
|
133
133
|
"ts-node": "^10.9.1",
|