langsmith 0.3.63 → 0.3.64
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.cjs +4 -2
- package/dist/client.js +5 -3
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/run_trees.cjs +2 -2
- package/dist/run_trees.js +2 -2
- package/dist/traceable.cjs +4 -1
- package/dist/traceable.js +4 -1
- package/dist/utils/env.cjs +5 -1
- package/dist/utils/env.js +5 -1
- package/dist/wrappers/openai.cjs +11 -6
- package/dist/wrappers/openai.d.ts +1 -1
- package/dist/wrappers/openai.js +11 -6
- package/package.json +1 -1
package/dist/client.cjs
CHANGED
|
@@ -1193,8 +1193,10 @@ class Client {
|
|
|
1193
1193
|
try {
|
|
1194
1194
|
let res;
|
|
1195
1195
|
let streamedAttempt = false;
|
|
1196
|
-
// attempt stream only if not disabled and not using node-fetch
|
|
1197
|
-
if (!isNodeFetch &&
|
|
1196
|
+
// attempt stream only if not disabled and not using node-fetch or Bun
|
|
1197
|
+
if (!isNodeFetch &&
|
|
1198
|
+
!this.multipartStreamingDisabled &&
|
|
1199
|
+
(0, env_js_1.getEnv)() !== "bun") {
|
|
1198
1200
|
streamedAttempt = true;
|
|
1199
1201
|
res = await sendWithRetry(buildStream);
|
|
1200
1202
|
}
|
package/dist/client.js
CHANGED
|
@@ -3,7 +3,7 @@ import { LangSmithToOTELTranslator, } from "./experimental/otel/translator.js";
|
|
|
3
3
|
import { getDefaultOTLPTracerComponents, getOTELTrace, getOTELContext, } from "./singletons/otel.js";
|
|
4
4
|
import { AsyncCaller } from "./utils/async_caller.js";
|
|
5
5
|
import { convertLangChainMessageToExample, isLangChainMessage, } from "./utils/messages.js";
|
|
6
|
-
import { getEnvironmentVariable, getLangChainEnvVarsMetadata, getLangSmithEnvironmentVariable, getRuntimeEnvironment, getOtelEnabled, } from "./utils/env.js";
|
|
6
|
+
import { getEnvironmentVariable, getLangChainEnvVarsMetadata, getLangSmithEnvironmentVariable, getRuntimeEnvironment, getOtelEnabled, getEnv, } from "./utils/env.js";
|
|
7
7
|
import { __version__ } from "./index.js";
|
|
8
8
|
import { assertUuid } from "./utils/_uuid.js";
|
|
9
9
|
import { warnOnce } from "./utils/warn.js";
|
|
@@ -1155,8 +1155,10 @@ export class Client {
|
|
|
1155
1155
|
try {
|
|
1156
1156
|
let res;
|
|
1157
1157
|
let streamedAttempt = false;
|
|
1158
|
-
// attempt stream only if not disabled and not using node-fetch
|
|
1159
|
-
if (!isNodeFetch &&
|
|
1158
|
+
// attempt stream only if not disabled and not using node-fetch or Bun
|
|
1159
|
+
if (!isNodeFetch &&
|
|
1160
|
+
!this.multipartStreamingDisabled &&
|
|
1161
|
+
getEnv() !== "bun") {
|
|
1160
1162
|
streamedAttempt = true;
|
|
1161
1163
|
res = await sendWithRetry(buildStream);
|
|
1162
1164
|
}
|
package/dist/index.cjs
CHANGED
|
@@ -10,4 +10,4 @@ Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true
|
|
|
10
10
|
var project_js_1 = require("./utils/project.cjs");
|
|
11
11
|
Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get: function () { return project_js_1.getDefaultProjectName; } });
|
|
12
12
|
// Update using yarn bump-version
|
|
13
|
-
exports.__version__ = "0.3.63";
|
|
13
|
+
exports.__version__ = "0.3.64";
|
package/dist/index.d.ts
CHANGED
|
@@ -3,4 +3,4 @@ export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, }
|
|
|
3
3
|
export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
5
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
6
|
-
export declare const __version__ = "0.3.63";
|
|
6
|
+
export declare const __version__ = "0.3.64";
|
package/dist/index.js
CHANGED
|
@@ -3,4 +3,4 @@ export { RunTree } from "./run_trees.js";
|
|
|
3
3
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
4
4
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
5
5
|
// Update using yarn bump-version
|
|
6
|
-
export const __version__ = "0.3.63";
|
|
6
|
+
export const __version__ = "0.3.64";
|
package/dist/run_trees.cjs
CHANGED
|
@@ -734,7 +734,7 @@ Object.defineProperty(RunTree, "sharedClient", {
|
|
|
734
734
|
value: null
|
|
735
735
|
});
|
|
736
736
|
function isRunTree(x) {
|
|
737
|
-
return (x &&
|
|
737
|
+
return (x != null &&
|
|
738
738
|
typeof x.createChild === "function" &&
|
|
739
739
|
typeof x.postRun === "function");
|
|
740
740
|
}
|
|
@@ -756,7 +756,7 @@ function isRunnableConfigLike(x) {
|
|
|
756
756
|
// Check that it's an object with a callbacks arg
|
|
757
757
|
// that has either a CallbackManagerLike object with a langchain tracer within it
|
|
758
758
|
// or an array with a LangChainTracerLike object within it
|
|
759
|
-
return (x &&
|
|
759
|
+
return (x != null &&
|
|
760
760
|
typeof x.callbacks === "object" &&
|
|
761
761
|
// Callback manager with a langchain tracer
|
|
762
762
|
(containsLangChainTracerLike(x.callbacks?.handlers) ||
|
package/dist/run_trees.js
CHANGED
|
@@ -694,7 +694,7 @@ Object.defineProperty(RunTree, "sharedClient", {
|
|
|
694
694
|
value: null
|
|
695
695
|
});
|
|
696
696
|
export function isRunTree(x) {
|
|
697
|
-
return (x &&
|
|
697
|
+
return (x != null &&
|
|
698
698
|
typeof x.createChild === "function" &&
|
|
699
699
|
typeof x.postRun === "function");
|
|
700
700
|
}
|
|
@@ -716,7 +716,7 @@ export function isRunnableConfigLike(x) {
|
|
|
716
716
|
// Check that it's an object with a callbacks arg
|
|
717
717
|
// that has either a CallbackManagerLike object with a langchain tracer within it
|
|
718
718
|
// or an array with a LangChainTracerLike object within it
|
|
719
|
-
return (x &&
|
|
719
|
+
return (x != null &&
|
|
720
720
|
typeof x.callbacks === "object" &&
|
|
721
721
|
// Callback manager with a langchain tracer
|
|
722
722
|
(containsLangChainTracerLike(x.callbacks?.handlers) ||
|
package/dist/traceable.cjs
CHANGED
|
@@ -74,7 +74,10 @@ function maybeCreateOtelContext(runTree, projectName, tracer
|
|
|
74
74
|
const runInputsToMap = (rawInputs) => {
|
|
75
75
|
const firstInput = rawInputs[0];
|
|
76
76
|
let inputs;
|
|
77
|
-
if (firstInput === undefined) {
|
|
77
|
+
if (firstInput === null) {
|
|
78
|
+
inputs = { inputs: null };
|
|
79
|
+
}
|
|
80
|
+
else if (firstInput === undefined) {
|
|
78
81
|
inputs = {};
|
|
79
82
|
}
|
|
80
83
|
else if (rawInputs.length > 1) {
|
package/dist/traceable.js
CHANGED
|
@@ -70,7 +70,10 @@ function maybeCreateOtelContext(runTree, projectName, tracer
|
|
|
70
70
|
const runInputsToMap = (rawInputs) => {
|
|
71
71
|
const firstInput = rawInputs[0];
|
|
72
72
|
let inputs;
|
|
73
|
-
if (firstInput === undefined) {
|
|
73
|
+
if (firstInput === null) {
|
|
74
|
+
inputs = { inputs: null };
|
|
75
|
+
}
|
|
76
|
+
else if (firstInput === undefined) {
|
|
74
77
|
inputs = {};
|
|
75
78
|
}
|
|
76
79
|
else if (rawInputs.length > 1) {
|
package/dist/utils/env.cjs
CHANGED
|
@@ -36,7 +36,11 @@ const getEnv = () => {
|
|
|
36
36
|
if (globalEnv) {
|
|
37
37
|
return globalEnv;
|
|
38
38
|
}
|
|
39
|
-
|
|
39
|
+
// @ts-expect-error Bun types are not imported due to conflicts with Node types
|
|
40
|
+
if (typeof Bun !== "undefined") {
|
|
41
|
+
globalEnv = "bun";
|
|
42
|
+
}
|
|
43
|
+
else if ((0, exports.isBrowser)()) {
|
|
40
44
|
globalEnv = "browser";
|
|
41
45
|
}
|
|
42
46
|
else if ((0, exports.isNode)()) {
|
package/dist/utils/env.js
CHANGED
|
@@ -19,7 +19,11 @@ export const getEnv = () => {
|
|
|
19
19
|
if (globalEnv) {
|
|
20
20
|
return globalEnv;
|
|
21
21
|
}
|
|
22
|
-
|
|
22
|
+
// @ts-expect-error Bun types are not imported due to conflicts with Node types
|
|
23
|
+
if (typeof Bun !== "undefined") {
|
|
24
|
+
globalEnv = "bun";
|
|
25
|
+
}
|
|
26
|
+
else if (isBrowser()) {
|
|
23
27
|
globalEnv = "browser";
|
|
24
28
|
}
|
|
25
29
|
else if (isNode()) {
|
package/dist/wrappers/openai.cjs
CHANGED
|
@@ -197,11 +197,16 @@ const wrapOpenAI = (openai, options) => {
|
|
|
197
197
|
(0, traceable_js_1.isTraceableFunction)(openai.completions.create)) {
|
|
198
198
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
199
199
|
}
|
|
200
|
+
// Attempt to determine if this is an Azure OpenAI client
|
|
201
|
+
const isAzureOpenAI = openai.constructor?.name === "AzureOpenAI";
|
|
202
|
+
const provider = isAzureOpenAI ? "azure" : "openai";
|
|
203
|
+
const chatName = isAzureOpenAI ? "AzureChatOpenAI" : "ChatOpenAI";
|
|
204
|
+
const completionsName = isAzureOpenAI ? "AzureOpenAI" : "OpenAI";
|
|
200
205
|
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
201
206
|
// OpenAI methods.
|
|
202
207
|
const tracedOpenAIClient = { ...openai };
|
|
203
208
|
const chatCompletionParseMetadata = {
|
|
204
|
-
name: "ChatOpenAI",
|
|
209
|
+
name: chatName,
|
|
205
210
|
run_type: "llm",
|
|
206
211
|
aggregator: chatAggregator,
|
|
207
212
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -213,7 +218,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
213
218
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
214
219
|
undefined;
|
|
215
220
|
return {
|
|
216
|
-
ls_provider: "openai",
|
|
221
|
+
ls_provider: provider,
|
|
217
222
|
ls_model_type: "chat",
|
|
218
223
|
ls_model_name: params.model,
|
|
219
224
|
ls_max_tokens: params.max_completion_tokens ?? params.max_tokens ?? undefined,
|
|
@@ -247,7 +252,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
247
252
|
tracedOpenAIClient.completions = {
|
|
248
253
|
...openai.completions,
|
|
249
254
|
create: (0, traceable_js_1.traceable)(openai.completions.create.bind(openai.completions), {
|
|
250
|
-
name: "OpenAI",
|
|
255
|
+
name: completionsName,
|
|
251
256
|
run_type: "llm",
|
|
252
257
|
aggregator: textAggregator,
|
|
253
258
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -259,7 +264,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
259
264
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
260
265
|
undefined;
|
|
261
266
|
return {
|
|
262
|
-
ls_provider: "openai",
|
|
267
|
+
ls_provider: provider,
|
|
263
268
|
ls_model_type: "llm",
|
|
264
269
|
ls_model_name: params.model,
|
|
265
270
|
ls_max_tokens: params.max_tokens ?? undefined,
|
|
@@ -282,7 +287,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
282
287
|
if (tracedOpenAIClient.responses &&
|
|
283
288
|
typeof tracedOpenAIClient.responses.create === "function") {
|
|
284
289
|
tracedOpenAIClient.responses.create = (0, traceable_js_1.traceable)(openai.responses.create.bind(openai.responses), {
|
|
285
|
-
name: "ChatOpenAI",
|
|
290
|
+
name: chatName,
|
|
286
291
|
run_type: "llm",
|
|
287
292
|
aggregator: responsesAggregator,
|
|
288
293
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -292,7 +297,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
292
297
|
// Handle responses API parameters
|
|
293
298
|
const params = payload;
|
|
294
299
|
return {
|
|
295
|
-
ls_provider: "openai",
|
|
300
|
+
ls_provider: provider,
|
|
296
301
|
ls_model_type: "llm",
|
|
297
302
|
ls_model_name: params.model || "unknown",
|
|
298
303
|
};
|
package/dist/wrappers/openai.js
CHANGED
|
@@ -194,11 +194,16 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
194
194
|
isTraceableFunction(openai.completions.create)) {
|
|
195
195
|
throw new Error("This instance of OpenAI client has been already wrapped once.");
|
|
196
196
|
}
|
|
197
|
+
// Attempt to determine if this is an Azure OpenAI client
|
|
198
|
+
const isAzureOpenAI = openai.constructor?.name === "AzureOpenAI";
|
|
199
|
+
const provider = isAzureOpenAI ? "azure" : "openai";
|
|
200
|
+
const chatName = isAzureOpenAI ? "AzureChatOpenAI" : "ChatOpenAI";
|
|
201
|
+
const completionsName = isAzureOpenAI ? "AzureOpenAI" : "OpenAI";
|
|
197
202
|
// Some internal OpenAI methods call each other, so we need to preserve original
|
|
198
203
|
// OpenAI methods.
|
|
199
204
|
const tracedOpenAIClient = { ...openai };
|
|
200
205
|
const chatCompletionParseMetadata = {
|
|
201
|
-
name: "ChatOpenAI",
|
|
206
|
+
name: chatName,
|
|
202
207
|
run_type: "llm",
|
|
203
208
|
aggregator: chatAggregator,
|
|
204
209
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -210,7 +215,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
210
215
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
211
216
|
undefined;
|
|
212
217
|
return {
|
|
213
|
-
ls_provider: "openai",
|
|
218
|
+
ls_provider: provider,
|
|
214
219
|
ls_model_type: "chat",
|
|
215
220
|
ls_model_name: params.model,
|
|
216
221
|
ls_max_tokens: params.max_completion_tokens ?? params.max_tokens ?? undefined,
|
|
@@ -244,7 +249,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
244
249
|
tracedOpenAIClient.completions = {
|
|
245
250
|
...openai.completions,
|
|
246
251
|
create: traceable(openai.completions.create.bind(openai.completions), {
|
|
247
|
-
name: "OpenAI",
|
|
252
|
+
name: completionsName,
|
|
248
253
|
run_type: "llm",
|
|
249
254
|
aggregator: textAggregator,
|
|
250
255
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -256,7 +261,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
256
261
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
257
262
|
undefined;
|
|
258
263
|
return {
|
|
259
|
-
ls_provider: "openai",
|
|
264
|
+
ls_provider: provider,
|
|
260
265
|
ls_model_type: "llm",
|
|
261
266
|
ls_model_name: params.model,
|
|
262
267
|
ls_max_tokens: params.max_tokens ?? undefined,
|
|
@@ -279,7 +284,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
279
284
|
if (tracedOpenAIClient.responses &&
|
|
280
285
|
typeof tracedOpenAIClient.responses.create === "function") {
|
|
281
286
|
tracedOpenAIClient.responses.create = traceable(openai.responses.create.bind(openai.responses), {
|
|
282
|
-
name: "ChatOpenAI",
|
|
287
|
+
name: chatName,
|
|
283
288
|
run_type: "llm",
|
|
284
289
|
aggregator: responsesAggregator,
|
|
285
290
|
argsConfigPath: [1, "langsmithExtra"],
|
|
@@ -289,7 +294,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
289
294
|
// Handle responses API parameters
|
|
290
295
|
const params = payload;
|
|
291
296
|
return {
|
|
292
|
-
ls_provider: "openai",
|
|
297
|
+
ls_provider: provider,
|
|
293
298
|
ls_model_type: "llm",
|
|
294
299
|
ls_model_name: params.model || "unknown",
|
|
295
300
|
};
|