langsmith 0.3.76 → 0.3.77
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/wrappers/openai.cjs +62 -0
- package/dist/wrappers/openai.js +62 -0
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -10,4 +10,4 @@ Object.defineProperty(exports, "overrideFetchImplementation", { enumerable: true
|
|
|
10
10
|
var project_js_1 = require("./utils/project.cjs");
|
|
11
11
|
Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get: function () { return project_js_1.getDefaultProjectName; } });
|
|
12
12
|
// Update using yarn bump-version
|
|
13
|
-
exports.__version__ = "0.3.76";
|
|
13
|
+
exports.__version__ = "0.3.77";
|
package/dist/index.d.ts
CHANGED
|
@@ -3,4 +3,4 @@ export type { Dataset, Example, TracerSession, Run, Feedback, RetrieverOutput, }
|
|
|
3
3
|
export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
5
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
6
|
-
export declare const __version__ = "0.3.76";
|
|
6
|
+
export declare const __version__ = "0.3.77";
|
package/dist/index.js
CHANGED
|
@@ -3,4 +3,4 @@ export { RunTree } from "./run_trees.js";
|
|
|
3
3
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
4
4
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
5
5
|
// Update using yarn bump-version
|
|
6
|
-
export const __version__ = "0.3.76";
|
|
6
|
+
export const __version__ = "0.3.77";
|
package/dist/wrappers/openai.cjs
CHANGED
|
@@ -263,6 +263,36 @@ const wrapOpenAI = (openai, options) => {
|
|
|
263
263
|
const params = payload;
|
|
264
264
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
265
265
|
undefined;
|
|
266
|
+
// Allowlist of safe invocation parameters to include
|
|
267
|
+
const allowedInvocationKeys = new Set([
|
|
268
|
+
"frequency_penalty",
|
|
269
|
+
"n",
|
|
270
|
+
"logit_bias",
|
|
271
|
+
"logprobs",
|
|
272
|
+
"modalities",
|
|
273
|
+
"parallel_tool_calls",
|
|
274
|
+
"prediction",
|
|
275
|
+
"presence_penalty",
|
|
276
|
+
"prompt_cache_key",
|
|
277
|
+
"reasoning",
|
|
278
|
+
"reasoning_effort",
|
|
279
|
+
"response_format",
|
|
280
|
+
"seed",
|
|
281
|
+
"service_tier",
|
|
282
|
+
"stream_options",
|
|
283
|
+
"top_logprobs",
|
|
284
|
+
"top_p",
|
|
285
|
+
"truncation",
|
|
286
|
+
"user",
|
|
287
|
+
"verbosity",
|
|
288
|
+
"web_search_options",
|
|
289
|
+
]);
|
|
290
|
+
const ls_invocation_params = {};
|
|
291
|
+
for (const [key, value] of Object.entries(params)) {
|
|
292
|
+
if (allowedInvocationKeys.has(key)) {
|
|
293
|
+
ls_invocation_params[key] = value;
|
|
294
|
+
}
|
|
295
|
+
}
|
|
266
296
|
return {
|
|
267
297
|
ls_provider: provider,
|
|
268
298
|
ls_model_type: "chat",
|
|
@@ -270,6 +300,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
270
300
|
ls_max_tokens: params.max_completion_tokens ?? params.max_tokens ?? undefined,
|
|
271
301
|
ls_temperature: params.temperature ?? undefined,
|
|
272
302
|
ls_stop,
|
|
303
|
+
ls_invocation_params,
|
|
273
304
|
};
|
|
274
305
|
},
|
|
275
306
|
processOutputs: processChatCompletion,
|
|
@@ -309,6 +340,36 @@ const wrapOpenAI = (openai, options) => {
|
|
|
309
340
|
const params = payload;
|
|
310
341
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
311
342
|
undefined;
|
|
343
|
+
// Allowlist of safe invocation parameters to include
|
|
344
|
+
const allowedInvocationKeys = new Set([
|
|
345
|
+
"frequency_penalty",
|
|
346
|
+
"n",
|
|
347
|
+
"logit_bias",
|
|
348
|
+
"logprobs",
|
|
349
|
+
"modalities",
|
|
350
|
+
"parallel_tool_calls",
|
|
351
|
+
"prediction",
|
|
352
|
+
"presence_penalty",
|
|
353
|
+
"prompt_cache_key",
|
|
354
|
+
"reasoning",
|
|
355
|
+
"reasoning_effort",
|
|
356
|
+
"response_format",
|
|
357
|
+
"seed",
|
|
358
|
+
"service_tier",
|
|
359
|
+
"stream_options",
|
|
360
|
+
"top_logprobs",
|
|
361
|
+
"top_p",
|
|
362
|
+
"truncation",
|
|
363
|
+
"user",
|
|
364
|
+
"verbosity",
|
|
365
|
+
"web_search_options",
|
|
366
|
+
]);
|
|
367
|
+
const ls_invocation_params = {};
|
|
368
|
+
for (const [key, value] of Object.entries(params)) {
|
|
369
|
+
if (allowedInvocationKeys.has(key)) {
|
|
370
|
+
ls_invocation_params[key] = value;
|
|
371
|
+
}
|
|
372
|
+
}
|
|
312
373
|
return {
|
|
313
374
|
ls_provider: provider,
|
|
314
375
|
ls_model_type: "llm",
|
|
@@ -316,6 +377,7 @@ const wrapOpenAI = (openai, options) => {
|
|
|
316
377
|
ls_max_tokens: params.max_tokens ?? undefined,
|
|
317
378
|
ls_temperature: params.temperature ?? undefined,
|
|
318
379
|
ls_stop,
|
|
380
|
+
ls_invocation_params,
|
|
319
381
|
};
|
|
320
382
|
},
|
|
321
383
|
...options,
|
package/dist/wrappers/openai.js
CHANGED
|
@@ -260,6 +260,36 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
260
260
|
const params = payload;
|
|
261
261
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
262
262
|
undefined;
|
|
263
|
+
// Allowlist of safe invocation parameters to include
|
|
264
|
+
const allowedInvocationKeys = new Set([
|
|
265
|
+
"frequency_penalty",
|
|
266
|
+
"n",
|
|
267
|
+
"logit_bias",
|
|
268
|
+
"logprobs",
|
|
269
|
+
"modalities",
|
|
270
|
+
"parallel_tool_calls",
|
|
271
|
+
"prediction",
|
|
272
|
+
"presence_penalty",
|
|
273
|
+
"prompt_cache_key",
|
|
274
|
+
"reasoning",
|
|
275
|
+
"reasoning_effort",
|
|
276
|
+
"response_format",
|
|
277
|
+
"seed",
|
|
278
|
+
"service_tier",
|
|
279
|
+
"stream_options",
|
|
280
|
+
"top_logprobs",
|
|
281
|
+
"top_p",
|
|
282
|
+
"truncation",
|
|
283
|
+
"user",
|
|
284
|
+
"verbosity",
|
|
285
|
+
"web_search_options",
|
|
286
|
+
]);
|
|
287
|
+
const ls_invocation_params = {};
|
|
288
|
+
for (const [key, value] of Object.entries(params)) {
|
|
289
|
+
if (allowedInvocationKeys.has(key)) {
|
|
290
|
+
ls_invocation_params[key] = value;
|
|
291
|
+
}
|
|
292
|
+
}
|
|
263
293
|
return {
|
|
264
294
|
ls_provider: provider,
|
|
265
295
|
ls_model_type: "chat",
|
|
@@ -267,6 +297,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
267
297
|
ls_max_tokens: params.max_completion_tokens ?? params.max_tokens ?? undefined,
|
|
268
298
|
ls_temperature: params.temperature ?? undefined,
|
|
269
299
|
ls_stop,
|
|
300
|
+
ls_invocation_params,
|
|
270
301
|
};
|
|
271
302
|
},
|
|
272
303
|
processOutputs: processChatCompletion,
|
|
@@ -306,6 +337,36 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
306
337
|
const params = payload;
|
|
307
338
|
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
|
|
308
339
|
undefined;
|
|
340
|
+
// Allowlist of safe invocation parameters to include
|
|
341
|
+
const allowedInvocationKeys = new Set([
|
|
342
|
+
"frequency_penalty",
|
|
343
|
+
"n",
|
|
344
|
+
"logit_bias",
|
|
345
|
+
"logprobs",
|
|
346
|
+
"modalities",
|
|
347
|
+
"parallel_tool_calls",
|
|
348
|
+
"prediction",
|
|
349
|
+
"presence_penalty",
|
|
350
|
+
"prompt_cache_key",
|
|
351
|
+
"reasoning",
|
|
352
|
+
"reasoning_effort",
|
|
353
|
+
"response_format",
|
|
354
|
+
"seed",
|
|
355
|
+
"service_tier",
|
|
356
|
+
"stream_options",
|
|
357
|
+
"top_logprobs",
|
|
358
|
+
"top_p",
|
|
359
|
+
"truncation",
|
|
360
|
+
"user",
|
|
361
|
+
"verbosity",
|
|
362
|
+
"web_search_options",
|
|
363
|
+
]);
|
|
364
|
+
const ls_invocation_params = {};
|
|
365
|
+
for (const [key, value] of Object.entries(params)) {
|
|
366
|
+
if (allowedInvocationKeys.has(key)) {
|
|
367
|
+
ls_invocation_params[key] = value;
|
|
368
|
+
}
|
|
369
|
+
}
|
|
309
370
|
return {
|
|
310
371
|
ls_provider: provider,
|
|
311
372
|
ls_model_type: "llm",
|
|
@@ -313,6 +374,7 @@ export const wrapOpenAI = (openai, options) => {
|
|
|
313
374
|
ls_max_tokens: params.max_tokens ?? undefined,
|
|
314
375
|
ls_temperature: params.temperature ?? undefined,
|
|
315
376
|
ls_stop,
|
|
377
|
+
ls_invocation_params,
|
|
316
378
|
};
|
|
317
379
|
},
|
|
318
380
|
...options,
|