langsmith 0.3.84 → 0.3.86

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/client.cjs CHANGED
@@ -48,7 +48,10 @@ const prompts_js_1 = require("./utils/prompts.cjs");
48
48
  const error_js_1 = require("./utils/error.cjs");
49
49
  const fetch_js_1 = require("./singletons/fetch.cjs");
50
50
  const index_js_2 = require("./utils/fast-safe-stringify/index.cjs");
51
- function mergeRuntimeEnvIntoRun(run, cachedEnvVars) {
51
+ function mergeRuntimeEnvIntoRun(run, cachedEnvVars, omitTracedRuntimeInfo) {
52
+ if (omitTracedRuntimeInfo) {
53
+ return run;
54
+ }
52
55
  const runtimeEnv = (0, env_js_1.getRuntimeEnvironment)();
53
56
  const envVars = cachedEnvVars ?? (0, env_js_1.getLangSmithEnvVarsMetadata)();
54
57
  const extra = run.extra ?? {};
@@ -292,6 +295,12 @@ class Client {
292
295
  writable: true,
293
296
  value: void 0
294
297
  });
298
+ Object.defineProperty(this, "omitTracedRuntimeInfo", {
299
+ enumerable: true,
300
+ configurable: true,
301
+ writable: true,
302
+ value: void 0
303
+ });
295
304
  Object.defineProperty(this, "tracingSampleRate", {
296
305
  enumerable: true,
297
306
  configurable: true,
@@ -452,6 +461,7 @@ class Client {
452
461
  config.hideInputs ?? config.anonymizer ?? defaultConfig.hideInputs;
453
462
  this.hideOutputs =
454
463
  config.hideOutputs ?? config.anonymizer ?? defaultConfig.hideOutputs;
464
+ this.omitTracedRuntimeInfo = config.omitTracedRuntimeInfo ?? false;
455
465
  this.autoBatchTracing = config.autoBatchTracing ?? this.autoBatchTracing;
456
466
  this.autoBatchQueue = new AutoBatchQueue(maxMemory);
457
467
  this.blockOnRootRunFinalization =
@@ -821,7 +831,7 @@ class Client {
821
831
  async processRunOperation(item) {
822
832
  clearTimeout(this.autoBatchTimeout);
823
833
  this.autoBatchTimeout = undefined;
824
- item.item = mergeRuntimeEnvIntoRun(item.item, this.cachedLSEnvVarsForMetadata);
834
+ item.item = mergeRuntimeEnvIntoRun(item.item, this.cachedLSEnvVarsForMetadata, this.omitTracedRuntimeInfo);
825
835
  const itemPromise = this.autoBatchQueue.push(item);
826
836
  if (this.manualFlushMode) {
827
837
  // Rely on manual flushing in serverless environments
@@ -943,7 +953,7 @@ class Client {
943
953
  }).catch(console.error);
944
954
  return;
945
955
  }
946
- const mergedRunCreateParam = mergeRuntimeEnvIntoRun(runCreate, this.cachedLSEnvVarsForMetadata);
956
+ const mergedRunCreateParam = mergeRuntimeEnvIntoRun(runCreate, this.cachedLSEnvVarsForMetadata, this.omitTracedRuntimeInfo);
947
957
  if (options?.apiKey !== undefined) {
948
958
  headers["x-api-key"] = options.apiKey;
949
959
  }
package/dist/client.d.ts CHANGED
@@ -11,6 +11,13 @@ export interface ClientConfig {
11
11
  anonymizer?: (values: KVMap) => KVMap | Promise<KVMap>;
12
12
  hideInputs?: boolean | ((inputs: KVMap) => KVMap | Promise<KVMap>);
13
13
  hideOutputs?: boolean | ((outputs: KVMap) => KVMap | Promise<KVMap>);
14
+ /**
15
+ * Whether to omit runtime information from traced runs.
16
+ * If true, runtime information (SDK version, platform, etc.) and
17
+ * LangChain environment variable metadata will not be stored in runs.
18
+ * Defaults to false.
19
+ */
20
+ omitTracedRuntimeInfo?: boolean;
14
21
  autoBatchTracing?: boolean;
15
22
  /** Maximum size of a batch of runs in bytes. */
16
23
  batchSizeBytesLimit?: number;
@@ -280,7 +287,7 @@ type Thread = {
280
287
  last_outputs: string;
281
288
  last_error: string | null;
282
289
  };
283
- export declare function mergeRuntimeEnvIntoRun<T extends RunCreate | RunUpdate>(run: T, cachedEnvVars?: Record<string, string>): T;
290
+ export declare function mergeRuntimeEnvIntoRun<T extends RunCreate | RunUpdate>(run: T, cachedEnvVars?: Record<string, string>, omitTracedRuntimeInfo?: boolean): T;
284
291
  export declare const DEFAULT_UNCOMPRESSED_BATCH_SIZE_LIMIT_BYTES: number;
285
292
  /** Default maximum memory (1GB) for queue size limits. */
286
293
  export declare const DEFAULT_MAX_SIZE_BYTES: number;
@@ -325,6 +332,7 @@ export declare class Client implements LangSmithTracingClientInterface {
325
332
  private _tenantId;
326
333
  private hideInputs?;
327
334
  private hideOutputs?;
335
+ private omitTracedRuntimeInfo?;
328
336
  private tracingSampleRate?;
329
337
  private filteredPostUuids;
330
338
  private autoBatchTracing;
package/dist/client.js CHANGED
@@ -11,7 +11,10 @@ import { parsePromptIdentifier } from "./utils/prompts.js";
11
11
  import { raiseForStatus } from "./utils/error.js";
12
12
  import { _globalFetchImplementationIsNodeFetch, _getFetchImplementation, } from "./singletons/fetch.js";
13
13
  import { serialize as serializePayloadForTracing } from "./utils/fast-safe-stringify/index.js";
14
- export function mergeRuntimeEnvIntoRun(run, cachedEnvVars) {
14
+ export function mergeRuntimeEnvIntoRun(run, cachedEnvVars, omitTracedRuntimeInfo) {
15
+ if (omitTracedRuntimeInfo) {
16
+ return run;
17
+ }
15
18
  const runtimeEnv = getRuntimeEnvironment();
16
19
  const envVars = cachedEnvVars ?? getLangSmithEnvVarsMetadata();
17
20
  const extra = run.extra ?? {};
@@ -254,6 +257,12 @@ export class Client {
254
257
  writable: true,
255
258
  value: void 0
256
259
  });
260
+ Object.defineProperty(this, "omitTracedRuntimeInfo", {
261
+ enumerable: true,
262
+ configurable: true,
263
+ writable: true,
264
+ value: void 0
265
+ });
257
266
  Object.defineProperty(this, "tracingSampleRate", {
258
267
  enumerable: true,
259
268
  configurable: true,
@@ -414,6 +423,7 @@ export class Client {
414
423
  config.hideInputs ?? config.anonymizer ?? defaultConfig.hideInputs;
415
424
  this.hideOutputs =
416
425
  config.hideOutputs ?? config.anonymizer ?? defaultConfig.hideOutputs;
426
+ this.omitTracedRuntimeInfo = config.omitTracedRuntimeInfo ?? false;
417
427
  this.autoBatchTracing = config.autoBatchTracing ?? this.autoBatchTracing;
418
428
  this.autoBatchQueue = new AutoBatchQueue(maxMemory);
419
429
  this.blockOnRootRunFinalization =
@@ -783,7 +793,7 @@ export class Client {
783
793
  async processRunOperation(item) {
784
794
  clearTimeout(this.autoBatchTimeout);
785
795
  this.autoBatchTimeout = undefined;
786
- item.item = mergeRuntimeEnvIntoRun(item.item, this.cachedLSEnvVarsForMetadata);
796
+ item.item = mergeRuntimeEnvIntoRun(item.item, this.cachedLSEnvVarsForMetadata, this.omitTracedRuntimeInfo);
787
797
  const itemPromise = this.autoBatchQueue.push(item);
788
798
  if (this.manualFlushMode) {
789
799
  // Rely on manual flushing in serverless environments
@@ -905,7 +915,7 @@ export class Client {
905
915
  }).catch(console.error);
906
916
  return;
907
917
  }
908
- const mergedRunCreateParam = mergeRuntimeEnvIntoRun(runCreate, this.cachedLSEnvVarsForMetadata);
918
+ const mergedRunCreateParam = mergeRuntimeEnvIntoRun(runCreate, this.cachedLSEnvVarsForMetadata, this.omitTracedRuntimeInfo);
909
919
  if (options?.apiKey !== undefined) {
910
920
  headers["x-api-key"] = options.apiKey;
911
921
  }
package/dist/index.cjs CHANGED
@@ -13,4 +13,4 @@ var uuid_js_1 = require("./uuid.cjs");
13
13
  Object.defineProperty(exports, "uuid7", { enumerable: true, get: function () { return uuid_js_1.uuid7; } });
14
14
  Object.defineProperty(exports, "uuid7FromTime", { enumerable: true, get: function () { return uuid_js_1.uuid7FromTime; } });
15
15
  // Update using yarn bump-version
16
- exports.__version__ = "0.3.84";
16
+ exports.__version__ = "0.3.86";
package/dist/index.d.ts CHANGED
@@ -4,4 +4,4 @@ export { RunTree, type RunTreeConfig } from "./run_trees.js";
4
4
  export { overrideFetchImplementation } from "./singletons/fetch.js";
5
5
  export { getDefaultProjectName } from "./utils/project.js";
6
6
  export { uuid7, uuid7FromTime } from "./uuid.js";
7
- export declare const __version__ = "0.3.84";
7
+ export declare const __version__ = "0.3.86";
package/dist/index.js CHANGED
@@ -4,4 +4,4 @@ export { overrideFetchImplementation } from "./singletons/fetch.js";
4
4
  export { getDefaultProjectName } from "./utils/project.js";
5
5
  export { uuid7, uuid7FromTime } from "./uuid.js";
6
6
  // Update using yarn bump-version
7
- export const __version__ = "0.3.84";
7
+ export const __version__ = "0.3.86";
@@ -909,12 +909,14 @@ function isRunnableConfigLike(x) {
909
909
  // Check that it's an object with a callbacks arg
910
910
  // that has either a CallbackManagerLike object with a langchain tracer within it
911
911
  // or an array with a LangChainTracerLike object within it
912
+ const callbacks = x?.callbacks;
912
913
  return (x != null &&
913
- typeof x.callbacks === "object" &&
914
+ typeof callbacks === "object" &&
914
915
  // Callback manager with a langchain tracer
915
- (containsLangChainTracerLike(x.callbacks?.handlers) ||
916
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
917
+ (containsLangChainTracerLike(callbacks?.handlers) ||
916
918
  // Or it's an array with a LangChainTracerLike object within it
917
- containsLangChainTracerLike(x.callbacks)));
919
+ containsLangChainTracerLike(callbacks)));
918
920
  }
919
921
  function _getWriteReplicasFromEnv() {
920
922
  const envVar = (0, env_js_2.getEnvironmentVariable)("LANGSMITH_RUNS_ENDPOINTS");
@@ -48,7 +48,7 @@ export interface RunnableConfigLike {
48
48
  * Callbacks for this call and any sub-calls (eg. a Chain calling an LLM).
49
49
  * Tags are passed to all callbacks, metadata is passed to handle*Start callbacks.
50
50
  */
51
- callbacks?: any;
51
+ callbacks?: Record<string, any> | any[];
52
52
  }
53
53
  interface HeadersLike {
54
54
  get(name: string): string | null;
package/dist/run_trees.js CHANGED
@@ -902,12 +902,14 @@ export function isRunnableConfigLike(x) {
902
902
  // Check that it's an object with a callbacks arg
903
903
  // that has either a CallbackManagerLike object with a langchain tracer within it
904
904
  // or an array with a LangChainTracerLike object within it
905
+ const callbacks = x?.callbacks;
905
906
  return (x != null &&
906
- typeof x.callbacks === "object" &&
907
+ typeof callbacks === "object" &&
907
908
  // Callback manager with a langchain tracer
908
- (containsLangChainTracerLike(x.callbacks?.handlers) ||
909
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
910
+ (containsLangChainTracerLike(callbacks?.handlers) ||
909
911
  // Or it's an array with a LangChainTracerLike object within it
910
- containsLangChainTracerLike(x.callbacks)));
912
+ containsLangChainTracerLike(callbacks)));
911
913
  }
912
914
  function _getWriteReplicasFromEnv() {
913
915
  const envVar = getEnvironmentVariable("LANGSMITH_RUNS_ENDPOINTS");
@@ -1,16 +1,18 @@
1
1
  import { RunTree, RunnableConfigLike } from "../run_trees.js";
2
2
  import { ROOT } from "./traceable.js";
3
3
  import { _LC_CONTEXT_VARIABLES_KEY } from "./constants.js";
4
- type SmartPromise<T> = T extends AsyncGenerator ? T : T extends Promise<unknown> ? T : Promise<T>;
4
+ type SmartPromise<T> = T extends AsyncIterable<any, any, any> ? T : T extends Promise<unknown> ? T : Promise<T>;
5
5
  type WrapArgReturnPair<Pair> = Pair extends [
6
6
  infer Args extends any[],
7
7
  infer Return
8
8
  ] ? Args extends [RunTree, ...infer RestArgs] ? {
9
9
  (runTree: RunTree | typeof ROOT, ...args: RestArgs): SmartPromise<Return>;
10
+ /** @deprecated Will be removed in 0.4 */
10
11
  (config: RunnableConfigLike, ...args: RestArgs): SmartPromise<Return>;
11
12
  } : {
12
13
  (...args: Args): SmartPromise<Return>;
13
- (runTree: RunTree, ...rest: Args): SmartPromise<Return>;
14
+ (runTree: RunTree | typeof ROOT, ...rest: Args): SmartPromise<Return>;
15
+ /** @deprecated Will be removed in 0.4 */
14
16
  (config: RunnableConfigLike, ...args: Args): SmartPromise<Return>;
15
17
  } : never;
16
18
  type UnionToIntersection<U> = (U extends any ? (x: U) => void : never) extends (x: infer I) => void ? I : never;
@@ -313,6 +313,36 @@ const wrapOpenAI = (openai, options) => {
313
313
  tracedOpenAIClient.beta.chat.completions.parse = (0, traceable_js_1.traceable)(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), chatCompletionParseMetadata);
314
314
  }
315
315
  }
316
+ // Shared function to wrap stream methods (similar to wrapAnthropic)
317
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
318
+ const wrapStreamMethod = (originalStreamFn) => {
319
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
320
+ return function (...args) {
321
+ const stream = originalStreamFn(...args);
322
+ // Helper to ensure stream is fully consumed before calling final methods
323
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
324
+ const ensureStreamConsumed = (methodName) => {
325
+ if (methodName in stream && typeof stream[methodName] === "function") {
326
+ const originalMethod = stream[methodName].bind(stream);
327
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
328
+ stream[methodName] = async (...args) => {
329
+ if ("done" in stream && typeof stream.done === "function") {
330
+ await stream.done();
331
+ }
332
+ for await (const _ of stream) {
333
+ // Finish consuming the stream if it has not already been consumed
334
+ }
335
+ return originalMethod(...args);
336
+ };
337
+ }
338
+ };
339
+ // Ensure stream is consumed for final methods
340
+ ensureStreamConsumed("finalChatCompletion");
341
+ ensureStreamConsumed("finalMessage");
342
+ ensureStreamConsumed("finalResponse");
343
+ return stream;
344
+ };
345
+ };
316
346
  tracedOpenAIClient.chat = {
317
347
  ...openai.chat,
318
348
  completions: Object.create(Object.getPrototypeOf(openai.chat.completions)),
@@ -325,6 +355,17 @@ const wrapOpenAI = (openai, options) => {
325
355
  if (typeof openai.chat.completions.parse === "function") {
326
356
  tracedOpenAIClient.chat.completions.parse = (0, traceable_js_1.traceable)(openai.chat.completions.parse.bind(openai.chat.completions), chatCompletionParseMetadata);
327
357
  }
358
+ // Wrap chat.completions.stream if it exists
359
+ if (typeof openai.chat.completions.stream === "function") {
360
+ tracedOpenAIClient.chat.completions.stream = (0, traceable_js_1.traceable)(wrapStreamMethod(openai.chat.completions.stream.bind(openai.chat.completions)), chatCompletionParseMetadata);
361
+ }
362
+ // Wrap beta.chat.completions.stream if it exists
363
+ if (openai.beta &&
364
+ openai.beta.chat &&
365
+ openai.beta.chat.completions &&
366
+ typeof openai.beta.chat.completions.stream === "function") {
367
+ tracedOpenAIClient.beta.chat.completions.stream = (0, traceable_js_1.traceable)(wrapStreamMethod(openai.beta.chat.completions.stream.bind(openai.beta.chat.completions)), chatCompletionParseMetadata);
368
+ }
328
369
  tracedOpenAIClient.completions = {
329
370
  ...openai.completions,
330
371
  create: (0, traceable_js_1.traceable)(openai.completions.create.bind(openai.completions), {
@@ -413,7 +454,7 @@ const wrapOpenAI = (openai, options) => {
413
454
  }
414
455
  if (tracedOpenAIClient.responses &&
415
456
  typeof tracedOpenAIClient.responses.stream === "function") {
416
- tracedOpenAIClient.responses.stream = (0, traceable_js_1.traceable)(openai.responses.stream.bind(openai.responses), {
457
+ tracedOpenAIClient.responses.stream = (0, traceable_js_1.traceable)(wrapStreamMethod(openai.responses.stream.bind(openai.responses)), {
417
458
  name: chatName,
418
459
  run_type: "llm",
419
460
  aggregator: responsesAggregator,
@@ -6,7 +6,8 @@ type OpenAIType = {
6
6
  chat: {
7
7
  completions: {
8
8
  create: (...args: any[]) => any;
9
- parse: (...args: any[]) => any;
9
+ parse?: (...args: any[]) => any;
10
+ stream?: (...args: any[]) => any;
10
11
  };
11
12
  };
12
13
  completions: {
@@ -310,6 +310,36 @@ export const wrapOpenAI = (openai, options) => {
310
310
  tracedOpenAIClient.beta.chat.completions.parse = traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), chatCompletionParseMetadata);
311
311
  }
312
312
  }
313
+ // Shared function to wrap stream methods (similar to wrapAnthropic)
314
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
315
+ const wrapStreamMethod = (originalStreamFn) => {
316
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
317
+ return function (...args) {
318
+ const stream = originalStreamFn(...args);
319
+ // Helper to ensure stream is fully consumed before calling final methods
320
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
321
+ const ensureStreamConsumed = (methodName) => {
322
+ if (methodName in stream && typeof stream[methodName] === "function") {
323
+ const originalMethod = stream[methodName].bind(stream);
324
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
325
+ stream[methodName] = async (...args) => {
326
+ if ("done" in stream && typeof stream.done === "function") {
327
+ await stream.done();
328
+ }
329
+ for await (const _ of stream) {
330
+ // Finish consuming the stream if it has not already been consumed
331
+ }
332
+ return originalMethod(...args);
333
+ };
334
+ }
335
+ };
336
+ // Ensure stream is consumed for final methods
337
+ ensureStreamConsumed("finalChatCompletion");
338
+ ensureStreamConsumed("finalMessage");
339
+ ensureStreamConsumed("finalResponse");
340
+ return stream;
341
+ };
342
+ };
313
343
  tracedOpenAIClient.chat = {
314
344
  ...openai.chat,
315
345
  completions: Object.create(Object.getPrototypeOf(openai.chat.completions)),
@@ -322,6 +352,17 @@ export const wrapOpenAI = (openai, options) => {
322
352
  if (typeof openai.chat.completions.parse === "function") {
323
353
  tracedOpenAIClient.chat.completions.parse = traceable(openai.chat.completions.parse.bind(openai.chat.completions), chatCompletionParseMetadata);
324
354
  }
355
+ // Wrap chat.completions.stream if it exists
356
+ if (typeof openai.chat.completions.stream === "function") {
357
+ tracedOpenAIClient.chat.completions.stream = traceable(wrapStreamMethod(openai.chat.completions.stream.bind(openai.chat.completions)), chatCompletionParseMetadata);
358
+ }
359
+ // Wrap beta.chat.completions.stream if it exists
360
+ if (openai.beta &&
361
+ openai.beta.chat &&
362
+ openai.beta.chat.completions &&
363
+ typeof openai.beta.chat.completions.stream === "function") {
364
+ tracedOpenAIClient.beta.chat.completions.stream = traceable(wrapStreamMethod(openai.beta.chat.completions.stream.bind(openai.beta.chat.completions)), chatCompletionParseMetadata);
365
+ }
325
366
  tracedOpenAIClient.completions = {
326
367
  ...openai.completions,
327
368
  create: traceable(openai.completions.create.bind(openai.completions), {
@@ -410,7 +451,7 @@ export const wrapOpenAI = (openai, options) => {
410
451
  }
411
452
  if (tracedOpenAIClient.responses &&
412
453
  typeof tracedOpenAIClient.responses.stream === "function") {
413
- tracedOpenAIClient.responses.stream = traceable(openai.responses.stream.bind(openai.responses), {
454
+ tracedOpenAIClient.responses.stream = traceable(wrapStreamMethod(openai.responses.stream.bind(openai.responses)), {
414
455
  name: chatName,
415
456
  run_type: "llm",
416
457
  aggregator: responsesAggregator,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "langsmith",
3
- "version": "0.3.84",
3
+ "version": "0.3.86",
4
4
  "description": "Client library to connect to the LangSmith Observability and Evaluation Platform.",
5
5
  "packageManager": "yarn@1.22.19",
6
6
  "files": [