@langchain/core 0.2.19 → 0.2.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/language_models/base.d.ts +0 -10
  2. package/dist/language_models/chat_models.cjs +5 -6
  3. package/dist/language_models/chat_models.d.ts +9 -10
  4. package/dist/language_models/chat_models.js +5 -6
  5. package/dist/language_models/llms.cjs +5 -6
  6. package/dist/language_models/llms.d.ts +10 -12
  7. package/dist/language_models/llms.js +5 -6
  8. package/dist/runnables/base.cjs +34 -9
  9. package/dist/runnables/base.js +34 -9
  10. package/dist/runnables/config.cjs +41 -0
  11. package/dist/runnables/config.js +41 -0
  12. package/dist/runnables/remote.cjs +14 -13
  13. package/dist/runnables/remote.js +14 -13
  14. package/dist/runnables/types.d.ts +10 -0
  15. package/dist/utils/math.cjs +6 -4
  16. package/dist/utils/math.js +6 -4
  17. package/dist/utils/ml-distance/distances.cjs +18 -0
  18. package/dist/utils/ml-distance/distances.d.ts +8 -0
  19. package/dist/utils/ml-distance/distances.js +14 -0
  20. package/dist/utils/ml-distance/similarities.cjs +21 -0
  21. package/dist/utils/ml-distance/similarities.d.ts +7 -0
  22. package/dist/utils/ml-distance/similarities.js +17 -0
  23. package/dist/utils/ml-distance-euclidean/euclidean.cjs +15 -0
  24. package/dist/utils/ml-distance-euclidean/euclidean.d.ts +2 -0
  25. package/dist/utils/ml-distance-euclidean/euclidean.js +10 -0
  26. package/dist/utils/signal.cjs +28 -0
  27. package/dist/utils/signal.d.ts +1 -0
  28. package/dist/utils/signal.js +24 -0
  29. package/dist/utils/stream.cjs +19 -4
  30. package/dist/utils/stream.d.ts +3 -1
  31. package/dist/utils/stream.js +19 -4
  32. package/dist/utils/testing/index.cjs +9 -3
  33. package/dist/utils/testing/index.d.ts +9 -6
  34. package/dist/utils/testing/index.js +9 -3
  35. package/package.json +1 -2
package/dist/utils/stream.js CHANGED
@@ -1,4 +1,5 @@
  import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js";
+ import { raceWithSignal } from "./signal.js";
  /*
   * Support async iterator syntax for ReadableStreams in all environments.
   * Source: https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
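The new `dist/utils/signal.js` module imported here (files 26-28 in the list above) is not expanded in this view. As a rough illustration only, a promise-vs-AbortSignal race helper like `raceWithSignal` could look like the sketch below; the shipped implementation may differ in error types and cleanup details.

```ts
// Illustrative sketch, not the actual dist/utils/signal.js source.
export async function raceWithSignal<T>(
  promise: Promise<T>,
  signal?: AbortSignal
): Promise<T> {
  if (signal === undefined) {
    return promise;
  }
  return new Promise<T>((resolve, reject) => {
    // Reject immediately if the signal is already aborted.
    if (signal.aborted) {
      reject(new Error("Aborted"));
      return;
    }
    const onAbort = () => reject(new Error("Aborted"));
    signal.addEventListener("abort", onAbort, { once: true });
    promise
      .then(resolve, reject)
      .finally(() => signal.removeEventListener("abort", onAbort));
  });
}
```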
@@ -181,6 +182,12 @@ export class AsyncGeneratorWithSetup {
            writable: true,
            value: void 0
        });
+       Object.defineProperty(this, "signal", {
+           enumerable: true,
+           configurable: true,
+           writable: true,
+           value: void 0
+       });
        Object.defineProperty(this, "firstResult", {
            enumerable: true,
            configurable: true,
@@ -195,6 +202,8 @@ export class AsyncGeneratorWithSetup {
        });
        this.generator = params.generator;
        this.config = params.config;
+       // eslint-disable-next-line @typescript-eslint/no-explicit-any
+       this.signal = params.signal ?? this.config?.signal;
        // setup is a promise that resolves only after the first iterator value
        // is available. this is useful when setup of several piped generators
        // needs to happen in logical order, ie. in the order in which input to
@@ -212,13 +221,18 @@ export class AsyncGeneratorWithSetup {
        });
    }
    async next(...args) {
+       this.signal?.throwIfAborted();
        if (!this.firstResultUsed) {
            this.firstResultUsed = true;
            return this.firstResult;
        }
-       return AsyncLocalStorageProviderSingleton.runWithConfig(this.config, async () => {
-           return this.generator.next(...args);
-       }, true);
+       return AsyncLocalStorageProviderSingleton.runWithConfig(this.config, this.signal
+           ? async () => {
+               return raceWithSignal(this.generator.next(...args), this.signal);
+           }
+           : async () => {
+               return this.generator.next(...args);
+           }, true);
    }
    async return(value) {
        return this.generator.return(value);
@@ -230,10 +244,11 @@ export class AsyncGeneratorWithSetup {
        return this;
    }
}
- export async function pipeGeneratorWithSetup(to, generator, startSetup, ...args) {
+ export async function pipeGeneratorWithSetup(to, generator, startSetup, signal, ...args) {
    const gen = new AsyncGeneratorWithSetup({
        generator,
        startSetup,
+       signal,
    });
    const setup = await gen.setup;
    return { output: to(gen, setup, ...args), setup };
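Taken together, these changes let a pending `generator.next()` be interrupted when the caller's `AbortSignal` fires rather than only being checked between chunks. A hedged usage sketch, using the `FakeListChatModel` test helper and its `sleep` option purely as a stand-in for any streaming model:

```ts
import { FakeListChatModel } from "@langchain/core/utils/testing";

// Sketch: cancel an in-flight stream via the `signal` call option.
const model = new FakeListChatModel({ responses: ["a long answer"], sleep: 50 });
const controller = new AbortController();

// Abort shortly after streaming starts.
setTimeout(() => controller.abort(), 120);

try {
  const stream = await model.stream("hello", { signal: controller.signal });
  for await (const chunk of stream) {
    console.log(chunk.content);
  }
} catch (err) {
  console.log("stream was aborted:", err);
}
```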
package/dist/utils/testing/index.cjs CHANGED
@@ -4,7 +4,6 @@
  /* eslint-disable @typescript-eslint/no-unused-vars */
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.FakeVectorStore = exports.SingleRunExtractor = exports.SyntheticEmbeddings = exports.FakeEmbeddings = exports.FakeTool = exports.FakeTracer = exports.FakeListChatMessageHistory = exports.FakeChatMessageHistory = exports.FakeListChatModel = exports.FakeRetriever = exports.FakeStreamingChatModel = exports.FakeChatModel = exports.FakeStreamingLLM = exports.FakeLLM = exports.FakeRunnable = exports.FakeSplitIntoListParser = void 0;
- const ml_distance_1 = require("ml-distance");
  const chat_history_js_1 = require("../../chat_history.cjs");
  const document_js_1 = require("../../documents/document.cjs");
  const chat_models_js_1 = require("../../language_models/chat_models.cjs");
@@ -18,6 +17,7 @@ const index_js_3 = require("../../tools/index.cjs");
  const base_js_3 = require("../../tracers/base.cjs");
  const embeddings_js_1 = require("../../embeddings.cjs");
  const vectorstores_js_1 = require("../../vectorstores.cjs");
+ const similarities_js_1 = require("../ml-distance/similarities.cjs");
  /**
   * Parser for comma-separated values. It splits the input text by commas
   * and trims the resulting values.
@@ -347,6 +347,9 @@ class FakeListChatModel extends chat_models_js_1.BaseChatModel {
    }
    async _generate(_messages, options, runManager) {
        await this._sleepIfRequested();
+       if (options?.thrownErrorString) {
+           throw new Error(options.thrownErrorString);
+       }
        if (this.emitCustomEvent) {
            await runManager?.handleCustomEvent("some_test_event", {
                someval: true,
@@ -372,7 +375,7 @@ class FakeListChatModel extends chat_models_js_1.BaseChatModel {
            text,
        };
    }
-   async *_streamResponseChunks(_messages, _options, runManager) {
+   async *_streamResponseChunks(_messages, options, runManager) {
        const response = this._currentResponse();
        this._incrementResponse();
        if (this.emitCustomEvent) {
@@ -382,6 +385,9 @@ class FakeListChatModel extends chat_models_js_1.BaseChatModel {
        }
        for await (const text of response) {
            await this._sleepIfRequested();
+           if (options?.thrownErrorString) {
+               throw new Error(options.thrownErrorString);
+           }
            const chunk = this._createResponseChunk(text);
            yield chunk;
            void runManager?.handleLLMNewToken(text);
@@ -678,7 +684,7 @@ class FakeVectorStore extends vectorstores_js_1.VectorStore {
            writable: true,
            value: void 0
        });
-       this.similarity = similarity ?? ml_distance_1.similarity.cosine;
+       this.similarity = similarity ?? similarities_js_1.cosine;
    }
    /**
     * Method to add documents to the memory vector store. It extracts the
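The `ml-distance` dependency is dropped in favour of small vendored helpers under `dist/utils/ml-distance/` and `dist/utils/ml-distance-euclidean/` (files 17-25 in the list above), which are not expanded in this view. For illustration, a cosine similarity equivalent to what the vendored `similarities` module needs to provide could be as simple as the sketch below; the actual vendored code may handle edge cases differently.

```ts
// Illustrative cosine similarity; not the actual vendored source.
export function cosine(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < Math.min(a.length, b.length); i += 1) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  // Treat zero vectors as having no similarity rather than dividing by zero.
  if (normA === 0 || normB === 0) {
    return 0;
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}
```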
package/dist/utils/testing/index.d.ts CHANGED
@@ -1,9 +1,8 @@
- import { similarity as ml_distance_similarity } from "ml-distance";
  import { z } from "zod";
  import { BaseCallbackConfig, CallbackManagerForLLMRun, CallbackManagerForToolRun } from "../../callbacks/manager.js";
  import { BaseChatMessageHistory, BaseListChatMessageHistory } from "../../chat_history.js";
  import { Document } from "../../documents/document.js";
- import { BaseChatModel, BaseChatModelParams } from "../../language_models/chat_models.js";
+ import { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from "../../language_models/chat_models.js";
  import { BaseLLMParams, LLM } from "../../language_models/llms.js";
  import { BaseMessage, AIMessage } from "../../messages/index.js";
  import { BaseOutputParser } from "../../output_parsers/base.js";
@@ -15,6 +14,7 @@ import { BaseTracer, Run } from "../../tracers/base.js";
  import { Embeddings, EmbeddingsInterface, EmbeddingsParams } from "../../embeddings.js";
  import { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from "../../language_models/base.js";
  import { VectorStore } from "../../vectorstores.js";
+ import { cosine } from "../ml-distance/similarities.js";
  /**
   * Parser for comma-separated values. It splits the input text by commas
   * and trims the resulting values.
@@ -91,6 +91,9 @@ export interface FakeChatInput extends BaseChatModelParams {
      sleep?: number;
      emitCustomEvent?: boolean;
  }
+ export interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {
+     thrownErrorString?: string;
+ }
  /**
   * A fake Chat Model that returns a predefined list of responses. It can be used
   * for testing purposes.
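The new `thrownErrorString` call option makes the fake model throw on demand, which is useful for exercising error-handling paths (fallbacks, retries) in tests. A hedged usage example; the surrounding framing is illustrative rather than taken from the package:

```ts
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["ok"] });

// Force the fake model to throw instead of answering.
try {
  await model.invoke("hi", { thrownErrorString: "Simulated provider outage" });
} catch (err) {
  console.log((err as Error).message); // "Simulated provider outage"
}
```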
@@ -111,7 +114,7 @@ export interface FakeChatInput extends BaseChatModelParams {
   * console.log({ secondResponse });
   * ```
   */
- export declare class FakeListChatModel extends BaseChatModel {
+ export declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {
      static lc_name(): string;
      responses: string[];
      i: number;
@@ -125,7 +128,7 @@ export declare class FakeListChatModel {
          message: AIMessage;
          text: string;
      };
-     _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+     _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
      _sleepIfRequested(): Promise<void>;
      _sleep(): Promise<void>;
      _createResponseChunk(text: string): ChatGenerationChunk;
@@ -248,7 +251,7 @@ interface MemoryVector {
   * function.
   */
  export interface FakeVectorStoreArgs {
-     similarity?: typeof ml_distance_similarity.cosine;
+     similarity?: typeof cosine;
  }
  /**
   * Class that extends `VectorStore` to store vectors in memory. Provides
@@ -258,7 +261,7 @@ export interface FakeVectorStoreArgs {
  export declare class FakeVectorStore extends VectorStore {
      FilterType: (doc: Document) => boolean;
      memoryVectors: MemoryVector[];
-     similarity: typeof ml_distance_similarity.cosine;
+     similarity: typeof cosine;
      _vectorstoreType(): string;
      constructor(embeddings: EmbeddingsInterface, { similarity, ...rest }?: FakeVectorStoreArgs);
      /**
package/dist/utils/testing/index.js CHANGED
@@ -1,7 +1,6 @@
  /* eslint-disable no-promise-executor-return */
  /* eslint-disable @typescript-eslint/no-explicit-any */
  /* eslint-disable @typescript-eslint/no-unused-vars */
- import { similarity as ml_distance_similarity } from "ml-distance";
  import { BaseChatMessageHistory, BaseListChatMessageHistory, } from "../../chat_history.js";
  import { Document } from "../../documents/document.js";
  import { BaseChatModel, } from "../../language_models/chat_models.js";
@@ -15,6 +14,7 @@ import { StructuredTool } from "../../tools/index.js";
  import { BaseTracer } from "../../tracers/base.js";
  import { Embeddings, } from "../../embeddings.js";
  import { VectorStore } from "../../vectorstores.js";
+ import { cosine } from "../ml-distance/similarities.js";
  /**
   * Parser for comma-separated values. It splits the input text by commas
   * and trims the resulting values.
@@ -337,6 +337,9 @@ export class FakeListChatModel extends BaseChatModel {
    }
    async _generate(_messages, options, runManager) {
        await this._sleepIfRequested();
+       if (options?.thrownErrorString) {
+           throw new Error(options.thrownErrorString);
+       }
        if (this.emitCustomEvent) {
            await runManager?.handleCustomEvent("some_test_event", {
                someval: true,
@@ -362,7 +365,7 @@ export class FakeListChatModel {
            text,
        };
    }
-   async *_streamResponseChunks(_messages, _options, runManager) {
+   async *_streamResponseChunks(_messages, options, runManager) {
        const response = this._currentResponse();
        this._incrementResponse();
        if (this.emitCustomEvent) {
@@ -372,6 +375,9 @@ export class FakeListChatModel
        }
        for await (const text of response) {
            await this._sleepIfRequested();
+           if (options?.thrownErrorString) {
+               throw new Error(options.thrownErrorString);
+           }
            const chunk = this._createResponseChunk(text);
            yield chunk;
            void runManager?.handleLLMNewToken(text);
@@ -660,7 +666,7 @@ export class FakeVectorStore extends VectorStore {
            writable: true,
            value: void 0
        });
-       this.similarity = similarity ?? ml_distance_similarity.cosine;
+       this.similarity = similarity ?? cosine;
    }
    /**
     * Method to add documents to the memory vector store. It extracts the
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@langchain/core",
-   "version": "0.2.19",
+   "version": "0.2.20",
    "description": "Core LangChain.js abstractions and schemas",
    "type": "module",
    "engines": {
@@ -46,7 +46,6 @@
    "decamelize": "1.2.0",
    "js-tiktoken": "^1.0.12",
    "langsmith": "~0.1.39",
-   "ml-distance": "^4.0.0",
    "mustache": "^4.2.0",
    "p-queue": "^6.6.2",
    "p-retry": "4",