@langchain/core 0.2.9 → 0.2.10

This diff compares the contents of two publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
@@ -18,9 +18,8 @@ class Document {
             writable: true,
             value: void 0
         });
-        this.pageContent = fields.pageContent
-            ? fields.pageContent.toString()
-            : this.pageContent;
+        this.pageContent =
+            fields.pageContent !== undefined ? fields.pageContent.toString() : "";
         this.metadata = fields.metadata ?? {};
     }
 }
@@ -15,9 +15,8 @@ export class Document {
             writable: true,
             value: void 0
         });
-        this.pageContent = fields.pageContent
-            ? fields.pageContent.toString()
-            : this.pageContent;
+        this.pageContent =
+            fields.pageContent !== undefined ? fields.pageContent.toString() : "";
         this.metadata = fields.metadata ?? {};
     }
 }
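The two hunks above appear to be the CJS and ESM builds of the same change to the `Document` constructor: `pageContent` was previously assigned through a truthiness check, so a defined-but-falsy value such as an empty string left the field `undefined`; the new code only falls through when the field is actually `undefined`, and defaults to `""`. A minimal sketch of the behavioral difference (standalone helper functions for illustration, not the library's class):

```ts
// Hypothetical helpers contrasting the old and new assignment strategies.
const pageContentBefore = (pageContent?: string): string | undefined =>
  // truthiness check: "" is falsy, so it silently falls back to undefined
  pageContent ? pageContent.toString() : undefined;

const pageContentAfter = (pageContent?: string): string =>
  // explicit undefined check: "" is preserved, missing input becomes ""
  pageContent !== undefined ? pageContent.toString() : "";

pageContentBefore("");       // undefined — an empty document lost its content
pageContentAfter("");        // ""        — the empty string survives
pageContentAfter(undefined); // ""        — still defaults to an empty string
```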
@@ -65,7 +65,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
             text: "",
         });
         try {
-            for await (const chunk of this._streamResponseChunks(input.toString(), callOptions, runManagers?.[0])) {
+            for await (const chunk of this._streamResponseChunks(prompt.toString(), callOptions, runManagers?.[0])) {
                 if (!generation) {
                     generation = chunk;
                 }
@@ -62,7 +62,7 @@ export class BaseLLM extends BaseLanguageModel {
             text: "",
         });
         try {
-            for await (const chunk of this._streamResponseChunks(input.toString(), callOptions, runManagers?.[0])) {
+            for await (const chunk of this._streamResponseChunks(prompt.toString(), callOptions, runManagers?.[0])) {
                 if (!generation) {
                     generation = chunk;
                 }
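The `BaseLLM` fix, again mirrored across the CJS and ESM builds, swaps `input.toString()` for `prompt.toString()` in the streaming loop. `input` is whatever the caller passed in (a string, a message array, or a prompt value), while `prompt` is that input coerced into a prompt value, and stringifying the raw input does not in general yield the formatted prompt text. A sketch of the failure mode, assuming `ChatPromptValue` accepts a bare message array (both names are @langchain/core exports):

```ts
import { HumanMessage } from "@langchain/core/messages";
import { ChatPromptValue } from "@langchain/core/prompt_values";

// Raw input as a caller might pass it: a bare message array.
const input = [new HumanMessage("hello there test")];

// Default stringification of the raw input — not the prompt text:
console.log(String(input)); // e.g. "[object Object]"

// The coerced prompt value formats properly:
const prompt = new ChatPromptValue(input);
console.log(prompt.toString()); // "Human: hello there test"
```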
@@ -1,6 +1,7 @@
 /* eslint-disable no-promise-executor-return */
 import { test } from "@jest/globals";
-import { FakeLLM } from "../../utils/testing/index.js";
+import { FakeLLM, FakeStreamingLLM } from "../../utils/testing/index.js";
+import { HumanMessagePromptTemplate } from "../../prompts/chat.js";
 test("Test FakeLLM uses callbacks", async () => {
     const model = new FakeLLM({});
     let acc = "";
@@ -37,3 +38,15 @@ test("Test FakeLLM uses callbacks with a cache", async () => {
     expect(response).toEqual(response2);
     expect(response2).toEqual(acc);
 });
+test("Test FakeStreamingLLM works when streaming through a prompt", async () => {
+    const prompt = HumanMessagePromptTemplate.fromTemplate("hello there {name}");
+    const model = new FakeStreamingLLM({});
+    const chain = prompt.pipe(model);
+    const stream = await chain.stream({ name: "test" });
+    const chunks = [];
+    for await (const chunk of stream) {
+        chunks.push(chunk);
+    }
+    expect(chunks.length).toBeGreaterThan(1);
+    expect(chunks.join("")).toEqual("Human: hello there test");
+});
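The new test covers the streaming fix end to end: a chat-style prompt is piped into `FakeStreamingLLM`, which (per the assertions) echoes its prompt back across multiple chunks, so the joined stream must equal the formatted prompt "Human: hello there test" rather than a stringified raw input.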
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/core",
-  "version": "0.2.9",
+  "version": "0.2.10",
   "description": "Core LangChain.js abstractions and schemas",
   "type": "module",
   "engines": {