langchain 0.0.180 → 0.0.182-rc.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/agents/openai/output_parser.cjs +3 -0
  2. package/dist/agents/openai/output_parser.js +3 -0
  3. package/dist/base_language/index.cjs +7 -3
  4. package/dist/base_language/index.d.ts +3 -3
  5. package/dist/base_language/index.js +7 -3
  6. package/dist/cache/base.cjs +2 -5
  7. package/dist/cache/base.js +2 -2
  8. package/dist/chat_models/base.cjs +9 -1
  9. package/dist/chat_models/base.js +9 -1
  10. package/dist/chat_models/bedrock/web.cjs +5 -1
  11. package/dist/chat_models/bedrock/web.js +5 -1
  12. package/dist/chat_models/cloudflare_workersai.cjs +8 -1
  13. package/dist/chat_models/cloudflare_workersai.js +8 -1
  14. package/dist/chat_models/googlepalm.cjs +16 -7
  15. package/dist/chat_models/googlepalm.js +16 -7
  16. package/dist/chat_models/googlevertexai/common.cjs +6 -0
  17. package/dist/chat_models/googlevertexai/common.js +6 -0
  18. package/dist/chat_models/iflytek_xinghuo/common.cjs +9 -4
  19. package/dist/chat_models/iflytek_xinghuo/common.js +9 -4
  20. package/dist/chat_models/llama_cpp.cjs +23 -4
  21. package/dist/chat_models/llama_cpp.js +23 -4
  22. package/dist/chat_models/minimax.cjs +6 -0
  23. package/dist/chat_models/minimax.js +6 -0
  24. package/dist/chat_models/openai.cjs +2 -5
  25. package/dist/chat_models/openai.js +3 -6
  26. package/dist/chat_models/portkey.cjs +18 -8
  27. package/dist/chat_models/portkey.js +18 -8
  28. package/dist/chat_models/yandex.cjs +3 -0
  29. package/dist/chat_models/yandex.js +3 -0
  30. package/dist/embeddings/cache_backed.cjs +2 -5
  31. package/dist/embeddings/cache_backed.js +2 -2
  32. package/dist/embeddings/voyage.cjs +120 -0
  33. package/dist/embeddings/voyage.d.ts +66 -0
  34. package/dist/embeddings/voyage.js +116 -0
  35. package/dist/experimental/autogpt/prompt.cjs +10 -0
  36. package/dist/experimental/autogpt/prompt.js +10 -0
  37. package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.cjs +6 -0
  38. package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.js +6 -0
  39. package/dist/experimental/chat_models/anthropic_functions.cjs +3 -0
  40. package/dist/experimental/chat_models/anthropic_functions.js +3 -0
  41. package/dist/experimental/chat_models/bittensor.cjs +9 -4
  42. package/dist/experimental/chat_models/bittensor.js +9 -4
  43. package/dist/load/import_map.cjs +3 -2
  44. package/dist/load/import_map.d.ts +1 -0
  45. package/dist/load/import_map.js +1 -0
  46. package/dist/schema/index.cjs +27 -7
  47. package/dist/schema/index.d.ts +10 -3
  48. package/dist/schema/index.js +27 -7
  49. package/dist/schema/output_parser.cjs +25 -2
  50. package/dist/schema/output_parser.js +25 -2
  51. package/dist/util/js-sha1/hash.cjs +358 -0
  52. package/dist/util/js-sha1/hash.d.ts +1 -0
  53. package/dist/util/js-sha1/hash.js +355 -0
  54. package/dist/util/stream.cjs +4 -1
  55. package/dist/util/stream.d.ts +4 -1
  56. package/dist/util/stream.js +4 -1
  57. package/embeddings/voyage.cjs +1 -0
  58. package/embeddings/voyage.d.ts +1 -0
  59. package/embeddings/voyage.js +1 -0
  60. package/package.json +12 -5
@@ -24,8 +24,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.chat_models__fireworks = exports.chat_models__cloudflare_workersai = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
- exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = void 0;
+ exports.chat_models__cloudflare_workersai = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__voyage = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
+ exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = void 0;
  exports.load__serializable = __importStar(require("../load/serializable.cjs"));
  exports.agents = __importStar(require("../agents/index.cjs"));
  exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -48,6 +48,7 @@ exports.embeddings__fake = __importStar(require("../embeddings/fake.cjs"));
  exports.embeddings__ollama = __importStar(require("../embeddings/ollama.cjs"));
  exports.embeddings__openai = __importStar(require("../embeddings/openai.cjs"));
  exports.embeddings__minimax = __importStar(require("../embeddings/minimax.cjs"));
+ exports.embeddings__voyage = __importStar(require("../embeddings/voyage.cjs"));
  exports.llms__base = __importStar(require("../llms/base.cjs"));
  exports.llms__openai = __importStar(require("../llms/openai.cjs"));
  exports.llms__ai21 = __importStar(require("../llms/ai21.cjs"));
@@ -20,6 +20,7 @@ export * as embeddings__fake from "../embeddings/fake.js";
  export * as embeddings__ollama from "../embeddings/ollama.js";
  export * as embeddings__openai from "../embeddings/openai.js";
  export * as embeddings__minimax from "../embeddings/minimax.js";
+ export * as embeddings__voyage from "../embeddings/voyage.js";
  export * as llms__base from "../llms/base.js";
  export * as llms__openai from "../llms/openai.js";
  export * as llms__ai21 from "../llms/ai21.js";
@@ -21,6 +21,7 @@ export * as embeddings__fake from "../embeddings/fake.js";
  export * as embeddings__ollama from "../embeddings/ollama.js";
  export * as embeddings__openai from "../embeddings/openai.js";
  export * as embeddings__minimax from "../embeddings/minimax.js";
+ export * as embeddings__voyage from "../embeddings/voyage.js";
  export * as llms__base from "../llms/base.js";
  export * as llms__openai from "../llms/openai.js";
  export * as llms__ai21 from "../llms/ai21.js";
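
The three import_map hunks above register the new embeddings/voyage module as a public entry point (files 32-34 and 57-59 in the list). A minimal TypeScript usage sketch, assuming the module exports a VoyageEmbeddings class and that the apiKey option name is correct; only the langchain/embeddings/voyage entry point itself is confirmed by this diff:

    import { VoyageEmbeddings } from "langchain/embeddings/voyage";

    // Assumed class and option names; only the entry point path is confirmed by this diff.
    const embeddings = new VoyageEmbeddings({
      apiKey: process.env.VOYAGEAI_API_KEY,
    });

    // embedDocuments/embedQuery come from the shared Embeddings base class.
    const vectors = await embeddings.embedDocuments(["hello voyage"]);
    console.log(vectors[0].length);
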
@@ -35,6 +35,26 @@ class GenerationChunk {
  }
  }
  exports.GenerationChunk = GenerationChunk;
+ function mergeContent(firstContent, secondContent) {
+ // If first content is a string
+ if (typeof firstContent === "string") {
+ if (typeof secondContent === "string") {
+ return firstContent + secondContent;
+ }
+ else {
+ return [{ type: "text", text: firstContent }, ...secondContent];
+ }
+ // If both are arrays
+ }
+ else if (Array.isArray(secondContent)) {
+ return [...firstContent, ...secondContent];
+ // If the first content is a list and second is a string
+ }
+ else {
+ // Otherwise, add the second content as a new element of the list
+ return [...firstContent, { type: "text", text: secondContent }];
+ }
+ }
  /**
  * Base class for all types of messages in a conversation. It includes
  * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -46,7 +66,7 @@ class BaseMessage extends serializable_js_1.Serializable {
  * Use {@link BaseMessage.content} instead.
  */
  get text() {
- return this.content;
+ return typeof this.content === "string" ? this.content : "";
  }
  constructor(fields,
  /** @deprecated */
@@ -73,7 +93,7 @@ class BaseMessage extends serializable_js_1.Serializable {
  writable: true,
  value: true
  });
- /** The text of the message. */
+ /** The content of the message. */
  Object.defineProperty(this, "content", {
  enumerable: true,
  configurable: true,
@@ -191,7 +211,7 @@ class HumanMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new HumanMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -222,7 +242,7 @@ class AIMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new AIMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -253,7 +273,7 @@ class SystemMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new SystemMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -313,7 +333,7 @@ class FunctionMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new FunctionMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  name: this.name ?? "",
  });
@@ -407,7 +427,7 @@ class ChatMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new ChatMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  role: this.role,
  });
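
The new module-private mergeContent helper changes how streamed message chunks concatenate: string content still concatenates to a string, but once either side carries an array of content parts the merged result is promoted to an array. A short TypeScript sketch of the observable behavior through the public chunk classes (assuming they remain exported from langchain/schema, as in prior releases):

    import { AIMessageChunk } from "langchain/schema";

    // Plain string chunks still concatenate to a single string.
    const a = new AIMessageChunk({ content: "Hello, " });
    const b = new AIMessageChunk({ content: "world" });
    console.log(a.concat(b).content); // "Hello, world"

    // If either side holds an array of parts, the merged content becomes an array.
    const c = new AIMessageChunk({ content: [{ type: "text", text: "!" }] });
    console.log(a.concat(c).content);
    // => [{ type: "text", text: "Hello, " }, { type: "text", text: "!" }]
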
@@ -64,8 +64,15 @@ export interface StoredGeneration {
  message?: StoredMessage;
  }
  export type MessageType = "human" | "ai" | "generic" | "system" | "function";
+ export type MessageContent = string | {
+ type: "text" | "image_url";
+ text?: string;
+ image_url?: string | {
+ url: string;
+ };
+ }[];
  export interface BaseMessageFields {
- content: string;
+ content: MessageContent;
  name?: string;
  additional_kwargs?: {
  function_call?: OpenAIClient.Chat.ChatCompletionMessage.FunctionCall;
@@ -91,8 +98,8 @@ export declare abstract class BaseMessage extends Serializable implements BaseMe
  * Use {@link BaseMessage.content} instead.
  */
  get text(): string;
- /** The text of the message. */
- content: string;
+ /** The content of the message. */
+ content: MessageContent;
  /** The name of the message sender in a multi-user chat. */
  name?: string;
  /** Additional keyword arguments */
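
With BaseMessageFields.content widened from string to the new MessageContent union, a message can now carry an array of typed text and image parts. A hedged construction sketch (the URL and prompt are placeholders; HumanMessage is assumed to remain exported from langchain/schema):

    import { HumanMessage } from "langchain/schema";

    // Array-of-parts content, matching the MessageContent type added above.
    const msg = new HumanMessage({
      content: [
        { type: "text", text: "What is shown in this image?" },
        { type: "image_url", image_url: { url: "https://example.com/diagram.png" } },
      ],
    });
    console.log(Array.isArray(msg.content)); // true
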
@@ -31,6 +31,26 @@ export class GenerationChunk {
  });
  }
  }
+ function mergeContent(firstContent, secondContent) {
+ // If first content is a string
+ if (typeof firstContent === "string") {
+ if (typeof secondContent === "string") {
+ return firstContent + secondContent;
+ }
+ else {
+ return [{ type: "text", text: firstContent }, ...secondContent];
+ }
+ // If both are arrays
+ }
+ else if (Array.isArray(secondContent)) {
+ return [...firstContent, ...secondContent];
+ // If the first content is a list and second is a string
+ }
+ else {
+ // Otherwise, add the second content as a new element of the list
+ return [...firstContent, { type: "text", text: secondContent }];
+ }
+ }
  /**
  * Base class for all types of messages in a conversation. It includes
  * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -42,7 +62,7 @@ export class BaseMessage extends Serializable {
  * Use {@link BaseMessage.content} instead.
  */
  get text() {
- return this.content;
+ return typeof this.content === "string" ? this.content : "";
  }
  constructor(fields,
  /** @deprecated */
@@ -69,7 +89,7 @@ export class BaseMessage extends Serializable {
  writable: true,
  value: true
  });
- /** The text of the message. */
+ /** The content of the message. */
  Object.defineProperty(this, "content", {
  enumerable: true,
  configurable: true,
@@ -184,7 +204,7 @@ export class HumanMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new HumanMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -213,7 +233,7 @@ export class AIMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new AIMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -242,7 +262,7 @@ export class SystemMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new SystemMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  });
  }
@@ -300,7 +320,7 @@ export class FunctionMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new FunctionMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  name: this.name ?? "",
  });
@@ -389,7 +409,7 @@ export class ChatMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
  return new ChatMessageChunk({
- content: this.content + chunk.content,
+ content: mergeContent(this.content, chunk.content),
  additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
  role: this.role,
  });
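
The deprecated text getter shown in both builds now returns an empty string when content is not a string, instead of returning the raw content value. Callers that still use it should read content directly and narrow its type, roughly as in this sketch:

    import { AIMessage } from "langchain/schema";

    const plain = new AIMessage({ content: "final answer" });
    console.log(plain.text); // "final answer"

    const parts = new AIMessage({ content: [{ type: "text", text: "final answer" }] });
    console.log(parts.text); // "" — the getter no longer returns non-string content

    // Read `content` directly instead of relying on the deprecated getter.
    const text =
      typeof parts.content === "string"
        ? parts.content
        : parts.content
            .filter((p) => p.type === "text")
            .map((p) => p.text ?? "")
            .join("");
    console.log(text); // "final answer"
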
@@ -36,7 +36,14 @@ class BaseLLMOutputParser extends index_js_2.Runnable {
  return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
  }
  else {
- return this._callWithConfig(async (input) => this.parseResult([{ message: input, text: input.content }]), input, { ...options, runType: "parser" });
+ return this._callWithConfig(async (input) => this.parseResult([
+ {
+ message: input,
+ text: typeof input.content === "string"
+ ? input.content
+ : JSON.stringify(input.content),
+ },
+ ]), input, { ...options, runType: "parser" });
  }
  }
  }
@@ -69,7 +76,14 @@ class BaseTransformOutputParser extends BaseOutputParser {
  yield this.parseResult([{ text: chunk }]);
  }
  else {
- yield this.parseResult([{ message: chunk, text: chunk.content }]);
+ yield this.parseResult([
+ {
+ message: chunk,
+ text: typeof chunk.content === "string"
+ ? chunk.content
+ : JSON.stringify(chunk.content),
+ },
+ ]);
  }
  }
  }
@@ -108,14 +122,23 @@ class BaseCumulativeTransformOutputParser extends BaseTransformOutputParser {
  let prevParsed;
  let accGen;
  for await (const chunk of inputGenerator) {
+ if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string output.");
+ }
  let chunkGen;
  if ((0, index_js_1.isBaseMessageChunk)(chunk)) {
+ if (typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string message output.");
+ }
  chunkGen = new index_js_1.ChatGenerationChunk({
  message: chunk,
  text: chunk.content,
  });
  }
  else if ((0, index_js_1.isBaseMessage)(chunk)) {
+ if (typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string message output.");
+ }
  chunkGen = new index_js_1.ChatGenerationChunk({
  message: chunk.toChunk(),
  text: chunk.content,
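
These output-parser hunks make BaseLLMOutputParser and BaseTransformOutputParser tolerate non-string message content by JSON-stringifying it before building the generation's text. Under that reading, a plain StringOutputParser should behave roughly as sketched below (a hedged expectation, not output captured from the package):

    import { StringOutputParser } from "langchain/schema/output_parser";
    import { AIMessage } from "langchain/schema";

    const parser = new StringOutputParser();

    // String content passes through unchanged, as before.
    console.log(await parser.invoke(new AIMessage({ content: "plain text" })));
    // => "plain text"

    // Array content is now serialized to JSON before parsing instead of being
    // assigned to `text` as-is.
    console.log(
      await parser.invoke(new AIMessage({ content: [{ type: "text", text: "hi" }] }))
    );
    // => '[{"type":"text","text":"hi"}]'
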
@@ -33,7 +33,14 @@ export class BaseLLMOutputParser extends Runnable {
  return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
  }
  else {
- return this._callWithConfig(async (input) => this.parseResult([{ message: input, text: input.content }]), input, { ...options, runType: "parser" });
+ return this._callWithConfig(async (input) => this.parseResult([
+ {
+ message: input,
+ text: typeof input.content === "string"
+ ? input.content
+ : JSON.stringify(input.content),
+ },
+ ]), input, { ...options, runType: "parser" });
  }
  }
  }
@@ -64,7 +71,14 @@ export class BaseTransformOutputParser extends BaseOutputParser {
  yield this.parseResult([{ text: chunk }]);
  }
  else {
- yield this.parseResult([{ message: chunk, text: chunk.content }]);
+ yield this.parseResult([
+ {
+ message: chunk,
+ text: typeof chunk.content === "string"
+ ? chunk.content
+ : JSON.stringify(chunk.content),
+ },
+ ]);
  }
  }
  }
@@ -102,14 +116,23 @@ export class BaseCumulativeTransformOutputParser extends BaseTransformOutputPars
  let prevParsed;
  let accGen;
  for await (const chunk of inputGenerator) {
+ if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string output.");
+ }
  let chunkGen;
  if (isBaseMessageChunk(chunk)) {
+ if (typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string message output.");
+ }
  chunkGen = new ChatGenerationChunk({
  message: chunk,
  text: chunk.content,
  });
  }
  else if (isBaseMessage(chunk)) {
+ if (typeof chunk.content !== "string") {
+ throw new Error("Cannot handle non-string message output.");
+ }
  chunkGen = new ChatGenerationChunk({
  message: chunk.toChunk(),
  text: chunk.content,
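
The mirrored ESM hunk adds the same guards: BaseCumulativeTransformOutputParser now throws on chunks whose content is not a string rather than emitting unusable text. A standalone sketch of that guard pattern (it mirrors the added checks; it is not the library's own export):

    import { AIMessageChunk } from "langchain/schema";

    // Mirrors the checks added above: cumulative parsers only accept string content.
    function chunkText(chunk: string | AIMessageChunk): string {
      if (typeof chunk === "string") return chunk;
      if (typeof chunk.content !== "string") {
        throw new Error("Cannot handle non-string message output.");
      }
      return chunk.content;
    }

    console.log(chunkText(new AIMessageChunk({ content: '{"a": 1' }))); // '{"a": 1'
    chunkText(new AIMessageChunk({ content: [{ type: "text", text: "{}" }] }));
    // throws: Cannot handle non-string message output.
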