langchain 0.0.181 → 0.0.182-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/openai/output_parser.cjs +3 -0
- package/dist/agents/openai/output_parser.js +3 -0
- package/dist/base_language/index.cjs +7 -3
- package/dist/base_language/index.d.ts +3 -3
- package/dist/base_language/index.js +7 -3
- package/dist/chat_models/base.cjs +9 -1
- package/dist/chat_models/base.js +9 -1
- package/dist/chat_models/bedrock/web.cjs +5 -1
- package/dist/chat_models/bedrock/web.js +5 -1
- package/dist/chat_models/cloudflare_workersai.cjs +8 -1
- package/dist/chat_models/cloudflare_workersai.js +8 -1
- package/dist/chat_models/googlepalm.cjs +16 -7
- package/dist/chat_models/googlepalm.js +16 -7
- package/dist/chat_models/googlevertexai/common.cjs +6 -0
- package/dist/chat_models/googlevertexai/common.js +6 -0
- package/dist/chat_models/iflytek_xinghuo/common.cjs +9 -4
- package/dist/chat_models/iflytek_xinghuo/common.js +9 -4
- package/dist/chat_models/llama_cpp.cjs +23 -4
- package/dist/chat_models/llama_cpp.js +23 -4
- package/dist/chat_models/minimax.cjs +6 -0
- package/dist/chat_models/minimax.js +6 -0
- package/dist/chat_models/openai.cjs +6 -5
- package/dist/chat_models/openai.d.ts +4 -0
- package/dist/chat_models/openai.js +7 -6
- package/dist/chat_models/portkey.cjs +18 -8
- package/dist/chat_models/portkey.js +18 -8
- package/dist/chat_models/yandex.cjs +3 -0
- package/dist/chat_models/yandex.js +3 -0
- package/dist/experimental/autogpt/prompt.cjs +10 -0
- package/dist/experimental/autogpt/prompt.js +10 -0
- package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.cjs +6 -0
- package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.js +6 -0
- package/dist/experimental/chat_models/anthropic_functions.cjs +3 -0
- package/dist/experimental/chat_models/anthropic_functions.js +3 -0
- package/dist/experimental/chat_models/bittensor.cjs +9 -4
- package/dist/experimental/chat_models/bittensor.js +9 -4
- package/dist/schema/index.cjs +27 -7
- package/dist/schema/index.d.ts +10 -3
- package/dist/schema/index.js +27 -7
- package/dist/schema/output_parser.cjs +25 -2
- package/dist/schema/output_parser.js +25 -2
- package/package.json +2 -2
package/dist/chat_models/openai.js
CHANGED

@@ -1,5 +1,5 @@
  import { OpenAI as OpenAIClient } from "openai";
- import { AIMessage, AIMessageChunk, ChatGenerationChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk,
+ import { AIMessage, AIMessageChunk, ChatGenerationChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk, HumanMessageChunk, SystemMessageChunk, } from "../schema/index.js";
  import { formatToOpenAIFunction } from "../tools/convert_to_openai.js";
  import { getEndpoint } from "../util/azure.js";
  import { getEnvironmentVariable } from "../util/env.js";
@@ -51,14 +51,10 @@ function messageToOpenAIMessage(message) {
  }
  function openAIResponseToChatMessage(message) {
      switch (message.role) {
-         case "user":
-             return new HumanMessage(message.content || "");
          case "assistant":
              return new AIMessage(message.content || "", {
                  function_call: message.function_call,
              });
-         case "system":
-             return new SystemMessage(message.content || "");
          default:
              return new ChatMessage(message.content || "", message.role ?? "unknown");
      }
@@ -128,6 +124,8 @@ export class ChatOpenAI extends BaseChatModel {
          "functions",
          "tools",
          "promptIndex",
+         "response_format",
+         "seed",
      ];
  }
  get lc_secrets() {
@@ -371,6 +369,8 @@ export class ChatOpenAI extends BaseChatModel {
          ? options?.tools.map(formatToOpenAIFunction)
          : undefined),
      function_call: options?.function_call,
+     response_format: options?.response_format,
+     seed: options?.seed,
      ...this.modelKwargs,
  };
  }
@@ -574,7 +574,8 @@ export class ChatOpenAI extends BaseChatModel {
      let count = textCount + tokensPerMessage + roleCount + nameCount;
      // From: https://github.com/hmarr/openai-chat-tokens/blob/main/src/index.ts messageTokenEstimate
      const openAIMessage = messageToOpenAIMessage(message);
-     if (openAIMessage.role === "function"
+     if (openAIMessage.role === "function" ||
+         openAIMessage.role === "tool") {
          count -= 2;
      }
      if (openAIMessage.function_call) {
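The two call options added above correspond to features in the late-2023 OpenAI API: `response_format: { type: "json_object" }` turns on JSON mode, and `seed` requests best-effort reproducible sampling. A minimal sketch of how they might be passed per invocation, assuming the public ChatOpenAI call-options type exposes the same fields this compiled output forwards (the model name is illustrative):

import { ChatOpenAI } from "langchain/chat_models/openai";
import { HumanMessage } from "langchain/schema";

const model = new ChatOpenAI({ modelName: "gpt-4-1106-preview", temperature: 0 });

export async function jsonModeDemo() {
  // Both options are forwarded untouched to the OpenAI client by the code above.
  const result = await model.invoke(
    [new HumanMessage("Return a JSON object listing three prime numbers.")],
    {
      response_format: { type: "json_object" }, // JSON mode
      seed: 42, // best-effort determinism across calls
    }
  );
  return result.content;
}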
package/dist/chat_models/portkey.cjs
CHANGED

@@ -98,10 +98,15 @@ class PortkeyChat extends base_js_1.BaseChatModel {
      return "portkey";
  }
  async _generate(messages, options, _) {
-     const messagesList = messages.map((message) => ({
-         role: message._getType(),
-         content: message.content,
-     }));
+     const messagesList = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("PortkeyChat does not support non-string message content.");
+         }
+         return {
+             role: message._getType(),
+             content: message.content,
+         };
+     });
      const response = await this.session.portkey.chatCompletions.create({
          messages: messagesList,
          ...options,
@@ -124,10 +129,15 @@ class PortkeyChat extends base_js_1.BaseChatModel {
      };
  }
  async *_streamResponseChunks(messages, options, runManager) {
-     const messagesList = messages.map((message) => ({
-         role: message._getType(),
-         content: message.content,
-     }));
+     const messagesList = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("PortkeyChat does not support non-string message content.");
+         }
+         return {
+             role: message._getType(),
+             content: message.content,
+         };
+     });
      const response = await this.session.portkey.chatCompletions.create({
          messages: messagesList,
          ...options,
package/dist/chat_models/portkey.js
CHANGED

@@ -95,10 +95,15 @@ export class PortkeyChat extends BaseChatModel {
      return "portkey";
  }
  async _generate(messages, options, _) {
-     const messagesList = messages.map((message) => ({
-         role: message._getType(),
-         content: message.content,
-     }));
+     const messagesList = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("PortkeyChat does not support non-string message content.");
+         }
+         return {
+             role: message._getType(),
+             content: message.content,
+         };
+     });
      const response = await this.session.portkey.chatCompletions.create({
          messages: messagesList,
          ...options,
@@ -121,10 +126,15 @@ export class PortkeyChat extends BaseChatModel {
      };
  }
  async *_streamResponseChunks(messages, options, runManager) {
-     const messagesList = messages.map((message) => ({
-         role: message._getType(),
-         content: message.content,
-     }));
+     const messagesList = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("PortkeyChat does not support non-string message content.");
+         }
+         return {
+             role: message._getType(),
+             content: message.content,
+         };
+     });
      const response = await this.session.portkey.chatCompletions.create({
          messages: messagesList,
          ...options,
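Both Portkey builds gain the same guard, and the identical pattern recurs in the Yandex, AutoGPT, AnthropicFunctions, and Bittensor diffs below: providers that can only send plain text now reject the array-shaped content permitted by the widened MessageContent type (see the schema/index.d.ts diff further down). A sketch of the pattern pulled out into a helper; the helper name is hypothetical, not part of the package:

import { BaseMessage } from "langchain/schema";

// Hypothetical helper: narrow MessageContent to string or fail loudly,
// mirroring the guards added throughout this release.
function expectStringContent(message: BaseMessage): string {
  if (typeof message.content !== "string") {
    throw new Error("This model does not support non-string message content.");
  }
  return message.content;
}

// Usage, equivalent to the new PortkeyChat mapping:
// const messagesList = messages.map((m) => ({ role: m._getType(), content: expectStringContent(m) }));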
package/dist/chat_models/yandex.cjs
CHANGED

@@ -9,6 +9,9 @@ function _parseChatHistory(history) {
      const chatHistory = [];
      let instruction = "";
      for (const message of history) {
+         if (typeof message.content !== "string") {
+             throw new Error("ChatYandexGPT does not support non-string message content.");
+         }
          if ("content" in message) {
              if (message._getType() === "human") {
                  chatHistory.push({ role: "user", text: message.content });
package/dist/chat_models/yandex.js
CHANGED

@@ -6,6 +6,9 @@ function _parseChatHistory(history) {
      const chatHistory = [];
      let instruction = "";
      for (const message of history) {
+         if (typeof message.content !== "string") {
+             throw new Error("ChatYandexGPT does not support non-string message content.");
+         }
          if ("content" in message) {
              if (message._getType() === "human") {
                  chatHistory.push({ role: "user", text: message.content });
package/dist/experimental/autogpt/prompt.cjs
CHANGED

@@ -81,6 +81,10 @@ class AutoGPTPrompt extends chat_js_1.BaseChatPromptTemplate {
  async formatMessages({ goals, memory, messages: previousMessages, user_input, }) {
      const basePrompt = new index_js_1.SystemMessage(this.constructFullPrompt(goals));
      const timePrompt = new index_js_1.SystemMessage(`The current time and date is ${new Date().toLocaleString()}`);
+     if (typeof basePrompt.content !== "string" ||
+         typeof timePrompt.content !== "string") {
+         throw new Error("Non-string message content is not supported.");
+     }
      const usedTokens = (await this.tokenCounter(basePrompt.content)) +
          (await this.tokenCounter(timePrompt.content));
      const relevantDocs = await memory.getRelevantDocuments(JSON.stringify(previousMessages.slice(-10)));
@@ -92,9 +96,15 @@ class AutoGPTPrompt extends chat_js_1.BaseChatPromptTemplate {
  }
      const contentFormat = `This reminds you of these events from your past:\n${relevantMemory.join("\n")}\n\n`;
      const memoryMessage = new index_js_1.SystemMessage(contentFormat);
+     if (typeof memoryMessage.content !== "string") {
+         throw new Error("Non-string message content is not supported.");
+     }
      const usedTokensWithMemory = (await usedTokens) + (await this.tokenCounter(memoryMessage.content));
      const historicalMessages = [];
      for (const message of previousMessages.slice(-10).reverse()) {
+         if (typeof message.content !== "string") {
+             throw new Error("Non-string message content is not supported.");
+         }
          const messageTokens = await this.tokenCounter(message.content);
          if (usedTokensWithMemory + messageTokens > this.sendTokenLimit - 1000) {
              break;
package/dist/experimental/autogpt/prompt.js
CHANGED

@@ -78,6 +78,10 @@ export class AutoGPTPrompt extends BaseChatPromptTemplate {
  async formatMessages({ goals, memory, messages: previousMessages, user_input, }) {
      const basePrompt = new SystemMessage(this.constructFullPrompt(goals));
      const timePrompt = new SystemMessage(`The current time and date is ${new Date().toLocaleString()}`);
+     if (typeof basePrompt.content !== "string" ||
+         typeof timePrompt.content !== "string") {
+         throw new Error("Non-string message content is not supported.");
+     }
      const usedTokens = (await this.tokenCounter(basePrompt.content)) +
          (await this.tokenCounter(timePrompt.content));
      const relevantDocs = await memory.getRelevantDocuments(JSON.stringify(previousMessages.slice(-10)));
@@ -89,9 +93,15 @@ export class AutoGPTPrompt extends BaseChatPromptTemplate {
  }
      const contentFormat = `This reminds you of these events from your past:\n${relevantMemory.join("\n")}\n\n`;
      const memoryMessage = new SystemMessage(contentFormat);
+     if (typeof memoryMessage.content !== "string") {
+         throw new Error("Non-string message content is not supported.");
+     }
      const usedTokensWithMemory = (await usedTokens) + (await this.tokenCounter(memoryMessage.content));
      const historicalMessages = [];
      for (const message of previousMessages.slice(-10).reverse()) {
+         if (typeof message.content !== "string") {
+             throw new Error("Non-string message content is not supported.");
+         }
          const messageTokens = await this.tokenCounter(message.content);
          if (usedTokensWithMemory + messageTokens > this.sendTokenLimit - 1000) {
              break;
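For orientation, these guards land inside AutoGPT's history-trimming loop, which walks the last ten messages newest-first and stops once the prompt would crowd out the reply. A condensed sketch of that loop using the same names as the diff; the accumulation and unshift steps after the break check are assumptions, since the hunk ends at the break:

// Condensed sketch of the loop the guard protects (assumed details marked).
async function trimHistory(
  previousMessages: Array<{ content: unknown }>,
  tokenCounter: (text: string) => Promise<number>,
  usedTokensWithMemory: number,
  sendTokenLimit: number
) {
  const historicalMessages: Array<{ content: unknown }> = [];
  for (const message of previousMessages.slice(-10).reverse()) {
    if (typeof message.content !== "string") {
      throw new Error("Non-string message content is not supported.");
    }
    const messageTokens = await tokenCounter(message.content);
    // Reserve ~1000 tokens for the model's response, as in the diff above.
    if (usedTokensWithMemory + messageTokens > sendTokenLimit - 1000) {
      break;
    }
    usedTokensWithMemory += messageTokens; // assumed
    historicalMessages.unshift(message); // assumed: restore chronological order
  }
  return historicalMessages;
}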
package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.cjs
CHANGED

@@ -247,6 +247,9 @@ class ViolationOfExpectationsChain extends base_js_1.BaseChain {
          function_call: { name: types_js_1.PREDICTION_VIOLATIONS_FUNCTION.name },
      });
      const chain = violation_of_expectations_prompt_js_1.PREDICTION_VIOLATIONS_PROMPT.pipe(llmWithFunctions).pipe(this.jsonOutputParser);
+     if (typeof userResponse?.content !== "string") {
+         throw new Error("This chain does not support non-string model output.");
+     }
      const res = (await chain.invoke({
          predicted_output: userPredictions.predictedUserMessage,
          actual_output: userResponse?.content ?? "",
@@ -299,6 +302,9 @@ class ViolationOfExpectationsChain extends base_js_1.BaseChain {
   */
  async generateFacts({ userResponse, predictions, runManager, }) {
      const chain = violation_of_expectations_prompt_js_1.GENERATE_FACTS_PROMPT.pipe(this.llm).pipe(this.stringOutputParser);
+     if (typeof userResponse?.content !== "string") {
+         throw new Error("This chain does not support non-string model output.");
+     }
      const res = await chain.invoke({
          prediction_violations: predictions.explainedPredictionErrors.join("\n"),
          prediction: predictions.revisedPrediction,
package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.js
CHANGED

@@ -244,6 +244,9 @@ export class ViolationOfExpectationsChain extends BaseChain {
          function_call: { name: PREDICTION_VIOLATIONS_FUNCTION.name },
      });
      const chain = PREDICTION_VIOLATIONS_PROMPT.pipe(llmWithFunctions).pipe(this.jsonOutputParser);
+     if (typeof userResponse?.content !== "string") {
+         throw new Error("This chain does not support non-string model output.");
+     }
      const res = (await chain.invoke({
          predicted_output: userPredictions.predictedUserMessage,
          actual_output: userResponse?.content ?? "",
@@ -296,6 +299,9 @@ export class ViolationOfExpectationsChain extends BaseChain {
   */
  async generateFacts({ userResponse, predictions, runManager, }) {
      const chain = GENERATE_FACTS_PROMPT.pipe(this.llm).pipe(this.stringOutputParser);
+     if (typeof userResponse?.content !== "string") {
+         throw new Error("This chain does not support non-string model output.");
+     }
      const res = await chain.invoke({
          prediction_violations: predictions.explainedPredictionErrors.join("\n"),
          prediction: predictions.revisedPrediction,
package/dist/experimental/chat_models/anthropic_functions.cjs
CHANGED

@@ -112,6 +112,9 @@ class AnthropicFunctions extends base_js_1.BaseChatModel {
      }
      const chatResult = await this.llm._generate(promptMessages, options, runManager);
      const chatGenerationContent = chatResult.generations[0].message.content;
+     if (typeof chatGenerationContent !== "string") {
+         throw new Error("AnthropicFunctions does not support non-string output.");
+     }
      if (forced) {
          const parser = new fast_xml_parser_1.XMLParser();
          const result = parser.parse(`${chatGenerationContent}</tool_input>`);
package/dist/experimental/chat_models/anthropic_functions.js
CHANGED

@@ -109,6 +109,9 @@ export class AnthropicFunctions extends BaseChatModel {
      }
      const chatResult = await this.llm._generate(promptMessages, options, runManager);
      const chatGenerationContent = chatResult.generations[0].message.content;
+     if (typeof chatGenerationContent !== "string") {
+         throw new Error("AnthropicFunctions does not support non-string output.");
+     }
      if (forced) {
          const parser = new XMLParser();
          const result = parser.parse(`${chatGenerationContent}</tool_input>`);
package/dist/experimental/chat_models/bittensor.cjs
CHANGED

@@ -60,10 +60,15 @@ class NIBittensorChatModel extends base_js_1.BaseChatModel {
   * const res = await chat.call([message]);
   */
  async _generate(messages) {
-     const processed_messages = messages.map((message) => ({
-         role: this.messageToOpenAIRole(message),
-         content: message.content,
-     }));
+     const processed_messages = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("NIBittensorChat does not support non-string output.");
+         }
+         return {
+             role: this.messageToOpenAIRole(message),
+             content: message.content,
+         };
+     });
      const generations = [];
      try {
          // Retrieve API KEY
package/dist/experimental/chat_models/bittensor.js
CHANGED

@@ -57,10 +57,15 @@ export class NIBittensorChatModel extends BaseChatModel {
   * const res = await chat.call([message]);
   */
  async _generate(messages) {
-     const processed_messages = messages.map((message) => ({
-         role: this.messageToOpenAIRole(message),
-         content: message.content,
-     }));
+     const processed_messages = messages.map((message) => {
+         if (typeof message.content !== "string") {
+             throw new Error("NIBittensorChat does not support non-string output.");
+         }
+         return {
+             role: this.messageToOpenAIRole(message),
+             content: message.content,
+         };
+     });
      const generations = [];
      try {
          // Retrieve API KEY
package/dist/schema/index.cjs
CHANGED

@@ -35,6 +35,26 @@ class GenerationChunk {
      }
  }
  exports.GenerationChunk = GenerationChunk;
+ function mergeContent(firstContent, secondContent) {
+     // If first content is a string
+     if (typeof firstContent === "string") {
+         if (typeof secondContent === "string") {
+             return firstContent + secondContent;
+         }
+         else {
+             return [{ type: "text", text: firstContent }, ...secondContent];
+         }
+         // If both are arrays
+     }
+     else if (Array.isArray(secondContent)) {
+         return [...firstContent, ...secondContent];
+         // If the first content is a list and second is a string
+     }
+     else {
+         // Otherwise, add the second content as a new element of the list
+         return [...firstContent, { type: "text", text: secondContent }];
+     }
+ }
  /**
   * Base class for all types of messages in a conversation. It includes
   * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -46,7 +66,7 @@ class BaseMessage extends serializable_js_1.Serializable {
   * Use {@link BaseMessage.content} instead.
   */
  get text() {
-     return this.content;
+     return typeof this.content === "string" ? this.content : "";
  }
  constructor(fields,
  /** @deprecated */
@@ -73,7 +93,7 @@ class BaseMessage extends serializable_js_1.Serializable {
      writable: true,
      value: true
  });
- /** The
+ /** The content of the message. */
  Object.defineProperty(this, "content", {
      enumerable: true,
      configurable: true,
@@ -191,7 +211,7 @@ class HumanMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new HumanMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -222,7 +242,7 @@ class AIMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new AIMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -253,7 +273,7 @@ class SystemMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new SystemMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -313,7 +333,7 @@ class FunctionMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new FunctionMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
          name: this.name ?? "",
      });
@@ -407,7 +427,7 @@ class ChatMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new ChatMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
          role: this.role,
      });
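mergeContent is what lets message chunks concatenate now that content can be a string or an array of parts. Its four cases, illustrated directly from the branches above (the function is module-private, so these calls are for illustration only):

// string + string: plain concatenation
mergeContent("Hello, ", "world"); // => "Hello, world"

// string + array: the string is promoted to a text part and prepended
mergeContent("Hi", [{ type: "image_url", image_url: "https://example.com/a.png" }]);
// => [{ type: "text", text: "Hi" }, { type: "image_url", image_url: "https://example.com/a.png" }]

// array + array: concatenated
mergeContent([{ type: "text", text: "a" }], [{ type: "text", text: "b" }]);
// => [{ type: "text", text: "a" }, { type: "text", text: "b" }]

// array + string: the string is appended as a new text part
mergeContent([{ type: "text", text: "a" }], "b");
// => [{ type: "text", text: "a" }, { type: "text", text: "b" }]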
package/dist/schema/index.d.ts
CHANGED

@@ -64,8 +64,15 @@ export interface StoredGeneration {
      message?: StoredMessage;
  }
  export type MessageType = "human" | "ai" | "generic" | "system" | "function";
+ export type MessageContent = string | {
+     type: "text" | "image_url";
+     text?: string;
+     image_url?: string | {
+         url: string;
+     };
+ }[];
  export interface BaseMessageFields {
-     content:
+     content: MessageContent;
      name?: string;
      additional_kwargs?: {
          function_call?: OpenAIClient.Chat.ChatCompletionMessage.FunctionCall;
@@ -91,8 +98,8 @@ export declare abstract class BaseMessage extends Serializable implements BaseMessageFields {
   * Use {@link BaseMessage.content} instead.
   */
  get text(): string;
- /** The
- content:
+ /** The content of the message. */
+ content: MessageContent;
  /** The name of the message sender in a multi-user chat. */
  name?: string;
  /** Additional keyword arguments */
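The widened MessageContent type is the schema-level change that enables OpenAI-style multi-modal input. A sketch of the array form, matching the type declared above; note that most model integrations in this release still guard against it, so whether any given provider accepts such a message is an assumption:

import { HumanMessage } from "langchain/schema";

// Array-shaped content mixing a text part and an image part.
const visionMessage = new HumanMessage({
  content: [
    { type: "text", text: "What is in this image?" },
    { type: "image_url", image_url: { url: "https://example.com/photo.jpg" } },
  ],
});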
package/dist/schema/index.js
CHANGED

@@ -31,6 +31,26 @@ export class GenerationChunk {
          });
      }
  }
+ function mergeContent(firstContent, secondContent) {
+     // If first content is a string
+     if (typeof firstContent === "string") {
+         if (typeof secondContent === "string") {
+             return firstContent + secondContent;
+         }
+         else {
+             return [{ type: "text", text: firstContent }, ...secondContent];
+         }
+         // If both are arrays
+     }
+     else if (Array.isArray(secondContent)) {
+         return [...firstContent, ...secondContent];
+         // If the first content is a list and second is a string
+     }
+     else {
+         // Otherwise, add the second content as a new element of the list
+         return [...firstContent, { type: "text", text: secondContent }];
+     }
+ }
  /**
   * Base class for all types of messages in a conversation. It includes
   * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -42,7 +62,7 @@ export class BaseMessage extends Serializable {
   * Use {@link BaseMessage.content} instead.
   */
  get text() {
-     return this.content;
+     return typeof this.content === "string" ? this.content : "";
  }
  constructor(fields,
  /** @deprecated */
@@ -69,7 +89,7 @@ export class BaseMessage extends Serializable {
      writable: true,
      value: true
  });
- /** The
+ /** The content of the message. */
  Object.defineProperty(this, "content", {
      enumerable: true,
      configurable: true,
@@ -184,7 +204,7 @@ export class HumanMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new HumanMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -213,7 +233,7 @@ export class AIMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new AIMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -242,7 +262,7 @@ export class SystemMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new SystemMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
      });
  }
@@ -300,7 +320,7 @@ export class FunctionMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new FunctionMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
          name: this.name ?? "",
      });
@@ -389,7 +409,7 @@ export class ChatMessageChunk extends BaseMessageChunk {
  }
  concat(chunk) {
      return new ChatMessageChunk({
-         content: this.content
+         content: mergeContent(this.content, chunk.content),
          additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
          role: this.role,
      });
package/dist/schema/output_parser.cjs
CHANGED

@@ -36,7 +36,14 @@ class BaseLLMOutputParser extends index_js_2.Runnable {
      return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
  }
  else {
-     return this._callWithConfig(async (input) => this.parseResult([
+     return this._callWithConfig(async (input) => this.parseResult([
+         {
+             message: input,
+             text: typeof input.content === "string"
+                 ? input.content
+                 : JSON.stringify(input.content),
+         },
+     ]), input, { ...options, runType: "parser" });
  }
  }
  }
@@ -69,7 +76,14 @@ class BaseTransformOutputParser extends BaseOutputParser {
      yield this.parseResult([{ text: chunk }]);
  }
  else {
-     yield this.parseResult([
+     yield this.parseResult([
+         {
+             message: chunk,
+             text: typeof chunk.content === "string"
+                 ? chunk.content
+                 : JSON.stringify(chunk.content),
+         },
+     ]);
  }
  }
  }
@@ -108,14 +122,23 @@ class BaseCumulativeTransformOutputParser extends BaseTransformOutputParser {
      let prevParsed;
      let accGen;
      for await (const chunk of inputGenerator) {
+         if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+             throw new Error("Cannot handle non-string output.");
+         }
          let chunkGen;
          if ((0, index_js_1.isBaseMessageChunk)(chunk)) {
+             if (typeof chunk.content !== "string") {
+                 throw new Error("Cannot handle non-string message output.");
+             }
              chunkGen = new index_js_1.ChatGenerationChunk({
                  message: chunk,
                  text: chunk.content,
              });
          }
          else if ((0, index_js_1.isBaseMessage)(chunk)) {
+             if (typeof chunk.content !== "string") {
+                 throw new Error("Cannot handle non-string message output.");
+             }
              chunkGen = new index_js_1.ChatGenerationChunk({
                  message: chunk.toChunk(),
                  text: chunk.content,
package/dist/schema/output_parser.js
CHANGED

@@ -33,7 +33,14 @@ export class BaseLLMOutputParser extends Runnable {
      return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
  }
  else {
-     return this._callWithConfig(async (input) => this.parseResult([
+     return this._callWithConfig(async (input) => this.parseResult([
+         {
+             message: input,
+             text: typeof input.content === "string"
+                 ? input.content
+                 : JSON.stringify(input.content),
+         },
+     ]), input, { ...options, runType: "parser" });
  }
  }
  }
@@ -64,7 +71,14 @@ export class BaseTransformOutputParser extends BaseOutputParser {
      yield this.parseResult([{ text: chunk }]);
  }
  else {
-     yield this.parseResult([
+     yield this.parseResult([
+         {
+             message: chunk,
+             text: typeof chunk.content === "string"
+                 ? chunk.content
+                 : JSON.stringify(chunk.content),
+         },
+     ]);
  }
  }
  }
@@ -102,14 +116,23 @@ export class BaseCumulativeTransformOutputParser extends BaseTransformOutputParser {
      let prevParsed;
      let accGen;
      for await (const chunk of inputGenerator) {
+         if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+             throw new Error("Cannot handle non-string output.");
+         }
          let chunkGen;
          if (isBaseMessageChunk(chunk)) {
+             if (typeof chunk.content !== "string") {
+                 throw new Error("Cannot handle non-string message output.");
+             }
              chunkGen = new ChatGenerationChunk({
                  message: chunk,
                  text: chunk.content,
              });
          }
          else if (isBaseMessage(chunk)) {
+             if (typeof chunk.content !== "string") {
+                 throw new Error("Cannot handle non-string message output.");
+             }
              chunkGen = new ChatGenerationChunk({
                  message: chunk.toChunk(),
                  text: chunk.content,
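The non-streaming parsers take the opposite tack from BaseCumulativeTransformOutputParser: rather than throwing, they serialize array-shaped content with JSON.stringify so parsing can proceed. A sketch of the observable behavior with StringOutputParser, assuming the import paths current for this release:

import { StringOutputParser } from "langchain/schema/output_parser";
import { AIMessage } from "langchain/schema";

const parser = new StringOutputParser();

// String content passes through as before.
await parser.invoke(new AIMessage("plain text")); // => "plain text"

// Array-shaped content is stringified rather than rejected.
await parser.invoke(new AIMessage({ content: [{ type: "text", text: "hi" }] }));
// => '[{"type":"text","text":"hi"}]'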
package/package.json
CHANGED

@@ -1,6 +1,6 @@
  {
      "name": "langchain",
-     "version": "0.0.181",
+     "version": "0.0.182-rc.1",
      "description": "Typescript bindings for langchain",
      "type": "module",
      "engines": {
@@ -1360,7 +1360,7 @@
      "langchainhub": "~0.0.6",
      "langsmith": "~0.0.48",
      "ml-distance": "^4.0.0",
-     "openai": "~4.
+     "openai": "~4.16.0",
      "openapi-types": "^12.1.3",
      "p-queue": "^6.6.2",
      "p-retry": "4",