langchain 0.0.181 → 0.0.182-rc.0
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/agents/openai/output_parser.cjs +3 -0
- package/dist/agents/openai/output_parser.js +3 -0
- package/dist/base_language/index.cjs +7 -3
- package/dist/base_language/index.d.ts +3 -3
- package/dist/base_language/index.js +7 -3
- package/dist/chat_models/base.cjs +9 -1
- package/dist/chat_models/base.js +9 -1
- package/dist/chat_models/bedrock/web.cjs +5 -1
- package/dist/chat_models/bedrock/web.js +5 -1
- package/dist/chat_models/cloudflare_workersai.cjs +8 -1
- package/dist/chat_models/cloudflare_workersai.js +8 -1
- package/dist/chat_models/googlepalm.cjs +16 -7
- package/dist/chat_models/googlepalm.js +16 -7
- package/dist/chat_models/googlevertexai/common.cjs +6 -0
- package/dist/chat_models/googlevertexai/common.js +6 -0
- package/dist/chat_models/iflytek_xinghuo/common.cjs +9 -4
- package/dist/chat_models/iflytek_xinghuo/common.js +9 -4
- package/dist/chat_models/llama_cpp.cjs +23 -4
- package/dist/chat_models/llama_cpp.js +23 -4
- package/dist/chat_models/minimax.cjs +6 -0
- package/dist/chat_models/minimax.js +6 -0
- package/dist/chat_models/openai.cjs +2 -5
- package/dist/chat_models/openai.js +3 -6
- package/dist/chat_models/portkey.cjs +18 -8
- package/dist/chat_models/portkey.js +18 -8
- package/dist/chat_models/yandex.cjs +3 -0
- package/dist/chat_models/yandex.js +3 -0
- package/dist/experimental/autogpt/prompt.cjs +10 -0
- package/dist/experimental/autogpt/prompt.js +10 -0
- package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.cjs +6 -0
- package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.js +6 -0
- package/dist/experimental/chat_models/anthropic_functions.cjs +3 -0
- package/dist/experimental/chat_models/anthropic_functions.js +3 -0
- package/dist/experimental/chat_models/bittensor.cjs +9 -4
- package/dist/experimental/chat_models/bittensor.js +9 -4
- package/dist/schema/index.cjs +27 -7
- package/dist/schema/index.d.ts +10 -3
- package/dist/schema/index.js +27 -7
- package/dist/schema/output_parser.cjs +25 -2
- package/dist/schema/output_parser.js +25 -2
- package/package.json +3 -2
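
Nearly all of the chat-model hunks below share one motif: message `content` is now typed as `MessageContent` (a plain string or an array of content parts; see the `schema/index.d.ts` hunk), and providers that can only send plain text now guard against the array form and throw. A minimal sketch of that guard, using a hypothetical `toProviderMessage` helper that is not part of the package:

import { HumanMessage, type MessageContent } from "langchain/schema";

// Hypothetical helper mirroring the guard this release adds to ChatCloudflareWorkersAI,
// ChatGooglePaLM, PortkeyChat, ChatYandexGPT and others: reject array-form content.
function toProviderMessage(content: MessageContent): { role: string; content: string } {
    if (typeof content !== "string") {
        throw new Error("This provider does not support non-string message content.");
    }
    return { role: "user", content };
}

// Plain-text messages keep working exactly as before.
const message = new HumanMessage("Hello there");
console.log(toProviderMessage(message.content));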

package/dist/agents/openai/output_parser.cjs
CHANGED

@@ -33,6 +33,9 @@ class OpenAIFunctionsAgentOutputParser extends types_js_1.AgentActionOutputParse
      * @returns A FunctionsAgentAction or AgentFinish object.
      */
     parseAIMessage(message) {
+        if (message.content && typeof message.content !== "string") {
+            throw new Error("This agent cannot parse non-string model responses.");
+        }
         if (message.additional_kwargs.function_call) {
             // eslint-disable-next-line prefer-destructuring
             const function_call = message.additional_kwargs.function_call;

package/dist/agents/openai/output_parser.js
CHANGED

@@ -30,6 +30,9 @@ export class OpenAIFunctionsAgentOutputParser extends AgentActionOutputParser {
      * @returns A FunctionsAgentAction or AgentFinish object.
      */
     parseAIMessage(message) {
+        if (message.content && typeof message.content !== "string") {
+            throw new Error("This agent cannot parse non-string model responses.");
+        }
         if (message.additional_kwargs.function_call) {
             // eslint-disable-next-line prefer-destructuring
             const function_call = message.additional_kwargs.function_call;

package/dist/base_language/index.cjs
CHANGED

@@ -104,9 +104,13 @@ class BaseLanguageModel extends BaseLangChain {
         }
         this.caller = new async_caller_js_1.AsyncCaller(params ?? {});
     }
-    async getNumTokens(
+    async getNumTokens(content) {
+        // TODO: Figure out correct value.
+        if (typeof content !== "string") {
+            return 0;
+        }
         // fallback to approximate calculation if tiktoken is not available
-        let numTokens = Math.ceil(
+        let numTokens = Math.ceil(content.length / 4);
         if (!this._encoding) {
             try {
                 this._encoding = await (0, tiktoken_js_1.encodingForModel)("modelName" in this
@@ -118,7 +122,7 @@ class BaseLanguageModel extends BaseLangChain {
             }
         }
         if (this._encoding) {
-            numTokens = this._encoding.encode(
+            numTokens = this._encoding.encode(content).length;
         }
         return numTokens;
     }

package/dist/base_language/index.d.ts
CHANGED

@@ -1,5 +1,5 @@
 import type { OpenAI as OpenAIClient } from "openai";
-import { BaseCache, BaseMessage, BaseMessageLike, BasePromptValue, LLMResult } from "../schema/index.js";
+import { BaseCache, BaseMessage, BaseMessageLike, BasePromptValue, LLMResult, MessageContent } from "../schema/index.js";
 import { BaseCallbackConfig, CallbackManager, Callbacks } from "../callbacks/manager.js";
 import { AsyncCaller, AsyncCallerParams } from "../util/async_caller.js";
 import { Runnable } from "../schema/runnable/index.js";
@@ -60,7 +60,7 @@ export interface BaseLanguageModelCallOptions extends BaseCallbackConfig {
     signal?: AbortSignal;
 }
 export interface BaseFunctionCallOptions extends BaseLanguageModelCallOptions {
-    function_call?: OpenAIClient.Chat.
+    function_call?: OpenAIClient.Chat.ChatCompletionFunctionCallOption;
     functions?: OpenAIClient.Chat.ChatCompletionCreateParams.Function[];
 }
 export type BaseLanguageModelInput = BasePromptValue | string | BaseMessageLike[];
@@ -86,7 +86,7 @@ export declare abstract class BaseLanguageModel<RunOutput = any, CallOptions ext
     abstract _modelType(): string;
     abstract _llmType(): string;
     private _encoding?;
-    getNumTokens(
+    getNumTokens(content: MessageContent): Promise<number>;
     protected static _convertInputToPromptValue(input: BaseLanguageModelInput): BasePromptValue;
     /**
      * Get the identifying parameters of the LLM.

package/dist/base_language/index.js
CHANGED

@@ -100,9 +100,13 @@ export class BaseLanguageModel extends BaseLangChain {
         }
         this.caller = new AsyncCaller(params ?? {});
     }
-    async getNumTokens(
+    async getNumTokens(content) {
+        // TODO: Figure out correct value.
+        if (typeof content !== "string") {
+            return 0;
+        }
         // fallback to approximate calculation if tiktoken is not available
-        let numTokens = Math.ceil(
+        let numTokens = Math.ceil(content.length / 4);
         if (!this._encoding) {
             try {
                 this._encoding = await encodingForModel("modelName" in this
@@ -114,7 +118,7 @@ export class BaseLanguageModel extends BaseLangChain {
             }
         }
         if (this._encoding) {
-            numTokens = this._encoding.encode(
+            numTokens = this._encoding.encode(content).length;
         }
         return numTokens;
     }
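
For reference, the reworked `getNumTokens` above returns 0 for non-string content (marked TODO in the diff) and otherwise falls back to roughly one token per four characters when tiktoken is unavailable. A standalone sketch of that fallback, separate from the real class method:

import type { MessageContent } from "langchain/schema";

// Approximation mirroring the fallback path in BaseLanguageModel.getNumTokens:
// array-form content currently counts as 0 tokens, strings as ~4 characters per token.
function approximateNumTokens(content: MessageContent): number {
    if (typeof content !== "string") {
        return 0;
    }
    return Math.ceil(content.length / 4);
}

console.log(approximateNumTokens("The quick brown fox")); // 5
console.log(approximateNumTokens([{ type: "text", text: "ignored for now" }])); // 0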

package/dist/chat_models/base.cjs
CHANGED

@@ -13,7 +13,9 @@ function createChatMessageChunkEncoderStream() {
     const textEncoder = new TextEncoder();
     return new TransformStream({
         transform(chunk, controller) {
-            controller.enqueue(textEncoder.encode(chunk.content
+            controller.enqueue(textEncoder.encode(typeof chunk.content === "string"
+                ? chunk.content
+                : JSON.stringify(chunk.content)));
         },
     });
 }
@@ -263,6 +265,9 @@ class BaseChatModel extends index_js_2.BaseLanguageModel {
     async predict(text, options, callbacks) {
         const message = new index_js_1.HumanMessage(text);
         const result = await this.call([message], options, callbacks);
+        if (typeof result.content !== "string") {
+            throw new Error("Cannot use predict when output is not a string.");
+        }
         return result.content;
     }
 }
@@ -275,6 +280,9 @@ class SimpleChatModel extends BaseChatModel {
     async _generate(messages, options, runManager) {
         const text = await this._call(messages, options, runManager);
         const message = new index_js_1.AIMessage(text);
+        if (typeof message.content !== "string") {
+            throw new Error("Cannot generate with a simple chat model when output is not a string.");
+        }
         return {
             generations: [
                 {

package/dist/chat_models/base.js
CHANGED

@@ -10,7 +10,9 @@ export function createChatMessageChunkEncoderStream() {
     const textEncoder = new TextEncoder();
     return new TransformStream({
         transform(chunk, controller) {
-            controller.enqueue(textEncoder.encode(chunk.content
+            controller.enqueue(textEncoder.encode(typeof chunk.content === "string"
+                ? chunk.content
+                : JSON.stringify(chunk.content)));
         },
     });
 }
@@ -259,6 +261,9 @@ export class BaseChatModel extends BaseLanguageModel {
     async predict(text, options, callbacks) {
         const message = new HumanMessage(text);
         const result = await this.call([message], options, callbacks);
+        if (typeof result.content !== "string") {
+            throw new Error("Cannot use predict when output is not a string.");
+        }
         return result.content;
     }
 }
@@ -270,6 +275,9 @@ export class SimpleChatModel extends BaseChatModel {
     async _generate(messages, options, runManager) {
         const text = await this._call(messages, options, runManager);
         const message = new AIMessage(text);
+        if (typeof message.content !== "string") {
+            throw new Error("Cannot generate with a simple chat model when output is not a string.");
+        }
         return {
             generations: [
                 {
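
The practical effect of the two `BaseChatModel`/`SimpleChatModel` guards above is that `predict` stays a string-only convenience; callers that may receive array-form content should use `call` (or `invoke`) and handle the returned message's `content` themselves. A usage sketch, assuming a configured `ChatOpenAI` instance:

import { ChatOpenAI } from "langchain/chat_models/openai";
import { HumanMessage } from "langchain/schema";

async function main() {
    const model = new ChatOpenAI();

    // predict() still returns a plain string, and now throws if the model
    // produced non-string (array) content.
    const text = await model.predict("Say hello");

    // call() returns a BaseMessage; the caller decides how to render
    // array-form content, e.g. by stringifying it.
    const message = await model.call([new HumanMessage("Say hello")]);
    const rendered = typeof message.content === "string"
        ? message.content
        : JSON.stringify(message.content);
    console.log(text, rendered);
}

main();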

package/dist/chat_models/bedrock/web.cjs
CHANGED

@@ -206,7 +206,11 @@ class BedrockChat extends base_js_1.SimpleChatModel {
                 finalResult = finalResult.concat(chunk);
             }
         }
-
+            const messageContent = finalResult?.message.content;
+            if (messageContent && typeof messageContent !== "string") {
+                throw new Error("Non-string output for ChatBedrock is currently not supported.");
+            }
+            return messageContent ?? "";
         }
         const response = await this._signedFetch(messages, options, {
             bedrockMethod: "invoke",

package/dist/chat_models/bedrock/web.js
CHANGED

@@ -201,7 +201,11 @@ export class BedrockChat extends SimpleChatModel {
                 finalResult = finalResult.concat(chunk);
             }
         }
-
+            const messageContent = finalResult?.message.content;
+            if (messageContent && typeof messageContent !== "string") {
+                throw new Error("Non-string output for ChatBedrock is currently not supported.");
+            }
+            return messageContent ?? "";
         }
         const response = await this._signedFetch(messages, options, {
             bedrockMethod: "invoke",

package/dist/chat_models/cloudflare_workersai.cjs
CHANGED

@@ -159,6 +159,9 @@ class ChatCloudflareWorkersAI extends base_js_1.SimpleChatModel {
                 console.warn(`Unsupported message type passed to Cloudflare: "${message._getType()}"`);
                 role = "user";
         }
+        if (typeof message.content !== "string") {
+            throw new Error("ChatCloudflareWorkersAI currently does not support non-string message content.");
+        }
         return {
             role,
             content: message.content,
@@ -184,7 +187,11 @@ class ChatCloudflareWorkersAI extends base_js_1.SimpleChatModel {
                 finalResult = finalResult.concat(chunk);
             }
         }
-
+            const messageContent = finalResult?.message.content;
+            if (messageContent && typeof messageContent !== "string") {
+                throw new Error("Non-string output for ChatCloudflareWorkersAI is currently not supported.");
+            }
+            return messageContent ?? "";
         }
     }
 }

package/dist/chat_models/cloudflare_workersai.js
CHANGED

@@ -156,6 +156,9 @@ export class ChatCloudflareWorkersAI extends SimpleChatModel {
                 console.warn(`Unsupported message type passed to Cloudflare: "${message._getType()}"`);
                 role = "user";
         }
+        if (typeof message.content !== "string") {
+            throw new Error("ChatCloudflareWorkersAI currently does not support non-string message content.");
+        }
         return {
             role,
             content: message.content,
@@ -181,7 +184,11 @@ export class ChatCloudflareWorkersAI extends SimpleChatModel {
                 finalResult = finalResult.concat(chunk);
             }
         }
-
+            const messageContent = finalResult?.message.content;
+            if (messageContent && typeof messageContent !== "string") {
+                throw new Error("Non-string output for ChatCloudflareWorkersAI is currently not supported.");
+            }
+            return messageContent ?? "";
         }
     }
 }

package/dist/chat_models/googlepalm.cjs
CHANGED

@@ -132,6 +132,10 @@ class ChatGooglePaLM extends base_js_1.BaseChatModel {
         const systemMessage = messages.length > 0 && getMessageAuthor(messages[0]) === "system"
             ? messages[0]
             : undefined;
+        if (systemMessage?.content !== undefined &&
+            typeof systemMessage.content !== "string") {
+            throw new Error("Non-string system message content is not supported.");
+        }
         return systemMessage?.content;
     }
     _mapBaseMessagesToPalmMessages(messages) {
@@ -145,13 +149,18 @@ class ChatGooglePaLM extends base_js_1.BaseChatModel {
                 throw new Error(`Google PaLM requires alternate messages between authors`);
             }
         });
-        return nonSystemMessages.map((m) =>
-
-
-
-
-
-
+        return nonSystemMessages.map((m) => {
+            if (typeof m.content !== "string") {
+                throw new Error("ChatGooglePaLM does not support non-string message content.");
+            }
+            return {
+                author: getMessageAuthor(m),
+                content: m.content,
+                citationMetadata: {
+                    citationSources: m.additional_kwargs.citationSources,
+                },
+            };
+        });
     }
     _mapPalmMessagesToChatResult(msgRes) {
         if (msgRes.candidates &&

package/dist/chat_models/googlepalm.js
CHANGED

@@ -129,6 +129,10 @@ export class ChatGooglePaLM extends BaseChatModel {
         const systemMessage = messages.length > 0 && getMessageAuthor(messages[0]) === "system"
             ? messages[0]
             : undefined;
+        if (systemMessage?.content !== undefined &&
+            typeof systemMessage.content !== "string") {
+            throw new Error("Non-string system message content is not supported.");
+        }
         return systemMessage?.content;
     }
     _mapBaseMessagesToPalmMessages(messages) {
@@ -142,13 +146,18 @@ export class ChatGooglePaLM extends BaseChatModel {
                 throw new Error(`Google PaLM requires alternate messages between authors`);
             }
         });
-        return nonSystemMessages.map((m) =>
-
-
-
-
-
-
+        return nonSystemMessages.map((m) => {
+            if (typeof m.content !== "string") {
+                throw new Error("ChatGooglePaLM does not support non-string message content.");
+            }
+            return {
+                author: getMessageAuthor(m),
+                content: m.content,
+                citationMetadata: {
+                    citationSources: m.additional_kwargs.citationSources,
+                },
+            };
+        });
     }
     _mapPalmMessagesToChatResult(msgRes) {
         if (msgRes.candidates &&

package/dist/chat_models/googlevertexai/common.cjs
CHANGED

@@ -76,6 +76,9 @@ class GoogleVertexAIChatMessage {
      * @returns A new Google Vertex AI chat message.
      */
     static fromChatMessage(message, model) {
+        if (typeof message.content !== "string") {
+            throw new Error("ChatGoogleVertexAI does not support non-string message content.");
+        }
         return new GoogleVertexAIChatMessage({
             author: GoogleVertexAIChatMessage.mapMessageTypeToVertexChatAuthor(message, model),
             content: message.content,
@@ -204,6 +207,9 @@ class BaseChatGoogleVertexAI extends base_js_1.BaseChatModel {
         let context = "";
         let conversationMessages = messages;
         if (messages[0]?._getType() === "system") {
+            if (typeof messages[0].content !== "string") {
+                throw new Error("ChatGoogleVertexAI does not support non-string message content.");
+            }
             context = messages[0].content;
             conversationMessages = messages.slice(1);
         }

package/dist/chat_models/googlevertexai/common.js
CHANGED

@@ -73,6 +73,9 @@ export class GoogleVertexAIChatMessage {
      * @returns A new Google Vertex AI chat message.
      */
     static fromChatMessage(message, model) {
+        if (typeof message.content !== "string") {
+            throw new Error("ChatGoogleVertexAI does not support non-string message content.");
+        }
         return new GoogleVertexAIChatMessage({
             author: GoogleVertexAIChatMessage.mapMessageTypeToVertexChatAuthor(message, model),
             content: message.content,
@@ -200,6 +203,9 @@ export class BaseChatGoogleVertexAI extends BaseChatModel {
         let context = "";
         let conversationMessages = messages;
         if (messages[0]?._getType() === "system") {
+            if (typeof messages[0].content !== "string") {
+                throw new Error("ChatGoogleVertexAI does not support non-string message content.");
+            }
             context = messages[0].content;
             conversationMessages = messages.slice(1);
         }

package/dist/chat_models/iflytek_xinghuo/common.cjs
CHANGED

@@ -269,10 +269,15 @@ class BaseChatIflytekXinghuo extends base_js_1.BaseChatModel {
     async _generate(messages, options, runManager) {
         const tokenUsage = {};
         const params = this.invocationParams();
-        const messagesMapped = messages.map((message) =>
-
-
-
+        const messagesMapped = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("ChatIflytekXinghuo does not support non-string message content.");
+            }
+            return {
+                role: messageToXinghuoRole(message),
+                content: message.content,
+            };
+        });
         const data = params.streaming
             ? await (async () => {
                 const streams = await this.completion({ messages: messagesMapped, ...params }, true, options.signal);

package/dist/chat_models/iflytek_xinghuo/common.js
CHANGED

@@ -266,10 +266,15 @@ export class BaseChatIflytekXinghuo extends BaseChatModel {
     async _generate(messages, options, runManager) {
         const tokenUsage = {};
         const params = this.invocationParams();
-        const messagesMapped = messages.map((message) =>
-
-
-
+        const messagesMapped = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("ChatIflytekXinghuo does not support non-string message content.");
+            }
+            return {
+                role: messageToXinghuoRole(message),
+                content: message.content,
+            };
+        });
         const data = params.streaming
             ? await (async () => {
                 const streams = await this.completion({ messages: messagesMapped, ...params }, true, options.signal);

package/dist/chat_models/llama_cpp.cjs
CHANGED

@@ -100,6 +100,9 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
             prompt = this._buildSession(messages);
         }
         else {
+            if (typeof messages[0].content !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // If we already have a session then we should just have a single prompt
             prompt = messages[0].content;
         }
@@ -128,8 +131,12 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
         // Let's see if we have a system message
         if (messages.findIndex((msg) => msg._getType() === "system") !== -1) {
             const sysMessages = messages.filter((message) => message._getType() === "system");
+            const systemMessageContent = sysMessages[sysMessages.length - 1].content;
+            if (typeof systemMessageContent !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // Only use the last provided system message
-            sysMessage =
+            sysMessage = systemMessageContent;
             // Now filter out the system messages
             noSystemMessages = messages.filter((message) => message._getType() !== "system");
         }
@@ -140,7 +147,11 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
         if (noSystemMessages.length > 1) {
             // Is the last message a prompt?
             if (noSystemMessages[noSystemMessages.length - 1]._getType() === "human") {
-
+                const finalMessageContent = noSystemMessages[noSystemMessages.length - 1].content;
+                if (typeof finalMessageContent !== "string") {
+                    throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+                }
+                prompt = finalMessageContent;
                 interactions = this._convertMessagesToInteractions(noSystemMessages.slice(0, noSystemMessages.length - 1));
             }
             else {
@@ -148,6 +159,9 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
             }
         }
         else {
+            if (typeof noSystemMessages[0].content !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // If there was only a single message we assume it's a prompt
             prompt = noSystemMessages[0].content;
         }
@@ -183,9 +197,14 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
         const result = [];
         for (let i = 0; i < messages.length; i += 2) {
             if (i + 1 < messages.length) {
+                const prompt = messages[i].content;
+                const response = messages[i + 1].content;
+                if (typeof prompt !== "string" || typeof response !== "string") {
+                    throw new Error("ChatLlamaCpp does not support non-string message content.");
+                }
                 result.push({
-                    prompt
-                    response
+                    prompt,
+                    response,
                 });
             }
         }

package/dist/chat_models/llama_cpp.js
CHANGED

@@ -97,6 +97,9 @@ export class ChatLlamaCpp extends SimpleChatModel {
             prompt = this._buildSession(messages);
         }
         else {
+            if (typeof messages[0].content !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // If we already have a session then we should just have a single prompt
             prompt = messages[0].content;
         }
@@ -125,8 +128,12 @@ export class ChatLlamaCpp extends SimpleChatModel {
         // Let's see if we have a system message
         if (messages.findIndex((msg) => msg._getType() === "system") !== -1) {
             const sysMessages = messages.filter((message) => message._getType() === "system");
+            const systemMessageContent = sysMessages[sysMessages.length - 1].content;
+            if (typeof systemMessageContent !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // Only use the last provided system message
-            sysMessage =
+            sysMessage = systemMessageContent;
             // Now filter out the system messages
             noSystemMessages = messages.filter((message) => message._getType() !== "system");
         }
@@ -137,7 +144,11 @@ export class ChatLlamaCpp extends SimpleChatModel {
         if (noSystemMessages.length > 1) {
             // Is the last message a prompt?
             if (noSystemMessages[noSystemMessages.length - 1]._getType() === "human") {
-
+                const finalMessageContent = noSystemMessages[noSystemMessages.length - 1].content;
+                if (typeof finalMessageContent !== "string") {
+                    throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+                }
+                prompt = finalMessageContent;
                 interactions = this._convertMessagesToInteractions(noSystemMessages.slice(0, noSystemMessages.length - 1));
             }
             else {
@@ -145,6 +156,9 @@ export class ChatLlamaCpp extends SimpleChatModel {
             }
         }
         else {
+            if (typeof noSystemMessages[0].content !== "string") {
+                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
+            }
             // If there was only a single message we assume it's a prompt
             prompt = noSystemMessages[0].content;
         }
@@ -180,9 +194,14 @@ export class ChatLlamaCpp extends SimpleChatModel {
         const result = [];
         for (let i = 0; i < messages.length; i += 2) {
             if (i + 1 < messages.length) {
+                const prompt = messages[i].content;
+                const response = messages[i + 1].content;
+                if (typeof prompt !== "string" || typeof response !== "string") {
+                    throw new Error("ChatLlamaCpp does not support non-string message content.");
+                }
                 result.push({
-                    prompt
-                    response
+                    prompt,
+                    response,
                 });
             }
         }

package/dist/chat_models/minimax.cjs
CHANGED

@@ -328,6 +328,9 @@ class ChatMinimax extends base_js_1.BaseChatModel {
         })
             ?.map((message) => {
             const sender_type = messageToMinimaxRole(message);
+            if (typeof message.content !== "string") {
+                throw new Error("ChatMinimax does not support non-string message content.");
+            }
             return {
                 sender_type,
                 text: message.content,
@@ -520,6 +523,9 @@ class ChatMinimax extends base_js_1.BaseChatModel {
             return;
         }
         const lastSystemMessage = systemMessages[systemMessages.length - 1];
+        if (typeof lastSystemMessage.content !== "string") {
+            throw new Error("ChatMinimax does not support non-string message content.");
+        }
         // setting the default botSetting.
         this.botSetting = [
             {

package/dist/chat_models/minimax.js
CHANGED

@@ -325,6 +325,9 @@ export class ChatMinimax extends BaseChatModel {
         })
             ?.map((message) => {
             const sender_type = messageToMinimaxRole(message);
+            if (typeof message.content !== "string") {
+                throw new Error("ChatMinimax does not support non-string message content.");
+            }
             return {
                 sender_type,
                 text: message.content,
@@ -517,6 +520,9 @@ export class ChatMinimax extends BaseChatModel {
             return;
         }
         const lastSystemMessage = systemMessages[systemMessages.length - 1];
+        if (typeof lastSystemMessage.content !== "string") {
+            throw new Error("ChatMinimax does not support non-string message content.");
+        }
         // setting the default botSetting.
         this.botSetting = [
             {

package/dist/chat_models/openai.cjs
CHANGED

@@ -54,14 +54,10 @@ function messageToOpenAIMessage(message) {
 }
 function openAIResponseToChatMessage(message) {
     switch (message.role) {
-        case "user":
-            return new index_js_1.HumanMessage(message.content || "");
         case "assistant":
             return new index_js_1.AIMessage(message.content || "", {
                 function_call: message.function_call,
             });
-        case "system":
-            return new index_js_1.SystemMessage(message.content || "");
         default:
             return new index_js_1.ChatMessage(message.content || "", message.role ?? "unknown");
     }
@@ -577,7 +573,8 @@ class ChatOpenAI extends base_js_1.BaseChatModel {
         let count = textCount + tokensPerMessage + roleCount + nameCount;
         // From: https://github.com/hmarr/openai-chat-tokens/blob/main/src/index.ts messageTokenEstimate
         const openAIMessage = messageToOpenAIMessage(message);
-        if (openAIMessage.role === "function"
+        if (openAIMessage.role === "function" ||
+            openAIMessage.role === "tool") {
             count -= 2;
         }
         if (openAIMessage.function_call) {

package/dist/chat_models/openai.js
CHANGED

@@ -1,5 +1,5 @@
 import { OpenAI as OpenAIClient } from "openai";
-import { AIMessage, AIMessageChunk, ChatGenerationChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk,
+import { AIMessage, AIMessageChunk, ChatGenerationChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk, HumanMessageChunk, SystemMessageChunk, } from "../schema/index.js";
 import { formatToOpenAIFunction } from "../tools/convert_to_openai.js";
 import { getEndpoint } from "../util/azure.js";
 import { getEnvironmentVariable } from "../util/env.js";
@@ -51,14 +51,10 @@ function messageToOpenAIMessage(message) {
 }
 function openAIResponseToChatMessage(message) {
     switch (message.role) {
-        case "user":
-            return new HumanMessage(message.content || "");
         case "assistant":
             return new AIMessage(message.content || "", {
                 function_call: message.function_call,
             });
-        case "system":
-            return new SystemMessage(message.content || "");
         default:
             return new ChatMessage(message.content || "", message.role ?? "unknown");
     }
@@ -574,7 +570,8 @@ export class ChatOpenAI extends BaseChatModel {
         let count = textCount + tokensPerMessage + roleCount + nameCount;
         // From: https://github.com/hmarr/openai-chat-tokens/blob/main/src/index.ts messageTokenEstimate
         const openAIMessage = messageToOpenAIMessage(message);
-        if (openAIMessage.role === "function"
+        if (openAIMessage.role === "function" ||
+            openAIMessage.role === "tool") {
             count -= 2;
         }
         if (openAIMessage.function_call) {
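
The `ChatOpenAI` token-estimate tweak above only widens the existing correction from `function` messages to `tool` messages; the surrounding heuristic (after hmarr/openai-chat-tokens) looks roughly like the sketch below, where the counts are assumed to come from a real tokenizer and the `function_call` adjustment visible in the trailing context is omitted:

// Rough sketch of the per-message estimate; textCount/roleCount/nameCount are
// assumed tokenizer outputs, and tokensPerMessage is model-dependent.
function messageTokenEstimate(
    role: string,
    textCount: number,
    roleCount: number,
    nameCount: number,
    tokensPerMessage = 3
): number {
    let count = textCount + tokensPerMessage + roleCount + nameCount;
    if (role === "function" || role === "tool") {
        count -= 2; // both roles now get the same fixed-overhead correction
    }
    return count;
}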

package/dist/chat_models/portkey.cjs
CHANGED

@@ -98,10 +98,15 @@ class PortkeyChat extends base_js_1.BaseChatModel {
         return "portkey";
     }
     async _generate(messages, options, _) {
-        const messagesList = messages.map((message) =>
-
-
-
+        const messagesList = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("PortkeyChat does not support non-string message content.");
+            }
+            return {
+                role: message._getType(),
+                content: message.content,
+            };
+        });
         const response = await this.session.portkey.chatCompletions.create({
             messages: messagesList,
             ...options,
@@ -124,10 +129,15 @@ class PortkeyChat extends base_js_1.BaseChatModel {
         };
     }
     async *_streamResponseChunks(messages, options, runManager) {
-        const messagesList = messages.map((message) =>
-
-
-
+        const messagesList = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("PortkeyChat does not support non-string message content.");
+            }
+            return {
+                role: message._getType(),
+                content: message.content,
+            };
+        });
         const response = await this.session.portkey.chatCompletions.create({
             messages: messagesList,
             ...options,

package/dist/chat_models/portkey.js
CHANGED

@@ -95,10 +95,15 @@ export class PortkeyChat extends BaseChatModel {
         return "portkey";
     }
     async _generate(messages, options, _) {
-        const messagesList = messages.map((message) =>
-
-
-
+        const messagesList = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("PortkeyChat does not support non-string message content.");
+            }
+            return {
+                role: message._getType(),
+                content: message.content,
+            };
+        });
         const response = await this.session.portkey.chatCompletions.create({
             messages: messagesList,
             ...options,
@@ -121,10 +126,15 @@ export class PortkeyChat extends BaseChatModel {
         };
     }
     async *_streamResponseChunks(messages, options, runManager) {
-        const messagesList = messages.map((message) =>
-
-
-
+        const messagesList = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("PortkeyChat does not support non-string message content.");
+            }
+            return {
+                role: message._getType(),
+                content: message.content,
+            };
+        });
         const response = await this.session.portkey.chatCompletions.create({
             messages: messagesList,
             ...options,

package/dist/chat_models/yandex.cjs
CHANGED

@@ -9,6 +9,9 @@ function _parseChatHistory(history) {
     const chatHistory = [];
     let instruction = "";
     for (const message of history) {
+        if (typeof message.content !== "string") {
+            throw new Error("ChatYandexGPT does not support non-string message content.");
+        }
         if ("content" in message) {
             if (message._getType() === "human") {
                 chatHistory.push({ role: "user", text: message.content });

package/dist/chat_models/yandex.js
CHANGED

@@ -6,6 +6,9 @@ function _parseChatHistory(history) {
     const chatHistory = [];
     let instruction = "";
     for (const message of history) {
+        if (typeof message.content !== "string") {
+            throw new Error("ChatYandexGPT does not support non-string message content.");
+        }
         if ("content" in message) {
             if (message._getType() === "human") {
                 chatHistory.push({ role: "user", text: message.content });

package/dist/experimental/autogpt/prompt.cjs
CHANGED

@@ -81,6 +81,10 @@ class AutoGPTPrompt extends chat_js_1.BaseChatPromptTemplate {
     async formatMessages({ goals, memory, messages: previousMessages, user_input, }) {
         const basePrompt = new index_js_1.SystemMessage(this.constructFullPrompt(goals));
         const timePrompt = new index_js_1.SystemMessage(`The current time and date is ${new Date().toLocaleString()}`);
+        if (typeof basePrompt.content !== "string" ||
+            typeof timePrompt.content !== "string") {
+            throw new Error("Non-string message content is not supported.");
+        }
         const usedTokens = (await this.tokenCounter(basePrompt.content)) +
             (await this.tokenCounter(timePrompt.content));
         const relevantDocs = await memory.getRelevantDocuments(JSON.stringify(previousMessages.slice(-10)));
@@ -92,9 +96,15 @@ class AutoGPTPrompt extends chat_js_1.BaseChatPromptTemplate {
         }
         const contentFormat = `This reminds you of these events from your past:\n${relevantMemory.join("\n")}\n\n`;
         const memoryMessage = new index_js_1.SystemMessage(contentFormat);
+        if (typeof memoryMessage.content !== "string") {
+            throw new Error("Non-string message content is not supported.");
+        }
         const usedTokensWithMemory = (await usedTokens) + (await this.tokenCounter(memoryMessage.content));
         const historicalMessages = [];
         for (const message of previousMessages.slice(-10).reverse()) {
+            if (typeof message.content !== "string") {
+                throw new Error("Non-string message content is not supported.");
+            }
             const messageTokens = await this.tokenCounter(message.content);
             if (usedTokensWithMemory + messageTokens > this.sendTokenLimit - 1000) {
                 break;

package/dist/experimental/autogpt/prompt.js
CHANGED

@@ -78,6 +78,10 @@ export class AutoGPTPrompt extends BaseChatPromptTemplate {
     async formatMessages({ goals, memory, messages: previousMessages, user_input, }) {
         const basePrompt = new SystemMessage(this.constructFullPrompt(goals));
         const timePrompt = new SystemMessage(`The current time and date is ${new Date().toLocaleString()}`);
+        if (typeof basePrompt.content !== "string" ||
+            typeof timePrompt.content !== "string") {
+            throw new Error("Non-string message content is not supported.");
+        }
         const usedTokens = (await this.tokenCounter(basePrompt.content)) +
             (await this.tokenCounter(timePrompt.content));
         const relevantDocs = await memory.getRelevantDocuments(JSON.stringify(previousMessages.slice(-10)));
@@ -89,9 +93,15 @@ export class AutoGPTPrompt extends BaseChatPromptTemplate {
         }
         const contentFormat = `This reminds you of these events from your past:\n${relevantMemory.join("\n")}\n\n`;
         const memoryMessage = new SystemMessage(contentFormat);
+        if (typeof memoryMessage.content !== "string") {
+            throw new Error("Non-string message content is not supported.");
+        }
         const usedTokensWithMemory = (await usedTokens) + (await this.tokenCounter(memoryMessage.content));
         const historicalMessages = [];
         for (const message of previousMessages.slice(-10).reverse()) {
+            if (typeof message.content !== "string") {
+                throw new Error("Non-string message content is not supported.");
+            }
             const messageTokens = await this.tokenCounter(message.content);
             if (usedTokensWithMemory + messageTokens > this.sendTokenLimit - 1000) {
                 break;

package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.cjs
CHANGED

@@ -247,6 +247,9 @@ class ViolationOfExpectationsChain extends base_js_1.BaseChain {
             function_call: { name: types_js_1.PREDICTION_VIOLATIONS_FUNCTION.name },
         });
         const chain = violation_of_expectations_prompt_js_1.PREDICTION_VIOLATIONS_PROMPT.pipe(llmWithFunctions).pipe(this.jsonOutputParser);
+        if (typeof userResponse?.content !== "string") {
+            throw new Error("This chain does not support non-string model output.");
+        }
         const res = (await chain.invoke({
             predicted_output: userPredictions.predictedUserMessage,
             actual_output: userResponse?.content ?? "",
@@ -299,6 +302,9 @@ class ViolationOfExpectationsChain extends base_js_1.BaseChain {
      */
     async generateFacts({ userResponse, predictions, runManager, }) {
         const chain = violation_of_expectations_prompt_js_1.GENERATE_FACTS_PROMPT.pipe(this.llm).pipe(this.stringOutputParser);
+        if (typeof userResponse?.content !== "string") {
+            throw new Error("This chain does not support non-string model output.");
+        }
         const res = await chain.invoke({
             prediction_violations: predictions.explainedPredictionErrors.join("\n"),
             prediction: predictions.revisedPrediction,

package/dist/experimental/chains/violation_of_expectations/violation_of_expectations_chain.js
CHANGED

@@ -244,6 +244,9 @@ export class ViolationOfExpectationsChain extends BaseChain {
             function_call: { name: PREDICTION_VIOLATIONS_FUNCTION.name },
         });
         const chain = PREDICTION_VIOLATIONS_PROMPT.pipe(llmWithFunctions).pipe(this.jsonOutputParser);
+        if (typeof userResponse?.content !== "string") {
+            throw new Error("This chain does not support non-string model output.");
+        }
         const res = (await chain.invoke({
             predicted_output: userPredictions.predictedUserMessage,
             actual_output: userResponse?.content ?? "",
@@ -296,6 +299,9 @@ export class ViolationOfExpectationsChain extends BaseChain {
      */
     async generateFacts({ userResponse, predictions, runManager, }) {
         const chain = GENERATE_FACTS_PROMPT.pipe(this.llm).pipe(this.stringOutputParser);
+        if (typeof userResponse?.content !== "string") {
+            throw new Error("This chain does not support non-string model output.");
+        }
         const res = await chain.invoke({
             prediction_violations: predictions.explainedPredictionErrors.join("\n"),
             prediction: predictions.revisedPrediction,

package/dist/experimental/chat_models/anthropic_functions.cjs
CHANGED

@@ -112,6 +112,9 @@ class AnthropicFunctions extends base_js_1.BaseChatModel {
         }
         const chatResult = await this.llm._generate(promptMessages, options, runManager);
         const chatGenerationContent = chatResult.generations[0].message.content;
+        if (typeof chatGenerationContent !== "string") {
+            throw new Error("AnthropicFunctions does not support non-string output.");
+        }
         if (forced) {
             const parser = new fast_xml_parser_1.XMLParser();
             const result = parser.parse(`${chatGenerationContent}</tool_input>`);

package/dist/experimental/chat_models/anthropic_functions.js
CHANGED

@@ -109,6 +109,9 @@ export class AnthropicFunctions extends BaseChatModel {
         }
         const chatResult = await this.llm._generate(promptMessages, options, runManager);
         const chatGenerationContent = chatResult.generations[0].message.content;
+        if (typeof chatGenerationContent !== "string") {
+            throw new Error("AnthropicFunctions does not support non-string output.");
+        }
         if (forced) {
             const parser = new XMLParser();
             const result = parser.parse(`${chatGenerationContent}</tool_input>`);

package/dist/experimental/chat_models/bittensor.cjs
CHANGED

@@ -60,10 +60,15 @@ class NIBittensorChatModel extends base_js_1.BaseChatModel {
      * const res = await chat.call([message]);
      */
     async _generate(messages) {
-        const processed_messages = messages.map((message) =>
-
-
-
+        const processed_messages = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("NIBittensorChat does not support non-string output.");
+            }
+            return {
+                role: this.messageToOpenAIRole(message),
+                content: message.content,
+            };
+        });
         const generations = [];
         try {
             // Retrieve API KEY

package/dist/experimental/chat_models/bittensor.js
CHANGED

@@ -57,10 +57,15 @@ export class NIBittensorChatModel extends BaseChatModel {
      * const res = await chat.call([message]);
      */
     async _generate(messages) {
-        const processed_messages = messages.map((message) =>
-
-
-
+        const processed_messages = messages.map((message) => {
+            if (typeof message.content !== "string") {
+                throw new Error("NIBittensorChat does not support non-string output.");
+            }
+            return {
+                role: this.messageToOpenAIRole(message),
+                content: message.content,
+            };
+        });
         const generations = [];
         try {
             // Retrieve API KEY

package/dist/schema/index.cjs
CHANGED

@@ -35,6 +35,26 @@ class GenerationChunk {
     }
 }
 exports.GenerationChunk = GenerationChunk;
+function mergeContent(firstContent, secondContent) {
+    // If first content is a string
+    if (typeof firstContent === "string") {
+        if (typeof secondContent === "string") {
+            return firstContent + secondContent;
+        }
+        else {
+            return [{ type: "text", text: firstContent }, ...secondContent];
+        }
+        // If both are arrays
+    }
+    else if (Array.isArray(secondContent)) {
+        return [...firstContent, ...secondContent];
+        // If the first content is a list and second is a string
+    }
+    else {
+        // Otherwise, add the second content as a new element of the list
+        return [...firstContent, { type: "text", text: secondContent }];
+    }
+}
 /**
  * Base class for all types of messages in a conversation. It includes
  * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -46,7 +66,7 @@ class BaseMessage extends serializable_js_1.Serializable {
      * Use {@link BaseMessage.content} instead.
      */
     get text() {
-        return this.content;
+        return typeof this.content === "string" ? this.content : "";
     }
     constructor(fields,
     /** @deprecated */
@@ -73,7 +93,7 @@ class BaseMessage extends serializable_js_1.Serializable {
             writable: true,
            value: true
         });
-        /** The
+        /** The content of the message. */
         Object.defineProperty(this, "content", {
             enumerable: true,
             configurable: true,
@@ -191,7 +211,7 @@ class HumanMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new HumanMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -222,7 +242,7 @@ class AIMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new AIMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -253,7 +273,7 @@ class SystemMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new SystemMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
            additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -313,7 +333,7 @@ class FunctionMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new FunctionMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
             name: this.name ?? "",
         });
@@ -407,7 +427,7 @@ class ChatMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new ChatMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
             role: this.role,
         });

package/dist/schema/index.d.ts
CHANGED

@@ -64,8 +64,15 @@ export interface StoredGeneration {
     message?: StoredMessage;
 }
 export type MessageType = "human" | "ai" | "generic" | "system" | "function";
+export type MessageContent = string | {
+    type: "text" | "image_url";
+    text?: string;
+    image_url?: string | {
+        url: string;
+    };
+}[];
 export interface BaseMessageFields {
-    content:
+    content: MessageContent;
     name?: string;
     additional_kwargs?: {
         function_call?: OpenAIClient.Chat.ChatCompletionMessage.FunctionCall;
@@ -91,8 +98,8 @@ export declare abstract class BaseMessage extends Serializable implements BaseMe
      * Use {@link BaseMessage.content} instead.
      */
     get text(): string;
-    /** The
-    content:
+    /** The content of the message. */
+    content: MessageContent;
    /** The name of the message sender in a multi-user chat. */
     name?: string;
     /** Additional keyword arguments */
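
For illustration, both shapes allowed by the new `MessageContent` union (values match the type declared above; the URL is a placeholder):

import type { MessageContent } from "langchain/schema";

// Plain-text content, identical to what earlier releases accepted.
const textContent: MessageContent = "Describe this image.";

// Array-form content mixing a text part and an image_url part.
const multimodalContent: MessageContent = [
    { type: "text", text: "Describe this image." },
    { type: "image_url", image_url: { url: "https://example.com/photo.png" } },
];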

package/dist/schema/index.js
CHANGED

@@ -31,6 +31,26 @@ export class GenerationChunk {
         });
     }
 }
+function mergeContent(firstContent, secondContent) {
+    // If first content is a string
+    if (typeof firstContent === "string") {
+        if (typeof secondContent === "string") {
+            return firstContent + secondContent;
+        }
+        else {
+            return [{ type: "text", text: firstContent }, ...secondContent];
+        }
+        // If both are arrays
+    }
+    else if (Array.isArray(secondContent)) {
+        return [...firstContent, ...secondContent];
+        // If the first content is a list and second is a string
+    }
+    else {
+        // Otherwise, add the second content as a new element of the list
+        return [...firstContent, { type: "text", text: secondContent }];
+    }
+}
 /**
  * Base class for all types of messages in a conversation. It includes
  * properties like `content`, `name`, and `additional_kwargs`. It also
@@ -42,7 +62,7 @@ export class BaseMessage extends Serializable {
      * Use {@link BaseMessage.content} instead.
     */
     get text() {
-        return this.content;
+        return typeof this.content === "string" ? this.content : "";
     }
     constructor(fields,
     /** @deprecated */
@@ -69,7 +89,7 @@ export class BaseMessage extends Serializable {
            writable: true,
            value: true
         });
-        /** The
+        /** The content of the message. */
        Object.defineProperty(this, "content", {
             enumerable: true,
             configurable: true,
@@ -184,7 +204,7 @@ export class HumanMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new HumanMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: HumanMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -213,7 +233,7 @@ export class AIMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new AIMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: AIMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -242,7 +262,7 @@ export class SystemMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new SystemMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: SystemMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
         });
     }
@@ -300,7 +320,7 @@ export class FunctionMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new FunctionMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: FunctionMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
             name: this.name ?? "",
         });
@@ -389,7 +409,7 @@ export class ChatMessageChunk extends BaseMessageChunk {
     }
     concat(chunk) {
         return new ChatMessageChunk({
-            content: this.content
+            content: mergeContent(this.content, chunk.content),
             additional_kwargs: ChatMessageChunk._mergeAdditionalKwargs(this.additional_kwargs, chunk.additional_kwargs),
             role: this.role,
         });
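
`mergeContent` is internal to the module, but its merge rules follow directly from the hunk above; a self-contained sketch (content parts simplified to text-only) with the result of each case:

type TextPart = { type: "text"; text: string };
type Content = string | TextPart[];

// Same rules the chunk concat() methods now rely on.
function mergeContent(first: Content, second: Content): Content {
    if (typeof first === "string") {
        return typeof second === "string"
            ? first + second // string + string => concatenated string
            : [{ type: "text", text: first }, ...second]; // string + array => array
    }
    if (Array.isArray(second)) {
        return [...first, ...second]; // array + array => concatenated array
    }
    return [...first, { type: "text", text: second }]; // array + string => array
}

console.log(mergeContent("Hello, ", "world")); // "Hello, world"
console.log(mergeContent("Hello", [{ type: "text", text: " world" }]));
// [ { type: "text", text: "Hello" }, { type: "text", text: " world" } ]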

package/dist/schema/output_parser.cjs
CHANGED

@@ -36,7 +36,14 @@ class BaseLLMOutputParser extends index_js_2.Runnable {
             return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
         }
         else {
-            return this._callWithConfig(async (input) => this.parseResult([
+            return this._callWithConfig(async (input) => this.parseResult([
+                {
+                    message: input,
+                    text: typeof input.content === "string"
+                        ? input.content
+                        : JSON.stringify(input.content),
+                },
+            ]), input, { ...options, runType: "parser" });
         }
     }
 }
@@ -69,7 +76,14 @@ class BaseTransformOutputParser extends BaseOutputParser {
                 yield this.parseResult([{ text: chunk }]);
             }
             else {
-                yield this.parseResult([
+                yield this.parseResult([
+                    {
+                        message: chunk,
+                        text: typeof chunk.content === "string"
+                            ? chunk.content
+                            : JSON.stringify(chunk.content),
+                    },
+                ]);
             }
         }
     }
@@ -108,14 +122,23 @@ class BaseCumulativeTransformOutputParser extends BaseTransformOutputParser {
         let prevParsed;
         let accGen;
         for await (const chunk of inputGenerator) {
+            if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+                throw new Error("Cannot handle non-string output.");
+            }
             let chunkGen;
             if ((0, index_js_1.isBaseMessageChunk)(chunk)) {
+                if (typeof chunk.content !== "string") {
+                    throw new Error("Cannot handle non-string message output.");
+                }
                 chunkGen = new index_js_1.ChatGenerationChunk({
                     message: chunk,
                     text: chunk.content,
                 });
             }
             else if ((0, index_js_1.isBaseMessage)(chunk)) {
+                if (typeof chunk.content !== "string") {
+                    throw new Error("Cannot handle non-string message output.");
+                }
                 chunkGen = new index_js_1.ChatGenerationChunk({
                     message: chunk.toChunk(),
                     text: chunk.content,

package/dist/schema/output_parser.js
CHANGED

@@ -33,7 +33,14 @@ export class BaseLLMOutputParser extends Runnable {
             return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
         }
         else {
-            return this._callWithConfig(async (input) => this.parseResult([
+            return this._callWithConfig(async (input) => this.parseResult([
+                {
+                    message: input,
+                    text: typeof input.content === "string"
+                        ? input.content
+                        : JSON.stringify(input.content),
+                },
+            ]), input, { ...options, runType: "parser" });
         }
     }
 }
@@ -64,7 +71,14 @@ export class BaseTransformOutputParser extends BaseOutputParser {
                 yield this.parseResult([{ text: chunk }]);
             }
             else {
-                yield this.parseResult([
+                yield this.parseResult([
+                    {
+                        message: chunk,
+                        text: typeof chunk.content === "string"
+                            ? chunk.content
+                            : JSON.stringify(chunk.content),
+                    },
+                ]);
             }
         }
     }
@@ -102,14 +116,23 @@ export class BaseCumulativeTransformOutputParser extends BaseTransformOutputPars
         let prevParsed;
         let accGen;
         for await (const chunk of inputGenerator) {
+            if (typeof chunk !== "string" && typeof chunk.content !== "string") {
+                throw new Error("Cannot handle non-string output.");
+            }
             let chunkGen;
             if (isBaseMessageChunk(chunk)) {
+                if (typeof chunk.content !== "string") {
+                    throw new Error("Cannot handle non-string message output.");
+                }
                 chunkGen = new ChatGenerationChunk({
                     message: chunk,
                     text: chunk.content,
                 });
            }
             else if (isBaseMessage(chunk)) {
+                if (typeof chunk.content !== "string") {
+                    throw new Error("Cannot handle non-string message output.");
+                }
                 chunkGen = new ChatGenerationChunk({
                     message: chunk.toChunk(),
                     text: chunk.content,
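
The output parsers above all reduce a message's content to the generation's `text` field the same way: pass strings through and `JSON.stringify` the array form. A standalone sketch of that derivation:

import type { MessageContent } from "langchain/schema";

// Mirrors how the parsers above build the text handed to parseResult().
function contentToText(content: MessageContent): string {
    return typeof content === "string" ? content : JSON.stringify(content);
}

console.log(contentToText("plain text")); // "plain text"
console.log(contentToText([{ type: "text", text: "part" }]));
// '[{"type":"text","text":"part"}]'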

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "langchain",
-  "version": "0.0.181",
+  "version": "0.0.182-rc.0",
   "description": "Typescript bindings for langchain",
   "type": "module",
   "engines": {
@@ -805,6 +805,7 @@
     "precommit": "lint-staged",
     "clean": "rimraf dist/ && NODE_OPTIONS=--max-old-space-size=4096 node scripts/create-entrypoints.js pre",
     "prepack": "yarn build",
+    "prerelease": "npm publish",
     "release": "release-it --only-version --config .release-it.json",
     "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%",
     "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts",
@@ -1360,7 +1361,7 @@
     "langchainhub": "~0.0.6",
     "langsmith": "~0.0.48",
     "ml-distance": "^4.0.0",
-    "openai": "~4.
+    "openai": "~4.16.0",
     "openapi-types": "^12.1.3",
     "p-queue": "^6.6.2",
     "p-retry": "4",