@langchain/core 0.2.30 → 0.2.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/language_models/chat_models.cjs +15 -0
- package/dist/language_models/chat_models.js +15 -0
- package/dist/messages/base.cjs +13 -1
- package/dist/messages/base.d.ts +8 -1
- package/dist/messages/base.js +11 -0
- package/dist/messages/utils.cjs +36 -1
- package/dist/messages/utils.js +38 -3
- package/dist/tracers/event_stream.cjs +1 -1
- package/dist/tracers/event_stream.js +1 -1
- package/dist/tracers/log_stream.cjs +4 -1
- package/dist/tracers/log_stream.js +4 -1
- package/package.json +1 -1
package/dist/language_models/chat_models.cjs
CHANGED

@@ -90,6 +90,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
         let generationChunk;
         try {
             for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
+                if (chunk.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        chunk.message._updateId(`run-${runId}`);
+                }
                 chunk.message.response_metadata = {
                     ...chunk.generationInfo,
                     ...chunk.message.response_metadata,
@@ -150,6 +155,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
         const stream = await this._streamResponseChunks(baseMessages[0], parsedOptions, runManagers?.[0]);
         let aggregated;
         for await (const chunk of stream) {
+            if (chunk.message.id == null) {
+                const runId = runManagers?.at(0)?.runId;
+                if (runId != null)
+                    chunk.message._updateId(`run-${runId}`);
+            }
             if (aggregated === undefined) {
                 aggregated = chunk;
             }
@@ -179,6 +189,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
         if (pResult.status === "fulfilled") {
             const result = pResult.value;
             for (const generation of result.generations) {
+                if (generation.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        generation.message._updateId(`run-${runId}`);
+                }
                 generation.message.response_metadata = {
                     ...generation.generationInfo,
                     ...generation.message.response_metadata,
package/dist/language_models/chat_models.js
CHANGED

@@ -86,6 +86,11 @@ export class BaseChatModel extends BaseLanguageModel {
         let generationChunk;
         try {
             for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
+                if (chunk.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        chunk.message._updateId(`run-${runId}`);
+                }
                 chunk.message.response_metadata = {
                     ...chunk.generationInfo,
                     ...chunk.message.response_metadata,
@@ -146,6 +151,11 @@ export class BaseChatModel extends BaseLanguageModel {
         const stream = await this._streamResponseChunks(baseMessages[0], parsedOptions, runManagers?.[0]);
         let aggregated;
         for await (const chunk of stream) {
+            if (chunk.message.id == null) {
+                const runId = runManagers?.at(0)?.runId;
+                if (runId != null)
+                    chunk.message._updateId(`run-${runId}`);
+            }
             if (aggregated === undefined) {
                 aggregated = chunk;
             }
@@ -175,6 +185,11 @@ export class BaseChatModel extends BaseLanguageModel {
         if (pResult.status === "fulfilled") {
             const result = pResult.value;
             for (const generation of result.generations) {
+                if (generation.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        generation.message._updateId(`run-${runId}`);
+                }
                 generation.message.response_metadata = {
                     ...generation.generationInfo,
                     ...generation.message.response_metadata,
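Taken together, the chat_models hunks above give streamed and batched generations a fallback message id: when the provider response carries no id, the message is stamped with `run-${runId}` from the callback run. A minimal usage sketch (ChatOpenAI and the model name are illustrative assumptions; any BaseChatModel subclass hits the same code path):

// Sketch only — assumes @langchain/openai is installed.
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({ model: "gpt-4o-mini" });
for await (const chunk of await model.stream("Hello!")) {
  // Chunks without a provider-supplied id now fall back to "run-<run uuid>".
  console.log(chunk.id, chunk.content);
}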
package/dist/messages/base.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isBaseMessageChunk = exports.isBaseMessage = exports.BaseMessageChunk = exports._mergeObj = exports._mergeLists = exports._mergeDicts = exports.isOpenAIToolCallArray = exports.BaseMessage = exports._mergeStatus = exports.mergeContent = void 0;
+exports.isBaseMessageChunk = exports.isBaseMessage = exports._isMessageFieldWithRole = exports.BaseMessageChunk = exports._mergeObj = exports._mergeLists = exports._mergeDicts = exports.isOpenAIToolCallArray = exports.BaseMessage = exports._mergeStatus = exports.mergeContent = void 0;
 const serializable_js_1 = require("../load/serializable.cjs");
 function mergeContent(firstContent, secondContent) {
     // If first content is a string

@@ -181,6 +181,14 @@ class BaseMessage extends serializable_js_1.Serializable {
             response_metadata: this.response_metadata,
         };
     }
+    // this private method is used to update the ID for the runtime
+    // value as well as in lc_kwargs for serialisation
+    _updateId(value) {
+        this.id = value;
+        // lc_attributes wouldn't work here, because jest compares the
+        // whole object
+        this.lc_kwargs.id = value;
+    }
     get [Symbol.toStringTag]() {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         return this.constructor.lc_name();

@@ -317,6 +325,10 @@ exports._mergeObj = _mergeObj;
 class BaseMessageChunk extends BaseMessage {
 }
 exports.BaseMessageChunk = BaseMessageChunk;
+function _isMessageFieldWithRole(x) {
+    return typeof x.role === "string";
+}
+exports._isMessageFieldWithRole = _isMessageFieldWithRole;
 function isBaseMessage(messageLike) {
     return typeof messageLike?._getType === "function";
 }
package/dist/messages/base.d.ts
CHANGED
@@ -134,6 +134,7 @@ export declare abstract class BaseMessage extends Serializable implements BaseMe
     toDict(): StoredMessage;
     static lc_name(): string;
     get _printableFields(): Record<string, unknown>;
+    _updateId(value: string | undefined): void;
     get [Symbol.toStringTag](): any;
 }
 export type OpenAIToolCall = ToolCall & {

@@ -153,9 +154,15 @@ export declare function _mergeObj<T = any>(left: T | undefined, right: T | undef
 export declare abstract class BaseMessageChunk extends BaseMessage {
     abstract concat(chunk: BaseMessageChunk): BaseMessageChunk;
 }
+export type MessageFieldWithRole = {
+    role: StringWithAutocomplete<"user" | "assistant" | MessageType>;
+    content: MessageContent;
+    name?: string;
+} & Record<string, unknown>;
+export declare function _isMessageFieldWithRole(x: BaseMessageLike): x is MessageFieldWithRole;
 export type BaseMessageLike = BaseMessage | ({
     type: MessageType | "user" | "assistant" | "placeholder";
-} & BaseMessageFields & Record<string, unknown>) | [
+} & BaseMessageFields & Record<string, unknown>) | MessageFieldWithRole | [
     StringWithAutocomplete<MessageType | "user" | "assistant" | "placeholder">,
     MessageContent
 ] | string;
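The new MessageFieldWithRole member of BaseMessageLike means OpenAI-style role objects now type-check wherever message-like inputs are accepted. A small sketch (assumes these types are re-exported from the package's messages entrypoint):

import type { BaseMessageLike } from "@langchain/core/messages";

// Role-keyed objects join the existing { type, ... } objects,
// [type, content] tuples, and plain strings.
const history: BaseMessageLike[] = [
  { role: "system", content: "You are terse." },
  { role: "user", content: "What is 2 + 2?" },
  { role: "assistant", content: "4" },
];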
package/dist/messages/base.js
CHANGED
@@ -176,6 +176,14 @@ export class BaseMessage extends Serializable {
             response_metadata: this.response_metadata,
         };
     }
+    // this private method is used to update the ID for the runtime
+    // value as well as in lc_kwargs for serialisation
+    _updateId(value) {
+        this.id = value;
+        // lc_attributes wouldn't work here, because jest compares the
+        // whole object
+        this.lc_kwargs.id = value;
+    }
     get [Symbol.toStringTag]() {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         return this.constructor.lc_name();

@@ -306,6 +314,9 @@ export function _mergeObj(left, right) {
  */
 export class BaseMessageChunk extends BaseMessage {
 }
+export function _isMessageFieldWithRole(x) {
+    return typeof x.role === "string";
+}
 export function isBaseMessage(messageLike) {
     return typeof messageLike?._getType === "function";
 }
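_updateId (added to base.cjs, base.js, and base.d.ts above) writes the new id both to the runtime field and to lc_kwargs, so a message whose id is patched after construction still serialises with it. A behaviour sketch mirroring the added method (it is underscore-prefixed and intended for internal use by the runtime):

import { AIMessage } from "@langchain/core/messages";

const msg = new AIMessage({ content: "hi" });
msg._updateId("run-123e4567");
console.log(msg.id);           // "run-123e4567"
console.log(msg.lc_kwargs.id); // "run-123e4567" (kept in sync for serialisation)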
package/dist/messages/utils.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.convertToChunk = exports.mapChatMessagesToStoredMessages = exports.mapStoredMessagesToChatMessages = exports.mapStoredMessageToChatMessage = exports.getBufferString = exports.coerceMessageLikeToMessage = void 0;
+const utils_js_1 = require("../tools/utils.cjs");
 const ai_js_1 = require("./ai.cjs");
 const base_js_1 = require("./base.cjs");
 const chat_js_1 = require("./chat.cjs");

@@ -8,13 +9,43 @@ const function_js_1 = require("./function.cjs");
 const human_js_1 = require("./human.cjs");
 const system_js_1 = require("./system.cjs");
 const tool_js_1 = require("./tool.cjs");
+function _coerceToolCall(toolCall) {
+    if ((0, utils_js_1._isToolCall)(toolCall)) {
+        return toolCall;
+    }
+    else if (typeof toolCall.id === "string" &&
+        toolCall.type === "function" &&
+        typeof toolCall.function === "object" &&
+        toolCall.function !== null &&
+        "arguments" in toolCall.function &&
+        typeof toolCall.function.arguments === "string" &&
+        "name" in toolCall.function &&
+        typeof toolCall.function.name === "string") {
+        // Handle OpenAI tool call format
+        return {
+            id: toolCall.id,
+            args: JSON.parse(toolCall.function.arguments),
+            name: toolCall.function.name,
+            type: "tool_call",
+        };
+    }
+    else {
+        // TODO: Throw an error?
+        return toolCall;
+    }
+}
 function _constructMessageFromParams(params) {
     const { type, ...rest } = params;
     if (type === "human" || type === "user") {
         return new human_js_1.HumanMessage(rest);
     }
     else if (type === "ai" || type === "assistant") {
-        return new ai_js_1.AIMessage(rest);
+        const { tool_calls: rawToolCalls, ...other } = rest;
+        if (!Array.isArray(rawToolCalls)) {
+            return new ai_js_1.AIMessage(rest);
+        }
+        const tool_calls = rawToolCalls.map(_coerceToolCall);
+        return new ai_js_1.AIMessage({ ...other, tool_calls });
     }
     else if (type === "system") {
         return new system_js_1.SystemMessage(rest);

@@ -42,6 +73,10 @@ function coerceMessageLikeToMessage(messageLike) {
         const [type, content] = messageLike;
         return _constructMessageFromParams({ type, content });
     }
+    else if ((0, base_js_1._isMessageFieldWithRole)(messageLike)) {
+        const { role: type, ...rest } = messageLike;
+        return _constructMessageFromParams({ ...rest, type });
+    }
     else {
         return _constructMessageFromParams(messageLike);
     }
package/dist/messages/utils.js
CHANGED
@@ -1,17 +1,48 @@
+import { _isToolCall } from "../tools/utils.js";
 import { AIMessage, AIMessageChunk } from "./ai.js";
-import { isBaseMessage, } from "./base.js";
+import { isBaseMessage, _isMessageFieldWithRole, } from "./base.js";
 import { ChatMessage, ChatMessageChunk, } from "./chat.js";
 import { FunctionMessage, FunctionMessageChunk, } from "./function.js";
 import { HumanMessage, HumanMessageChunk } from "./human.js";
 import { SystemMessage, SystemMessageChunk } from "./system.js";
-import { ToolMessage } from "./tool.js";
+import { ToolMessage, } from "./tool.js";
+function _coerceToolCall(toolCall) {
+    if (_isToolCall(toolCall)) {
+        return toolCall;
+    }
+    else if (typeof toolCall.id === "string" &&
+        toolCall.type === "function" &&
+        typeof toolCall.function === "object" &&
+        toolCall.function !== null &&
+        "arguments" in toolCall.function &&
+        typeof toolCall.function.arguments === "string" &&
+        "name" in toolCall.function &&
+        typeof toolCall.function.name === "string") {
+        // Handle OpenAI tool call format
+        return {
+            id: toolCall.id,
+            args: JSON.parse(toolCall.function.arguments),
+            name: toolCall.function.name,
+            type: "tool_call",
+        };
+    }
+    else {
+        // TODO: Throw an error?
+        return toolCall;
+    }
+}
 function _constructMessageFromParams(params) {
     const { type, ...rest } = params;
     if (type === "human" || type === "user") {
         return new HumanMessage(rest);
     }
     else if (type === "ai" || type === "assistant") {
-        return new AIMessage(rest);
+        const { tool_calls: rawToolCalls, ...other } = rest;
+        if (!Array.isArray(rawToolCalls)) {
+            return new AIMessage(rest);
+        }
+        const tool_calls = rawToolCalls.map(_coerceToolCall);
+        return new AIMessage({ ...other, tool_calls });
     }
     else if (type === "system") {
         return new SystemMessage(rest);

@@ -39,6 +70,10 @@ export function coerceMessageLikeToMessage(messageLike) {
         const [type, content] = messageLike;
         return _constructMessageFromParams({ type, content });
     }
+    else if (_isMessageFieldWithRole(messageLike)) {
+        const { role: type, ...rest } = messageLike;
+        return _constructMessageFromParams({ ...rest, type });
+    }
     else {
         return _constructMessageFromParams(messageLike);
     }
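Combined, the utils changes let coerceMessageLikeToMessage accept role-keyed inputs and normalise OpenAI-format tool calls on assistant messages into LangChain ToolCall objects. A sketch (assumes coerceMessageLikeToMessage and AIMessage are re-exported from "@langchain/core/messages", as the other helpers in this file are):

import { coerceMessageLikeToMessage, AIMessage } from "@langchain/core/messages";

// Role-based shorthand is coerced to the matching message class.
const human = coerceMessageLikeToMessage({ role: "user", content: "Add 1 and 2" });

// OpenAI-format tool calls are rewritten by _coerceToolCall into
// { id, name, args, type: "tool_call" } entries on the AIMessage.
const ai = coerceMessageLikeToMessage({
  role: "assistant",
  content: "",
  tool_calls: [
    { id: "call_abc", type: "function", function: { name: "add", arguments: '{"a":1,"b":2}' } },
  ],
}) as AIMessage;
console.log(ai.tool_calls);
// [{ id: "call_abc", name: "add", args: { a: 1, b: 2 }, type: "tool_call" }]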
package/dist/tracers/event_stream.cjs
CHANGED

@@ -279,7 +279,7 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
         if (runInfo.runType === "chat_model") {
             eventName = "on_chat_model_stream";
             if (kwargs?.chunk === undefined) {
-                chunk = new ai_js_1.AIMessageChunk({ content: token });
+                chunk = new ai_js_1.AIMessageChunk({ content: token, id: `run-${run.id}` });
             }
             else {
                 chunk = kwargs.chunk.message;
package/dist/tracers/event_stream.js
CHANGED

@@ -275,7 +275,7 @@ export class EventStreamCallbackHandler extends BaseTracer {
         if (runInfo.runType === "chat_model") {
             eventName = "on_chat_model_stream";
             if (kwargs?.chunk === undefined) {
-                chunk = new AIMessageChunk({ content: token });
+                chunk = new AIMessageChunk({ content: token, id: `run-${run.id}` });
             }
             else {
                 chunk = kwargs.chunk.message;
package/dist/tracers/log_stream.cjs
CHANGED

@@ -401,7 +401,10 @@ class LogStreamCallbackHandler extends base_js_1.BaseTracer {
             streamedOutputValue = kwargs?.chunk;
         }
         else {
-            streamedOutputValue = new ai_js_1.AIMessageChunk(
+            streamedOutputValue = new ai_js_1.AIMessageChunk({
+                id: `run-${run.id}`,
+                content: token,
+            });
         }
     }
     else {
package/dist/tracers/log_stream.js
CHANGED

@@ -395,7 +395,10 @@ export class LogStreamCallbackHandler extends BaseTracer {
             streamedOutputValue = kwargs?.chunk;
         }
         else {
-            streamedOutputValue = new AIMessageChunk(
+            streamedOutputValue = new AIMessageChunk({
+                id: `run-${run.id}`,
+                content: token,
+            });
         }
     }
     else {
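The tracer hunks apply the same id treatment to fallback chunks synthesised from bare LLM tokens: when no chunk accompanies a streamed token, the AIMessageChunk created by the event-stream and log-stream handlers is stamped with `run-${run.id}`. A consumer-side sketch (ChatOpenAI is an illustrative assumption; streamEvents v2 is the pre-existing API, not part of this diff):

// Sketch only — assumes @langchain/openai is installed.
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({ model: "gpt-4o-mini" });
for await (const event of model.streamEvents("Hi", { version: "v2" })) {
  if (event.event === "on_chat_model_stream") {
    // Synthesised chunks now carry an id tied to the run.
    console.log(event.data.chunk.id); // e.g. "run-<run uuid>"
  }
}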