@langchain/core 0.2.30 → 0.3.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -4
- package/dist/callbacks/base.cjs +1 -1
- package/dist/callbacks/base.js +1 -1
- package/dist/callbacks/manager.cjs +1 -11
- package/dist/callbacks/manager.d.ts +1 -1
- package/dist/callbacks/manager.js +1 -11
- package/dist/language_models/chat_models.cjs +15 -0
- package/dist/language_models/chat_models.js +15 -0
- package/dist/messages/base.cjs +13 -1
- package/dist/messages/base.d.ts +23 -22
- package/dist/messages/base.js +11 -0
- package/dist/messages/utils.cjs +36 -1
- package/dist/messages/utils.js +38 -3
- package/dist/runnables/base.cjs +3 -3
- package/dist/runnables/base.d.ts +2 -8
- package/dist/runnables/base.js +3 -3
- package/dist/tracers/event_stream.cjs +1 -1
- package/dist/tracers/event_stream.js +1 -1
- package/dist/tracers/log_stream.cjs +4 -1
- package/dist/tracers/log_stream.js +4 -1
- package/dist/tracers/root_listener.cjs +3 -18
- package/dist/tracers/root_listener.d.ts +6 -15
- package/dist/tracers/root_listener.js +3 -18
- package/package.json +1 -1
package/README.md
CHANGED
@@ -89,7 +89,7 @@ For more check out the [LCEL docs](https://js.langchain.com/v0.2/docs/concepts#l
 
 ## 📕 Releases & Versioning
 
-`@langchain/core` is currently on version `0.
+`@langchain/core` is currently on version `0.3.x`.
 
 As `@langchain/core` contains the base abstractions and runtime for the whole LangChain ecosystem, we will communicate any breaking changes with advance notice and version bumps. The exception for this is anything in `@langchain/core/beta`. The reason for `@langchain/core/beta` is that given the rate of change of the field, being able to move quickly is still a priority, and this module is our attempt to do so.
 
@@ -109,7 +109,7 @@ Patch version increases will occur for:
 Other LangChain packages should add this package as a dependency and extend the classes within.
 For an example, see the [@langchain/anthropic](https://github.com/langchain-ai/langchainjs/tree/main/libs/langchain-anthropic) in this repo.
 
-Because all used packages must share the same version of core,
+Because all used packages must share the same version of core, packages should never directly depend on `@langchain/core`. Instead they should have core as a peer dependency and a dev dependency. We suggest using a tilde dependency to allow for different (backwards-compatible) patch versions:
 
 ```json
 {
@@ -120,8 +120,13 @@ Because all used packages must share the same version of core, we suggest using
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.10.0"
-
+    "@anthropic-ai/sdk": "^0.10.0"
+  },
+  "peerDependencies": {
+    "@langchain/core": "~0.3.0"
+  },
+  "devDependencies": {
+    "@langchain/core": "~0.3.0"
   }
 }
 ```

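Note: the README now tells integration packages to declare `@langchain/core` only as a peer and dev dependency. For applications that pull in several integration packages, a common companion step (not part of this diff, shown here only as a hedged sketch with illustrative version numbers) is to pin a single core version through the package manager's resolution mechanism, e.g. npm `overrides` or yarn `resolutions`:

```json
{
  "name": "my-app",
  "dependencies": {
    "@langchain/anthropic": "^0.3.0",
    "@langchain/core": "~0.3.0"
  },
  "overrides": {
    "@langchain/core": "~0.3.0"
  },
  "resolutions": {
    "@langchain/core": "~0.3.0"
  }
}
```

Only one of `overrides` (npm) or `resolutions` (yarn) is needed, depending on the package manager in use.
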
package/dist/callbacks/base.cjs
CHANGED
@@ -125,7 +125,7 @@ class BaseCallbackHandler extends BaseCallbackHandlerMethodsClass {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND")
+            value: (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND") === "false"
         });
         this.lc_kwargs = input || {};
         if (input) {

package/dist/callbacks/base.js
CHANGED
@@ -99,7 +99,7 @@ export class BaseCallbackHandler extends BaseCallbackHandlerMethodsClass {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND")
+            value: getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") === "false"
         });
         this.lc_kwargs = input || {};
         if (input) {

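Note: in both builds above, the flag initialized via `Object.defineProperty` now evaluates to `true` only when `LANGCHAIN_CALLBACKS_BACKGROUND` is explicitly `"false"`, which reads as callbacks being backgrounded by default in 0.3. A minimal sketch of how a consumer might opt back into blocking behavior, assuming this flag is the handler's await/background switch and that `awaitAllCallbacks` is exported from `@langchain/core/callbacks/promises`:

```ts
// Sketch only; the env-var semantics are taken from the diff above.
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";

// Option 1: restore pre-0.3 blocking callback handling process-wide.
process.env.LANGCHAIN_CALLBACKS_BACKGROUND = "false";

// Option 2: keep backgrounding, but flush pending callback/tracing work
// before a short-lived (e.g. serverless) process exits.
export async function handler(runMyChain: () => Promise<string>): Promise<string> {
  try {
    return await runMyChain();
  } finally {
    await awaitAllCallbacks();
  }
}
```
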
package/dist/callbacks/manager.cjs
CHANGED
@@ -10,16 +10,6 @@ const tracer_langchain_js_1 = require("../tracers/tracer_langchain.cjs");
 const promises_js_1 = require("./promises.cjs");
 const callbacks_js_1 = require("../utils/callbacks.cjs");
 const base_js_2 = require("../tracers/base.cjs");
-if (
-/* #__PURE__ */ (0, callbacks_js_1.isTracingEnabled)() &&
-    /* #__PURE__ */ (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND") !==
-        "true") {
-    /* #__PURE__ */ console.warn([
-        "[WARN]: You have enabled LangSmith tracing without backgrounding callbacks.",
-        "[WARN]: If you are not using a serverless environment where you must wait for tracing calls to finish,",
-        `[WARN]: we suggest setting "process.env.LANGCHAIN_CALLBACKS_BACKGROUND=true" to avoid additional latency.`,
-    ].join("\n"));
-}
 function parseCallbackConfigArg(arg) {
     if (!arg) {
         return {};
@@ -729,7 +719,7 @@ class CallbackManager extends BaseCallbackManager {
         manager.addHandler(new Handler());
         return manager;
     }
-    static
+    static configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
         return this._configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options);
     }
     // TODO: Deprecate async method in favor of this one.

package/dist/callbacks/manager.d.ts
CHANGED
@@ -152,7 +152,7 @@ export declare class CallbackManager extends BaseCallbackManager implements Base
     removeMetadata(metadata: Record<string, unknown>): void;
     copy(additionalHandlers?: BaseCallbackHandler[], inherit?: boolean): CallbackManager;
     static fromHandlers(handlers: CallbackHandlerMethods): CallbackManager;
-    static configure(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions):
+    static configure(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions): CallbackManager | undefined;
     static _configureSync(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions): CallbackManager | undefined;
 }
 export declare function ensureHandler(handler: BaseCallbackHandler | CallbackHandlerMethods): BaseCallbackHandler;

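Note: the declaration above makes explicit that `CallbackManager.configure` is synchronous and can return `undefined` when nothing (handlers, tracing, tags, metadata) is configured. A hedged call-site sketch:

```ts
// Sketch: guard the `CallbackManager | undefined` return type declared above.
import { CallbackManager } from "@langchain/core/callbacks/manager";

const manager = CallbackManager.configure(
  undefined,   // inheritableHandlers
  undefined,   // localHandlers
  ["my-tag"]   // inheritableTags
);

if (manager !== undefined) {
  manager.addTags(["extra-tag"]);
}
```
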
package/dist/callbacks/manager.js
CHANGED
@@ -7,16 +7,6 @@ import { LangChainTracer, } from "../tracers/tracer_langchain.js";
 import { consumeCallback } from "./promises.js";
 import { isTracingEnabled } from "../utils/callbacks.js";
 import { isBaseTracer } from "../tracers/base.js";
-if (
-/* #__PURE__ */ isTracingEnabled() &&
-    /* #__PURE__ */ getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") !==
-        "true") {
-    /* #__PURE__ */ console.warn([
-        "[WARN]: You have enabled LangSmith tracing without backgrounding callbacks.",
-        "[WARN]: If you are not using a serverless environment where you must wait for tracing calls to finish,",
-        `[WARN]: we suggest setting "process.env.LANGCHAIN_CALLBACKS_BACKGROUND=true" to avoid additional latency.`,
-    ].join("\n"));
-}
 export function parseCallbackConfigArg(arg) {
     if (!arg) {
         return {};
@@ -719,7 +709,7 @@ export class CallbackManager extends BaseCallbackManager {
         manager.addHandler(new Handler());
         return manager;
     }
-    static
+    static configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
         return this._configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options);
     }
     // TODO: Deprecate async method in favor of this one.

package/dist/language_models/chat_models.cjs
CHANGED
@@ -90,6 +90,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
         let generationChunk;
         try {
             for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
+                if (chunk.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        chunk.message._updateId(`run-${runId}`);
+                }
                 chunk.message.response_metadata = {
                     ...chunk.generationInfo,
                     ...chunk.message.response_metadata,
@@ -150,6 +155,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
         const stream = await this._streamResponseChunks(baseMessages[0], parsedOptions, runManagers?.[0]);
         let aggregated;
         for await (const chunk of stream) {
+            if (chunk.message.id == null) {
+                const runId = runManagers?.at(0)?.runId;
+                if (runId != null)
+                    chunk.message._updateId(`run-${runId}`);
+            }
             if (aggregated === undefined) {
                 aggregated = chunk;
             }
@@ -179,6 +189,11 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             if (pResult.status === "fulfilled") {
                 const result = pResult.value;
                 for (const generation of result.generations) {
+                    if (generation.message.id == null) {
+                        const runId = runManagers?.at(0)?.runId;
+                        if (runId != null)
+                            generation.message._updateId(`run-${runId}`);
+                    }
                     generation.message.response_metadata = {
                         ...generation.generationInfo,
                         ...generation.message.response_metadata,

package/dist/language_models/chat_models.js
CHANGED
@@ -86,6 +86,11 @@ export class BaseChatModel extends BaseLanguageModel {
         let generationChunk;
         try {
             for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
+                if (chunk.message.id == null) {
+                    const runId = runManagers?.at(0)?.runId;
+                    if (runId != null)
+                        chunk.message._updateId(`run-${runId}`);
+                }
                 chunk.message.response_metadata = {
                     ...chunk.generationInfo,
                     ...chunk.message.response_metadata,
@@ -146,6 +151,11 @@ export class BaseChatModel extends BaseLanguageModel {
         const stream = await this._streamResponseChunks(baseMessages[0], parsedOptions, runManagers?.[0]);
         let aggregated;
         for await (const chunk of stream) {
+            if (chunk.message.id == null) {
+                const runId = runManagers?.at(0)?.runId;
+                if (runId != null)
+                    chunk.message._updateId(`run-${runId}`);
+            }
             if (aggregated === undefined) {
                 aggregated = chunk;
             }
@@ -175,6 +185,11 @@ export class BaseChatModel extends BaseLanguageModel {
             if (pResult.status === "fulfilled") {
                 const result = pResult.value;
                 for (const generation of result.generations) {
+                    if (generation.message.id == null) {
+                        const runId = runManagers?.at(0)?.runId;
+                        if (runId != null)
+                            generation.message._updateId(`run-${runId}`);
+                    }
                     generation.message.response_metadata = {
                         ...generation.generationInfo,
                         ...generation.message.response_metadata,

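Note: both chat model builds above now backfill a missing message `id` from the active run, using the `run-<runId>` convention and the internal `_updateId` helper. A hedged sketch of observing this with the fake chat model bundled for testing (assuming `FakeListChatModel` is available from `@langchain/core/utils/testing` and supplies no ids of its own):

```ts
// Sketch: streamed chunks without a provider-assigned id should fall back to
// an id of the form "run-<uuid>" (per the diff above).
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["hello world"] });

for await (const chunk of await model.stream("hi")) {
  console.log(chunk.id, JSON.stringify(chunk.content));
}
```
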
package/dist/messages/base.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isBaseMessageChunk = exports.isBaseMessage = exports.BaseMessageChunk = exports._mergeObj = exports._mergeLists = exports._mergeDicts = exports.isOpenAIToolCallArray = exports.BaseMessage = exports._mergeStatus = exports.mergeContent = void 0;
+exports.isBaseMessageChunk = exports.isBaseMessage = exports._isMessageFieldWithRole = exports.BaseMessageChunk = exports._mergeObj = exports._mergeLists = exports._mergeDicts = exports.isOpenAIToolCallArray = exports.BaseMessage = exports._mergeStatus = exports.mergeContent = void 0;
 const serializable_js_1 = require("../load/serializable.cjs");
 function mergeContent(firstContent, secondContent) {
     // If first content is a string
@@ -181,6 +181,14 @@ class BaseMessage extends serializable_js_1.Serializable {
             response_metadata: this.response_metadata,
         };
     }
+    // this private method is used to update the ID for the runtime
+    // value as well as in lc_kwargs for serialisation
+    _updateId(value) {
+        this.id = value;
+        // lc_attributes wouldn't work here, because jest compares the
+        // whole object
+        this.lc_kwargs.id = value;
+    }
     get [Symbol.toStringTag]() {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         return this.constructor.lc_name();
@@ -317,6 +325,10 @@ exports._mergeObj = _mergeObj;
 class BaseMessageChunk extends BaseMessage {
 }
 exports.BaseMessageChunk = BaseMessageChunk;
+function _isMessageFieldWithRole(x) {
+    return typeof x.role === "string";
+}
+exports._isMessageFieldWithRole = _isMessageFieldWithRole;
 function isBaseMessage(messageLike) {
     return typeof messageLike?._getType === "function";
 }

package/dist/messages/base.d.ts
CHANGED
@@ -55,30 +55,12 @@ export interface FunctionCall {
      */
     name: string;
 }
-/**
- * @deprecated
- * Import as "OpenAIToolCall" instead
- */
-export interface ToolCall {
-    /**
-     * The ID of the tool call.
-     */
-    id: string;
-    /**
-     * The function that the model called.
-     */
-    function: FunctionCall;
-    /**
-     * The type of the tool. Currently, only `function` is supported.
-     */
-    type: "function";
-}
 export type BaseMessageFields = {
     content: MessageContent;
     name?: string;
     additional_kwargs?: {
         function_call?: FunctionCall;
-        tool_calls?:
+        tool_calls?: OpenAIToolCall[];
         [key: string]: unknown;
     };
 /** Response metadata. For example: response headers, logprobs, token counts. */
@@ -134,10 +116,23 @@ export declare abstract class BaseMessage extends Serializable implements BaseMe
     toDict(): StoredMessage;
     static lc_name(): string;
     get _printableFields(): Record<string, unknown>;
+    _updateId(value: string | undefined): void;
     get [Symbol.toStringTag](): any;
 }
-export type OpenAIToolCall =
-
+export type OpenAIToolCall = {
+    /**
+     * The ID of the tool call.
+     */
+    id: string;
+    /**
+     * The function that the model called.
+     */
+    function: FunctionCall;
+    /**
+     * The type of the tool. Currently, only `function` is supported.
+     */
+    type: "function";
+    index?: number;
 };
 export declare function isOpenAIToolCallArray(value?: unknown): value is OpenAIToolCall[];
 export declare function _mergeDicts(left: Record<string, any>, right: Record<string, any>): Record<string, any>;
@@ -153,9 +148,15 @@ export declare function _mergeObj<T = any>(left: T | undefined, right: T | undef
 export declare abstract class BaseMessageChunk extends BaseMessage {
     abstract concat(chunk: BaseMessageChunk): BaseMessageChunk;
 }
+export type MessageFieldWithRole = {
+    role: StringWithAutocomplete<"user" | "assistant" | MessageType>;
+    content: MessageContent;
+    name?: string;
+} & Record<string, unknown>;
+export declare function _isMessageFieldWithRole(x: BaseMessageLike): x is MessageFieldWithRole;
 export type BaseMessageLike = BaseMessage | ({
     type: MessageType | "user" | "assistant" | "placeholder";
-} & BaseMessageFields & Record<string, unknown>) | [
+} & BaseMessageFields & Record<string, unknown>) | MessageFieldWithRole | [
     StringWithAutocomplete<MessageType | "user" | "assistant" | "placeholder">,
     MessageContent
 ] | string;

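Note: the declarations above fold the deprecated `ToolCall` interface into `OpenAIToolCall` and introduce `MessageFieldWithRole`, so plain `{ role, content }` objects are now part of `BaseMessageLike`. A type-level sketch, assuming both types are re-exported from `@langchain/core/messages`:

```ts
// Sketch: role-style objects now type-check as BaseMessageLike.
import type {
  BaseMessageLike,
  MessageFieldWithRole,
} from "@langchain/core/messages";

const roleStyle: MessageFieldWithRole = {
  role: "assistant",
  content: "The answer is 42.",
};

const history: BaseMessageLike[] = [
  { role: "user", content: "What is the answer?" },
  roleStyle,
  ["system", "Keep replies short."], // tuple form is still accepted
];

console.log(history.length);
```
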
package/dist/messages/base.js
CHANGED
@@ -176,6 +176,14 @@ export class BaseMessage extends Serializable {
             response_metadata: this.response_metadata,
         };
     }
+    // this private method is used to update the ID for the runtime
+    // value as well as in lc_kwargs for serialisation
+    _updateId(value) {
+        this.id = value;
+        // lc_attributes wouldn't work here, because jest compares the
+        // whole object
+        this.lc_kwargs.id = value;
+    }
     get [Symbol.toStringTag]() {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         return this.constructor.lc_name();
@@ -306,6 +314,9 @@ export function _mergeObj(left, right) {
  */
 export class BaseMessageChunk extends BaseMessage {
 }
+export function _isMessageFieldWithRole(x) {
+    return typeof x.role === "string";
+}
 export function isBaseMessage(messageLike) {
     return typeof messageLike?._getType === "function";
 }

package/dist/messages/utils.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.convertToChunk = exports.mapChatMessagesToStoredMessages = exports.mapStoredMessagesToChatMessages = exports.mapStoredMessageToChatMessage = exports.getBufferString = exports.coerceMessageLikeToMessage = void 0;
+const utils_js_1 = require("../tools/utils.cjs");
 const ai_js_1 = require("./ai.cjs");
 const base_js_1 = require("./base.cjs");
 const chat_js_1 = require("./chat.cjs");
@@ -8,13 +9,43 @@ const function_js_1 = require("./function.cjs");
 const human_js_1 = require("./human.cjs");
 const system_js_1 = require("./system.cjs");
 const tool_js_1 = require("./tool.cjs");
+function _coerceToolCall(toolCall) {
+    if ((0, utils_js_1._isToolCall)(toolCall)) {
+        return toolCall;
+    }
+    else if (typeof toolCall.id === "string" &&
+        toolCall.type === "function" &&
+        typeof toolCall.function === "object" &&
+        toolCall.function !== null &&
+        "arguments" in toolCall.function &&
+        typeof toolCall.function.arguments === "string" &&
+        "name" in toolCall.function &&
+        typeof toolCall.function.name === "string") {
+        // Handle OpenAI tool call format
+        return {
+            id: toolCall.id,
+            args: JSON.parse(toolCall.function.arguments),
+            name: toolCall.function.name,
+            type: "tool_call",
+        };
+    }
+    else {
+        // TODO: Throw an error?
+        return toolCall;
+    }
+}
 function _constructMessageFromParams(params) {
     const { type, ...rest } = params;
     if (type === "human" || type === "user") {
         return new human_js_1.HumanMessage(rest);
     }
     else if (type === "ai" || type === "assistant") {
-
+        const { tool_calls: rawToolCalls, ...other } = rest;
+        if (!Array.isArray(rawToolCalls)) {
+            return new ai_js_1.AIMessage(rest);
+        }
+        const tool_calls = rawToolCalls.map(_coerceToolCall);
+        return new ai_js_1.AIMessage({ ...other, tool_calls });
     }
     else if (type === "system") {
         return new system_js_1.SystemMessage(rest);
@@ -42,6 +73,10 @@ function coerceMessageLikeToMessage(messageLike) {
         const [type, content] = messageLike;
         return _constructMessageFromParams({ type, content });
     }
+    else if ((0, base_js_1._isMessageFieldWithRole)(messageLike)) {
+        const { role: type, ...rest } = messageLike;
+        return _constructMessageFromParams({ ...rest, type });
+    }
     else {
         return _constructMessageFromParams(messageLike);
     }

package/dist/messages/utils.js
CHANGED
@@ -1,17 +1,48 @@
+import { _isToolCall } from "../tools/utils.js";
 import { AIMessage, AIMessageChunk } from "./ai.js";
-import { isBaseMessage, } from "./base.js";
+import { isBaseMessage, _isMessageFieldWithRole, } from "./base.js";
 import { ChatMessage, ChatMessageChunk, } from "./chat.js";
 import { FunctionMessage, FunctionMessageChunk, } from "./function.js";
 import { HumanMessage, HumanMessageChunk } from "./human.js";
 import { SystemMessage, SystemMessageChunk } from "./system.js";
-import { ToolMessage } from "./tool.js";
+import { ToolMessage, } from "./tool.js";
+function _coerceToolCall(toolCall) {
+    if (_isToolCall(toolCall)) {
+        return toolCall;
+    }
+    else if (typeof toolCall.id === "string" &&
+        toolCall.type === "function" &&
+        typeof toolCall.function === "object" &&
+        toolCall.function !== null &&
+        "arguments" in toolCall.function &&
+        typeof toolCall.function.arguments === "string" &&
+        "name" in toolCall.function &&
+        typeof toolCall.function.name === "string") {
+        // Handle OpenAI tool call format
+        return {
+            id: toolCall.id,
+            args: JSON.parse(toolCall.function.arguments),
+            name: toolCall.function.name,
+            type: "tool_call",
+        };
+    }
+    else {
+        // TODO: Throw an error?
+        return toolCall;
+    }
+}
 function _constructMessageFromParams(params) {
     const { type, ...rest } = params;
     if (type === "human" || type === "user") {
         return new HumanMessage(rest);
     }
     else if (type === "ai" || type === "assistant") {
-
+        const { tool_calls: rawToolCalls, ...other } = rest;
+        if (!Array.isArray(rawToolCalls)) {
+            return new AIMessage(rest);
+        }
+        const tool_calls = rawToolCalls.map(_coerceToolCall);
+        return new AIMessage({ ...other, tool_calls });
     }
     else if (type === "system") {
         return new SystemMessage(rest);
@@ -39,6 +70,10 @@ export function coerceMessageLikeToMessage(messageLike) {
         const [type, content] = messageLike;
         return _constructMessageFromParams({ type, content });
     }
+    else if (_isMessageFieldWithRole(messageLike)) {
+        const { role: type, ...rest } = messageLike;
+        return _constructMessageFromParams({ ...rest, type });
+    }
     else {
         return _constructMessageFromParams(messageLike);
     }

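Note: with the coercion changes above, `coerceMessageLikeToMessage` accepts role-style objects and normalizes OpenAI-format `tool_calls` on assistant messages into LangChain `tool_call` objects (parsing the stringified arguments). A hedged sketch, assuming the function is re-exported from `@langchain/core/messages`:

```ts
// Sketch: a role-style assistant message with an OpenAI-shaped tool call
// should coerce to an AIMessage with a parsed `tool_calls` array.
import { coerceMessageLikeToMessage } from "@langchain/core/messages";

const message = coerceMessageLikeToMessage({
  role: "assistant",
  content: "",
  tool_calls: [
    {
      id: "call_123",
      type: "function",
      function: { name: "get_weather", arguments: '{"city":"Paris"}' },
    },
  ],
});

// Expected (not guaranteed by this sketch): tool_calls like
// [{ name: "get_weather", args: { city: "Paris" }, id: "call_123", type: "tool_call" }]
console.log(message._getType(), (message as any).tool_calls);
```
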
package/dist/runnables/base.cjs
CHANGED
@@ -942,7 +942,7 @@ class RunnableEach extends Runnable {
      * @returns A promise that resolves to the output of the runnable.
      */
     async invoke(inputs, config) {
-        return this._callWithConfig(this._invoke, inputs, config);
+        return this._callWithConfig(this._invoke.bind(this), inputs, config);
     }
     /**
      * A helper method that is used to invoke the runnable with the specified input and configuration.
@@ -1026,7 +1026,7 @@ class RunnableRetry extends RunnableBinding {
      * @returns A promise that resolves to the output of the runnable.
      */
     async invoke(input, config) {
-        return this._callWithConfig(this._invoke, input, config);
+        return this._callWithConfig(this._invoke.bind(this), input, config);
     }
     async _batch(inputs, configs, runManagers, batchOptions) {
         const resultsMap = {};
@@ -1590,7 +1590,7 @@ class RunnableLambda extends Runnable {
         });
     }
     async invoke(input, options) {
-        return this._callWithConfig(this._invoke, input, options);
+        return this._callWithConfig(this._invoke.bind(this), input, options);
     }
     async *_transform(generator, runManager, config) {
         let finalChunk;

package/dist/runnables/base.d.ts
CHANGED
@@ -11,13 +11,7 @@ import { Run } from "../tracers/base.js";
 import { Graph } from "./graph.js";
 import { ToolCall } from "../messages/tool.js";
 export { type RunnableInterface, RunnableBatchOptions };
-export type RunnableFunc<RunInput, RunOutput> = (input: RunInput, options
-    /** @deprecated Use top-level config fields instead. */
-    config?: RunnableConfig;
-} & RunnableConfig) | Record<string, any> | (Record<string, any> & {
-    /** @deprecated Use top-level config fields instead. */
-    config: RunnableConfig;
-} & RunnableConfig)) => RunOutput | Promise<RunOutput>;
+export type RunnableFunc<RunInput, RunOutput> = (input: RunInput, options: RunnableConfig | Record<string, any> | (Record<string, any> & RunnableConfig)) => RunOutput | Promise<RunOutput>;
 export type RunnableMapLike<RunInput, RunOutput> = {
     [K in keyof RunOutput]: RunnableLike<RunInput, RunOutput[K]>;
 };
@@ -351,7 +345,7 @@ export declare class RunnableBinding<RunInput, RunOutput, CallOptions extends Ru
     batch(inputs: RunInput[], options?: Partial<CallOptions> | Partial<CallOptions>[], batchOptions?: RunnableBatchOptions): Promise<(RunOutput | Error)[]>;
     _streamIterator(input: RunInput, options?: Partial<CallOptions> | undefined): AsyncGenerator<Awaited<RunOutput>, void, unknown>;
     stream(input: RunInput, options?: Partial<CallOptions> | undefined): Promise<IterableReadableStream<RunOutput>>;
-    transform(generator: AsyncGenerator<RunInput>, options
+    transform(generator: AsyncGenerator<RunInput>, options?: Partial<CallOptions>): AsyncGenerator<RunOutput>;
     streamEvents(input: RunInput, options: Partial<CallOptions> & {
         version: "v1" | "v2";
     }, streamOptions?: Omit<LogStreamCallbackHandlerInput, "autoClose">): IterableReadableStream<StreamEvent>;

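Note: `RunnableFunc` above drops the deprecated nested `config` field overloads; the second argument is simply the top-level config/options object. A hedged sketch with `RunnableLambda` (function body and names are illustrative):

```ts
// Sketch: read fields like `tags` from the top-level config argument.
import { RunnableLambda } from "@langchain/core/runnables";

const shout = RunnableLambda.from((input: string, config) => {
  const tags = config?.tags ?? [];
  return `${input.toUpperCase()} [${tags.join(",")}]`;
});

console.log(await shout.invoke("hello", { tags: ["demo"] })); // "HELLO [demo]"
```
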
package/dist/runnables/base.js
CHANGED
@@ -933,7 +933,7 @@ export class RunnableEach extends Runnable {
      * @returns A promise that resolves to the output of the runnable.
      */
     async invoke(inputs, config) {
-        return this._callWithConfig(this._invoke, inputs, config);
+        return this._callWithConfig(this._invoke.bind(this), inputs, config);
     }
     /**
      * A helper method that is used to invoke the runnable with the specified input and configuration.
@@ -1016,7 +1016,7 @@ export class RunnableRetry extends RunnableBinding {
      * @returns A promise that resolves to the output of the runnable.
      */
     async invoke(input, config) {
-        return this._callWithConfig(this._invoke, input, config);
+        return this._callWithConfig(this._invoke.bind(this), input, config);
     }
     async _batch(inputs, configs, runManagers, batchOptions) {
         const resultsMap = {};
@@ -1576,7 +1576,7 @@ export class RunnableLambda extends Runnable {
         });
     }
     async invoke(input, options) {
-        return this._callWithConfig(this._invoke, input, options);
+        return this._callWithConfig(this._invoke.bind(this), input, options);
     }
     async *_transform(generator, runManager, config) {
         let finalChunk;

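Note: the three `.bind(this)` fixes above address a general JavaScript pitfall rather than anything LangChain-specific: passing an unbound method as a callback detaches it from its instance, so `this` is `undefined` inside the method. A generic illustration (not the library's code):

```ts
// Generic sketch of the unbound-method pitfall fixed above.
class Greeter {
  private greeting = "hello";

  greet(name: string): string {
    return `${this.greeting}, ${name}`;
  }
}

function callLater(fn: (name: string) => string): string {
  return fn("world"); // `fn` is called without a receiver
}

const g = new Greeter();
// callLater(g.greet);                    // throws: cannot read "greeting" of undefined
console.log(callLater(g.greet.bind(g)));  // "hello, world"
```
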
package/dist/tracers/event_stream.cjs
CHANGED
@@ -279,7 +279,7 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
         if (runInfo.runType === "chat_model") {
             eventName = "on_chat_model_stream";
             if (kwargs?.chunk === undefined) {
-                chunk = new ai_js_1.AIMessageChunk({ content: token });
+                chunk = new ai_js_1.AIMessageChunk({ content: token, id: `run-${run.id}` });
             }
             else {
                 chunk = kwargs.chunk.message;

package/dist/tracers/event_stream.js
CHANGED
@@ -275,7 +275,7 @@ export class EventStreamCallbackHandler extends BaseTracer {
         if (runInfo.runType === "chat_model") {
             eventName = "on_chat_model_stream";
             if (kwargs?.chunk === undefined) {
-                chunk = new AIMessageChunk({ content: token });
+                chunk = new AIMessageChunk({ content: token, id: `run-${run.id}` });
             }
             else {
                 chunk = kwargs.chunk.message;

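Note: with the tracer changes above, the `AIMessageChunk` synthesized when a provider emits a bare token now carries an id of the form `run-<run.id>`. A hedged sketch of where that id surfaces when consuming the v2 event stream (any chat model instance would do; the id format is the convention added in this diff):

```ts
// Sketch: log the ids of streamed chat-model chunks via streamEvents v2.
import type { BaseChatModel } from "@langchain/core/language_models/chat_models";

export async function logChunkIds(model: BaseChatModel): Promise<void> {
  for await (const event of model.streamEvents("hi", { version: "v2" })) {
    if (event.event === "on_chat_model_stream") {
      console.log(event.data.chunk?.id); // e.g. "run-4f7c..."
    }
  }
}
```
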
package/dist/tracers/log_stream.cjs
CHANGED
@@ -401,7 +401,10 @@ class LogStreamCallbackHandler extends base_js_1.BaseTracer {
             streamedOutputValue = kwargs?.chunk;
         }
         else {
-            streamedOutputValue = new ai_js_1.AIMessageChunk(
+            streamedOutputValue = new ai_js_1.AIMessageChunk({
+                id: `run-${run.id}`,
+                content: token,
+            });
         }
     }
     else {

package/dist/tracers/log_stream.js
CHANGED
@@ -395,7 +395,10 @@ export class LogStreamCallbackHandler extends BaseTracer {
             streamedOutputValue = kwargs?.chunk;
         }
         else {
-            streamedOutputValue = new AIMessageChunk(
+            streamedOutputValue = new AIMessageChunk({
+                id: `run-${run.id}`,
+                content: token,
+            });
         }
     }
     else {

package/dist/tracers/root_listener.cjs
CHANGED
@@ -61,12 +61,7 @@ class RootListenersTracer extends base_js_1.BaseTracer {
         }
         this.rootId = run.id;
         if (this.argOnStart) {
-
-            await this.argOnStart(run);
-            }
-            else if (this.argOnStart.length === 2) {
-                await this.argOnStart(run, this.config);
-            }
+            await this.argOnStart(run, this.config);
         }
     }
     async onRunUpdate(run) {
@@ -75,21 +70,11 @@ class RootListenersTracer extends base_js_1.BaseTracer {
         }
         if (!run.error) {
             if (this.argOnEnd) {
-
-                await this.argOnEnd(run);
-                }
-                else if (this.argOnEnd.length === 2) {
-                    await this.argOnEnd(run, this.config);
-                }
+                await this.argOnEnd(run, this.config);
             }
         }
         else if (this.argOnError) {
-
-            await this.argOnError(run);
-            }
-            else if (this.argOnError.length === 2) {
-                await this.argOnError(run, this.config);
-            }
+            await this.argOnError(run, this.config);
         }
     }
 }

package/dist/tracers/root_listener.d.ts
CHANGED
@@ -5,23 +5,14 @@ export declare class RootListenersTracer extends BaseTracer {
     /** The Run's ID. Type UUID */
     rootId?: string;
     config: RunnableConfig;
-    argOnStart?:
-
-
-    };
-    argOnEnd?: {
-        (run: Run): void | Promise<void>;
-        (run: Run, config: RunnableConfig): void | Promise<void>;
-    };
-    argOnError?: {
-        (run: Run): void | Promise<void>;
-        (run: Run, config: RunnableConfig): void | Promise<void>;
-    };
+    argOnStart?: (run: Run, config: RunnableConfig) => void | Promise<void>;
+    argOnEnd?: (run: Run, config: RunnableConfig) => void | Promise<void>;
+    argOnError?: (run: Run, config: RunnableConfig) => void | Promise<void>;
     constructor({ config, onStart, onEnd, onError, }: {
         config: RunnableConfig;
-        onStart?: (run: Run, config
-        onEnd?: (run: Run, config
-        onError?: (run: Run, config
+        onStart?: (run: Run, config: RunnableConfig) => void | Promise<void>;
+        onEnd?: (run: Run, config: RunnableConfig) => void | Promise<void>;
+        onError?: (run: Run, config: RunnableConfig) => void | Promise<void>;
     });
     /**
      * This is a legacy method only called once for an entire run tree

package/dist/tracers/root_listener.js
CHANGED
@@ -58,12 +58,7 @@ export class RootListenersTracer extends BaseTracer {
         }
         this.rootId = run.id;
         if (this.argOnStart) {
-
-            await this.argOnStart(run);
-            }
-            else if (this.argOnStart.length === 2) {
-                await this.argOnStart(run, this.config);
-            }
+            await this.argOnStart(run, this.config);
         }
     }
     async onRunUpdate(run) {
@@ -72,21 +67,11 @@ export class RootListenersTracer extends BaseTracer {
         }
         if (!run.error) {
             if (this.argOnEnd) {
-
-                await this.argOnEnd(run);
-                }
-                else if (this.argOnEnd.length === 2) {
-                    await this.argOnEnd(run, this.config);
-                }
+                await this.argOnEnd(run, this.config);
             }
         }
         else if (this.argOnError) {
-
-            await this.argOnError(run);
-            }
-            else if (this.argOnError.length === 2) {
-                await this.argOnError(run, this.config);
-            }
+            await this.argOnError(run, this.config);
         }
     }
 }

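Note: the root listener simplification above means `onStart`/`onEnd`/`onError` listeners are now always invoked with both the run and the config, instead of being dispatched on the callback's declared arity. A hedged sketch using `withListeners` (names are illustrative):

```ts
// Sketch: listeners receive (run, config) regardless of how many parameters
// they declare (per the diff above).
import { RunnableLambda } from "@langchain/core/runnables";

const chain = RunnableLambda.from((x: number) => x * 2).withListeners({
  onEnd: (run, config) => {
    console.log(run.name, run.outputs, config?.tags);
  },
});

console.log(await chain.invoke(21, { tags: ["listener-demo"] })); // 42
```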