@langchain/core 0.3.28 → 0.3.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,16 +35,10 @@ const config_js_1 = require("../../runnables/config.cjs");
 async function dispatchCustomEvent(name, 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 payload, config) {
-    const callbackManager = await (0, config_js_1.getCallbackManagerForConfig)(config);
-    const parentRunId = callbackManager?.getParentRunId();
-    // We want to get the callback manager for the parent run.
-    // This is a work-around for now to be able to dispatch adhoc events from
-    // within a tool or a lambda and have the metadata events associated
-    // with the parent run rather than have a new run id generated for each.
-    if (callbackManager === undefined || parentRunId === undefined) {
+    if (config === undefined) {
         throw new Error([
             "Unable to dispatch a custom event without a parent run id.",
-            "
+            `"dispatchCustomEvent" can only be called from within an existing run (e.g.,`,
             "inside a tool or a RunnableLambda).",
             `\n\nIf you continue to see this error, please import from "@langchain/core/callbacks/dispatch/web"`,
             "and explicitly pass in a config parameter.",
@@ -54,8 +48,12 @@ payload, config) {
             "\n",
         ].join(" "));
     }
+    const callbackManager = await (0, config_js_1.getCallbackManagerForConfig)(config);
+    const parentRunId = callbackManager?.getParentRunId();
     // We pass parent id as the current run id here intentionally since events dispatch
     // from within things like RunnableLambda
-    await callbackManager.handleCustomEvent?.(name, payload, parentRunId);
+    if (callbackManager !== undefined && parentRunId !== undefined) {
+        await callbackManager.handleCustomEvent?.(name, payload, parentRunId);
+    }
 }
 exports.dispatchCustomEvent = dispatchCustomEvent;
@@ -32,16 +32,10 @@ import { getCallbackManagerForConfig, } from "../../runnables/config.js";
 export async function dispatchCustomEvent(name, 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 payload, config) {
-    const callbackManager = await getCallbackManagerForConfig(config);
-    const parentRunId = callbackManager?.getParentRunId();
-    // We want to get the callback manager for the parent run.
-    // This is a work-around for now to be able to dispatch adhoc events from
-    // within a tool or a lambda and have the metadata events associated
-    // with the parent run rather than have a new run id generated for each.
-    if (callbackManager === undefined || parentRunId === undefined) {
+    if (config === undefined) {
         throw new Error([
             "Unable to dispatch a custom event without a parent run id.",
-            "
+            `"dispatchCustomEvent" can only be called from within an existing run (e.g.,`,
             "inside a tool or a RunnableLambda).",
             `\n\nIf you continue to see this error, please import from "@langchain/core/callbacks/dispatch/web"`,
             "and explicitly pass in a config parameter.",
@@ -51,7 +45,11 @@ payload, config) {
             "\n",
         ].join(" "));
     }
+    const callbackManager = await getCallbackManagerForConfig(config);
+    const parentRunId = callbackManager?.getParentRunId();
     // We pass parent id as the current run id here intentionally since events dispatch
     // from within things like RunnableLambda
-    await callbackManager.handleCustomEvent?.(name, payload, parentRunId);
+    if (callbackManager !== undefined && parentRunId !== undefined) {
+        await callbackManager.handleCustomEvent?.(name, payload, parentRunId);
+    }
 }
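The hunks above change how dispatchCustomEvent resolves its callback manager: the config argument is now validated up front, and the handler is only invoked once both a callback manager and a parent run id are present. A minimal sketch of the calling pattern the reworked error message points to; the lambda name, event name, and payload below are illustrative and not part of this package:

import { RunnableLambda } from "@langchain/core/runnables";
import { dispatchCustomEvent } from "@langchain/core/callbacks/dispatch/web";

// Forward the lambda's runtime config so the event attaches to the parent run,
// which is exactly what the new `if (config === undefined)` check enforces.
const greet = RunnableLambda.from(async (name: string, config) => {
  await dispatchCustomEvent("greeting_started", { name }, config);
  return `Hello, ${name}!`;
});

// Custom events surface through streamEvents as "on_custom_event" entries.
for await (const event of greet.streamEvents("world", { version: "v2" })) {
  if (event.event === "on_custom_event") {
    console.log(event.name, event.data);
  }
}

Omitting the config argument here would now fail fast with the error text shown in the diff, rather than silently generating a fresh run id.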
@@ -41,6 +41,12 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -68,7 +74,8 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
@@ -26,7 +26,18 @@ export type SerializedLLM = {
 /**
  * Represents the parameters for a base chat model.
  */
-export type BaseChatModelParams = BaseLanguageModelParams
+export type BaseChatModelParams = BaseLanguageModelParams & {
+    /**
+     * Whether to disable streaming.
+     *
+     * If streaming is bypassed, then `stream()` will defer to
+     * `invoke()`.
+     *
+     * - If true, will always bypass streaming case.
+     * - If false (default), will always use streaming case if available.
+     */
+    disableStreaming?: boolean;
+};
 /**
  * Represents the call options for a base chat model.
  */
@@ -69,6 +80,7 @@ export type BindToolsInput = StructuredToolInterface | Record<string, any> | Too
 export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, OutputMessageType extends BaseMessageChunk = AIMessageChunk> extends BaseLanguageModel<OutputMessageType, CallOptions> {
     ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
     lc_namespace: string[];
+    disableStreaming: boolean;
     constructor(fields: BaseChatModelParams);
     _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
     protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
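The declaration changes above document the new disableStreaming option; combined with the _streamIterator guard, setting it makes stream() defer to invoke(). A minimal sketch of that behavior, assuming FakeListChatModel from @langchain/core/utils/testing as a stand-in model; note the diff only shows the property being initialized to false in the constructor, so the sketch toggles the writable instance property directly rather than relying on a constructor field:

import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["All at once"] });
// Writable instance property added in this release; defaults to false.
model.disableStreaming = true;

// With the flag set, stream() routes through invoke() and emits a single
// chunk instead of per-token chunks.
const stream = await model.stream("hi");
for await (const chunk of stream) {
  console.log(chunk.content); // "All at once"
}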
@@ -37,6 +37,12 @@ export class BaseChatModel extends BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -64,7 +70,8 @@ export class BaseChatModel extends BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
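For integration authors, the same flag can be set from a subclass constructor so that a model without a streaming endpoint still behaves sensibly under stream(). A hedged sketch; the EchoChatModel class and its behavior are purely illustrative, and only the disableStreaming field and the BaseChatModel subclassing surface come from this package:

import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { AIMessage, type BaseMessage } from "@langchain/core/messages";
import type { ChatResult } from "@langchain/core/outputs";

class EchoChatModel extends BaseChatModel {
  constructor(fields: BaseChatModelParams = {}) {
    super(fields);
    // No streaming endpoint: opt out so stream() falls back to invoke().
    this.disableStreaming = true;
  }

  _llmType(): string {
    return "echo";
  }

  // Echo the last message back as the model's reply.
  async _generate(messages: BaseMessage[]): Promise<ChatResult> {
    const text = String(messages[messages.length - 1].content);
    return { generations: [{ text, message: new AIMessage(text) }] };
  }
}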