@langchain/core 0.3.28 → 0.3.29
This release adds a `disableStreaming` flag to `BaseChatModel`: the constructor now initializes the property to `false`, and `_streamIterator` bypasses `_streamResponseChunks` and defers to `invoke()` whenever the flag is set.

CommonJS build:

```diff
@@ -41,6 +41,12 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -68,7 +74,8 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
```
Type declarations:

```diff
@@ -26,7 +26,18 @@ export type SerializedLLM = {
 /**
  * Represents the parameters for a base chat model.
  */
-export type BaseChatModelParams = BaseLanguageModelParams;
+export type BaseChatModelParams = BaseLanguageModelParams & {
+    /**
+     * Whether to disable streaming.
+     *
+     * If streaming is bypassed, then `stream()` will defer to
+     * `invoke()`.
+     *
+     * - If true, will always bypass streaming case.
+     * - If false (default), will always use streaming case if available.
+     */
+    disableStreaming?: boolean;
+};
 /**
  * Represents the call options for a base chat model.
  */
@@ -69,6 +80,7 @@ export type BindToolsInput = StructuredToolInterface | Record<string, any> | Too
 export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, OutputMessageType extends BaseMessageChunk = AIMessageChunk> extends BaseLanguageModel<OutputMessageType, CallOptions> {
     ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
     lc_namespace: string[];
+    disableStreaming: boolean;
     constructor(fields: BaseChatModelParams);
     _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
     protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
```
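The new member is optional, so the widened `BaseChatModelParams` stays backward compatible. A minimal sketch of the type in use (the `maxRetries` field comes from the existing `BaseLanguageModelParams` side of the intersection):

```ts
import type { BaseChatModelParams } from "@langchain/core/language_models/chat_models";

// Existing fields and the new optional flag coexist in one params object.
const params: BaseChatModelParams = {
  maxRetries: 2,
  disableStreaming: true,
};
```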
ES module build:

```diff
@@ -37,6 +37,12 @@ export class BaseChatModel extends BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -64,7 +70,8 @@ export class BaseChatModel extends BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
```
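Taken together, the change means `stream()` degrades to a single-item stream when `disableStreaming` is set. A sketch of the behavior with a toy model (the `EchoChatModel` class below is invented for illustration; a real subclass would also accept call options and a run manager in `_generate`):

```ts
import {
  BaseChatModel,
  type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import { AIMessage, type BaseMessage } from "@langchain/core/messages";
import type { ChatResult } from "@langchain/core/outputs";

// Hypothetical model that echoes the last input message back.
class EchoChatModel extends BaseChatModel {
  constructor(fields: BaseChatModelParams = {}) {
    super(fields);
  }

  _llmType(): string {
    return "echo";
  }

  async _generate(messages: BaseMessage[]): Promise<ChatResult> {
    const text = String(messages[messages.length - 1].content);
    return { generations: [{ text, message: new AIMessage(text) }] };
  }
}

const model = new EchoChatModel({ disableStreaming: true });

// Because disableStreaming is true, _streamIterator takes the new branch
// and yields one complete message from invoke() instead of streaming chunks.
for await (const chunk of await model.stream("hello")) {
  console.log(chunk.content); // -> "hello"
}
```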