@langchain/core 0.3.27 → 0.3.29
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

@@ -41,6 +41,12 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -68,7 +74,8 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
@@ -26,7 +26,18 @@ export type SerializedLLM = {
 /**
  * Represents the parameters for a base chat model.
  */
-export type BaseChatModelParams = BaseLanguageModelParams;
+export type BaseChatModelParams = BaseLanguageModelParams & {
+    /**
+     * Whether to disable streaming.
+     *
+     * If streaming is bypassed, then `stream()` will defer to
+     * `invoke()`.
+     *
+     * - If true, will always bypass streaming case.
+     * - If false (default), will always use streaming case if available.
+     */
+    disableStreaming?: boolean;
+};
 /**
  * Represents the call options for a base chat model.
  */
@@ -69,6 +80,7 @@ export type BindToolsInput = StructuredToolInterface | Record<string, any> | Too
 export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, OutputMessageType extends BaseMessageChunk = AIMessageChunk> extends BaseLanguageModel<OutputMessageType, CallOptions> {
     ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
     lc_namespace: string[];
+    disableStreaming: boolean;
     constructor(fields: BaseChatModelParams);
     _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
     protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
@@ -37,6 +37,12 @@ export class BaseChatModel extends BaseLanguageModel {
             writable: true,
             value: ["langchain", "chat_models", this._llmType()]
         });
+        Object.defineProperty(this, "disableStreaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
     }
     _separateRunnableConfigFromCallOptionsCompat(options) {
         // For backwards compat, keep `signal` in both runnableConfig and callOptions
@@ -64,7 +70,8 @@ export class BaseChatModel extends BaseLanguageModel {
     async *_streamIterator(input, options) {
         // Subclass check required to avoid double callbacks with default implementation
         if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
+            BaseChatModel.prototype._streamResponseChunks ||
+            this.disableStreaming) {
             yield this.invoke(input, options);
         }
         else {
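
Taken together, the changes above add an opt-in `disableStreaming` field to `BaseChatModel`. When it is true, `_streamIterator` takes the same branch used for models that never override `_streamResponseChunks`, so `stream()` defers to `invoke()` and yields the full response as a single item. A minimal usage sketch in TypeScript, assuming a concrete subclass such as `ChatOpenAI` from `@langchain/openai` built against this release (model name and credentials are illustrative):

import { ChatOpenAI } from "@langchain/openai";

// Any BaseChatModel subclass inherits the new flag; ChatOpenAI is only an
// example, and OPENAI_API_KEY is assumed to be set in the environment.
const model = new ChatOpenAI({
  model: "gpt-4o-mini",
  disableStreaming: true, // stream() now defers to invoke()
});

// stream() still returns an async iterable, but with disableStreaming set
// it yields exactly one item: the full result of invoke().
for await (const chunk of await model.stream("Hello!")) {
  console.log(chunk.content);
}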

package/dist/prompts/base.cjs CHANGED
@@ -46,6 +46,22 @@ class BasePromptTemplate extends base_js_1.Runnable {
             writable: true,
             value: void 0
         });
+        /**
+         * Metadata to be used for tracing.
+         */
+        Object.defineProperty(this, "metadata", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        /** Tags to be used for tracing. */
+        Object.defineProperty(this, "tags", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         const { inputVariables } = input;
         if (inputVariables.includes("stop")) {
             throw new Error("Cannot have an input variable named 'stop', as it is used internally, please rename.");
@@ -81,7 +97,12 @@ class BasePromptTemplate extends base_js_1.Runnable {
      * @returns A Promise that resolves to the output of the prompt template.
      */
     async invoke(input, options) {
-        return this._callWithConfig((input) => this.formatPromptValue(input), input, { ...options, runType: "prompt" });
+        const metadata = {
+            ...this.metadata,
+            ...options?.metadata,
+        };
+        const tags = [...(this.tags ?? []), ...(options?.tags ?? [])];
+        return this._callWithConfig((input) => this.formatPromptValue(input), input, { ...options, tags, metadata, runType: "prompt" });
     }
     /**
      * Return a json-like object representing this prompt template.

package/dist/prompts/base.d.ts CHANGED
@@ -34,6 +34,12 @@ export declare abstract class BasePromptTemplate<RunInput extends InputValues =
     inputVariables: Array<Extract<keyof RunInput, string>>;
     outputParser?: BaseOutputParser;
     partialVariables: PartialValues<PartialVariableName>;
+    /**
+     * Metadata to be used for tracing.
+     */
+    metadata?: Record<string, unknown>;
+    /** Tags to be used for tracing. */
+    tags?: string[];
     constructor(input: BasePromptTemplateInput);
     abstract partial(values: PartialValues): Promise<BasePromptTemplate<RunInput, RunOutput, PartialVariableName>>;
     /**

package/dist/prompts/base.js CHANGED
@@ -43,6 +43,22 @@ export class BasePromptTemplate extends Runnable {
             writable: true,
             value: void 0
         });
+        /**
+         * Metadata to be used for tracing.
+         */
+        Object.defineProperty(this, "metadata", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        /** Tags to be used for tracing. */
+        Object.defineProperty(this, "tags", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         const { inputVariables } = input;
         if (inputVariables.includes("stop")) {
             throw new Error("Cannot have an input variable named 'stop', as it is used internally, please rename.");
@@ -78,7 +94,12 @@ export class BasePromptTemplate extends Runnable {
      * @returns A Promise that resolves to the output of the prompt template.
      */
     async invoke(input, options) {
-        return this._callWithConfig((input) => this.formatPromptValue(input), input, { ...options, runType: "prompt" });
+        const metadata = {
+            ...this.metadata,
+            ...options?.metadata,
+        };
+        const tags = [...(this.tags ?? []), ...(options?.tags ?? [])];
+        return this._callWithConfig((input) => this.formatPromptValue(input), input, { ...options, tags, metadata, runType: "prompt" });
     }
     /**
      * Return a json-like object representing this prompt template.
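
On the prompts side, `BasePromptTemplate` gains public `metadata` and `tags` fields that `invoke()` now folds into the run config before tracing: instance and per-call tags are concatenated, and per-call metadata keys override instance keys. A short sketch of the resulting behavior, assuming this release of `@langchain/core` (template text and field values are illustrative):

import { PromptTemplate } from "@langchain/core/prompts";

const prompt = PromptTemplate.fromTemplate("Tell me a joke about {topic}.");
// Instance-level tracing fields, new in this release.
prompt.tags = ["jokes"];
prompt.metadata = { team: "examples" };

// Per-call tags are appended after instance tags, and per-call metadata
// keys win over instance keys, matching the spreads in invoke() above.
const value = await prompt.invoke(
  { topic: "TypeScript" },
  { tags: ["request-level"], metadata: { requestId: "hypothetical-id" } }
);
console.log(value.toString()); // "Tell me a joke about TypeScript."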