@langchain/core 0.1.54 → 0.1.55
This diff shows the changes between two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the versions as they appear in their respective public registries.
package/dist/runnables/base.cjs
CHANGED
@@ -1107,7 +1107,8 @@ class RunnableSequence extends Runnable {
     }
     async *_streamIterator(input, options) {
         const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(options);
-        const
+        const { runId, ...otherOptions } = options ?? {};
+        const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
         const steps = [this.first, ...this.middle, this.last];
         let concatSupported = true;
         let finalOutput;
@@ -1115,12 +1116,12 @@ class RunnableSequence extends Runnable {
             yield input;
         }
         try {
-            let finalGenerator = steps[0].transform(inputGenerator(), (0, config_js_1.patchConfig)(
+            let finalGenerator = steps[0].transform(inputGenerator(), (0, config_js_1.patchConfig)(otherOptions, {
                 callbacks: runManager?.getChild(`seq:step:1`),
             }));
             for (let i = 1; i < steps.length; i += 1) {
                 const step = steps[i];
-                finalGenerator = await step.transform(finalGenerator, (0, config_js_1.patchConfig)(
+                finalGenerator = await step.transform(finalGenerator, (0, config_js_1.patchConfig)(otherOptions, {
                     callbacks: runManager?.getChild(`seq:step:${i + 1}`),
                 }));
             }
@@ -1488,13 +1489,12 @@ class RunnableWithFallbacks extends Runnable {
     }
     async invoke(input, options) {
         const callbackManager_ = await manager_js_1.CallbackManager.configure(options?.callbacks, undefined, options?.tags, undefined, options?.metadata);
-        const
-
-        delete options?.runId;
+        const { runId, ...otherOptions } = options ?? {};
+        const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
         let firstError;
         for (const runnable of this.runnables()) {
             try {
-                const output = await runnable.invoke(input, (0, config_js_1.patchConfig)(
+                const output = await runnable.invoke(input, (0, config_js_1.patchConfig)(otherOptions, { callbacks: runManager?.getChild() }));
                 await runManager?.handleChainEnd(_coerceToDict(output, "output"));
                 return output;
             }
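
In both RunnableSequence hunks above, the caller-supplied runId is split out of the stream options, passed to handleChainStart for the sequence's own run, and only the remaining otherOptions are forwarded to each step through patchConfig, so child steps still receive fresh child run ids. Below is a minimal TypeScript sketch of the caller-facing behaviour, assuming this release's public runnables entrypoint and Node's built-in randomUUID; the two-step chain is illustrative, not taken from the package.

import { RunnableLambda } from "@langchain/core/runnables";
import { randomUUID } from "node:crypto";

const chain = RunnableLambda.from(async (s: string) => s.toUpperCase())
  .pipe(RunnableLambda.from(async (s: string) => `${s}!`));

// The runId below is used for the RunnableSequence's own chain run; each step
// still gets a child run id via runManager.getChild(`seq:step:${n}`).
const runId = randomUUID();
const stream = await chain.stream("hello", { runId });
for await (const chunk of stream) {
  console.log(chunk); // "HELLO!"
}
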
package/dist/runnables/base.js
CHANGED
@@ -1096,7 +1096,8 @@ export class RunnableSequence extends Runnable {
     }
     async *_streamIterator(input, options) {
         const callbackManager_ = await getCallbackManagerForConfig(options);
-        const
+        const { runId, ...otherOptions } = options ?? {};
+        const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
         const steps = [this.first, ...this.middle, this.last];
         let concatSupported = true;
         let finalOutput;
@@ -1104,12 +1105,12 @@ export class RunnableSequence extends Runnable {
             yield input;
         }
         try {
-            let finalGenerator = steps[0].transform(inputGenerator(), patchConfig(
+            let finalGenerator = steps[0].transform(inputGenerator(), patchConfig(otherOptions, {
                 callbacks: runManager?.getChild(`seq:step:1`),
             }));
             for (let i = 1; i < steps.length; i += 1) {
                 const step = steps[i];
-                finalGenerator = await step.transform(finalGenerator, patchConfig(
+                finalGenerator = await step.transform(finalGenerator, patchConfig(otherOptions, {
                     callbacks: runManager?.getChild(`seq:step:${i + 1}`),
                 }));
             }
@@ -1473,13 +1474,12 @@ export class RunnableWithFallbacks extends Runnable {
     }
     async invoke(input, options) {
         const callbackManager_ = await CallbackManager.configure(options?.callbacks, undefined, options?.tags, undefined, options?.metadata);
-        const
-
-        delete options?.runId;
+        const { runId, ...otherOptions } = options ?? {};
+        const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
         let firstError;
         for (const runnable of this.runnables()) {
             try {
-                const output = await runnable.invoke(input, patchConfig(
+                const output = await runnable.invoke(input, patchConfig(otherOptions, { callbacks: runManager?.getChild() }));
                 await runManager?.handleChainEnd(_coerceToDict(output, "output"));
                 return output;
             }
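
The ESM build mirrors the CommonJS change. For RunnableWithFallbacks.invoke, the old code also mutated the caller's config with delete options?.runId; the new code instead builds otherOptions by destructuring, applies the runId only to the wrapper's own chain run, and hands each attempted runnable a child callback manager. A hedged sketch of how this surfaces to callers, again assuming the public runnables entrypoint; the failing and fallback lambdas are illustrative.

import { RunnableLambda } from "@langchain/core/runnables";
import { randomUUID } from "node:crypto";

const primary = RunnableLambda.from(async (s: string): Promise<string> => {
  throw new Error(`primary failed for ${s}`);
});
const backup = RunnableLambda.from(async (s: string) => `fallback: ${s}`);

// The supplied runId names the RunnableWithFallbacks run itself; each attempt
// runs under a child callback manager obtained from runManager.getChild().
const guarded = primary.withFallbacks({ fallbacks: [backup] });
const out = await guarded.invoke("ping", { runId: randomUUID() });
// out === "fallback: ping", and the caller's config object is left untouched.
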

package/dist/utils/testing/index.cjs
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.SingleRunExtractor = exports.SyntheticEmbeddings = exports.FakeEmbeddings = exports.FakeTool = exports.FakeTracer = exports.FakeListChatMessageHistory = exports.FakeChatMessageHistory = exports.FakeListChatModel = exports.FakeRetriever = exports.FakeChatModel = exports.FakeStreamingLLM = exports.FakeLLM = exports.FakeRunnable = exports.FakeSplitIntoListParser = void 0;
+exports.SingleRunExtractor = exports.SyntheticEmbeddings = exports.FakeEmbeddings = exports.FakeTool = exports.FakeTracer = exports.FakeListChatMessageHistory = exports.FakeChatMessageHistory = exports.FakeListChatModel = exports.FakeRetriever = exports.FakeStreamingChatModel = exports.FakeChatModel = exports.FakeStreamingLLM = exports.FakeLLM = exports.FakeRunnable = exports.FakeSplitIntoListParser = void 0;
 const chat_history_js_1 = require("../../chat_history.cjs");
 const document_js_1 = require("../../documents/document.cjs");
 const chat_models_js_1 = require("../../language_models/chat_models.cjs");
@@ -172,6 +172,79 @@ class FakeChatModel extends chat_models_js_1.BaseChatModel {
     }
 }
 exports.FakeChatModel = FakeChatModel;
+class FakeStreamingChatModel extends chat_models_js_1.BaseChatModel {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "sleep", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        });
+        Object.defineProperty(this, "responses", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "thrownErrorString", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.sleep = fields.sleep ?? this.sleep;
+        this.responses = fields.responses;
+        this.thrownErrorString = fields.thrownErrorString;
+    }
+    _llmType() {
+        return "fake";
+    }
+    async _generate(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        const generation = {
+            generations: [
+                {
+                    text: "",
+                    message: new index_js_1.AIMessage({
+                        content,
+                    }),
+                },
+            ],
+        };
+        return generation;
+    }
+    async *_streamResponseChunks(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        if (typeof content !== "string") {
+            for (const _ of this.responses ?? messages) {
+                yield new outputs_js_1.ChatGenerationChunk({
+                    text: "",
+                    message: new index_js_1.AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+        else {
+            for (const _ of this.responses ?? messages) {
+                yield new outputs_js_1.ChatGenerationChunk({
+                    text: content,
+                    message: new index_js_1.AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+    }
+}
+exports.FakeStreamingChatModel = FakeStreamingChatModel;
 class FakeRetriever extends index_js_2.BaseRetriever {
     constructor(fields) {
         super();
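
The FakeStreamingChatModel added above replies with the first configured response (or echoes the first input message when no responses are set) and, when streamed, yields one ChatGenerationChunk per configured response or input message. A usage sketch, assuming the fakes stay importable from the utils/testing entrypoint like the existing ones:

import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessage, HumanMessage } from "@langchain/core/messages";

const model = new FakeStreamingChatModel({
  responses: [new AIMessage("streamed reply")],
});

const stream = await model.stream([new HumanMessage("hello")]);
for await (const chunk of stream) {
  // One AIMessageChunk per configured response; its content always comes from
  // the first response (or the first input message as a fallback).
  console.log(chunk.content); // "streamed reply"
}
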

package/dist/utils/testing/index.d.ts
CHANGED

@@ -58,6 +58,19 @@ export declare class FakeChatModel extends BaseChatModel {
     _llmType(): string;
     _generate(messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
 }
+export declare class FakeStreamingChatModel extends BaseChatModel {
+    sleep?: number;
+    responses?: BaseMessage[];
+    thrownErrorString?: string;
+    constructor(fields: {
+        sleep?: number;
+        responses?: BaseMessage[];
+        thrownErrorString?: string;
+    } & BaseLLMParams);
+    _llmType(): string;
+    _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
+    _streamResponseChunks(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+}
 export declare class FakeRetriever extends BaseRetriever {
     lc_namespace: string[];
     output: Document<Record<string, any>>[];
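
The declarations show that thrownErrorString is honoured by both _generate and _streamResponseChunks, so the fake can simulate a model that fails on invoke as well as when streaming. A short error-path sketch, assuming a Jest-style test runner for expect:

import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { HumanMessage } from "@langchain/core/messages";

const failing = new FakeStreamingChatModel({ thrownErrorString: "expected failure" });

// Non-streaming path throws from _generate.
await expect(failing.invoke([new HumanMessage("hi")])).rejects.toThrow("expected failure");

// Streaming path throws when the first chunk is pulled.
const consume = async () => {
  for await (const _ of await failing.stream([new HumanMessage("hi")])) {
    /* no chunks expected */
  }
};
await expect(consume()).rejects.toThrow("expected failure");
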

package/dist/utils/testing/index.js
CHANGED

@@ -164,6 +164,78 @@ export class FakeChatModel extends BaseChatModel {
         };
     }
 }
+export class FakeStreamingChatModel extends BaseChatModel {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "sleep", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        });
+        Object.defineProperty(this, "responses", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "thrownErrorString", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.sleep = fields.sleep ?? this.sleep;
+        this.responses = fields.responses;
+        this.thrownErrorString = fields.thrownErrorString;
+    }
+    _llmType() {
+        return "fake";
+    }
+    async _generate(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        const generation = {
+            generations: [
+                {
+                    text: "",
+                    message: new AIMessage({
+                        content,
+                    }),
+                },
+            ],
+        };
+        return generation;
+    }
+    async *_streamResponseChunks(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        if (typeof content !== "string") {
+            for (const _ of this.responses ?? messages) {
+                yield new ChatGenerationChunk({
+                    text: "",
+                    message: new AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+        else {
+            for (const _ of this.responses ?? messages) {
+                yield new ChatGenerationChunk({
+                    text: content,
+                    message: new AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+    }
+}
 export class FakeRetriever extends BaseRetriever {
     constructor(fields) {
         super();
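
The ESM build of the testing module matches the CommonJS one. Combined with the runnables change, the new fake makes it straightforward to assert that a caller-supplied runId lands on the root run of a streamed sequence. The sketch below assumes the prompts and utils/testing entrypoints and that FakeTracer keeps persisted root runs on its runs array, as in earlier releases:

import { FakeStreamingChatModel, FakeTracer } from "@langchain/core/utils/testing";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { AIMessage } from "@langchain/core/messages";
import { randomUUID } from "node:crypto";

const tracer = new FakeTracer();
const chain = ChatPromptTemplate.fromMessages([["human", "{question}"]]).pipe(
  new FakeStreamingChatModel({ responses: [new AIMessage("42")] }),
);

const runId = randomUUID();
const stream = await chain.stream({ question: "meaning of life?" }, { runId, callbacks: [tracer] });
for await (const chunk of stream) {
  void chunk; // single AIMessageChunk with content "42"
}

// The sequence's root run carries the caller-supplied runId; the prompt and
// chat model runs appear as children created via runManager.getChild().
const rootRun = tracer.runs.find((run) => run.id === runId);
console.log(rootRun?.child_runs.length); // prompt + chat model
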