@langchain/core 0.2.1 → 0.2.3
This diff shows the changes between publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the package contents as they appear in that registry.
- package/dist/callbacks/manager.cjs +2 -1
- package/dist/callbacks/manager.js +2 -1
- package/dist/language_models/chat_models.cjs +66 -26
- package/dist/language_models/chat_models.js +66 -26
- package/dist/language_models/llms.cjs +43 -7
- package/dist/language_models/llms.js +43 -7
- package/dist/runnables/base.cjs +8 -15
- package/dist/runnables/base.js +8 -15
- package/dist/runnables/tests/runnable_stream_events_v2.test.js +894 -18
- package/dist/runnables/tests/runnable_stream_log.test.js +1 -212
- package/dist/singletons/index.cjs +5 -18
- package/dist/singletons/index.d.ts +0 -2
- package/dist/singletons/index.js +5 -18
- package/dist/tracers/event_stream.cjs +17 -1
- package/dist/tracers/event_stream.d.ts +4 -1
- package/dist/tracers/event_stream.js +15 -0
- package/dist/tracers/log_stream.cjs +3 -1
- package/dist/tracers/log_stream.d.ts +2 -1
- package/dist/tracers/log_stream.js +1 -0
- package/dist/utils/testing/index.cjs +2 -1
- package/dist/utils/testing/index.d.ts +1 -1
- package/dist/utils/testing/index.js +2 -1
- package/package.json +1 -1
package/dist/runnables/tests/runnable_stream_log.test.js CHANGED
@@ -4,10 +4,7 @@ import { test } from "@jest/globals";
 import { ChatPromptTemplate, HumanMessagePromptTemplate, PromptTemplate, SystemMessagePromptTemplate, } from "../../prompts/index.js";
 import { Document } from "../../documents/document.js";
 import { RunnableSequence, RunnableMap } from "../base.js";
-import { FakeLLM,
-import { SystemMessage, HumanMessage } from "../../messages/index.js";
-import { CommaSeparatedListOutputParser } from "../../output_parsers/list.js";
-import { ChatPromptValue } from "../../prompt_values.js";
+import { FakeLLM, FakeChatModel, FakeRetriever, } from "../../utils/testing/index.js";
 test("Runnable streamLog method", async () => {
     const promptTemplate = PromptTemplate.fromTemplate("{input}");
     const llm = new FakeLLM({});
@@ -72,211 +69,3 @@ test("Runnable streamLog method with a more complicated sequence", async () => {
     expect(finalState.state.logs.CUSTOM_NAME).toBeDefined();
     expect(finalState.state.logs.CUSTOM_NAME.final_output.output).toEqual(JSON.stringify(retrieverOutputDocs));
 });
-test("Test stream log aggregation", async () => {
-    const prompt = ChatPromptTemplate.fromMessages([
-        ["system", "You are a nice assistant"],
-        ["human", "{question}"],
-    ]);
-    const llm = new FakeStreamingLLM({
-        responses: ["tomato, lettuce, onion"],
-    });
-    const parser = new CommaSeparatedListOutputParser({});
-    const chain = prompt.pipe(llm).pipe(parser);
-    const logStream = await chain.streamLog({
-        question: "what is up?",
-    });
-    const chunks = [];
-    for await (const chunk of logStream) {
-        chunks.push(chunk);
-    }
-    expect(chunks).toMatchObject([
-        {
-            ops: [
-                {
-                    op: "replace",
-                    path: "",
-                    value: {
-                        id: expect.any(String),
-                        streamed_output: [],
-                        logs: {},
-                    },
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/ChatPromptTemplate",
-                    value: {
-                        id: expect.any(String),
-                        name: "ChatPromptTemplate",
-                        type: "prompt",
-                        tags: ["seq:step:1"],
-                        metadata: {},
-                        start_time: expect.any(String),
-                        streamed_output: [],
-                        streamed_output_str: [],
-                    },
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/ChatPromptTemplate/final_output",
-                    value: new ChatPromptValue([
-                        new SystemMessage("You are a nice assistant"),
-                        new HumanMessage("what is up?"),
-                    ]),
-                },
-                {
-                    op: "add",
-                    path: "/logs/ChatPromptTemplate/end_time",
-                    value: expect.any(String),
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/FakeStreamingLLM",
-                    value: {
-                        id: expect.any(String),
-                        name: "FakeStreamingLLM",
-                        type: "llm",
-                        tags: ["seq:step:2"],
-                        metadata: {},
-                        start_time: expect.any(String),
-                        streamed_output: [],
-                        streamed_output_str: [],
-                    },
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser",
-                    value: {
-                        id: expect.any(String),
-                        name: "CommaSeparatedListOutputParser",
-                        type: "parser",
-                        tags: ["seq:step:3"],
-                        metadata: {},
-                        start_time: expect.any(String),
-                        streamed_output: [],
-                        streamed_output_str: [],
-                    },
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-                    value: ["tomato"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/streamed_output/-",
-                    value: ["tomato"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-                    value: ["lettuce"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/streamed_output/-",
-                    value: ["lettuce"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/FakeStreamingLLM/final_output",
-                    value: {
-                        generations: [
-                            [
-                                {
-                                    text: "tomato, lettuce, onion",
-                                    generationInfo: {},
-                                },
-                            ],
-                        ],
-                    },
-                },
-                {
-                    op: "add",
-                    path: "/logs/FakeStreamingLLM/end_time",
-                    value: expect.any(String),
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-                    value: ["onion"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/streamed_output/-",
-                    value: ["onion"],
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser/final_output",
-                    value: {
-                        output: ["tomato", "lettuce", "onion"],
-                    },
-                },
-                {
-                    op: "add",
-                    path: "/logs/CommaSeparatedListOutputParser/end_time",
-                    value: expect.any(String),
-                },
-            ],
-        },
-        {
-            ops: [
-                {
-                    op: "replace",
-                    path: "/final_output",
-                    value: {
-                        output: ["tomato", "lettuce", "onion"],
-                    },
-                },
-            ],
-        },
-    ]);
-});
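The removed "Test stream log aggregation" case exercised how the RunLogPatch chunks emitted by streamLog are folded into one aggregate state. For reference, a hedged TypeScript sketch of that pattern — not the removed test — assuming the RunLogPatch.concat helper used by the surviving streamLog tests and the public entrypoints named in the imports:

```ts
// Hedged sketch: rebuilds the same chain and folds the emitted RunLogPatch chunks
// into a single RunLog state via concat (assumed to behave as in the remaining tests).
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { CommaSeparatedListOutputParser } from "@langchain/core/output_parsers";
import { FakeStreamingLLM } from "@langchain/core/utils/testing";
import type { RunLog, RunLogPatch } from "@langchain/core/tracers/log_stream";

async function main() {
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a nice assistant"],
    ["human", "{question}"],
  ]);
  const llm = new FakeStreamingLLM({ responses: ["tomato, lettuce, onion"] });
  const parser = new CommaSeparatedListOutputParser({});
  const chain = prompt.pipe(llm).pipe(parser);

  const logStream = await chain.streamLog({ question: "what is up?" });
  let aggregate: RunLog | RunLogPatch | undefined;
  for await (const patch of logStream) {
    // Each patch carries JSON Patch ops like the ones asserted above; concat applies them.
    aggregate = aggregate === undefined ? patch : aggregate.concat(patch);
  }
  // Per the removed assertions, the final state ends with { output: ["tomato", "lettuce", "onion"] }.
  console.log((aggregate as RunLog).state.final_output);
}

void main();
```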
package/dist/singletons/index.cjs CHANGED
@@ -11,28 +11,15 @@ class MockAsyncLocalStorage {
     }
 }
 exports.MockAsyncLocalStorage = MockAsyncLocalStorage;
+const mockAsyncLocalStorage = new MockAsyncLocalStorage();
 class AsyncLocalStorageProvider {
-    constructor() {
-        Object.defineProperty(this, "asyncLocalStorage", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: new MockAsyncLocalStorage()
-        });
-        Object.defineProperty(this, "hasBeenInitialized", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: false
-        });
-    }
     getInstance() {
-        return
+        return (globalThis.__lc_tracing_async_local_storage ??
+            mockAsyncLocalStorage);
     }
     initializeGlobalInstance(instance) {
-        if (
-
-            this.asyncLocalStorage = instance;
+        if (globalThis.__lc_tracing_async_local_storage === undefined) {
+            globalThis.__lc_tracing_async_local_storage = instance;
         }
     }
 }
package/dist/singletons/index.d.ts CHANGED
@@ -7,8 +7,6 @@ export declare class MockAsyncLocalStorage implements AsyncLocalStorageInterface
     run<T>(_store: any, callback: () => T): T;
 }
 declare class AsyncLocalStorageProvider {
-    private asyncLocalStorage;
-    private hasBeenInitialized;
     getInstance(): AsyncLocalStorageInterface;
     initializeGlobalInstance(instance: AsyncLocalStorageInterface): void;
 }
package/dist/singletons/index.js CHANGED
@@ -7,28 +7,15 @@ export class MockAsyncLocalStorage {
         return callback();
     }
 }
+const mockAsyncLocalStorage = new MockAsyncLocalStorage();
 class AsyncLocalStorageProvider {
-    constructor() {
-        Object.defineProperty(this, "asyncLocalStorage", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: new MockAsyncLocalStorage()
-        });
-        Object.defineProperty(this, "hasBeenInitialized", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: false
-        });
-    }
     getInstance() {
-        return
+        return (globalThis.__lc_tracing_async_local_storage ??
+            mockAsyncLocalStorage);
     }
     initializeGlobalInstance(instance) {
-        if (
-
-            this.asyncLocalStorage = instance;
+        if (globalThis.__lc_tracing_async_local_storage === undefined) {
+            globalThis.__lc_tracing_async_local_storage = instance;
         }
     }
 }
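Both compiled variants above implement the same change: AsyncLocalStorageProvider no longer carries per-instance state, but resolves a shared instance from the __lc_tracing_async_local_storage key on globalThis and falls back to an inert mock until one is registered. A minimal TypeScript sketch of that lookup pattern (the GLOBAL_KEY constant and the simplified interface are illustrative, not the shipped source):

```ts
// Simplified stand-in for the interface used in the diff above.
interface AsyncLocalStorageInterface {
  getStore(): unknown;
  run<T>(store: unknown, callback: () => T): T;
}

class MockAsyncLocalStorage implements AsyncLocalStorageInterface {
  getStore(): unknown {
    return undefined;
  }

  run<T>(_store: unknown, callback: () => T): T {
    return callback();
  }
}

const mockAsyncLocalStorage = new MockAsyncLocalStorage();
const GLOBAL_KEY = "__lc_tracing_async_local_storage"; // illustrative constant for the key shown in the diff

class AsyncLocalStorageProvider {
  getInstance(): AsyncLocalStorageInterface {
    // Use the globally registered instance if one exists, otherwise the inert mock.
    const registered = (globalThis as any)[GLOBAL_KEY] as AsyncLocalStorageInterface | undefined;
    return registered ?? mockAsyncLocalStorage;
  }

  initializeGlobalInstance(instance: AsyncLocalStorageInterface): void {
    // First registration wins; later calls leave the existing instance in place.
    if ((globalThis as any)[GLOBAL_KEY] === undefined) {
      (globalThis as any)[GLOBAL_KEY] = instance;
    }
  }
}
```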
package/dist/tracers/event_stream.cjs CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.EventStreamCallbackHandler = void 0;
+exports.EventStreamCallbackHandler = exports.isStreamEventsHandler = void 0;
 const base_js_1 = require("./base.cjs");
 const stream_js_1 = require("../utils/stream.cjs");
 const ai_js_1 = require("../messages/ai.cjs");
@@ -17,6 +17,8 @@ function assignName({ name, serialized, }) {
     }
     return "Unnamed";
 }
+const isStreamEventsHandler = (handler) => handler.name === "event_stream_tracer";
+exports.isStreamEventsHandler = isStreamEventsHandler;
 /**
  * Class that extends the `BaseTracer` class from the
  * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -205,6 +207,7 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
         finally {
             // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
             tappedPromiseResolver();
+            // Don't delete from the map to keep track of which runs have been tapped.
         }
     }
     else {
@@ -489,5 +492,18 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
             metadata: runInfo.metadata,
         }, runInfo);
     }
+    async onRunCreate(run) {
+        if (this.rootId === undefined) {
+            this.rootId = run.id;
+        }
+    }
+    async onRunUpdate(run) {
+        if (run.id === this.rootId && this.autoClose) {
+            const pendingPromises = [...this.tappedPromises.values()];
+            void Promise.all(pendingPromises).finally(() => {
+                void this.writer.close();
+            });
+        }
+    }
 }
 exports.EventStreamCallbackHandler = EventStreamCallbackHandler;
package/dist/tracers/event_stream.d.ts CHANGED
@@ -1,5 +1,5 @@
 import { BaseTracer, type Run } from "./base.js";
-import { BaseCallbackHandlerInput } from "../callbacks/base.js";
+import { BaseCallbackHandler, BaseCallbackHandlerInput } from "../callbacks/base.js";
 import { IterableReadableStream } from "../utils/stream.js";
 /**
  * Data associated with a StreamEvent.
@@ -94,6 +94,7 @@ export interface EventStreamCallbackHandlerInput extends BaseCallbackHandlerInput
     excludeTypes?: string[];
     excludeTags?: string[];
 }
+export declare const isStreamEventsHandler: (handler: BaseCallbackHandler) => handler is EventStreamCallbackHandler;
 /**
  * Class that extends the `BaseTracer` class from the
  * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -133,5 +134,7 @@ export declare class EventStreamCallbackHandler extends BaseTracer {
     onToolEnd(run: Run): Promise<void>;
     onRetrieverStart(run: Run): Promise<void>;
     onRetrieverEnd(run: Run): Promise<void>;
+    onRunCreate(run: Run): Promise<void>;
+    onRunUpdate(run: Run): Promise<void>;
 }
 export {};
package/dist/tracers/event_stream.js CHANGED
@@ -14,6 +14,7 @@ function assignName({ name, serialized, }) {
     }
     return "Unnamed";
 }
+export const isStreamEventsHandler = (handler) => handler.name === "event_stream_tracer";
 /**
  * Class that extends the `BaseTracer` class from the
  * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -202,6 +203,7 @@ export class EventStreamCallbackHandler extends BaseTracer {
         finally {
             // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
             tappedPromiseResolver();
+            // Don't delete from the map to keep track of which runs have been tapped.
        }
     }
     else {
@@ -486,4 +488,17 @@ export class EventStreamCallbackHandler extends BaseTracer {
             metadata: runInfo.metadata,
         }, runInfo);
     }
+    async onRunCreate(run) {
+        if (this.rootId === undefined) {
+            this.rootId = run.id;
+        }
+    }
+    async onRunUpdate(run) {
+        if (run.id === this.rootId && this.autoClose) {
+            const pendingPromises = [...this.tappedPromises.values()];
+            void Promise.all(pendingPromises).finally(() => {
+                void this.writer.close();
+            });
+        }
+    }
 }
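The new onRunCreate/onRunUpdate hooks record the root run id and, when autoClose is enabled, close the handler's writer once the root run finishes and every tapped output stream has settled. A hedged sketch of the consumer-facing surface this handler backs, Runnable.streamEvents with the v2 schema (the chain built here is illustrative; the logged event names are examples of what the tracer emits):

```ts
// Illustrative only: any Runnable works; FakeStreamingLLM keeps the example self-contained.
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { FakeStreamingLLM } from "@langchain/core/utils/testing";

async function main() {
  const chain = ChatPromptTemplate.fromTemplate("{question}").pipe(
    new FakeStreamingLLM({ responses: ["tomato, lettuce, onion"] })
  );

  // streamEvents wires an EventStreamCallbackHandler into the run; with the change
  // above, the event stream closes automatically once the root run completes.
  for await (const event of chain.streamEvents(
    { question: "what is up?" },
    { version: "v2" }
  )) {
    console.log(event.event, event.name); // e.g. "on_llm_stream", "FakeStreamingLLM"
  }
}

void main();
```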
package/dist/tracers/log_stream.cjs CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.LogStreamCallbackHandler = exports.RunLog = exports.RunLogPatch = void 0;
+exports.LogStreamCallbackHandler = exports.isLogStreamHandler = exports.RunLog = exports.RunLogPatch = void 0;
 const index_js_1 = require("../utils/fast-json-patch/index.cjs");
 const base_js_1 = require("./base.cjs");
 const stream_js_1 = require("../utils/stream.cjs");
@@ -59,6 +59,8 @@ class RunLog extends RunLogPatch {
     }
 }
 exports.RunLog = RunLog;
+const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer";
+exports.isLogStreamHandler = isLogStreamHandler;
 /**
  * Extract standardized inputs from a run.
  *
package/dist/tracers/log_stream.d.ts CHANGED
@@ -1,6 +1,6 @@
 import { type Operation as JSONPatchOperation } from "../utils/fast-json-patch/index.js";
 import { BaseTracer, type Run } from "./base.js";
-import { BaseCallbackHandlerInput, HandleLLMNewTokenCallbackFields } from "../callbacks/base.js";
+import { BaseCallbackHandler, BaseCallbackHandlerInput, HandleLLMNewTokenCallbackFields } from "../callbacks/base.js";
 import { IterableReadableStream } from "../utils/stream.js";
 import type { StreamEvent, StreamEventData } from "./event_stream.js";
 export type { StreamEvent, StreamEventData };
@@ -83,6 +83,7 @@ export interface LogStreamCallbackHandlerInput extends BaseCallbackHandlerInput
     excludeTags?: string[];
     _schemaFormat?: SchemaFormat;
 }
+export declare const isLogStreamHandler: (handler: BaseCallbackHandler) => handler is LogStreamCallbackHandler;
 /**
  * Class that extends the `BaseTracer` class from the
  * `langchain.callbacks.tracers.base` module. It represents a callback
package/dist/utils/testing/index.cjs CHANGED
@@ -130,7 +130,7 @@ class FakeStreamingLLM extends llms_js_1.LLM {
         this.responses = this.responses?.slice(1);
         return response ?? prompt;
     }
-    async *_streamResponseChunks(input) {
+    async *_streamResponseChunks(input, _options, runManager) {
         if (this.thrownErrorString) {
             throw new Error(this.thrownErrorString);
         }
@@ -139,6 +139,7 @@ class FakeStreamingLLM extends llms_js_1.LLM {
         for (const c of response ?? input) {
             await new Promise((resolve) => setTimeout(resolve, this.sleep));
             yield { text: c, generationInfo: {} };
+            await runManager?.handleLLMNewToken(c);
         }
     }
 }
package/dist/utils/testing/index.d.ts CHANGED
@@ -51,7 +51,7 @@ export declare class FakeStreamingLLM extends LLM {
     } & BaseLLMParams);
     _llmType(): string;
     _call(prompt: string): Promise<string>;
-    _streamResponseChunks(input: string): AsyncGenerator<GenerationChunk, void, unknown>;
+    _streamResponseChunks(input: string, _options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk, void, unknown>;
 }
 export declare class FakeChatModel extends BaseChatModel {
     _combineLLMOutput(): never[];
package/dist/utils/testing/index.js CHANGED
@@ -124,7 +124,7 @@ export class FakeStreamingLLM extends LLM {
         this.responses = this.responses?.slice(1);
         return response ?? prompt;
     }
-    async *_streamResponseChunks(input) {
+    async *_streamResponseChunks(input, _options, runManager) {
         if (this.thrownErrorString) {
             throw new Error(this.thrownErrorString);
         }
@@ -133,6 +133,7 @@ export class FakeStreamingLLM extends LLM {
         for (const c of response ?? input) {
             await new Promise((resolve) => setTimeout(resolve, this.sleep));
             yield { text: c, generationInfo: {} };
+            await runManager?.handleLLMNewToken(c);
         }
     }
 }
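With this change, FakeStreamingLLM forwards every streamed character to runManager?.handleLLMNewToken, so callback handlers (and the stream tracers earlier in this diff) see per-token events while streaming. A hedged sketch of what that enables, assuming the inline handler-object form accepted by the callbacks config option:

```ts
import { FakeStreamingLLM } from "@langchain/core/utils/testing";

async function main() {
  const llm = new FakeStreamingLLM({ responses: ["hi"] });
  const tokens: string[] = [];

  // With the change above, handleLLMNewToken fires once per streamed character.
  const stream = await llm.stream("hello", {
    callbacks: [{ handleLLMNewToken: (token: string) => { tokens.push(token); } }],
  });
  for await (const _chunk of stream) {
    // Drain the stream; the callback collects tokens as a side effect.
  }
  console.log(tokens); // expected: ["h", "i"]
}

void main();
```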