@langchain/core 0.1.53 → 0.1.55-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/callbacks/manager.cjs +13 -11
- package/dist/callbacks/manager.d.ts +7 -2
- package/dist/callbacks/manager.js +13 -11
- package/dist/language_models/chat_models.cjs +3 -3
- package/dist/language_models/chat_models.js +3 -3
- package/dist/language_models/llms.cjs +5 -4
- package/dist/language_models/llms.d.ts +2 -1
- package/dist/language_models/llms.js +5 -4
- package/dist/messages/index.d.ts +5 -1
- package/dist/output_parsers/string.cjs +12 -5
- package/dist/output_parsers/string.js +12 -5
- package/dist/prompts/chat.cjs +21 -3
- package/dist/prompts/chat.js +21 -3
- package/dist/retrievers/index.cjs +1 -1
- package/dist/retrievers/index.js +1 -1
- package/dist/runnables/base.cjs +40 -15
- package/dist/runnables/base.js +40 -15
- package/dist/runnables/branch.cjs +1 -1
- package/dist/runnables/branch.js +1 -1
- package/dist/runnables/config.cjs +5 -1
- package/dist/runnables/config.d.ts +1 -1
- package/dist/runnables/config.js +5 -1
- package/dist/tools.cjs +2 -1
- package/dist/tools.js +2 -1
- package/dist/utils/testing/index.cjs +108 -1
- package/dist/utils/testing/index.d.ts +22 -0
- package/dist/utils/testing/index.js +105 -0
- package/package.json +1 -1
package/dist/callbacks/manager.cjs
CHANGED

@@ -372,34 +372,36 @@ class CallbackManager extends BaseCallbackManager {
  getParentRunId() {
  return this._parentRunId;
  }
- async handleLLMStart(llm, prompts,
- return Promise.all(prompts.map(async (prompt) => {
-
+ async handleLLMStart(llm, prompts, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
+ return Promise.all(prompts.map(async (prompt, idx) => {
+ // Can't have duplicate runs with the same run ID (if provided)
+ const runId_ = idx === 0 && runId ? runId : (0, uuid_1.v4)();
  await Promise.all(this.handlers.map((handler) => (0, promises_js_1.consumeCallback)(async () => {
  if (!handler.ignoreLLM) {
  try {
- await handler.handleLLMStart?.(llm, [prompt],
+ await handler.handleLLMStart?.(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  catch (err) {
  console.error(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`);
  }
  }
  }, handler.awaitHandlers)));
- return new CallbackManagerForLLMRun(
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
  }));
  }
- async handleChatModelStart(llm, messages,
- return Promise.all(messages.map(async (messageGroup) => {
-
+ async handleChatModelStart(llm, messages, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
+ return Promise.all(messages.map(async (messageGroup, idx) => {
+ // Can't have duplicate runs with the same run ID (if provided)
+ const runId_ = idx === 0 && runId ? runId : (0, uuid_1.v4)();
  await Promise.all(this.handlers.map((handler) => (0, promises_js_1.consumeCallback)(async () => {
  if (!handler.ignoreLLM) {
  try {
  if (handler.handleChatModelStart) {
- await handler.handleChatModelStart?.(llm, [messageGroup],
+ await handler.handleChatModelStart?.(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  else if (handler.handleLLMStart) {
  const messageString = (0, index_js_1.getBufferString)(messageGroup);
- await handler.handleLLMStart?.(llm, [messageString],
+ await handler.handleLLMStart?.(llm, [messageString], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  }
  catch (err) {

@@ -407,7 +409,7 @@ class CallbackManager extends BaseCallbackManager {
  }
  }
  }, handler.awaitHandlers)));
- return new CallbackManagerForLLMRun(
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
  }));
  }
  async handleChainStart(chain, inputs, runId = (0, uuid_1.v4)(), runType = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
package/dist/callbacks/manager.d.ts
CHANGED

@@ -34,6 +34,11 @@ export interface BaseCallbackConfig {
   * Tags are passed to all callbacks, metadata is passed to handle*Start callbacks.
   */
  callbacks?: Callbacks;
+ /**
+  * Unique identifier for the tracer run for this call. If not provided, a new UUID
+  * will be generated.
+  */
+ runId?: string;
  }
  export declare function parseCallbackConfigArg(arg: Callbacks | BaseCallbackConfig | undefined): BaseCallbackConfig;
  /**

@@ -130,8 +135,8 @@ export declare class CallbackManager extends BaseCallbackManager implements Base
   * @returns The parent run ID.
   */
  getParentRunId(): string | undefined;
- handleLLMStart(llm: Serialized, prompts: string[],
- handleChatModelStart(llm: Serialized, messages: BaseMessage[][],
+ handleLLMStart(llm: Serialized, prompts: string[], runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
+ handleChatModelStart(llm: Serialized, messages: BaseMessage[][], runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
  handleChainStart(chain: Serialized, inputs: ChainValues, runId?: string, runType?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForChainRun>;
  handleToolStart(tool: Serialized, input: string, runId?: string, _parentRunId?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForToolRun>;
  handleRetrieverStart(retriever: Serialized, query: string, runId?: string, _parentRunId?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForRetrieverRun>;
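The runId added to BaseCallbackConfig flows through RunnableConfig, so a caller can pin the tracer run ID for a call instead of letting the callback manager generate one. A minimal usage sketch, assuming RunnableLambda is available from @langchain/core/runnables in this version; the wrapper and the pre-generated UUID are illustrative, not part of this diff:

    import { v4 as uuidv4 } from "uuid";
    import { RunnableLambda } from "@langchain/core/runnables";

    const upper = RunnableLambda.from(async (text: string) => text.toUpperCase());

    // Pre-generate the run ID so external tooling can correlate the trace afterwards.
    const runId = uuidv4();
    await upper.invoke("hello", { runId, runName: "uppercase-call" });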
package/dist/callbacks/manager.js
CHANGED

@@ -363,34 +363,36 @@ export class CallbackManager extends BaseCallbackManager {
  getParentRunId() {
  return this._parentRunId;
  }
- async handleLLMStart(llm, prompts,
- return Promise.all(prompts.map(async (prompt) => {
-
+ async handleLLMStart(llm, prompts, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
+ return Promise.all(prompts.map(async (prompt, idx) => {
+ // Can't have duplicate runs with the same run ID (if provided)
+ const runId_ = idx === 0 && runId ? runId : uuidv4();
  await Promise.all(this.handlers.map((handler) => consumeCallback(async () => {
  if (!handler.ignoreLLM) {
  try {
- await handler.handleLLMStart?.(llm, [prompt],
+ await handler.handleLLMStart?.(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  catch (err) {
  console.error(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`);
  }
  }
  }, handler.awaitHandlers)));
- return new CallbackManagerForLLMRun(
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
  }));
  }
- async handleChatModelStart(llm, messages,
- return Promise.all(messages.map(async (messageGroup) => {
-
+ async handleChatModelStart(llm, messages, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
+ return Promise.all(messages.map(async (messageGroup, idx) => {
+ // Can't have duplicate runs with the same run ID (if provided)
+ const runId_ = idx === 0 && runId ? runId : uuidv4();
  await Promise.all(this.handlers.map((handler) => consumeCallback(async () => {
  if (!handler.ignoreLLM) {
  try {
  if (handler.handleChatModelStart) {
- await handler.handleChatModelStart?.(llm, [messageGroup],
+ await handler.handleChatModelStart?.(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  else if (handler.handleLLMStart) {
  const messageString = getBufferString(messageGroup);
- await handler.handleLLMStart?.(llm, [messageString],
+ await handler.handleLLMStart?.(llm, [messageString], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
  }
  }
  catch (err) {

@@ -398,7 +400,7 @@ export class CallbackManager extends BaseCallbackManager {
  }
  }
  }, handler.awaitHandlers)));
- return new CallbackManagerForLLMRun(
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
  }));
  }
  async handleChainStart(chain, inputs, runId = uuidv4(), runType = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
package/dist/language_models/chat_models.cjs
CHANGED

@@ -76,7 +76,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
  invocation_params: this?.invocationParams(callOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages],
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
  let generationChunk;
  try {
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {

@@ -113,7 +113,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
  invocation_params: this?.invocationParams(parsedOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages,
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
  // generate results
  const results = await Promise.allSettled(baseMessages.map((messageList, i) => this._generate(messageList, { ...parsedOptions, promptIndex: i }, runManagers?.[i])));
  // handle results

@@ -172,7 +172,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
  batch_size: 1,
  cached: true,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages,
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
  // generate results
  const missingPromptIndices = [];
  const results = await Promise.allSettled(baseMessages.map(async (baseMessage, index) => {
package/dist/language_models/chat_models.js
CHANGED

@@ -72,7 +72,7 @@ export class BaseChatModel extends BaseLanguageModel {
  invocation_params: this?.invocationParams(callOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages],
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
  let generationChunk;
  try {
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {

@@ -109,7 +109,7 @@ export class BaseChatModel extends BaseLanguageModel {
  invocation_params: this?.invocationParams(parsedOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages,
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
  // generate results
  const results = await Promise.allSettled(baseMessages.map((messageList, i) => this._generate(messageList, { ...parsedOptions, promptIndex: i }, runManagers?.[i])));
  // handle results

@@ -168,7 +168,7 @@ export class BaseChatModel extends BaseLanguageModel {
  batch_size: 1,
  cached: true,
  };
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages,
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
  // generate results
  const missingPromptIndices = [];
  const results = await Promise.allSettled(baseMessages.map(async (baseMessage, index) => {
package/dist/language_models/llms.cjs
CHANGED

@@ -57,7 +57,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
  invocation_params: this?.invocationParams(callOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()],
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
  let generation = new outputs_js_1.GenerationChunk({
  text: "",
  });

@@ -132,7 +132,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
  invocation_params: this?.invocationParams(parsedOptions),
  batch_size: prompts.length,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts,
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions?.runName);
  let output;
  try {
  output = await this._generate(prompts, parsedOptions, runManagers?.[0]);

@@ -153,7 +153,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
  });
  return output;
  }
- async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }) {
+ async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }) {
  const callbackManager_ = await manager_js_1.CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
  const extra = {
  options: parsedOptions,

@@ -161,7 +161,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
  batch_size: prompts.length,
  cached: true,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts,
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, undefined, extra, undefined, undefined, handledOptions?.runName);
  // generate results
  const missingPromptIndices = [];
  const results = await Promise.allSettled(prompts.map(async (prompt, index) => {

@@ -238,6 +238,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
  llmStringKey,
  parsedOptions: callOptions,
  handledOptions: runnableConfig,
+ runId: runnableConfig.runId,
  });
  let llmOutput = {};
  if (missingPromptIndices.length > 0) {
package/dist/language_models/llms.d.ts
CHANGED

@@ -23,6 +23,7 @@ interface LLMGenerateCachedParameters<T extends BaseLLM<CallOptions>, CallOption
  llmStringKey: string;
  parsedOptions: T["ParsedCallOptions"];
  handledOptions: RunnableConfig;
+ runId?: string;
  }
  /**
   * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.

@@ -63,7 +64,7 @@ export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = B
  _flattenLLMResult(llmResult: LLMResult): LLMResult[];
  /** @ignore */
  _generateUncached(prompts: string[], parsedOptions: this["ParsedCallOptions"], handledOptions: BaseCallbackConfig): Promise<LLMResult>;
- _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }: LLMGenerateCachedParameters<typeof this>): Promise<LLMResult & {
+ _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }: LLMGenerateCachedParameters<typeof this>): Promise<LLMResult & {
  missingPromptIndices: number[];
  }>;
  /**
package/dist/language_models/llms.js
CHANGED

@@ -54,7 +54,7 @@ export class BaseLLM extends BaseLanguageModel {
  invocation_params: this?.invocationParams(callOptions),
  batch_size: 1,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()],
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
  let generation = new GenerationChunk({
  text: "",
  });

@@ -129,7 +129,7 @@ export class BaseLLM extends BaseLanguageModel {
  invocation_params: this?.invocationParams(parsedOptions),
  batch_size: prompts.length,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts,
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions?.runName);
  let output;
  try {
  output = await this._generate(prompts, parsedOptions, runManagers?.[0]);

@@ -150,7 +150,7 @@ export class BaseLLM extends BaseLanguageModel {
  });
  return output;
  }
- async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }) {
+ async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }) {
  const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
  const extra = {
  options: parsedOptions,

@@ -158,7 +158,7 @@ export class BaseLLM extends BaseLanguageModel {
  batch_size: prompts.length,
  cached: true,
  };
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts,
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, undefined, extra, undefined, undefined, handledOptions?.runName);
  // generate results
  const missingPromptIndices = [];
  const results = await Promise.allSettled(prompts.map(async (prompt, index) => {

@@ -235,6 +235,7 @@ export class BaseLLM extends BaseLanguageModel {
  llmStringKey,
  parsedOptions: callOptions,
  handledOptions: runnableConfig,
+ runId: runnableConfig.runId,
  });
  let llmOutput = {};
  if (missingPromptIndices.length > 0) {
package/dist/messages/index.d.ts
CHANGED
@@ -30,7 +30,11 @@ export type MessageContentImageUrl = {
  detail?: ImageDetail;
  };
  };
- export type MessageContentComplex = MessageContentText | MessageContentImageUrl
+ export type MessageContentComplex = MessageContentText | MessageContentImageUrl | (Record<string, any> & {
+ type?: "text" | "image_url" | string;
+ }) | (Record<string, any> & {
+ type?: never;
+ });
  export type MessageContent = string | MessageContentComplex[];
  export interface FunctionCall {
  /**
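The widened MessageContentComplex union tolerates content parts that carry provider-specific keys or a custom type string, not only the canonical text and image_url shapes. A hedged illustration; the extra annotations key is a made-up example of such a part, not an API defined by this package:

    import { HumanMessage } from "@langchain/core/messages";

    const message = new HumanMessage({
      content: [
        { type: "text", text: "Describe this image." },
        // Extra keys now type-check because of the Record<string, any> arms of the union.
        { type: "image_url", image_url: { url: "https://example.com/cat.png" }, annotations: { source: "user-upload" } },
      ],
    });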
package/dist/output_parsers/string.cjs
CHANGED

@@ -62,14 +62,21 @@ class StringOutputParser extends transform_js_1.BaseTransformOutputParser {
  _messageContentComplexToString(content) {
  switch (content.type) {
  case "text":
-
+ if ("text" in content) {
+ // Type guard for MessageContentText
+ return this._textContentToString(content);
+ }
+ break;
  case "image_url":
-
+ if ("image_url" in content) {
+ // Type guard for MessageContentImageUrl
+ return this._imageUrlContentToString(content);
+ }
+ break;
  default:
- throw new Error(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- `Cannot coerce "${content.type}" message part into a string.`);
+ throw new Error(`Cannot coerce "${content.type}" message part into a string.`);
  }
+ throw new Error(`Invalid content type: ${content.type}`);
  }
  _baseMessageContentToString(content) {
  return content.reduce((acc, item) => acc + this._messageContentComplexToString(item), "");
package/dist/output_parsers/string.js
CHANGED

@@ -59,14 +59,21 @@ export class StringOutputParser extends BaseTransformOutputParser {
  _messageContentComplexToString(content) {
  switch (content.type) {
  case "text":
-
+ if ("text" in content) {
+ // Type guard for MessageContentText
+ return this._textContentToString(content);
+ }
+ break;
  case "image_url":
-
+ if ("image_url" in content) {
+ // Type guard for MessageContentImageUrl
+ return this._imageUrlContentToString(content);
+ }
+ break;
  default:
- throw new Error(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- `Cannot coerce "${content.type}" message part into a string.`);
+ throw new Error(`Cannot coerce "${content.type}" message part into a string.`);
  }
+ throw new Error(`Invalid content type: ${content.type}`);
  }
  _baseMessageContentToString(content) {
  return content.reduce((acc, item) => acc + this._messageContentComplexToString(item), "");
package/dist/prompts/chat.cjs
CHANGED
@@ -463,14 +463,32 @@ function _coerceMessagePromptTemplateLike(messagePromptTemplateLike) {
  return new MessagesPlaceholder({ variableName, optional: true });
  }
  const message = (0, index_js_1.coerceMessageLikeToMessage)(messagePromptTemplateLike);
+ let templateData;
+ if (typeof message.content === "string") {
+ templateData = message.content;
+ }
+ else {
+ // Assuming message.content is an array of complex objects, transform it.
+ templateData = message.content.map((item) => {
+ if ("text" in item) {
+ return { text: item.text };
+ }
+ else if ("image_url" in item) {
+ return { image_url: item.image_url };
+ }
+ else {
+ throw new Error("Invalid message content");
+ }
+ });
+ }
  if (message._getType() === "human") {
- return HumanMessagePromptTemplate.fromTemplate(
+ return HumanMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (message._getType() === "ai") {
- return AIMessagePromptTemplate.fromTemplate(
+ return AIMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (message._getType() === "system") {
- return SystemMessagePromptTemplate.fromTemplate(
+ return SystemMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (index_js_1.ChatMessage.isInstance(message)) {
  return ChatMessagePromptTemplate.fromTemplate(message.content, message.role);
package/dist/prompts/chat.js
CHANGED
@@ -452,14 +452,32 @@ function _coerceMessagePromptTemplateLike(messagePromptTemplateLike) {
  return new MessagesPlaceholder({ variableName, optional: true });
  }
  const message = coerceMessageLikeToMessage(messagePromptTemplateLike);
+ let templateData;
+ if (typeof message.content === "string") {
+ templateData = message.content;
+ }
+ else {
+ // Assuming message.content is an array of complex objects, transform it.
+ templateData = message.content.map((item) => {
+ if ("text" in item) {
+ return { text: item.text };
+ }
+ else if ("image_url" in item) {
+ return { image_url: item.image_url };
+ }
+ else {
+ throw new Error("Invalid message content");
+ }
+ });
+ }
  if (message._getType() === "human") {
- return HumanMessagePromptTemplate.fromTemplate(
+ return HumanMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (message._getType() === "ai") {
- return AIMessagePromptTemplate.fromTemplate(
+ return AIMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (message._getType() === "system") {
- return SystemMessagePromptTemplate.fromTemplate(
+ return SystemMessagePromptTemplate.fromTemplate(templateData);
  }
  else if (ChatMessage.isInstance(message)) {
  return ChatMessagePromptTemplate.fromTemplate(message.content, message.role);
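_coerceMessagePromptTemplateLike now maps array content into template data instead of forwarding only string content, so multimodal entries survive ChatPromptTemplate.fromMessages. A rough sketch under the assumption that the tuple form accepts array content and that image_url templating is supported in this version; the template strings and URL are illustrative:

    import { ChatPromptTemplate } from "@langchain/core/prompts";

    const prompt = ChatPromptTemplate.fromMessages([
      ["system", "You describe images."],
      // Array content is coerced through the new templateData mapping rather than being dropped.
      ["human", [
        { type: "text", text: "What is in this picture?" },
        { type: "image_url", image_url: "{imageUrl}" },
      ]],
    ]);
    const messages = await prompt.formatMessages({ imageUrl: "https://example.com/cat.png" });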
package/dist/retrievers/index.cjs
CHANGED

@@ -65,7 +65,7 @@ class BaseRetriever extends base_js_1.Runnable {
  async getRelevantDocuments(query, config) {
  const parsedConfig = (0, config_js_1.ensureConfig)((0, manager_js_1.parseCallbackConfigArg)(config));
  const callbackManager_ = await manager_js_1.CallbackManager.configure(parsedConfig.callbacks, this.callbacks, parsedConfig.tags, this.tags, parsedConfig.metadata, this.metadata, { verbose: this.verbose });
- const runManager = await callbackManager_?.handleRetrieverStart(this.toJSON(), query,
+ const runManager = await callbackManager_?.handleRetrieverStart(this.toJSON(), query, parsedConfig.runId, undefined, undefined, undefined, parsedConfig.runName);
  try {
  const results = await this._getRelevantDocuments(query, runManager);
  await runManager?.handleRetrieverEnd(results);
package/dist/retrievers/index.js
CHANGED
@@ -62,7 +62,7 @@ export class BaseRetriever extends Runnable {
  async getRelevantDocuments(query, config) {
  const parsedConfig = ensureConfig(parseCallbackConfigArg(config));
  const callbackManager_ = await CallbackManager.configure(parsedConfig.callbacks, this.callbacks, parsedConfig.tags, this.tags, parsedConfig.metadata, this.metadata, { verbose: this.verbose });
- const runManager = await callbackManager_?.handleRetrieverStart(this.toJSON(), query,
+ const runManager = await callbackManager_?.handleRetrieverStart(this.toJSON(), query, parsedConfig.runId, undefined, undefined, undefined, parsedConfig.runName);
  try {
  const results = await this._getRelevantDocuments(query, runManager);
  await runManager?.handleRetrieverEnd(results);
package/dist/runnables/base.cjs
CHANGED
@@ -112,12 +112,17 @@ class Runnable extends serializable_js_1.Serializable {
  });
  }
  _getOptionsList(options, length = 0) {
+ if (Array.isArray(options) && options.length !== length) {
+ throw new Error(`Passed "options" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`);
+ }
  if (Array.isArray(options)) {
- if (options.length !== length) {
- throw new Error(`Passed "options" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`);
- }
  return options.map(config_js_1.ensureConfig);
  }
+ if (length > 1 && !Array.isArray(options) && options.runId) {
+ console.warn("Provided runId will be used only for the first element of the batch.");
+ const subsequent = Object.fromEntries(Object.entries(options).filter(([key]) => key !== "runId"));
+ return Array.from({ length }, (_, i) => (0, config_js_1.ensureConfig)(i === 0 ? options : subsequent));
+ }
  return Array.from({ length }, () => (0, config_js_1.ensureConfig)(options));
  }
  async batch(inputs, options, batchOptions) {

@@ -179,6 +184,7 @@ class Runnable extends serializable_js_1.Serializable {
  configurable: options.configurable,
  recursionLimit: options.recursionLimit,
  maxConcurrency: options.maxConcurrency,
+ runId: options.runId,
  });
  }
  const callOptions = { ...options };

@@ -189,12 +195,14 @@ class Runnable extends serializable_js_1.Serializable {
  delete callOptions.configurable;
  delete callOptions.recursionLimit;
  delete callOptions.maxConcurrency;
+ delete callOptions.runId;
  return [runnableConfig, callOptions];
  }
  async _callWithConfig(func, input, options) {
  const config = (0, config_js_1.ensureConfig)(options);
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, config?.runType, undefined, undefined, config?.runName ?? this.getName());
+ delete config.runId;
  let output;
  try {
  output = await func.call(this, input, config, runManager);

@@ -218,7 +226,11 @@ class Runnable extends serializable_js_1.Serializable {
  async _batchWithConfig(func, inputs, options, batchOptions) {
  const optionsList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(optionsList.map(config_js_1.getCallbackManagerForConfig));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), optionsList[i].runId, optionsList[i].runType, undefined, undefined, optionsList[i].runName ?? this.getName());
+ delete optionsList[i].runId;
+ return handleStartRes;
+ }));
  let outputs;
  try {
  outputs = await func.call(this, inputs, optionsList, runManagers, batchOptions);

@@ -264,7 +276,8 @@ class Runnable extends serializable_js_1.Serializable {
  }
  let runManager;
  try {
- const pipe = await (0, stream_js_1.pipeGeneratorWithSetup)(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" },
+ const pipe = await (0, stream_js_1.pipeGeneratorWithSetup)(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), config);
+ delete config.runId;
  runManager = pipe.setup;
  const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer";
  const streamLogHandler = runManager?.handlers.find(isLogStreamHandler);
@@ -1042,7 +1055,8 @@ class RunnableSequence extends Runnable {
  async invoke(input, options) {
  const config = (0, config_js_1.ensureConfig)(options);
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, undefined, undefined, undefined, config?.runName);
+ delete config.runId;
  let nextStepInput = input;
  let finalOutput;
  try {

@@ -1068,7 +1082,11 @@ class RunnableSequence extends Runnable {
  async batch(inputs, options, batchOptions) {
  const configList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(configList.map(config_js_1.getCallbackManagerForConfig));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, undefined, undefined, undefined, configList[i].runName);
+ delete configList[i].runId;
+ return handleStartRes;
+ }));
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let nextStepInputs = inputs;
  try {

@@ -1089,7 +1107,8 @@ class RunnableSequence extends Runnable {
  }
  async *_streamIterator(input, options) {
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(options);
- const
+ const { runId, ...otherOptions } = options ?? {};
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
  const steps = [this.first, ...this.middle, this.last];
  let concatSupported = true;
  let finalOutput;

@@ -1097,12 +1116,12 @@ class RunnableSequence extends Runnable {
  yield input;
  }
  try {
- let finalGenerator = steps[0].transform(inputGenerator(), (0, config_js_1.patchConfig)(
+ let finalGenerator = steps[0].transform(inputGenerator(), (0, config_js_1.patchConfig)(otherOptions, {
  callbacks: runManager?.getChild(`seq:step:1`),
  }));
  for (let i = 1; i < steps.length; i += 1) {
  const step = steps[i];
- finalGenerator = await step.transform(finalGenerator, (0, config_js_1.patchConfig)(
+ finalGenerator = await step.transform(finalGenerator, (0, config_js_1.patchConfig)(otherOptions, {
  callbacks: runManager?.getChild(`seq:step:${i + 1}`),
  }));
  }
@@ -1248,7 +1267,8 @@ class RunnableMap extends Runnable {
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(config);
  const runManager = await callbackManager_?.handleChainStart(this.toJSON(), {
  input,
- },
+ }, config.runId, undefined, undefined, undefined, config?.runName);
+ delete config.runId;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const output = {};
  try {

@@ -1469,11 +1489,12 @@ class RunnableWithFallbacks extends Runnable {
  }
  async invoke(input, options) {
  const callbackManager_ = await manager_js_1.CallbackManager.configure(options?.callbacks, undefined, options?.tags, undefined, options?.metadata);
- const
+ const { runId, ...otherOptions } = options ?? {};
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
  let firstError;
  for (const runnable of this.runnables()) {
  try {
- const output = await runnable.invoke(input, (0, config_js_1.patchConfig)(
+ const output = await runnable.invoke(input, (0, config_js_1.patchConfig)(otherOptions, { callbacks: runManager?.getChild() }));
  await runManager?.handleChainEnd(_coerceToDict(output, "output"));
  return output;
  }

@@ -1495,7 +1516,11 @@ class RunnableWithFallbacks extends Runnable {
  }
  const configList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(configList.map((config) => manager_js_1.CallbackManager.configure(config?.callbacks, undefined, config?.tags, undefined, config?.metadata)));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, undefined, undefined, undefined, configList[i].runName);
+ delete configList[i].runId;
+ return handleStartRes;
+ }));
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let firstError;
  for (const runnable of this.runnables()) {
package/dist/runnables/base.js
CHANGED
@@ -105,12 +105,17 @@ export class Runnable extends Serializable {
  });
  }
  _getOptionsList(options, length = 0) {
+ if (Array.isArray(options) && options.length !== length) {
+ throw new Error(`Passed "options" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`);
+ }
  if (Array.isArray(options)) {
- if (options.length !== length) {
- throw new Error(`Passed "options" must be an array with the same length as the inputs, but got ${options.length} options for ${length} inputs`);
- }
  return options.map(ensureConfig);
  }
+ if (length > 1 && !Array.isArray(options) && options.runId) {
+ console.warn("Provided runId will be used only for the first element of the batch.");
+ const subsequent = Object.fromEntries(Object.entries(options).filter(([key]) => key !== "runId"));
+ return Array.from({ length }, (_, i) => ensureConfig(i === 0 ? options : subsequent));
+ }
  return Array.from({ length }, () => ensureConfig(options));
  }
  async batch(inputs, options, batchOptions) {

@@ -172,6 +177,7 @@ export class Runnable extends Serializable {
  configurable: options.configurable,
  recursionLimit: options.recursionLimit,
  maxConcurrency: options.maxConcurrency,
+ runId: options.runId,
  });
  }
  const callOptions = { ...options };

@@ -182,12 +188,14 @@ export class Runnable extends Serializable {
  delete callOptions.configurable;
  delete callOptions.recursionLimit;
  delete callOptions.maxConcurrency;
+ delete callOptions.runId;
  return [runnableConfig, callOptions];
  }
  async _callWithConfig(func, input, options) {
  const config = ensureConfig(options);
  const callbackManager_ = await getCallbackManagerForConfig(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, config?.runType, undefined, undefined, config?.runName ?? this.getName());
+ delete config.runId;
  let output;
  try {
  output = await func.call(this, input, config, runManager);

@@ -211,7 +219,11 @@ export class Runnable extends Serializable {
  async _batchWithConfig(func, inputs, options, batchOptions) {
  const optionsList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(optionsList.map(getCallbackManagerForConfig));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), optionsList[i].runId, optionsList[i].runType, undefined, undefined, optionsList[i].runName ?? this.getName());
+ delete optionsList[i].runId;
+ return handleStartRes;
+ }));
  let outputs;
  try {
  outputs = await func.call(this, inputs, optionsList, runManagers, batchOptions);

@@ -257,7 +269,8 @@ export class Runnable extends Serializable {
  }
  let runManager;
  try {
- const pipe = await pipeGeneratorWithSetup(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" },
+ const pipe = await pipeGeneratorWithSetup(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), config);
+ delete config.runId;
  runManager = pipe.setup;
  const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer";
  const streamLogHandler = runManager?.handlers.find(isLogStreamHandler);
@@ -1031,7 +1044,8 @@ export class RunnableSequence extends Runnable {
  async invoke(input, options) {
  const config = ensureConfig(options);
  const callbackManager_ = await getCallbackManagerForConfig(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, undefined, undefined, undefined, config?.runName);
+ delete config.runId;
  let nextStepInput = input;
  let finalOutput;
  try {

@@ -1057,7 +1071,11 @@ export class RunnableSequence extends Runnable {
  async batch(inputs, options, batchOptions) {
  const configList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(configList.map(getCallbackManagerForConfig));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, undefined, undefined, undefined, configList[i].runName);
+ delete configList[i].runId;
+ return handleStartRes;
+ }));
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let nextStepInputs = inputs;
  try {

@@ -1078,7 +1096,8 @@ export class RunnableSequence extends Runnable {
  }
  async *_streamIterator(input, options) {
  const callbackManager_ = await getCallbackManagerForConfig(options);
- const
+ const { runId, ...otherOptions } = options ?? {};
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
  const steps = [this.first, ...this.middle, this.last];
  let concatSupported = true;
  let finalOutput;

@@ -1086,12 +1105,12 @@ export class RunnableSequence extends Runnable {
  yield input;
  }
  try {
- let finalGenerator = steps[0].transform(inputGenerator(), patchConfig(
+ let finalGenerator = steps[0].transform(inputGenerator(), patchConfig(otherOptions, {
  callbacks: runManager?.getChild(`seq:step:1`),
  }));
  for (let i = 1; i < steps.length; i += 1) {
  const step = steps[i];
- finalGenerator = await step.transform(finalGenerator, patchConfig(
+ finalGenerator = await step.transform(finalGenerator, patchConfig(otherOptions, {
  callbacks: runManager?.getChild(`seq:step:${i + 1}`),
  }));
  }
@@ -1236,7 +1255,8 @@ export class RunnableMap extends Runnable {
  const callbackManager_ = await getCallbackManagerForConfig(config);
  const runManager = await callbackManager_?.handleChainStart(this.toJSON(), {
  input,
- },
+ }, config.runId, undefined, undefined, undefined, config?.runName);
+ delete config.runId;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const output = {};
  try {

@@ -1454,11 +1474,12 @@ export class RunnableWithFallbacks extends Runnable {
  }
  async invoke(input, options) {
  const callbackManager_ = await CallbackManager.configure(options?.callbacks, undefined, options?.tags, undefined, options?.metadata);
- const
+ const { runId, ...otherOptions } = options ?? {};
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherOptions?.runName);
  let firstError;
  for (const runnable of this.runnables()) {
  try {
- const output = await runnable.invoke(input, patchConfig(
+ const output = await runnable.invoke(input, patchConfig(otherOptions, { callbacks: runManager?.getChild() }));
  await runManager?.handleChainEnd(_coerceToDict(output, "output"));
  return output;
  }

@@ -1480,7 +1501,11 @@ export class RunnableWithFallbacks extends Runnable {
  }
  const configList = this._getOptionsList(options ?? {}, inputs.length);
  const callbackManagers = await Promise.all(configList.map((config) => CallbackManager.configure(config?.callbacks, undefined, config?.tags, undefined, config?.metadata)));
- const runManagers = await Promise.all(callbackManagers.map((callbackManager, i) =>
+ const runManagers = await Promise.all(callbackManagers.map(async (callbackManager, i) => {
+ const handleStartRes = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(inputs[i], "input"), configList[i].runId, undefined, undefined, undefined, configList[i].runName);
+ delete configList[i].runId;
+ return handleStartRes;
+ }));
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let firstError;
  for (const runnable of this.runnables()) {
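Because a single (non-array) options object is expanded across the whole batch by _getOptionsList, a caller-supplied runId is honored for the first input only; a warning is logged and the remaining runs receive freshly generated IDs. A sketch of that behavior, again using an assumed RunnableLambda wrapper and an illustrative fixed UUID:

    import { RunnableLambda } from "@langchain/core/runnables";

    const double = RunnableLambda.from(async (n: number) => n * 2);

    // Only the first element of the batch is traced under this runId; the console warns
    // "Provided runId will be used only for the first element of the batch."
    await double.batch([1, 2, 3], { runId: "00000000-0000-0000-0000-000000000000" });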
package/dist/runnables/branch.cjs
CHANGED

@@ -140,7 +140,7 @@ class RunnableBranch extends base_js_1.Runnable {
  }
  async *_streamIterator(input, config) {
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), config?.runId, undefined, undefined, undefined, config?.runName);
  let finalOutput;
  let finalOutputSupported = true;
  let stream;
package/dist/runnables/branch.js
CHANGED
@@ -137,7 +137,7 @@ export class RunnableBranch extends Runnable {
  }
  async *_streamIterator(input, config) {
  const callbackManager_ = await getCallbackManagerForConfig(config);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"),
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config?.runId, undefined, undefined, undefined, config?.runName);
  let finalOutput;
  let finalOutputSupported = true;
  let stream;
package/dist/runnables/config.cjs
CHANGED

@@ -95,6 +95,7 @@ function ensureConfig(config) {
  metadata: {},
  callbacks: undefined,
  recursionLimit: 25,
+ runId: undefined,
  };
  if (loadedConfig) {
  empty = { ...empty, ...loadedConfig };

@@ -116,7 +117,7 @@ exports.ensureConfig = ensureConfig;
  /**
   * Helper function that patches runnable configs with updated properties.
   */
- function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, runName, configurable, } = {}) {
+ function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, runName, configurable, runId, } = {}) {
  const newConfig = ensureConfig(config);
  if (callbacks !== undefined) {
  /**

@@ -138,6 +139,9 @@ function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, r
  if (configurable !== undefined) {
  newConfig.configurable = { ...newConfig.configurable, ...configurable };
  }
+ if (runId !== undefined) {
+ delete newConfig.runId;
+ }
  return newConfig;
  }
  exports.patchConfig = patchConfig;
package/dist/runnables/config.d.ts
CHANGED

@@ -25,4 +25,4 @@ export declare function ensureConfig<CallOptions extends RunnableConfig>(config?
  /**
   * Helper function that patches runnable configs with updated properties.
   */
- export declare function patchConfig<CallOptions extends RunnableConfig>(config?: Partial<CallOptions>, { callbacks, maxConcurrency, recursionLimit, runName, configurable, }?: RunnableConfig): Partial<CallOptions>;
+ export declare function patchConfig<CallOptions extends RunnableConfig>(config?: Partial<CallOptions>, { callbacks, maxConcurrency, recursionLimit, runName, configurable, runId, }?: RunnableConfig): Partial<CallOptions>;
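Note that patchConfig deliberately drops a runId supplied in the patch object rather than copying it onto the child config, so a user-provided run ID applies only to the run that received it and is never inherited by nested runs. A minimal sketch, assuming patchConfig is re-exported from @langchain/core/runnables; the UUID is illustrative:

    import { patchConfig } from "@langchain/core/runnables";

    const child = patchConfig({ tags: ["parent"] }, { runId: "11111111-1111-1111-1111-111111111111" });
    // child.runId is undefined: the provided ID is discarded, so the child run gets its own generated UUID.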
package/dist/runnables/config.js
CHANGED
@@ -90,6 +90,7 @@ export function ensureConfig(config) {
  metadata: {},
  callbacks: undefined,
  recursionLimit: 25,
+ runId: undefined,
  };
  if (loadedConfig) {
  empty = { ...empty, ...loadedConfig };

@@ -110,7 +111,7 @@ export function ensureConfig(config) {
  /**
   * Helper function that patches runnable configs with updated properties.
   */
- export function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, runName, configurable, } = {}) {
+ export function patchConfig(config = {}, { callbacks, maxConcurrency, recursionLimit, runName, configurable, runId, } = {}) {
  const newConfig = ensureConfig(config);
  if (callbacks !== undefined) {
  /**

@@ -132,5 +133,8 @@ export function patchConfig(config = {}, { callbacks, maxConcurrency, recursionL
  if (configurable !== undefined) {
  newConfig.configurable = { ...newConfig.configurable, ...configurable };
  }
+ if (runId !== undefined) {
+ delete newConfig.runId;
+ }
  return newConfig;
  }
package/dist/tools.cjs
CHANGED
@@ -69,7 +69,8 @@ class StructuredTool extends base_js_1.BaseLangChain {
  }
  const config = (0, manager_js_1.parseCallbackConfigArg)(configArg);
  const callbackManager_ = await manager_js_1.CallbackManager.configure(config.callbacks, this.callbacks, config.tags || tags, this.tags, config.metadata, this.metadata, { verbose: this.verbose });
- const runManager = await callbackManager_?.handleToolStart(this.toJSON(), typeof parsed === "string" ? parsed : JSON.stringify(parsed),
+ const runManager = await callbackManager_?.handleToolStart(this.toJSON(), typeof parsed === "string" ? parsed : JSON.stringify(parsed), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  let result;
  try {
  result = await this._call(parsed, runManager, config);
package/dist/tools.js
CHANGED
@@ -65,7 +65,8 @@ export class StructuredTool extends BaseLangChain {
         }
         const config = parseCallbackConfigArg(configArg);
         const callbackManager_ = await CallbackManager.configure(config.callbacks, this.callbacks, config.tags || tags, this.tags, config.metadata, this.metadata, { verbose: this.verbose });
-        const runManager = await callbackManager_?.handleToolStart(this.toJSON(), typeof parsed === "string" ? parsed : JSON.stringify(parsed),
+        const runManager = await callbackManager_?.handleToolStart(this.toJSON(), typeof parsed === "string" ? parsed : JSON.stringify(parsed), config.runId, undefined, undefined, undefined, config.runName);
+        delete config.runId;
         let result;
         try {
             result = await this._call(parsed, runManager, config);
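Both tool builds now forward config.runId to handleToolStart and then delete it, so the supplied id names the tool's own run without leaking into child runs. A hedged sketch of how a caller might use this; the AdderTool class is invented for illustration, and only the runId config key is new in this diff (runName already existed).

import { z } from "zod";
import { v4 as uuidv4 } from "uuid";
import { StructuredTool } from "@langchain/core/tools";

// Invented example tool for this sketch.
class AdderTool extends StructuredTool {
  name = "adder";
  description = "Adds two numbers.";
  schema = z.object({ a: z.number(), b: z.number() });

  async _call({ a, b }: { a: number; b: number }): Promise<string> {
    return String(a + b);
  }
}

const tool = new AdderTool();
const toolRunId = uuidv4();

// runId is handed to handleToolStart for this call, then removed from the
// config before _call runs, so downstream runs do not reuse the same id.
const answer = await tool.call({ a: 1, b: 2 }, { runId: toolRunId, runName: "adder-call" });
console.log(answer); // "3"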
package/dist/utils/testing/index.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.SyntheticEmbeddings = exports.FakeEmbeddings = exports.FakeTool = exports.FakeTracer = exports.FakeListChatMessageHistory = exports.FakeChatMessageHistory = exports.FakeListChatModel = exports.FakeRetriever = exports.FakeChatModel = exports.FakeStreamingLLM = exports.FakeLLM = exports.FakeRunnable = exports.FakeSplitIntoListParser = void 0;
+exports.SingleRunExtractor = exports.SyntheticEmbeddings = exports.FakeEmbeddings = exports.FakeTool = exports.FakeTracer = exports.FakeListChatMessageHistory = exports.FakeChatMessageHistory = exports.FakeListChatModel = exports.FakeRetriever = exports.FakeStreamingChatModel = exports.FakeChatModel = exports.FakeStreamingLLM = exports.FakeLLM = exports.FakeRunnable = exports.FakeSplitIntoListParser = void 0;
 const chat_history_js_1 = require("../../chat_history.cjs");
 const document_js_1 = require("../../documents/document.cjs");
 const chat_models_js_1 = require("../../language_models/chat_models.cjs");
@@ -172,6 +172,79 @@ class FakeChatModel extends chat_models_js_1.BaseChatModel {
     }
 }
 exports.FakeChatModel = FakeChatModel;
+class FakeStreamingChatModel extends chat_models_js_1.BaseChatModel {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "sleep", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        });
+        Object.defineProperty(this, "responses", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "thrownErrorString", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.sleep = fields.sleep ?? this.sleep;
+        this.responses = fields.responses;
+        this.thrownErrorString = fields.thrownErrorString;
+    }
+    _llmType() {
+        return "fake";
+    }
+    async _generate(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        const generation = {
+            generations: [
+                {
+                    text: "",
+                    message: new index_js_1.AIMessage({
+                        content,
+                    }),
+                },
+            ],
+        };
+        return generation;
+    }
+    async *_streamResponseChunks(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        if (typeof content !== "string") {
+            for (const _ of this.responses ?? messages) {
+                yield new outputs_js_1.ChatGenerationChunk({
+                    text: "",
+                    message: new index_js_1.AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+        else {
+            for (const _ of this.responses ?? messages) {
+                yield new outputs_js_1.ChatGenerationChunk({
+                    text: content,
+                    message: new index_js_1.AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+    }
+}
+exports.FakeStreamingChatModel = FakeStreamingChatModel;
 class FakeRetriever extends index_js_2.BaseRetriever {
     constructor(fields) {
         super();
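FakeStreamingChatModel is a new test double: it answers with its first configured response (or echoes the first incoming message), yields one ChatGenerationChunk per response or message when streamed, and can be forced to fail via thrownErrorString. A possible test usage follows; message text and variable names are illustrative.

import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessage, HumanMessage } from "@langchain/core/messages";

const model = new FakeStreamingChatModel({
  responses: [new AIMessage("Hello from the fake model")],
});

// Happy path: each streamed chunk carries the configured response content.
const seen: string[] = [];
for await (const chunk of await model.stream([new HumanMessage("Hi there")])) {
  seen.push(String(chunk.content));
}
console.log(seen); // ["Hello from the fake model"]

// Error path: with thrownErrorString set, streaming fails before any chunk.
const failing = new FakeStreamingChatModel({ thrownErrorString: "boom" });
try {
  for await (const _chunk of await failing.stream([new HumanMessage("Hi")])) {
    // not reached
  }
} catch (e) {
  console.log((e as Error).message); // "boom"
}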
@@ -518,3 +591,37 @@ class SyntheticEmbeddings extends embeddings_js_1.Embeddings {
     }
 }
 exports.SyntheticEmbeddings = SyntheticEmbeddings;
+class SingleRunExtractor extends base_js_3.BaseTracer {
+    constructor() {
+        super();
+        Object.defineProperty(this, "runPromiseResolver", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "runPromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        /** The name of the callback handler. */
+        Object.defineProperty(this, "name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "single_run_extractor"
+        });
+        this.runPromise = new Promise((extract) => {
+            this.runPromiseResolver = extract;
+        });
+    }
+    async persistRun(run) {
+        this.runPromiseResolver(run);
+    }
+    async extract() {
+        return this.runPromise;
+    }
+}
+exports.SingleRunExtractor = SingleRunExtractor;
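SingleRunExtractor is a minimal tracer for tests: it resolves an internal promise with the first run tree it persists, so a test can simply await the fully built Run. A sketch of pairing it with the existing FakeLLM helper; the prompt and response strings are illustrative.

import { FakeLLM, SingleRunExtractor } from "@langchain/core/utils/testing";

const tracer = new SingleRunExtractor();
const llm = new FakeLLM({ response: "pong" });

// Attach the tracer as a callback handler for this one invocation.
await llm.invoke("ping", { callbacks: [tracer] });

// extract() resolves once the root run has been persisted by the tracer.
const run = await tracer.extract();
console.log(run.name, run.run_type, run.id);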
package/dist/utils/testing/index.d.ts
CHANGED
@@ -58,6 +58,19 @@ export declare class FakeChatModel extends BaseChatModel {
     _llmType(): string;
     _generate(messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
 }
+export declare class FakeStreamingChatModel extends BaseChatModel {
+    sleep?: number;
+    responses?: BaseMessage[];
+    thrownErrorString?: string;
+    constructor(fields: {
+        sleep?: number;
+        responses?: BaseMessage[];
+        thrownErrorString?: string;
+    } & BaseLLMParams);
+    _llmType(): string;
+    _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
+    _streamResponseChunks(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+}
 export declare class FakeRetriever extends BaseRetriever {
     lc_namespace: string[];
     output: Document<Record<string, any>>[];
@@ -206,4 +219,13 @@ export declare class SyntheticEmbeddings extends Embeddings implements Synthetic
      */
     embedQuery(document: string): Promise<number[]>;
 }
+export declare class SingleRunExtractor extends BaseTracer {
+    runPromiseResolver: (run: Run) => void;
+    runPromise: Promise<Run>;
+    /** The name of the callback handler. */
+    name: string;
+    constructor();
+    persistRun(run: Run): Promise<void>;
+    extract(): Promise<Run>;
+}
 export {};
package/dist/utils/testing/index.js
CHANGED
@@ -164,6 +164,78 @@ export class FakeChatModel extends BaseChatModel {
         };
     }
 }
+export class FakeStreamingChatModel extends BaseChatModel {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "sleep", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        });
+        Object.defineProperty(this, "responses", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "thrownErrorString", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.sleep = fields.sleep ?? this.sleep;
+        this.responses = fields.responses;
+        this.thrownErrorString = fields.thrownErrorString;
+    }
+    _llmType() {
+        return "fake";
+    }
+    async _generate(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        const generation = {
+            generations: [
+                {
+                    text: "",
+                    message: new AIMessage({
+                        content,
+                    }),
+                },
+            ],
+        };
+        return generation;
+    }
+    async *_streamResponseChunks(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const content = this.responses?.[0].content ?? messages[0].content;
+        if (typeof content !== "string") {
+            for (const _ of this.responses ?? messages) {
+                yield new ChatGenerationChunk({
+                    text: "",
+                    message: new AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+        else {
+            for (const _ of this.responses ?? messages) {
+                yield new ChatGenerationChunk({
+                    text: content,
+                    message: new AIMessageChunk({
+                        content,
+                    }),
+                });
+            }
+        }
+    }
+}
 export class FakeRetriever extends BaseRetriever {
     constructor(fields) {
         super();
@@ -502,3 +574,36 @@ export class SyntheticEmbeddings extends Embeddings {
         return ret;
     }
 }
+export class SingleRunExtractor extends BaseTracer {
+    constructor() {
+        super();
+        Object.defineProperty(this, "runPromiseResolver", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "runPromise", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        /** The name of the callback handler. */
+        Object.defineProperty(this, "name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "single_run_extractor"
+        });
+        this.runPromise = new Promise((extract) => {
+            this.runPromiseResolver = extract;
+        });
+    }
+    async persistRun(run) {
+        this.runPromiseResolver(run);
+    }
+    async extract() {
+        return this.runPromise;
+    }
+}
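Taken together, the runId plumbing and the new testing utilities make it easy to assert on run identity in unit tests. A hedged end-to-end sketch, assuming (as the callback-manager changes in this release suggest) that a runId supplied in the config is applied to the model's root run:

import { v4 as uuidv4 } from "uuid";
import { AIMessage, HumanMessage } from "@langchain/core/messages";
import { FakeStreamingChatModel, SingleRunExtractor } from "@langchain/core/utils/testing";

const tracer = new SingleRunExtractor();
const model = new FakeStreamingChatModel({ responses: [new AIMessage("ok")] });

// Pin the id of the chat model run, then capture the traced run tree.
const expectedRunId = uuidv4();
await model.invoke([new HumanMessage("hello")], {
  callbacks: [tracer],
  runId: expectedRunId,
});

const run = await tracer.extract();
console.log(run.id === expectedRunId); // expected: true for the root run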