@langchain/core 0.2.18-rc.0 → 0.2.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/callbacks/manager.cjs +94 -19
- package/dist/callbacks/manager.d.ts +3 -1
- package/dist/callbacks/manager.js +94 -19
- package/dist/messages/base.cjs +16 -1
- package/dist/messages/base.d.ts +9 -0
- package/dist/messages/base.js +14 -0
- package/dist/messages/tool.cjs +23 -0
- package/dist/messages/tool.d.ts +15 -0
- package/dist/messages/tool.js +24 -1
- package/dist/messages/utils.cjs +1 -1
- package/dist/messages/utils.js +1 -1
- package/dist/output_parsers/string.cjs +1 -0
- package/dist/output_parsers/string.js +1 -0
- package/dist/runnables/base.cjs +1 -3
- package/dist/runnables/base.js +1 -3
- package/dist/runnables/remote.cjs +3 -1
- package/dist/runnables/remote.js +3 -1
- package/dist/tools/index.cjs +15 -6
- package/dist/tools/index.d.ts +21 -12
- package/dist/tools/index.js +15 -6
- package/dist/utils/testing/index.cjs +161 -3
- package/dist/utils/testing/index.d.ts +94 -3
- package/dist/utils/testing/index.js +160 -3
- package/package.json +4 -3
- package/dist/caches/tests/in_memory_cache.test.d.ts +0 -1
- package/dist/caches/tests/in_memory_cache.test.js +0 -33
- package/dist/callbacks/tests/callbacks.test.d.ts +0 -1
- package/dist/callbacks/tests/callbacks.test.js +0 -495
- package/dist/callbacks/tests/manager.int.test.d.ts +0 -1
- package/dist/callbacks/tests/manager.int.test.js +0 -29
- package/dist/callbacks/tests/run_collector.test.d.ts +0 -1
- package/dist/callbacks/tests/run_collector.test.js +0 -58
- package/dist/language_models/tests/chat_models.test.d.ts +0 -1
- package/dist/language_models/tests/chat_models.test.js +0 -204
- package/dist/language_models/tests/count_tokens.test.d.ts +0 -1
- package/dist/language_models/tests/count_tokens.test.js +0 -19
- package/dist/language_models/tests/llms.test.d.ts +0 -1
- package/dist/language_models/tests/llms.test.js +0 -52
- package/dist/messages/tests/base_message.test.d.ts +0 -1
- package/dist/messages/tests/base_message.test.js +0 -245
- package/dist/messages/tests/message_utils.test.d.ts +0 -1
- package/dist/messages/tests/message_utils.test.js +0 -434
- package/dist/output_parsers/openai_tools/tests/json_output_tools_parser.test.d.ts +0 -1
- package/dist/output_parsers/openai_tools/tests/json_output_tools_parser.test.js +0 -81
- package/dist/output_parsers/tests/json.test.d.ts +0 -1
- package/dist/output_parsers/tests/json.test.js +0 -427
- package/dist/output_parsers/tests/output_parser.test.d.ts +0 -1
- package/dist/output_parsers/tests/output_parser.test.js +0 -78
- package/dist/output_parsers/tests/string.test.d.ts +0 -1
- package/dist/output_parsers/tests/string.test.js +0 -68
- package/dist/output_parsers/tests/structured.test.d.ts +0 -1
- package/dist/output_parsers/tests/structured.test.js +0 -166
- package/dist/output_parsers/tests/xml.test.d.ts +0 -1
- package/dist/output_parsers/tests/xml.test.js +0 -81
- package/dist/prompts/tests/chat.mustache.test.d.ts +0 -1
- package/dist/prompts/tests/chat.mustache.test.js +0 -129
- package/dist/prompts/tests/chat.test.d.ts +0 -1
- package/dist/prompts/tests/chat.test.js +0 -557
- package/dist/prompts/tests/few_shot.test.d.ts +0 -1
- package/dist/prompts/tests/few_shot.test.js +0 -224
- package/dist/prompts/tests/pipeline.test.d.ts +0 -1
- package/dist/prompts/tests/pipeline.test.js +0 -101
- package/dist/prompts/tests/prompt.mustache.test.d.ts +0 -1
- package/dist/prompts/tests/prompt.mustache.test.js +0 -105
- package/dist/prompts/tests/prompt.test.d.ts +0 -1
- package/dist/prompts/tests/prompt.test.js +0 -78
- package/dist/prompts/tests/structured.test.d.ts +0 -1
- package/dist/prompts/tests/structured.test.js +0 -37
- package/dist/prompts/tests/template.test.d.ts +0 -1
- package/dist/prompts/tests/template.test.js +0 -24
- package/dist/runnables/tests/runnable.test.d.ts +0 -1
- package/dist/runnables/tests/runnable.test.js +0 -491
- package/dist/runnables/tests/runnable_binding.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_binding.test.js +0 -46
- package/dist/runnables/tests/runnable_branch.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_branch.test.js +0 -116
- package/dist/runnables/tests/runnable_graph.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_graph.test.js +0 -100
- package/dist/runnables/tests/runnable_history.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_history.test.js +0 -177
- package/dist/runnables/tests/runnable_interface.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_interface.test.js +0 -209
- package/dist/runnables/tests/runnable_map.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_map.test.js +0 -238
- package/dist/runnables/tests/runnable_passthrough.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_passthrough.test.js +0 -96
- package/dist/runnables/tests/runnable_remote.int.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_remote.int.test.js +0 -138
- package/dist/runnables/tests/runnable_remote.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_remote.test.js +0 -200
- package/dist/runnables/tests/runnable_retry.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_retry.test.js +0 -125
- package/dist/runnables/tests/runnable_stream_events.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_stream_events.test.js +0 -1013
- package/dist/runnables/tests/runnable_stream_events_v2.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_stream_events_v2.test.js +0 -2022
- package/dist/runnables/tests/runnable_stream_log.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_stream_log.test.js +0 -71
- package/dist/runnables/tests/runnable_tools.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_tools.test.js +0 -149
- package/dist/runnables/tests/runnable_tracing.int.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_tracing.int.test.js +0 -37
- package/dist/runnables/tests/runnable_with_fallbacks.test.d.ts +0 -1
- package/dist/runnables/tests/runnable_with_fallbacks.test.js +0 -36
- package/dist/singletons/tests/async_local_storage.test.d.ts +0 -1
- package/dist/singletons/tests/async_local_storage.test.js +0 -153
- package/dist/structured_query/tests/utils.test.d.ts +0 -1
- package/dist/structured_query/tests/utils.test.js +0 -47
- package/dist/tools/tests/tools.test.d.ts +0 -1
- package/dist/tools/tests/tools.test.js +0 -85
- package/dist/tracers/tests/langchain_tracer.int.test.d.ts +0 -1
- package/dist/tracers/tests/langchain_tracer.int.test.js +0 -74
- package/dist/tracers/tests/langsmith_interop.test.d.ts +0 -1
- package/dist/tracers/tests/langsmith_interop.test.js +0 -551
- package/dist/tracers/tests/tracer.test.d.ts +0 -1
- package/dist/tracers/tests/tracer.test.js +0 -378
- package/dist/utils/testing/tests/chatfake.test.d.ts +0 -1
- package/dist/utils/testing/tests/chatfake.test.js +0 -112
- package/dist/utils/tests/async_caller.test.d.ts +0 -1
- package/dist/utils/tests/async_caller.test.js +0 -27
- package/dist/utils/tests/enviroment.test.d.ts +0 -1
- package/dist/utils/tests/enviroment.test.js +0 -6
- package/dist/utils/tests/function_calling.test.d.ts +0 -1
- package/dist/utils/tests/function_calling.test.js +0 -107
- package/dist/utils/tests/math_utils.test.d.ts +0 -1
- package/dist/utils/tests/math_utils.test.js +0 -139
- package/dist/utils/tests/polyfill_stream.test.d.ts +0 -1
- package/dist/utils/tests/polyfill_stream.test.js +0 -15
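
Most of this release's footprint is housekeeping: the long tail of `+0 -N` entries above is compiled test files being dropped from the published `dist` output. The functional changes are concentrated in `callbacks/manager`, `messages/base`, `messages/tool`, `tools/index`, and `utils/testing`.

Judging purely by the touched file names, the `messages/tool` and `tools/index` additions line up with tool artifact support. As a hedged sketch only (this diff shows line counts, not the new source, so the exact API here is an assumption):

```typescript
// Hypothetical usage of tool artifacts, assuming the messages/tool and
// tools/index additions correspond to `responseFormat: "content_and_artifact"`.
import { tool } from "@langchain/core/tools";
import { z } from "zod";

const randomBytes = tool(
  // Return a [content, artifact] pair: `content` is what the model sees,
  // while the artifact rides along on the resulting ToolMessage for later use.
  async ({ n }) => [`generated ${n} random bytes`, crypto.getRandomValues(new Uint8Array(n))],
  {
    name: "random_bytes",
    description: "Generate n random bytes",
    schema: z.object({ n: z.number().int().positive() }),
    responseFormat: "content_and_artifact",
  }
);
```

The hunks below reproduce three of the removed test files. Matching them to the summary by deletion count, the 100-, 177-, and 209-line hunks appear to be `runnables/tests/runnable_graph.test.js`, `runnables/tests/runnable_history.test.js`, and `runnables/tests/runnable_interface.test.js`, each followed by a one-line `export {};` declaration stub.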
@@ -1,100 +0,0 @@
-/* eslint-disable no-promise-executor-return */
-/* eslint-disable @typescript-eslint/no-explicit-any */
-import { StringOutputParser } from "../../output_parsers/string.js";
-import { FakeLLM } from "../../utils/testing/index.js";
-import { PromptTemplate } from "../../prompts/prompt.js";
-import { CommaSeparatedListOutputParser } from "../../output_parsers/list.js";
-test("Test graph single runnable", async () => {
-    const jsonOutputParser = new StringOutputParser();
-    const graph = jsonOutputParser.getGraph();
-    const firstNode = graph.firstNode();
-    expect(firstNode).not.toBeNull();
-    const lastNode = graph.lastNode();
-    expect(lastNode).not.toBeNull();
-    expect(graph.edges.length).toBe(2);
-    expect(Object.keys(graph.nodes).length).toBe(3);
-});
-test("Test graph sequence", async () => {
-    const llm = new FakeLLM({});
-    const prompt = PromptTemplate.fromTemplate("Hello, {name}!");
-    const listParser = new CommaSeparatedListOutputParser();
-    const sequence = prompt.pipe(llm).pipe(listParser);
-    const graph = sequence.getGraph();
-    const firstNode = graph.firstNode();
-    expect(firstNode).not.toBeNull();
-    const lastNode = graph.lastNode();
-    expect(lastNode).not.toBeNull();
-    expect(graph.edges.length).toBe(4);
-    expect(Object.keys(graph.nodes).length).toBe(5);
-    expect(graph.toJSON()).toStrictEqual({
-        nodes: [
-            {
-                id: 0,
-                type: "schema",
-                data: {
-                    title: "PromptTemplateInput",
-                    $schema: "http://json-schema.org/draft-07/schema#",
-                },
-            },
-            {
-                id: 1,
-                type: "runnable",
-                data: {
-                    id: ["langchain_core", "prompts", "prompt", "PromptTemplate"],
-                    name: "PromptTemplate",
-                },
-            },
-            {
-                id: 2,
-                type: "runnable",
-                data: {
-                    id: ["langchain", "llms", "fake", "FakeLLM"],
-                    name: "FakeLLM",
-                },
-            },
-            {
-                id: 3,
-                type: "runnable",
-                data: {
-                    id: [
-                        "langchain_core",
-                        "output_parsers",
-                        "list",
-                        "CommaSeparatedListOutputParser",
-                    ],
-                    name: "CommaSeparatedListOutputParser",
-                },
-            },
-            {
-                id: 4,
-                type: "schema",
-                data: {
-                    title: "CommaSeparatedListOutputParserOutput",
-                    $schema: "http://json-schema.org/draft-07/schema#",
-                },
-            },
-        ],
-        edges: [
-            { source: 0, target: 1 },
-            { source: 1, target: 2 },
-            { source: 3, target: 4 },
-            { source: 2, target: 3 },
-        ],
-    });
-    expect(graph.drawMermaid())
-        .toEqual(`%%{init: {'flowchart': {'curve': 'linear'}}}%%
-graph TD;
-\tPromptTemplateInput[PromptTemplateInput]:::startclass;
-\tPromptTemplate([PromptTemplate]):::otherclass;
-\tFakeLLM([FakeLLM]):::otherclass;
-\tCommaSeparatedListOutputParser([CommaSeparatedListOutputParser]):::otherclass;
-\tCommaSeparatedListOutputParserOutput[CommaSeparatedListOutputParserOutput]:::endclass;
-\tPromptTemplateInput --> PromptTemplate;
-\tPromptTemplate --> FakeLLM;
-\tCommaSeparatedListOutputParser --> CommaSeparatedListOutputParserOutput;
-\tFakeLLM --> CommaSeparatedListOutputParser;
-\tclassDef startclass fill:#ffdfba;
-\tclassDef endclass fill:#baffc9;
-\tclassDef otherclass fill:#fad7de;
-`);
-});
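
The graph assertions above exercise public `Runnable` API rather than test-only helpers. A minimal standalone sketch of the same calls (import path assumes the published `@langchain/core/output_parsers` entrypoint):

```typescript
// Inspect a runnable's computation graph and render it as Mermaid.
import { StringOutputParser } from "@langchain/core/output_parsers";

const graph = new StringOutputParser().getGraph();
// A lone runnable is bracketed by its input and output schema nodes:
console.log(Object.keys(graph.nodes).length); // 3
console.log(graph.edges.length); // 2
console.log(graph.drawMermaid()); // Mermaid flowchart source, as asserted above
```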
@@ -1 +0,0 @@
-export {};
@@ -1,177 +0,0 @@
-import { AIMessage, AIMessageChunk, HumanMessage, } from "../../messages/index.js";
-import { RunnableLambda } from "../base.js";
-import { RunnableWithMessageHistory } from "../history.js";
-import { FakeChatMessageHistory, FakeLLM, FakeListChatMessageHistory, FakeListChatModel, FakeStreamingLLM, } from "../../utils/testing/index.js";
-import { ChatPromptTemplate, MessagesPlaceholder } from "../../prompts/chat.js";
-import { StringOutputParser } from "../../output_parsers/string.js";
-// For `BaseChatMessageHistory`
-async function getGetSessionHistory() {
-    const chatHistoryStore = {};
-    async function getSessionHistory(sessionId) {
-        if (!(sessionId in chatHistoryStore)) {
-            chatHistoryStore[sessionId] = new FakeChatMessageHistory();
-        }
-        return chatHistoryStore[sessionId];
-    }
-    return getSessionHistory;
-}
-// Extends `BaseListChatMessageHistory`
-async function getListSessionHistory() {
-    const chatHistoryStore = {};
-    async function getSessionHistory(sessionId) {
-        if (!(sessionId in chatHistoryStore)) {
-            chatHistoryStore[sessionId] = new FakeListChatMessageHistory();
-        }
-        return chatHistoryStore[sessionId];
-    }
-    return getSessionHistory;
-}
-test("Runnable with message history", async () => {
-    const runnable = new RunnableLambda({
-        func: (messages) => `you said: ${messages
-            .filter((m) => m._getType() === "human")
-            .map((m) => m.content)
-            .join("\n")}`,
-    });
-    const getMessageHistory = await getGetSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable,
-        config: {},
-        getMessageHistory,
-    });
-    const config = { configurable: { sessionId: "1" } };
-    let output = await withHistory.invoke([new HumanMessage("hello")], config);
-    expect(output).toBe("you said: hello");
-    output = await withHistory.invoke([new HumanMessage("good bye")], config);
-    expect(output).toBe("you said: hello\ngood bye");
-});
-test("Runnable with message history with a chat model", async () => {
-    const runnable = new FakeListChatModel({
-        responses: ["Hello world!"],
-    });
-    const getMessageHistory = await getGetSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable,
-        config: {},
-        getMessageHistory,
-    });
-    const config = { configurable: { sessionId: "2" } };
-    const output = await withHistory.invoke([new HumanMessage("hello")], config);
-    expect(output.content).toBe("Hello world!");
-    const stream = await withHistory.stream([new HumanMessage("good bye")], config);
-    const chunks = [];
-    for await (const chunk of stream) {
-        console.log(chunk);
-        chunks.push(chunk);
-    }
-    expect(chunks.map((chunk) => chunk.content).join("")).toEqual("Hello world!");
-    const sessionHistory = await getMessageHistory("2");
-    expect(await sessionHistory.getMessages()).toEqual([
-        new HumanMessage("hello"),
-        new AIMessage("Hello world!"),
-        new HumanMessage("good bye"),
-        new AIMessageChunk("Hello world!"),
-    ]);
-});
-test("Runnable with message history with a messages in, messages out chain", async () => {
-    const prompt = ChatPromptTemplate.fromMessages([
-        ["system", "you are a robot"],
-        ["placeholder", "{messages}"],
-    ]);
-    const model = new FakeListChatModel({
-        responses: ["So long and thanks for the fish!!"],
-    });
-    const runnable = prompt.pipe(model);
-    const getMessageHistory = await getGetSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable,
-        config: {},
-        getMessageHistory,
-    });
-    const config = { configurable: { sessionId: "2" } };
-    const output = await withHistory.invoke([new HumanMessage("hello")], config);
-    expect(output.content).toBe("So long and thanks for the fish!!");
-    const stream = await withHistory.stream([new HumanMessage("good bye")], config);
-    const chunks = [];
-    for await (const chunk of stream) {
-        console.log(chunk);
-        chunks.push(chunk);
-    }
-    expect(chunks.map((chunk) => chunk.content).join("")).toEqual("So long and thanks for the fish!!");
-    const sessionHistory = await getMessageHistory("2");
-    expect(await sessionHistory.getMessages()).toEqual([
-        new HumanMessage("hello"),
-        new AIMessage("So long and thanks for the fish!!"),
-        new HumanMessage("good bye"),
-        new AIMessageChunk("So long and thanks for the fish!!"),
-    ]);
-});
-test("Runnable with message history work with chat list memory", async () => {
-    const runnable = new RunnableLambda({
-        func: (messages) => `you said: ${messages
-            .filter((m) => m._getType() === "human")
-            .map((m) => m.content)
-            .join("\n")}`,
-    });
-    const getListMessageHistory = await getListSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable,
-        config: {},
-        getMessageHistory: getListMessageHistory,
-    });
-    const config = { configurable: { sessionId: "3" } };
-    let output = await withHistory.invoke([new HumanMessage("hello")], config);
-    expect(output).toBe("you said: hello");
-    output = await withHistory.invoke([new HumanMessage("good bye")], config);
-    expect(output).toBe("you said: hello\ngood bye");
-});
-test("Runnable with message history and RunnableSequence", async () => {
-    const prompt = ChatPromptTemplate.fromMessages([
-        ["ai", "You are a helpful assistant"],
-        new MessagesPlaceholder("history"),
-        ["human", "{input}"],
-    ]);
-    const model = new FakeLLM({});
-    const chain = prompt.pipe(model);
-    const getListMessageHistory = await getListSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable: chain,
-        config: {},
-        getMessageHistory: getListMessageHistory,
-        inputMessagesKey: "input",
-        historyMessagesKey: "history",
-    });
-    const config = { configurable: { sessionId: "4" } };
-    let output = await withHistory.invoke({ input: "hello" }, config);
-    expect(output).toBe("AI: You are a helpful assistant\nHuman: hello");
-    output = await withHistory.invoke({ input: "good bye" }, config);
-    expect(output).toBe(`AI: You are a helpful assistant
-Human: hello
-AI: AI: You are a helpful assistant
-Human: hello
-Human: good bye`);
-});
-test("Runnable with message history should stream through", async () => {
-    const prompt = ChatPromptTemplate.fromMessages([
-        ["ai", "You are a helpful assistant"],
-        new MessagesPlaceholder("history"),
-        ["human", "{input}"],
-    ]);
-    const model = new FakeStreamingLLM({});
-    const chain = prompt.pipe(model);
-    const getListMessageHistory = await getListSessionHistory();
-    const withHistory = new RunnableWithMessageHistory({
-        runnable: chain,
-        config: {},
-        getMessageHistory: getListMessageHistory,
-        inputMessagesKey: "input",
-        historyMessagesKey: "history",
-    }).pipe(new StringOutputParser());
-    const config = { configurable: { sessionId: "5" } };
-    const stream = await withHistory.stream({ input: "hello" }, config);
-    const chunks = [];
-    for await (const chunk of stream) {
-        chunks.push(chunk);
-    }
-    expect(chunks.length).toBeGreaterThan(1);
-});
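
Outside the `Fake*` fixtures, the same `RunnableWithMessageHistory` wiring works with the public in-memory history class. A sketch under the assumption that the `@langchain/core/runnables` and `@langchain/core/chat_history` entrypoints are available:

```typescript
import { RunnableLambda, RunnableWithMessageHistory } from "@langchain/core/runnables";
import { InMemoryChatMessageHistory } from "@langchain/core/chat_history";
import { BaseMessage, HumanMessage } from "@langchain/core/messages";

const histories: Record<string, InMemoryChatMessageHistory> = {};

const echo = RunnableLambda.from(
  (messages: BaseMessage[]) => `you said: ${messages.map((m) => m.content).join("\n")}`
);

const withHistory = new RunnableWithMessageHistory({
  runnable: echo,
  config: {},
  // Each session id lazily gets, and keeps, its own history instance.
  getMessageHistory: async (sessionId: string) =>
    (histories[sessionId] ??= new InMemoryChatMessageHistory()),
});

const config = { configurable: { sessionId: "1" } };
await withHistory.invoke([new HumanMessage("hello")], config); // "you said: hello"
```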
@@ -1 +0,0 @@
-export {};
@@ -1,209 +0,0 @@
-/* eslint-disable no-promise-executor-return */
-/* eslint-disable @typescript-eslint/no-explicit-any */
-import { StringOutputParser } from "../../output_parsers/string.js";
-import { PromptTemplate } from "../../prompts/prompt.js";
-import { RunnableSequence } from "../base.js";
-class IterableReadableStreamV0 extends ReadableStream {
-    constructor() {
-        super(...arguments);
-        Object.defineProperty(this, "reader", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-    }
-    ensureReader() {
-        if (!this.reader) {
-            this.reader = this.getReader();
-        }
-    }
-    async next() {
-        this.ensureReader();
-        try {
-            const result = await this.reader.read();
-            if (result.done)
-                this.reader.releaseLock(); // release lock when stream becomes closed
-            return {
-                done: result.done,
-                value: result.value, // Cloudflare Workers typing fix
-            };
-        }
-        catch (e) {
-            this.reader.releaseLock(); // release lock when stream becomes errored
-            throw e;
-        }
-    }
-    async return() {
-        this.ensureReader();
-        // If wrapped in a Node stream, cancel is already called.
-        if (this.locked) {
-            const cancelPromise = this.reader.cancel(); // cancel first, but don't await yet
-            this.reader.releaseLock(); // release lock first
-            await cancelPromise; // now await it
-        }
-        return { done: true, value: undefined }; // This cast fixes TS typing, and convention is to ignore final chunk value anyway
-    }
-    async throw(e) {
-        throw e;
-    }
-    [Symbol.asyncIterator]() {
-        return this;
-    }
-    static fromReadableStream(stream) {
-        // From https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams#reading_the_stream
-        const reader = stream.getReader();
-        return new IterableReadableStreamV0({
-            start(controller) {
-                return pump();
-                function pump() {
-                    return reader.read().then(({ done, value }) => {
-                        // When no more data needs to be consumed, close the stream
-                        if (done) {
-                            controller.close();
-                            return;
-                        }
-                        // Enqueue the next data chunk into our target stream
-                        controller.enqueue(value);
-                        return pump();
-                    });
-                }
-            },
-            cancel() {
-                reader.releaseLock();
-            },
-        });
-    }
-    static fromAsyncGenerator(generator) {
-        return new IterableReadableStreamV0({
-            async pull(controller) {
-                const { value, done } = await generator.next();
-                // When no more data needs to be consumed, close the stream
-                if (done) {
-                    controller.close();
-                }
-                // Fix: `else if (value)` will hang the streaming when nullish value (e.g. empty string) is pulled
-                controller.enqueue(value);
-            },
-        });
-    }
-}
-/**
- * Base class for all types of messages in a conversation. It includes
- * properties like `content`, `name`, and `additional_kwargs`. It also
- * includes methods like `toDict()` and `_getType()`.
- */
-class AIMessageV0 {
-    /** The type of the message. */
-    _getType() {
-        return "ai";
-    }
-    constructor(content) {
-        Object.defineProperty(this, "lc_namespace", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: ["langchain_core", "messages"]
-        });
-        Object.defineProperty(this, "lc_serializable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
-        /** The content of the message. */
-        Object.defineProperty(this, "content", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        /** The name of the message sender in a multi-user chat. */
-        Object.defineProperty(this, "name", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.content = content;
-    }
-}
-class StringPromptValueV0 {
-    constructor(value) {
-        Object.defineProperty(this, "lc_namespace", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: ["langchain_core", "prompt_values"]
-        });
-        Object.defineProperty(this, "lc_serializable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
-        Object.defineProperty(this, "value", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.value = value;
-    }
-    toString() {
-        return this.value;
-    }
-}
-class RunnableV0 {
-    constructor() {
-        Object.defineProperty(this, "lc_serializable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
-        Object.defineProperty(this, "lc_runnable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
-    }
-    async invoke(input, _options) {
-        return new AIMessageV0(input.toString());
-    }
-    async batch(_inputs, _options, _batchOptions) {
-        return [];
-    }
-    async stream(_input, _options) {
-        throw new Error("Not implemented");
-    }
-    // eslint-disable-next-line require-yield
-    async *transform(_generator, _options) {
-        throw new Error("Not implemented");
-    }
-    getName() {
-        return "TEST";
-    }
-    get lc_id() {
-        return ["TEST"];
-    }
-}
-test("Pipe with a class that implements a runnable interface", async () => {
-    const promptTemplate = PromptTemplate.fromTemplate("{input}");
-    const llm = new RunnableV0();
-    const outputParser = new StringOutputParser();
-    const runnable = promptTemplate.pipe(llm).pipe(outputParser);
-    const result = await runnable.invoke({ input: "Hello world!!" });
-    console.log(result);
-    expect(result).toBe("Hello world!!");
-});
-test("Runnable sequence with a class that implements a runnable interface", async () => {
-    const promptTemplate = PromptTemplate.fromTemplate("{input}");
-    const llm = new RunnableV0();
-    const outputParser = new StringOutputParser();
-    const runnable = RunnableSequence.from([promptTemplate, llm, outputParser]);
-    const result = await runnable.invoke({ input: "Hello sequence!!" });
-    console.log(result);
-    expect(result).toBe("Hello sequence!!");
-});
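
The point of the 209-line hunk above is that `.pipe()` duck-types: any class exposing `lc_runnable = true` plus the core methods is accepted as a runnable, no inheritance required. Compacted to the members that matter (same idea as the removed `RunnableV0`, trimmed for brevity):

```typescript
import { StringOutputParser } from "@langchain/core/output_parsers";
import { PromptTemplate } from "@langchain/core/prompts";

// Just enough message shape for StringOutputParser to read `.content`.
const aiMessageLike = (content: string) => ({
  lc_namespace: ["langchain_core", "messages"],
  lc_serializable: true,
  content,
  _getType: () => "ai" as const,
});

// Duck-typed runnable: `lc_runnable` plus the core methods is all `.pipe()` checks.
class EchoRunnable {
  lc_serializable = true;
  lc_runnable = true;
  get lc_id() { return ["EchoRunnable"]; }
  getName() { return "EchoRunnable"; }
  async invoke(input: { toString(): string }) { return aiMessageLike(input.toString()); }
  async batch() { return []; }
  async stream(): Promise<never> { throw new Error("Not implemented"); }
  // eslint-disable-next-line require-yield
  async *transform(): AsyncGenerator<never> { throw new Error("Not implemented"); }
}

const chain = PromptTemplate.fromTemplate("{input}")
  .pipe(new EchoRunnable() as any) // structural, not nominal, so TS needs a nudge
  .pipe(new StringOutputParser());
// As in the removed test: await chain.invoke({ input: "Hello world!!" }) -> "Hello world!!"
```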
@@ -1 +0,0 @@
-export {};