@langchain/core 0.2.13-rc.2 → 0.2.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/runnables/graph.cjs
CHANGED
@@ -99,14 +99,19 @@ class Graph {
         // Filter out edges connected to the node
         this.edges = this.edges.filter((edge) => edge.source !== node.id && edge.target !== node.id);
     }
-    addEdge(source, target, data) {
+    addEdge(source, target, data, conditional) {
         if (this.nodes[source.id] === undefined) {
             throw new Error(`Source node ${source.id} not in graph`);
         }
         if (this.nodes[target.id] === undefined) {
             throw new Error(`Target node ${target.id} not in graph`);
         }
-        const edge = {
+        const edge = {
+            source: source.id,
+            target: target.id,
+            data,
+            conditional,
+        };
         this.edges.push(edge);
         return edge;
     }
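The functional change in this hunk is the new optional fourth argument, conditional, which addEdge now stores on the edge object alongside source, target, and data. A minimal TypeScript sketch of calling it follows; the entrypoints (@langchain/core/runnables and @langchain/core/runnables/graph) and the RunnableLambda helper are assumptions for illustration, not part of this diff.

// Sketch only: the imports and RunnableLambda helper are assumed for illustration.
import { RunnableLambda } from "@langchain/core/runnables";
import { Graph } from "@langchain/core/runnables/graph";

const graph = new Graph();
const router = graph.addNode(RunnableLambda.from(async (x: string) => x), "router");
const tools = graph.addNode(RunnableLambda.from(async (x: string) => x), "tools");

// New fourth argument marks the edge as conditional; per the hunk above, the
// returned edge object now carries source, target, data, and conditional.
const edge = graph.addEdge(router, tools, "route", true);
console.log(edge.conditional); // true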
@@ -130,13 +135,37 @@ class Graph {
         });
         return found[0];
     }
-
-
+    /**
+     * Add all nodes and edges from another graph.
+     * Note this doesn't check for duplicates, nor does it connect the graphs.
+     */
+    extend(graph, prefix = "") {
+        let finalPrefix = prefix;
+        const nodeIds = Object.values(graph.nodes).map((node) => node.id);
+        if (nodeIds.every(uuid_1.validate)) {
+            finalPrefix = "";
+        }
+        const prefixed = (id) => {
+            return finalPrefix ? `${finalPrefix}:${id}` : id;
+        };
         Object.entries(graph.nodes).forEach(([key, value]) => {
-            this.nodes[key] = value;
+            this.nodes[prefixed(key)] = { ...value, id: prefixed(key) };
+        });
+        const newEdges = graph.edges.map((edge) => {
+            return {
+                ...edge,
+                source: prefixed(edge.source),
+                target: prefixed(edge.target),
+            };
         });
         // Add all edges from the other graph
-        this.edges = [...this.edges, ...
+        this.edges = [...this.edges, ...newEdges];
+        const first = graph.firstNode();
+        const last = graph.lastNode();
+        return [
+            first ? { id: prefixed(first.id), data: first.data } : undefined,
+            last ? { id: prefixed(last.id), data: last.data } : undefined,
+        ];
     }
     trimFirstNode() {
         const firstNode = this.firstNode();
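The new extend() method copies another graph's nodes and edges into this one, namespacing ids as prefix:id unless every id is already a UUID, and returns the copied graph's (prefixed) first and last nodes. A small TypeScript sketch of that behavior, under the same assumed entrypoints as above:

// Sketch only: illustrates the prefixing rule in the extend() hunk above.
import { RunnableLambda } from "@langchain/core/runnables";
import { Graph } from "@langchain/core/runnables/graph";

const parent = new Graph();
const child = new Graph();
child.addNode(RunnableLambda.from(async (x: string) => x), "step");

// "step" is not a UUID, so the prefix is kept and the copied node id becomes "sub:step".
// If every id in `child` were a UUID, the prefix would be dropped and ids copied unchanged.
const [first, last] = parent.extend(child, "sub");
console.log(Object.keys(parent.nodes)); // ["sub:step"]
console.log(first?.id, last?.id); // "sub:step" "sub:step"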
@@ -6,10 +6,17 @@ export declare class Graph {
     toJSON(): Record<string, any>;
     addNode(data: RunnableInterface | RunnableIOSchema, id?: string): Node;
     removeNode(node: Node): void;
-    addEdge(source: Node, target: Node, data?: string): Edge;
+    addEdge(source: Node, target: Node, data?: string, conditional?: boolean): Edge;
     firstNode(): Node | undefined;
     lastNode(): Node | undefined;
-
+    /**
+     * Add all nodes and edges from another graph.
+     * Note this doesn't check for duplicates, nor does it connect the graphs.
+     */
+    extend(graph: Graph, prefix?: string): ({
+        id: string;
+        data: RunnableIOSchema | RunnableInterface<any, any, import("./config.js").RunnableConfig>;
+    } | undefined)[];
    trimFirstNode(): void;
    trimLastNode(): void;
    drawMermaid(params?: {
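Since the doc comment notes that extend() neither deduplicates nor connects the graphs, a caller would stitch them together itself using the returned entries, which have the same { id, data } shape as a Node. A hedged sketch of that pattern (same assumed imports as above; the node names are made up):

// Sketch only: wiring a parent graph to an extended subgraph by hand.
import { RunnableLambda } from "@langchain/core/runnables";
import { Graph } from "@langchain/core/runnables/graph";

const parent = new Graph();
const sub = new Graph();
const entry = parent.addNode(RunnableLambda.from(async (x: string) => x), "entry");
sub.addNode(RunnableLambda.from(async (x: string) => x.toUpperCase()), "shout");

const [subFirst] = parent.extend(sub, "sub");
if (subFirst) {
  // extend() copied the nodes and edges but added no connecting edge, so add one
  // here, flagged as conditional via the new fourth argument.
  parent.addEdge(entry, subFirst, undefined, true);
}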
package/dist/runnables/graph.js
CHANGED
@@ -96,14 +96,19 @@ export class Graph {
         // Filter out edges connected to the node
         this.edges = this.edges.filter((edge) => edge.source !== node.id && edge.target !== node.id);
     }
-    addEdge(source, target, data) {
+    addEdge(source, target, data, conditional) {
         if (this.nodes[source.id] === undefined) {
             throw new Error(`Source node ${source.id} not in graph`);
         }
         if (this.nodes[target.id] === undefined) {
             throw new Error(`Target node ${target.id} not in graph`);
         }
-        const edge = {
+        const edge = {
+            source: source.id,
+            target: target.id,
+            data,
+            conditional,
+        };
         this.edges.push(edge);
         return edge;
     }
@@ -127,13 +132,37 @@ export class Graph {
         });
         return found[0];
     }
-
-
+    /**
+     * Add all nodes and edges from another graph.
+     * Note this doesn't check for duplicates, nor does it connect the graphs.
+     */
+    extend(graph, prefix = "") {
+        let finalPrefix = prefix;
+        const nodeIds = Object.values(graph.nodes).map((node) => node.id);
+        if (nodeIds.every(isUuid)) {
+            finalPrefix = "";
+        }
+        const prefixed = (id) => {
+            return finalPrefix ? `${finalPrefix}:${id}` : id;
+        };
         Object.entries(graph.nodes).forEach(([key, value]) => {
-            this.nodes[key] = value;
+            this.nodes[prefixed(key)] = { ...value, id: prefixed(key) };
+        });
+        const newEdges = graph.edges.map((edge) => {
+            return {
+                ...edge,
+                source: prefixed(edge.source),
+                target: prefixed(edge.target),
+            };
         });
         // Add all edges from the other graph
-        this.edges = [...this.edges, ...
+        this.edges = [...this.edges, ...newEdges];
+        const first = graph.firstNode();
+        const last = graph.lastNode();
+        return [
+            first ? { id: prefixed(first.id), data: first.data } : undefined,
+            last ? { id: prefixed(last.id), data: last.data } : undefined,
+        ];
     }
     trimFirstNode() {
         const firstNode = this.firstNode();
@@ -4,14 +4,6 @@ import { StringOutputParser } from "../../output_parsers/string.js";
 import { FakeLLM } from "../../utils/testing/index.js";
 import { PromptTemplate } from "../../prompts/prompt.js";
 import { CommaSeparatedListOutputParser } from "../../output_parsers/list.js";
-// import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
-// import { ChatOpenAI } from "@langchain/openai";
-// import { MemorySaver } from "@langchain/langgraph";
-// // Define the tools for the agent to use
-// import { HumanMessage } from "@langchain/core/messages";
-// import { ToolNode } from "@langchain/langgraph/prebuilt";
-// import { END, START, StateGraph, StateGraphArgs } from "@langchain/langgraph";
-// import * as fs from "fs";
 test("Test graph single runnable", async () => {
   const jsonOutputParser = new StringOutputParser();
   const graph = jsonOutputParser.getGraph();
@@ -106,70 +98,3 @@ graph TD;
 \tclassDef otherclass fill:#fad7de;
 `);
 });
-// test.only("langgraph", async () => {
-//   // Define the state interface
-//   interface AgentState {
-//     messages: HumanMessage[];
-//   }
-//   // Define the graph state
-//   const graphState: StateGraphArgs<AgentState>["channels"] = {
-//     messages: {
-//       value: (x: HumanMessage[], y: HumanMessage[]) => x.concat(y),
-//       default: () => [],
-//     },
-//   };
-//   // Define the tools for the agent to use
-//   const tools = [new TavilySearchResults({ maxResults: 1 })];
-//   const toolNode = new ToolNode<AgentState>(tools);
-//   const model = new ChatOpenAI({ temperature: 0 }).bindTools(tools);
-//   // Define the function that determines whether to continue or not
-//   function shouldContinue(state: AgentState): "tools" | typeof END {
-//     const messages = state.messages;
-//     const lastMessage = messages[messages.length - 1];
-//     // If the LLM makes a tool call, then we route to the "tools" node
-//     if (lastMessage.additional_kwargs.tool_calls) {
-//       return "tools";
-//     }
-//     // Otherwise, we stop (reply to the user)
-//     return END;
-//   }
-//   // Define the function that calls the model
-//   async function callModel(state: AgentState) {
-//     const messages = state.messages;
-//     const response = await model.invoke(messages);
-//     // We return a list, because this will get added to the existing list
-//     return { messages: [response] };
-//   }
-//   // Define a new graph
-//   const workflow = new StateGraph<AgentState>({ channels: graphState })
-//     .addNode("agent", callModel)
-//     .addNode("tools", toolNode)
-//     .addEdge(START, "agent")
-//     .addConditionalEdges("agent", shouldContinue)
-//     .addEdge("tools", "agent");
-//   // Initialize memory to persist state between graph runs
-//   const checkpointer = new MemorySaver();
-//   // Finally, we compile it!
-//   // This compiles it into a LangChain Runnable.
-//   // Note that we're (optionally) passing the memory when compiling the graph
-//   const app = workflow.compile({ checkpointer });
-//   const graph = app.getGraph();
-//   const blob = await graph.drawMermaidPng();
-//   const arrayBuffer = await blob.arrayBuffer();
-//   fs.writeFileSync("/Users/jacoblee/langchain/langchainjs/langchain-core/src/runnables/tests/data/graph-mermaid.png", Buffer.from(arrayBuffer));
-//   // // Now it's time to use!
-//   // const agentFinalState = await agent.invoke(
-//   //   { messages: [new HumanMessage("what is the weather in sf")] },
-//   //   { configurable: { thread_id: "42" } },
-//   // );
-//   // console.log(
-//   //   agentFinalState.messages[agentFinalState.messages.length - 1].content,
-//   // );
-//   // const agentNextState = await agent.invoke(
-//   //   { messages: [new HumanMessage("what about ny")] },
-//   //   { configurable: { thread_id: "42" } },
-//   // );
-//   // console.log(
-//   //   agentNextState.messages[agentNextState.messages.length - 1].content,
-//   // );
-// })