@getzep/zep-cloud 2.5.0 → 2.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/api/resources/document/client/Client.js +13 -13
  2. package/api/resources/graph/client/Client.js +3 -3
  3. package/api/resources/graph/resources/edge/client/Client.js +4 -4
  4. package/api/resources/graph/resources/episode/client/Client.js +4 -4
  5. package/api/resources/graph/resources/node/client/Client.js +3 -3
  6. package/api/resources/group/client/Client.js +6 -6
  7. package/api/resources/memory/client/Client.js +22 -22
  8. package/api/resources/memory/client/requests/AddMemoryRequest.d.ts +6 -0
  9. package/api/resources/user/client/Client.js +8 -8
  10. package/dist/api/resources/document/client/Client.js +13 -13
  11. package/dist/api/resources/graph/client/Client.js +3 -3
  12. package/dist/api/resources/graph/resources/edge/client/Client.js +4 -4
  13. package/dist/api/resources/graph/resources/episode/client/Client.js +4 -4
  14. package/dist/api/resources/graph/resources/node/client/Client.js +3 -3
  15. package/dist/api/resources/group/client/Client.js +6 -6
  16. package/dist/api/resources/memory/client/Client.js +22 -22
  17. package/dist/api/resources/memory/client/requests/AddMemoryRequest.d.ts +6 -0
  18. package/dist/api/resources/user/client/Client.js +8 -8
  19. package/dist/serialization/resources/memory/client/requests/AddMemoryRequest.d.ts +2 -0
  20. package/dist/serialization/resources/memory/client/requests/AddMemoryRequest.js +2 -0
  21. package/dist/version.d.ts +1 -1
  22. package/dist/version.js +1 -1
  23. package/examples/langgraph/.env.example +7 -0
  24. package/examples/langgraph/README.md +165 -0
  25. package/examples/langgraph/agent.ts +261 -0
  26. package/examples/langgraph/package-lock.json +1697 -0
  27. package/examples/langgraph/package.json +20 -0
  28. package/examples/langgraph/zep-memory.ts +271 -0
  29. package/package.json +1 -1
  30. package/serialization/resources/memory/client/requests/AddMemoryRequest.d.ts +2 -0
  31. package/serialization/resources/memory/client/requests/AddMemoryRequest.js +2 -0
  32. package/version.d.ts +1 -1
  33. package/version.js +1 -1
@@ -0,0 +1,261 @@
1
+ // agent.ts
2
+ import dotenv from "dotenv";
3
+
4
+ // Load environment variables from .env file
5
+ dotenv.config();
6
+
7
+ import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
8
+ import { ChatOpenAI } from "@langchain/openai";
9
+ import { HumanMessage, AIMessage, BaseMessage, SystemMessage } from "@langchain/core/messages";
10
+ import { ToolNode } from "@langchain/langgraph/prebuilt";
11
+ import { StateGraph, MessagesAnnotation } from "@langchain/langgraph";
12
+ import { fileURLToPath } from "url";
13
+ import path from "path";
14
+ import { Command } from "commander";
15
+ import { ZepMemory } from "./zep-memory";
16
+
17
+ // Define the tools for the agent to use
18
+ const tools = [new TavilySearchResults({ maxResults: 3 })];
19
+ const toolNode = new ToolNode(tools);
20
+
21
+ // Create a model and give it access to the tools
22
+ const model = new ChatOpenAI({
23
+ model: "gpt-4o-mini",
24
+ temperature: 0,
25
+ }).bindTools(tools);
26
+
27
+ // Define the function that determines whether to continue or not
28
+ function shouldContinue({ messages }: typeof MessagesAnnotation.State) {
29
+ const lastMessage = messages[messages.length - 1] as AIMessage;
30
+
31
+ // If the LLM makes a tool call, then we route to the "tools" node
32
+ if (lastMessage.tool_calls?.length) {
33
+ return "tools";
34
+ }
35
+ // Otherwise, we stop (reply to the user) using the special "__end__" node
36
+ return "__end__";
37
+ }
38
+
39
+ // Define the function that calls the model
40
+ async function callModel(state: typeof MessagesAnnotation.State) {
41
+ const response = await model.invoke(state.messages);
42
+ return { messages: [...state.messages, response] };
43
+ }
44
+
45
+ // Define the workflow as a graph
46
+ const workflow = new StateGraph(MessagesAnnotation)
47
+ .addNode("agent", callModel)
48
+ .addNode("tools", toolNode)
49
+ .addEdge("__start__", "agent")
50
+ .addConditionalEdges("agent", shouldContinue, ["tools", "__end__"])
51
+ .addEdge("tools", "agent");
52
+
53
+ // Compile the graph
54
+ export const graph = workflow.compile();
55
+
56
+ // Parse command line arguments using Commander
57
+ function parseCommandLineArgs() {
58
+ const program = new Command();
59
+
60
+ program
61
+ .name("langgraph-agent")
62
+ .description("LangGraph CLI Agent with Zep memory integration")
63
+ .version("1.0.0")
64
+ .option("--userId <id>", "User ID to associate with the conversation")
65
+ .option("--user-id <id>", "User ID to associate with the conversation (alternative format)")
66
+ .option("--sessionId <id>", "Session ID for the conversation")
67
+ .option("--session-id <id>", "Session ID for the conversation (alternative format)")
68
+ .option("--system-message <message>", "Custom system message to use")
69
+ .option("--debug", "Enable debug mode with additional logging");
70
+
71
+ program.parse();
72
+
73
+ const options = program.opts();
74
+
75
+ // Handle alternative formats and naming
76
+ return {
77
+ userId: options.userId,
78
+ sessionId: options.sessionId,
79
+ systemMessage: options.systemMessage || "You are a helpful assistant. Answer the user's questions to the best of your ability.",
80
+ debug: !!options.debug
81
+ };
82
+ }
83
+
84
+ // Check if this file is being run directly
85
+ const isMainModule = () => {
86
+ if (typeof require !== 'undefined' && require.main === module) {
87
+ return true;
88
+ }
89
+
90
+ if (import.meta.url) {
91
+ try {
92
+ const currentFilePath = fileURLToPath(import.meta.url);
93
+ const currentFileName = path.basename(currentFilePath);
94
+ // Check if this script was run directly with node/tsx
95
+ return process.argv[1] && process.argv[1].endsWith(currentFileName);
96
+ } catch (e) {
97
+ return false;
98
+ }
99
+ }
100
+
101
+ return false;
102
+ };
103
+
104
+ // Parse command line arguments once and store the result
105
+ const args = isMainModule() ? parseCommandLineArgs() : {
106
+ userId: undefined,
107
+ sessionId: undefined,
108
+ systemMessage: "You are a helpful assistant. Answer the user's questions to the best of your ability.",
109
+ debug: false
110
+ };
111
+
112
+ // Initialize Zep memory if API key is available
113
+ let zepMemory: ZepMemory | undefined;
114
+ if (process.env.ZEP_API_KEY) {
115
+ zepMemory = new ZepMemory(process.env.ZEP_API_KEY, args.sessionId, args.userId);
116
+
117
+ if (args.debug) {
118
+ console.log("Zep memory initialized with session ID:", zepMemory.getSessionId());
119
+ if (args.userId) {
120
+ console.log("Using user ID:", args.userId);
121
+ }
122
+ }
123
+ }
124
+
125
+ // CLI interface
126
+ if (isMainModule()) {
127
+ // This will run when the script is executed directly
128
+ const runCLI = async () => {
129
+ const systemMessage = args.systemMessage;
130
+
131
+ console.log("🦜🔗 LangGraph Agent CLI");
132
+ if (args.userId) {
133
+ console.log(`User ID: ${args.userId}`);
134
+ }
135
+ if (args.sessionId) {
136
+ console.log(`Session ID: ${args.sessionId}`);
137
+ }
138
+ console.log("Type 'exit' to quit the application");
139
+ console.log("------------------------------");
140
+
141
+ // Create a readline interface
142
+ const readline = await import("readline");
143
+ const rl = readline.createInterface({
144
+ input: process.stdin,
145
+ output: process.stdout,
146
+ });
147
+
148
+ // Create a new chat session
149
+ const config = { configurable: { sessionId: args.sessionId || "cli-session" } };
150
+ let state = { messages: [] as BaseMessage[] };
151
+
152
+ // Initialize Zep memory if available
153
+ if (zepMemory) {
154
+ try {
155
+ await zepMemory.initialize(args.userId);
156
+ console.log("Connected to Zep memory service");
157
+
158
+ // Try to load previous messages from Zep memory
159
+ try {
160
+ const previousMessages = await zepMemory.getMessages(10);
161
+ if (previousMessages.length > 0) {
162
+ state.messages = previousMessages;
163
+ console.log(`Loaded ${previousMessages.length} messages from previous conversation`);
164
+ }
165
+ } catch (error) {
166
+ console.error("Failed to load previous messages:", error);
167
+ console.log("Starting a new conversation");
168
+ }
169
+ } catch (error) {
170
+ console.error("Failed to initialize Zep memory:", error);
171
+ console.log("Continuing without memory persistence");
172
+ zepMemory = undefined;
173
+ }
174
+ }
175
+
176
+ const askQuestion = () => {
177
+ rl.question("\nYou: ", async (input) => {
178
+ if (input.toLowerCase() === "exit") {
179
+ console.log("Goodbye!");
180
+ rl.close();
181
+ return;
182
+ }
183
+
184
+ // Add the user's message to the state
185
+ const userMessage = new HumanMessage(input);
186
+ let systemMessageWithContext = systemMessage;
187
+
188
+ // Persist user message to Zep memory if available
189
+ let context: string | undefined;
190
+ if (zepMemory) {
191
+ try {
192
+ // Add the user message to Zep memory and get the context, if available
193
+ context = await zepMemory.addMessage(userMessage, true);
194
+ } catch (error) {
195
+ console.error("Failed to persist user message to Zep memory:", error);
196
+ }
197
+
198
+ try {
199
+ if (args.debug) {
200
+ console.log(context);
201
+ }
202
+ if (context) {
203
+ systemMessageWithContext = systemMessage + "\n" + context;
204
+ }
205
+ } catch (error) {
206
+ console.error("Failed to get memory with context:", error);
207
+ }
208
+ }
209
+
210
+ // Get the last 5 messages from the current state (excluding any previous system messages)
211
+ const previousMessages = state.messages
212
+ .filter(msg => !(msg instanceof SystemMessage))
213
+ .slice(-5);
214
+
215
+ // Create new state with system message at the beginning followed by the history messages
216
+ state.messages = [
217
+ new SystemMessage(systemMessageWithContext),
218
+ ...previousMessages,
219
+ userMessage
220
+ ];
221
+
222
+ try {
223
+ // Process the input through the graph
224
+ const result = await graph.invoke(state, config);
225
+
226
+ // Extract the AI's response using instanceof instead of _getType()
227
+ const aiMessages = result.messages.filter((msg) => msg instanceof AIMessage);
228
+ const lastAIMessage = aiMessages[aiMessages.length - 1] as AIMessage;
229
+
230
+ console.log(`\nAI: ${lastAIMessage.content}`);
231
+
232
+ // Persist AI message to Zep memory if available
233
+ if (zepMemory) {
234
+ try {
235
+ await zepMemory.addMessage(lastAIMessage);
236
+ } catch (error) {
237
+ console.error("Failed to persist AI message to Zep memory:", error);
238
+ }
239
+ }
240
+
241
+ // Update the state for the next interaction
242
+ // We don't want to completely replace the state, just update the messages
243
+ // to maintain our structure with the system message at the beginning
244
+ const resultMessages = result.messages.filter(msg => !(msg instanceof SystemMessage));
245
+ state = {
246
+ messages: resultMessages
247
+ };
248
+ } catch (error) {
249
+ console.error("Error:", error);
250
+ }
251
+
252
+ // Ask for the next input
253
+ askQuestion();
254
+ });
255
+ };
256
+
257
+ askQuestion();
258
+ };
259
+
260
+ runCLI().catch(console.error);
261
+ }