mem0ai-azure-mysql 0.1.115__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. mem0/__init__.py +6 -0
  2. mem0/client/__init__.py +0 -0
  3. mem0/client/main.py +1535 -0
  4. mem0/client/project.py +860 -0
  5. mem0/client/utils.py +29 -0
  6. mem0/configs/__init__.py +0 -0
  7. mem0/configs/base.py +90 -0
  8. mem0/configs/dbs/__init__.py +4 -0
  9. mem0/configs/dbs/base.py +41 -0
  10. mem0/configs/dbs/mysql.py +25 -0
  11. mem0/configs/embeddings/__init__.py +0 -0
  12. mem0/configs/embeddings/base.py +108 -0
  13. mem0/configs/enums.py +7 -0
  14. mem0/configs/llms/__init__.py +0 -0
  15. mem0/configs/llms/base.py +152 -0
  16. mem0/configs/prompts.py +333 -0
  17. mem0/configs/vector_stores/__init__.py +0 -0
  18. mem0/configs/vector_stores/azure_ai_search.py +59 -0
  19. mem0/configs/vector_stores/baidu.py +29 -0
  20. mem0/configs/vector_stores/chroma.py +40 -0
  21. mem0/configs/vector_stores/elasticsearch.py +47 -0
  22. mem0/configs/vector_stores/faiss.py +39 -0
  23. mem0/configs/vector_stores/langchain.py +32 -0
  24. mem0/configs/vector_stores/milvus.py +43 -0
  25. mem0/configs/vector_stores/mongodb.py +25 -0
  26. mem0/configs/vector_stores/opensearch.py +41 -0
  27. mem0/configs/vector_stores/pgvector.py +37 -0
  28. mem0/configs/vector_stores/pinecone.py +56 -0
  29. mem0/configs/vector_stores/qdrant.py +49 -0
  30. mem0/configs/vector_stores/redis.py +26 -0
  31. mem0/configs/vector_stores/supabase.py +44 -0
  32. mem0/configs/vector_stores/upstash_vector.py +36 -0
  33. mem0/configs/vector_stores/vertex_ai_vector_search.py +27 -0
  34. mem0/configs/vector_stores/weaviate.py +43 -0
  35. mem0/dbs/__init__.py +4 -0
  36. mem0/dbs/base.py +68 -0
  37. mem0/dbs/configs.py +21 -0
  38. mem0/dbs/mysql.py +321 -0
  39. mem0/embeddings/__init__.py +0 -0
  40. mem0/embeddings/aws_bedrock.py +100 -0
  41. mem0/embeddings/azure_openai.py +43 -0
  42. mem0/embeddings/base.py +31 -0
  43. mem0/embeddings/configs.py +30 -0
  44. mem0/embeddings/gemini.py +39 -0
  45. mem0/embeddings/huggingface.py +41 -0
  46. mem0/embeddings/langchain.py +35 -0
  47. mem0/embeddings/lmstudio.py +29 -0
  48. mem0/embeddings/mock.py +11 -0
  49. mem0/embeddings/ollama.py +53 -0
  50. mem0/embeddings/openai.py +49 -0
  51. mem0/embeddings/together.py +31 -0
  52. mem0/embeddings/vertexai.py +54 -0
  53. mem0/graphs/__init__.py +0 -0
  54. mem0/graphs/configs.py +96 -0
  55. mem0/graphs/neptune/__init__.py +0 -0
  56. mem0/graphs/neptune/base.py +410 -0
  57. mem0/graphs/neptune/main.py +372 -0
  58. mem0/graphs/tools.py +371 -0
  59. mem0/graphs/utils.py +97 -0
  60. mem0/llms/__init__.py +0 -0
  61. mem0/llms/anthropic.py +64 -0
  62. mem0/llms/aws_bedrock.py +270 -0
  63. mem0/llms/azure_openai.py +114 -0
  64. mem0/llms/azure_openai_structured.py +76 -0
  65. mem0/llms/base.py +32 -0
  66. mem0/llms/configs.py +34 -0
  67. mem0/llms/deepseek.py +85 -0
  68. mem0/llms/gemini.py +201 -0
  69. mem0/llms/groq.py +88 -0
  70. mem0/llms/langchain.py +65 -0
  71. mem0/llms/litellm.py +87 -0
  72. mem0/llms/lmstudio.py +53 -0
  73. mem0/llms/ollama.py +94 -0
  74. mem0/llms/openai.py +124 -0
  75. mem0/llms/openai_structured.py +52 -0
  76. mem0/llms/sarvam.py +89 -0
  77. mem0/llms/together.py +88 -0
  78. mem0/llms/vllm.py +89 -0
  79. mem0/llms/xai.py +52 -0
  80. mem0/memory/__init__.py +0 -0
  81. mem0/memory/base.py +63 -0
  82. mem0/memory/graph_memory.py +632 -0
  83. mem0/memory/main.py +1843 -0
  84. mem0/memory/memgraph_memory.py +630 -0
  85. mem0/memory/setup.py +56 -0
  86. mem0/memory/storage.py +218 -0
  87. mem0/memory/telemetry.py +90 -0
  88. mem0/memory/utils.py +133 -0
  89. mem0/proxy/__init__.py +0 -0
  90. mem0/proxy/main.py +194 -0
  91. mem0/utils/factory.py +132 -0
  92. mem0/vector_stores/__init__.py +0 -0
  93. mem0/vector_stores/azure_ai_search.py +383 -0
  94. mem0/vector_stores/baidu.py +368 -0
  95. mem0/vector_stores/base.py +58 -0
  96. mem0/vector_stores/chroma.py +229 -0
  97. mem0/vector_stores/configs.py +60 -0
  98. mem0/vector_stores/elasticsearch.py +235 -0
  99. mem0/vector_stores/faiss.py +473 -0
  100. mem0/vector_stores/langchain.py +179 -0
  101. mem0/vector_stores/milvus.py +245 -0
  102. mem0/vector_stores/mongodb.py +293 -0
  103. mem0/vector_stores/opensearch.py +281 -0
  104. mem0/vector_stores/pgvector.py +294 -0
  105. mem0/vector_stores/pinecone.py +373 -0
  106. mem0/vector_stores/qdrant.py +240 -0
  107. mem0/vector_stores/redis.py +295 -0
  108. mem0/vector_stores/supabase.py +237 -0
  109. mem0/vector_stores/upstash_vector.py +293 -0
  110. mem0/vector_stores/vertex_ai_vector_search.py +629 -0
  111. mem0/vector_stores/weaviate.py +316 -0
  112. mem0ai_azure_mysql-0.1.115.data/data/README.md +169 -0
  113. mem0ai_azure_mysql-0.1.115.dist-info/METADATA +224 -0
  114. mem0ai_azure_mysql-0.1.115.dist-info/RECORD +116 -0
  115. mem0ai_azure_mysql-0.1.115.dist-info/WHEEL +4 -0
  116. mem0ai_azure_mysql-0.1.115.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,410 @@
1
+ import logging
2
+ from abc import ABC, abstractmethod
3
+
4
+ from mem0.memory.utils import format_entities
5
+
6
+ try:
7
+ from rank_bm25 import BM25Okapi
8
+ except ImportError:
9
+ raise ImportError("rank_bm25 is not installed. Please install it using pip install rank-bm25")
10
+
11
+ from mem0.graphs.tools import (
12
+ DELETE_MEMORY_STRUCT_TOOL_GRAPH,
13
+ DELETE_MEMORY_TOOL_GRAPH,
14
+ EXTRACT_ENTITIES_STRUCT_TOOL,
15
+ EXTRACT_ENTITIES_TOOL,
16
+ RELATIONS_STRUCT_TOOL,
17
+ RELATIONS_TOOL,
18
+ )
19
+ from mem0.graphs.utils import EXTRACT_RELATIONS_PROMPT, get_delete_messages
20
+ from mem0.utils.factory import EmbedderFactory, LlmFactory
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
class NeptuneBase(ABC):
    """
    Abstract base class for Neptune (Neptune Analytics and Neptune DB) memory
    stores that use OpenCypher to store/retrieve graph data.

    Subclasses supply the provider-specific OpenCypher text via the abstract
    ``*_cypher`` methods; this class implements the shared orchestration:
    LLM-based entity and relation extraction, embedding-based node search,
    and the add/search/delete/get_all flows.

    NOTE(review): the instance methods assume subclasses set ``self.llm``,
    ``self.llm_provider``, ``self.embedding_model``, ``self.graph`` and
    ``self.config`` — confirm in subclass constructors.
    """

    @staticmethod
    def _create_embedding_model(config):
        """
        Build the embedder used for the memory store.

        Args:
            config: mem0 configuration object with an ``embedder`` section.

        Returns:
            The Embedder model used for the memory store.
        """
        return EmbedderFactory.create(
            config.embedder.provider,
            config.embedder.config,
            {"enable_embeddings": True},
        )

    @staticmethod
    def _create_llm(config, llm_provider):
        """
        Build the LLM used for entity/relation extraction.

        Args:
            config: mem0 configuration object with an ``llm`` section.
            llm_provider: name of the LLM provider to instantiate.

        Returns:
            The LLM model used for the memory store.
        """
        return LlmFactory.create(llm_provider, config.llm.config)

    def add(self, data, filters):
        """
        Adds data to the graph.

        Args:
            data (str): The data to add to the graph.
            filters (dict): A dictionary containing filters to be applied during
                the addition. Must contain a ``user_id`` key.

        Returns:
            dict: ``{"deleted_entities": [...], "added_entities": [...]}`` with
            the raw graph query results for each removed/added relationship.
        """
        entity_type_map = self._retrieve_nodes_from_data(data, filters)
        to_be_added = self._establish_nodes_relations_from_data(data, filters, entity_type_map)
        search_output = self._search_graph_db(node_list=list(entity_type_map.keys()), filters=filters)
        to_be_deleted = self._get_delete_entities_from_search_output(search_output, data, filters)

        # Delete stale relationships first so re-added facts are not removed.
        deleted_entities = self._delete_entities(to_be_deleted, filters["user_id"])
        added_entities = self._add_entities(to_be_added, filters["user_id"], entity_type_map)

        return {"deleted_entities": deleted_entities, "added_entities": added_entities}

    def _retrieve_nodes_from_data(self, data, filters):
        """
        Extract all entities mentioned in the query.

        Uses the configured LLM with the entity-extraction tool; entity names
        and types are normalized to lowercase snake_case.

        Returns:
            dict: mapping of normalized entity name -> normalized entity type.
        """
        _tools = [EXTRACT_ENTITIES_TOOL]
        # Structured-output providers require the structured tool variant.
        if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
            _tools = [EXTRACT_ENTITIES_STRUCT_TOOL]
        search_results = self.llm.generate_response(
            messages=[
                {
                    "role": "system",
                    "content": f"You are a smart assistant who understands entities and their types in a given text. If user message contains self reference such as 'I', 'me', 'my' etc. then use {filters['user_id']} as the source entity. Extract all the entities from the text. ***DO NOT*** answer the question itself if the given text is a question.",
                },
                {"role": "user", "content": data},
            ],
            tools=_tools,
        )

        entity_type_map = {}

        try:
            for tool_call in search_results["tool_calls"]:
                if tool_call["name"] != "extract_entities":
                    continue
                for item in tool_call["arguments"]["entities"]:
                    entity_type_map[item["entity"]] = item["entity_type"]
        except Exception as e:
            # Malformed LLM output is tolerated: an empty map just means no
            # entities are extracted for this call.
            logger.exception(
                f"Error in search tool: {e}, llm_provider={self.llm_provider}, search_results={search_results}"
            )

        entity_type_map = {k.lower().replace(" ", "_"): v.lower().replace(" ", "_") for k, v in entity_type_map.items()}
        return entity_type_map

    def _establish_nodes_relations_from_data(self, data, filters, entity_type_map):
        """
        Establish relations among the extracted nodes.

        Returns:
            list[dict]: relations with "source", "relationship" and
            "destination" keys, normalized to lowercase snake_case.
        """
        if self.config.graph_store.custom_prompt:
            messages = [
                {
                    "role": "system",
                    "content": EXTRACT_RELATIONS_PROMPT.replace("USER_ID", filters["user_id"]).replace(
                        "CUSTOM_PROMPT", f"4. {self.config.graph_store.custom_prompt}"
                    ),
                },
                {"role": "user", "content": data},
            ]
        else:
            messages = [
                {
                    "role": "system",
                    "content": EXTRACT_RELATIONS_PROMPT.replace("USER_ID", filters["user_id"]),
                },
                {
                    "role": "user",
                    "content": f"List of entities: {list(entity_type_map.keys())}. \n\nText: {data}",
                },
            ]

        _tools = [RELATIONS_TOOL]
        if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
            _tools = [RELATIONS_STRUCT_TOOL]

        extracted_entities = self.llm.generate_response(
            messages=messages,
            tools=_tools,
        )

        entities = []
        if extracted_entities["tool_calls"]:
            entities = extracted_entities["tool_calls"][0]["arguments"]["entities"]

        entities = self._remove_spaces_from_entities(entities)
        logger.debug(f"Extracted entities: {entities}")
        return entities

    def _remove_spaces_from_entities(self, entity_list):
        """
        Normalize each relation's source/relationship/destination to lowercase
        snake_case, in place.

        Returns:
            list[dict]: the same list, normalized.
        """
        for item in entity_list:
            item["source"] = item["source"].lower().replace(" ", "_")
            item["relationship"] = item["relationship"].lower().replace(" ", "_")
            item["destination"] = item["destination"].lower().replace(" ", "_")
        return entity_list

    def _get_delete_entities_from_search_output(self, search_output, data, filters):
        """
        Get the entities to be deleted from the search output.

        Asks the LLM which existing relationships are contradicted/obsoleted by
        the new data.

        Returns:
            list[dict]: relations (source/relationship/destination) to delete.
        """
        search_output_string = format_entities(search_output)
        system_prompt, user_prompt = get_delete_messages(search_output_string, data, filters["user_id"])

        _tools = [DELETE_MEMORY_TOOL_GRAPH]
        if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
            _tools = [
                DELETE_MEMORY_STRUCT_TOOL_GRAPH,
            ]

        memory_updates = self.llm.generate_response(
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ],
            tools=_tools,
        )

        to_be_deleted = []
        for item in memory_updates["tool_calls"]:
            if item["name"] == "delete_graph_memory":
                to_be_deleted.append(item["arguments"])
        # Normalize in case the LLM output is not in the expected format.
        to_be_deleted = self._remove_spaces_from_entities(to_be_deleted)
        logger.debug(f"Deleted relationships: {to_be_deleted}")
        return to_be_deleted

    def _delete_entities(self, to_be_deleted, user_id):
        """
        Delete the entities from the graph.

        Args:
            to_be_deleted (list[dict]): relations to remove.
            user_id (str): scope of the deletion.

        Returns:
            list: raw graph query results, one per deleted relation.
        """
        results = []
        for item in to_be_deleted:
            source = item["source"]
            destination = item["destination"]
            relationship = item["relationship"]

            # Delete the specific relationship between nodes
            cypher, params = self._delete_entities_cypher(source, destination, relationship, user_id)
            result = self.graph.query(cypher, params=params)
            results.append(result)
        return results

    @abstractmethod
    def _delete_entities_cypher(self, source, destination, relationship, user_id):
        """
        Returns the OpenCypher query and parameters for deleting entities in the graph DB
        """
        pass

    def _add_entities(self, to_be_added, user_id, entity_type_map):
        """
        Add the new entities to the graph. Merge the nodes if they already exist.

        Args:
            to_be_added (list[dict]): relations to add.
            user_id (str): scope of the addition.
            entity_type_map (dict): entity name -> entity type; unknown
                entities default to the "__User__" type.

        Returns:
            list: raw graph query results, one per added relation.
        """
        results = []
        for item in to_be_added:
            # entities
            source = item["source"]
            destination = item["destination"]
            relationship = item["relationship"]

            # types
            source_type = entity_type_map.get(source, "__User__")
            destination_type = entity_type_map.get(destination, "__User__")

            # embeddings
            source_embedding = self.embedding_model.embed(source)
            dest_embedding = self.embedding_model.embed(destination)

            # Search for existing nodes with near-identical embeddings so we
            # merge rather than duplicate (0.9 similarity threshold).
            source_node_search_result = self._search_source_node(source_embedding, user_id, threshold=0.9)
            destination_node_search_result = self._search_destination_node(dest_embedding, user_id, threshold=0.9)

            cypher, params = self._add_entities_cypher(
                source_node_search_result,
                source,
                source_embedding,
                source_type,
                destination_node_search_result,
                destination,
                dest_embedding,
                destination_type,
                relationship,
                user_id,
            )
            result = self.graph.query(cypher, params=params)
            results.append(result)
        return results

    @abstractmethod
    def _add_entities_cypher(
        self,
        source_node_list,
        source,
        source_embedding,
        source_type,
        destination_node_list,
        destination,
        dest_embedding,
        destination_type,
        relationship,
        user_id,
    ):
        """
        Returns the OpenCypher query and parameters for adding entities in the graph DB
        """
        pass

    def search(self, query, filters, limit=100):
        """
        Search for memories and related graph data.

        Args:
            query (str): Query to search for.
            filters (dict): A dictionary containing filters to be applied during
                the search. Must contain a ``user_id`` key.
            limit (int): The maximum number of relationships to retrieve from
                the graph DB before reranking. Defaults to 100.

        Returns:
            list: up to 5 BM25-reranked relationships, each a dict with
            "source", "relationship" and "destination" keys; an empty list
            when nothing matches.
        """
        entity_type_map = self._retrieve_nodes_from_data(query, filters)
        # BUGFIX: `limit` was previously accepted but never forwarded, so the
        # graph query always used its own default.
        search_output = self._search_graph_db(
            node_list=list(entity_type_map.keys()), filters=filters, limit=limit
        )

        if not search_output:
            return []

        search_outputs_sequence = [
            [item["source"], item["relationship"], item["destination"]] for item in search_output
        ]
        bm25 = BM25Okapi(search_outputs_sequence)

        tokenized_query = query.split(" ")
        reranked_results = bm25.get_top_n(tokenized_query, search_outputs_sequence, n=5)

        search_results = [
            {"source": item[0], "relationship": item[1], "destination": item[2]} for item in reranked_results
        ]

        return search_results

    def _search_source_node(self, source_embedding, user_id, threshold=0.9):
        """Find existing source nodes whose embedding similarity exceeds *threshold*."""
        cypher, params = self._search_source_node_cypher(source_embedding, user_id, threshold)
        result = self.graph.query(cypher, params=params)
        return result

    @abstractmethod
    def _search_source_node_cypher(self, source_embedding, user_id, threshold):
        """
        Returns the OpenCypher query and parameters to search for source nodes
        """
        pass

    def _search_destination_node(self, destination_embedding, user_id, threshold=0.9):
        """Find existing destination nodes whose embedding similarity exceeds *threshold*."""
        cypher, params = self._search_destination_node_cypher(destination_embedding, user_id, threshold)
        result = self.graph.query(cypher, params=params)
        return result

    @abstractmethod
    def _search_destination_node_cypher(self, destination_embedding, user_id, threshold):
        """
        Returns the OpenCypher query and parameters to search for destination nodes
        """
        pass

    def delete_all(self, filters):
        """Delete every node/relationship matching *filters* from the graph."""
        cypher, params = self._delete_all_cypher(filters)
        self.graph.query(cypher, params=params)

    @abstractmethod
    def _delete_all_cypher(self, filters):
        """
        Returns the OpenCypher query and parameters to delete all edges/nodes in the memory store
        """
        pass

    def get_all(self, filters, limit=100):
        """
        Retrieves all nodes and relationships from the graph database based on
        filtering criteria.

        Args:
            filters (dict): A dictionary containing filters to be applied during the retrieval.
            limit (int): The maximum number of nodes and relationships to retrieve. Defaults to 100.

        Returns:
            list: A list of dictionaries, each containing:
                - "source": the source node name.
                - "relationship": the relationship label.
                - "target": the target node name.
        """
        # return all nodes and relationships
        query, params = self._get_all_cypher(filters, limit)
        results = self.graph.query(query, params=params)

        final_results = [
            {
                "source": result["source"],
                "relationship": result["relationship"],
                "target": result["target"],
            }
            for result in results
        ]

        logger.debug(f"Retrieved {len(final_results)} relationships")

        return final_results

    @abstractmethod
    def _get_all_cypher(self, filters, limit):
        """
        Returns the OpenCypher query and parameters to get all edges/nodes in the memory store
        """
        pass

    def _search_graph_db(self, node_list, filters, limit=100):
        """
        Search similar nodes among and their respective incoming and outgoing relations.

        Args:
            node_list (list[str]): entity names to embed and search for.
            filters (dict): filters applied during the search.
            limit (int): maximum relationships returned per node. Defaults to 100.

        Returns:
            list: concatenated relation records from all node queries.
        """
        result_relations = []

        for node in node_list:
            n_embedding = self.embedding_model.embed(node)
            cypher_query, params = self._search_graph_db_cypher(n_embedding, filters, limit)
            ans = self.graph.query(cypher_query, params=params)
            result_relations.extend(ans)

        return result_relations

    @abstractmethod
    def _search_graph_db_cypher(self, n_embedding, filters, limit):
        """
        Returns the OpenCypher query and parameters to search for similar nodes in the memory store
        """
        pass

    # NOTE: reset() is not part of the mem0 memory base interface; it is
    # specific to Neptune Analytics (boto3 neptune-graph client).
    def reset(self):
        """
        Reset the graph by clearing all nodes and relationships.

        Blocks until the graph is available again (polls every 10s, up to 60
        attempts).

        link: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/neptune-graph/client/reset_graph.html
        """
        logger.warning("Clearing graph...")
        graph_id = self.graph.graph_identifier
        self.graph.client.reset_graph(
            graphIdentifier=graph_id,
            skipSnapshot=True,
        )
        waiter = self.graph.client.get_waiter("graph_available")
        waiter.wait(graphIdentifier=graph_id, WaiterConfig={"Delay": 10, "MaxAttempts": 60})