mem0ai_azure_mysql-0.1.115-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. mem0/__init__.py +6 -0
  2. mem0/client/__init__.py +0 -0
  3. mem0/client/main.py +1535 -0
  4. mem0/client/project.py +860 -0
  5. mem0/client/utils.py +29 -0
  6. mem0/configs/__init__.py +0 -0
  7. mem0/configs/base.py +90 -0
  8. mem0/configs/dbs/__init__.py +4 -0
  9. mem0/configs/dbs/base.py +41 -0
  10. mem0/configs/dbs/mysql.py +25 -0
  11. mem0/configs/embeddings/__init__.py +0 -0
  12. mem0/configs/embeddings/base.py +108 -0
  13. mem0/configs/enums.py +7 -0
  14. mem0/configs/llms/__init__.py +0 -0
  15. mem0/configs/llms/base.py +152 -0
  16. mem0/configs/prompts.py +333 -0
  17. mem0/configs/vector_stores/__init__.py +0 -0
  18. mem0/configs/vector_stores/azure_ai_search.py +59 -0
  19. mem0/configs/vector_stores/baidu.py +29 -0
  20. mem0/configs/vector_stores/chroma.py +40 -0
  21. mem0/configs/vector_stores/elasticsearch.py +47 -0
  22. mem0/configs/vector_stores/faiss.py +39 -0
  23. mem0/configs/vector_stores/langchain.py +32 -0
  24. mem0/configs/vector_stores/milvus.py +43 -0
  25. mem0/configs/vector_stores/mongodb.py +25 -0
  26. mem0/configs/vector_stores/opensearch.py +41 -0
  27. mem0/configs/vector_stores/pgvector.py +37 -0
  28. mem0/configs/vector_stores/pinecone.py +56 -0
  29. mem0/configs/vector_stores/qdrant.py +49 -0
  30. mem0/configs/vector_stores/redis.py +26 -0
  31. mem0/configs/vector_stores/supabase.py +44 -0
  32. mem0/configs/vector_stores/upstash_vector.py +36 -0
  33. mem0/configs/vector_stores/vertex_ai_vector_search.py +27 -0
  34. mem0/configs/vector_stores/weaviate.py +43 -0
  35. mem0/dbs/__init__.py +4 -0
  36. mem0/dbs/base.py +68 -0
  37. mem0/dbs/configs.py +21 -0
  38. mem0/dbs/mysql.py +321 -0
  39. mem0/embeddings/__init__.py +0 -0
  40. mem0/embeddings/aws_bedrock.py +100 -0
  41. mem0/embeddings/azure_openai.py +43 -0
  42. mem0/embeddings/base.py +31 -0
  43. mem0/embeddings/configs.py +30 -0
  44. mem0/embeddings/gemini.py +39 -0
  45. mem0/embeddings/huggingface.py +41 -0
  46. mem0/embeddings/langchain.py +35 -0
  47. mem0/embeddings/lmstudio.py +29 -0
  48. mem0/embeddings/mock.py +11 -0
  49. mem0/embeddings/ollama.py +53 -0
  50. mem0/embeddings/openai.py +49 -0
  51. mem0/embeddings/together.py +31 -0
  52. mem0/embeddings/vertexai.py +54 -0
  53. mem0/graphs/__init__.py +0 -0
  54. mem0/graphs/configs.py +96 -0
  55. mem0/graphs/neptune/__init__.py +0 -0
  56. mem0/graphs/neptune/base.py +410 -0
  57. mem0/graphs/neptune/main.py +372 -0
  58. mem0/graphs/tools.py +371 -0
  59. mem0/graphs/utils.py +97 -0
  60. mem0/llms/__init__.py +0 -0
  61. mem0/llms/anthropic.py +64 -0
  62. mem0/llms/aws_bedrock.py +270 -0
  63. mem0/llms/azure_openai.py +114 -0
  64. mem0/llms/azure_openai_structured.py +76 -0
  65. mem0/llms/base.py +32 -0
  66. mem0/llms/configs.py +34 -0
  67. mem0/llms/deepseek.py +85 -0
  68. mem0/llms/gemini.py +201 -0
  69. mem0/llms/groq.py +88 -0
  70. mem0/llms/langchain.py +65 -0
  71. mem0/llms/litellm.py +87 -0
  72. mem0/llms/lmstudio.py +53 -0
  73. mem0/llms/ollama.py +94 -0
  74. mem0/llms/openai.py +124 -0
  75. mem0/llms/openai_structured.py +52 -0
  76. mem0/llms/sarvam.py +89 -0
  77. mem0/llms/together.py +88 -0
  78. mem0/llms/vllm.py +89 -0
  79. mem0/llms/xai.py +52 -0
  80. mem0/memory/__init__.py +0 -0
  81. mem0/memory/base.py +63 -0
  82. mem0/memory/graph_memory.py +632 -0
  83. mem0/memory/main.py +1843 -0
  84. mem0/memory/memgraph_memory.py +630 -0
  85. mem0/memory/setup.py +56 -0
  86. mem0/memory/storage.py +218 -0
  87. mem0/memory/telemetry.py +90 -0
  88. mem0/memory/utils.py +133 -0
  89. mem0/proxy/__init__.py +0 -0
  90. mem0/proxy/main.py +194 -0
  91. mem0/utils/factory.py +132 -0
  92. mem0/vector_stores/__init__.py +0 -0
  93. mem0/vector_stores/azure_ai_search.py +383 -0
  94. mem0/vector_stores/baidu.py +368 -0
  95. mem0/vector_stores/base.py +58 -0
  96. mem0/vector_stores/chroma.py +229 -0
  97. mem0/vector_stores/configs.py +60 -0
  98. mem0/vector_stores/elasticsearch.py +235 -0
  99. mem0/vector_stores/faiss.py +473 -0
  100. mem0/vector_stores/langchain.py +179 -0
  101. mem0/vector_stores/milvus.py +245 -0
  102. mem0/vector_stores/mongodb.py +293 -0
  103. mem0/vector_stores/opensearch.py +281 -0
  104. mem0/vector_stores/pgvector.py +294 -0
  105. mem0/vector_stores/pinecone.py +373 -0
  106. mem0/vector_stores/qdrant.py +240 -0
  107. mem0/vector_stores/redis.py +295 -0
  108. mem0/vector_stores/supabase.py +237 -0
  109. mem0/vector_stores/upstash_vector.py +293 -0
  110. mem0/vector_stores/vertex_ai_vector_search.py +629 -0
  111. mem0/vector_stores/weaviate.py +316 -0
  112. mem0ai_azure_mysql-0.1.115.data/data/README.md +169 -0
  113. mem0ai_azure_mysql-0.1.115.dist-info/METADATA +224 -0
  114. mem0ai_azure_mysql-0.1.115.dist-info/RECORD +116 -0
  115. mem0ai_azure_mysql-0.1.115.dist-info/WHEEL +4 -0
  116. mem0ai_azure_mysql-0.1.115.dist-info/licenses/LICENSE +201 -0
mem0/vector_stores/upstash_vector.py (new file)
@@ -0,0 +1,293 @@
import logging
from typing import Dict, List, Optional

from pydantic import BaseModel

from mem0.vector_stores.base import VectorStoreBase

try:
    from upstash_vector import Index
except ImportError:
    raise ImportError("The 'upstash_vector' library is required. Please install it using 'pip install upstash_vector'.")


logger = logging.getLogger(__name__)


class OutputData(BaseModel):
    id: Optional[str]  # memory id
    score: Optional[float]  # is None for `get` method
    payload: Optional[Dict]  # metadata


class UpstashVector(VectorStoreBase):
    def __init__(
        self,
        collection_name: str,
        url: Optional[str] = None,
        token: Optional[str] = None,
        client: Optional[Index] = None,
        enable_embeddings: bool = False,
    ):
        """
        Initialize the UpstashVector vector store.

        Args:
            collection_name (str): Namespace used for this store. Upstash Vector uses namespaces instead of collections.
            url (str, optional): URL of the Upstash Vector index. Defaults to None.
            token (str, optional): Token for the Upstash Vector index. Defaults to None.
            client (Index, optional): Existing `upstash_vector.Index` client instance. Defaults to None.
            enable_embeddings (bool, optional): Use Upstash's server-side embeddings (raw text is sent) instead of client-supplied vectors. Defaults to False.
        """
        if client:
            self.client = client
        elif url and token:
            self.client = Index(url, token)
        else:
            raise ValueError("Either a client or URL and token must be provided.")

        self.collection_name = collection_name
        self.enable_embeddings = enable_embeddings

    def insert(
        self,
        vectors: List[list],
        payloads: Optional[List[Dict]] = None,
        ids: Optional[List[str]] = None,
    ):
        """
        Insert vectors.

        Args:
            vectors (list): List of vectors to insert.
            payloads (list, optional): List of payloads corresponding to vectors. These are passed as metadata to the Upstash Vector client. Defaults to None.
            ids (list, optional): List of IDs corresponding to vectors. Defaults to None.
        """
        logger.info(f"Inserting {len(vectors)} vectors into namespace {self.collection_name}")

        if self.enable_embeddings:
            if not payloads or any("data" not in m or m["data"] is None for m in payloads):
                raise ValueError("When embeddings are enabled, all payloads must contain a 'data' field.")
            processed_vectors = [
                {
                    "id": ids[i] if ids else None,
                    "data": payloads[i]["data"],
                    "metadata": payloads[i],
                }
                for i, v in enumerate(vectors)
            ]
        else:
            processed_vectors = [
                {
                    "id": ids[i] if ids else None,
                    "vector": vectors[i],
                    "metadata": payloads[i] if payloads else None,
                }
                for i, v in enumerate(vectors)
            ]

        self.client.upsert(
            vectors=processed_vectors,
            namespace=self.collection_name,
        )

    def _stringify(self, x):
        return f'"{x}"' if isinstance(x, str) else x

    def search(
        self,
        query: str,
        vectors: List[list],
        limit: int = 5,
        filters: Optional[Dict] = None,
    ) -> List[OutputData]:
        """
        Search for similar vectors.

        Args:
            query (str): Query text, used when server-side embeddings are enabled.
            vectors (list): Query vectors, used when embeddings are disabled.
            limit (int, optional): Number of results to return. Defaults to 5.
            filters (Dict, optional): Filters to apply to the search. Defaults to None.

        Returns:
            List[OutputData]: Search results.
        """
        filters_str = " AND ".join([f"{k} = {self._stringify(v)}" for k, v in filters.items()]) if filters else None

        response = []

        if self.enable_embeddings:
            response = self.client.query(
                data=query,
                top_k=limit,
                filter=filters_str or "",
                include_metadata=True,
                namespace=self.collection_name,
            )
        else:
            queries = [
                {
                    "vector": v,
                    "top_k": limit,
                    "filter": filters_str or "",
                    "include_metadata": True,
                    "namespace": self.collection_name,
                }
                for v in vectors
            ]
            responses = self.client.query_many(queries=queries)
            # Flatten the per-query result lists into a single list.
            response = [res for res_list in responses for res in res_list]

        return [
            OutputData(
                id=res.id,
                score=res.score,
                payload=res.metadata,
            )
            for res in response
        ]

    def delete(self, vector_id: int):
        """
        Delete a vector by ID.

        Args:
            vector_id (int): ID of the vector to delete.
        """
        self.client.delete(
            ids=[str(vector_id)],
            namespace=self.collection_name,
        )

    def update(
        self,
        vector_id: int,
        vector: Optional[list] = None,
        payload: Optional[dict] = None,
    ):
        """
        Update a vector and its payload.

        Args:
            vector_id (int): ID of the vector to update.
            vector (list, optional): Updated vector. Defaults to None.
            payload (dict, optional): Updated payload. Defaults to None.
        """
        self.client.update(
            id=str(vector_id),
            vector=vector,
            data=payload.get("data") if payload else None,
            metadata=payload,
            namespace=self.collection_name,
        )

    def get(self, vector_id: int) -> Optional[OutputData]:
        """
        Retrieve a vector by ID.

        Args:
            vector_id (int): ID of the vector to retrieve.

        Returns:
            Optional[OutputData]: Retrieved vector, or None if it does not exist.
        """
        response = self.client.fetch(
            ids=[str(vector_id)],
            namespace=self.collection_name,
            include_metadata=True,
        )
        if len(response) == 0:
            return None
        vector = response[0]
        if not vector:
            return None
        return OutputData(id=vector.id, score=None, payload=vector.metadata)

    def list(self, filters: Optional[Dict] = None, limit: int = 100) -> List[List[OutputData]]:
        """
        List all memories.

        Args:
            filters (Dict, optional): Filters to apply to the search. Defaults to None.
            limit (int, optional): Number of results to return. Defaults to 100.

        Returns:
            List[List[OutputData]]: Search results.
        """
        filters_str = " AND ".join([f"{k} = {self._stringify(v)}" for k, v in filters.items()]) if filters else None

        info = self.client.info()
        ns_info = info.namespaces.get(self.collection_name)

        if not ns_info or ns_info.vector_count == 0:
            return [[]]

        # Upstash Vector has no scan API, so page through a resumable query
        # with a constant vector to enumerate entries in the namespace.
        random_vector = [1.0] * info.dimension

        results, query = self.client.resumable_query(
            vector=random_vector,
            filter=filters_str or "",
            include_metadata=True,
            namespace=self.collection_name,
            top_k=100,
        )
        with query:
            while True:
                if len(results) >= limit:
                    break
                res = query.fetch_next(100)
                if not res:
                    break
                results.extend(res)

        parsed_result = [
            OutputData(
                id=res.id,
                score=res.score,
                payload=res.metadata,
            )
            for res in results
        ]
        return [parsed_result]

    def create_col(self, name, vector_size, distance):
        """
        Upstash Vector has namespaces instead of collections. A namespace is created when the first vector is inserted into it.

        This method is a placeholder to maintain the interface.
        """
        pass

    def list_cols(self) -> List[str]:
        """
        List all namespaces in the Upstash Vector index.

        Returns:
            List[str]: List of namespaces.
        """
        return self.client.list_namespaces()

    def delete_col(self):
        """
        Delete the namespace and all vectors in it.
        """
        self.client.reset(namespace=self.collection_name)

    def col_info(self):
        """
        Return general information about the Upstash Vector index:

        - Total number of vectors across all namespaces
        - Total number of vectors waiting to be indexed across all namespaces
        - Total size of the index on disk in bytes
        - Vector dimension
        - Similarity function used
        - Per-namespace vector and pending vector counts
        """
        return self.client.info()

    def reset(self):
        """
        Reset the namespace used by this store.
        """
        self.delete_col()
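
For context, a brief usage sketch of the store added above (not part of the package diff). The index URL, token, namespace names, IDs, and 1536-dimension vectors are illustrative placeholders; only methods defined in this file are used, and the second block assumes an Upstash index created with built-in embeddings enabled.

from mem0.vector_stores.upstash_vector import UpstashVector

# Client-side vectors: supply embeddings plus metadata payloads and string IDs.
store = UpstashVector(
    collection_name="mem0",                      # used as the Upstash namespace
    url="https://example-index.upstash.io",      # placeholder index URL
    token="UPSTASH_VECTOR_TOKEN",                # placeholder token
)
store.insert(
    vectors=[[0.1] * 1536],
    payloads=[{"data": "user prefers dark mode", "user_id": "alice"}],
    ids=["mem-1"],  # hypothetical memory id
)

# A filters dict such as {"user_id": "alice"} is rendered as the Upstash
# filter expression: user_id = "alice".
hits = store.search(query="dark mode preference", vectors=[[0.1] * 1536], limit=5, filters={"user_id": "alice"})
for hit in hits:
    print(hit.id, hit.score, hit.payload)

# Server-side embeddings: with enable_embeddings=True, the payload "data" text
# (and the raw query string) is sent instead of vectors.
embedded_store = UpstashVector(
    collection_name="mem0-embedded",
    url="https://example-index.upstash.io",
    token="UPSTASH_VECTOR_TOKEN",
    enable_embeddings=True,
)
embedded_store.insert(
    vectors=[[]],  # ignored in this mode, but the argument is still required
    payloads=[{"data": "user prefers dark mode", "user_id": "alice"}],
    ids=["mem-2"],
)
hits = embedded_store.search(query="what theme does the user prefer?", vectors=[], limit=5)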