mem0ai-azure-mysql 0.1.115__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. mem0/__init__.py +6 -0
  2. mem0/client/__init__.py +0 -0
  3. mem0/client/main.py +1535 -0
  4. mem0/client/project.py +860 -0
  5. mem0/client/utils.py +29 -0
  6. mem0/configs/__init__.py +0 -0
  7. mem0/configs/base.py +90 -0
  8. mem0/configs/dbs/__init__.py +4 -0
  9. mem0/configs/dbs/base.py +41 -0
  10. mem0/configs/dbs/mysql.py +25 -0
  11. mem0/configs/embeddings/__init__.py +0 -0
  12. mem0/configs/embeddings/base.py +108 -0
  13. mem0/configs/enums.py +7 -0
  14. mem0/configs/llms/__init__.py +0 -0
  15. mem0/configs/llms/base.py +152 -0
  16. mem0/configs/prompts.py +333 -0
  17. mem0/configs/vector_stores/__init__.py +0 -0
  18. mem0/configs/vector_stores/azure_ai_search.py +59 -0
  19. mem0/configs/vector_stores/baidu.py +29 -0
  20. mem0/configs/vector_stores/chroma.py +40 -0
  21. mem0/configs/vector_stores/elasticsearch.py +47 -0
  22. mem0/configs/vector_stores/faiss.py +39 -0
  23. mem0/configs/vector_stores/langchain.py +32 -0
  24. mem0/configs/vector_stores/milvus.py +43 -0
  25. mem0/configs/vector_stores/mongodb.py +25 -0
  26. mem0/configs/vector_stores/opensearch.py +41 -0
  27. mem0/configs/vector_stores/pgvector.py +37 -0
  28. mem0/configs/vector_stores/pinecone.py +56 -0
  29. mem0/configs/vector_stores/qdrant.py +49 -0
  30. mem0/configs/vector_stores/redis.py +26 -0
  31. mem0/configs/vector_stores/supabase.py +44 -0
  32. mem0/configs/vector_stores/upstash_vector.py +36 -0
  33. mem0/configs/vector_stores/vertex_ai_vector_search.py +27 -0
  34. mem0/configs/vector_stores/weaviate.py +43 -0
  35. mem0/dbs/__init__.py +4 -0
  36. mem0/dbs/base.py +68 -0
  37. mem0/dbs/configs.py +21 -0
  38. mem0/dbs/mysql.py +321 -0
  39. mem0/embeddings/__init__.py +0 -0
  40. mem0/embeddings/aws_bedrock.py +100 -0
  41. mem0/embeddings/azure_openai.py +43 -0
  42. mem0/embeddings/base.py +31 -0
  43. mem0/embeddings/configs.py +30 -0
  44. mem0/embeddings/gemini.py +39 -0
  45. mem0/embeddings/huggingface.py +41 -0
  46. mem0/embeddings/langchain.py +35 -0
  47. mem0/embeddings/lmstudio.py +29 -0
  48. mem0/embeddings/mock.py +11 -0
  49. mem0/embeddings/ollama.py +53 -0
  50. mem0/embeddings/openai.py +49 -0
  51. mem0/embeddings/together.py +31 -0
  52. mem0/embeddings/vertexai.py +54 -0
  53. mem0/graphs/__init__.py +0 -0
  54. mem0/graphs/configs.py +96 -0
  55. mem0/graphs/neptune/__init__.py +0 -0
  56. mem0/graphs/neptune/base.py +410 -0
  57. mem0/graphs/neptune/main.py +372 -0
  58. mem0/graphs/tools.py +371 -0
  59. mem0/graphs/utils.py +97 -0
  60. mem0/llms/__init__.py +0 -0
  61. mem0/llms/anthropic.py +64 -0
  62. mem0/llms/aws_bedrock.py +270 -0
  63. mem0/llms/azure_openai.py +114 -0
  64. mem0/llms/azure_openai_structured.py +76 -0
  65. mem0/llms/base.py +32 -0
  66. mem0/llms/configs.py +34 -0
  67. mem0/llms/deepseek.py +85 -0
  68. mem0/llms/gemini.py +201 -0
  69. mem0/llms/groq.py +88 -0
  70. mem0/llms/langchain.py +65 -0
  71. mem0/llms/litellm.py +87 -0
  72. mem0/llms/lmstudio.py +53 -0
  73. mem0/llms/ollama.py +94 -0
  74. mem0/llms/openai.py +124 -0
  75. mem0/llms/openai_structured.py +52 -0
  76. mem0/llms/sarvam.py +89 -0
  77. mem0/llms/together.py +88 -0
  78. mem0/llms/vllm.py +89 -0
  79. mem0/llms/xai.py +52 -0
  80. mem0/memory/__init__.py +0 -0
  81. mem0/memory/base.py +63 -0
  82. mem0/memory/graph_memory.py +632 -0
  83. mem0/memory/main.py +1843 -0
  84. mem0/memory/memgraph_memory.py +630 -0
  85. mem0/memory/setup.py +56 -0
  86. mem0/memory/storage.py +218 -0
  87. mem0/memory/telemetry.py +90 -0
  88. mem0/memory/utils.py +133 -0
  89. mem0/proxy/__init__.py +0 -0
  90. mem0/proxy/main.py +194 -0
  91. mem0/utils/factory.py +132 -0
  92. mem0/vector_stores/__init__.py +0 -0
  93. mem0/vector_stores/azure_ai_search.py +383 -0
  94. mem0/vector_stores/baidu.py +368 -0
  95. mem0/vector_stores/base.py +58 -0
  96. mem0/vector_stores/chroma.py +229 -0
  97. mem0/vector_stores/configs.py +60 -0
  98. mem0/vector_stores/elasticsearch.py +235 -0
  99. mem0/vector_stores/faiss.py +473 -0
  100. mem0/vector_stores/langchain.py +179 -0
  101. mem0/vector_stores/milvus.py +245 -0
  102. mem0/vector_stores/mongodb.py +293 -0
  103. mem0/vector_stores/opensearch.py +281 -0
  104. mem0/vector_stores/pgvector.py +294 -0
  105. mem0/vector_stores/pinecone.py +373 -0
  106. mem0/vector_stores/qdrant.py +240 -0
  107. mem0/vector_stores/redis.py +295 -0
  108. mem0/vector_stores/supabase.py +237 -0
  109. mem0/vector_stores/upstash_vector.py +293 -0
  110. mem0/vector_stores/vertex_ai_vector_search.py +629 -0
  111. mem0/vector_stores/weaviate.py +316 -0
  112. mem0ai_azure_mysql-0.1.115.data/data/README.md +169 -0
  113. mem0ai_azure_mysql-0.1.115.dist-info/METADATA +224 -0
  114. mem0ai_azure_mysql-0.1.115.dist-info/RECORD +116 -0
  115. mem0ai_azure_mysql-0.1.115.dist-info/WHEEL +4 -0
  116. mem0ai_azure_mysql-0.1.115.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,60 @@
1
+ from typing import Dict, Optional
2
+
3
+ from pydantic import BaseModel, Field, model_validator
4
+
5
+
6
class VectorStoreConfig(BaseModel):
    """Top-level vector store configuration.

    During validation the ``provider`` name is resolved to its concrete
    pydantic config class (imported lazily from
    ``mem0.configs.vector_stores.<provider>``) and ``config`` is replaced by
    an instance of that class.
    """

    provider: str = Field(
        description="Provider of the vector store (e.g., 'qdrant', 'chroma', 'upstash_vector')",
        default="qdrant",
    )
    config: Optional[Dict] = Field(description="Configuration for the specific vector store", default=None)

    # Maps each supported provider name to the name of the config class
    # exported by the matching module under mem0.configs.vector_stores.
    _provider_configs: Dict[str, str] = {
        "qdrant": "QdrantConfig",
        "chroma": "ChromaDbConfig",
        "pgvector": "PGVectorConfig",
        "pinecone": "PineconeConfig",
        "mongodb": "MongoDBConfig",
        "milvus": "MilvusDBConfig",
        "baidu": "BaiduDBConfig",
        "upstash_vector": "UpstashVectorConfig",
        "azure_ai_search": "AzureAISearchConfig",
        "redis": "RedisDBConfig",
        "elasticsearch": "ElasticsearchConfig",
        "vertex_ai_vector_search": "GoogleMatchingEngineConfig",
        "opensearch": "OpenSearchConfig",
        "supabase": "SupabaseConfig",
        "weaviate": "WeaviateConfig",
        "faiss": "FAISSConfig",
        "langchain": "LangchainConfig",
    }

    @model_validator(mode="after")
    def validate_and_create_config(self) -> "VectorStoreConfig":
        """Resolve the provider-specific config class and instantiate it.

        Raises:
            ValueError: If the provider is unknown, or ``config`` is neither a
                dict nor an instance of the provider's config class.
        """
        class_name = self._provider_configs.get(self.provider)
        if class_name is None:
            raise ValueError(f"Unsupported vector store provider: {self.provider}")

        # Import only the selected provider's module so optional backends
        # don't have to be installed.
        provider_module = __import__(
            f"mem0.configs.vector_stores.{self.provider}",
            fromlist=[class_name],
        )
        config_class = getattr(provider_module, class_name)

        raw = {} if self.config is None else self.config

        if not isinstance(raw, dict):
            # An already-constructed config object is accepted unchanged;
            # any other type is rejected.
            if not isinstance(raw, config_class):
                raise ValueError(f"Invalid config type for provider {self.provider}")
            return self

        # Providers whose config declares a ``path`` field get a default
        # on-disk location when the caller didn't supply one.
        # NOTE(review): this relies on __annotations__ of the config class;
        # extra/unknown keys are left for the config class itself to police.
        if "path" in config_class.__annotations__ and "path" not in raw:
            raw["path"] = f"/tmp/{self.provider}"

        self.config = config_class(**raw)
        return self
@@ -0,0 +1,235 @@
1
+ import logging
2
+ from typing import Any, Dict, List, Optional
3
+
4
+ try:
5
+ from elasticsearch import Elasticsearch
6
+ from elasticsearch.helpers import bulk
7
+ except ImportError:
8
+ raise ImportError("Elasticsearch requires extra dependencies. Install with `pip install elasticsearch`") from None
9
+
10
+ from pydantic import BaseModel
11
+
12
+ from mem0.configs.vector_stores.elasticsearch import ElasticsearchConfig
13
+ from mem0.vector_stores.base import VectorStoreBase
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class OutputData(BaseModel):
    """Normalized result row returned by ElasticsearchDB operations."""

    id: str  # Elasticsearch document _id
    score: float  # similarity score; fixed at 1.0 for insert/get/list, where ES returns none
    payload: Dict  # the metadata object stored alongside the vector
22
+
23
+
24
class ElasticsearchDB(VectorStoreBase):
    """Vector store implementation backed by an Elasticsearch index.

    Embeddings are stored in a ``dense_vector`` field named ``vector`` and all
    application payloads are stored under the ``metadata`` object field.
    """

    def __init__(self, **kwargs):
        """Validate config, connect to Elasticsearch, and optionally create the index.

        Args:
            **kwargs: Fields accepted by ElasticsearchConfig (host, port,
                cloud_id, api_key, user, password, collection_name,
                embedding_model_dims, verify_certs, auto_create_index,
                custom_search_query).
        """
        config = ElasticsearchConfig(**kwargs)

        # Initialize Elasticsearch client: Elastic Cloud (cloud_id) takes
        # precedence; otherwise connect to an explicit host, with the port
        # appended only when one is configured.
        if config.cloud_id:
            self.client = Elasticsearch(
                cloud_id=config.cloud_id,
                api_key=config.api_key,
                verify_certs=config.verify_certs,
            )
        else:
            self.client = Elasticsearch(
                hosts=[f"{config.host}" if config.port is None else f"{config.host}:{config.port}"],
                basic_auth=(config.user, config.password) if (config.user and config.password) else None,
                verify_certs=config.verify_certs,
            )

        self.collection_name = config.collection_name
        self.embedding_model_dims = config.embedding_model_dims

        # Create index only if auto_create_index is True
        if config.auto_create_index:
            self.create_index()

        # Optional callable(vectors, limit, filters) -> ES query body; when
        # set, it replaces the default KNN query built in search().
        if config.custom_search_query:
            self.custom_search_query = config.custom_search_query
        else:
            self.custom_search_query = None

    def create_index(self) -> None:
        """Create Elasticsearch index with proper mappings if it doesn't exist"""
        index_settings = {
            "settings": {"index": {"number_of_replicas": 1, "number_of_shards": 5, "refresh_interval": "1s"}},
            "mappings": {
                "properties": {
                    "text": {"type": "text"},
                    "vector": {
                        "type": "dense_vector",
                        "dims": self.embedding_model_dims,
                        "index": True,
                        "similarity": "cosine",
                    },
                    # user_id is a keyword so term filters match it exactly.
                    "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
                }
            },
        }

        if not self.client.indices.exists(index=self.collection_name):
            self.client.indices.create(index=self.collection_name, body=index_settings)
            logger.info(f"Created index {self.collection_name}")
        else:
            logger.info(f"Index {self.collection_name} already exists")

    def create_col(self, name: str, vector_size: int, distance: str = "cosine") -> None:
        """Create a new collection (index in Elasticsearch).

        Args:
            name: Name of the index to create.
            vector_size: Dimensionality of the dense vector field.
            distance: Elasticsearch similarity function ("cosine",
                "dot_product", "l2_norm"). Defaults to "cosine".
        """
        index_settings = {
            "mappings": {
                "properties": {
                    # Fix: honor the ``distance`` argument — it was previously
                    # accepted but ignored, and "cosine" was always used.
                    "vector": {"type": "dense_vector", "dims": vector_size, "index": True, "similarity": distance},
                    "payload": {"type": "object"},
                    "id": {"type": "keyword"},
                }
            }
        }

        if not self.client.indices.exists(index=name):
            self.client.indices.create(index=name, body=index_settings)
            logger.info(f"Created index {name}")

    def insert(
        self, vectors: List[List[float]], payloads: Optional[List[Dict]] = None, ids: Optional[List[str]] = None
    ) -> List[OutputData]:
        """Insert vectors into the index via a single bulk request.

        Args:
            vectors: Embedding vectors to index.
            payloads: Per-vector metadata dicts; defaults to empty dicts.
            ids: Document ids; defaults to positional indices as strings.

        Returns:
            One OutputData per inserted vector (score fixed at 1.0).
        """
        if not ids:
            ids = [str(i) for i in range(len(vectors))]

        if payloads is None:
            payloads = [{} for _ in range(len(vectors))]

        actions = []
        for i, (vec, id_) in enumerate(zip(vectors, ids)):
            action = {
                "_index": self.collection_name,
                "_id": id_,
                "_source": {
                    "vector": vec,
                    "metadata": payloads[i],  # Store all metadata in the metadata field
                },
            }
            actions.append(action)

        bulk(self.client, actions)

        results = []
        for i, id_ in enumerate(ids):
            results.append(
                OutputData(
                    id=id_,
                    score=1.0,  # Default score for inserts
                    payload=payloads[i],
                )
            )
        return results

    def search(
        self, query: str, vectors: List[float], limit: int = 5, filters: Optional[Dict] = None
    ) -> List[OutputData]:
        """
        Search with two options:
        1. Use custom search query if provided
        2. Use KNN search on vectors with pre-filtering if no custom search query is provided

        Args:
            query: Original text query (unused by the default KNN path).
            vectors: Query embedding.
            limit: Maximum number of hits to return.
            filters: Exact-match constraints applied as term filters on
                ``metadata.<key>``.

        Returns:
            Matching documents with their Elasticsearch scores.
        """
        if self.custom_search_query:
            search_query = self.custom_search_query(vectors, limit, filters)
        else:
            search_query = {
                "knn": {"field": "vector", "query_vector": vectors, "k": limit, "num_candidates": limit * 2}
            }
            if filters:
                filter_conditions = []
                for key, value in filters.items():
                    filter_conditions.append({"term": {f"metadata.{key}": value}})
                search_query["knn"]["filter"] = {"bool": {"must": filter_conditions}}

        response = self.client.search(index=self.collection_name, body=search_query)

        results = []
        for hit in response["hits"]["hits"]:
            results.append(
                OutputData(id=hit["_id"], score=hit["_score"], payload=hit.get("_source", {}).get("metadata", {}))
            )

        return results

    def delete(self, vector_id: str) -> None:
        """Delete a vector by ID."""
        self.client.delete(index=self.collection_name, id=vector_id)

    def update(self, vector_id: str, vector: Optional[List[float]] = None, payload: Optional[Dict] = None) -> None:
        """Update a vector and its payload.

        Only the fields that are provided (non-None) are updated; the other
        document fields are left untouched by the partial update.
        """
        doc = {}
        if vector is not None:
            doc["vector"] = vector
        if payload is not None:
            doc["metadata"] = payload

        self.client.update(index=self.collection_name, id=vector_id, body={"doc": doc})

    def get(self, vector_id: str) -> Optional[OutputData]:
        """Retrieve a vector by ID.

        Returns:
            The document's metadata wrapped in OutputData, or None when the
            response cannot be parsed (missing keys / unexpected shape).
        """
        try:
            response = self.client.get(index=self.collection_name, id=vector_id)
            return OutputData(
                id=response["_id"],
                score=1.0,  # Default score for direct get
                payload=response["_source"].get("metadata", {}),
            )
        except KeyError as e:
            logger.warning(f"Missing key in Elasticsearch response: {e}")
            return None
        except TypeError as e:
            logger.warning(f"Invalid response type from Elasticsearch: {e}")
            return None
        except Exception as e:
            # Broad catch kept deliberately: get() is best-effort and callers
            # expect None rather than an exception on lookup failure.
            logger.error(f"Unexpected error while parsing Elasticsearch response: {e}")
            return None

    def list_cols(self) -> List[str]:
        """List all collections (indices)."""
        return list(self.client.indices.get_alias().keys())

    def delete_col(self) -> None:
        """Delete a collection (index)."""
        self.client.indices.delete(index=self.collection_name)

    def col_info(self, name: str) -> Any:
        """Get information about a collection (index)."""
        return self.client.indices.get(index=name)

    def list(self, filters: Optional[Dict] = None, limit: Optional[int] = None) -> List[List[OutputData]]:
        """List all memories.

        Args:
            filters: Exact-match constraints applied as term filters on
                ``metadata.<key>``; when None, a match_all query is used.
            limit: Maximum number of documents to return.

        Returns:
            A single-element list wrapping the result rows (score fixed at
            1.0), matching the shape other mem0 vector stores return.
        """
        query: Dict[str, Any] = {"query": {"match_all": {}}}

        if filters:
            filter_conditions = []
            for key, value in filters.items():
                filter_conditions.append({"term": {f"metadata.{key}": value}})
            query["query"] = {"bool": {"must": filter_conditions}}

        if limit:
            query["size"] = limit

        response = self.client.search(index=self.collection_name, body=query)

        results = []
        for hit in response["hits"]["hits"]:
            results.append(
                OutputData(
                    id=hit["_id"],
                    score=1.0,  # Default score for list operation
                    payload=hit.get("_source", {}).get("metadata", {}),
                )
            )

        return [results]

    def reset(self):
        """Reset the index by deleting and recreating it."""
        logger.warning(f"Resetting index {self.collection_name}...")
        self.delete_col()
        self.create_index()