better-mem0-mcp 1.1.5__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- better_mem0_mcp/config.py +14 -21
- better_mem0_mcp-1.2.0.dist-info/METADATA +173 -0
- {better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/RECORD +6 -6
- better_mem0_mcp-1.1.5.dist-info/METADATA +0 -149
- {better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/WHEEL +0 -0
- {better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/entry_points.txt +0 -0
- {better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/licenses/LICENSE +0 -0
better_mem0_mcp/config.py
CHANGED
```diff
@@ -33,43 +33,36 @@ class Settings(BaseSettings):
     embedder_models: str = "gemini/gemini-embedding-001"
 
     def setup_api_keys(self) -> dict[str, list[str]]:
-        """
-
+        """Parse API_KEYS (format: ENV_VAR:key,...) and set env vars.
+
+        Example:
+            API_KEYS="GOOGLE_API_KEY:abc,GOOGLE_API_KEY:def,OPENAI_API_KEY:xyz"
 
         Returns:
-            Dict mapping
+            Dict mapping env var name to list of API keys.
         """
-
-            "gemini": "GOOGLE_API_KEY",
-            "openai": "OPENAI_API_KEY",
-            "anthropic": "ANTHROPIC_API_KEY",
-            "groq": "GROQ_API_KEY",
-            "deepseek": "DEEPSEEK_API_KEY",
-            "mistral": "MISTRAL_API_KEY",
-        }
-
-        keys_by_provider: dict[str, list[str]] = {}
+        keys_by_env: dict[str, list[str]] = {}
 
         for pair in self.api_keys.split(","):
             pair = pair.strip()
             if ":" not in pair:
                 continue
 
-
-
+            env_var, key = pair.split(":", 1)
+            env_var = env_var.strip()
             key = key.strip()
 
             if not key:
                 continue
 
-
+            keys_by_env.setdefault(env_var, []).append(key)
 
-        # Set first key of each
-        for
-        if
-        os.environ[
+        # Set first key of each env var (LiteLLM reads from env)
+        for env_var, keys in keys_by_env.items():
+            if keys:
+                os.environ[env_var] = keys[0]
 
-        return
+        return keys_by_env
 
     def parse_database_url(self) -> dict:
         """Parse DATABASE_URL into connection parameters."""
```
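In plain terms, 1.2.0 drops the hard-coded provider-to-env-var map and lets each `API_KEYS` entry name the environment variable directly. A minimal, self-contained sketch of the new parsing behavior, adapted from the diff above (the standalone function and the sample keys are illustrative, not the package's actual API):

```python
import os


def setup_api_keys(api_keys: str) -> dict[str, list[str]]:
    """Parse an API_KEYS string of ENV_VAR:key pairs and export the first key per env var."""
    keys_by_env: dict[str, list[str]] = {}

    for pair in api_keys.split(","):
        pair = pair.strip()
        if ":" not in pair:
            continue  # skip malformed entries

        env_var, key = pair.split(":", 1)
        env_var, key = env_var.strip(), key.strip()
        if not key:
            continue

        keys_by_env.setdefault(env_var, []).append(key)

    # Only the first key per env var is exported (LiteLLM reads from env);
    # the remaining keys stay in the returned dict for fallback.
    for env_var, keys in keys_by_env.items():
        if keys:
            os.environ[env_var] = keys[0]

    return keys_by_env


# Example with made-up keys:
# setup_api_keys("GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx")
# -> {"GOOGLE_API_KEY": ["AIza-1", "AIza-2"], "OPENAI_API_KEY": ["sk-xxx"]}
#    with os.environ["GOOGLE_API_KEY"] set to "AIza-1"
```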
better_mem0_mcp-1.2.0.dist-info/METADATA
ADDED
@@ -0,0 +1,173 @@
Metadata-Version: 2.4
Name: better-mem0-mcp
Version: 1.2.0
Summary: Zero-setup MCP Server for AI memory - works with Neon/Supabase
Project-URL: Homepage, https://github.com/n24q02m/better-mem0-mcp
Project-URL: Repository, https://github.com/n24q02m/better-mem0-mcp.git
Project-URL: Issues, https://github.com/n24q02m/better-mem0-mcp/issues
Author-email: n24q02m <quangminh2422004@gmail.com>
License: MIT
License-File: LICENSE
Keywords: ai-agent,llm,mcp,mem0,memory,pgvector
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
Requires-Python: ==3.13.*
Requires-Dist: google-genai
Requires-Dist: litellm
Requires-Dist: loguru
Requires-Dist: mcp[cli]
Requires-Dist: mem0ai
Requires-Dist: psycopg[binary,pool]
Requires-Dist: pydantic-settings
Description-Content-Type: text/markdown

# better-mem0-mcp

**Self-hosted MCP Server for AI memory with PostgreSQL (pgvector).**

[PyPI](https://pypi.org/project/better-mem0-mcp/)
[Docker Hub](https://hub.docker.com/r/n24q02m/better-mem0-mcp)
[License: MIT](LICENSE)

## Features

- **Self-hosted PostgreSQL** - Your data stays with you (Neon/Supabase free tier supported)
- **Graph Memory** - SQL-based relationship tracking alongside vector memory
- **Multi-provider LLM** - Gemini, OpenAI, Anthropic, Groq, DeepSeek, Mistral
- **Fallback chains** - Multi-key per provider + multi-model fallback
- **Zero manual setup** - Just `DATABASE_URL` + `API_KEYS`

---

## Quick Start

### 1. Get Prerequisites

- **Database**: [Neon](https://neon.tech) or [Supabase](https://supabase.com) (free tier works)
- **API Key**: Any supported provider ([Google AI Studio](https://aistudio.google.com/apikey) is free)

### 2. Add to mcp.json

#### uvx (Recommended)

```json
{
  "mcpServers": {
    "better-mem0": {
      "command": "uvx",
      "args": ["better-mem0-mcp@latest"],
      "env": {
        "DATABASE_URL": "postgresql://user:pass@xxx.neon.tech/neondb?sslmode=require",
        "API_KEYS": "GOOGLE_API_KEY:AIza..."
      }
    }
  }
}
```

#### Docker

```json
{
  "mcpServers": {
    "better-mem0": {
      "command": "docker",
      "args": ["run", "-i", "--rm", "-e", "DATABASE_URL", "-e", "API_KEYS", "n24q02m/better-mem0-mcp:latest"],
      "env": {
        "DATABASE_URL": "postgresql://...",
        "API_KEYS": "GOOGLE_API_KEY:AIza..."
      }
    }
  }
}
```

### 3. Done!

Ask your AI: "Remember that I prefer dark mode and use FastAPI"

---

## Configuration

| Variable | Required | Description |
|----------|----------|-------------|
| `DATABASE_URL` | Yes | PostgreSQL with pgvector extension |
| `API_KEYS` | Yes | `ENV_VAR:key` pairs, comma-separated |
| `LLM_MODELS` | No | Model fallback chain |
| `EMBEDDER_MODELS` | No | Embedding model chain |

### Supported LiteLLM Providers

Use environment variable names from [LiteLLM docs](https://docs.litellm.ai/):
`GOOGLE_API_KEY`, `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, `GROQ_API_KEY`, etc.

**Single provider:**
```bash
API_KEYS=GOOGLE_API_KEY:AIza...
```

**Multi-key with fallback:**
```bash
API_KEYS=GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx
LLM_MODELS=gemini/gemini-3-flash-preview,openai/gpt-4o-mini
EMBEDDER_MODELS=gemini/gemini-embedding-001,openai/text-embedding-3-small
```

### Defaults

| Setting | Default |
|---------|---------|
| `LLM_MODELS` | `gemini/gemini-3-flash-preview` |
| `EMBEDDER_MODELS` | `gemini/gemini-embedding-001` |

---

## Tools

| Tool | Description |
|------|-------------|
| `memory` | Memory operations: `add`, `search`, `list`, `delete` |
| `help` | Get full documentation for tools |

### Usage Examples

```json
{"action": "add", "content": "I prefer TypeScript over JavaScript"}
{"action": "search", "query": "programming preferences"}
{"action": "list"}
{"action": "delete", "memory_id": "abc123"}
```

---

## Build from Source

```bash
git clone https://github.com/n24q02m/better-mem0-mcp
cd better-mem0-mcp

# Setup (requires mise: https://mise.jdx.dev/)
mise run setup

# Run
uv run better-mem0-mcp
```

**Requirements:** Python 3.13+

---

## Contributing

See [CONTRIBUTING.md](CONTRIBUTING.md)

## License

MIT - See [LICENSE](LICENSE)
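The variables in the Configuration table above are loaded through a pydantic-settings `Settings` class (the class visible in the config.py diff). As rough orientation only, here is a minimal sketch of such a class; field names other than `api_keys` and `embedder_models` (the two that appear in the diff) are assumptions, and the defaults are taken from the Defaults table:

```python
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Illustrative only; field names other than api_keys/embedder_models are assumptions."""

    # Required (see the Configuration table above)
    database_url: str = ""  # DATABASE_URL: PostgreSQL with the pgvector extension
    api_keys: str = ""      # API_KEYS: comma-separated "ENV_VAR:key" pairs

    # Optional fallback chains, defaults from the Defaults table
    llm_models: str = "gemini/gemini-3-flash-preview"
    embedder_models: str = "gemini/gemini-embedding-001"


# pydantic-settings matches environment variables to field names case-insensitively,
# so DATABASE_URL fills database_url, API_KEYS fills api_keys, and so on.
settings = Settings()
```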
{better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/RECORD
CHANGED

```diff
@@ -1,12 +1,12 @@
 better_mem0_mcp/__init__.py,sha256=fcqgbz2HvMCPidqqoPvtRky5pGIHP2w9oVim7UQkuBc,106
 better_mem0_mcp/__main__.py,sha256=IeeRidmZF4oBaamjQM6FUbhXpsSdE4sQF_6y_D2GEE4,116
-better_mem0_mcp/config.py,sha256=
+better_mem0_mcp/config.py,sha256=cqLGzxYno7SgRMn7Y8GOBR0P4CrBuFjsmFpK6eGZhs0,5312
 better_mem0_mcp/graph.py,sha256=rE9z6XECiAktEqDNgmwqCpFpvKSn3azO9H4sRBhj8UU,6195
 better_mem0_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 better_mem0_mcp/server.py,sha256=QYLP46pz0U9gkxQKB_g8FMga77ywElMMjSCU-hGZQ-M,5499
 better_mem0_mcp/docs/memory.md,sha256=198dDuAGccG5Ca7rhEXIU03ZhxNKK44B_Brl2glurGc,1608
-better_mem0_mcp-1.
-better_mem0_mcp-1.
-better_mem0_mcp-1.
-better_mem0_mcp-1.
-better_mem0_mcp-1.
+better_mem0_mcp-1.2.0.dist-info/METADATA,sha256=141SpGEaPKcg0-koPix7VrQGjXg0VW5fIH2ba0HC7qY,4627
+better_mem0_mcp-1.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+better_mem0_mcp-1.2.0.dist-info/entry_points.txt,sha256=2b7E3D6yo94mQXP2Ms0bhUlWkK9f664f0GrstImOq30,57
+better_mem0_mcp-1.2.0.dist-info/licenses/LICENSE,sha256=d7xQ6sRyeGus6gnvwgqiQtSY7XdFw0Jd0w5-Co_xHnk,1064
+better_mem0_mcp-1.2.0.dist-info/RECORD,,
```
better_mem0_mcp-1.1.5.dist-info/METADATA
REMOVED
@@ -1,149 +0,0 @@
Metadata-Version: 2.4
Name: better-mem0-mcp
Version: 1.1.5
Summary: Zero-setup MCP Server for AI memory - works with Neon/Supabase
Project-URL: Homepage, https://github.com/n24q02m/better-mem0-mcp
Project-URL: Repository, https://github.com/n24q02m/better-mem0-mcp.git
Project-URL: Issues, https://github.com/n24q02m/better-mem0-mcp/issues
Author-email: n24q02m <quangminh2422004@gmail.com>
License: MIT
License-File: LICENSE
Keywords: ai-agent,llm,mcp,mem0,memory,pgvector
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
Requires-Python: >=3.13
Requires-Dist: google-genai>=1.0.0
Requires-Dist: litellm>=1.0.0
Requires-Dist: loguru>=0.7.0
Requires-Dist: mcp[cli]>=1.0.0
Requires-Dist: mem0ai>=0.1.0
Requires-Dist: psycopg[binary,pool]>=3.1.0
Requires-Dist: pydantic-settings>=2.0.0
Description-Content-Type: text/markdown

# better-mem0-mcp

**Zero-setup** MCP Server for AI memory. Works with Neon/Supabase free tier.

[License: MIT](https://opensource.org/licenses/MIT)

## Quick Start

### 1. Get Prerequisites

- **Database**: [Neon](https://neon.tech) or [Supabase](https://supabase.com) (free tier)
- **API Key**: [Google AI Studio](https://aistudio.google.com/apikey) (free tier)

### 2. Add to mcp.json

#### uvx (Recommended)

```json
{
  "mcpServers": {
    "better-mem0": {
      "command": "uvx",
      "args": ["better-mem0-mcp@latest"],
      "env": {
        "DATABASE_URL": "postgresql://user:pass@xxx.neon.tech/neondb?sslmode=require",
        "API_KEYS": "gemini:AIza..."
      }
    }
  }
}
```

#### Docker

```json
{
  "mcpServers": {
    "better-mem0": {
      "command": "docker",
      "args": ["run", "-i", "--rm", "-e", "DATABASE_URL", "-e", "API_KEYS", "n24q02m/better-mem0-mcp:latest"],
      "env": {
        "DATABASE_URL": "postgresql://...",
        "API_KEYS": "gemini:AIza..."
      }
    }
  }
}
```

### 3. Done!

Ask Claude: "Remember that I prefer dark mode and use FastAPI"

---

## Configuration

| Variable | Required | Description |
|----------|----------|-------------|
| `DATABASE_URL` | Yes | PostgreSQL connection string |
| `API_KEYS` | Yes | `provider:key,...` (multi-key per provider OK) |
| `LLM_MODELS` | No | `provider/model,...` (fallback chain) |
| `EMBEDDER_MODELS` | No | `provider/model,...` (fallback chain) |

### Examples

**Minimal (Gemini only):**
```
API_KEYS=gemini:AIza...
```

**Multi-key with fallback:**
```
API_KEYS=gemini:AIza-1,gemini:AIza-2,openai:sk-xxx
LLM_MODELS=gemini/gemini-2.5-flash,openai/gpt-4o-mini
EMBEDDER_MODELS=gemini/gemini-embedding-001,openai/text-embedding-3-small
```

### Defaults

| Setting | Default |
|---------|---------|
| `LLM_MODELS` | `gemini/gemini-2.5-flash` |
| `EMBEDDER_MODELS` | `gemini/gemini-embedding-001` |

---

## Tools

| Tool | Description |
|------|-------------|
| `memory` | `action`: add, search, list, delete |
| `help` | Detailed documentation |

### Usage

```json
{"action": "add", "content": "I prefer TypeScript over JavaScript"}
{"action": "search", "query": "preferences"}
{"action": "list"}
{"action": "delete", "memory_id": "abc123"}
```

---

## Why better-mem0-mcp?

| Feature | Official mem0-mcp | better-mem0-mcp |
|---------|-------------------|-----------------|
| Storage | Mem0 Cloud | **Self-hosted PostgreSQL** |
| Graph Memory | No | **Yes (SQL-based)** |
| LLM Provider | OpenAI only | **Any (Gemini/OpenAI/Ollama/...)** |
| Fallback | No | **Yes (multi-key + multi-model)** |
| Setup | API Key | **DATABASE_URL + API_KEYS** |

---

## License

MIT
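Comparing the two METADATA files shows the main user-facing configuration change in this release: 1.1.5 keyed `API_KEYS` entries by provider name (`gemini:AIza...`), while 1.2.0 keys them by LiteLLM environment-variable name (`GOOGLE_API_KEY:AIza...`). A small, hypothetical helper (not shipped with the package) that rewrites an old-style value using the provider-to-env-var mapping removed from config.py:

```python
# Mapping taken from the code removed from config.py in 1.2.0.
PROVIDER_TO_ENV = {
    "gemini": "GOOGLE_API_KEY",
    "openai": "OPENAI_API_KEY",
    "anthropic": "ANTHROPIC_API_KEY",
    "groq": "GROQ_API_KEY",
    "deepseek": "DEEPSEEK_API_KEY",
    "mistral": "MISTRAL_API_KEY",
}


def migrate_api_keys(old_value: str) -> str:
    """Rewrite a 1.1.5-style API_KEYS value (provider:key,...) to the 1.2.0 format (ENV_VAR:key,...)."""
    new_pairs = []
    for pair in old_value.split(","):
        pair = pair.strip()
        if ":" not in pair:
            continue
        provider, key = pair.split(":", 1)
        env_var = PROVIDER_TO_ENV.get(provider.strip().lower(), provider.strip())
        new_pairs.append(f"{env_var}:{key.strip()}")
    return ",".join(new_pairs)


# migrate_api_keys("gemini:AIza-1,gemini:AIza-2,openai:sk-xxx")
# -> "GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx"
```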
{better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/WHEEL: file without changes
{better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/entry_points.txt: file without changes
{better_mem0_mcp-1.1.5.dist-info → better_mem0_mcp-1.2.0.dist-info}/licenses/LICENSE: file without changes