mem0-open-mcp 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. mem0_open_mcp-0.1.3/.github/workflows/publish.yml +36 -0
  2. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/PKG-INFO +15 -3
  3. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/README.md +14 -2
  4. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/mem0-open-mcp.example.yaml +6 -0
  5. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/pyproject.toml +1 -1
  6. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/__init__.py +6 -1
  7. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/cli.py +199 -1
  8. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/config/schema.py +2 -0
  9. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/.env.example +0 -0
  10. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/.gitignore +0 -0
  11. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/Dockerfile +0 -0
  12. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/docker-compose.yaml +0 -0
  13. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/examples/ollama-config.yaml +0 -0
  14. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/api/__init__.py +0 -0
  15. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/api/routes.py +0 -0
  16. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/config/__init__.py +0 -0
  17. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/config/loader.py +0 -0
  18. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/mcp/__init__.py +0 -0
  19. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/mcp/server.py +0 -0
  20. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/server.py +0 -0
  21. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/src/mem0_server/utils/__init__.py +0 -0
  22. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/tests/test_api.py +0 -0
  23. {mem0_open_mcp-0.1.1 → mem0_open_mcp-0.1.3}/tests/test_config.py +0 -0
@@ -0,0 +1,36 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ push:
5
+ tags:
6
+ - 'v*'
7
+
8
+ jobs:
9
+ publish:
10
+ runs-on: ubuntu-latest
11
+ environment: pypi
12
+ permissions:
13
+ id-token: write
14
+ contents: write
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+
18
+ - name: Set up Python
19
+ uses: actions/setup-python@v5
20
+ with:
21
+ python-version: "3.11"
22
+
23
+ - name: Install build tools
24
+ run: pip install build
25
+
26
+ - name: Build package
27
+ run: python -m build
28
+
29
+ - name: Publish to PyPI
30
+ uses: pypa/gh-action-pypi-publish@release/v1
31
+
32
+ - name: Create GitHub Release
33
+ uses: softprops/action-gh-release@v2
34
+ with:
35
+ generate_release_notes: true
36
+ files: dist/*
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mem0-open-mcp
3
- Version: 0.1.1
3
+ Version: 0.1.3
4
4
  Summary: Open-source MCP server for mem0 - local LLMs, self-hosted, Docker-free
5
5
  Author: Alex
6
6
  License-Expression: Apache-2.0
@@ -59,10 +59,14 @@ Created because the official `mem0-mcp` configuration wasn't working properly fo
59
59
 
60
60
  ### Installation
61
61
 
62
- Install from source:
62
+ ```bash
63
+ pip install mem0-open-mcp
64
+ ```
65
+
66
+ Or install from source:
63
67
 
64
68
  ```bash
65
- git clone https://github.com/yourname/mem0-open-mcp.git
69
+ git clone https://github.com/wonseoko/mem0-open-mcp.git
66
70
  cd mem0-open-mcp
67
71
  pip install -e .
68
72
  ```
@@ -79,10 +83,18 @@ mem0-open-mcp configure
79
83
  # Start the server
80
84
  mem0-open-mcp serve
81
85
 
86
+ # Test configuration before starting (recommended for initial setup)
87
+ mem0-open-mcp serve --test
88
+
82
89
  # With options
83
90
  mem0-open-mcp serve --port 8765 --user-id alice
84
91
  ```
85
92
 
93
+ The `--test` flag runs connectivity and memory tests before starting the server:
94
+ - Checks Vector Store, LLM, and Embedder connections
95
+ - Performs actual memory add/search operations
96
+ - Cleans up test data automatically
97
+
86
98
  ## Configuration
87
99
 
88
100
  Create `mem0-open-mcp.yaml`:
@@ -18,10 +18,14 @@ Created because the official `mem0-mcp` configuration wasn't working properly fo
18
18
 
19
19
  ### Installation
20
20
 
21
- Install from source:
21
+ ```bash
22
+ pip install mem0-open-mcp
23
+ ```
24
+
25
+ Or install from source:
22
26
 
23
27
  ```bash
24
- git clone https://github.com/yourname/mem0-open-mcp.git
28
+ git clone https://github.com/wonseoko/mem0-open-mcp.git
25
29
  cd mem0-open-mcp
26
30
  pip install -e .
27
31
  ```
@@ -38,10 +42,18 @@ mem0-open-mcp configure
38
42
  # Start the server
39
43
  mem0-open-mcp serve
40
44
 
45
+ # Test configuration before starting (recommended for initial setup)
46
+ mem0-open-mcp serve --test
47
+
41
48
  # With options
42
49
  mem0-open-mcp serve --port 8765 --user-id alice
43
50
  ```
44
51
 
52
+ The `--test` flag runs connectivity and memory tests before starting the server:
53
+ - Checks Vector Store, LLM, and Embedder connections
54
+ - Performs actual memory add/search operations
55
+ - Cleans up test data automatically
56
+
45
57
  ## Configuration
46
58
 
47
59
  Create `mem0-open-mcp.yaml`:
@@ -30,6 +30,10 @@ embedder:
30
30
  config:
31
31
  model: "text-embedding-3-small"
32
32
  api_key: "env:OPENAI_API_KEY"
33
+ # Embedding dimensions (must match vector_store.embedding_model_dims)
34
+ # OpenAI text-embedding-3-small: 1536
35
+ # Ollama nomic-embed-text: 768
36
+ embedding_dims: 1536
33
37
  # For Ollama:
34
38
  # base_url: "http://localhost:11434"
35
39
 
@@ -42,6 +46,8 @@ vector_store:
42
46
  collection_name: "mem0_memories"
43
47
  host: "localhost"
44
48
  port: 6333
49
+ # Must match embedder.embedding_dims
50
+ embedding_model_dims: 1536
45
51
  # For cloud services:
46
52
  # api_key: "env:QDRANT_API_KEY"
47
53
  # url: "https://your-cluster.qdrant.io"
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "mem0-open-mcp"
3
- version = "0.1.1"
3
+ version = "0.1.3"
4
4
  description = "Open-source MCP server for mem0 - local LLMs, self-hosted, Docker-free"
5
5
  readme = "README.md"
6
6
  license = "Apache-2.0"
@@ -5,4 +5,9 @@ This package provides a CLI tool to run mem0 as an MCP server without Docker,
5
5
  with optional web UI for configuration management.
6
6
  """
7
7
 
8
- __version__ = "0.1.0"
8
+ from importlib.metadata import version, PackageNotFoundError
9
+
10
+ try:
11
+ __version__ = version("mem0-open-mcp")
12
+ except PackageNotFoundError:
13
+ __version__ = "0.0.0-dev"
@@ -2,7 +2,9 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
+ import json
5
6
  import logging
7
+ import time
6
8
  from pathlib import Path
7
9
  from typing import Annotated
8
10
 
@@ -29,6 +31,190 @@ app = typer.Typer(
29
31
 
30
32
  console = Console()
31
33
 
34
+ UPDATE_CHECK_CACHE = Path.home() / ".cache" / "mem0-open-mcp" / "update_check.json"
35
+ UPDATE_CHECK_INTERVAL = 86400 # 24 hours
36
+
37
+
38
+ def _check_for_updates() -> None:
39
+ """Check PyPI for newer version (once per day)."""
40
+ try:
41
+ UPDATE_CHECK_CACHE.parent.mkdir(parents=True, exist_ok=True)
42
+
43
+ now = time.time()
44
+ if UPDATE_CHECK_CACHE.exists():
45
+ cache = json.loads(UPDATE_CHECK_CACHE.read_text())
46
+ if now - cache.get("last_check", 0) < UPDATE_CHECK_INTERVAL:
47
+ if cache.get("latest") and cache["latest"] != __version__:
48
+ console.print(
49
+ f"[yellow]Update available: {__version__} → {cache['latest']}[/yellow]\n"
50
+ f"[dim] pip install --upgrade mem0-open-mcp[/dim]\n"
51
+ )
52
+ return
53
+
54
+ import httpx
55
+ resp = httpx.get("https://pypi.org/pypi/mem0-open-mcp/json", timeout=3)
56
+ resp.raise_for_status()
57
+ latest = resp.json()["info"]["version"]
58
+
59
+ UPDATE_CHECK_CACHE.write_text(json.dumps({"last_check": now, "latest": latest}))
60
+
61
+ if latest != __version__:
62
+ console.print(
63
+ f"[yellow]Update available: {__version__} → {latest}[/yellow]\n"
64
+ f"[dim] pip install --upgrade mem0-open-mcp[/dim]\n"
65
+ )
66
+ except Exception:
67
+ pass
68
+
69
+
70
+ def _run_connectivity_tests(config: Mem0ServerConfig) -> bool:
71
+ """Run connectivity tests for LLM, Embedder, and Vector Store."""
72
+ console.print("[bold]Running connectivity tests...[/bold]\n")
73
+
74
+ all_passed = True
75
+
76
+ # Test Vector Store
77
+ console.print(" [dim]Vector Store...[/dim]", end=" ")
78
+ try:
79
+ vs_config = config.vector_store
80
+ if vs_config.provider.value == "qdrant":
81
+ from qdrant_client import QdrantClient
82
+ host = vs_config.config.host or "localhost"
83
+ port = vs_config.config.port or 6333
84
+ client = QdrantClient(host=host, port=port, timeout=5)
85
+ client.get_collections()
86
+ console.print("[green]✓ Connected[/green]")
87
+ elif vs_config.provider.value == "chroma":
88
+ import chromadb
89
+ if vs_config.config.host:
90
+ client = chromadb.HttpClient(host=vs_config.config.host, port=vs_config.config.port or 8000)
91
+ else:
92
+ client = chromadb.Client()
93
+ client.heartbeat()
94
+ console.print("[green]✓ Connected[/green]")
95
+ else:
96
+ console.print(f"[yellow]⚠ Skip (no test for {vs_config.provider.value})[/yellow]")
97
+ except Exception as e:
98
+ console.print(f"[red]✗ Failed: {e}[/red]")
99
+ all_passed = False
100
+
101
+ # Test LLM
102
+ console.print(" [dim]LLM...[/dim]", end=" ")
103
+ try:
104
+ llm_config = config.llm
105
+ if llm_config.provider.value == "ollama":
106
+ import httpx
107
+ base_url = llm_config.config.base_url or "http://localhost:11434"
108
+ resp = httpx.get(f"{base_url}/api/tags", timeout=5)
109
+ resp.raise_for_status()
110
+ models = [m["name"] for m in resp.json().get("models", [])]
111
+ if llm_config.config.model in models or any(llm_config.config.model in m for m in models):
112
+ console.print(f"[green]✓ Connected ({llm_config.config.model})[/green]")
113
+ else:
114
+ console.print(f"[yellow]⚠ Connected but model '{llm_config.config.model}' not found[/yellow]")
115
+ console.print(f" [dim]Available: {', '.join(models[:5])}{'...' if len(models) > 5 else ''}[/dim]")
116
+ elif llm_config.provider.value in ("openai", "lmstudio"):
117
+ import httpx
118
+ base_url = llm_config.config.base_url or "https://api.openai.com/v1"
119
+ headers = {}
120
+ if llm_config.config.api_key:
121
+ headers["Authorization"] = f"Bearer {llm_config.config.api_key}"
122
+ resp = httpx.get(f"{base_url}/models", headers=headers, timeout=5)
123
+ resp.raise_for_status()
124
+ console.print(f"[green]✓ Connected ({llm_config.config.model})[/green]")
125
+ else:
126
+ console.print(f"[yellow]⚠ Skip (no test for {llm_config.provider.value})[/yellow]")
127
+ except Exception as e:
128
+ console.print(f"[red]✗ Failed: {e}[/red]")
129
+ all_passed = False
130
+
131
+ # Test Embedder
132
+ console.print(" [dim]Embedder...[/dim]", end=" ")
133
+ try:
134
+ emb_config = config.embedder
135
+ if emb_config.provider.value == "ollama":
136
+ import httpx
137
+ base_url = emb_config.config.base_url or "http://localhost:11434"
138
+ resp = httpx.get(f"{base_url}/api/tags", timeout=5)
139
+ resp.raise_for_status()
140
+ models = [m["name"] for m in resp.json().get("models", [])]
141
+ if emb_config.config.model in models or any(emb_config.config.model in m for m in models):
142
+ console.print(f"[green]✓ Connected ({emb_config.config.model})[/green]")
143
+ else:
144
+ console.print(f"[yellow]⚠ Connected but model '{emb_config.config.model}' not found[/yellow]")
145
+ console.print(f" [dim]Available: {', '.join(models[:5])}{'...' if len(models) > 5 else ''}[/dim]")
146
+ elif emb_config.provider.value in ("openai", "lmstudio"):
147
+ import httpx
148
+ base_url = emb_config.config.base_url or "https://api.openai.com/v1"
149
+ headers = {}
150
+ if emb_config.config.api_key:
151
+ headers["Authorization"] = f"Bearer {emb_config.config.api_key}"
152
+ resp = httpx.get(f"{base_url}/models", headers=headers, timeout=5)
153
+ resp.raise_for_status()
154
+ console.print(f"[green]✓ Connected ({emb_config.config.model})[/green]")
155
+ else:
156
+ console.print(f"[yellow]⚠ Skip (no test for {emb_config.provider.value})[/yellow]")
157
+ except Exception as e:
158
+ console.print(f"[red]✗ Failed: {e}[/red]")
159
+ all_passed = False
160
+
161
+ console.print()
162
+ if all_passed:
163
+ console.print("[bold green]All connectivity tests passed![/bold green]\n")
164
+ else:
165
+ console.print("[bold red]Some connectivity tests failed. Please check your configuration.[/bold red]\n")
166
+
167
+ return all_passed
168
+
169
+
170
+ def _run_memory_tests(config: Mem0ServerConfig) -> bool:
171
+ """Run actual mem0 memory add/search tests."""
172
+ import uuid
173
+ console.print("[bold]Running memory tests...[/bold]\n")
174
+
175
+ test_user_id = f"__test_user_{uuid.uuid4().hex[:8]}"
176
+ test_memory_text = "This is a test memory for connectivity verification."
177
+
178
+ try:
179
+ console.print(" [dim]Initializing mem0 client...[/dim]", end=" ")
180
+ from mem0 import Memory
181
+ mem0_config = config.to_mem0_config()
182
+ memory = Memory.from_config(mem0_config)
183
+ console.print("[green]✓[/green]")
184
+
185
+ console.print(" [dim]Adding test memory...[/dim]", end=" ")
186
+ add_result = memory.add(test_memory_text, user_id=test_user_id)
187
+ if add_result and add_result.get("results"):
188
+ first_result = add_result["results"][0]
189
+ memory_id = first_result.get("id") if first_result else None
190
+ if memory_id:
191
+ console.print(f"[green]✓ Added (id: {memory_id[:8]}...)[/green]")
192
+ else:
193
+ console.print("[green]✓ Added[/green]")
194
+ else:
195
+ console.print("[green]✓ Added[/green]")
196
+
197
+ console.print(" [dim]Searching memories...[/dim]", end=" ")
198
+ search_result = memory.search("test memory verification", user_id=test_user_id, limit=5)
199
+ if search_result and search_result.get("results"):
200
+ console.print(f"[green]✓ Found {len(search_result['results'])} result(s)[/green]")
201
+ else:
202
+ console.print("[yellow]⚠ No results (may be expected for new setup)[/yellow]")
203
+
204
+ console.print(" [dim]Cleaning up test data...[/dim]", end=" ")
205
+ memory.delete_all(user_id=test_user_id)
206
+ console.print("[green]✓ Cleaned[/green]")
207
+
208
+ console.print()
209
+ console.print("[bold green]All memory tests passed![/bold green]\n")
210
+ return True
211
+
212
+ except Exception as e:
213
+ console.print(f"[red]✗ Failed: {e}[/red]")
214
+ console.print()
215
+ console.print("[bold red]Memory test failed. Check your LLM/Embedder/VectorStore configuration.[/bold red]\n")
216
+ return False
217
+
32
218
 
33
219
  def version_callback(value: bool) -> None:
34
220
  """Show version and exit."""
@@ -83,6 +269,10 @@ def serve(
83
269
  str,
84
270
  typer.Option("--log-level", "-l", help="Logging level."),
85
271
  ] = "info",
272
+ test: Annotated[
273
+ bool,
274
+ typer.Option("--test", "-t", help="Run connectivity tests before starting server."),
275
+ ] = False,
86
276
  ) -> None:
87
277
  """Start the MCP server.
88
278
 
@@ -120,7 +310,9 @@ def serve(
120
310
  border_style="green",
121
311
  ))
122
312
 
123
- console.print("\n[bold]Configuration:[/bold]")
313
+ _check_for_updates()
314
+
315
+ console.print("[bold]Configuration:[/bold]")
124
316
  console.print(f" Host: [cyan]{config.server.host}[/cyan]")
125
317
  console.print(f" Port: [cyan]{config.server.port}[/cyan]")
126
318
  console.print(f" User ID: [cyan]{config.server.user_id}[/cyan]")
@@ -129,6 +321,12 @@ def serve(
129
321
  console.print(f" Vector Store: [cyan]{config.vector_store.provider.value}[/cyan]")
130
322
  console.print()
131
323
 
324
+ if test:
325
+ if not _run_connectivity_tests(config):
326
+ raise typer.Exit(1)
327
+ if not _run_memory_tests(config):
328
+ raise typer.Exit(1)
329
+
132
330
  # Start the server
133
331
  try:
134
332
  from mem0_server.server import run_server
@@ -323,6 +323,7 @@ def get_default_config() -> Mem0ServerConfig:
323
323
  config=EmbedderConfig(
324
324
  model="text-embedding-3-small",
325
325
  api_key="env:OPENAI_API_KEY",
326
+ embedding_dims=1536,
326
327
  ),
327
328
  ),
328
329
  vector_store=VectorStoreProvider(
@@ -331,6 +332,7 @@ def get_default_config() -> Mem0ServerConfig:
331
332
  collection_name="mem0_memories",
332
333
  host="localhost",
333
334
  port=6333,
335
+ embedding_model_dims=1536,
334
336
  ),
335
337
  ),
336
338
  )
File without changes
File without changes