better-mem0-mcp 1.1.0b21__tar.gz → 1.1.0b22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.github/workflows/cd.yml +41 -4
  2. better_mem0_mcp-1.1.0b22/.vscode/better-mem0-mcp.code-workspace +23 -0
  3. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/CHANGELOG.md +7 -0
  4. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/PKG-INFO +10 -11
  5. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/README.md +8 -9
  6. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/pyproject.toml +2 -2
  7. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/config.py +14 -21
  8. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/uv.lock +4 -505
  9. better_mem0_mcp-1.1.0b21/.vscode/better-mem0-mcp.code-workspace +0 -8
  10. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.dockerignore +0 -0
  11. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.editorconfig +0 -0
  12. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.github/scripts/check-ci-cd-status.sh +0 -0
  13. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.github/scripts/merge-with-auto-resolve.sh +0 -0
  14. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.github/workflows/ci.yml +0 -0
  15. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.gitignore +0 -0
  16. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.mise.toml +0 -0
  17. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.pre-commit-config.yaml +0 -0
  18. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.python-version +0 -0
  19. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.releaserc.json +0 -0
  20. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/CODE_OF_CONDUCT.md +0 -0
  21. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/CONTRIBUTING.md +0 -0
  22. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/Dockerfile +0 -0
  23. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/LICENSE +0 -0
  24. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/SECURITY.md +0 -0
  25. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/package-lock.json +0 -0
  26. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/package.json +0 -0
  27. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/scripts/clean-venv.mjs +0 -0
  28. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/__init__.py +0 -0
  29. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/__main__.py +0 -0
  30. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/docs/memory.md +0 -0
  31. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/graph.py +0 -0
  32. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/py.typed +0 -0
  33. {better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/server.py +0 -0

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/.github/workflows/cd.yml
@@ -46,12 +46,49 @@ jobs:
           chmod +x .github/scripts/check-ci-cd-status.sh
           ./.github/scripts/check-ci-cd-status.sh --branch=dev
 
-      - name: Merge dev to main
+      - name: Sync main into dev (resolve version conflicts)
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git checkout dev
+          git fetch origin main
+          git merge origin/main --no-edit -X ours || true
+          git push origin dev
+
+      - name: Create PR to promote dev to main
         env:
-          AUTO_RESOLVE_FILES: "CHANGELOG.md,pyproject.toml"
+          GH_TOKEN: ${{ secrets.GH_PAT }}
         run: |
-          chmod +x .github/scripts/merge-with-auto-resolve.sh
-          ./.github/scripts/merge-with-auto-resolve.sh --source=dev --target=main --files="$AUTO_RESOLVE_FILES"
+          # Check if PR already exists
+          EXISTING_PR=$(gh pr list --base main --head dev --json number --jq '.[0].number')
+          if [ -n "$EXISTING_PR" ]; then
+            echo "PR #$EXISTING_PR already exists for dev -> main"
+            echo "URL: https://github.com/${{ github.repository }}/pull/$EXISTING_PR"
+            exit 0
+          fi
+
+          # Get latest release version from dev
+          LATEST_TAG=$(git describe --tags --abbrev=0 origin/dev 2>/dev/null || echo "")
+          if [ -z "$LATEST_TAG" ]; then
+            PR_TITLE="chore: promote dev to main"
+          else
+            PR_TITLE="chore: promote dev to main ($LATEST_TAG)"
+          fi
+
+          # Create PR
+          gh pr create \
+            --base main \
+            --head dev \
+            --title "$PR_TITLE" \
+            --body "## Promote dev to main
+
+          This PR promotes the latest changes from \`dev\` branch to \`main\`.
+
+          ### Pre-checks passed:
+          - ✅ CI workflow passed on dev
+          - ✅ CD workflow passed on dev
+
+          ### Latest beta version: $LATEST_TAG"
 
   release:
     name: Semantic Release
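
The promotion step above is idempotent: it reuses an open `dev` → `main` PR if one exists, and otherwise titles a new PR after the most recent tag reachable from `origin/dev`. As a rough illustration only (not part of the package), the same check-then-create logic could be scripted as below, assuming an authenticated `gh` CLI; the `ensure_promotion_pr` helper and its PR body text are hypothetical, while the `gh` flags mirror the workflow above.

```python
import subprocess


def run(args: list[str]) -> str:
    """Run a command and return stripped stdout, or '' if it fails."""
    result = subprocess.run(args, capture_output=True, text=True)
    return result.stdout.strip() if result.returncode == 0 else ""


def ensure_promotion_pr() -> None:
    # Same query the workflow uses to detect an existing dev -> main PR.
    existing = run(["gh", "pr", "list", "--base", "main", "--head", "dev",
                    "--json", "number", "--jq", ".[0].number"])
    if existing:
        print(f"PR #{existing} already exists for dev -> main")
        return

    # Title the PR after the latest tag reachable from origin/dev, if any.
    tag = run(["git", "describe", "--tags", "--abbrev=0", "origin/dev"])
    title = f"chore: promote dev to main ({tag})" if tag else "chore: promote dev to main"

    run(["gh", "pr", "create", "--base", "main", "--head", "dev",
         "--title", title, "--body", "Promote dev to main"])


if __name__ == "__main__":
    ensure_promotion_pr()
```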

better_mem0_mcp-1.1.0b22/.vscode/better-mem0-mcp.code-workspace (new file)
@@ -0,0 +1,23 @@
+{
+  "folders": [
+    {
+      "path": ".."
+    },
+    {
+      "path": "../../EchoVault"
+    },
+    {
+      "path": "../../wet-mcp"
+    },
+    {
+      "path": "../../better-notion-mcp"
+    },
+    {
+      "path": "../../KnowledgePrism"
+    },
+    {
+      "path": "../../QuikShipping"
+    }
+  ],
+  "settings": {}
+}

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/CHANGELOG.md
@@ -1,3 +1,10 @@
+# [1.1.0-beta.22](https://github.com/n24q02m/better-mem0-mcp/compare/v1.1.0-beta.21...v1.1.0-beta.22) (2026-02-05)
+
+
+### Bug Fixes
+
+* Pin Python version to 3.13 and remove Python 3.14 compatibility from the dependency lock file. ([ec134ef](https://github.com/n24q02m/better-mem0-mcp/commit/ec134ef10ab6de3eaf800d51a441fe2b44f2d4dc))
+
 # [1.1.0-beta.21](https://github.com/n24q02m/better-mem0-mcp/compare/v1.1.0-beta.20...v1.1.0-beta.21) (2026-01-12)
 
 

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: better-mem0-mcp
-Version: 1.1.0b21
+Version: 1.1.0b22
 Summary: Zero-setup MCP Server for AI memory - works with Neon/Supabase
 Project-URL: Homepage, https://github.com/n24q02m/better-mem0-mcp
 Project-URL: Repository, https://github.com/n24q02m/better-mem0-mcp.git
@@ -17,7 +17,7 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
-Requires-Python: >=3.13
+Requires-Python: ==3.13.*
 Requires-Dist: google-genai>=1.0.0
 Requires-Dist: litellm>=1.0.0
 Requires-Dist: loguru>=0.7.0
@@ -64,7 +64,7 @@ Description-Content-Type: text/markdown
       "args": ["better-mem0-mcp@latest"],
       "env": {
         "DATABASE_URL": "postgresql://user:pass@xxx.neon.tech/neondb?sslmode=require",
-        "API_KEYS": "gemini:AIza..."
+        "API_KEYS": "GOOGLE_API_KEY:AIza..."
       }
     }
   }
@@ -81,7 +81,7 @@ Description-Content-Type: text/markdown
       "args": ["run", "-i", "--rm", "-e", "DATABASE_URL", "-e", "API_KEYS", "n24q02m/better-mem0-mcp:latest"],
       "env": {
         "DATABASE_URL": "postgresql://...",
-        "API_KEYS": "gemini:AIza..."
+        "API_KEYS": "GOOGLE_API_KEY:AIza..."
       }
     }
   }
@@ -99,24 +99,23 @@ Ask your AI: "Remember that I prefer dark mode and use FastAPI"
 | Variable | Required | Description |
 |----------|----------|-------------|
 | `DATABASE_URL` | Yes | PostgreSQL with pgvector extension |
-| `API_KEYS` | Yes | `provider:key` pairs, comma-separated |
+| `API_KEYS` | Yes | `ENV_VAR:key` pairs, comma-separated |
 | `LLM_MODELS` | No | Model fallback chain |
 | `EMBEDDER_MODELS` | No | Embedding model chain |
 
-### Supported Providers
+### Supported LiteLLM Providers
 
-`gemini`, `openai`, `anthropic`, `groq`, `deepseek`, `mistral`
-
-### Examples
+Use environment variable names from [LiteLLM docs](https://docs.litellm.ai/):
+`GOOGLE_API_KEY`, `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, `GROQ_API_KEY`, etc.
 
 **Single provider:**
 ```bash
-API_KEYS=gemini:AIza...
+API_KEYS=GOOGLE_API_KEY:AIza...
 ```
 
 **Multi-key with fallback:**
 ```bash
-API_KEYS=gemini:AIza-1,gemini:AIza-2,openai:sk-xxx
+API_KEYS=GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx
 LLM_MODELS=gemini/gemini-3-flash-preview,openai/gpt-4o-mini
 EMBEDDER_MODELS=gemini/gemini-embedding-001,openai/text-embedding-3-small
 ```

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/README.md
@@ -35,7 +35,7 @@
       "args": ["better-mem0-mcp@latest"],
       "env": {
         "DATABASE_URL": "postgresql://user:pass@xxx.neon.tech/neondb?sslmode=require",
-        "API_KEYS": "gemini:AIza..."
+        "API_KEYS": "GOOGLE_API_KEY:AIza..."
      }
     }
   }
@@ -52,7 +52,7 @@
       "args": ["run", "-i", "--rm", "-e", "DATABASE_URL", "-e", "API_KEYS", "n24q02m/better-mem0-mcp:latest"],
       "env": {
         "DATABASE_URL": "postgresql://...",
-        "API_KEYS": "gemini:AIza..."
+        "API_KEYS": "GOOGLE_API_KEY:AIza..."
       }
     }
   }
@@ -70,24 +70,23 @@ Ask your AI: "Remember that I prefer dark mode and use FastAPI"
 | Variable | Required | Description |
 |----------|----------|-------------|
 | `DATABASE_URL` | Yes | PostgreSQL with pgvector extension |
-| `API_KEYS` | Yes | `provider:key` pairs, comma-separated |
+| `API_KEYS` | Yes | `ENV_VAR:key` pairs, comma-separated |
 | `LLM_MODELS` | No | Model fallback chain |
 | `EMBEDDER_MODELS` | No | Embedding model chain |
 
-### Supported Providers
+### Supported LiteLLM Providers
 
-`gemini`, `openai`, `anthropic`, `groq`, `deepseek`, `mistral`
-
-### Examples
+Use environment variable names from [LiteLLM docs](https://docs.litellm.ai/):
+`GOOGLE_API_KEY`, `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, `GROQ_API_KEY`, etc.
 
 **Single provider:**
 ```bash
-API_KEYS=gemini:AIza...
+API_KEYS=GOOGLE_API_KEY:AIza...
 ```
 
 **Multi-key with fallback:**
 ```bash
-API_KEYS=gemini:AIza-1,gemini:AIza-2,openai:sk-xxx
+API_KEYS=GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx
 LLM_MODELS=gemini/gemini-3-flash-preview,openai/gpt-4o-mini
 EMBEDDER_MODELS=gemini/gemini-embedding-001,openai/text-embedding-3-small
 ```
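
For context, the new `API_KEYS` format maps each `ENV_VAR:key` pair directly onto a process environment variable that LiteLLM reads; when the same variable is listed more than once, only the first key is exported and the remaining keys serve as fallbacks. A minimal standalone sketch of that mapping (placeholder keys, not code from the package):

```python
import os

# Hypothetical API_KEYS value in the new ENV_VAR:key format (placeholder keys).
api_keys = "GOOGLE_API_KEY:AIza-1,GOOGLE_API_KEY:AIza-2,OPENAI_API_KEY:sk-xxx"

for pair in api_keys.split(","):
    env_var, _, key = pair.strip().partition(":")
    # Export only the first key seen for each variable; later ones are fallbacks.
    os.environ.setdefault(env_var, key)

print(os.environ["GOOGLE_API_KEY"])  # AIza-1
print(os.environ["OPENAI_API_KEY"])  # sk-xxx
```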

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "better-mem0-mcp"
-version = "1.1.0-beta.21"
+version = "1.1.0-beta.22"
 description = "Zero-setup MCP Server for AI memory - works with Neon/Supabase"
 readme = "README.md"
 license = { text = "MIT" }
@@ -16,7 +16,7 @@ classifiers = [
     "Programming Language :: Python :: 3.13",
     "Topic :: Scientific/Engineering :: Artificial Intelligence",
 ]
-requires-python = ">=3.13"
+requires-python = "==3.13.*"
 dependencies = [
     # MCP Server
     "mcp[cli]>=1.0.0",

{better_mem0_mcp-1.1.0b21 → better_mem0_mcp-1.1.0b22}/src/better_mem0_mcp/config.py
@@ -33,43 +33,36 @@ class Settings(BaseSettings):
     embedder_models: str = "gemini/gemini-embedding-001"
 
     def setup_api_keys(self) -> dict[str, list[str]]:
-        """
-        Parse API_KEYS and set environment variables for LiteLLM.
+        """Parse API_KEYS (format: ENV_VAR:key,...) and set env vars.
+
+        Example:
+            API_KEYS="GOOGLE_API_KEY:abc,GOOGLE_API_KEY:def,OPENAI_API_KEY:xyz"
 
         Returns:
-            Dict mapping provider to list of API keys.
+            Dict mapping env var name to list of API keys.
         """
-        env_map = {
-            "gemini": "GOOGLE_API_KEY",
-            "openai": "OPENAI_API_KEY",
-            "anthropic": "ANTHROPIC_API_KEY",
-            "groq": "GROQ_API_KEY",
-            "deepseek": "DEEPSEEK_API_KEY",
-            "mistral": "MISTRAL_API_KEY",
-        }
-
-        keys_by_provider: dict[str, list[str]] = {}
+        keys_by_env: dict[str, list[str]] = {}
 
         for pair in self.api_keys.split(","):
            pair = pair.strip()
            if ":" not in pair:
                continue
 
-            provider, key = pair.split(":", 1)
-            provider = provider.strip().lower()
+            env_var, key = pair.split(":", 1)
+            env_var = env_var.strip()
             key = key.strip()
 
            if not key:
                continue
 
-            keys_by_provider.setdefault(provider, []).append(key)
+            keys_by_env.setdefault(env_var, []).append(key)
 
-        # Set first key of each provider as env var (LiteLLM reads from env)
-        for provider, keys in keys_by_provider.items():
-            if provider in env_map and keys:
-                os.environ[env_map[provider]] = keys[0]
+        # Set first key of each env var (LiteLLM reads from env)
+        for env_var, keys in keys_by_env.items():
+            if keys:
+                os.environ[env_var] = keys[0]
 
-        return keys_by_provider
+        return keys_by_env
 
     def parse_database_url(self) -> dict:
         """Parse DATABASE_URL into connection parameters."""