memable 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. memable-0.1.1/.github/workflows/ci.yml +72 -0
  2. memable-0.1.1/.github/workflows/pages.yml +61 -0
  3. memable-0.1.1/.github/workflows/release.yml +38 -0
  4. memable-0.1.1/.gitignore +55 -0
  5. memable-0.1.1/CHANGELOG.md +20 -0
  6. memable-0.1.1/CLAUDE.md +85 -0
  7. memable-0.1.1/LICENSE +21 -0
  8. memable-0.1.1/PKG-INFO +286 -0
  9. memable-0.1.1/README.md +247 -0
  10. memable-0.1.1/ROADMAP.md +44 -0
  11. memable-0.1.1/examples/simple_agent/README.md +62 -0
  12. memable-0.1.1/examples/simple_agent/main.py +145 -0
  13. memable-0.1.1/pyproject.toml +113 -0
  14. memable-0.1.1/scripts/publish.sh +46 -0
  15. memable-0.1.1/src/memable/__init__.py +44 -0
  16. memable-0.1.1/src/memable/backends/__init__.py +14 -0
  17. memable-0.1.1/src/memable/backends/base.py +121 -0
  18. memable-0.1.1/src/memable/backends/duckdb.py +291 -0
  19. memable-0.1.1/src/memable/backends/factory.py +203 -0
  20. memable-0.1.1/src/memable/backends/postgres.py +238 -0
  21. memable-0.1.1/src/memable/backends/sqlite.py +367 -0
  22. memable-0.1.1/src/memable/consolidation.py +333 -0
  23. memable-0.1.1/src/memable/contradiction.py +289 -0
  24. memable-0.1.1/src/memable/extraction.py +316 -0
  25. memable-0.1.1/src/memable/graph.py +183 -0
  26. memable-0.1.1/src/memable/nodes.py +262 -0
  27. memable-0.1.1/src/memable/retrieval.py +239 -0
  28. memable-0.1.1/src/memable/schema.py +262 -0
  29. memable-0.1.1/src/memable/store.py +660 -0
  30. memable-0.1.1/tests/__init__.py +1 -0
  31. memable-0.1.1/tests/conftest.py +154 -0
  32. memable-0.1.1/tests/integration/__init__.py +1 -0
  33. memable-0.1.1/tests/integration/test_contradiction.py +100 -0
  34. memable-0.1.1/tests/integration/test_extraction.py +87 -0
  35. memable-0.1.1/tests/integration/test_store.py +225 -0
  36. memable-0.1.1/tests/performance/README.md +77 -0
  37. memable-0.1.1/tests/performance/__init__.py +1 -0
  38. memable-0.1.1/tests/performance/test_embedding_costs.py +222 -0
  39. memable-0.1.1/tests/performance/test_storage_growth.py +227 -0
  40. memable-0.1.1/tests/performance/test_throughput.py +195 -0
  41. memable-0.1.1/tests/unit/__init__.py +1 -0
  42. memable-0.1.1/tests/unit/test_consolidation.py +53 -0
  43. memable-0.1.1/tests/unit/test_duckdb_backend.py +132 -0
  44. memable-0.1.1/tests/unit/test_memory_type_filter.py +181 -0
  45. memable-0.1.1/tests/unit/test_retrieval.py +135 -0
  46. memable-0.1.1/tests/unit/test_schema.py +336 -0
  47. memable-0.1.1/tests/unit/test_schema_isolation.py +87 -0
  48. memable-0.1.1/tests/unit/test_sqlite_backend.py +117 -0
@@ -0,0 +1,72 @@
1
+ name: CI
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+ pull_request:
7
+ branches: [main]
8
+
9
+ jobs:
10
+ test:
11
+ runs-on: ubuntu-latest
12
+
13
+ services:
14
+ postgres:
15
+ image: pgvector/pgvector:pg16
16
+ env:
17
+ POSTGRES_USER: test
18
+ POSTGRES_PASSWORD: test
19
+ POSTGRES_DB: test
20
+ ports:
21
+ - 5432:5432
22
+ options: >-
23
+ --health-cmd pg_isready
24
+ --health-interval 10s
25
+ --health-timeout 5s
26
+ --health-retries 5
27
+
28
+ steps:
29
+ - uses: actions/checkout@v4
30
+
31
+ - name: Set up Python
32
+ uses: actions/setup-python@v5
33
+ with:
34
+ python-version: "3.11"
35
+
36
+ - name: Install dependencies
37
+ run: |
38
+ python -m pip install --upgrade pip
39
+ pip install -e ".[dev]"
40
+
41
+ - name: Run unit tests
42
+ run: pytest tests/unit -v
43
+
44
+ - name: Run integration tests
45
+ env:
46
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
47
+ DATABASE_URL: postgresql://test:test@localhost:5432/test
48
+ run: pytest tests/integration -v
49
+ # Skip if no API key (fork PRs won't have secrets)
50
+ if: env.OPENAI_API_KEY != ''
51
+
52
+ lint:
53
+ runs-on: ubuntu-latest
54
+ steps:
55
+ - uses: actions/checkout@v4
56
+
57
+ - name: Set up Python
58
+ uses: actions/setup-python@v5
59
+ with:
60
+ python-version: "3.11"
61
+
62
+ - name: Install dependencies
63
+ run: |
64
+ python -m pip install --upgrade pip
65
+ pip install ruff mypy
66
+
67
+ - name: Run ruff
68
+ run: ruff check src/
69
+
70
+ - name: Run mypy
71
+ run: mypy src/ --ignore-missing-imports
72
+ continue-on-error: true # Don't fail on type errors yet
@@ -0,0 +1,61 @@
1
+ name: Deploy Landing Page
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+ paths:
7
+ - 'landing/**'
8
+ workflow_dispatch:
9
+
10
+ permissions:
11
+ contents: read
12
+ pages: write
13
+ id-token: write
14
+
15
+ concurrency:
16
+ group: "pages"
17
+ cancel-in-progress: false
18
+
19
+ jobs:
20
+ build:
21
+ runs-on: ubuntu-latest
22
+ steps:
23
+ - uses: actions/checkout@v4
24
+
25
+ - name: Setup pnpm
26
+ uses: pnpm/action-setup@v4
27
+ with:
28
+ version: 10
29
+
30
+ - name: Setup Node
31
+ uses: actions/setup-node@v4
32
+ with:
33
+ node-version: 22
34
+ cache: 'pnpm'
35
+ cache-dependency-path: landing/pnpm-lock.yaml
36
+
37
+ - name: Install dependencies
38
+ working-directory: landing
39
+ run: pnpm install --frozen-lockfile
40
+
41
+ - name: Build
42
+ working-directory: landing
43
+ run: pnpm build
44
+ env:
45
+ NEXT_PUBLIC_BASE_PATH: /memable
46
+
47
+ - name: Upload artifact
48
+ uses: actions/upload-pages-artifact@v3
49
+ with:
50
+ path: landing/out
51
+
52
+ deploy:
53
+ environment:
54
+ name: github-pages
55
+ url: ${{ steps.deployment.outputs.page_url }}
56
+ runs-on: ubuntu-latest
57
+ needs: build
58
+ steps:
59
+ - name: Deploy to GitHub Pages
60
+ id: deployment
61
+ uses: actions/deploy-pages@v4
@@ -0,0 +1,38 @@
1
+ name: Release
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+
7
+ jobs:
8
+ release:
9
+ runs-on: ubuntu-latest
10
+ concurrency: release
11
+ permissions:
12
+ id-token: write
13
+ contents: write
14
+
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+ with:
18
+ fetch-depth: 0
19
+
20
+ - name: Set up Python
21
+ uses: actions/setup-python@v5
22
+ with:
23
+ python-version: "3.11"
24
+
25
+ - name: Install python-semantic-release
26
+ run: pip install python-semantic-release
27
+
28
+ - name: Python Semantic Release
29
+ id: release
30
+ uses: python-semantic-release/python-semantic-release@master
31
+ with:
32
+ github_token: ${{ secrets.GITHUB_TOKEN }}
33
+
34
+ - name: Publish to GitHub Releases
35
+ uses: python-semantic-release/upload-to-gh-release@main
36
+ if: steps.release.outputs.released == 'true'
37
+ with:
38
+ github_token: ${{ secrets.GITHUB_TOKEN }}
@@ -0,0 +1,55 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.so
6
+ .Python
7
+ build/
8
+ develop-eggs/
9
+ dist/
10
+ downloads/
11
+ eggs/
12
+ .eggs/
13
+ lib/
14
+ lib64/
15
+ parts/
16
+ sdist/
17
+ var/
18
+ wheels/
19
+ *.egg-info/
20
+ .installed.cfg
21
+ *.egg
22
+
23
+ # Virtual environments
24
+ .venv/
25
+ venv/
26
+ ENV/
27
+ env/
28
+
29
+ # IDE
30
+ .idea/
31
+ .vscode/
32
+ *.swp
33
+ *.swo
34
+ *~
35
+
36
+ # Testing
37
+ .pytest_cache/
38
+ .coverage
39
+ htmlcov/
40
+ .tox/
41
+ .nox/
42
+
43
+ # Type checking
44
+ .mypy_cache/
45
+
46
+ # Environments
47
+ .env
48
+ .env.local
49
+ *.env
50
+
51
+ # OS
52
+ .DS_Store
53
+ Thumbs.db
54
+
55
+ .claude/
@@ -0,0 +1,20 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## [0.1.0] - 2026-02-06
9
+
10
+ ### Added
11
+ - Memory schema with durability tiers (core/situational/episodic)
12
+ - Version chains for contradiction handling with full audit trail
13
+ - Temporal validity (valid_from/valid_until) for time-bounded memories
14
+ - PostgresStore wrapper with semantic search via pgvector
15
+ - LLM-based memory extraction with automatic durability classification
16
+ - Contradiction detection and resolution
17
+ - Memory consolidation strategies (prune, decay, summarize, dedupe)
18
+ - LangGraph nodes (retrieve_memories, store_memories, consolidate_memories)
19
+ - Pre-built memory graph for quick integration
20
+ - Example app with Neon integration
@@ -0,0 +1,85 @@
1
+ # CLAUDE.md
2
+
3
+ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4
+
5
+ ## Build & Development Commands
6
+
7
+ ```bash
8
+ # Install for development
9
+ pip install -e ".[dev]"
10
+
11
+ # Run all tests
12
+ pytest
13
+
14
+ # Run unit tests only (no external deps)
15
+ pytest tests/unit -m unit
16
+
17
+ # Run integration tests (requires Docker with pgvector or DATABASE_URL)
18
+ pytest tests/integration -m integration
19
+
20
+ # Run a single test file
21
+ pytest tests/unit/test_schema.py -v
22
+
23
+ # Run performance tests (requires OPENAI_API_KEY)
24
+ pytest tests/performance -v -s
25
+
26
+ # Lint and format
27
+ ruff check src tests
28
+ ruff format src tests
29
+
30
+ # Type check
31
+ mypy src
32
+ ```
33
+
34
+ ## Environment Variables
35
+
36
+ - `OPENAI_API_KEY` - Required for embeddings
37
+ - `DATABASE_URL` - PostgreSQL connection string (for integration tests and production)
38
+ - `MEMORY_DB_PATH` - SQLite path when using SQLite backend
39
+
40
+ ## Architecture Overview
41
+
42
+ memable is a semantic memory library for LangGraph agents with three storage backends.
43
+
44
+ ### Core Components
45
+
46
+ **Schema (`src/memable/schema.py`)**
47
+ - `Memory` - Main memory object with durability tiers (core/situational/episodic), temporal validity, version chains, and MemoryType categorization
48
+ - `MemoryCreate`/`MemoryUpdate`/`MemoryQuery` - Input/query types
49
+ - Version chains track supersedes/superseded_by for contradiction handling
50
+
51
+ **Store (`src/memable/store.py`)**
52
+ - `SemanticMemoryStore` - High-level API wrapping backend stores
53
+ - Factory functions: `build_store()` (auto-detect), `build_postgres_store()`, `build_sqlite_store()`, `build_duckdb_store()`
54
+ - All stores use context managers for connection lifecycle
55
+
56
+ **Backends (`src/memable/backends/`)**
57
+ - `BaseStore` - Abstract protocol defining put/get/delete/search operations
58
+ - `PostgresStore` - Production backend using pgvector
59
+ - `SQLiteStore` - Dev/testing backend using sqlite-vec
60
+ - `DuckDBStore` - Analytics backend with native vector similarity
61
+ - Factory in `factory.py` auto-selects backend from URL scheme
62
+
63
+ **LangGraph Integration**
64
+ - `nodes.py` - Pre-built nodes: `retrieve_memories_node`, `store_memories_node`, `consolidate_memories_node`
65
+ - `graph.py` - `build_memory_graph()` creates a ready-to-use graph with retrieve -> LLM -> store flow
66
+
67
+ **Supporting Modules**
68
+ - `extraction.py` - LLM-based memory extraction from conversations
69
+ - `contradiction.py` - Detect and resolve conflicting memories via version chains
70
+ - `consolidation.py` - Decay, summarize, and prune old memories
71
+ - `retrieval.py` - Multi-scope retrieval with priority merging
72
+
73
+ ### Key Patterns
74
+
75
+ - Namespaces are tuples: `("org_id", "user_id", "scope")` for hierarchical scoping
76
+ - Version chains preserve history: old memories get `superseded_by`, new ones get `supersedes`
77
+ - Temporal validity via `valid_from`/`valid_until` with automatic filtering
78
+ - All backends implement the same `BaseStore` interface for swappable storage
79
+
80
+ ### Testing
81
+
82
+ Integration tests use testcontainers with `pgvector/pgvector:pg16` image. Pull first:
83
+ ```bash
84
+ docker pull pgvector/pgvector:pg16
85
+ ```
memable-0.1.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Joel Ash
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
memable-0.1.1/PKG-INFO ADDED
@@ -0,0 +1,286 @@
1
+ Metadata-Version: 2.4
2
+ Name: memable
3
+ Version: 0.1.1
4
+ Summary: Reusable semantic memory library for LangGraph agents with PostgreSQL/pgvector and SQLite backends
5
+ Project-URL: Repository, https://github.com/joelash/memable
6
+ Author: Joel Ash
7
+ License-Expression: MIT
8
+ License-File: LICENSE
9
+ Keywords: agents,ai,langgraph,memory,pgvector,postgres,semantic,sqlite
10
+ Classifier: Development Status :: 3 - Alpha
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: License :: OSI Approved :: MIT License
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
17
+ Requires-Python: >=3.11
18
+ Requires-Dist: langchain-core>=0.3.0
19
+ Requires-Dist: langchain-openai>=0.3.0
20
+ Requires-Dist: langgraph-checkpoint-postgres>=2.0.0
21
+ Requires-Dist: langgraph>=0.2.0
22
+ Requires-Dist: psycopg[binary]>=3.1.0
23
+ Requires-Dist: pydantic>=2.0.0
24
+ Provides-Extra: all
25
+ Requires-Dist: memable[dev,duckdb,sqlite]; extra == 'all'
26
+ Provides-Extra: dev
27
+ Requires-Dist: duckdb>=1.0.0; extra == 'dev'
28
+ Requires-Dist: mypy>=1.10.0; extra == 'dev'
29
+ Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
30
+ Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
31
+ Requires-Dist: pytest>=8.0.0; extra == 'dev'
32
+ Requires-Dist: ruff>=0.4.0; extra == 'dev'
33
+ Requires-Dist: testcontainers[postgres]>=4.0.0; extra == 'dev'
34
+ Provides-Extra: duckdb
35
+ Requires-Dist: duckdb>=1.0.0; extra == 'duckdb'
36
+ Provides-Extra: sqlite
37
+ Requires-Dist: sqlite-vec>=0.1.0; extra == 'sqlite'
38
+ Description-Content-Type: text/markdown
39
+
40
+ <p align="center">
41
+ <img src="assets/memable-hero.png" alt="memable - AI memory that never forgets" width="600">
42
+ </p>
43
+
44
+ <h1 align="center">memable 🐘</h1>
45
+
46
+ <p align="center">
47
+ <em>Long-term semantic memory for AI agents. Elephants never forget.</em>
48
+ </p>
49
+
50
+ ---
51
+
52
+ Drop-in long-term memory with:
53
+
54
+ - **Durability tiers** — core facts vs situational context vs episodic memories
55
+ - **Temporal awareness** — validity windows, expiry, recency weighting
56
+ - **Version chains** — audit trail for memory updates with contradiction handling
57
+ - **Scoped namespaces** — org/user/project hierarchies with priority merging
58
+ - **Memory consolidation** — decay, summarize, and prune old memories
59
+ - **LangGraph integration** — ready-to-use nodes for retrieve/store/consolidate
60
+
61
+ ## Need Help?
62
+
63
+ **I'll add production-grade memory to your AI agent in 1-2 weeks.**
64
+
65
+ - 📞 **Consult** ($500) — 2-hour architecture deep-dive
66
+ - 🛠️ **Implementation** ($3-5k) — Full memory system, integrated + tested
67
+
68
+ [Book a Call →](https://calendar.superhuman.com/book/11SzDnK01g1VgPEI2w/FtV5Q)
69
+
70
+ ---
71
+
72
+ ## Installation
73
+
74
+ ```bash
75
+ pip install memable
76
+ ```
77
+
78
+ Or for development:
79
+
80
+ ```bash
81
+ git clone https://github.com/joelash/memable
82
+ cd memable
83
+ pip install -e ".[dev]"
84
+ ```
85
+
86
+ ## Quick Start
87
+
88
+ ```python
89
+ from memable import build_postgres_store
90
+ from memable.graph import build_memory_graph
91
+
92
+ # Connect to your Neon/Postgres DB (context manager handles connection lifecycle)
93
+ with build_postgres_store("postgresql://user:pass@host:5432/dbname") as store:
94
+ store.setup() # Run migrations (once)
95
+
96
+ # Build a graph with memory baked in
97
+ graph = build_memory_graph()
98
+ compiled = graph.compile(store=store.raw_store)
99
+
100
+ # Run it
101
+ config = {"configurable": {"user_id": "user_123"}}
102
+ result = compiled.invoke(
103
+ {"messages": [{"role": "user", "content": "I'm Joel, I live in Wheaton."}]},
104
+ config=config,
105
+ )
106
+ ```
107
+
108
+ ## Memory Schema
109
+
110
+ Each memory item includes:
111
+
112
+ ```python
113
+ {
114
+ "text": "User lives in Wheaton, IL",
115
+ "durability": "core", # core | situational | episodic
116
+ "valid_from": "2026-02-06", # when this became true
117
+ "valid_until": None, # null = permanent
118
+ "confidence": 0.95,
119
+ "source": "explicit", # explicit | inferred
120
+ "supersedes": None, # UUID of memory this replaces (version chain)
121
+ "superseded_by": None, # UUID of memory that replaced this
122
+ }
123
+ ```
124
+
125
+ ## Durability Tiers
126
+
127
+ | Tier | Description | Example | Default TTL |
128
+ |------|-------------|---------|-------------|
129
+ | `core` | Stable facts about the user | "Name is Joel", "Prefers dark mode" | Never expires |
130
+ | `situational` | Temporary context | "Visiting Ohio this week" | Explicit end date |
131
+ | `episodic` | Things that happened | "We discussed the API design" | 30 days, decays |
132
+
133
+ ## Features
134
+
135
+ ### Version Chains (Contradiction Handling)
136
+
137
+ When a memory contradicts an existing one, we don't delete — we create a version chain:
138
+
139
+ ```python
140
+ # Original: "User lives in Wheaton"
141
+ # New info: "User moved to Austin"
142
+
143
+ # Result:
144
+ # - Old memory gets superseded_by = new_memory_id
145
+ # - New memory gets supersedes = old_memory_id
146
+ # - Retrieval only returns current (non-superseded) memories
147
+ # - Audit trail preserved for debugging
148
+ ```
149
+
150
+ ### Scoped Namespaces
151
+
152
+ ```python
153
+ # Retrieval merges across scopes with priority
154
+ retrieve_memories(
155
+ store=store,
156
+ scopes=[
157
+ ("org_123", "user_456", "preferences"), # highest priority
158
+ ("org_123", "shared"), # org-wide fallback
159
+ ],
160
+ query="user preferences",
161
+ )
162
+ ```
163
+
164
+ ### Memory Consolidation
165
+
166
+ ```python
167
+ from memable import consolidate_memories
168
+
169
+ # Periodic cleanup job
170
+ consolidate_memories(
171
+ store=store,
172
+ user_id="user_123",
173
+ strategy="summarize_and_prune",
174
+ older_than_days=7,
175
+ )
176
+ ```
177
+
178
+ ## LangGraph Nodes
179
+
180
+ Pre-built nodes for your graph:
181
+
182
+ ```python
183
+ from memable.nodes import (
184
+ retrieve_memories_node,
185
+ store_memories_node,
186
+ consolidate_memories_node,
187
+ )
188
+
189
+ builder = StateGraph(MessagesState)
190
+ builder.add_node("retrieve", retrieve_memories_node)
191
+ builder.add_node("llm", your_llm_node)
192
+ builder.add_node("store", store_memories_node)
193
+
194
+ builder.add_edge(START, "retrieve")
195
+ builder.add_edge("retrieve", "llm")
196
+ builder.add_edge("llm", "store")
197
+ builder.add_edge("store", END)
198
+ ```
199
+
200
+ ## Performance & Costs
201
+
202
+ ### Storage Requirements
203
+
204
+ | Scale | Memories | SQLite | DuckDB | Postgres |
205
+ |-------|----------|--------|--------|----------|
206
+ | Light user | 100 | ~700 KB | ~3 MB | ~700 KB |
207
+ | Regular user | 1,000 | ~7 MB | ~30 MB | ~7 MB |
208
+ | Heavy user | 10,000 | ~70 MB | ~300 MB | ~70 MB |
209
+ | Power user | 100,000 | ~700 MB | ~3 GB | ~700 MB |
210
+
211
+ *Embeddings dominate storage: 1536 dims × 4 bytes = ~6KB per memory*
212
+
213
+ ### API Costs (text-embedding-3-small)
214
+
215
+ | Usage | Daily Tokens | Daily Cost | Monthly Cost |
216
+ |-------|--------------|------------|--------------|
217
+ | Light (100 adds, 500 searches) | 7,000 | $0.0001 | $0.00 |
218
+ | Medium (500 adds, 2,000 searches) | 30,000 | $0.0006 | $0.02 |
219
+ | Heavy (2,000 adds, 10,000 searches) | 140,000 | $0.0028 | $0.08 |
220
+
221
+ ### Extraction Costs (gpt-4.1-mini)
222
+
223
+ If using LLM-based memory extraction:
224
+
225
+ | Usage | Daily Cost | Monthly Cost |
226
+ |-------|------------|--------------|
227
+ | Light (50 extractions) | $0.007 | $0.20 |
228
+ | Medium (200 extractions) | $0.027 | $0.81 |
229
+ | Heavy (1,000 extractions) | $0.135 | $4.05 |
230
+
231
+ **Total cost for a typical agent (100 conversations/day):** ~$0.08-0.50/month
232
+
233
+ Run `pytest tests/performance/ -v -s` to benchmark on your hardware.
234
+
235
+ ## Configuration
236
+
237
+ Environment variables:
238
+
239
+ ```bash
240
+ OPENAI_API_KEY=sk-... # For embeddings
241
+ DATABASE_URL=postgresql://... # Postgres connection
242
+ ```
243
+
244
+ ## Multi-Tenant / Schema Isolation
245
+
246
+ For multi-tenant deployments where each customer needs isolated data, you can use PostgreSQL schemas:
247
+
248
+ ```python
249
+ from memable import build_store
250
+
251
+ # Each tenant gets their own schema
252
+ with build_store("postgresql://...", schema="customer_123") as store:
253
+ store.setup() # Creates tables in customer_123 schema
254
+ store.add(namespace, memory)
255
+ ```
256
+
257
+ **Requirements:**
258
+ - The schema must already exist in the database (`CREATE SCHEMA customer_123;`)
259
+ - Tables will be created within that schema when `setup()` is called
260
+ - Each schema has its own isolated set of tables
261
+
262
+ ### Database Tables
263
+
264
+ memable uses LangGraph's PostgresStore under the hood, which creates:
265
+
266
+ | Table | Purpose |
267
+ |-------|---------|
268
+ | `store` | Memory documents with metadata |
269
+ | `store_vectors` | pgvector embeddings for semantic search |
270
+ | `store_migrations` | Migration version tracking |
271
+
272
+ **Note:** Table names are currently fixed by LangGraph. If you need custom table names (e.g., prefixes/suffixes), use schema-based isolation instead, or run each app in a separate PostgreSQL schema.
273
+
274
+ **Alternative pattern:** For apps that already use schema-per-tenant, you could combine with a suffix:
275
+ ```sql
276
+ -- Example: customer schemas with memory suffix
277
+ CREATE SCHEMA customer_123_memories;
278
+ ```
279
+ ```python
280
+ with build_store("postgresql://...", schema="customer_123_memories") as store:
281
+ store.setup()
282
+ ```
283
+
284
+ ## License
285
+
286
+ MIT