memoryagent-lib 0.1.1 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. memoryagent_lib-0.1.1/PKG-INFO +186 -0
  2. memoryagent_lib-0.1.1/README.md +171 -0
  3. memoryagent_lib-0.1.1/memoryagent/__init__.py +35 -0
  4. memoryagent_lib-0.1.1/memoryagent/confidence.py +82 -0
  5. memoryagent_lib-0.1.1/memoryagent/config.py +35 -0
  6. memoryagent_lib-0.1.1/memoryagent/consolidation.py +5 -0
  7. memoryagent_lib-0.1.1/memoryagent/examples/export_memory.py +110 -0
  8. memoryagent_lib-0.1.1/memoryagent/examples/memory_api_server.py +223 -0
  9. memoryagent_lib-0.1.1/memoryagent/examples/minimal.py +47 -0
  10. memoryagent_lib-0.1.1/memoryagent/examples/openai_agent.py +137 -0
  11. memoryagent_lib-0.1.1/memoryagent/indexers.py +61 -0
  12. memoryagent_lib-0.1.1/memoryagent/models.py +156 -0
  13. memoryagent_lib-0.1.1/memoryagent/policy.py +171 -0
  14. memoryagent_lib-0.1.1/memoryagent/retrieval.py +154 -0
  15. memoryagent_lib-0.1.1/memoryagent/storage/base.py +86 -0
  16. memoryagent_lib-0.1.1/memoryagent/storage/in_memory.py +88 -0
  17. memoryagent_lib-0.1.1/memoryagent/storage/local_disk.py +415 -0
  18. memoryagent_lib-0.1.1/memoryagent/system.py +182 -0
  19. memoryagent_lib-0.1.1/memoryagent/utils.py +35 -0
  20. memoryagent_lib-0.1.1/memoryagent/workers.py +169 -0
  21. memoryagent_lib-0.1.1/memoryagent_lib.egg-info/PKG-INFO +186 -0
  22. memoryagent_lib-0.1.1/memoryagent_lib.egg-info/SOURCES.txt +25 -0
  23. memoryagent_lib-0.1.1/memoryagent_lib.egg-info/dependency_links.txt +1 -0
  24. memoryagent_lib-0.1.1/memoryagent_lib.egg-info/requires.txt +4 -0
  25. memoryagent_lib-0.1.1/memoryagent_lib.egg-info/top_level.txt +1 -0
  26. memoryagent_lib-0.1.1/pyproject.toml +20 -0
  27. memoryagent_lib-0.1.1/setup.cfg +4 -0
@@ -0,0 +1,186 @@
+ Metadata-Version: 2.4
+ Name: memoryagent-lib
+ Version: 0.1.1
+ Summary: Add your description here
+ Author-email: Jiawei Zheng <jw.zhengai@gmail.com>
+ License: MIT
+ Project-URL: Homepage, https://github.com/jia-wei-zheng/MemoryAgent
+ Project-URL: Repository, https://github.com/jia-wei-zheng/MemoryAgent
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ Requires-Dist: python-dotenv>=0.9.9
+ Requires-Dist: openai>=2.16.0
+ Requires-Dist: pydantic>=2.0
+ Requires-Dist: sqlite-vec>=0.1.6
+
+ #
+
+ <div align="center">
+ <img src="https://raw.githubusercontent.com/jia-wei-zheng/MemoryAgent/refs/heads/master/memoryagent_logo.jpg?token=GHSAT0AAAAAADSXDGNDXIWZF42E6S3D2XPG2MDPSDQ" alt="MemoryAgent" width="500">
+ <h1>MemoryAgent: An Open, Modular Memory Framework for Agents (Beta)</h1>
+ </div>
+
+ MemoryAgent is a reusable memory framework for LLM-based agent systems. It provides tiered memory (working, episodic, semantic, perceptual), hot/cold storage, archive indexing, confidence-based retrieval escalation, and optional local vector search via sqlite-vec.
+
+ ## Highlights
+ - **Tiered memory**: working (TTL), episodic, semantic, perceptual
+ - **Storage tiers**: hot metadata (SQLite + sqlite-vec), cold archive (filesystem), archive index (vector index)
+ - **Memory retrieval pipeline**: hot -> archive -> cold hydration with rerank + context packaging
+ - **Local mode**: SQLite + sqlite-vec (optional) + filesystem
+ - **Async-friendly** with sync convenience methods
+
+ ## Project Layout
+ ```
+ memoryagent/
+   config.py               # Default system settings and retrieval thresholds
+   models.py               # Pydantic data models for memory items, queries, bundles
+   system.py               # MemorySystem entry point and wiring
+   retrieval.py            # Retrieval orchestration and reranking
+   confidence.py           # Confidence scoring components
+   policy.py               # Conversation + routing policies
+   indexers.py             # Episodic/semantic/perceptual indexers
+   workers.py              # Consolidation, archiving, rehydration, compaction
+   storage/
+     base.py               # Storage adapter interfaces
+     in_memory.py          # Simple in-memory vector + graph stores
+     local_disk.py         # SQLite metadata/features + sqlite-vec + file object store
+   examples/
+     minimal.py            # Basic usage example
+     openai_agent.py       # CLI OpenAI agent with memory retrieval
+     memory_api_server.py  # Local API for memory + chat
+     memory_viz.html       # Web UI for chat + memory visualization
+ ```
+
+ ## Installation
+ Python 3.10+ required.
+
+ Development (sync deps from `uv.lock`):
+ ```bash
+ uv sync
+ ```
+
+ Use as a dependency:
+ ```bash
+ uv add memoryagent-lib
+ # or
+ pip install memoryagent-lib
+ ```
+
+ Optional extras:
+ ```bash
+ uv add openai sqlite-vec
+ # or
+ pip install openai sqlite-vec
+ ```
+
+ ## Quick Start
+ ```python
+ from memoryagent import MemoryEvent, MemorySystem
+
+ memory = MemorySystem()
+ owner = "user-001"
+
+ memory.write(
+     MemoryEvent(
+         content="User prefers concise summaries about climate policy.",
+         type="semantic",
+         owner=owner,
+         tags=["preference", "summary"],
+         confidence=0.7,
+         stability=0.8,
+     )
+ )
+
+ bundle = memory.retrieve("What policy topics did we cover?", owner=owner)
+ print(bundle.confidence.total)
+ for block in bundle.blocks:
+     print(block.text)
+
+ memory.flush(owner)
+ ```
+
+ ## Enable sqlite-vec (Local Vector Search)
+ ```python
+ from memoryagent import MemorySystem, MemorySystemConfig
+
+ config = MemorySystemConfig(
+     use_sqlite_vec=True,
+     vector_dim=1536,  # match your embedding model
+ )
+
+ memory = MemorySystem(config=config)
+ ```
+
+ If sqlite-vec cannot be auto-loaded, set an explicit path:
+ ```python
+ from pathlib import Path
+ from memoryagent import MemorySystemConfig
+
+ config = MemorySystemConfig(
+     use_sqlite_vec=True,
+     vector_dim=1536,
+     sqlite_vec_extension_path=Path("/path/to/sqlite_vec.dylib"),
+ )
+ ```
+
+ ## Policies
+ ### Conversation storage policy
+ `HeuristicMemoryPolicy` decides whether a turn should be stored and whether it becomes episodic or semantic memory.
+
+ ### Routing policy
+ `MemoryRoutingPolicy` decides where a memory should be written:
+ - **Hot** metadata store
+ - **Vector index**
+ - **Feature store** (perceptual)
+ - **Cold** archive (via workers)
+
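The policy classes above are re-exported from the package root (see `memoryagent/__init__.py` later in this diff). A purely illustrative sketch follows: the import line is confirmed by the release, but the constructor and method calls shown in the comments are assumptions rather than the published API; the real signatures live in `memoryagent/policy.py`.

```python
# Illustrative only: the import is confirmed by memoryagent/__init__.py, but the
# calls sketched in the comments below are guesses, not the published API.
from memoryagent import HeuristicMemoryPolicy, MemoryRoutingPolicy, MemoryDecision, RoutingDecision

# Hypothetical flow for a single conversation turn:
#   policy = HeuristicMemoryPolicy()
#   decision: MemoryDecision = policy.decide(turn)      # store it? episodic or semantic?
#   router = MemoryRoutingPolicy()
#   routing: RoutingDecision = router.route(decision)   # hot store, vector index, feature store, ...
```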
+ ## Background Workers
+ - `ConsolidationWorker`: working → episodic/semantic
+ - `ArchiverWorker`: hot → cold + archive index
+ - `RehydratorWorker`: cold → hot (based on access)
+ - `Compactor`: cleanup/TTL
+
+ ## Examples
+ ### OpenAI Agent (CLI)
+ ```bash
+ python -m memoryagent.examples.openai_agent
+ ```
+ - Uses OpenAI responses + embeddings.
+ - Stores session transcript as a single working memory item.
+
+ ### Memory Visualization + API
+ Start the API server:
+ ```bash
+ python -m memoryagent.examples.memory_api_server
+ ```
+ Open in browser:
+ ```
+ http://127.0.0.1:8000/memory_viz.html
+ ```
+
+ An example (the system recording a semantic memory while updating working memory):
+
+ ![Screenshot](https://raw.githubusercontent.com/jia-wei-zheng/MemoryAgent/master/Memory%20Agent%20_%20Live%20Console.jpeg?token=GHSAT0AAAAAADSXDGNDWOWFKKV777VMZM6U2MDPSTQ)
+
+
+ The page calls:
+ - `GET /api/memory?owner=user-001`
+ - `POST /api/chat`
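For scripted access, here is a minimal sketch of calling the first endpoint from Python. It assumes the example server above is running on its default `127.0.0.1:8000` and that `/api/memory` returns JSON, as the web UI expects; the exact response shape is defined in `memoryagent/examples/memory_api_server.py`.

```python
# Query the example memory API from a script (sketch; the server must be running).
import json
from urllib.request import urlopen

with urlopen("http://127.0.0.1:8000/api/memory?owner=user-001") as resp:
    payload = json.load(resp)
print(sorted(payload.keys()))  # inspect the top-level keys of the response
```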
+
+ ## Data Stores
+ - **Hot metadata**: `.memoryagent_hot.sqlite`
+ - **Vector index**: `.memoryagent_vectors.sqlite` (sqlite-vec)
+ - **Features**: `.memoryagent_features.sqlite`
+ - **Cold archive**: `.memoryagent_cold/records/<owner>/YYYY/MM/DD/daily_notes.json`
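These are plain SQLite files, so they can be inspected directly. The sketch below reads the `memory_items` table and columns that `memoryagent/examples/export_memory.py` (included later in this diff) queries, and assumes the database sits in the current working directory.

```python
# Peek at hot-tier items (table and columns as read by examples/export_memory.py).
import sqlite3
from pathlib import Path

hot_db = Path(".memoryagent_hot.sqlite")
if hot_db.exists():
    with sqlite3.connect(hot_db) as conn:
        for item_id, item_type, owner, summary, tier in conn.execute(
            "SELECT id, type, owner, summary, tier FROM memory_items"
        ):
            print(f"[{tier}] {item_type} ({owner}): {summary}")
```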
+
+ ## Configuration
+ See `memoryagent/config.py` for defaults:
+ - `working_ttl_seconds`
+ - `retrieval_plan` thresholds and budgets
+ - `use_sqlite_vec`, `vector_dim`, `sqlite_vec_extension_path`
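As a concrete reference, the field names below match `MemorySystemConfig` as defined in `memoryagent/config.py` (shown later in this diff); the override values themselves are arbitrary examples.

```python
# Override a few defaults from memoryagent/config.py (values are illustrative).
from pathlib import Path
from memoryagent import MemorySystem, MemorySystemConfig

config = MemorySystemConfig(
    working_ttl_seconds=1800,                   # default: 3600
    use_sqlite_vec=False,                       # default: False
    vector_dim=384,                             # default: 384
    cold_store_path=Path(".memoryagent_cold"),  # default cold archive location
)
memory = MemorySystem(config=config)
```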
+
+ ## Notes
+ - Working memory is stored as a single session transcript (updated each turn).
+ - Episodic/semantic memories are candidates for the cold archive.
+
+ ## License
@@ -0,0 +1,35 @@
+ from memoryagent.config import MemorySystemConfig
+ from memoryagent.models import (
+     ConfidenceReport,
+     MemoryBlock,
+     MemoryEvent,
+     MemoryItem,
+     MemoryQuery,
+     MemoryType,
+     RetrievalPlan,
+ )
+ from memoryagent.policy import (
+     ConversationMemoryPolicy,
+     HeuristicMemoryPolicy,
+     MemoryDecision,
+     MemoryRoutingPolicy,
+     RoutingDecision,
+ )
+ from memoryagent.system import MemorySystem
+
+ __all__ = [
+     "MemorySystem",
+     "MemorySystemConfig",
+     "MemoryEvent",
+     "MemoryItem",
+     "MemoryQuery",
+     "MemoryType",
+     "MemoryBlock",
+     "RetrievalPlan",
+     "ConfidenceReport",
+     "ConversationMemoryPolicy",
+     "HeuristicMemoryPolicy",
+     "MemoryDecision",
+     "MemoryRoutingPolicy",
+     "RoutingDecision",
+ ]
@@ -0,0 +1,82 @@
+ from __future__ import annotations
+
+ from datetime import datetime, timezone
+ from typing import List
+
+ from memoryagent.models import ConfidenceReport, MemoryQuery, ScoredMemory
+ from memoryagent.utils import clamp, safe_div, unique_tokens
+
+
+ def _semantic_relevance(results: List[ScoredMemory]) -> float:
+     if not results:
+         return 0.0
+     top_scores = [r.score for r in results[:5]]
+     return sum(top_scores) / len(top_scores)
+
+
+ def _coverage(query: MemoryQuery, results: List[ScoredMemory]) -> float:
+     query_tokens = unique_tokens(query.text)
+     if not query_tokens:
+         return 0.0
+     covered = set()
+     for item in results[:5]:
+         covered |= unique_tokens(item.item.text())
+     return safe_div(len(query_tokens & covered), len(query_tokens))
+
+
+ def _temporal_fit(results: List[ScoredMemory]) -> float:
+     if not results:
+         return 0.0
+     now = datetime.now(timezone.utc)
+     scores = []
+     for item in results[:5]:
+         age_days = max(0.0, (now - item.item.created_at).total_seconds() / 86400)
+         scores.append(1.0 / (1.0 + age_days))
+     return sum(scores) / len(scores)
+
+
+ def _authority(results: List[ScoredMemory]) -> float:
+     if not results:
+         return 0.0
+     scores = [0.5 * r.item.authority + 0.5 * r.item.stability for r in results[:5]]
+     return sum(scores) / len(scores)
+
+
+ def _consistency(results: List[ScoredMemory]) -> float:
+     if len(results) < 2:
+         return 0.5
+     tag_sets = [set(r.item.tags) for r in results[:5] if r.item.tags]
+     if not tag_sets:
+         return 0.4
+     overlap = set.intersection(*tag_sets) if len(tag_sets) > 1 else tag_sets[0]
+     union = set.union(*tag_sets) if len(tag_sets) > 1 else tag_sets[0]
+     return safe_div(len(overlap), len(union))
+
+
+ def evaluate_confidence(query: MemoryQuery, results: List[ScoredMemory]) -> ConfidenceReport:
+     semantic = _semantic_relevance(results)
+     coverage = _coverage(query, results)
+     temporal = _temporal_fit(results)
+     authority = _authority(results)
+     consistency = _consistency(results)
+
+     total = clamp(0.35 * semantic + 0.2 * coverage + 0.2 * temporal + 0.15 * authority + 0.1 * consistency)
+
+     if total >= 0.75:
+         recommendation = "accept"
+     elif total >= 0.6:
+         recommendation = "escalate_archive"
+     elif total >= 0.45:
+         recommendation = "fetch_cold"
+     else:
+         recommendation = "uncertain"
+
+     return ConfidenceReport(
+         total=total,
+         semantic_relevance=semantic,
+         coverage=coverage,
+         temporal_fit=temporal,
+         authority=authority,
+         consistency=consistency,
+         recommendation=recommendation,
+     )
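A quick worked example of the weighting and thresholds in `evaluate_confidence` above; the component scores are made up, while the weights and bands are the ones in the file.

```python
# Hypothetical component scores pushed through the same weighted sum as above.
semantic, coverage, temporal, authority, consistency = 0.8, 0.6, 0.4, 0.7, 0.5
total = 0.35 * semantic + 0.2 * coverage + 0.2 * temporal + 0.15 * authority + 0.1 * consistency
print(round(total, 3))  # 0.635 -> in the [0.6, 0.75) band, i.e. "escalate_archive"
```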
@@ -0,0 +1,35 @@
+ from __future__ import annotations
+
+ from pathlib import Path
+ from typing import Optional
+
+ from pydantic import BaseModel, Field
+
+ from memoryagent.models import RetrievalPlan
+
+
+ class ConsolidationConfig(BaseModel):
+     archive_on_flush: bool = True
+     semantic_min_count: int = 2
+     perceptual_summary_limit: int = 5
+
+
+ class MemorySystemConfig(BaseModel):
+     """System-wide configuration with sane local defaults."""
+
+     working_ttl_seconds: int = 3600
+     retrieval_plan: RetrievalPlan = Field(default_factory=RetrievalPlan)
+     consolidation: ConsolidationConfig = Field(default_factory=ConsolidationConfig)
+     cold_store_path: Path = Field(default_factory=lambda: Path(".memoryagent_cold"))
+     metadata_db_path: Path = Field(default_factory=lambda: Path(".memoryagent_hot.sqlite"))
+     feature_db_path: Path = Field(default_factory=lambda: Path(".memoryagent_features.sqlite"))
+     vector_db_path: Path = Field(default_factory=lambda: Path(".memoryagent_vectors.sqlite"))
+     vector_dim: int = 384
+     use_sqlite_vec: bool = False
+     sqlite_vec_extension_path: Optional[Path] = None
+     archive_index_path: Optional[Path] = None
+
+     def resolved_archive_path(self) -> Path:
+         if self.archive_index_path is not None:
+             return self.archive_index_path
+         return self.cold_store_path / "archive_index.json"
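A minimal check of the fallback in `resolved_archive_path()` above: with no explicit `archive_index_path`, the index lives under the cold store.

```python
from memoryagent import MemorySystemConfig

cfg = MemorySystemConfig()
print(cfg.resolved_archive_path())  # .memoryagent_cold/archive_index.json (POSIX path shown)
```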
@@ -0,0 +1,5 @@
+ from __future__ import annotations
+
+ from memoryagent.workers import ConsolidationWorker
+
+ __all__ = ["ConsolidationWorker"]
@@ -0,0 +1,110 @@
+ from __future__ import annotations
+
+ import json
+ import sqlite3
+ from pathlib import Path
+ from typing import Any, Dict, List
+
+ ROOT = Path(__file__).resolve().parents[2]
+ COLD_ROOT = ROOT / ".memoryagent_cold"
+ HOT_DB = ROOT / ".memoryagent_hot.sqlite"
+ FEATURE_DB = ROOT / ".memoryagent_features.sqlite"
+ ARCHIVE_INDEX = COLD_ROOT / "archive_index.json"
+
+
+ def load_hot() -> List[Dict[str, Any]]:
+     if not HOT_DB.exists():
+         return []
+     with sqlite3.connect(HOT_DB) as conn:
+         rows = conn.execute(
+             "SELECT id, type, owner, summary, content_json, tags_json, created_at, updated_at, last_accessed, tier, pointer_json, ttl_seconds, confidence, authority, stability FROM memory_items"
+         ).fetchall()
+     items = []
+     for row in rows:
+         (
+             item_id,
+             item_type,
+             owner,
+             summary,
+             content_json,
+             tags_json,
+             created_at,
+             updated_at,
+             last_accessed,
+             tier,
+             pointer_json,
+             ttl_seconds,
+             confidence,
+             authority,
+             stability,
+         ) = row
+         items.append(
+             {
+                 "id": item_id,
+                 "type": item_type,
+                 "owner": owner,
+                 "summary": summary,
+                 "content": json.loads(content_json) if content_json else None,
+                 "tags": json.loads(tags_json) if tags_json else [],
+                 "created_at": created_at,
+                 "updated_at": updated_at,
+                 "last_accessed": last_accessed,
+                 "tier": tier,
+                 "pointer": json.loads(pointer_json) if pointer_json else {},
+                 "ttl_seconds": ttl_seconds,
+                 "confidence": confidence,
+                 "authority": authority,
+                 "stability": stability,
+             }
+         )
+     return items
+
+
+ def load_features() -> List[Dict[str, Any]]:
+     if not FEATURE_DB.exists():
+         return []
+     with sqlite3.connect(FEATURE_DB) as conn:
+         rows = conn.execute("SELECT owner, created_at, payload_json FROM features").fetchall()
+     features = []
+     for owner, created_at, payload_json in rows:
+         features.append(
+             {
+                 "owner": owner,
+                 "created_at": created_at,
+                 "payload": json.loads(payload_json),
+             }
+         )
+     return features
+
+
+ def load_cold_records() -> List[Dict[str, Any]]:
+     if not COLD_ROOT.exists():
+         return []
+     records = []
+     records_root = COLD_ROOT / "records"
+     if not records_root.exists():
+         return []
+     for path in records_root.rglob("*.json"):
+         try:
+             records.append({"path": str(path.relative_to(ROOT)), "payload": json.loads(path.read_text())})
+         except Exception:
+             continue
+     return records
+
+
+ def load_archive_index() -> Dict[str, Any]:
+     if not ARCHIVE_INDEX.exists():
+         return {}
+     return json.loads(ARCHIVE_INDEX.read_text())
+
+
+ def get_memory_payload() -> Dict[str, Any]:
+     return {
+         "hot_items": load_hot(),
+         "features": load_features(),
+         "cold_records": load_cold_records(),
+         "archive_index": load_archive_index(),
+     }
+
+
+ __all__ = ["get_memory_payload"]