mcal-ai-langgraph 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcal_ai_langgraph-0.2.0.dist-info/METADATA +147 -0
- mcal_ai_langgraph-0.2.0.dist-info/RECORD +11 -0
- mcal_ai_langgraph-0.2.0.dist-info/WHEEL +5 -0
- mcal_ai_langgraph-0.2.0.dist-info/licenses/LICENSE +21 -0
- mcal_ai_langgraph-0.2.0.dist-info/top_level.txt +1 -0
- mcal_langgraph/__init__.py +39 -0
- mcal_langgraph/_compat.py +71 -0
- mcal_langgraph/checkpointer.py +53 -0
- mcal_langgraph/memory.py +189 -0
- mcal_langgraph/py.typed +1 -0
- mcal_langgraph/store.py +564 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mcal-ai-langgraph
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: LangGraph integration for MCAL - Goal-aware memory for AI agents
|
|
5
|
+
Author: MCAL Team
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/Shivakoreddi/mcal-ai
|
|
8
|
+
Project-URL: Documentation, https://github.com/Shivakoreddi/mcal-ai/blob/main/docs/integrations/langgraph.md
|
|
9
|
+
Project-URL: Repository, https://github.com/Shivakoreddi/mcal-ai.git
|
|
10
|
+
Project-URL: Issues, https://github.com/Shivakoreddi/mcal-ai/issues
|
|
11
|
+
Keywords: mcal,langgraph,memory,agents,llm,goal-aware,langchain
|
|
12
|
+
Classifier: Development Status :: 3 - Alpha
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: Intended Audience :: Science/Research
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
20
|
+
Requires-Python: >=3.11
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
License-File: LICENSE
|
|
23
|
+
Requires-Dist: mcal-ai>=0.1.0
|
|
24
|
+
Requires-Dist: langgraph>=0.0.40
|
|
25
|
+
Requires-Dist: langchain-core>=0.1.0
|
|
26
|
+
Provides-Extra: dev
|
|
27
|
+
Requires-Dist: pytest>=7.4.0; extra == "dev"
|
|
28
|
+
Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
|
|
29
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
30
|
+
Dynamic: license-file
|
|
31
|
+
|
|
32
|
+
# mcal-langgraph
|
|
33
|
+
|
|
34
|
+
LangGraph integration for [MCAL](https://github.com/Shivakoreddi/mcal-ai) - Goal-aware memory for AI agents.
|
|
35
|
+
|
|
36
|
+
## Installation
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
pip install mcal-langgraph
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
This will automatically install `mcal` and `langgraph` as dependencies.
|
|
43
|
+
|
|
44
|
+
## Quick Start
|
|
45
|
+
|
|
46
|
+
```python
|
|
47
|
+
from mcal import MCAL
|
|
48
|
+
from mcal_langgraph import MCALStore
|
|
49
|
+
|
|
50
|
+
# Initialize MCAL with a goal
|
|
51
|
+
mcal = MCAL(goal="Build a fraud detection system")
|
|
52
|
+
|
|
53
|
+
# Create LangGraph-compatible store
|
|
54
|
+
store = MCALStore(mcal)
|
|
55
|
+
|
|
56
|
+
# Use with LangGraph
|
|
57
|
+
from langgraph.prebuilt import create_react_agent
|
|
58
|
+
|
|
59
|
+
agent = create_react_agent(
|
|
60
|
+
model=your_model,
|
|
61
|
+
tools=your_tools,
|
|
62
|
+
store=store # Goal-aware memory!
|
|
63
|
+
)
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
## Features
|
|
67
|
+
|
|
68
|
+
### MCALStore (BaseStore)
|
|
69
|
+
|
|
70
|
+
Drop-in replacement for LangGraph's built-in stores with **goal-aware** memory:
|
|
71
|
+
|
|
72
|
+
```python
|
|
73
|
+
from mcal_langgraph import MCALStore
|
|
74
|
+
|
|
75
|
+
store = MCALStore(mcal)
|
|
76
|
+
|
|
77
|
+
# Store memories
|
|
78
|
+
await store.aput(
|
|
79
|
+
namespace=("user_123", "memories"),
|
|
80
|
+
key="decision_1",
|
|
81
|
+
value={"text": "Decided to use PostgreSQL for ACID compliance"}
|
|
82
|
+
)
|
|
83
|
+
|
|
84
|
+
# Goal-aware search - returns memories relevant to current goals
|
|
85
|
+
results = await store.asearch(
|
|
86
|
+
namespace_prefix=("user_123",),
|
|
87
|
+
query="database choice"
|
|
88
|
+
)
|
|
89
|
+
|
|
90
|
+
# Results include goal context and decisions
|
|
91
|
+
for item in results:
|
|
92
|
+
print(item.value["goals"]) # Related goals
|
|
93
|
+
print(item.value["decisions"]) # Related decisions
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### MCALMemory
|
|
97
|
+
|
|
98
|
+
Memory nodes for custom LangGraph workflows:
|
|
99
|
+
|
|
100
|
+
```python
|
|
101
|
+
from mcal_langgraph import MCALMemory
|
|
102
|
+
|
|
103
|
+
memory = MCALMemory(llm_provider="anthropic")
|
|
104
|
+
|
|
105
|
+
# Add as nodes in your graph
|
|
106
|
+
graph.add_node("update_memory", memory.update_node())
|
|
107
|
+
graph.add_node("get_context", memory.context_node())
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
### MCALCheckpointer
|
|
111
|
+
|
|
112
|
+
State persistence for LangGraph graphs:
|
|
113
|
+
|
|
114
|
+
```python
|
|
115
|
+
from mcal_langgraph import MCALCheckpointer
|
|
116
|
+
|
|
117
|
+
checkpointer = MCALCheckpointer(mcal)
|
|
118
|
+
graph = builder.compile(checkpointer=checkpointer)
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
## Why mcal-langgraph?
|
|
122
|
+
|
|
123
|
+
| Feature | LangGraph InMemoryStore | **MCALStore** |
|
|
124
|
+
|---------|------------------------|---------------|
|
|
125
|
+
| BaseStore interface | ✅ | ✅ |
|
|
126
|
+
| Namespace organization | ✅ | ✅ |
|
|
127
|
+
| **Goal-aware search** | ❌ | ✅ |
|
|
128
|
+
| **Decision tracking** | ❌ | ✅ |
|
|
129
|
+
| **Intent preservation** | ❌ | ✅ |
|
|
130
|
+
|
|
131
|
+
## Migrating from mcal[langgraph]
|
|
132
|
+
|
|
133
|
+
If you were using the old extras-based installation:
|
|
134
|
+
|
|
135
|
+
```python
|
|
136
|
+
# Old way (deprecated)
|
|
137
|
+
from mcal.integrations.langgraph import MCALStore
|
|
138
|
+
|
|
139
|
+
# New way (recommended)
|
|
140
|
+
from mcal_langgraph import MCALStore
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
The old import path still works but will show a deprecation warning.
|
|
144
|
+
|
|
145
|
+
## License
|
|
146
|
+
|
|
147
|
+
MIT
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
mcal_ai_langgraph-0.2.0.dist-info/licenses/LICENSE,sha256=zdp5kxDzb-kYvBiEZ_h1Hi96z-o6e5oXoXFx2IIefCs,1062
|
|
2
|
+
mcal_langgraph/__init__.py,sha256=YfCMV6GcPo8zITC9y6BfdkIcpT3U4oZNG7-rsFUZYrg,1022
|
|
3
|
+
mcal_langgraph/_compat.py,sha256=_KO7SX8g4ahMVHoHDqpuBFO-dpgOVA1JUlc2daw9Ph0,1644
|
|
4
|
+
mcal_langgraph/checkpointer.py,sha256=y9ChSXu71J9FWnwpnHRhdpuR-7FNQoSGrKtA7fn5x5U,1687
|
|
5
|
+
mcal_langgraph/memory.py,sha256=POgMIoVXuZA8KAM-o2ZR6bIx9k4ct4T6WwBU4QRFkGU,6047
|
|
6
|
+
mcal_langgraph/py.typed,sha256=2X_1X_HUbFbPMsgG85pQ4WTxzXDtgoxEZEPV1nHY9nw,40
|
|
7
|
+
mcal_langgraph/store.py,sha256=8XgYI2Y2oFL54vooRRM633FtU04cbHIWYHGQG9fC1cY,19608
|
|
8
|
+
mcal_ai_langgraph-0.2.0.dist-info/METADATA,sha256=aZTOXcAYtbQ__0MjFY1DN-GlUUhJsMQuNnez8Lj5kbw,3871
|
|
9
|
+
mcal_ai_langgraph-0.2.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
10
|
+
mcal_ai_langgraph-0.2.0.dist-info/top_level.txt,sha256=msXK9BlyqOeRq-IPIzT2O9j7bwlx1fe9z_cb5rYszUU,15
|
|
11
|
+
mcal_ai_langgraph-0.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Shiva
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
mcal_langgraph
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""
|
|
2
|
+
mcal-langgraph: LangGraph integration for MCAL
|
|
3
|
+
|
|
4
|
+
Provides goal-aware memory for LangGraph agent workflows:
|
|
5
|
+
- MCALStore: BaseStore implementation with goal-aware search
|
|
6
|
+
- MCALMemory: Memory nodes for LangGraph workflows
|
|
7
|
+
- MCALCheckpointer: State persistence for graphs
|
|
8
|
+
|
|
9
|
+
Installation:
|
|
10
|
+
pip install mcal-langgraph
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
from mcal import MCAL
|
|
14
|
+
from mcal_langgraph import MCALStore
|
|
15
|
+
|
|
16
|
+
mcal = MCAL(goal="Build fraud detection system")
|
|
17
|
+
store = MCALStore(mcal)
|
|
18
|
+
|
|
19
|
+
# Use with LangGraph
|
|
20
|
+
from langgraph.prebuilt import create_react_agent
|
|
21
|
+
agent = create_react_agent(model, tools, store=store)
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
from mcal_langgraph.store import MCALStore
|
|
25
|
+
from mcal_langgraph.memory import MCALMemory, MCALMemoryConfig
|
|
26
|
+
from mcal_langgraph.checkpointer import MCALCheckpointer
|
|
27
|
+
from mcal_langgraph._compat import LANGGRAPH_AVAILABLE
|
|
28
|
+
|
|
29
|
+
# Version
|
|
30
|
+
__version__ = "0.1.0"
|
|
31
|
+
|
|
32
|
+
__all__ = [
|
|
33
|
+
"MCALStore",
|
|
34
|
+
"MCALMemory",
|
|
35
|
+
"MCALMemoryConfig",
|
|
36
|
+
"MCALCheckpointer",
|
|
37
|
+
"LANGGRAPH_AVAILABLE",
|
|
38
|
+
"__version__",
|
|
39
|
+
]
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Compatibility layer for LangGraph imports.
|
|
3
|
+
|
|
4
|
+
Handles graceful degradation when langgraph is not installed.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
# Check for LangGraph availability
|
|
11
|
+
try:
|
|
12
|
+
from langgraph.graph import StateGraph
|
|
13
|
+
from langgraph.checkpoint.base import BaseCheckpointSaver
|
|
14
|
+
from langgraph.store.base import (
|
|
15
|
+
BaseStore,
|
|
16
|
+
Item,
|
|
17
|
+
SearchItem,
|
|
18
|
+
GetOp,
|
|
19
|
+
PutOp,
|
|
20
|
+
SearchOp,
|
|
21
|
+
ListNamespacesOp,
|
|
22
|
+
NamespacePath,
|
|
23
|
+
)
|
|
24
|
+
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage
|
|
25
|
+
|
|
26
|
+
LANGGRAPH_AVAILABLE = True
|
|
27
|
+
except ImportError:
|
|
28
|
+
LANGGRAPH_AVAILABLE = False
|
|
29
|
+
StateGraph = None
|
|
30
|
+
BaseCheckpointSaver = object
|
|
31
|
+
BaseStore = object
|
|
32
|
+
Item = None
|
|
33
|
+
SearchItem = None
|
|
34
|
+
GetOp = None
|
|
35
|
+
PutOp = None
|
|
36
|
+
SearchOp = None
|
|
37
|
+
ListNamespacesOp = None
|
|
38
|
+
NamespacePath = None
|
|
39
|
+
BaseMessage = Any
|
|
40
|
+
HumanMessage = Any
|
|
41
|
+
AIMessage = Any
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def check_langgraph():
|
|
45
|
+
"""Raise helpful error if LangGraph not installed."""
|
|
46
|
+
if not LANGGRAPH_AVAILABLE:
|
|
47
|
+
raise ImportError(
|
|
48
|
+
"mcal-langgraph requires langgraph package.\n"
|
|
49
|
+
"Install with: pip install mcal-langgraph\n"
|
|
50
|
+
"Or manually: pip install langgraph langchain-core"
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# Export everything needed by other modules
|
|
55
|
+
__all__ = [
|
|
56
|
+
"LANGGRAPH_AVAILABLE",
|
|
57
|
+
"check_langgraph",
|
|
58
|
+
"StateGraph",
|
|
59
|
+
"BaseCheckpointSaver",
|
|
60
|
+
"BaseStore",
|
|
61
|
+
"Item",
|
|
62
|
+
"SearchItem",
|
|
63
|
+
"GetOp",
|
|
64
|
+
"PutOp",
|
|
65
|
+
"SearchOp",
|
|
66
|
+
"ListNamespacesOp",
|
|
67
|
+
"NamespacePath",
|
|
68
|
+
"BaseMessage",
|
|
69
|
+
"HumanMessage",
|
|
70
|
+
"AIMessage",
|
|
71
|
+
]
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MCALCheckpointer: State persistence for LangGraph graphs.
|
|
3
|
+
|
|
4
|
+
Stores graph state alongside MCAL memory for unified persistence.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from typing import Any, Dict, List, Optional
|
|
10
|
+
|
|
11
|
+
from mcal_langgraph._compat import (
|
|
12
|
+
check_langgraph,
|
|
13
|
+
LANGGRAPH_AVAILABLE,
|
|
14
|
+
BaseCheckpointSaver,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class MCALCheckpointer(BaseCheckpointSaver if LANGGRAPH_AVAILABLE else object):
    """
    MCAL-based checkpointer for LangGraph state persistence.

    Keeps graph checkpoints in an in-process dict keyed by thread id, so that
    state can live alongside MCAL memory.

    Usage:
        from mcal_langgraph import MCALCheckpointer

        checkpointer = MCALCheckpointer(storage_path="~/.mcal")
        graph = StateGraph(...).compile(checkpointer=checkpointer)
    """

    def __init__(self, storage_path: Optional[str] = None):
        check_langgraph()
        if LANGGRAPH_AVAILABLE:
            super().__init__()
        # NOTE(review): storage_path is stored but checkpoints are currently
        # kept in memory only — confirm whether disk persistence is planned.
        self.storage_path = storage_path
        self._checkpoints: Dict[str, Any] = {}

    @staticmethod
    def _thread_id(config: Dict[str, Any]) -> str:
        """Extract the thread id from a runnable config ("default" if absent)."""
        return config.get("configurable", {}).get("thread_id", "default")

    def get(self, config: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Get checkpoint by config."""
        return self._checkpoints.get(self._thread_id(config))

    def put(self, config: Dict[str, Any], checkpoint: Dict[str, Any]) -> None:
        """Save checkpoint."""
        self._checkpoints[self._thread_id(config)] = checkpoint

    def list(self, config: Dict[str, Any]) -> List[Dict[str, Any]]:
        """List all checkpoints."""
        return list(self._checkpoints.values())
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
__all__ = ["MCALCheckpointer"]
|
mcal_langgraph/memory.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MCALMemory: Memory nodes for LangGraph workflows.
|
|
3
|
+
|
|
4
|
+
Provides goal-aware memory that preserves reasoning context.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from typing import Any, Callable, Dict, List, Optional, Sequence, TYPE_CHECKING
|
|
11
|
+
|
|
12
|
+
from mcal_langgraph._compat import check_langgraph, BaseMessage
|
|
13
|
+
from mcal_langgraph.store import MCALStore
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from mcal import MCAL
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class MCALMemoryConfig:
    """Configuration for MCAL memory in LangGraph.

    Fields mirror the keyword arguments of ``MCALMemory.__init__`` plus a few
    retrieval knobs.
    """
    # LLM backend name handed to MCAL for extraction.
    llm_provider: str = "anthropic"
    # Embedding backend name.  NOTE(review): MCALMemory.__init__ accepts an
    # embedding_provider parameter but does not forward it to MCAL — confirm.
    embedding_provider: str = "openai"
    # Filesystem location for persisted MCAL state; None = MCAL's default.
    storage_path: Optional[str] = None
    # Scope under which memories are stored and retrieved.
    user_id: str = "default"
    # Include active goals in retrieved context.
    include_goals: bool = True
    # Include tracked decisions in retrieved context.
    include_decisions: bool = True
    # Upper bound on tokens in retrieved context.
    max_context_tokens: int = 4000
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class MCALMemory:
    """
    MCAL Memory component for LangGraph workflows.

    Provides goal-aware memory that preserves reasoning context
    across agent interactions.

    Usage:
        from mcal_langgraph import MCALMemory
        from langgraph.graph import StateGraph

        memory = MCALMemory(llm_provider="anthropic")

        # Add as a node in your graph
        graph = StateGraph(...)
        graph.add_node("update_memory", memory.update_node())
        graph.add_node("get_context", memory.context_node())
    """

    def __init__(
        self,
        llm_provider: str = "anthropic",
        embedding_provider: str = "openai",
        storage_path: Optional[str] = None,
        user_id: str = "default",
        **mcal_kwargs
    ):
        check_langgraph()

        # Imported lazily to avoid circular imports at module load time.
        from mcal import MCAL

        # NOTE(review): embedding_provider is accepted but not forwarded to
        # MCAL below — confirm whether MCAL should receive it.
        self.user_id = user_id
        self._mcal = MCAL(
            llm_provider=llm_provider,
            storage_path=storage_path,
            **mcal_kwargs
        )

    @property
    def mcal(self) -> "MCAL":
        """Get the underlying MCAL instance."""
        return self._mcal

    def as_store(self) -> MCALStore:
        """Get a LangGraph BaseStore backed by this memory."""
        return MCALStore(self._mcal)

    def _convert_messages(self, messages: Sequence[BaseMessage]) -> List[Dict[str, str]]:
        """Convert LangChain messages to MCAL role/content dicts.

        "ai" messages map to the assistant role; everything else (human,
        system, tool, or objects without a ``type`` attribute) maps to user.
        """
        converted: List[Dict[str, str]] = []
        for message in messages:
            role = "assistant" if getattr(message, "type", None) == "ai" else "user"
            converted.append({"role": role, "content": str(message.content)})
        return converted

    async def add(self, messages: Sequence[BaseMessage]) -> Dict[str, Any]:
        """
        Add messages to MCAL memory.

        Args:
            messages: LangChain message sequence

        Returns:
            Extraction result with goals and decisions
        """
        outcome = await self._mcal.add(self._convert_messages(messages), user_id=self.user_id)
        graph = outcome.unified_graph
        if not graph:
            return {"goals": [], "decisions": [], "node_count": 0}
        return {
            "goals": graph.get_active_goals(),
            "decisions": graph.get_all_decisions_with_detail(),
            "node_count": graph.node_count,
        }

    async def get_context(self, query: str, max_tokens: int = 4000) -> str:
        """
        Get goal-aware context for a query.

        Args:
            query: The current query/task
            max_tokens: Maximum context tokens

        Returns:
            Formatted context string
        """
        return await self._mcal.get_context(
            query=query,
            user_id=self.user_id,
            max_tokens=max_tokens
        )

    async def search(self, query: str, top_k: int = 10) -> List[Dict[str, Any]]:
        """
        Search memory for relevant information.

        Args:
            query: Search query
            top_k: Number of results

        Returns:
            List of search results
        """
        response = await self._mcal.search(query, user_id=self.user_id, top_k=top_k)
        return response.results if response else []

    def update_node(self) -> Callable:
        """
        Create a LangGraph node that updates memory.

        Usage:
            graph.add_node("update_memory", memory.update_node())

        Returns:
            Async function suitable for add_node()
        """
        async def _update_memory(state: Dict[str, Any]) -> Dict[str, Any]:
            # Guard clause: nothing to ingest.
            messages = state.get("messages", [])
            if not messages:
                return {"memory_updated": False}
            extraction = await self.add(messages)
            return {
                "memory_updated": True,
                "active_goals": extraction.get("goals", []),
                "decisions": extraction.get("decisions", []),
            }

        return _update_memory

    def context_node(self, query_key: str = "query") -> Callable:
        """
        Create a LangGraph node that retrieves context.

        Usage:
            graph.add_node("get_context", memory.context_node())

        Args:
            query_key: State key containing the query

        Returns:
            Async function suitable for add_node()
        """
        async def _get_context(state: Dict[str, Any]) -> Dict[str, Any]:
            query = state.get(query_key, "")
            if not query:
                return {"memory_context": ""}
            return {"memory_context": await self.get_context(query)}

        return _get_context
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
__all__ = ["MCALMemory", "MCALMemoryConfig"]
|
mcal_langgraph/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Marker file for PEP 561 typed package
|
mcal_langgraph/store.py
ADDED
|
@@ -0,0 +1,564 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MCALStore: LangGraph BaseStore implementation with goal-aware search.
|
|
3
|
+
|
|
4
|
+
This is the main integration point for LangGraph users.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import asyncio
|
|
10
|
+
import json
|
|
11
|
+
import threading
|
|
12
|
+
from datetime import datetime, timezone, timedelta
|
|
13
|
+
from typing import Any, Dict, Iterable, List, Literal, Tuple, TYPE_CHECKING
|
|
14
|
+
|
|
15
|
+
from mcal_langgraph._compat import (
|
|
16
|
+
LANGGRAPH_AVAILABLE,
|
|
17
|
+
check_langgraph,
|
|
18
|
+
BaseStore,
|
|
19
|
+
Item,
|
|
20
|
+
SearchItem,
|
|
21
|
+
GetOp,
|
|
22
|
+
PutOp,
|
|
23
|
+
SearchOp,
|
|
24
|
+
ListNamespacesOp,
|
|
25
|
+
NamespacePath,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
if TYPE_CHECKING:
|
|
29
|
+
from mcal import MCAL
|
|
30
|
+
|
|
31
|
+
# Type aliases
|
|
32
|
+
Op = Any
|
|
33
|
+
Result = Any
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def _run_sync(coro):
|
|
37
|
+
"""
|
|
38
|
+
Run async coroutine synchronously, handling existing event loops.
|
|
39
|
+
|
|
40
|
+
Works in Lambda, Jupyter, FastAPI, and other async environments.
|
|
41
|
+
"""
|
|
42
|
+
try:
|
|
43
|
+
loop = asyncio.get_running_loop()
|
|
44
|
+
except RuntimeError:
|
|
45
|
+
# No running loop - safe to use asyncio.run()
|
|
46
|
+
return asyncio.run(coro)
|
|
47
|
+
else:
|
|
48
|
+
# Loop already running - run in thread pool
|
|
49
|
+
import concurrent.futures
|
|
50
|
+
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
|
|
51
|
+
future = pool.submit(asyncio.run, coro)
|
|
52
|
+
return future.result()
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class MCALStore(BaseStore if LANGGRAPH_AVAILABLE else object):
|
|
56
|
+
"""
|
|
57
|
+
LangGraph-compatible store backed by MCAL.
|
|
58
|
+
|
|
59
|
+
Implements LangGraph's BaseStore interface, providing MCAL's
|
|
60
|
+
goal-aware memory capabilities through the standard LangGraph API.
|
|
61
|
+
|
|
62
|
+
Key Advantages over generic stores:
|
|
63
|
+
- Goal-aware search (MCAL's unique value)
|
|
64
|
+
- Decision tracking in search results
|
|
65
|
+
- Intent preservation across conversations
|
|
66
|
+
|
|
67
|
+
Usage:
|
|
68
|
+
from mcal import MCAL
|
|
69
|
+
from mcal_langgraph import MCALStore
|
|
70
|
+
|
|
71
|
+
mcal = MCAL(goal="Build fraud detection system")
|
|
72
|
+
store = MCALStore(mcal)
|
|
73
|
+
|
|
74
|
+
# Use with LangGraph
|
|
75
|
+
from langgraph.prebuilt import create_react_agent
|
|
76
|
+
agent = create_react_agent(model, tools, store=store)
|
|
77
|
+
|
|
78
|
+
# Or use directly
|
|
79
|
+
await store.aput(("user_123",), "mem1", {"text": "Use PostgreSQL"})
|
|
80
|
+
results = await store.asearch(("user_123",), query="database choice")
|
|
81
|
+
"""
|
|
82
|
+
|
|
83
|
+
def __init__(self, mcal: "MCAL"):
|
|
84
|
+
"""
|
|
85
|
+
Initialize MCALStore.
|
|
86
|
+
|
|
87
|
+
Args:
|
|
88
|
+
mcal: Initialized MCAL instance
|
|
89
|
+
"""
|
|
90
|
+
check_langgraph()
|
|
91
|
+
self._mcal = mcal
|
|
92
|
+
self._lock = threading.RLock() # Thread safety for writes
|
|
93
|
+
|
|
94
|
+
# In-memory storage for items
|
|
95
|
+
self._items: Dict[Tuple[str, ...], Dict[str, Dict[str, Any]]] = {}
|
|
96
|
+
self._created_at: Dict[Tuple[str, ...], Dict[str, datetime]] = {}
|
|
97
|
+
self._updated_at: Dict[Tuple[str, ...], Dict[str, datetime]] = {}
|
|
98
|
+
|
|
99
|
+
# TTL support (Issue #057)
|
|
100
|
+
self._ttl: Dict[Tuple[str, ...], Dict[str, float]] = {} # TTL in minutes
|
|
101
|
+
self._expires_at: Dict[Tuple[str, ...], Dict[str, datetime]] = {} # Expiration times
|
|
102
|
+
|
|
103
|
+
    @property
    def mcal(self) -> "MCAL":
        """Get the underlying MCAL instance (for direct MCAL API access)."""
        return self._mcal
|
|
107
|
+
|
|
108
|
+
# ========== TTL Support (Issue #057) ==========
|
|
109
|
+
|
|
110
|
+
def _is_expired(self, namespace: Tuple[str, ...], key: str) -> bool:
|
|
111
|
+
"""Check if an item has expired based on TTL."""
|
|
112
|
+
if namespace not in self._expires_at:
|
|
113
|
+
return False
|
|
114
|
+
if key not in self._expires_at.get(namespace, {}):
|
|
115
|
+
return False
|
|
116
|
+
|
|
117
|
+
expires_at = self._expires_at[namespace][key]
|
|
118
|
+
return datetime.now(timezone.utc) > expires_at
|
|
119
|
+
|
|
120
|
+
def _refresh_ttl(self, namespace: Tuple[str, ...], key: str) -> None:
|
|
121
|
+
"""Refresh TTL by updating expiration time."""
|
|
122
|
+
if namespace in self._ttl and key in self._ttl.get(namespace, {}):
|
|
123
|
+
ttl_minutes = self._ttl[namespace][key]
|
|
124
|
+
self._expires_at[namespace][key] = datetime.now(timezone.utc) + timedelta(minutes=ttl_minutes)
|
|
125
|
+
|
|
126
|
+
def _cleanup_ttl(self, namespace: Tuple[str, ...], key: str) -> None:
|
|
127
|
+
"""Clean up TTL tracking for a deleted item."""
|
|
128
|
+
if namespace in self._ttl and key in self._ttl.get(namespace, {}):
|
|
129
|
+
del self._ttl[namespace][key]
|
|
130
|
+
if namespace in self._expires_at and key in self._expires_at.get(namespace, {}):
|
|
131
|
+
del self._expires_at[namespace][key]
|
|
132
|
+
|
|
133
|
+
# ========== Abstract Methods (Required by BaseStore) ==========
|
|
134
|
+
|
|
135
|
+
    def batch(self, ops: Iterable[Op]) -> List[Result]:
        """Execute multiple operations synchronously in a single batch.

        Thin sync facade over :meth:`abatch`; ``_run_sync`` handles the case
        where an event loop is already running in this thread.
        """
        return _run_sync(self.abatch(ops))
|
|
138
|
+
|
|
139
|
+
    async def abatch(self, ops: Iterable[Op]) -> List[Result]:
        """Execute multiple operations asynchronously in a single batch.

        Dispatches each op to the matching a* method, preserving input order
        in the results list.  Operations run sequentially, not concurrently.
        Unknown op types produce a ``None`` result rather than raising.
        """
        results = []
        for op in ops:
            if isinstance(op, GetOp):
                result = await self.aget(op.namespace, op.key)
                results.append(result)
            elif isinstance(op, PutOp):
                # Puts have no return value; keep a positional placeholder.
                await self.aput(op.namespace, op.key, op.value)
                results.append(None)
            elif isinstance(op, SearchOp):
                result = await self.asearch(
                    op.namespace_prefix,
                    query=op.query,
                    filter=op.filter,
                    limit=op.limit,
                    offset=op.offset,
                )
                results.append(result)
            elif isinstance(op, ListNamespacesOp):
                # Translate LangGraph match conditions into prefix/suffix
                # arguments; a later condition of the same type wins.
                prefix = None
                suffix = None
                if op.match_conditions:
                    for cond in op.match_conditions:
                        if cond.match_type == "prefix":
                            prefix = cond.path
                        elif cond.match_type == "suffix":
                            suffix = cond.path
                result = await self.alist_namespaces(
                    prefix=prefix,
                    suffix=suffix,
                    max_depth=op.max_depth,
                    limit=op.limit,
                    offset=op.offset,
                )
                results.append(result)
            else:
                results.append(None)
        return results
|
|
178
|
+
|
|
179
|
+
# ========== Get Operations ==========
|
|
180
|
+
|
|
181
|
+
    def get(
        self,
        namespace: Tuple[str, ...],
        key: str,
        *,
        refresh_ttl: bool | None = None,
    ) -> Item | None:
        """Retrieve a single item synchronously.

        Sync facade over :meth:`aget`, driven via ``_run_sync``.
        """
        return _run_sync(self.aget(namespace, key, refresh_ttl=refresh_ttl))
|
|
190
|
+
|
|
191
|
+
    async def aget(
        self,
        namespace: Tuple[str, ...],
        key: str,
        *,
        refresh_ttl: bool | None = None,
    ) -> Item | None:
        """
        Retrieve a single item asynchronously.

        Args:
            namespace: The namespace tuple for the item
            key: The unique key within the namespace
            refresh_ttl: If True, refresh the TTL on access. Default: True for items with TTL.

        Returns:
            Item if found and not expired, None otherwise
        """
        # Unlocked existence checks; the mutations below re-check under lock.
        if namespace not in self._items:
            return None
        if key not in self._items[namespace]:
            return None

        # Check TTL expiration (lazy expiration): expired items are removed
        # on read rather than by a background sweeper.
        if self._is_expired(namespace, key):
            # Item has expired - delete it
            with self._lock:
                if namespace in self._items and key in self._items[namespace]:
                    del self._items[namespace][key]
                    self._cleanup_ttl(namespace, key)
            return None

        # Refresh TTL on access if requested (default: True for items with TTL).
        # Note: None means "refresh"; only an explicit False skips it.
        if refresh_ttl is not False:
            with self._lock:
                self._refresh_ttl(namespace, key)

        value = self._items[namespace][key]
        # Fall back to "now" if timestamps are missing for this entry.
        created = self._created_at.get(namespace, {}).get(key, datetime.now(timezone.utc))
        updated = self._updated_at.get(namespace, {}).get(key, datetime.now(timezone.utc))

        return Item(
            namespace=namespace,
            key=key,
            value=value,
            created_at=created,
            updated_at=updated,
        )
|
|
239
|
+
|
|
240
|
+
# ========== Put Operations ==========
|
|
241
|
+
|
|
242
|
+
    def put(
        self,
        namespace: Tuple[str, ...],
        key: str,
        value: Dict[str, Any],
        index: Literal[False] | List[str] | None = None,
        *,
        ttl: float | None = None,
    ) -> None:
        """Store or update an item synchronously.

        Sync facade over :meth:`aput`, driven via ``_run_sync``.
        """
        _run_sync(self.aput(namespace, key, value, index, ttl=ttl))
|
|
253
|
+
|
|
254
|
+
    async def aput(
        self,
        namespace: Tuple[str, ...],
        key: str,
        value: Dict[str, Any],
        index: Literal[False] | List[str] | None = None,
        *,
        ttl: float | None = None,
    ) -> None:
        """
        Store or update an item asynchronously.

        Args:
            namespace: The namespace tuple for the item
            key: The unique key within the namespace
            value: The value to store (must be a dict)
            index: Fields to index for search. False to skip indexing.
            ttl: Time-to-live in minutes. None for no expiration.
        """
        now = datetime.now(timezone.utc)

        # Thread-safe write: dict bookkeeping happens entirely under the lock.
        with self._lock:
            if namespace not in self._items:
                self._items[namespace] = {}
                self._created_at[namespace] = {}
                self._updated_at[namespace] = {}

            # Only first insert sets created_at; updates preserve it.
            if key not in self._items[namespace]:
                self._created_at[namespace][key] = now

            self._items[namespace][key] = value
            self._updated_at[namespace][key] = now

            # Store TTL if provided (Issue #057)
            if ttl is not None:
                if namespace not in self._ttl:
                    self._ttl[namespace] = {}
                    self._expires_at[namespace] = {}
                self._ttl[namespace][key] = ttl
                self._expires_at[namespace][key] = now + timedelta(minutes=ttl)

        # Process through MCAL for goal-aware retrieval (outside lock).
        # Convention: namespace[0] is treated as the user id — confirm callers.
        user_id = namespace[0] if namespace else "default"
        # Prefer "text", then "content", else serialize the whole value.
        text = value.get("text", value.get("content", json.dumps(value)))

        if text and index is not False:
            try:
                await self._mcal.add(
                    messages=[{"role": "user", "content": text}],
                    user_id=user_id,
                    metadata={"key": key, "namespace": "/".join(namespace)},
                )
            except Exception:
                pass  # Don't fail if MCAL processing fails (best-effort indexing)
|
|
309
|
+
|
|
310
|
+
# ========== Delete Operations ==========
|
|
311
|
+
|
|
312
|
+
def delete(self, namespace: Tuple[str, ...], key: str) -> None:
    """Synchronously delete an item by running :meth:`adelete` to completion."""
    coro = self.adelete(namespace, key)
    _run_sync(coro)
|
|
315
|
+
|
|
316
|
+
async def adelete(self, namespace: Tuple[str, ...], key: str) -> None:
    """Asynchronously delete an item; silently a no-op when it does not exist."""
    with self._lock:
        # Drop the stored value and both timestamp records; the
        # get-then-pop pattern tolerates a missing namespace or key.
        for table in (self._items, self._created_at, self._updated_at):
            bucket = table.get(namespace)
            if bucket is not None:
                bucket.pop(key, None)
        # Remove any TTL bookkeeping tied to this entry.
        self._cleanup_ttl(namespace, key)
|
|
327
|
+
|
|
328
|
+
# ========== Search Operations ==========
|
|
329
|
+
|
|
330
|
+
def search(
    self,
    namespace_prefix: Tuple[str, ...],
    /,
    *,
    query: str | None = None,
    filter: Dict[str, Any] | None = None,
    limit: int = 10,
    offset: int = 0,
    refresh_ttl: bool | None = None,
) -> List[SearchItem]:
    """Blocking search: delegate to :meth:`asearch` and wait for its result."""
    coro = self.asearch(
        namespace_prefix,
        query=query,
        filter=filter,
        limit=limit,
        offset=offset,
        refresh_ttl=refresh_ttl,
    )
    return _run_sync(coro)
|
|
352
|
+
|
|
353
|
+
async def asearch(
    self,
    namespace_prefix: Tuple[str, ...],
    /,
    *,
    query: str | None = None,
    filter: Dict[str, Any] | None = None,
    limit: int = 10,
    offset: int = 0,
    refresh_ttl: bool | None = None,
) -> List[SearchItem]:
    """
    Search for items asynchronously.

    MCAL Enhancement: Uses goal-aware retrieval when query is provided,
    enriching results with goals and decisions.

    Args:
        namespace_prefix: Only in-memory items whose namespace starts with
            this tuple are considered; MCAL hits reuse it verbatim.
        query: Optional text query. Triggers MCAL goal-aware search and a
            substring match on in-memory item text fields.
        filter: Optional field filter applied to in-memory items only.
        limit: Maximum number of results to return.
        offset: Number of results to skip.
        refresh_ttl: Refresh TTLs of matched in-memory items unless
            explicitly False (None defaults to refreshing).

    Returns:
        SearchItem results: MCAL hits first, then in-memory matches,
        sliced by offset/limit.
    """
    results = []

    # If query provided, use MCAL's goal-aware search
    if query:
        # The first namespace component doubles as the MCAL user id.
        user_id = namespace_prefix[0] if namespace_prefix else "default"
        try:
            mcal_results = await self._mcal.search(
                query=query,
                user_id=user_id,
                top_k=limit,
            )

            for i, result in enumerate(mcal_results.results if mcal_results else []):
                # NOTE(review): offset and limit are applied both here and
                # in the final slice below, so MCAL hits are effectively
                # offset/limited twice — confirm this paging is intended.
                if i < offset:
                    continue
                if len(results) >= limit:
                    break

                now = datetime.now(timezone.utc)
                # MCAL hits are synthetic items, not addressable store
                # entries, hence the generated "mcal_<i>" keys.
                results.append(SearchItem(
                    namespace=namespace_prefix,
                    key=f"mcal_{i}",
                    value={
                        "text": result.get("content", result.get("memory", "")),
                        "goals": result.get("goals", []),
                        "decisions": result.get("decisions", []),
                        "score": result.get("score", 0.0),
                        "metadata": result.get("metadata", {}),
                    },
                    created_at=now,
                    updated_at=now,
                    score=result.get("score", 0.0),
                ))
        except Exception:
            # Best-effort enrichment: MCAL failures must not break search.
            pass

    # Also search in-memory items
    # Iterate over snapshots (list(...)) so lazy-expiration deletes below
    # don't mutate the dicts mid-iteration.
    for ns, items in list(self._items.items()):
        if not self._namespace_matches_prefix(ns, namespace_prefix):
            continue

        for key, value in list(items.items()):
            # Skip expired items (lazy expiration)
            if self._is_expired(ns, key):
                with self._lock:
                    if ns in self._items and key in self._items[ns]:
                        del self._items[ns][key]
                        self._cleanup_ttl(ns, key)
                continue

            if filter and not self._matches_filter(value, filter):
                continue

            if query and not self._text_matches_query(value, query):
                continue

            # Refresh TTL on search access if requested
            # (None is treated the same as True here).
            if refresh_ttl is not False:
                with self._lock:
                    self._refresh_ttl(ns, key)

            # Fall back to "now" if timestamps were never recorded.
            created = self._created_at.get(ns, {}).get(key, datetime.now(timezone.utc))
            updated = self._updated_at.get(ns, {}).get(key, datetime.now(timezone.utc))

            results.append(SearchItem(
                namespace=ns,
                key=key,
                value=value,
                created_at=created,
                updated_at=updated,
                score=1.0 if query else None,
            ))

    return results[offset:offset + limit]
|
|
444
|
+
|
|
445
|
+
# ========== List Namespaces ==========
|
|
446
|
+
|
|
447
|
+
def list_namespaces(
    self,
    *,
    prefix: NamespacePath | None = None,
    suffix: NamespacePath | None = None,
    max_depth: int | None = None,
    limit: int = 100,
    offset: int = 0,
) -> List[Tuple[str, ...]]:
    """Blocking wrapper: run :meth:`alist_namespaces` to completion."""
    coro = self.alist_namespaces(
        prefix=prefix,
        suffix=suffix,
        max_depth=max_depth,
        limit=limit,
        offset=offset,
    )
    return _run_sync(coro)
|
|
466
|
+
|
|
467
|
+
async def alist_namespaces(
    self,
    *,
    prefix: NamespacePath | None = None,
    suffix: NamespacePath | None = None,
    max_depth: int | None = None,
    limit: int = 100,
    offset: int = 0,
) -> List[Tuple[str, ...]]:
    """Asynchronously list distinct namespaces, optionally filtered.

    Namespaces deeper than ``max_depth`` are truncated to that depth
    before deduplication; results are sorted and paged by offset/limit.
    """
    found = set()
    for candidate in self._items.keys():
        # Apply prefix/suffix filters first (a falsy filter matches all).
        if prefix and not self._namespace_matches_prefix(candidate, prefix):
            continue
        if suffix and not self._namespace_matches_suffix(candidate, suffix):
            continue
        # Collapse deep namespaces so they deduplicate at max_depth.
        if max_depth and len(candidate) > max_depth:
            candidate = candidate[:max_depth]
        found.add(candidate)
    return sorted(found)[offset:offset + limit]
|
|
493
|
+
|
|
494
|
+
# ========== Helper Methods ==========
|
|
495
|
+
|
|
496
|
+
def _namespace_matches_prefix(
|
|
497
|
+
self,
|
|
498
|
+
namespace: Tuple[str, ...],
|
|
499
|
+
prefix: Tuple[str, ...],
|
|
500
|
+
) -> bool:
|
|
501
|
+
"""Check if namespace starts with prefix."""
|
|
502
|
+
if not prefix:
|
|
503
|
+
return True
|
|
504
|
+
if len(namespace) < len(prefix):
|
|
505
|
+
return False
|
|
506
|
+
return namespace[:len(prefix)] == prefix
|
|
507
|
+
|
|
508
|
+
def _namespace_matches_suffix(
|
|
509
|
+
self,
|
|
510
|
+
namespace: Tuple[str, ...],
|
|
511
|
+
suffix: Tuple[str, ...],
|
|
512
|
+
) -> bool:
|
|
513
|
+
"""Check if namespace ends with suffix."""
|
|
514
|
+
if not suffix:
|
|
515
|
+
return True
|
|
516
|
+
if len(namespace) < len(suffix):
|
|
517
|
+
return False
|
|
518
|
+
return namespace[-len(suffix):] == suffix
|
|
519
|
+
|
|
520
|
+
def _matches_filter(
|
|
521
|
+
self,
|
|
522
|
+
value: Dict[str, Any],
|
|
523
|
+
filter: Dict[str, Any],
|
|
524
|
+
) -> bool:
|
|
525
|
+
"""Check if value matches filter criteria."""
|
|
526
|
+
for key, expected in filter.items():
|
|
527
|
+
if key not in value:
|
|
528
|
+
return False
|
|
529
|
+
actual = value[key]
|
|
530
|
+
|
|
531
|
+
if isinstance(expected, dict):
|
|
532
|
+
for op, val in expected.items():
|
|
533
|
+
if op == "$eq" and actual != val:
|
|
534
|
+
return False
|
|
535
|
+
elif op == "$ne" and actual == val:
|
|
536
|
+
return False
|
|
537
|
+
elif op == "$gt" and not (actual > val):
|
|
538
|
+
return False
|
|
539
|
+
elif op == "$gte" and not (actual >= val):
|
|
540
|
+
return False
|
|
541
|
+
elif op == "$lt" and not (actual < val):
|
|
542
|
+
return False
|
|
543
|
+
elif op == "$lte" and not (actual <= val):
|
|
544
|
+
return False
|
|
545
|
+
else:
|
|
546
|
+
if actual != expected:
|
|
547
|
+
return False
|
|
548
|
+
return True
|
|
549
|
+
|
|
550
|
+
def _text_matches_query(
|
|
551
|
+
self,
|
|
552
|
+
value: Dict[str, Any],
|
|
553
|
+
query: str,
|
|
554
|
+
) -> bool:
|
|
555
|
+
"""Simple text matching for query."""
|
|
556
|
+
query_lower = query.lower()
|
|
557
|
+
for field in ["text", "content", "memory", "description"]:
|
|
558
|
+
if field in value:
|
|
559
|
+
if query_lower in str(value[field]).lower():
|
|
560
|
+
return True
|
|
561
|
+
return False
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
# Explicit public API: only MCALStore is exported via ``from ... import *``.
__all__ = ["MCALStore"]
|