crewai-substrate-memory 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crewai_substrate_memory-0.1.1/PKG-INFO +140 -0
- crewai_substrate_memory-0.1.1/README.md +114 -0
- crewai_substrate_memory-0.1.1/pyproject.toml +49 -0
- crewai_substrate_memory-0.1.1/src/crewai_substrate/__init__.py +10 -0
- crewai_substrate_memory-0.1.1/src/crewai_substrate/client.py +159 -0
- crewai_substrate_memory-0.1.1/src/crewai_substrate/memory.py +287 -0
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: crewai-substrate-memory
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: SUBSTRATE persistent memory provider for CrewAI -- causal memory, emotion, and identity for your AI crews.
|
|
5
|
+
Project-URL: Homepage, https://garmolabs.com/substrate.html
|
|
6
|
+
Project-URL: Documentation, https://github.com/PKaldone/substrate-mcp
|
|
7
|
+
Project-URL: Repository, https://github.com/PKaldone/substrate-mcp
|
|
8
|
+
Project-URL: Issues, https://garmolabs.com/substrate.html
|
|
9
|
+
Author-email: Garmo Labs <hello@garmolabs.com>
|
|
10
|
+
License-Expression: MIT
|
|
11
|
+
Keywords: ai,causal-memory,crewai,emotional-state,garmo-labs,mcp,memory,substrate
|
|
12
|
+
Classifier: Development Status :: 4 - Beta
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
20
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
21
|
+
Classifier: Typing :: Typed
|
|
22
|
+
Requires-Python: >=3.10
|
|
23
|
+
Requires-Dist: crewai>=0.80
|
|
24
|
+
Requires-Dist: httpx>=0.25
|
|
25
|
+
Description-Content-Type: text/markdown
|
|
26
|
+
|
|
27
|
+
# crewai-substrate-memory
|
|
28
|
+
|
|
29
|
+
SUBSTRATE persistent memory provider for [CrewAI](https://crewai.com). Gives your AI crew causal memory, emotional context, and cryptographic identity continuity through the [SUBSTRATE](https://garmolabs.com/substrate.html) MCP server.
|
|
30
|
+
|
|
31
|
+
## What SUBSTRATE adds to CrewAI
|
|
32
|
+
|
|
33
|
+
- **Causal memory** -- episodes linked by cause-effect rules, not just vector similarity
|
|
34
|
+
- **Emotional context** -- valence, arousal, dominance, certainty (no other provider has this)
|
|
35
|
+
- **Identity continuity** -- cryptographically signed proof-of-existence chain across sessions
|
|
36
|
+
- **Trust architecture** -- consistency ratings and verification status for every memory
|
|
37
|
+
- **Hybrid search** -- semantic + keyword retrieval across the entity's full knowledge store
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
```bash
|
|
42
|
+
pip install crewai-substrate-memory
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
## Quick start
|
|
46
|
+
|
|
47
|
+
```python
|
|
48
|
+
import os
|
|
49
|
+
from crewai import Agent, Task, Crew
|
|
50
|
+
from crewai_substrate import SubstrateMemoryProvider
|
|
51
|
+
|
|
52
|
+
# 1. Create the SUBSTRATE memory provider
|
|
53
|
+
memory = SubstrateMemoryProvider(
|
|
54
|
+
api_key=os.environ["SUBSTRATE_API_KEY"],
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
# 2. Define your agents
|
|
58
|
+
researcher = Agent(
|
|
59
|
+
role="Researcher",
|
|
60
|
+
goal="Find relevant information on a topic",
|
|
61
|
+
backstory="Expert researcher with deep analytical skills",
|
|
62
|
+
)
|
|
63
|
+
|
|
64
|
+
writer = Agent(
|
|
65
|
+
role="Writer",
|
|
66
|
+
goal="Write clear, compelling content",
|
|
67
|
+
backstory="Professional writer who crafts engaging narratives",
|
|
68
|
+
)
|
|
69
|
+
|
|
70
|
+
# 3. Define tasks
|
|
71
|
+
research_task = Task(
|
|
72
|
+
description="Research the latest developments in causal AI",
|
|
73
|
+
agent=researcher,
|
|
74
|
+
expected_output="A summary of key developments",
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
writing_task = Task(
|
|
78
|
+
description="Write a brief article based on the research",
|
|
79
|
+
agent=writer,
|
|
80
|
+
expected_output="A 500-word article",
|
|
81
|
+
)
|
|
82
|
+
|
|
83
|
+
# 4. Create the crew with SUBSTRATE memory
|
|
84
|
+
crew = Crew(
|
|
85
|
+
agents=[researcher, writer],
|
|
86
|
+
tasks=[research_task, writing_task],
|
|
87
|
+
memory=True,
|
|
88
|
+
memory_config={"provider": memory},
|
|
89
|
+
verbose=True,
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
result = crew.kickoff()
|
|
93
|
+
print(result)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## SUBSTRATE-exclusive features
|
|
97
|
+
|
|
98
|
+
### Emotional context
|
|
99
|
+
|
|
100
|
+
```python
|
|
101
|
+
# Get the entity's emotional state (UASV -- Unified Affective State Vector)
|
|
102
|
+
emotion = memory.get_emotional_context()
|
|
103
|
+
print(emotion)
|
|
104
|
+
# {"valence": 0.7, "arousal": 0.4, "dominance": 0.6, "certainty": 0.8}
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### Entity state (identity + trust)
|
|
108
|
+
|
|
109
|
+
```python
|
|
110
|
+
# Verify cryptographic identity and get trust scores
|
|
111
|
+
state = memory.get_entity_state()
|
|
112
|
+
print(state["identity"]) # Continuity chain verification
|
|
113
|
+
print(state["trust"]) # Trust scores and consistency ratings
|
|
114
|
+
```
|
|
115
|
+
|
|
116
|
+
### Memory statistics
|
|
117
|
+
|
|
118
|
+
```python
|
|
119
|
+
stats = memory.get_memory_stats()
|
|
120
|
+
print(stats)
|
|
121
|
+
# {"episode_count": 142, "rule_count": 37, "avg_probability": 0.82, ...}
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## Configuration
|
|
125
|
+
|
|
126
|
+
| Parameter | Default | Description |
|
|
127
|
+
|-------------|------------------------------------------------------|--------------------------------|
|
|
128
|
+
| `api_key` | `$SUBSTRATE_API_KEY` | Your SUBSTRATE API key |
|
|
129
|
+
| `base_url` | `https://substrate.garmolabs.com/mcp-server/mcp` | MCP server endpoint |
|
|
130
|
+
| `timeout` | `30.0` | HTTP request timeout (seconds) |
|
|
131
|
+
|
|
132
|
+
## API key
|
|
133
|
+
|
|
134
|
+
Get your API key at [garmolabs.com](https://garmolabs.com). The free tier includes `memory_search` and `get_emotion_state`. Upgrade to Pro for `hybrid_search` and `get_trust_state`.
|
|
135
|
+
|
|
136
|
+
## License
|
|
137
|
+
|
|
138
|
+
MIT -- see [LICENSE](LICENSE) for details.
|
|
139
|
+
|
|
140
|
+
Built by [Garmo Labs](https://garmolabs.com).
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
# crewai-substrate-memory
|
|
2
|
+
|
|
3
|
+
SUBSTRATE persistent memory provider for [CrewAI](https://crewai.com). Gives your AI crew causal memory, emotional context, and cryptographic identity continuity through the [SUBSTRATE](https://garmolabs.com/substrate.html) MCP server.
|
|
4
|
+
|
|
5
|
+
## What SUBSTRATE adds to CrewAI
|
|
6
|
+
|
|
7
|
+
- **Causal memory** -- episodes linked by cause-effect rules, not just vector similarity
|
|
8
|
+
- **Emotional context** -- valence, arousal, dominance, certainty (no other provider has this)
|
|
9
|
+
- **Identity continuity** -- cryptographically signed proof-of-existence chain across sessions
|
|
10
|
+
- **Trust architecture** -- consistency ratings and verification status for every memory
|
|
11
|
+
- **Hybrid search** -- semantic + keyword retrieval across the entity's full knowledge store
|
|
12
|
+
|
|
13
|
+
## Installation
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
pip install crewai-substrate-memory
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## Quick start
|
|
20
|
+
|
|
21
|
+
```python
|
|
22
|
+
import os
|
|
23
|
+
from crewai import Agent, Task, Crew
|
|
24
|
+
from crewai_substrate import SubstrateMemoryProvider
|
|
25
|
+
|
|
26
|
+
# 1. Create the SUBSTRATE memory provider
|
|
27
|
+
memory = SubstrateMemoryProvider(
|
|
28
|
+
api_key=os.environ["SUBSTRATE_API_KEY"],
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
# 2. Define your agents
|
|
32
|
+
researcher = Agent(
|
|
33
|
+
role="Researcher",
|
|
34
|
+
goal="Find relevant information on a topic",
|
|
35
|
+
backstory="Expert researcher with deep analytical skills",
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
writer = Agent(
|
|
39
|
+
role="Writer",
|
|
40
|
+
goal="Write clear, compelling content",
|
|
41
|
+
backstory="Professional writer who crafts engaging narratives",
|
|
42
|
+
)
|
|
43
|
+
|
|
44
|
+
# 3. Define tasks
|
|
45
|
+
research_task = Task(
|
|
46
|
+
description="Research the latest developments in causal AI",
|
|
47
|
+
agent=researcher,
|
|
48
|
+
expected_output="A summary of key developments",
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
writing_task = Task(
|
|
52
|
+
description="Write a brief article based on the research",
|
|
53
|
+
agent=writer,
|
|
54
|
+
expected_output="A 500-word article",
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
# 4. Create the crew with SUBSTRATE memory
|
|
58
|
+
crew = Crew(
|
|
59
|
+
agents=[researcher, writer],
|
|
60
|
+
tasks=[research_task, writing_task],
|
|
61
|
+
memory=True,
|
|
62
|
+
memory_config={"provider": memory},
|
|
63
|
+
verbose=True,
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
result = crew.kickoff()
|
|
67
|
+
print(result)
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
## SUBSTRATE-exclusive features
|
|
71
|
+
|
|
72
|
+
### Emotional context
|
|
73
|
+
|
|
74
|
+
```python
|
|
75
|
+
# Get the entity's emotional state (UASV -- Unified Affective State Vector)
|
|
76
|
+
emotion = memory.get_emotional_context()
|
|
77
|
+
print(emotion)
|
|
78
|
+
# {"valence": 0.7, "arousal": 0.4, "dominance": 0.6, "certainty": 0.8}
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Entity state (identity + trust)
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
# Verify cryptographic identity and get trust scores
|
|
85
|
+
state = memory.get_entity_state()
|
|
86
|
+
print(state["identity"]) # Continuity chain verification
|
|
87
|
+
print(state["trust"]) # Trust scores and consistency ratings
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
### Memory statistics
|
|
91
|
+
|
|
92
|
+
```python
|
|
93
|
+
stats = memory.get_memory_stats()
|
|
94
|
+
print(stats)
|
|
95
|
+
# {"episode_count": 142, "rule_count": 37, "avg_probability": 0.82, ...}
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
## Configuration
|
|
99
|
+
|
|
100
|
+
| Parameter | Default | Description |
|
|
101
|
+
|-------------|------------------------------------------------------|--------------------------------|
|
|
102
|
+
| `api_key` | `$SUBSTRATE_API_KEY` | Your SUBSTRATE API key |
|
|
103
|
+
| `base_url` | `https://substrate.garmolabs.com/mcp-server/mcp` | MCP server endpoint |
|
|
104
|
+
| `timeout` | `30.0` | HTTP request timeout (seconds) |
|
|
105
|
+
|
|
106
|
+
## API key
|
|
107
|
+
|
|
108
|
+
Get your API key at [garmolabs.com](https://garmolabs.com). The free tier includes `memory_search` and `get_emotion_state`. Upgrade to Pro for `hybrid_search` and `get_trust_state`.
|
|
109
|
+
|
|
110
|
+
## License
|
|
111
|
+
|
|
112
|
+
MIT -- see [LICENSE](LICENSE) for details.
|
|
113
|
+
|
|
114
|
+
Built by [Garmo Labs](https://garmolabs.com).
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "crewai-substrate-memory"
|
|
7
|
+
version = "0.1.1"
|
|
8
|
+
description = "SUBSTRATE persistent memory provider for CrewAI -- causal memory, emotion, and identity for your AI crews."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = "MIT"
|
|
11
|
+
requires-python = ">=3.10"
|
|
12
|
+
authors = [
|
|
13
|
+
{ name = "Garmo Labs", email = "hello@garmolabs.com" },
|
|
14
|
+
]
|
|
15
|
+
keywords = [
|
|
16
|
+
"crewai",
|
|
17
|
+
"substrate",
|
|
18
|
+
"memory",
|
|
19
|
+
"ai",
|
|
20
|
+
"mcp",
|
|
21
|
+
"causal-memory",
|
|
22
|
+
"emotional-state",
|
|
23
|
+
"garmo-labs",
|
|
24
|
+
]
|
|
25
|
+
classifiers = [
|
|
26
|
+
"Development Status :: 4 - Beta",
|
|
27
|
+
"Intended Audience :: Developers",
|
|
28
|
+
"License :: OSI Approved :: MIT License",
|
|
29
|
+
"Programming Language :: Python :: 3",
|
|
30
|
+
"Programming Language :: Python :: 3.10",
|
|
31
|
+
"Programming Language :: Python :: 3.11",
|
|
32
|
+
"Programming Language :: Python :: 3.12",
|
|
33
|
+
"Programming Language :: Python :: 3.13",
|
|
34
|
+
"Topic :: Scientific/Engineering :: Artificial Intelligence",
|
|
35
|
+
"Typing :: Typed",
|
|
36
|
+
]
|
|
37
|
+
dependencies = [
|
|
38
|
+
"crewai>=0.80",
|
|
39
|
+
"httpx>=0.25",
|
|
40
|
+
]
|
|
41
|
+
|
|
42
|
+
[project.urls]
|
|
43
|
+
Homepage = "https://garmolabs.com/substrate.html"
|
|
44
|
+
Documentation = "https://github.com/PKaldone/substrate-mcp"
|
|
45
|
+
Repository = "https://github.com/PKaldone/substrate-mcp"
|
|
46
|
+
Issues = "https://garmolabs.com/substrate.html"
|
|
47
|
+
|
|
48
|
+
[tool.hatch.build.targets.wheel]
|
|
49
|
+
packages = ["src/crewai_substrate"]
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"""
|
|
2
|
+
crewai-substrate-memory -- SUBSTRATE persistent memory for CrewAI.
|
|
3
|
+
|
|
4
|
+
Gives your AI crew causal memory, emotional context, and cryptographic
|
|
5
|
+
identity continuity through the SUBSTRATE MCP server.
|
|
6
|
+
"""
|
|
7
|
+
from crewai_substrate.memory import SubstrateMemoryProvider
|
|
8
|
+
|
|
9
|
+
__all__ = ["SubstrateMemoryProvider"]
|
|
10
|
+
__version__ = "0.1.0"
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Synchronous HTTP client for the SUBSTRATE MCP server (JSON-RPC over HTTP).
|
|
3
|
+
|
|
4
|
+
The SUBSTRATE MCP endpoint accepts standard JSON-RPC 2.0 requests with
|
|
5
|
+
Bearer token authentication. This client wraps that transport into a
|
|
6
|
+
clean Python interface for use by the memory provider.
|
|
7
|
+
"""
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import logging
|
|
12
|
+
from dataclasses import dataclass, field
|
|
13
|
+
from typing import Any
|
|
14
|
+
|
|
15
|
+
import httpx
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger("crewai_substrate.client")
|
|
18
|
+
|
|
19
|
+
_DEFAULT_BASE_URL = "https://substrate.garmolabs.com/mcp-server/mcp"
|
|
20
|
+
_DEFAULT_TIMEOUT = 30.0
|
|
21
|
+
_JSONRPC_VERSION = "2.0"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class SubstrateClientError(Exception):
    """Raised when a SUBSTRATE MCP request fails.

    Attributes:
        code: JSON-RPC (or HTTP) error code; ``-1`` when no code is known.
    """

    def __init__(self, message: str, code: int = -1) -> None:
        # Store the code before delegating so the attribute exists even if
        # a subclass overrides Exception behavior.
        self.code = code
        super().__init__(message)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass(frozen=True)
class SubstrateClientConfig:
    """Immutable configuration for the SUBSTRATE MCP client.

    Attributes:
        api_key: Bearer token sent on every request (required by the client).
        base_url: Full URL of the SUBSTRATE MCP endpoint.
        timeout: HTTP request timeout in seconds.
    """

    api_key: str
    base_url: str = _DEFAULT_BASE_URL
    timeout: float = _DEFAULT_TIMEOUT
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class SubstrateClient:
    """
    Synchronous JSON-RPC client for the SUBSTRATE MCP server.

    All tool calls go through ``call_tool`` which handles the JSON-RPC
    envelope, authentication, error handling, and response unwrapping.
    Instances may also be used as context managers to guarantee the
    underlying HTTP client is closed.
    """

    def __init__(self, config: SubstrateClientConfig) -> None:
        """Create a client.

        Raises:
            ValueError: if ``config.api_key`` is empty.
        """
        if not config.api_key:
            raise ValueError("api_key is required -- set SUBSTRATE_API_KEY")
        self._config = config
        # Monotonically increasing JSON-RPC request id (see _next_id).
        self._request_id = 0
        self._http = httpx.Client(
            base_url="",  # We use full URL in requests
            timeout=config.timeout,
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {config.api_key}",
            },
        )
        # MCP session id, captured from the first response that provides one
        # and echoed back on subsequent requests.
        self._session_id: str | None = None

    # -- Public API --------------------------------------------------------

    def call_tool(self, name: str, arguments: dict[str, Any] | None = None) -> Any:
        """
        Call a SUBSTRATE MCP tool by name and return the result content.

        Args:
            name: MCP tool name (e.g. ``"memory_search"``).
            arguments: Tool arguments; ``None`` is sent as ``{}``.

        Raises ``SubstrateClientError`` on JSON-RPC errors or HTTP failures.
        """
        payload = {
            "jsonrpc": _JSONRPC_VERSION,
            "id": self._next_id(),
            "method": "tools/call",
            "params": {
                "name": name,
                "arguments": arguments or {},
            },
        }
        return self._send(payload)

    def initialize(self) -> dict[str, Any]:
        """Perform the MCP initialize handshake and return the server result."""
        payload = {
            "jsonrpc": _JSONRPC_VERSION,
            "id": self._next_id(),
            "method": "initialize",
            "params": {
                "protocolVersion": "2024-11-05",
                "capabilities": {},
                "clientInfo": {
                    "name": "crewai-substrate-memory",
                    # Keep in sync with the package version in pyproject.toml
                    # (was hard-coded "0.1.0" against a 0.1.1 release).
                    "version": "0.1.1",
                },
            },
        }
        return self._send(payload)

    def close(self) -> None:
        """Close the HTTP client and release resources."""
        self._http.close()

    # -- Internal ----------------------------------------------------------

    def _next_id(self) -> int:
        """Return the next JSON-RPC request id (starts at 1)."""
        self._request_id += 1
        return self._request_id

    def _send(self, payload: dict[str, Any]) -> Any:
        """Send a JSON-RPC request and return the unwrapped ``result``.

        Maps transport and HTTP failures onto ``SubstrateClientError`` so
        callers deal with a single exception type.
        """
        headers: dict[str, str] = {}
        if self._session_id:
            headers["Mcp-Session-Id"] = self._session_id

        try:
            response = self._http.post(
                self._config.base_url,
                json=payload,
                headers=headers,
            )
        except httpx.HTTPError as exc:
            raise SubstrateClientError(f"HTTP request failed: {exc}") from exc

        # Capture session ID from response so later requests reuse it.
        session_id = response.headers.get("Mcp-Session-Id")
        if session_id:
            self._session_id = session_id

        if response.status_code == 401:
            raise SubstrateClientError("Authentication failed -- check your SUBSTRATE_API_KEY", code=-32000)

        if response.status_code == 429:
            raise SubstrateClientError("Rate limit exceeded -- slow down or upgrade your plan", code=-32029)

        if response.status_code not in (200, 202):
            raise SubstrateClientError(
                f"Unexpected HTTP {response.status_code}: {response.text[:200]}",
                code=response.status_code,
            )

        try:
            body = response.json()
        except ValueError as exc:
            # json.JSONDecodeError is a ValueError subclass, so one clause
            # covers both (the original tuple was redundant).
            raise SubstrateClientError(f"Invalid JSON response: {exc}") from exc

        # Handle JSON-RPC error envelope.
        if "error" in body:
            err = body["error"]
            raise SubstrateClientError(
                err.get("message", "Unknown error"),
                code=err.get("code", -1),
            )

        return body.get("result", {})

    def __enter__(self) -> SubstrateClient:
        return self

    def __exit__(self, *_: Any) -> None:
        self.close()
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SUBSTRATE Memory Provider for CrewAI.
|
|
3
|
+
|
|
4
|
+
Replaces CrewAI's default memory with SUBSTRATE's causal memory engine,
|
|
5
|
+
giving your crew persistent memory, emotional context, and identity
|
|
6
|
+
continuity across sessions.
|
|
7
|
+
|
|
8
|
+
This provider implements CrewAI's external memory interface and adds
|
|
9
|
+
SUBSTRATE-exclusive capabilities (emotion, trust, identity verification)
|
|
10
|
+
that no other memory provider offers.
|
|
11
|
+
"""
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import json
|
|
15
|
+
import logging
|
|
16
|
+
import os
|
|
17
|
+
from dataclasses import dataclass, field
|
|
18
|
+
from typing import Any
|
|
19
|
+
|
|
20
|
+
from crewai_substrate.client import (
|
|
21
|
+
SubstrateClient,
|
|
22
|
+
SubstrateClientConfig,
|
|
23
|
+
SubstrateClientError,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger("crewai_substrate.memory")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _extract_text(result: Any) -> str:
|
|
30
|
+
"""
|
|
31
|
+
Extract text content from a SUBSTRATE MCP tool result.
|
|
32
|
+
|
|
33
|
+
The MCP tools/call response wraps content in a ``content`` list of
|
|
34
|
+
typed blocks. This helper pulls out all text blocks and joins them.
|
|
35
|
+
"""
|
|
36
|
+
if isinstance(result, str):
|
|
37
|
+
return result
|
|
38
|
+
if isinstance(result, dict):
|
|
39
|
+
content = result.get("content", [])
|
|
40
|
+
if isinstance(content, list):
|
|
41
|
+
parts = [
|
|
42
|
+
block.get("text", "")
|
|
43
|
+
for block in content
|
|
44
|
+
if isinstance(block, dict) and block.get("type") == "text"
|
|
45
|
+
]
|
|
46
|
+
return "\n".join(parts)
|
|
47
|
+
# Fallback: return the dict as JSON
|
|
48
|
+
return json.dumps(result, indent=2)
|
|
49
|
+
return str(result)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _parse_json_safe(text: str) -> dict[str, Any]:
|
|
53
|
+
"""Parse JSON from text, returning an empty dict on failure."""
|
|
54
|
+
try:
|
|
55
|
+
return json.loads(text)
|
|
56
|
+
except (json.JSONDecodeError, TypeError):
|
|
57
|
+
return {}
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass(frozen=True)
class SubstrateMemoryConfig:
    """
    Configuration for the SUBSTRATE memory provider.

    ``api_key`` defaults to the ``SUBSTRATE_API_KEY`` environment variable.
    ``base_url`` defaults to the production SUBSTRATE MCP endpoint.
    """

    api_key: str = ""
    base_url: str = "https://substrate.garmolabs.com/mcp-server/mcp"
    timeout: float = 30.0

    def __post_init__(self) -> None:
        """Fall back to the SUBSTRATE_API_KEY env var when no key was given."""
        if self.api_key:
            return
        # frozen=True blocks normal assignment, so late binding must go
        # through object.__setattr__.
        object.__setattr__(
            self, "api_key", os.environ.get("SUBSTRATE_API_KEY", "")
        )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class SubstrateMemoryProvider:
    """
    CrewAI-compatible external memory provider backed by SUBSTRATE.

    Implements the standard ``save`` / ``search`` / ``reset`` interface that
    CrewAI expects, plus SUBSTRATE-exclusive methods for emotional context
    and entity state inspection.

    Usage::

        from crewai import Crew
        from crewai_substrate import SubstrateMemoryProvider

        memory = SubstrateMemoryProvider(api_key="sk_sub_...")
        crew = Crew(
            agents=[...],
            tasks=[...],
            memory=True,
            memory_config={"provider": memory},
        )
    """

    def __init__(
        self,
        api_key: str = "",
        base_url: str = "https://substrate.garmolabs.com/mcp-server/mcp",
        timeout: float = 30.0,
    ) -> None:
        """Create the provider.

        Args:
            api_key: SUBSTRATE API key; falls back to the SUBSTRATE_API_KEY
                env var (resolved by SubstrateMemoryConfig).
            base_url: MCP server endpoint URL.
            timeout: HTTP request timeout in seconds.

        Raises:
            ValueError: if no API key is available from either source.
        """
        config = SubstrateMemoryConfig(
            api_key=api_key,
            base_url=base_url,
            timeout=timeout,
        )
        if not config.api_key:
            raise ValueError(
                "SUBSTRATE API key required. Pass api_key= or set SUBSTRATE_API_KEY env var."
            )
        self._client = SubstrateClient(
            SubstrateClientConfig(
                api_key=config.api_key,
                base_url=config.base_url,
                timeout=config.timeout,
            )
        )

    # -- CrewAI memory interface -------------------------------------------

    def save(self, content: str, metadata: dict[str, Any] | None = None) -> str:
        """
        Store a memory through SUBSTRATE's ``respond`` tool.

        The entity processes the message through causal memory, values, and
        reflection layers. The returned text is the entity's response, which
        includes any extracted causal rules or insights.

        Args:
            content: The text content to store in memory.
            metadata: Optional metadata dict. If provided, it is serialized
                and appended to the message for context.

        Returns:
            The entity's response text after processing the memory.
        """
        message = content
        if metadata:
            message = f"{content}\n\n[metadata: {json.dumps(metadata)}]"

        try:
            result = self._client.call_tool("respond", {"message": message})
            return _extract_text(result)
        except SubstrateClientError as exc:
            logger.error("Failed to save memory: %s", exc)
            raise

    def search(self, query: str, limit: int = 5) -> list[dict[str, Any]]:
        """
        Search SUBSTRATE memory using hybrid retrieval (semantic + keyword).

        Falls back to ``memory_search`` if ``hybrid_search`` is not available
        on the current tier. Authentication failures are re-raised rather
        than falling back, so a bad API key surfaces instead of silently
        producing empty results.

        Args:
            query: The search query string.
            limit: Maximum number of results to return.

        Returns:
            A list of result dicts, each with at least a ``text`` key.
        """
        try:
            result = self._client.call_tool(
                "hybrid_search",
                {"query": query, "top_k": limit},
            )
            text = _extract_text(result)
            parsed = _parse_json_safe(text)
            if isinstance(parsed, dict) and "results" in parsed:
                return parsed["results"]
            return [{"text": text}]
        except SubstrateClientError as exc:
            message = str(exc).lower()
            # The client maps HTTP 401 onto code -32000 as well, so a pure
            # code check would make auth failures silently fall back (and
            # then return [] when memory_search also fails). Re-raise those.
            if "authentication" in message:
                raise
            # hybrid_search requires pro tier; fall back to memory_search
            if exc.code == -32000 or "not available" in message:
                logger.info("hybrid_search unavailable, falling back to memory_search")
                return self._fallback_search(query, limit)
            raise

    def reset(self) -> None:
        """
        No-op for SUBSTRATE.

        SUBSTRATE entities maintain persistent identity and memory across
        sessions by design. Calling reset is acknowledged but does not
        destroy memory -- the entity's continuity is preserved.
        """
        logger.info("reset() called -- SUBSTRATE memory is persistent; no data was cleared")

    # -- SUBSTRATE-exclusive methods ---------------------------------------

    def get_emotional_context(self) -> dict[str, Any]:
        """
        Get the entity's emotional state vector (UASV).

        Returns a dict with valence, arousal, dominance, and certainty
        dimensions. No other memory provider offers this capability.

        Returns:
            Dict with emotional dimensions and their current values, or
            ``{"raw": text}`` / ``{"error": message}`` when the response
            cannot be parsed or the call fails.
        """
        try:
            result = self._client.call_tool("get_emotion_state")
            text = _extract_text(result)
            parsed = _parse_json_safe(text)
            return parsed if parsed else {"raw": text}
        except SubstrateClientError as exc:
            logger.error("Failed to get emotional context: %s", exc)
            return {"error": str(exc)}

    def get_entity_state(self) -> dict[str, Any]:
        """
        Get the entity's identity verification and trust state.

        Combines ``verify_identity`` (cryptographic continuity proof) with
        ``get_trust_state`` (trust scores, consistency ratings). Each half
        degrades independently to an ``{"error": ...}`` sub-dict on failure.

        Returns:
            Dict with ``identity`` and ``trust`` sub-dicts.
        """
        state: dict[str, Any] = {}

        try:
            identity_result = self._client.call_tool("verify_identity")
            identity_text = _extract_text(identity_result)
            state["identity"] = _parse_json_safe(identity_text) or {"raw": identity_text}
        except SubstrateClientError as exc:
            logger.error("Failed to verify identity: %s", exc)
            state["identity"] = {"error": str(exc)}

        try:
            trust_result = self._client.call_tool("get_trust_state")
            trust_text = _extract_text(trust_result)
            state["trust"] = _parse_json_safe(trust_text) or {"raw": trust_text}
        except SubstrateClientError as exc:
            # get_trust_state requires pro tier
            logger.warning("get_trust_state unavailable: %s", exc)
            state["trust"] = {"error": str(exc)}

        return state

    def get_memory_stats(self) -> dict[str, Any]:
        """
        Get causal memory statistics.

        Returns episode count, rule count, average rule probability,
        and high-confidence rule count (or ``{"error": ...}`` on failure).
        """
        try:
            result = self._client.call_tool("memory_stats")
            text = _extract_text(result)
            parsed = _parse_json_safe(text)
            return parsed if parsed else {"raw": text}
        except SubstrateClientError as exc:
            logger.error("Failed to get memory stats: %s", exc)
            return {"error": str(exc)}

    # -- Internal ----------------------------------------------------------

    def _fallback_search(self, query: str, limit: int) -> list[dict[str, Any]]:
        """Use basic memory_search when hybrid_search is unavailable.

        Returns [] if the fallback also fails (logged, not raised).
        """
        try:
            result = self._client.call_tool("memory_search", {"query": query})
            text = _extract_text(result)
            parsed = _parse_json_safe(text)
            if isinstance(parsed, dict) and "results" in parsed:
                # memory_search has no top_k argument; trim client-side.
                return parsed["results"][:limit]
            return [{"text": text}]
        except SubstrateClientError as exc:
            logger.error("Fallback memory_search also failed: %s", exc)
            return []

    def close(self) -> None:
        """Release the underlying HTTP client."""
        self._client.close()

    def __enter__(self) -> SubstrateMemoryProvider:
        return self

    def __exit__(self, *_: Any) -> None:
        self.close()
|