langchain-substrate 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Binary file
@@ -0,0 +1,262 @@
1
+ Metadata-Version: 2.4
2
+ Name: langchain-substrate
3
+ Version: 0.1.0
4
+ Summary: SUBSTRATE cognitive memory integration for LangChain and LangGraph
5
+ Project-URL: Homepage, https://garmolabs.com
6
+ Project-URL: Documentation, https://substrate-engine.fly.dev/docs
7
+ Project-URL: Repository, https://github.com/PKaldone/SUBSTRATE
8
+ Author-email: Garmo Labs <hello@garmolabs.com>
9
+ License-Expression: MIT
10
+ Keywords: ai,cognitive,langchain,langgraph,memory,substrate
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
20
+ Requires-Python: >=3.10
21
+ Requires-Dist: httpx>=0.27
22
+ Requires-Dist: langchain-core>=0.3
23
+ Requires-Dist: langgraph>=0.2
24
+ Provides-Extra: dev
25
+ Requires-Dist: mypy>=1.13; extra == 'dev'
26
+ Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
27
+ Requires-Dist: pytest-cov>=5.0; extra == 'dev'
28
+ Requires-Dist: pytest>=8.0; extra == 'dev'
29
+ Requires-Dist: respx>=0.22; extra == 'dev'
30
+ Requires-Dist: ruff>=0.8; extra == 'dev'
31
+ Description-Content-Type: text/markdown
32
+
33
+ # langchain-substrate
34
+
35
+ SUBSTRATE cognitive memory integration for LangChain and LangGraph.
36
+
37
+ Use SUBSTRATE as a persistent memory store for LangGraph agents or as a retriever in LangChain RAG pipelines. SUBSTRATE provides causal memory, semantic search, knowledge graphs, emotion state, identity verification, and 61 cognitive capability layers.
38
+
39
+ ## Installation
40
+
41
+ ```bash
42
+ pip install langchain-substrate
43
+ ```
44
+
45
+ Or install from source:
46
+
47
+ ```bash
48
+ cd integrations/langchain
49
+ pip install -e ".[dev]"
50
+ ```
51
+
52
+ ## Quick Start
53
+
54
+ ### Environment Setup
55
+
56
+ ```python
57
+ import os
58
+ os.environ["SUBSTRATE_API_KEY"] = "sk-sub-..."
59
+ ```
60
+
61
+ ### As a LangGraph Memory Store
62
+
63
+ Use `SubstrateStore` as the backing store for any LangGraph agent. This gives your agent persistent, semantically searchable memory across conversations.
64
+
65
+ ```python
66
+ from langchain_substrate import SubstrateStore, SubstrateClient
67
+ from langgraph.prebuilt import create_react_agent
68
+ from langchain_openai import ChatOpenAI
69
+
70
+ # Create the SUBSTRATE-backed store
71
+ client = SubstrateClient(api_key=os.environ["SUBSTRATE_API_KEY"])
72
+ store = SubstrateStore(client=client)
73
+
74
+ # Create a LangGraph agent with SUBSTRATE memory
75
+ model = ChatOpenAI(model="gpt-4o")
76
+ agent = create_react_agent(model, tools=[], store=store)
77
+
78
+ # The agent now persists state to SUBSTRATE
79
+ config = {"configurable": {"thread_id": "conversation-1"}}
80
+ response = agent.invoke(
81
+ {"messages": [{"role": "user", "content": "Remember that my favorite color is blue."}]},
82
+ config=config,
83
+ )
84
+ ```
85
+
86
+ #### Store Operations
87
+
88
+ ```python
89
+ # Store a value
90
+ store.put(("user", "alice"), "preferences", {"theme": "dark", "language": "en"})
91
+
92
+ # Retrieve by key
93
+ item = store.get(("user", "alice"), "preferences")
94
+ print(item.value) # {"theme": "dark", "language": "en"}
95
+
96
+ # Semantic search across memory
97
+ results = store.search(("user", "alice"), query="color preferences", limit=5)
98
+ for item in results:
99
+ print(item.key, item.value)
100
+
101
+ # Delete is a no-op (SUBSTRATE memory is append-only)
102
+ store.delete(("user", "alice"), "preferences")
103
+ ```
104
+
105
+ #### Multi-Tenant Isolation
106
+
107
+ ```python
108
+ # Use namespace_prefix for tenant isolation
109
+ store = SubstrateStore(
110
+ client=client,
111
+ namespace_prefix="myapp.prod",
112
+ )
113
+ # All operations are scoped under "myapp.prod.*"
114
+ store.put(("user", "bob"), "state", {"step": 3})
115
+ ```
116
+
117
+ ### As a LangChain Retriever (RAG)
118
+
119
+ Use `SubstrateRetriever` in any LangChain RAG chain. It uses SUBSTRATE's hybrid search (semantic + keyword) to find relevant memories.
120
+
121
+ ```python
122
+ from langchain_substrate import SubstrateRetriever
123
+ from langchain_openai import ChatOpenAI
124
+ from langchain_core.prompts import ChatPromptTemplate
125
+ from langchain_core.runnables import RunnablePassthrough
126
+ from langchain_core.output_parsers import StrOutputParser
127
+
128
+ # Create the retriever
129
+ retriever = SubstrateRetriever(
130
+ api_key=os.environ["SUBSTRATE_API_KEY"],
131
+ top_k=5,
132
+ )
133
+
134
+ # Build a RAG chain
135
+ prompt = ChatPromptTemplate.from_template(
136
+ "Answer based on the following context:\n{context}\n\nQuestion: {question}"
137
+ )
138
+ model = ChatOpenAI(model="gpt-4o")
139
+
140
+ chain = (
141
+ {"context": retriever, "question": RunnablePassthrough()}
142
+ | prompt
143
+ | model
144
+ | StrOutputParser()
145
+ )
146
+
147
+ answer = chain.invoke("What are the entity's core values?")
148
+ ```
149
+
150
+ #### Retriever with Namespace Scoping
151
+
152
+ ```python
153
+ retriever = SubstrateRetriever(
154
+ api_key=os.environ["SUBSTRATE_API_KEY"],
155
+ namespace="app.conversations",
156
+ top_k=10,
157
+ )
158
+ ```
159
+
160
+ #### Free Tier Fallback
161
+
162
+ `hybrid_search` requires the Pro tier. On the free tier, fall back to `memory_search`:
163
+
164
+ ```python
165
+ retriever = SubstrateRetriever(
166
+ api_key=os.environ["SUBSTRATE_API_KEY"],
167
+ search_tool="memory_search",
168
+ )
169
+ ```
170
+
171
+ ### Async Support
172
+
173
+ All operations support async for use in async LangGraph workflows:
174
+
175
+ ```python
176
+ import asyncio
177
+ from langchain_substrate import SubstrateStore, SubstrateClient
178
+
179
+ async def main():
180
+ client = SubstrateClient(api_key="sk-sub-...")
181
+ store = SubstrateStore(client=client)
182
+
183
+ await store.aput(("user", "alice"), "mood", {"current": "happy"})
184
+ item = await store.aget(("user", "alice"), "mood")
185
+ print(item.value)
186
+
187
+ results = await store.asearch(("user",), query="emotional state")
188
+ for r in results:
189
+ print(r.value)
190
+
191
+ asyncio.run(main())
192
+ ```
193
+
194
+ ## Architecture
195
+
196
+ ```
197
+ LangGraph Agent / RAG Chain
198
+ |
199
+ SubstrateStore / SubstrateRetriever
200
+ |
201
+ SubstrateClient (httpx)
202
+ |
203
+ SUBSTRATE MCP Server (JSON-RPC over HTTP)
204
+ |
205
+ Causal Memory + Knowledge Graph + 61 Layers
206
+ ```
207
+
208
+ ### Namespace Encoding
209
+
210
+ LangGraph uses tuple namespaces like `("user", "alice", "prefs")`. SUBSTRATE uses flat string keys. The store encodes namespaces as dot-separated prefixes:
211
+
212
+ | LangGraph Namespace | SUBSTRATE Prefix |
213
+ |---|---|
214
+ | `("user", "alice")` | `user.alice` |
215
+ | `("app", "v2", "state")` | `app.v2.state` |
216
+
217
+ ### Tool Mapping
218
+
219
+ | Store Operation | SUBSTRATE Tool | Tier |
220
+ |---|---|---|
221
+ | `put()` | `respond` | Free |
222
+ | `get()` | `memory_search` | Free |
223
+ | `search()` | `hybrid_search` | Pro |
224
+ | `list_namespaces()` | N/A (limited) | -- |
225
+ | `delete()` | No-op | -- |
226
+
227
+ ## SUBSTRATE MCP Tools Available
228
+
229
+ | Tool | Description | Tier |
230
+ |---|---|---|
231
+ | `respond` | Send a message, get a response | Free |
232
+ | `memory_search` | Search causal memory episodes | Free |
233
+ | `hybrid_search` | Semantic + keyword search | Pro |
234
+ | `get_emotion_state` | Affective state vector | Free |
235
+ | `verify_identity` | Cryptographic identity check | Free |
236
+ | `knowledge_graph_query` | Query knowledge graph | Pro |
237
+ | `get_values` | Core value architecture | Free |
238
+ | `theory_of_mind` | User model | Free |
239
+ | `get_trust_state` | Trust scores | Pro |
240
+
241
+ ## Development
242
+
243
+ ```bash
244
+ # Install dev dependencies
245
+ pip install -e ".[dev]"
246
+
247
+ # Run tests
248
+ pytest
249
+
250
+ # Run with coverage
251
+ pytest --cov=langchain_substrate --cov-report=term-missing
252
+
253
+ # Lint
254
+ ruff check src/ tests/
255
+
256
+ # Type check
257
+ mypy src/
258
+ ```
259
+
260
+ ## License
261
+
262
+ MIT -- Garmo Labs
@@ -0,0 +1,230 @@
1
+ # langchain-substrate
2
+
3
+ SUBSTRATE cognitive memory integration for LangChain and LangGraph.
4
+
5
+ Use SUBSTRATE as a persistent memory store for LangGraph agents or as a retriever in LangChain RAG pipelines. SUBSTRATE provides causal memory, semantic search, knowledge graphs, emotion state, identity verification, and 61 cognitive capability layers.
6
+
7
+ ## Installation
8
+
9
+ ```bash
10
+ pip install langchain-substrate
11
+ ```
12
+
13
+ Or install from source:
14
+
15
+ ```bash
16
+ cd integrations/langchain
17
+ pip install -e ".[dev]"
18
+ ```
19
+
20
+ ## Quick Start
21
+
22
+ ### Environment Setup
23
+
24
+ ```python
25
+ import os
26
+ os.environ["SUBSTRATE_API_KEY"] = "sk-sub-..."
27
+ ```
28
+
29
+ ### As a LangGraph Memory Store
30
+
31
+ Use `SubstrateStore` as the backing store for any LangGraph agent. This gives your agent persistent, semantically searchable memory across conversations.
32
+
33
+ ```python
34
+ from langchain_substrate import SubstrateStore, SubstrateClient
35
+ from langgraph.prebuilt import create_react_agent
36
+ from langchain_openai import ChatOpenAI
37
+
38
+ # Create the SUBSTRATE-backed store
39
+ client = SubstrateClient(api_key=os.environ["SUBSTRATE_API_KEY"])
40
+ store = SubstrateStore(client=client)
41
+
42
+ # Create a LangGraph agent with SUBSTRATE memory
43
+ model = ChatOpenAI(model="gpt-4o")
44
+ agent = create_react_agent(model, tools=[], store=store)
45
+
46
+ # The agent now persists state to SUBSTRATE
47
+ config = {"configurable": {"thread_id": "conversation-1"}}
48
+ response = agent.invoke(
49
+ {"messages": [{"role": "user", "content": "Remember that my favorite color is blue."}]},
50
+ config=config,
51
+ )
52
+ ```
53
+
54
+ #### Store Operations
55
+
56
+ ```python
57
+ # Store a value
58
+ store.put(("user", "alice"), "preferences", {"theme": "dark", "language": "en"})
59
+
60
+ # Retrieve by key
61
+ item = store.get(("user", "alice"), "preferences")
62
+ print(item.value) # {"theme": "dark", "language": "en"}
63
+
64
+ # Semantic search across memory
65
+ results = store.search(("user", "alice"), query="color preferences", limit=5)
66
+ for item in results:
67
+ print(item.key, item.value)
68
+
69
+ # Delete is a no-op (SUBSTRATE memory is append-only)
70
+ store.delete(("user", "alice"), "preferences")
71
+ ```
72
+
73
+ #### Multi-Tenant Isolation
74
+
75
+ ```python
76
+ # Use namespace_prefix for tenant isolation
77
+ store = SubstrateStore(
78
+ client=client,
79
+ namespace_prefix="myapp.prod",
80
+ )
81
+ # All operations are scoped under "myapp.prod.*"
82
+ store.put(("user", "bob"), "state", {"step": 3})
83
+ ```
84
+
85
+ ### As a LangChain Retriever (RAG)
86
+
87
+ Use `SubstrateRetriever` in any LangChain RAG chain. It uses SUBSTRATE's hybrid search (semantic + keyword) to find relevant memories.
88
+
89
+ ```python
90
+ from langchain_substrate import SubstrateRetriever
91
+ from langchain_openai import ChatOpenAI
92
+ from langchain_core.prompts import ChatPromptTemplate
93
+ from langchain_core.runnables import RunnablePassthrough
94
+ from langchain_core.output_parsers import StrOutputParser
95
+
96
+ # Create the retriever
97
+ retriever = SubstrateRetriever(
98
+ api_key=os.environ["SUBSTRATE_API_KEY"],
99
+ top_k=5,
100
+ )
101
+
102
+ # Build a RAG chain
103
+ prompt = ChatPromptTemplate.from_template(
104
+ "Answer based on the following context:\n{context}\n\nQuestion: {question}"
105
+ )
106
+ model = ChatOpenAI(model="gpt-4o")
107
+
108
+ chain = (
109
+ {"context": retriever, "question": RunnablePassthrough()}
110
+ | prompt
111
+ | model
112
+ | StrOutputParser()
113
+ )
114
+
115
+ answer = chain.invoke("What are the entity's core values?")
116
+ ```
117
+
118
+ #### Retriever with Namespace Scoping
119
+
120
+ ```python
121
+ retriever = SubstrateRetriever(
122
+ api_key=os.environ["SUBSTRATE_API_KEY"],
123
+ namespace="app.conversations",
124
+ top_k=10,
125
+ )
126
+ ```
127
+
128
+ #### Free Tier Fallback
129
+
130
+ `hybrid_search` requires the Pro tier. On the free tier, fall back to `memory_search`:
131
+
132
+ ```python
133
+ retriever = SubstrateRetriever(
134
+ api_key=os.environ["SUBSTRATE_API_KEY"],
135
+ search_tool="memory_search",
136
+ )
137
+ ```
138
+
139
+ ### Async Support
140
+
141
+ All operations support async for use in async LangGraph workflows:
142
+
143
+ ```python
144
+ import asyncio
145
+ from langchain_substrate import SubstrateStore, SubstrateClient
146
+
147
+ async def main():
148
+ client = SubstrateClient(api_key="sk-sub-...")
149
+ store = SubstrateStore(client=client)
150
+
151
+ await store.aput(("user", "alice"), "mood", {"current": "happy"})
152
+ item = await store.aget(("user", "alice"), "mood")
153
+ print(item.value)
154
+
155
+ results = await store.asearch(("user",), query="emotional state")
156
+ for r in results:
157
+ print(r.value)
158
+
159
+ asyncio.run(main())
160
+ ```
161
+
162
+ ## Architecture
163
+
164
+ ```
165
+ LangGraph Agent / RAG Chain
166
+ |
167
+ SubstrateStore / SubstrateRetriever
168
+ |
169
+ SubstrateClient (httpx)
170
+ |
171
+ SUBSTRATE MCP Server (JSON-RPC over HTTP)
172
+ |
173
+ Causal Memory + Knowledge Graph + 61 Layers
174
+ ```
175
+
176
+ ### Namespace Encoding
177
+
178
+ LangGraph uses tuple namespaces like `("user", "alice", "prefs")`. SUBSTRATE uses flat string keys. The store encodes namespaces as dot-separated prefixes:
179
+
180
+ | LangGraph Namespace | SUBSTRATE Prefix |
181
+ |---|---|
182
+ | `("user", "alice")` | `user.alice` |
183
+ | `("app", "v2", "state")` | `app.v2.state` |
184
+
185
+ ### Tool Mapping
186
+
187
+ | Store Operation | SUBSTRATE Tool | Tier |
188
+ |---|---|---|
189
+ | `put()` | `respond` | Free |
190
+ | `get()` | `memory_search` | Free |
191
+ | `search()` | `hybrid_search` | Pro |
192
+ | `list_namespaces()` | N/A (limited) | -- |
193
+ | `delete()` | No-op | -- |
194
+
195
+ ## SUBSTRATE MCP Tools Available
196
+
197
+ | Tool | Description | Tier |
198
+ |---|---|---|
199
+ | `respond` | Send a message, get a response | Free |
200
+ | `memory_search` | Search causal memory episodes | Free |
201
+ | `hybrid_search` | Semantic + keyword search | Pro |
202
+ | `get_emotion_state` | Affective state vector | Free |
203
+ | `verify_identity` | Cryptographic identity check | Free |
204
+ | `knowledge_graph_query` | Query knowledge graph | Pro |
205
+ | `get_values` | Core value architecture | Free |
206
+ | `theory_of_mind` | User model | Free |
207
+ | `get_trust_state` | Trust scores | Pro |
208
+
209
+ ## Development
210
+
211
+ ```bash
212
+ # Install dev dependencies
213
+ pip install -e ".[dev]"
214
+
215
+ # Run tests
216
+ pytest
217
+
218
+ # Run with coverage
219
+ pytest --cov=langchain_substrate --cov-report=term-missing
220
+
221
+ # Lint
222
+ ruff check src/ tests/
223
+
224
+ # Type check
225
+ mypy src/
226
+ ```
227
+
228
+ ## License
229
+
230
+ MIT -- Garmo Labs
@@ -0,0 +1,70 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "langchain-substrate"
7
+ version = "0.1.0"
8
+ description = "SUBSTRATE cognitive memory integration for LangChain and LangGraph"
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.10"
12
+ authors = [
13
+ { name = "Garmo Labs", email = "hello@garmolabs.com" },
14
+ ]
15
+ keywords = ["langchain", "langgraph", "substrate", "memory", "ai", "cognitive"]
16
+ classifiers = [
17
+ "Development Status :: 4 - Beta",
18
+ "Intended Audience :: Developers",
19
+ "License :: OSI Approved :: MIT License",
20
+ "Programming Language :: Python :: 3",
21
+ "Programming Language :: Python :: 3.10",
22
+ "Programming Language :: Python :: 3.11",
23
+ "Programming Language :: Python :: 3.12",
24
+ "Programming Language :: Python :: 3.13",
25
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
26
+ ]
27
+ dependencies = [
28
+ "langchain-core>=0.3",
29
+ "langgraph>=0.2",
30
+ "httpx>=0.27",
31
+ ]
32
+
33
+ [project.optional-dependencies]
34
+ dev = [
35
+ "pytest>=8.0",
36
+ "pytest-asyncio>=0.24",
37
+ "pytest-cov>=5.0",
38
+ "respx>=0.22",
39
+ "ruff>=0.8",
40
+ "mypy>=1.13",
41
+ ]
42
+
43
+ [project.urls]
44
+ Homepage = "https://garmolabs.com"
45
+ Documentation = "https://substrate-engine.fly.dev/docs"
46
+ Repository = "https://github.com/PKaldone/SUBSTRATE"
47
+
48
+ [tool.hatch.build.targets.wheel]
49
+ packages = ["src/langchain_substrate"]
50
+
51
+ [tool.ruff]
52
+ target-version = "py310"
53
+ line-length = 100
54
+
55
+ [tool.ruff.lint]
56
+ select = ["E", "F", "I", "N", "W", "UP", "B", "SIM", "RUF"]
57
+
58
+ [tool.mypy]
59
+ python_version = "3.10"
60
+ strict = true
61
+ warn_return_any = true
62
+ warn_unused_configs = true
63
+
64
+ [tool.pytest.ini_options]
65
+ testpaths = ["tests"]
66
+ asyncio_mode = "auto"
67
+ markers = [
68
+ "unit: unit tests",
69
+ "integration: integration tests",
70
+ ]
@@ -0,0 +1,13 @@
"""Public API for langchain-substrate.

SUBSTRATE cognitive memory integration for LangChain and LangGraph.
Re-exports the HTTP client, the LangChain retriever, and the LangGraph
store so callers can write ``from langchain_substrate import SubstrateStore``
without reaching into submodules.
"""

from langchain_substrate.client import SubstrateClient
from langchain_substrate.retriever import SubstrateRetriever
from langchain_substrate.store import SubstrateStore

# Package version; keep in sync with ``version`` in pyproject.toml.
__version__ = "0.1.0"

# Names exported by ``from langchain_substrate import *``.
__all__ = [
    "SubstrateClient",
    "SubstrateRetriever",
    "SubstrateStore",
]