synapse-layer 2.3.0__tar.gz → 2.3.1__tar.gz

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: synapse-layer
3
- Version: 2.3.0
3
+ Version: 2.3.1
4
4
  Summary: Universal memory layer for AI agents - Persistent, Private, Model-agnostic
5
5
  Author-email: Synapse Layer <founder.synapselayer@proton.me>
6
6
  Project-URL: Homepage, https://synapselayer.org
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "synapse-layer"
7
- version = "2.3.0"
7
+ version = "2.3.1"
8
8
  authors = [
9
9
  { name="Synapse Layer", email="founder.synapselayer@proton.me" },
10
10
  ]
@@ -0,0 +1,70 @@
1
+ """
2
+ Synapse Layer — Universal memory layer for AI agents.
3
+ Persistent, Private, Model-agnostic.
4
+
5
+ MCP-native. LangChain-compatible. A2A-ready.
6
+
7
+ Core (always available):
8
+ from synapse_layer import SynapseA2AClient
9
+
10
+ LangChain adapter (requires langchain-core):
11
+ from synapse_layer import SynapseMemory, SynapseChatHistory
12
+
13
+ CrewAI tools (requires crewai):
14
+ from synapse_layer import SynapseStoreMemoryTool, SynapseRecallMemoryTool
15
+ """
16
+
17
+ __version__ = "2.3.1"
18
+
19
+ # Core — always importable, zero optional deps
20
+ from .a2a_client import SynapseA2AClient
21
+
22
+
23
+ # ---------------------------------------------------------------------------
24
+ # Lazy accessors for optional integrations
25
+ # ---------------------------------------------------------------------------
26
+
27
+ def __getattr__(name: str):
28
+ """Lazy-load optional adapters only when accessed."""
29
+
30
+ # LangChain adapters
31
+ if name in ("SynapseMemory", "SynapseChatHistory"):
32
+ try:
33
+ from . import langchain_memory as _lc # noqa: F811
34
+ except ImportError as exc:
35
+ raise ImportError(
36
+ f"{name} requires langchain-core. "
37
+ "Install with: pip install 'synapse-layer[langchain]'"
38
+ ) from exc
39
+ return getattr(_lc, name)
40
+
41
+ # CrewAI tools
42
+ if name in (
43
+ "SynapseStoreMemoryTool",
44
+ "SynapseRecallMemoryTool",
45
+ "SynapseHandoverTool",
46
+ ):
47
+ try:
48
+ from . import crewai_tools as _ct # noqa: F811
49
+ except ImportError as exc:
50
+ raise ImportError(
51
+ f"{name} requires crewai. "
52
+ "Install with: pip install 'synapse-layer[crewai]'"
53
+ ) from exc
54
+ return getattr(_ct, name)
55
+
56
+ raise AttributeError(f"module 'synapse_layer' has no attribute {name!r}")
57
+
58
+
59
+ __all__ = [
60
+ "__version__",
61
+ # Core
62
+ "SynapseA2AClient",
63
+ # LangChain (lazy)
64
+ "SynapseMemory",
65
+ "SynapseChatHistory",
66
+ # CrewAI (lazy)
67
+ "SynapseStoreMemoryTool",
68
+ "SynapseRecallMemoryTool",
69
+ "SynapseHandoverTool",
70
+ ]
@@ -8,6 +8,8 @@ Provides three CrewAI tools for Agent-to-Agent memory management:
8
8
 
9
9
  TQ Formula: TQ = (confidence_score * 0.4) + (recency_score * 0.3) + (usage_normalized * 0.3)
10
10
 
11
+ Requires: pip install 'synapse-layer[crewai]'
12
+
11
13
  Author: Ismael Marchi
12
14
  License: Apache 2.0
13
15
  """
@@ -20,8 +22,15 @@ from pydantic import BaseModel, Field
20
22
 
21
23
  try:
22
24
  from crewai.tools import BaseTool
25
+ _CREWAI_AVAILABLE = True
23
26
  except ImportError:
24
- raise ImportError("crewai is required for CrewAI tools. Install with: pip install 'synapse-layer[crewai]'")
27
+ _CREWAI_AVAILABLE = False
28
+
29
+ if not _CREWAI_AVAILABLE:
30
+ raise ImportError(
31
+ "crewai is required for CrewAI tools. "
32
+ "Install with: pip install 'synapse-layer[crewai]'"
33
+ )
25
34
 
26
35
  from .a2a_client import SynapseA2AClient, TaskResult
27
36
 
@@ -123,10 +132,8 @@ class _SynapseBaseTool(BaseTool):
123
132
  loop = None
124
133
 
125
134
  if loop is None:
126
- # No running loop, create new event loop
127
135
  return asyncio.run(coro)
128
136
  else:
129
- # Running loop exists, use ThreadPoolExecutor
130
137
  with ThreadPoolExecutor(max_workers=1) as executor:
131
138
  future = executor.submit(asyncio.run, coro)
132
139
  return future.result()
@@ -166,18 +173,6 @@ class SynapseStoreMemoryTool(_SynapseBaseTool):
166
173
  source_type: str = "api_response",
167
174
  confidence: float = 0.8,
168
175
  ) -> str:
169
- """
170
- Synchronous wrapper for storing memory.
171
-
172
- Args:
173
- user_id: User UUID
174
- content: Memory content
175
- source_type: Memory source (default: api_response)
176
- confidence: Confidence score 0.0-1.0 (default: 0.8)
177
-
178
- Returns:
179
- JSON string with TaskResult
180
- """
181
176
  coro = self._async_run(user_id, content, source_type, confidence)
182
177
  result = self._run_sync(coro)
183
178
  return str(result)
@@ -189,18 +184,6 @@ class SynapseStoreMemoryTool(_SynapseBaseTool):
189
184
  source_type: str = "api_response",
190
185
  confidence: float = 0.8,
191
186
  ) -> TaskResult:
192
- """
193
- Asynchronous implementation of store_memory.
194
-
195
- Args:
196
- user_id: User UUID
197
- content: Memory content
198
- source_type: Memory source
199
- confidence: Confidence score
200
-
201
- Returns:
202
- TaskResult
203
- """
204
187
  async with SynapseA2AClient(api_key=self.api_key, base_url=self.base_url) as client:
205
188
  return await client.store_memory(
206
189
  user_id=user_id,
@@ -216,18 +199,6 @@ class SynapseStoreMemoryTool(_SynapseBaseTool):
216
199
  source_type: str = "api_response",
217
200
  confidence: float = 0.8,
218
201
  ) -> str:
219
- """
220
- Async run method for CrewAI compatibility.
221
-
222
- Args:
223
- user_id: User UUID
224
- content: Memory content
225
- source_type: Memory source
226
- confidence: Confidence score
227
-
228
- Returns:
229
- JSON string with TaskResult
230
- """
231
202
  result = await self._async_run(user_id, content, source_type, confidence)
232
203
  return str(result)
233
204
 
@@ -243,13 +214,6 @@ class SynapseRecallMemoryTool(_SynapseBaseTool):
243
214
 
244
215
  Retrieves memories ordered by Trust Quotient (TQ) score.
245
216
  TQ = (confidence_score * 0.4) + (recency_score * 0.3) + (usage_normalized * 0.3)
246
-
247
- Args:
248
- api_key: Synapse Layer API key
249
- base_url: API endpoint (default: Supabase Edge Function)
250
-
251
- Returns:
252
- TaskResult with list of retrieved memories
253
217
  """
254
218
 
255
219
  name: str = "synapse_recall_memory"
@@ -265,17 +229,6 @@ class SynapseRecallMemoryTool(_SynapseBaseTool):
265
229
  query: str,
266
230
  limit: int = 10,
267
231
  ) -> str:
268
- """
269
- Synchronous wrapper for recalling memory.
270
-
271
- Args:
272
- user_id: User UUID
273
- query: Semantic search query
274
- limit: Max memories to retrieve (default: 10)
275
-
276
- Returns:
277
- JSON string with TaskResult
278
- """
279
232
  coro = self._async_run(user_id, query, limit)
280
233
  result = self._run_sync(coro)
281
234
  return str(result)
@@ -286,17 +239,6 @@ class SynapseRecallMemoryTool(_SynapseBaseTool):
286
239
  query: str,
287
240
  limit: int = 10,
288
241
  ) -> TaskResult:
289
- """
290
- Asynchronous implementation of recall_memory.
291
-
292
- Args:
293
- user_id: User UUID
294
- query: Semantic search query
295
- limit: Max memories to retrieve
296
-
297
- Returns:
298
- TaskResult
299
- """
300
242
  async with SynapseA2AClient(api_key=self.api_key, base_url=self.base_url) as client:
301
243
  return await client.recall_memory(
302
244
  user_id=user_id,
@@ -310,17 +252,6 @@ class SynapseRecallMemoryTool(_SynapseBaseTool):
310
252
  query: str,
311
253
  limit: int = 10,
312
254
  ) -> str:
313
- """
314
- Async run method for CrewAI compatibility.
315
-
316
- Args:
317
- user_id: User UUID
318
- query: Semantic search query
319
- limit: Max memories to retrieve
320
-
321
- Returns:
322
- JSON string with TaskResult
323
- """
324
255
  result = await self._async_run(user_id, query, limit)
325
256
  return str(result)
326
257
 
@@ -335,14 +266,6 @@ class SynapseHandoverTool(_SynapseBaseTool):
335
266
  Tool for creating Neural Handover context to target model.
336
267
 
337
268
  Packages agent context with HMAC-SHA256 signature for secure transfer.
338
- Supports handover to different AI models (Claude, GPT, etc.).
339
-
340
- Args:
341
- api_key: Synapse Layer API key
342
- base_url: API endpoint (default: Supabase Edge Function)
343
-
344
- Returns:
345
- TaskResult with handover context and signature
346
269
  """
347
270
 
348
271
  name: str = "synapse_create_handover"
@@ -358,17 +281,6 @@ class SynapseHandoverTool(_SynapseBaseTool):
358
281
  target_model: str,
359
282
  summary: str,
360
283
  ) -> str:
361
- """
362
- Synchronous wrapper for creating handover.
363
-
364
- Args:
365
- user_id: User UUID
366
- target_model: Target model ID
367
- summary: Context summary
368
-
369
- Returns:
370
- JSON string with TaskResult
371
- """
372
284
  coro = self._async_run(user_id, target_model, summary)
373
285
  result = self._run_sync(coro)
374
286
  return str(result)
@@ -379,17 +291,6 @@ class SynapseHandoverTool(_SynapseBaseTool):
379
291
  target_model: str,
380
292
  summary: str,
381
293
  ) -> TaskResult:
382
- """
383
- Asynchronous implementation of create_handover.
384
-
385
- Args:
386
- user_id: User UUID
387
- target_model: Target model ID
388
- summary: Context summary
389
-
390
- Returns:
391
- TaskResult
392
- """
393
294
  async with SynapseA2AClient(api_key=self.api_key, base_url=self.base_url) as client:
394
295
  return await client.create_handover(
395
296
  user_id=user_id,
@@ -403,17 +304,6 @@ class SynapseHandoverTool(_SynapseBaseTool):
403
304
  target_model: str,
404
305
  summary: str,
405
306
  ) -> str:
406
- """
407
- Async run method for CrewAI compatibility.
408
-
409
- Args:
410
- user_id: User UUID
411
- target_model: Target model ID
412
- summary: Context summary
413
-
414
- Returns:
415
- JSON string with TaskResult
416
- """
417
307
  result = await self._async_run(user_id, target_model, summary)
418
308
  return str(result)
419
309
 
@@ -9,6 +9,8 @@ Implements Trust Quotient (TQ) formula for memory evaluation:
9
9
  Two adapters provided:
10
10
  1. SynapseChatHistory — BaseChatMessageHistory for persistent chat histories
11
11
  2. SynapseMemory — BaseMemory for general agent memory management
12
+
13
+ Requires: pip install 'synapse-layer[langchain]'
12
14
  """
13
15
 
14
16
  import asyncio
@@ -20,10 +22,15 @@ try:
20
22
  from langchain_core.messages import BaseMessage, HumanMessage, AIMessage, SystemMessage
21
23
  from langchain_core.memory import BaseMemory
22
24
  from pydantic import Field, validator
23
- except ImportError as e:
25
+ _LANGCHAIN_AVAILABLE = True
26
+ except ImportError:
27
+ _LANGCHAIN_AVAILABLE = False
28
+
29
+ if not _LANGCHAIN_AVAILABLE:
24
30
  raise ImportError(
25
- "langchain-core is required. Install: pip install langchain-core"
26
- ) from e
31
+ "langchain-core is required for LangChain adapters. "
32
+ "Install with: pip install 'synapse-layer[langchain]'"
33
+ )
27
34
 
28
35
  from .a2a_client import SynapseA2AClient, TaskResult, TaskState
29
36
 
@@ -31,12 +38,12 @@ from .a2a_client import SynapseA2AClient, TaskResult, TaskState
31
38
  class SynapseChatHistory(BaseChatMessageHistory):
32
39
  """
33
40
  Chat message history backed by Synapse Layer persistent memory.
34
-
41
+
35
42
  Uses store_memory to persist each message and recall_memory to load
36
43
  conversation context. Applies TQ formula for smart recall:
37
-
44
+
38
45
  TQ = (confidence * 0.4) + (recency * 0.3) + (usage * 0.3)
39
-
46
+
40
47
  Example:
41
48
  history = SynapseChatHistory(
42
49
  api_key="synapse-key",
@@ -64,17 +71,17 @@ class SynapseChatHistory(BaseChatMessageHistory):
64
71
  async def add_message(self, message: BaseMessage) -> None:
65
72
  """
66
73
  Add message to history and persist via Synapse Layer.
67
-
74
+
68
75
  Automatically determines source_type from message class:
69
76
  - HumanMessage -> "user_input"
70
77
  - AIMessage -> "inference"
71
78
  - SystemMessage -> "system"
72
-
79
+
73
80
  Args:
74
81
  message: LangChain BaseMessage instance
75
82
  """
76
83
  self._messages.append(message)
77
-
84
+
78
85
  # Determine source type
79
86
  if isinstance(message, HumanMessage):
80
87
  source_type = "user_input"
@@ -91,7 +98,7 @@ class SynapseChatHistory(BaseChatMessageHistory):
91
98
  async def _persist(self, content: str, source_type: str) -> None:
92
99
  """
93
100
  Persist message content to Synapse Layer.
94
-
101
+
95
102
  Args:
96
103
  content: Message text to store
97
104
  source_type: Origin type (user_input, inference, system)
@@ -117,18 +124,18 @@ class SynapseChatHistory(BaseChatMessageHistory):
117
124
  class SynapseMemory(BaseMemory):
118
125
  """
119
126
  General agent memory backed by Synapse Layer persistent storage.
120
-
127
+
121
128
  Implements BaseMemory interface for LangChain agents. Applies Trust Quotient
122
129
  formula to all recall operations:
123
-
130
+
124
131
  TQ = (confidence * 0.4) + (recency * 0.3) + (usage * 0.3)
125
-
132
+
126
133
  Supports:
127
134
  - Persistent memory across sessions
128
135
  - Semantic search via pgvector HNSW
129
- - Multi-model context sharing via Neural Handover
136
+ - Multi-model context sharing
130
137
  - Automatic conflict resolution via Consensus Engine
131
-
138
+
132
139
  Example:
133
140
  memory = SynapseMemory(
134
141
  api_key="synapse-key",
@@ -167,13 +174,13 @@ class SynapseMemory(BaseMemory):
167
174
  async def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
168
175
  """
169
176
  Load relevant memories based on input query.
170
-
177
+
171
178
  Uses recall_memory with TQ weighting to find relevant context:
172
179
  TQ = (confidence * 0.4) + (recency * 0.3) + (usage * 0.3)
173
-
180
+
174
181
  Args:
175
182
  inputs: Dict with at least input_key containing the query
176
-
183
+
177
184
  Returns:
178
185
  Dict with memory_key -> formatted memory text
179
186
  """
@@ -187,12 +194,12 @@ class SynapseMemory(BaseMemory):
187
194
  async def _recall(self, query: str) -> str:
188
195
  """
189
196
  Recall memories matching query.
190
-
197
+
191
198
  Applies Trust Quotient formula and formats results.
192
-
199
+
193
200
  Args:
194
201
  query: Natural language query
195
-
202
+
196
203
  Returns:
197
204
  Formatted memory context string
198
205
  """
@@ -226,22 +233,20 @@ class SynapseMemory(BaseMemory):
226
233
  def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
227
234
  """
228
235
  Save input/output pair to memory (sync wrapper).
229
-
236
+
230
237
  For async operation, use _store directly.
231
-
238
+
232
239
  Args:
233
240
  inputs: Agent inputs dict
234
241
  outputs: Agent outputs dict
235
242
  """
236
- # For sync context, we can't await. Use asyncio.run if in main thread.
237
243
  input_text = inputs.get(self.input_key, "")
238
244
  output_text = outputs.get("output", "")
239
-
245
+
240
246
  if input_text:
241
247
  try:
242
248
  asyncio.run(self._store(input_text, "user_input"))
243
249
  except RuntimeError:
244
- # Already in event loop, schedule as task
245
250
  pass
246
251
 
247
252
  if output_text:
@@ -253,7 +258,7 @@ class SynapseMemory(BaseMemory):
253
258
  async def _store(self, content: str, source_type: str) -> None:
254
259
  """
255
260
  Store content to Synapse memory.
256
-
261
+
257
262
  Args:
258
263
  content: Text to store
259
264
  source_type: Origin type (user_input, inference, system)
@@ -274,7 +279,7 @@ class SynapseMemory(BaseMemory):
274
279
  def clear(self) -> None:
275
280
  """
276
281
  Clear operation not supported for persistent Synapse memory.
277
-
282
+
278
283
  Memories are designed to persist across sessions. To remove specific
279
284
  memories, use SynapseA2AClient.forget_memory() directly.
280
285
  """
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: synapse-layer
3
- Version: 2.3.0
3
+ Version: 2.3.1
4
4
  Summary: Universal memory layer for AI agents - Persistent, Private, Model-agnostic
5
5
  Author-email: Synapse Layer <founder.synapselayer@proton.me>
6
6
  Project-URL: Homepage, https://synapselayer.org
@@ -1,13 +0,0 @@
1
- """
2
- Synapse Layer — Universal memory layer for AI agents.
3
- Persistent, Private, Model-agnostic.
4
-
5
- MCP-native. LangChain-compatible. A2A-ready.
6
- """
7
-
8
- __version__ = "2.3.0"
9
-
10
- from .a2a_client import SynapseA2AClient
11
- from .langchain_memory import SynapseMemory
12
-
13
- __all__ = ["SynapseA2AClient", "SynapseMemory", "__version__"]
File without changes
File without changes