loom_agent-0.3.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of loom-agent might be problematic.

Files changed (51)
  1. loom/__init__.py +1 -0
  2. loom/adapters/converters.py +77 -0
  3. loom/adapters/registry.py +43 -0
  4. loom/api/factory.py +77 -0
  5. loom/api/main.py +201 -0
  6. loom/builtin/__init__.py +3 -0
  7. loom/builtin/memory/__init__.py +3 -0
  8. loom/builtin/memory/metabolic.py +96 -0
  9. loom/builtin/memory/pso.py +41 -0
  10. loom/builtin/memory/sanitizers.py +39 -0
  11. loom/builtin/memory/validators.py +55 -0
  12. loom/config/tool.py +63 -0
  13. loom/infra/__init__.py +0 -0
  14. loom/infra/llm.py +43 -0
  15. loom/infra/logging.py +42 -0
  16. loom/infra/store.py +39 -0
  17. loom/infra/transport/memory.py +85 -0
  18. loom/infra/transport/nats.py +141 -0
  19. loom/infra/transport/redis.py +140 -0
  20. loom/interfaces/llm.py +44 -0
  21. loom/interfaces/memory.py +50 -0
  22. loom/interfaces/store.py +29 -0
  23. loom/interfaces/transport.py +35 -0
  24. loom/kernel/__init__.py +0 -0
  25. loom/kernel/base_interceptor.py +97 -0
  26. loom/kernel/bus.py +76 -0
  27. loom/kernel/dispatcher.py +58 -0
  28. loom/kernel/interceptors/__init__.py +14 -0
  29. loom/kernel/interceptors/budget.py +60 -0
  30. loom/kernel/interceptors/depth.py +45 -0
  31. loom/kernel/interceptors/hitl.py +51 -0
  32. loom/kernel/interceptors/studio.py +137 -0
  33. loom/kernel/interceptors/timeout.py +27 -0
  34. loom/kernel/state.py +71 -0
  35. loom/memory/hierarchical.py +94 -0
  36. loom/node/__init__.py +0 -0
  37. loom/node/agent.py +133 -0
  38. loom/node/base.py +121 -0
  39. loom/node/crew.py +103 -0
  40. loom/node/router.py +68 -0
  41. loom/node/tool.py +50 -0
  42. loom/protocol/__init__.py +0 -0
  43. loom/protocol/cloudevents.py +73 -0
  44. loom/protocol/interfaces.py +110 -0
  45. loom/protocol/mcp.py +97 -0
  46. loom/protocol/memory_operations.py +51 -0
  47. loom/protocol/patch.py +93 -0
  48. loom_agent-0.3.2.dist-info/LICENSE +204 -0
  49. loom_agent-0.3.2.dist-info/METADATA +139 -0
  50. loom_agent-0.3.2.dist-info/RECORD +51 -0
  51. loom_agent-0.3.2.dist-info/WHEEL +4 -0
loom/config/tool.py ADDED
@@ -0,0 +1,63 @@
+ """
+ Tool Configuration & Factory
+ """
+
+ import importlib
+ import os
+ from typing import Dict, Any, Optional, Callable
+ from pydantic import BaseModel, Field
+
+ from loom.node.tool import ToolNode
+ from loom.protocol.mcp import MCPToolDefinition
+ from loom.kernel.dispatcher import Dispatcher
+
+ class ToolConfig(BaseModel):
+     """
+     Configuration for a Tool.
+     """
+     name: str
+     description: str = ""
+     python_path: str = Field(..., description="Dot-path to the python function e.g. 'my_pkg.tools.search'")
+     parameters: Dict[str, Any] = Field(default_factory=dict, description="Input schema properties")
+     env_vars: Dict[str, str] = Field(default_factory=dict)
+
+ class ToolFactory:
+     """
+     Factory to load valid ToolConfigs into ToolNodes.
+     """
+
+     @staticmethod
+     def create_node(
+         config: ToolConfig,
+         node_id: str,
+         dispatcher: Dispatcher
+     ) -> ToolNode:
+         # 1. Load function
+         module_name, func_name = config.python_path.rsplit(".", 1)
+         try:
+             mod = importlib.import_module(module_name)
+             func = getattr(mod, func_name)
+         except (ImportError, AttributeError) as e:
+             raise ValueError(f"Could not load tool function {config.python_path}: {e}")
+
+         # 2. Apply Env Vars
+         for k, v in config.env_vars.items():
+             os.environ[k] = v
+
+         # 3. Create Definition
+         tool_def = MCPToolDefinition(
+             name=config.name,
+             description=config.description,
+             inputSchema={
+                 "type": "object",
+                 "properties": config.parameters
+             }
+         )
+
+         # 4. Create Node
+         return ToolNode(
+             node_id=node_id,
+             dispatcher=dispatcher,
+             tool_def=tool_def,
+             func=func
+         )
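As a usage sketch: a ToolConfig can point at any importable dot-path. Building the full ToolNode additionally needs a Dispatcher (see loom/kernel/dispatcher.py), whose construction is not shown in this diff, so json.dumps stands in here for a real tool function and create_node is only indicated in a comment.

# Hedged sketch of the config half: validation plus the dot-path convention.
from loom.config.tool import ToolConfig, ToolFactory

config = ToolConfig(
    name="dumps",
    description="Serialize arguments to JSON",
    python_path="json.dumps",  # any importable dot-path works
    parameters={"obj": {"type": "object"}},
)

# ToolFactory.create_node(config, node_id="tool-dumps", dispatcher=...)
# would resolve "json.dumps" via importlib and wrap it in a ToolNode.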
loom/infra/__init__.py ADDED
File without changes
loom/infra/llm.py ADDED
@@ -0,0 +1,43 @@
+ """
+ Mock LLM Provider for Testing
+ """
+
+ from typing import List, Dict, Any, AsyncIterator, Optional
+ from loom.interfaces.llm import LLMProvider, LLMResponse
+
+ class MockLLMProvider(LLMProvider):
+     """
+     A Mock Provider that returns canned responses.
+     Useful for unit testing and demos without API keys.
+     """
+
+     async def chat(
+         self,
+         messages: List[Dict[str, Any]],
+         tools: Optional[List[Dict[str, Any]]] = None
+     ) -> LLMResponse:
+         last_msg = messages[-1]["content"].lower()
+
+         # Simple keywords
+         if "search" in last_msg:
+             # Simulate Tool Call
+             query = last_msg.replace("search", "").strip() or "fractal"
+             return LLMResponse(
+                 content="",
+                 tool_calls=[{
+                     "name": "search",
+                     "arguments": {"query": query},
+                     "id": "call_mock_123"
+                 }]
+             )
+
+         return LLMResponse(content=f"Mock response to: {last_msg}")
+
+     async def stream_chat(
+         self,
+         messages: List[Dict[str, Any]],
+         tools: Optional[List[Dict[str, Any]]] = None
+     ) -> AsyncIterator[str]:
+         yield "Mock "
+         yield "stream "
+         yield "response."
loom/infra/logging.py ADDED
@@ -0,0 +1,42 @@
+ """
+ Structured Logging Configuration.
+ """
+
+ import logging
+ import sys
+ import structlog
+ from typing import Any
+
+ def configure_logging(log_level: str = "INFO", json_format: bool = False) -> None:
+     """
+     Configure standard logging and structlog.
+     """
+     level = getattr(logging, log_level.upper(), logging.INFO)
+
+     # Configure processors
+     processors = [
+         structlog.contextvars.merge_contextvars,
+         structlog.processors.add_log_level,
+         structlog.processors.StackInfoRenderer(),
+         structlog.dev.set_exc_info,
+         structlog.processors.TimeStamper(fmt="iso"),
+     ]
+
+     if json_format:
+         processors.append(structlog.processors.JSONRenderer())
+     else:
+         processors.append(structlog.dev.ConsoleRenderer(colors=True))
+
+     structlog.configure(
+         processors=processors,
+         wrapper_class=structlog.make_filtering_bound_logger(level),
+         context_class=dict,
+         logger_factory=structlog.PrintLoggerFactory(),
+         cache_logger_on_first_use=True
+     )
+
+ def get_logger(name: str) -> Any:
+     """
+     Get a structured logger.
+     """
+     return structlog.get_logger(name)
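Typical usage would be to call configure_logging once at process startup and fetch a bound logger per module; structlog's key/value event style then applies throughout:

# Sketch: configure once at startup, then emit structured key/value events.
from loom.infra.logging import configure_logging, get_logger

configure_logging(log_level="DEBUG", json_format=False)
log = get_logger(__name__)
log.info("transport_connected", transport="memory")  # rendered by ConsoleRenderer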
loom/infra/store.py ADDED
@@ -0,0 +1,39 @@
+ """
+ In-Memory Event Store Implementation
+ """
+
+ from typing import List, Any
+ from loom.interfaces.store import EventStore
+ from loom.protocol.cloudevents import CloudEvent
+
+ class InMemoryEventStore(EventStore):
+     """
+     Simple in-memory list storage for events.
+     Useful for testing and local demos.
+     """
+
+     def __init__(self):
+         self._storage: List[CloudEvent] = []
+
+     async def append(self, event: CloudEvent) -> None:
+         self._storage.append(event)
+
+     async def get_events(self, limit: int = 100, offset: int = 0, **filters) -> List[CloudEvent]:
+         """
+         Naive implementation of filtering.
+         """
+         filtered = self._storage
+
+         # Apply filters
+         # e.g. get_events(source="/agent/a")
+         if filters:
+             filtered = [
+                 e for e in filtered
+                 if all(getattr(e, k, None) == v for k, v in filters.items())
+             ]
+
+         # Apply pagination
+         return filtered[offset : offset + limit]
+
+     def clear(self):
+         self._storage.clear()
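A usage sketch, with the caveat that the CloudEvent constructor arguments below are an assumption: the model lives in loom/protocol/cloudevents.py, which is not reproduced here, and may require further attributes.

# Hedged sketch: append, then filter by attribute. CloudEvent field names
# are assumed from the CloudEvents spec; the real model may need more.
import asyncio

from loom.infra.store import InMemoryEventStore
from loom.protocol.cloudevents import CloudEvent

async def main():
    store = InMemoryEventStore()
    await store.append(CloudEvent(type="node.request", source="/agent/a"))
    await store.append(CloudEvent(type="node.request", source="/agent/b"))
    events = await store.get_events(source="/agent/a")  # keyword attribute filter
    print(len(events))  # 1

asyncio.run(main())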
loom/infra/transport/memory.py ADDED
@@ -0,0 +1,85 @@
+
+ import asyncio
+ from typing import Dict, List, Set
+ from collections import defaultdict
+ import logging
+
+ from loom.interfaces.transport import Transport, EventHandler
+ from loom.protocol.cloudevents import CloudEvent
+
+ logger = logging.getLogger(__name__)
+
+ class InMemoryTransport(Transport):
+     """
+     In-memory transport implementation using asyncio.Queue/Event.
+     Default for local development.
+     """
+
+     def __init__(self):
+         self._connected = False
+         self._handlers: Dict[str, List[EventHandler]] = defaultdict(list)
+         # For wildcard support: "node.request/*" -> [handler1, handler2]
+         self._wildcard_handlers: Dict[str, List[EventHandler]] = defaultdict(list)
+
+     async def connect(self) -> None:
+         self._connected = True
+         logger.info("InMemoryTransport connected")
+
+     async def disconnect(self) -> None:
+         self._connected = False
+         self._handlers.clear()
+         self._wildcard_handlers.clear()
+         logger.info("InMemoryTransport disconnected")
+
+     async def publish(self, topic: str, event: CloudEvent) -> None:
+         if not self._connected:
+             logger.warning("InMemoryTransport not connected, dropping event")
+             return
+
+         # Direct dispatch to handlers
+         await self._dispatch(topic, event)
+
+     async def subscribe(self, topic: str, handler: EventHandler) -> None:
+         if "*" in topic:
+             self._wildcard_handlers[topic].append(handler)
+         else:
+             self._handlers[topic].append(handler)
+
+     async def _dispatch(self, topic: str, event: CloudEvent) -> None:
+         targets: Set[EventHandler] = set()
+
+         # 1. Exact match
+         if topic in self._handlers:
+             targets.update(self._handlers[topic])
+
+         # 2. Wildcard match (simple prefix/suffix matching)
+         for pattern, handlers in self._wildcard_handlers.items():
+             if self._match(topic, pattern):
+                 targets.update(handlers)
+
+         # 3. Execute handlers. Each handler runs in its own fire-and-forget
+         #    task so one slow handler cannot block the others.
+         for handler in targets:
+             try:
+                 asyncio.create_task(self._safe_exec(handler, event))
+             except Exception as e:
+                 logger.error(f"Error dispatching to handler: {e}")
+
+     async def _safe_exec(self, handler: EventHandler, event: CloudEvent):
+         try:
+             await handler(event)
+         except Exception as e:
+             logger.error(f"Handler failed: {e}")
+
+     def _match(self, topic: str, pattern: str) -> bool:
+         # Simple glob matching
+         if pattern == "*":
+             return True
+         if pattern.endswith("*"):
+             return topic.startswith(pattern[:-1])
+         if pattern.startswith("*"):
+             return topic.endswith(pattern[1:])
+         return topic == pattern
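End to end, the in-memory transport can be exercised like this (CloudEvent construction again assumed as above; handlers run in fire-and-forget tasks, hence the final sleep):

# Sketch: wildcard subscription plus publish over InMemoryTransport.
import asyncio

from loom.infra.transport.memory import InMemoryTransport
from loom.protocol.cloudevents import CloudEvent

async def main():
    transport = InMemoryTransport()
    await transport.connect()

    async def on_event(event: CloudEvent) -> None:
        print("got", event.type)

    await transport.subscribe("node.request/*", on_event)  # prefix wildcard
    await transport.publish("node.request/agent",
                            CloudEvent(type="node.request", source="/demo"))
    await asyncio.sleep(0)  # let the spawned handler task run
    await transport.disconnect()

asyncio.run(main())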
loom/infra/transport/nats.py ADDED
@@ -0,0 +1,141 @@
+
+ import asyncio
+ import logging
+ from typing import Dict, List, Optional
+ try:
+     import nats
+     from nats.aio.client import Client as NATSClient
+     from nats.js import JetStreamContext
+ except ImportError:
+     nats = None
+     NATSClient = None  # type: ignore
+     JetStreamContext = None  # type: ignore
+
+ from loom.interfaces.transport import Transport, EventHandler
+ from loom.protocol.cloudevents import CloudEvent
+
+ logger = logging.getLogger(__name__)
+
+ class NATSTransport(Transport):
+     """
+     NATS Transport Implementation.
+     Supports Core NATS and JetStream (optional).
+     Requires the 'nats-py' package.
+     """
+
+     def __init__(
+         self,
+         servers: List[str] = ["nats://localhost:4222"],
+         use_jetstream: bool = False,
+         stream_name: str = "LOOM_EVENTS",
+     ):
+         if not nats:
+             raise ImportError("nats-py package is required for NATSTransport. Install with 'pip install nats-py'")
+
+         self.servers = servers
+         self.use_jetstream = use_jetstream
+         self.stream_name = stream_name
+
+         self.nc: Optional[NATSClient] = None
+         self.js: Optional[JetStreamContext] = None
+         self._handlers: Dict[str, List[EventHandler]] = {}
+         self._subscriptions: List = []
+         self._connected = False
+
+     async def connect(self) -> None:
+         try:
+             self.nc = await nats.connect(servers=self.servers)
+
+             if self.use_jetstream:
+                 self.js = self.nc.jetstream()
+                 # Stream creation is best-effort: the stream may already
+                 # exist or be managed externally.
+                 try:
+                     await self.js.add_stream(name=self.stream_name, subjects=["loom.>"])
+                 except Exception as e:
+                     logger.debug(f"Stream creation check: {e}")
+
+             self._connected = True
+             logger.info(f"NATSTransport connected to {self.servers}")
+         except Exception as e:
+             logger.error(f"NATS connection failed: {e}")
+             raise
+
+     async def disconnect(self) -> None:
+         self._connected = False
+
+         for sub in self._subscriptions:
+             try:
+                 await sub.unsubscribe()
+             except Exception:
+                 pass
+
+         if self.nc:
+             await self.nc.close()
+
+         logger.info("NATSTransport disconnected")
+
+     async def publish(self, topic: str, event: CloudEvent) -> None:
+         if not self._connected:
+             raise RuntimeError("NATSTransport not connected")
+
+         # NATS subject: loom.{topic} (with / replaced by .)
+         subject = self._to_subject(topic)
+         payload = event.model_dump_json().encode()
+
+         if self.use_jetstream and self.js:
+             await self.js.publish(subject, payload)
+         else:
+             await self.nc.publish(subject, payload)
+
+     async def subscribe(self, topic: str, handler: EventHandler) -> None:
+         if not self._connected:
+             raise RuntimeError("NATSTransport not connected")
+
+         # Normalize the subject for wildcards.
+         # NATS wildcards: * (one token), > (tail); Loom uses a trailing *.
+         # A topic ending in * therefore maps to the NATS tail wildcard >.
+         subject = self._to_subject(topic)
+         if subject.endswith(".*"):
+             subject = subject[:-2] + ".>"
+
+         if topic not in self._handlers:
+             self._handlers[topic] = []
+
+         async def cb(msg):
+             try:
+                 data = msg.data.decode()
+                 event = CloudEvent.model_validate_json(data)
+                 # Invoke every handler registered for this topic.
+                 handlers = self._handlers.get(topic, [])
+                 for h in handlers:
+                     asyncio.create_task(self._safe_exec(h, event))
+             except Exception as e:
+                 logger.error(f"Error handling NATS message: {e}")
+
+         if self.use_jetstream and self.js:
+             # Ephemeral consumer for now; a durable consumer would need
+             # configuration beyond this interface.
+             sub = await self.js.subscribe(subject, cb=cb)
+         else:
+             sub = await self.nc.subscribe(subject, cb=cb)
+
+         self._subscriptions.append(sub)
+
+         self._handlers[topic].append(handler)
+         logger.debug(f"Subscribed to {subject}")
+
+     async def _safe_exec(self, handler: EventHandler, event: CloudEvent):
+         try:
+             await handler(event)
+         except Exception as e:
+             logger.error(f"Handler failed: {e}")
+
+     def _to_subject(self, topic: str) -> str:
+         # Prefix with "loom." and replace / with .
+         # e.g. node.request/agent -> loom.node.request.agent
+         safe_topic = topic.replace("/", ".")
+         return f"loom.{safe_topic}"
loom/infra/transport/redis.py ADDED
@@ -0,0 +1,140 @@
+
+ import asyncio
+ import json
+ import logging
+ from typing import Dict, List, Optional
+ try:
+     import redis.asyncio as aioredis
+ except ImportError:
+     aioredis = None
+
+ from loom.interfaces.transport import Transport, EventHandler
+ from loom.protocol.cloudevents import CloudEvent
+
+ logger = logging.getLogger(__name__)
+
+ class RedisTransport(Transport):
+     """
+     Redis Pub/Sub Transport.
+     Requires the 'redis' package.
+     """
+
+     def __init__(self, redis_url: str = "redis://localhost:6379"):
+         if not aioredis:
+             raise ImportError("redis package is required for RedisTransport. Install with 'pip install redis'")
+
+         self.redis_url = redis_url
+         self.redis: Optional[aioredis.Redis] = None
+         self.pubsub: Optional[aioredis.client.PubSub] = None
+         self._handlers: Dict[str, List[EventHandler]] = {}
+         self._connected = False
+         self._listen_task: Optional[asyncio.Task] = None
+
+     async def connect(self) -> None:
+         try:
+             self.redis = aioredis.from_url(self.redis_url, decode_responses=True)
+             await self.redis.ping()
+             self.pubsub = self.redis.pubsub()
+             self._connected = True
+
+             # Start listener loop
+             self._listen_task = asyncio.create_task(self._listener())
+             logger.info(f"RedisTransport connected to {self.redis_url}")
+         except Exception as e:
+             logger.error(f"Redis connection failed: {e}")
+             raise
+
+     async def disconnect(self) -> None:
+         self._connected = False
+         if self._listen_task:
+             self._listen_task.cancel()
+             try:
+                 await self._listen_task
+             except asyncio.CancelledError:
+                 pass
+
+         if self.pubsub:
+             await self.pubsub.close()
+
+         if self.redis:
+             await self.redis.close()
+
+         logger.info("RedisTransport disconnected")
+
+     async def publish(self, topic: str, event: CloudEvent) -> None:
+         if not self._connected:
+             raise RuntimeError("RedisTransport not connected")
+
+         # Redis channel convention: loom.{topic}
+         channel = self._to_channel(topic)
+         payload = event.model_dump_json()
+         await self.redis.publish(channel, payload)
+
+     async def subscribe(self, topic: str, handler: EventHandler) -> None:
+         if not self._connected:
+             raise RuntimeError("RedisTransport not connected")
+
+         # Map the loom topic to a Redis channel pattern
+         channel = self._to_channel(topic)
+
+         if topic not in self._handlers:
+             self._handlers[topic] = []
+             # Subscribe in Redis
+             await self.pubsub.psubscribe(channel)
+
+         self._handlers[topic].append(handler)
+         logger.debug(f"Subscribed to {channel}")
+
+     async def _listener(self):
+         try:
+             async for message in self.pubsub.listen():
+                 if message["type"] == "pmessage":
+                     channel = message["channel"]
+                     data = message["data"]
+                     await self._handle_message(channel, data)
+         except asyncio.CancelledError:
+             pass
+         except Exception as e:
+             logger.error(f"Redis listener error: {e}")
+
+     async def _handle_message(self, channel: str, data: str):
+         # Redis performs the pattern matching for PSUBSCRIBE, but handlers
+         # are registered by topic, so the incoming channel is re-matched
+         # against each registered pattern to find the right handlers.
+         # Slightly inefficient with many patterns, but robust.
+         try:
+             event = CloudEvent.model_validate_json(data)
+
+             for topic, handlers in self._handlers.items():
+                 if self._match(channel, self._to_channel(topic)):
+                     for handler in handlers:
+                         asyncio.create_task(self._safe_exec(handler, event))
+         except Exception as e:
+             logger.error(f"Failed to handle Redis message: {e}")
+
+     async def _safe_exec(self, handler: EventHandler, event: CloudEvent):
+         try:
+             await handler(event)
+         except Exception as e:
+             logger.error(f"Handler failed: {e}")
+
+     def _to_channel(self, topic: str) -> str:
+         # Loom topics are dot-separated (e.g. "node.request"); a * in the
+         # topic acts as a wildcard and maps directly to a Redis pattern.
+         return f"loom.{topic}"
+
+     def _match(self, channel: str, pattern: str) -> bool:
+         # Redis-style glob matching implemented in Python for dispatch
+         import fnmatch
+         return fnmatch.fnmatch(channel, pattern)
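The Redis variant looks nearly identical from the caller's side (requires a reachable Redis and 'pip install redis'; CloudEvent construction assumed as before):

# Sketch only: needs the redis package and a Redis server on localhost.
import asyncio

from loom.infra.transport.redis import RedisTransport
from loom.protocol.cloudevents import CloudEvent

async def main():
    transport = RedisTransport(redis_url="redis://localhost:6379")
    await transport.connect()

    async def on_event(event: CloudEvent) -> None:
        print("event via Redis:", event.type)

    # Topic "node.request.*" maps to PSUBSCRIBE pattern "loom.node.request.*"
    await transport.subscribe("node.request.*", on_event)
    await transport.publish("node.request.agent",
                            CloudEvent(type="node.request", source="/demo"))
    await asyncio.sleep(0.1)
    await transport.disconnect()

asyncio.run(main())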
loom/interfaces/llm.py ADDED
@@ -0,0 +1,44 @@
+ """
+ LLM Provider Interface
+ """
+
+ from abc import ABC, abstractmethod
+ from typing import List, Dict, Any, AsyncIterator, Optional
+ from pydantic import BaseModel
+
+ class LLMResponse(BaseModel):
+     """
+     Standardized response from an LLM.
+     """
+     content: str
+     tool_calls: List[Dict[str, Any]] = []
+     token_usage: Optional[Dict[str, int]] = None
+
+ from loom.protocol.interfaces import LLMProviderProtocol
+
+ class LLMProvider(LLMProviderProtocol, ABC):
+     """
+     Abstract Interface for LLM Backends (OpenAI, Anthropic, Local).
+     """
+
+     @abstractmethod
+     async def chat(
+         self,
+         messages: List[Dict[str, Any]],
+         tools: Optional[List[Dict[str, Any]]] = None
+     ) -> LLMResponse:
+         """
+         Generate a response for a given chat history.
+         """
+         pass
+
+     @abstractmethod
+     async def stream_chat(
+         self,
+         messages: List[Dict[str, Any]],
+         tools: Optional[List[Dict[str, Any]]] = None
+     ) -> AsyncIterator[str]:
+         """
+         Stream the response content.
+         """
+         pass
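Any provider implementing this interface is consumed the same way; for instance, the streaming half against the mock from loom/infra/llm.py:

# Sketch: consuming stream_chat through the shared interface,
# demonstrated with the mock provider shipped in this package.
import asyncio

from loom.infra.llm import MockLLMProvider

async def main():
    llm = MockLLMProvider()
    async for chunk in llm.stream_chat([{"role": "user", "content": "hi"}]):
        print(chunk, end="")  # prints "Mock stream response."

asyncio.run(main())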
loom/interfaces/memory.py ADDED
@@ -0,0 +1,50 @@
+ """
+ Memory Interface
+ """
+
+ from abc import ABC, abstractmethod
+ from typing import Any, Dict, List, Optional
+ from pydantic import BaseModel, Field
+
+ class MemoryEntry(BaseModel):
+     """
+     A single unit of memory.
+     """
+     role: str
+     content: str
+     timestamp: float = Field(default_factory=lambda: __import__("time").time())
+     metadata: Dict[str, Any] = Field(default_factory=dict)
+     tier: str = "session"  # ephemeral, working, session, longterm
+
+ from loom.protocol.interfaces import MemoryStrategy
+
+ class MemoryInterface(MemoryStrategy, ABC):
+     """
+     Abstract Base Class for Agent Memory.
+     """
+
+     @abstractmethod
+     async def add(self, role: str, content: str, metadata: Optional[Dict[str, Any]] = None) -> None:
+         """Add a memory entry."""
+         pass
+
+     @abstractmethod
+     async def get_context(self, task: str = "") -> str:
+         """
+         Get full context formatted for the LLM.
+         May involve retrieval relevant to the 'task'.
+         """
+         pass
+
+     @abstractmethod
+     async def get_recent(self, limit: int = 10) -> List[Dict[str, Any]]:
+         """
+         Get recent memory entries as a list of dicts (role/content).
+         Useful for Chat History.
+         """
+         pass
+
+     @abstractmethod
+     async def clear(self) -> None:
+         """Clear short-term memory."""
+         pass
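A minimal concrete strategy might look like the sketch below. Hedged: this assumes the MemoryStrategy protocol from loom/protocol/interfaces.py adds no further abstract methods; loom's real strategies live under loom/builtin/memory/ and loom/memory/hierarchical.py, and BufferMemory here is purely illustrative.

# Hypothetical bounded-buffer memory, for illustration only.
from typing import Any, Dict, List, Optional

from loom.interfaces.memory import MemoryEntry, MemoryInterface

class BufferMemory(MemoryInterface):
    def __init__(self, max_entries: int = 100):
        self._entries: List[MemoryEntry] = []
        self._max = max_entries

    async def add(self, role: str, content: str, metadata: Optional[Dict[str, Any]] = None) -> None:
        self._entries.append(MemoryEntry(role=role, content=content, metadata=metadata or {}))
        self._entries = self._entries[-self._max:]  # drop oldest beyond the cap

    async def get_context(self, task: str = "") -> str:
        return "\n".join(f"{e.role}: {e.content}" for e in self._entries)

    async def get_recent(self, limit: int = 10) -> List[Dict[str, Any]]:
        return [{"role": e.role, "content": e.content} for e in self._entries[-limit:]]

    async def clear(self) -> None:
        self._entries.clear()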
loom/interfaces/store.py ADDED
@@ -0,0 +1,29 @@
+ """
+ Event Store Interface
+ """
+
+ from abc import ABC, abstractmethod
+ from typing import List, Optional, Dict, Any
+
+ from loom.protocol.cloudevents import CloudEvent
+
+ class EventStore(ABC):
+     """
+     Abstract Interface for Event Persistence.
+     Decouples the Event Bus from the storage mechanism (Memory, Redis, SQL).
+     """
+
+     @abstractmethod
+     async def append(self, event: CloudEvent) -> None:
+         """
+         Persist a single event.
+         """
+         pass
+
+     @abstractmethod
+     async def get_events(self, limit: int = 100, offset: int = 0, **filters) -> List[CloudEvent]:
+         """
+         Retrieve events with optional filtering.
+         Filters can match on standard CloudEvent attributes (source, type, etc.)
+         """
+         pass