loom_agent-0.3.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of loom-agent might be problematic.

Files changed (51)
  1. loom/__init__.py +1 -0
  2. loom/adapters/converters.py +77 -0
  3. loom/adapters/registry.py +43 -0
  4. loom/api/factory.py +77 -0
  5. loom/api/main.py +201 -0
  6. loom/builtin/__init__.py +3 -0
  7. loom/builtin/memory/__init__.py +3 -0
  8. loom/builtin/memory/metabolic.py +96 -0
  9. loom/builtin/memory/pso.py +41 -0
  10. loom/builtin/memory/sanitizers.py +39 -0
  11. loom/builtin/memory/validators.py +55 -0
  12. loom/config/tool.py +63 -0
  13. loom/infra/__init__.py +0 -0
  14. loom/infra/llm.py +43 -0
  15. loom/infra/logging.py +42 -0
  16. loom/infra/store.py +39 -0
  17. loom/infra/transport/memory.py +85 -0
  18. loom/infra/transport/nats.py +141 -0
  19. loom/infra/transport/redis.py +140 -0
  20. loom/interfaces/llm.py +44 -0
  21. loom/interfaces/memory.py +50 -0
  22. loom/interfaces/store.py +29 -0
  23. loom/interfaces/transport.py +35 -0
  24. loom/kernel/__init__.py +0 -0
  25. loom/kernel/base_interceptor.py +97 -0
  26. loom/kernel/bus.py +76 -0
  27. loom/kernel/dispatcher.py +58 -0
  28. loom/kernel/interceptors/__init__.py +14 -0
  29. loom/kernel/interceptors/budget.py +60 -0
  30. loom/kernel/interceptors/depth.py +45 -0
  31. loom/kernel/interceptors/hitl.py +51 -0
  32. loom/kernel/interceptors/studio.py +137 -0
  33. loom/kernel/interceptors/timeout.py +27 -0
  34. loom/kernel/state.py +71 -0
  35. loom/memory/hierarchical.py +94 -0
  36. loom/node/__init__.py +0 -0
  37. loom/node/agent.py +133 -0
  38. loom/node/base.py +121 -0
  39. loom/node/crew.py +103 -0
  40. loom/node/router.py +68 -0
  41. loom/node/tool.py +50 -0
  42. loom/protocol/__init__.py +0 -0
  43. loom/protocol/cloudevents.py +73 -0
  44. loom/protocol/interfaces.py +110 -0
  45. loom/protocol/mcp.py +97 -0
  46. loom/protocol/memory_operations.py +51 -0
  47. loom/protocol/patch.py +93 -0
  48. loom_agent-0.3.2.dist-info/LICENSE +204 -0
  49. loom_agent-0.3.2.dist-info/METADATA +139 -0
  50. loom_agent-0.3.2.dist-info/RECORD +51 -0
  51. loom_agent-0.3.2.dist-info/WHEEL +4 -0
loom/interfaces/transport.py ADDED
@@ -0,0 +1,35 @@
+ """
+ Transport Interface (Connectivity Layer)
+ """
+
+ from abc import ABC, abstractmethod
+ from typing import Callable, Awaitable
+ from loom.protocol.cloudevents import CloudEvent
+
+ EventHandler = Callable[[CloudEvent], Awaitable[None]]
+
+ class Transport(ABC):
+     """
+     Abstract Base Class for Event Transport.
+     Responsible for delivering events between components (local or remote).
+     """
+
+     @abstractmethod
+     async def connect(self) -> None:
+         """Establish connection to the transport layer."""
+         pass
+
+     @abstractmethod
+     async def disconnect(self) -> None:
+         """Close connection."""
+         pass
+
+     @abstractmethod
+     async def publish(self, topic: str, event: CloudEvent) -> None:
+         """Publish an event to a specific topic."""
+         pass
+
+     @abstractmethod
+     async def subscribe(self, topic: str, handler: EventHandler) -> None:
+         """Subscribe to a topic."""
+         pass
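The bundled transports under loom/infra/transport/ (memory, NATS, Redis in the file list above) presumably implement this interface. Below is a minimal sketch of a custom transport written against only the ABC shown here; the DictTransport name and its exact-match topic routing are illustrative assumptions, not taken from the package.

from collections import defaultdict
from loom.interfaces.transport import Transport, EventHandler
from loom.protocol.cloudevents import CloudEvent

class DictTransport(Transport):
    """In-process transport: topic string -> list of handlers (illustrative only)."""

    def __init__(self) -> None:
        self._handlers: dict[str, list[EventHandler]] = defaultdict(list)

    async def connect(self) -> None:
        pass  # nothing to set up for an in-process dict

    async def disconnect(self) -> None:
        self._handlers.clear()

    async def publish(self, topic: str, event: CloudEvent) -> None:
        # Deliver to every handler subscribed to exactly this topic
        for handler in list(self._handlers.get(topic, [])):
            await handler(event)

    async def subscribe(self, topic: str, handler: EventHandler) -> None:
        self._handlers[topic].append(handler)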
loom/kernel/__init__.py (File without changes)
loom/kernel/base_interceptor.py ADDED
@@ -0,0 +1,97 @@
+ """
+ Middleware Interceptors (Kernel)
+ """
+
+ from abc import ABC, abstractmethod
+ from typing import Optional, Set
+ import uuid
+
+ from loom.protocol.cloudevents import CloudEvent
+
+ class Interceptor(ABC):
+     """
+     Abstract Base Class for Interceptors.
+     Allows AOP-style cross-cutting concerns (Auth, Logging, Budget).
+     """
+
+     @abstractmethod
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         """
+         Called before the event is dispatched to a handler.
+         Return the event (modified or not) to proceed.
+         Return None to halt execution (block/filter).
+         """
+         pass
+
+     @abstractmethod
+     async def post_invoke(self, event: CloudEvent) -> None:
+         """
+         Called after the event has been processed.
+         """
+         pass
+
+ class BudgetInterceptor(Interceptor):
+     """
+     Simulated Token Budget Interceptor.
+     """
+     def __init__(self, max_tokens: int = 100000):
+         self.max_tokens = max_tokens
+         self.used_tokens = 0
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         # Check if event carries token cost estimation
+         # This is a simplification.
+         if "token_usage" in event.data:
+             cost = event.data["token_usage"].get("estimated", 0)
+             if self.used_tokens + cost > self.max_tokens:
+                 print(f"⚠️ Budget exceeded: {self.used_tokens}/{self.max_tokens}")
+                 return None
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         if "token_usage" in event.data:
+             actual = event.data["token_usage"].get("actual", 0)
+             self.used_tokens += actual
+
+ class TracingInterceptor(Interceptor):
+     """
+     Injects Distributed Tracing Context (W3C Trace Parent).
+     """
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         if not event.traceparent:
+             # Generate new trace
+             trace_id = uuid.uuid4().hex
+             span_id = uuid.uuid4().hex[:16]
+             event.traceparent = f"00-{trace_id}-{span_id}-01"
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         pass
+
+ class AuthInterceptor(Interceptor):
+     """
+     Basic Source Verification.
+     """
+     def __init__(self, allowed_prefixes: Set[str]):
+         self.allowed_prefixes = allowed_prefixes
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         if not event.source:
+             return None
+
+         # Check simplified prefix
+         # e.g. source="/agent/foo", prefix="agent"
+         # source="agent", prefix="agent"
+         parts = event.source.strip("/").split("/")
+         if not parts:
+             return None
+
+         prefix = parts[0]
+         if prefix not in self.allowed_prefixes:
+             print(f"🚫 Unauthorized source: {event.source}")
+             return None
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         pass
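Custom cross-cutting concerns plug in by subclassing the Interceptor ABC above. A minimal sketch (not part of the package) that only uses attributes this diff itself reads, event.type and event.source:

import logging
from typing import Optional

from loom.kernel.base_interceptor import Interceptor
from loom.protocol.cloudevents import CloudEvent

logger = logging.getLogger("loom.audit")

class AuditInterceptor(Interceptor):
    """Logs every event around dispatch; never blocks (illustrative only)."""

    async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
        logger.info("dispatching %s from %s", event.type, event.source)
        return event  # returning the event lets the chain continue

    async def post_invoke(self, event: CloudEvent) -> None:
        logger.info("handled %s", event.type)

Returning None from pre_invoke would silently drop the event, which is how the contract above expresses filtering.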
loom/kernel/bus.py ADDED
@@ -0,0 +1,76 @@
+ """
+ Universal Event Bus (Kernel)
+ """
+
+ import asyncio
+ from typing import Dict, List, Callable, Awaitable, Optional, Any
+
+ from loom.protocol.cloudevents import CloudEvent
+ from loom.interfaces.store import EventStore
+ from loom.infra.store import InMemoryEventStore
+ from loom.interfaces.transport import Transport, EventHandler
+ from loom.infra.transport.memory import InMemoryTransport
+
+ class UniversalEventBus:
+     """
+     Universal Event Bus based on Event Sourcing.
+     Delegates routing to a Transport layer.
+     """
+
+     def __init__(self, store: EventStore = None, transport: Transport = None):
+         self.store = store or InMemoryEventStore()
+         self.transport = transport or InMemoryTransport()
+
+     async def connect(self):
+         """Connect the underlying transport."""
+         await self.transport.connect()
+
+     async def disconnect(self):
+         """Disconnect the underlying transport."""
+         await self.transport.disconnect()
+
+     async def publish(self, event: CloudEvent) -> None:
+         """
+         Publish an event to the bus.
+         1. Persist to store.
+         2. Route to subscribers via Transport.
+         """
+         # 1. Persist
+         await self.store.append(event)
+
+         # 2. Route via Transport
+         topic = self._get_topic(event)
+
+         # Ensure connected
+         # (Optimistically connect. In prod, connect() called at startup app.start())
+         await self.transport.connect()
+
+         await self.transport.publish(topic, event)
+
+     async def subscribe(self, topic: str, handler: Callable[[CloudEvent], Awaitable[None]]):
+         """Register a handler for a topic."""
+         # optimistic connect
+         await self.transport.connect()
+         await self.transport.subscribe(topic, handler)
+
+     def _get_topic(self, event: CloudEvent) -> str:
+         """Construct topic string from event."""
+         # Special routing for requests: use subject (target) if present
+         if event.subject and (event.type == "node.request" or event.type == "node.call"):
+             safe_subject = event.subject.strip("/")
+             return f"{event.type}/{safe_subject}"
+
+         # Default: route by source (Origin)
+         safe_source = event.source.strip("/")
+         return f"{event.type}/{safe_source}"
+
+     async def get_events(self) -> List[CloudEvent]:
+         """Return all events in the store."""
+         return await self.store.get_events(limit=1000)
+
+     async def clear(self):
+         """Clear state (for testing)."""
+         if hasattr(self.store, "clear"):
+             self.store.clear()
+
+         await self.transport.disconnect()
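Putting the bus together with its in-memory defaults, as a minimal sketch. The CloudEvent keyword arguments below are inferred from the attributes the bus reads (type, source, subject, data) and may not match the real constructor in loom/protocol/cloudevents.py; treat them as assumptions.

import asyncio

from loom.kernel.bus import UniversalEventBus
from loom.protocol.cloudevents import CloudEvent

async def main() -> None:
    bus = UniversalEventBus()  # defaults to InMemoryEventStore + InMemoryTransport
    await bus.connect()

    async def on_request(event: CloudEvent) -> None:
        print("received", event.type, event.data)

    # _get_topic routes "node.request" by subject, so the topic is "<type>/<subject>"
    await bus.subscribe("node.request/tool/search", on_request)

    event = CloudEvent(type="node.request", source="/agent/planner",
                       subject="/tool/search", data={"query": "loom"})
    await bus.publish(event)  # persisted to the store, then routed by the transport

    await bus.disconnect()

asyncio.run(main())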
loom/kernel/dispatcher.py ADDED
@@ -0,0 +1,58 @@
+ """
+ Event Dispatcher (Kernel)
+ """
+
+ from typing import List, Any
+
+ from loom.protocol.cloudevents import CloudEvent
+ from loom.kernel.bus import UniversalEventBus
+ from loom.kernel.base_interceptor import Interceptor
+
+ class Dispatcher:
+     """
+     Central dispatch mechanism.
+     1. Runs Interceptor Chain (Pre-invoke).
+     2. Publishes to Bus.
+     3. Runs Interceptor Chain (Post-invoke).
+     """
+
+     def __init__(self, bus: UniversalEventBus):
+         self.bus = bus
+         self.interceptors: List[Interceptor] = []
+
+     def add_interceptor(self, interceptor: Interceptor) -> None:
+         """Add an interceptor to the chain."""
+         self.interceptors.append(interceptor)
+
+     async def dispatch(self, event: CloudEvent) -> None:
+         """
+         Dispatch an event through the system.
+         """
+         # 1. Pre-invoke Interceptors
+         current_event = event
+         for interceptor in self.interceptors:
+             current_event = await interceptor.pre_invoke(current_event)
+             if current_event is None:
+                 # Blocked by interceptor
+                 return
+
+         # 2. Publish to Bus (Routing & Persistence)
+         import asyncio
+         timeout = 30.0 # Default fallback
+         if current_event.extensions and "timeout" in current_event.extensions:
+             try:
+                 timeout = float(current_event.extensions["timeout"])
+             except:
+                 pass
+
+         try:
+             await asyncio.wait_for(self.bus.publish(current_event), timeout=timeout)
+         except asyncio.TimeoutError:
+             print(f"timeout dispatching event {current_event.id}")
+             # We might want to raise or handle graceful failure
+             # Raising allows the caller (e.g. app.run) to catch it
+             raise
+
+         # 3. Post-invoke Interceptors (in reverse order)
+         for interceptor in reversed(self.interceptors):
+             await interceptor.post_invoke(current_event)
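Wiring sketch for the pipeline described in the docstring: interceptors run in registration order before publish and in reverse order afterwards. As above, the CloudEvent keyword arguments are assumptions inferred from the fields this diff reads, not a confirmed constructor signature.

import asyncio

from loom.kernel.bus import UniversalEventBus
from loom.kernel.dispatcher import Dispatcher
from loom.kernel.base_interceptor import AuthInterceptor, TracingInterceptor
from loom.protocol.cloudevents import CloudEvent

async def main() -> None:
    dispatcher = Dispatcher(UniversalEventBus())
    dispatcher.add_interceptor(AuthInterceptor(allowed_prefixes={"agent", "user"}))
    dispatcher.add_interceptor(TracingInterceptor())  # adds a traceparent if missing

    event = CloudEvent(type="agent.thought", source="/agent/planner",
                       data={"thought": "draft a plan"})
    await dispatcher.dispatch(event)  # dropped silently if any pre_invoke returns None

asyncio.run(main())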
loom/kernel/interceptors/__init__.py ADDED
@@ -0,0 +1,14 @@
+ from .timeout import TimeoutInterceptor
+ from .budget import BudgetInterceptor
+ from .depth import DepthInterceptor
+ from .hitl import HITLInterceptor
+ from loom.kernel.base_interceptor import TracingInterceptor
+
+ __all__ = [
+     "TimeoutInterceptor",
+     "BudgetInterceptor",
+     "DepthInterceptor",
+     "HITLInterceptor",
+     "TracingInterceptor"
+ ]
+
loom/kernel/interceptors/budget.py ADDED
@@ -0,0 +1,60 @@
+ """
+ Budget Interceptor
+ """
+
+ from typing import Dict, Any, Optional
+ from loom.kernel.base_interceptor import Interceptor
+ from loom.protocol.cloudevents import CloudEvent
+
+ class BudgetExceededError(Exception):
+     pass
+
+ class BudgetInterceptor(Interceptor):
+     """
+     Controls resource usage (tokens/cost) per agent/node.
+     """
+
+     def __init__(self, max_tokens: int = 100000):
+         self.max_tokens = max_tokens
+         # Usage tracking: {node_id: tokens_used}
+         self._usage: Dict[str, int] = {}
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         # Check if this is a request that consumes budget?
+         # Typically we check the SOURCE (Who is asking).
+         # If Agent A asks Tool B, Agent A is spending budget?
+         # Or if Agent A sends "node.response", it used tokens to generate it.
+
+         # Policy: Check usage of the SOURCE node.
+         # If usage > max, block.
+
+         node_id = event.source.split("/")[-1]
+         current_usage = self._usage.get(node_id, 0)
+
+         if current_usage >= self.max_tokens:
+             raise BudgetExceededError(
+                 f"Node {node_id} exceeded token budget: {current_usage}/{self.max_tokens}"
+             )
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         # Update usage based on event type or result
+         # For LLM-based agents, we usually get usage in the "agent.thought" or "node.response".
+         # In this demo system, we don't have real token counts from MockLLM.
+         # We'll heuristic: 1 char = 1 token for demo.
+
+         node_id = event.source.split("/")[-1]
+
+         tokens = 0
+         if event.data and isinstance(event.data, dict):
+             # If explicit usage field exists
+             if "usage" in event.data:
+                 tokens = event.data["usage"].get("total_tokens", 0)
+             else:
+                 # Heuristic
+                 content = str(event.data.get("thought", "") or event.data.get("result", "") or "")
+                 tokens = len(content) // 4 # Approx
+
+         if tokens > 0:
+             self._usage[node_id] = self._usage.get(node_id, 0) + tokens
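A rough illustration of the accounting above. The event objects are SimpleNamespace stand-ins because the interceptor only reads .source and .data; the real CloudEvent model lives in loom/protocol/cloudevents.py. Note that, unlike returning None, the raised BudgetExceededError propagates out of Dispatcher.dispatch to the caller.

import asyncio
from types import SimpleNamespace

from loom.kernel.interceptors.budget import BudgetExceededError, BudgetInterceptor

async def main() -> None:
    budget = BudgetInterceptor(max_tokens=100)
    event = SimpleNamespace(source="/agent/writer", data={"result": "x" * 800})

    await budget.pre_invoke(event)   # usage for "writer" is 0, so it passes
    await budget.post_invoke(event)  # heuristic: 800 chars // 4 = 200 tokens recorded

    try:
        await budget.pre_invoke(event)  # 200 >= 100, so the budget gate trips
    except BudgetExceededError as exc:
        print(exc)

asyncio.run(main())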
loom/kernel/interceptors/depth.py ADDED
@@ -0,0 +1,45 @@
+ """
+ Depth Interceptor
+ """
+
+ from typing import Optional, Any
+ from loom.kernel.base_interceptor import Interceptor
+ from loom.protocol.cloudevents import CloudEvent
+
+ class RecursionLimitExceededError(Exception):
+     pass
+
+ class DepthInterceptor(Interceptor):
+     """
+     Prevents infinite fractal recursion.
+     """
+
+     def __init__(self, max_depth: int = 5):
+         self.max_depth = max_depth
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         # We need to know the current depth.
+         # CloudEvents 1.0 doesn't have a standard depth field.
+         # We assume it is propagated in extension attribute `depth` or inside `tracestate`.
+
+         # If it's a new request from User, depth is 0.
+         # If it's a sub-request, parent should have incremented it.
+
+         # But Interceptor is on the Sender side (Dispatcher)?
+         # Yes, Dispatcher is shared or per-node.
+         # If Dispatcher is centralized (LoomApp), it intercepts ALL events.
+
+         # We check `event.extensions`.
+         current_depth = int(getattr(event, "depth", 0) or 0)
+
+         if current_depth > self.max_depth:
+             raise RecursionLimitExceededError(f"Max recursion depth {self.max_depth} exceeded.")
+
+         # When an Agent receives an event and sends a NEW event (Tool Call),
+         # the Agent is responsible for correct propagation (depth+1).
+         # This interceptor essentially Gates it.
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         pass
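A tiny check of the gate above, again with SimpleNamespace stand-ins, since the interceptor only reads a depth attribute (defaulting to 0 when absent):

import asyncio
from types import SimpleNamespace

from loom.kernel.interceptors.depth import DepthInterceptor, RecursionLimitExceededError

async def main() -> None:
    gate = DepthInterceptor(max_depth=2)

    assert await gate.pre_invoke(SimpleNamespace(depth=2)) is not None  # at the limit: allowed
    try:
        await gate.pre_invoke(SimpleNamespace(depth=3))  # past the limit
    except RecursionLimitExceededError as exc:
        print(exc)

asyncio.run(main())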
loom/kernel/interceptors/hitl.py ADDED
@@ -0,0 +1,51 @@
+ """
+ Human-in-the-Loop Interceptor
+ """
+
+ import asyncio
+ from typing import List, Optional, Any
+ from loom.kernel.base_interceptor import Interceptor
+ from loom.protocol.cloudevents import CloudEvent
+
+ class HITLInterceptor(Interceptor):
+     """
+     Pauses execution for Human Approval on sensitive events.
+     """
+
+     def __init__(self, patterns: List[str]):
+         """
+         Args:
+             patterns: List of substring matches for Event Type or Subject.
+                 e.g. ["tool.execute/delete_file", "payment"]
+         """
+         self.patterns = patterns
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         # Check simple pattern match
+         identifier = f"{event.type}/{event.subject or ''}"
+
+         should_pause = any(p in identifier for p in self.patterns)
+
+         if should_pause:
+             print(f"\n[HITL] ⚠️ STOP! Event requires approval: {identifier}")
+             print(f" Data: {str(event.data)[:200]}")
+
+             # This blocks the Dispatcher!
+             # In a real async web app, this would suspend and wait for an API call (Webhook/Signal).
+             # For this CLI SDK, we use blocking input (in a separate thread if needed, or just sync).
+             # Since standard input() is blocking, it pauses the loop.
+             # In purely async heavily concurrent apps, use a non-blocking wrapper.
+             # Here: simplistic CLI approach.
+
+             approval = await asyncio.to_thread(input, " Approve? (y/N): ")
+
+             if approval.lower().strip() != "y":
+                 print(" ❌ Denied.")
+                 return None # Drop event
+
+             print(" ✅ Approved.")
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         pass
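Configuration sketch: approval is triggered by substring matches against "<event.type>/<event.subject>", so patterns can target a specific tool call or a whole event family. The dispatcher variable in the comment is hypothetical context, not a package-provided global.

from loom.kernel.interceptors.hitl import HITLInterceptor

hitl = HITLInterceptor(patterns=[
    "tool.execute/delete_file",  # a specific, destructive tool call
    "payment",                   # anything whose type or subject mentions payment
])
# dispatcher.add_interceptor(hitl)  # matching events then wait for y/N on stdin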
loom/kernel/interceptors/studio.py ADDED
@@ -0,0 +1,137 @@
+
+ import asyncio
+ import json
+ import os
+ import time
+ from typing import Optional, List, Dict, Any
+ import logging
+
+ try:
+     import websockets
+     from websockets.client import WebSocketClientProtocol
+ except ImportError:
+     websockets = None
+     WebSocketClientProtocol = Any
+
+ from loom.kernel.base_interceptor import Interceptor
+ from loom.protocol.cloudevents import CloudEvent
+
+ logger = logging.getLogger(__name__)
+
+ class StudioInterceptor(Interceptor):
+     """
+     Studio Interceptor: Captures all events and forwards them to Studio Server.
+
+     Features:
+     - Async non-blocking: Uses asyncio.create_task to avoid blocking main flow.
+     - Optional: Controlled by LOOM_STUDIO_ENABLED environment variable.
+     - Batching: Buffers events and sends in batches to reduce network overhead.
+     """
+
+     def __init__(self, studio_url: str = "ws://localhost:8765", enabled: bool = False):
+         self.studio_url = studio_url
+         self.ws: Optional[WebSocketClientProtocol] = None
+         self.event_buffer: List[CloudEvent] = []
+         self.buffer_size = 10
+         # Priority: Argument > Env Var
+         self.enabled = enabled or os.getenv("LOOM_STUDIO_ENABLED", "false").lower() == "true"
+         self._loop = None
+
+         if self.enabled and not websockets:
+             logger.warning("LOOM_STUDIO_ENABLED is true but websockets is not installed. Disabling Studio.")
+             self.enabled = False
+
+         if self.enabled:
+             asyncio.create_task(self._ensure_connection())
+
+     async def _ensure_connection(self):
+         """Ensure WebSocket connection is established"""
+         if not self.enabled:
+             return
+
+         if self.ws:
+             try:
+                 # Basic check if open
+                 if self.ws.state == 1: # Open
+                     return
+             except Exception:
+                 pass
+             self.ws = None
+
+         try:
+             # Append /ws/ingest to the base URL if not present
+             url = self.studio_url
+             if not url.endswith("/ws/ingest"):
+                 url = f"{url.rstrip('/')}/ws/ingest"
+
+             # Simple debounce/lock could go here but for now just log
+             print(f"DEBUG: Connecting to {url}")
+             self.ws = await websockets.connect(url)
+             logger.info(f"Connected to Loom Studio at {url}")
+             print(f"DEBUG: Connected successfully")
+         except Exception as e:
+             # Silent fail to not disrupt agent operation, but log it
+             logger.debug(f"Failed to connect to Studio: {e}")
+             print(f"DEBUG: Failed to connect: {e}")
+             self.ws = None
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         """Capture event (pre-phase)"""
+         if self.enabled:
+             enriched_event_data = event.model_dump(mode='json')
+             if "extensions" not in enriched_event_data:
+                 enriched_event_data["extensions"] = {}
+
+             enriched_event_data["extensions"]["studio_phase"] = "pre"
+             enriched_event_data["extensions"]["studio_timestamp"] = time.time()
+
+             asyncio.create_task(self._send_event_data(enriched_event_data))
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         """Capture event (post-phase)"""
+         if self.enabled:
+             enriched_event_data = event.model_dump(mode='json')
+             if "extensions" not in enriched_event_data:
+                 enriched_event_data["extensions"] = {}
+
+             enriched_event_data["extensions"]["studio_phase"] = "post"
+             enriched_event_data["extensions"]["studio_timestamp"] = time.time()
+
+             asyncio.create_task(self._send_event_data(enriched_event_data))
+
+     async def _send_event_data(self, event_data: Dict[str, Any]):
+         """Buffer and send event data"""
+         try:
+             self.event_buffer.append(event_data)
+
+             if len(self.event_buffer) >= self.buffer_size:
+                 await self._flush_buffer()
+         except Exception as e:
+             logger.error(f"Error in StudioInterceptor: {e}")
+
+     async def _flush_buffer(self):
+         """Flush buffered events to server"""
+         if not self.event_buffer:
+             return
+
+         # Snapshot and clear immediately to avoid duplicates/race
+         current_batch = list(self.event_buffer)
+         self.event_buffer = []
+
+         await self._ensure_connection()
+
+         if self.ws:
+             try:
+                 batch = {
+                     "type": "event_batch",
+                     "events": current_batch
+                 }
+                 await self.ws.send(json.dumps(batch))
+             except Exception as e:
+                 logger.debug(f"Failed to send batch to Studio: {e}")
+                 print(f"DEBUG: Failed to send batch: {e}")
+                 # Re-queue? For now drop to avoid complexity
+         else:
+             print(f"DEBUG: No connection, dropping batch of {len(current_batch)}")
loom/kernel/interceptors/timeout.py ADDED
@@ -0,0 +1,27 @@
+ """
+ Standard Kernel Interceptors.
+ """
+
+ from typing import Optional
+ from loom.protocol.cloudevents import CloudEvent
+ from loom.kernel.base_interceptor import Interceptor
+
+ class TimeoutInterceptor(Interceptor):
+     """
+     Enforces a timeout on event processing by injecting a deadline constraint.
+     The Dispatcher or Transport is responsible for respecting this constraint.
+     """
+     def __init__(self, default_timeout_sec: float = 30.0):
+         self.default_timeout_sec = default_timeout_sec
+
+     async def pre_invoke(self, event: CloudEvent) -> Optional[CloudEvent]:
+         # If timeout not already set in extensions, inject it
+         extensions = event.extensions or {}
+         if "timeout" not in extensions:
+             extensions["timeout"] = self.default_timeout_sec
+         event.extensions = extensions
+
+         return event
+
+     async def post_invoke(self, event: CloudEvent) -> None:
+         pass
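How the injected deadline is consumed: TimeoutInterceptor writes extensions["timeout"], and Dispatcher.dispatch (above) reads it to bound bus.publish with asyncio.wait_for. The event here is a SimpleNamespace stand-in, since only the extensions attribute is touched.

import asyncio
from types import SimpleNamespace

from loom.kernel.interceptors.timeout import TimeoutInterceptor

async def main() -> None:
    interceptor = TimeoutInterceptor(default_timeout_sec=5.0)
    event = await interceptor.pre_invoke(SimpleNamespace(extensions=None))
    print(event.extensions)  # {'timeout': 5.0}, later honored by Dispatcher.dispatch

asyncio.run(main())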
loom/kernel/state.py ADDED
@@ -0,0 +1,71 @@
+ """
+ State Management (Kernel)
+ """
+
+ from typing import Any, Dict, List, Optional
+ import copy
+
+ from loom.protocol.cloudevents import CloudEvent
+ from loom.protocol.patch import StatePatch, apply_patch as apply_dict_patch
+
+ class StateStore:
+     """
+     Manages the application state tree.
+     Updates state by applying 'state.patch' events.
+     """
+
+     def __init__(self):
+         self._root: Dict[str, Any] = {}
+
+     def apply_event(self, event: CloudEvent) -> None:
+         """
+         Update state if event contains patches.
+         Expected event.type = "state.patch"
+         Expected event.data = {"patches": [...]}
+         """
+         if event.type != "state.patch":
+             return
+
+         patches_data = event.data.get("patches", [])
+         if not patches_data:
+             return
+
+         for p_data in patches_data:
+             try:
+                 patch = StatePatch(**p_data)
+                 # Apply strictly to root
+                 apply_dict_patch(self._root, patch)
+             except Exception as e:
+                 # In a real system, we might want to dead-letter queue this
+                 print(f"Failed to apply patch: {e}")
+
+     def get_snapshot(self, path: str = "/") -> Any:
+         """
+         Get a deep copy of the state at a specific path.
+         """
+         if path == "/":
+             return copy.deepcopy(self._root)
+
+         tokens = [t for t in path.split('/') if t]
+         current = self._root
+
+         for token in tokens:
+             if isinstance(current, dict):
+                 current = current.get(token)
+             elif isinstance(current, list):
+                 try:
+                     idx = int(token)
+                     if 0 <= idx < len(current):
+                         current = current[idx]
+                     else:
+                         current = None
+                 except ValueError:
+                     current = None
+
+             if current is None:
+                 return None
+
+         return copy.deepcopy(current)
+
+     def clear(self):
+         self._root = {}
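Path-resolution sketch for get_snapshot. In real use the tree is populated by "state.patch" events carrying StatePatch payloads (defined in loom/protocol/patch.py, not shown in this section), so the direct assignment to _root below is purely for illustration.

from loom.kernel.state import StateStore

store = StateStore()
store._root = {"agents": [{"name": "planner", "status": "idle"}]}  # illustration only

print(store.get_snapshot("/agents/0/name"))  # -> "planner" (list indices are numeric tokens)
print(store.get_snapshot("/agents/5"))       # -> None (out-of-range index)
print(store.get_snapshot("/"))               # deep copy of the whole tree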