soorma_core-0.3.0-py3-none-any.whl
This diff shows the content of a publicly available package version as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- soorma/__init__.py +138 -0
- soorma/agents/__init__.py +17 -0
- soorma/agents/base.py +523 -0
- soorma/agents/planner.py +391 -0
- soorma/agents/tool.py +373 -0
- soorma/agents/worker.py +385 -0
- soorma/ai/event_toolkit.py +281 -0
- soorma/ai/tools.py +280 -0
- soorma/cli/__init__.py +7 -0
- soorma/cli/commands/__init__.py +3 -0
- soorma/cli/commands/dev.py +780 -0
- soorma/cli/commands/init.py +717 -0
- soorma/cli/main.py +52 -0
- soorma/context.py +832 -0
- soorma/events.py +496 -0
- soorma/models.py +24 -0
- soorma/registry/client.py +186 -0
- soorma/utils/schema_utils.py +209 -0
- soorma_core-0.3.0.dist-info/METADATA +454 -0
- soorma_core-0.3.0.dist-info/RECORD +23 -0
- soorma_core-0.3.0.dist-info/WHEEL +4 -0
- soorma_core-0.3.0.dist-info/entry_points.txt +3 -0
- soorma_core-0.3.0.dist-info/licenses/LICENSE.txt +21 -0
soorma/context.py
ADDED
@@ -0,0 +1,832 @@
"""
Platform Context - Infrastructure services for Soorma agents.

The PlatformContext provides access to all platform services:
- registry: Service discovery and capability registration
- memory: Distributed state management (procedural, semantic, episodic)
- bus: Event choreography (publish/subscribe)
- tracker: Observability and state machine tracking

Usage:
    @worker.on_task("schedule_technician")
    async def schedule_service(task, context: PlatformContext):
        # Service Discovery
        calendar_tool = await context.registry.find("calendar_api_tool")

        # Shared Memory
        vehicle = await context.memory.retrieve(f"vehicle:{task.data['vehicle_id']}")

        # Event Publishing (automatic state tracking by platform)
        await context.bus.publish("technician_scheduled", result)
"""
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional
import httpx
import logging
import os

from .events import EventClient

logger = logging.getLogger(__name__)


@dataclass
class RegistryClient:
    """
    Service Discovery & Capabilities client.

    Powered by: PostgreSQL + gRPC (in production)

    Methods:
        find(): Locate agents by capability
        register(): Announce your services
        query_schemas(): Get event DTOs
    """
    base_url: str = field(default_factory=lambda: os.getenv("SOORMA_REGISTRY_URL", "http://localhost:8081"))
    _http_client: Optional[httpx.AsyncClient] = field(default=None, repr=False)

    async def _ensure_client(self) -> httpx.AsyncClient:
        if self._http_client is None:
            self._http_client = httpx.AsyncClient()
        return self._http_client

    async def find(self, capability: str) -> Optional[Dict[str, Any]]:
        """
        Locate agents by capability.

        Args:
            capability: The capability to search for (e.g., "calendar_api_tool")

        Returns:
            Agent info dict if found, None otherwise
        """
        client = await self._ensure_client()
        try:
            response = await client.get(
                f"{self.base_url}/v1/agents/search",
                params={"capability": capability},
                timeout=10.0,
            )
            if response.status_code == 200:
                results = response.json()
                return results[0] if results else None
            return None
        except Exception as e:
            logger.error(f"Registry lookup failed: {e}")
            return None

    async def find_all(self, capability: str) -> List[Dict[str, Any]]:
        """
        Find all agents with a specific capability.

        Args:
            capability: The capability to search for

        Returns:
            List of agent info dicts
        """
        client = await self._ensure_client()
        try:
            response = await client.get(
                f"{self.base_url}/v1/agents/search",
                params={"capability": capability},
                timeout=10.0,
            )
            if response.status_code == 200:
                return response.json()
            return []
        except Exception as e:
            logger.error(f"Registry lookup failed: {e}")
            return []

    async def register(
        self,
        agent_id: str,
        name: str,
        agent_type: str,
        capabilities: List[Any],  # Can be str or AgentCapability
        events_consumed: List[str],
        events_produced: List[str],
        metadata: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Register an agent with the platform.

        Args:
            agent_id: Unique identifier for the agent
            name: Human-readable name
            agent_type: Type of agent (planner, worker, tool)
            capabilities: List of capabilities (strings or AgentCapability objects)
            events_consumed: Event types this agent subscribes to
            events_produced: Event types this agent publishes
            metadata: Optional additional metadata

        Returns:
            True if registration succeeded
        """
        client = await self._ensure_client()

        # Convert capabilities to structured format if they are strings
        structured_capabilities = []
        for cap in capabilities:
            if isinstance(cap, str):
                # Auto-convert string capability to structured
                structured_capabilities.append({
                    "taskName": cap,
                    "description": f"Capability: {cap}",
                    "consumedEvent": "unknown",
                    "producedEvents": []
                })
            elif hasattr(cap, "model_dump"):
                # It's a Pydantic model (AgentCapability)
                structured_capabilities.append(cap.model_dump(by_alias=True))
            elif isinstance(cap, dict):
                # Already a dict
                structured_capabilities.append(cap)
            else:
                logger.warning(f"Unknown capability format: {cap}")

        # Construct the full AgentDefinition structure
        agent_def = {
            "agentId": agent_id,
            "name": name,
            "description": (metadata or {}).get("description", ""),
            "capabilities": structured_capabilities,
            "consumedEvents": events_consumed,
            "producedEvents": events_produced
        }

        try:
            # Wrap in AgentRegistrationRequest structure
            request_payload = {"agent": agent_def}

            response = await client.post(
                f"{self.base_url}/v1/agents",
                json=request_payload,
                timeout=10.0,
            )
            return response.status_code in (200, 201)
        except Exception as e:
            logger.error(f"Registry registration failed: {e}")
            return False

    async def deregister(self, agent_id: str) -> bool:
        """
        Remove an agent from the registry.

        Args:
            agent_id: The agent ID to remove

        Returns:
            True if deregistration succeeded
        """
        client = await self._ensure_client()
        try:
            response = await client.delete(
                f"{self.base_url}/v1/agents/{agent_id}",
                timeout=10.0,
            )
            return response.status_code in (200, 204)
        except Exception as e:
            logger.error(f"Registry deregistration failed: {e}")
            return False

    async def query_schemas(self, event_type: str) -> Optional[Dict[str, Any]]:
        """
        Get the JSON schema for an event type.

        Args:
            event_type: The event type to get schema for

        Returns:
            JSON schema dict if found, None otherwise
        """
        client = await self._ensure_client()
        try:
            response = await client.get(
                f"{self.base_url}/v1/events/schemas/{event_type}",
                timeout=10.0,
            )
            if response.status_code == 200:
                return response.json()
            return None
        except Exception as e:
            logger.error(f"Schema query failed: {e}")
            return None

    async def heartbeat(self, agent_id: str) -> bool:
        """
        Send a heartbeat to keep registration alive.

        Args:
            agent_id: The agent ID

        Returns:
            True if heartbeat succeeded
        """
        client = await self._ensure_client()
        try:
            response = await client.post(
                f"{self.base_url}/v1/agents/{agent_id}/heartbeat",
                timeout=5.0,
            )
            return response.status_code == 200
        except Exception as e:
            logger.debug(f"Heartbeat failed: {e}")
            return False

    async def close(self) -> None:
        """Close the HTTP client."""
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None


@dataclass
class MemoryClient:
    """
    Distributed State Management client.

    Powered by: Redis + Vector DB (in production)

    Memory types:
    - Procedural: How to do things (skills, procedures)
    - Semantic: Facts and knowledge
    - Episodic: Past experiences and events
    - Working: Current task context

    Methods:
        retrieve(): Read shared memory
        store(): Persist agent state
        search(): Semantic memory lookup

    NOTE: Memory Service is not yet implemented. This client provides
    a mock implementation that stores data in-memory for development.
    """
    base_url: str = field(default_factory=lambda: os.getenv("SOORMA_MEMORY_URL", "http://localhost:8083"))
    _http_client: Optional[httpx.AsyncClient] = field(default=None, repr=False)
    # In-memory storage for development (when Memory Service is not available)
    _local_store: Dict[str, Any] = field(default_factory=dict, repr=False)
    _use_local: bool = field(default=True, repr=False)  # Use local store by default until service is implemented

    async def _ensure_client(self) -> httpx.AsyncClient:
        if self._http_client is None:
            self._http_client = httpx.AsyncClient()
        return self._http_client

    async def retrieve(self, key: str) -> Optional[Any]:
        """
        Read shared memory by key.

        Args:
            key: Memory key (e.g., "vehicle:123", "user:abc")

        Returns:
            Stored value if found, None otherwise
        """
        # Use local store for development
        if self._use_local:
            value = self._local_store.get(key)
            logger.debug(f"Memory retrieve (local): {key} -> {value is not None}")
            return value

        client = await self._ensure_client()
        try:
            response = await client.get(
                f"{self.base_url}/v1/memory/{key}",
                timeout=10.0,
            )
            if response.status_code == 200:
                return response.json().get("value")
            return None
        except Exception as e:
            logger.debug(f"Memory retrieve failed, using local: {e}")
            return self._local_store.get(key)

    async def store(
        self,
        key: str,
        value: Any,
        memory_type: str = "working",
        ttl: Optional[int] = None,
    ) -> bool:
        """
        Persist agent state to shared memory.

        Args:
            key: Memory key
            value: Value to store (will be JSON serialized)
            memory_type: Type of memory (working, semantic, episodic, procedural)
            ttl: Time-to-live in seconds (optional)

        Returns:
            True if store succeeded
        """
        # Use local store for development
        if self._use_local:
            self._local_store[key] = value
            logger.debug(f"Memory store (local): {key}")
            return True

        client = await self._ensure_client()
        try:
            payload = {
                "key": key,
                "value": value,
                "memory_type": memory_type,
            }
            if ttl:
                payload["ttl"] = ttl

            response = await client.post(
                f"{self.base_url}/v1/memory",
                json=payload,
                timeout=10.0,
            )
            return response.status_code in (200, 201)
        except Exception as e:
            logger.debug(f"Memory store failed, using local: {e}")
            self._local_store[key] = value
            return True

    async def search(
        self,
        query: str,
        memory_type: Optional[str] = None,
        limit: int = 10,
    ) -> List[Dict[str, Any]]:
        """
        Semantic memory lookup.

        Args:
            query: Natural language search query
            memory_type: Filter by memory type (optional)
            limit: Maximum results to return

        Returns:
            List of matching memory entries with similarity scores
        """
        # Local store doesn't support semantic search
        if self._use_local:
            logger.debug(f"Memory search (local): '{query}' - semantic search not available in dev mode")
            return []

        client = await self._ensure_client()
        try:
            params = {"q": query, "limit": limit}
            if memory_type:
                params["type"] = memory_type

            response = await client.get(
                f"{self.base_url}/v1/memory/search",
                params=params,
                timeout=10.0,
            )
            if response.status_code == 200:
                return response.json()
            return []
        except Exception as e:
            logger.debug(f"Memory search failed: {e}")
            return []

    async def delete(self, key: str) -> bool:
        """
        Delete a memory entry.

        Args:
            key: Memory key to delete

        Returns:
            True if deletion succeeded
        """
        # Use local store for development
        if self._use_local:
            self._local_store.pop(key, None)
            logger.debug(f"Memory delete (local): {key}")
            return True

        client = await self._ensure_client()
        try:
            response = await client.delete(
                f"{self.base_url}/v1/memory/{key}",
                timeout=10.0,
            )
            return response.status_code in (200, 204)
        except Exception as e:
            logger.debug(f"Memory delete failed, using local: {e}")
            self._local_store.pop(key, None)
            return True

    async def close(self) -> None:
        """Close the HTTP client."""
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None


@dataclass
class BusClient:
    """
    Event Choreography client.

    Powered by: Kafka / NATS (via Event Service)

    Methods:
        publish(): Emit domain events
        subscribe(): React to events (via EventClient)
        request(): RPC-style calls
    """
    event_client: EventClient = field(default_factory=EventClient)

    async def publish(
        self,
        event_type: str,
        data: Dict[str, Any],
        topic: Optional[str] = None,
        correlation_id: Optional[str] = None,
    ) -> str:
        """
        Emit a domain event.

        Args:
            event_type: Event type (e.g., "technician_scheduled")
            data: Event payload
            topic: Target topic (auto-inferred from event_type if not provided)
            correlation_id: Optional correlation ID for tracing

        Returns:
            The event ID
        """
        # Auto-infer topic from event type if not provided
        if topic is None:
            topic = self._infer_topic(event_type)

        return await self.event_client.publish(
            event_type=event_type,
            topic=topic,
            data=data,
            correlation_id=correlation_id,
        )

    def _infer_topic(self, event_type: str) -> str:
        """Infer the topic from event type based on conventions."""
        # Map common patterns to topics
        if event_type.endswith(".requested") or event_type.endswith(".request"):
            return "action-requests"
        elif event_type.endswith(".completed") or event_type.endswith(".result"):
            return "action-results"
        elif event_type.startswith("billing."):
            return "billing"
        elif event_type.startswith("notification."):
            return "notifications"
        else:
            return "business-facts"

    async def subscribe(self, topics: List[str]) -> None:
        """
        Subscribe to event topics.

        This connects to the Event Service and starts receiving events.
        Use @event_client.on_event() decorator to register handlers.

        Args:
            topics: List of topic patterns to subscribe to
        """
        await self.event_client.connect(topics=topics)

    async def request(
        self,
        event_type: str,
        data: Dict[str, Any],
        timeout: float = 30.0,
    ) -> Optional[Dict[str, Any]]:
        """
        RPC-style request/response.

        Publishes a request event and waits for a correlated response.

        Args:
            event_type: Request event type
            data: Request payload
            timeout: Timeout in seconds

        Returns:
            Response data if received, None on timeout
        """
        import asyncio
        from uuid import uuid4

        correlation_id = str(uuid4())
        response_received = asyncio.Event()
        response_data: Dict[str, Any] = {}

        # Create a one-time handler for the response
        async def handle_response(event: Dict[str, Any]) -> None:
            if event.get("correlation_id") == correlation_id:
                response_data.update(event.get("data", {}))
                response_received.set()

        # Register temporary handler
        response_type = event_type.replace(".request", ".response")
        original_handlers = self.event_client._handlers.get(response_type, [])
        self.event_client._handlers.setdefault(response_type, []).append(handle_response)

        try:
            # Publish request
            await self.publish(
                event_type=event_type,
                data=data,
                correlation_id=correlation_id,
            )

            # Wait for response
            try:
                await asyncio.wait_for(response_received.wait(), timeout=timeout)
                return response_data
            except asyncio.TimeoutError:
                logger.warning(f"Request {event_type} timed out after {timeout}s")
                return None
        finally:
            # Cleanup handler
            if handle_response in self.event_client._handlers.get(response_type, []):
                self.event_client._handlers[response_type].remove(handle_response)

    async def close(self) -> None:
        """Close the event client."""
        await self.event_client.disconnect()


@dataclass
class TrackerClient:
    """
    Observability & State Machine client.

    Powered by: Time-series DB (in production)

    Methods:
        start_plan(): Initialize execution trace
        emit_progress(): Log checkpoints
        detect_timeout(): Handle failures

    NOTE: Tracker Service is not yet implemented. This client provides
    a no-op implementation that logs operations for development.
    """
    base_url: str = field(default_factory=lambda: os.getenv("SOORMA_TRACKER_URL", "http://localhost:8084"))
    _http_client: Optional[httpx.AsyncClient] = field(default=None, repr=False)
    _use_noop: bool = field(default=True, repr=False)  # Use no-op by default until service is implemented

    async def _ensure_client(self) -> httpx.AsyncClient:
        if self._http_client is None:
            self._http_client = httpx.AsyncClient()
        return self._http_client

    async def start_plan(
        self,
        plan_id: str,
        agent_id: str,
        goal: str,
        tasks: List[Dict[str, Any]],
        metadata: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Initialize execution trace for a plan.

        Args:
            plan_id: Unique plan identifier
            agent_id: The planning agent ID
            goal: The goal being solved
            tasks: List of planned tasks
            metadata: Optional additional metadata

        Returns:
            True if plan was started
        """
        # No-op for development
        if self._use_noop:
            logger.debug(f"Tracker start_plan (noop): {plan_id} with {len(tasks)} tasks")
            return True

        client = await self._ensure_client()
        try:
            response = await client.post(
                f"{self.base_url}/v1/plans",
                json={
                    "plan_id": plan_id,
                    "agent_id": agent_id,
                    "goal": goal,
                    "tasks": tasks,
                    "metadata": metadata or {},
                },
                timeout=10.0,
            )
            return response.status_code in (200, 201)
        except Exception as e:
            logger.debug(f"Tracker start_plan failed (continuing): {e}")
            return True

    async def emit_progress(
        self,
        plan_id: str,
        task_id: str,
        status: str,
        progress: float = 0.0,
        message: Optional[str] = None,
        data: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Log a checkpoint/progress update.

        Args:
            plan_id: The plan being executed
            task_id: The specific task
            status: Status (pending, running, completed, failed)
            progress: Progress percentage (0.0 - 1.0)
            message: Optional status message
            data: Optional data payload

        Returns:
            True if progress was recorded
        """
        # No-op for development
        if self._use_noop:
            logger.debug(f"Tracker emit_progress (noop): {task_id} -> {status} ({progress:.0%})")
            return True

        client = await self._ensure_client()
        try:
            response = await client.post(
                f"{self.base_url}/v1/plans/{plan_id}/progress",
                json={
                    "task_id": task_id,
                    "status": status,
                    "progress": progress,
                    "message": message,
                    "data": data or {},
                },
                timeout=10.0,
            )
            return response.status_code == 200
        except Exception as e:
            logger.debug(f"Tracker emit_progress failed (continuing): {e}")
            return True

    async def complete_task(
        self,
        plan_id: str,
        task_id: str,
        result: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Mark a task as completed.

        Args:
            plan_id: The plan ID
            task_id: The task ID
            result: Optional result data

        Returns:
            True if task was marked complete
        """
        return await self.emit_progress(
            plan_id=plan_id,
            task_id=task_id,
            status="completed",
            progress=1.0,
            data=result,
        )

    async def fail_task(
        self,
        plan_id: str,
        task_id: str,
        error: str,
        data: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Mark a task as failed.

        Args:
            plan_id: The plan ID
            task_id: The task ID
            error: Error message
            data: Optional error data

        Returns:
            True if task was marked failed
        """
        return await self.emit_progress(
            plan_id=plan_id,
            task_id=task_id,
            status="failed",
            message=error,
            data=data,
        )

    async def get_plan_status(self, plan_id: str) -> Optional[Dict[str, Any]]:
        """
        Get the current status of a plan.

        Args:
            plan_id: The plan ID

        Returns:
            Plan status dict if found, None otherwise
        """
        # No-op for development
        if self._use_noop:
            logger.debug(f"Tracker get_plan_status (noop): {plan_id}")
            return {"plan_id": plan_id, "status": "unknown"}

        client = await self._ensure_client()
        try:
            response = await client.get(
                f"{self.base_url}/v1/plans/{plan_id}",
                timeout=10.0,
            )
            if response.status_code == 200:
                return response.json()
            return None
        except Exception as e:
            logger.debug(f"Tracker get_plan_status failed: {e}")
            return None

    async def close(self) -> None:
        """Close the HTTP client."""
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None


@dataclass
class PlatformContext:
    """
    Platform Context - The complete infrastructure access for agents.

    Every event handler receives a PlatformContext that provides access to
    all platform services. This eliminates configuration overhead and provides
    a consistent interface regardless of deployment environment.

    Attributes:
        registry: Service Discovery & Capabilities
        memory: Distributed State Management
        bus: Event Choreography
        tracker: Observability & State Machines

    Usage:
        @worker.on_task("schedule_technician")
        async def schedule_service(task, context: PlatformContext):
            # Service Discovery
            calendar_tool = await context.registry.find("calendar_api_tool")

            # Shared Memory
            vehicle = await context.memory.retrieve(f"vehicle:{task.data['vehicle_id']}")

            # Event Publishing
            await context.bus.publish("technician_scheduled", result)

            # Progress tracking (automatic for most cases)
            await context.tracker.emit_progress(
                plan_id=task.plan_id,
                task_id=task.id,
                status="completed",
            )
    """
    registry: RegistryClient = field(default_factory=RegistryClient)
    memory: MemoryClient = field(default_factory=MemoryClient)
    bus: BusClient = field(default_factory=BusClient)
    tracker: TrackerClient = field(default_factory=TrackerClient)

    @classmethod
    def from_env(cls) -> "PlatformContext":
        """
        Create a PlatformContext from environment variables.

        Environment variables:
            SOORMA_REGISTRY_URL: Registry service URL
            SOORMA_EVENT_SERVICE_URL: Event service URL
            SOORMA_MEMORY_URL: Memory service URL
            SOORMA_TRACKER_URL: Tracker service URL
        """
        event_client = EventClient(
            event_service_url=os.getenv("SOORMA_EVENT_SERVICE_URL", "http://localhost:8082"),
        )

        return cls(
            registry=RegistryClient(
                base_url=os.getenv("SOORMA_REGISTRY_URL", "http://localhost:8081"),
            ),
            memory=MemoryClient(
                base_url=os.getenv("SOORMA_MEMORY_URL", "http://localhost:8083"),
            ),
            bus=BusClient(event_client=event_client),
            tracker=TrackerClient(
                base_url=os.getenv("SOORMA_TRACKER_URL", "http://localhost:8084"),
            ),
        )

    async def close(self) -> None:
        """Close all clients."""
        await self.registry.close()
        await self.memory.close()
        await self.bus.close()
        await self.tracker.close()
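
Editor's note: the sketch below is a minimal, unverified usage example of the PlatformContext defined in this file, not part of the package. It assumes the localhost defaults baked into the clients above and a running Event Service behind EventClient (its connection behavior is not shown in this diff); the key, event name, and payload are hypothetical.

# Hypothetical standalone usage of soorma.context.PlatformContext (assumptions noted above).
import asyncio

from soorma.context import PlatformContext


async def main() -> None:
    # Build all clients from SOORMA_* environment variables, falling back to localhost defaults.
    context = PlatformContext.from_env()
    try:
        # Memory: stored in the local dev dict until the Memory Service ships (_use_local=True).
        await context.memory.store("vehicle:123", {"vin": "1HGCM82633A004352"})
        vehicle = await context.memory.retrieve("vehicle:123")

        # Registry: returns None if no agent advertises the capability or the Registry is unreachable.
        calendar_tool = await context.registry.find("calendar_api_tool")

        # Bus: topic is inferred by _infer_topic ("technician_scheduled" -> "business-facts");
        # this call assumes the Event Service is reachable at its default URL.
        await context.bus.publish("technician_scheduled", {"vehicle": vehicle, "tool": calendar_tool})
    finally:
        # Close all underlying HTTP/event clients.
        await context.close()


asyncio.run(main())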