agentfield-0.1.22rc2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentfield/__init__.py +66 -0
- agentfield/agent.py +3569 -0
- agentfield/agent_ai.py +1125 -0
- agentfield/agent_cli.py +386 -0
- agentfield/agent_field_handler.py +494 -0
- agentfield/agent_mcp.py +534 -0
- agentfield/agent_registry.py +29 -0
- agentfield/agent_server.py +1185 -0
- agentfield/agent_utils.py +269 -0
- agentfield/agent_workflow.py +323 -0
- agentfield/async_config.py +278 -0
- agentfield/async_execution_manager.py +1227 -0
- agentfield/client.py +1447 -0
- agentfield/connection_manager.py +280 -0
- agentfield/decorators.py +527 -0
- agentfield/did_manager.py +337 -0
- agentfield/dynamic_skills.py +304 -0
- agentfield/execution_context.py +255 -0
- agentfield/execution_state.py +453 -0
- agentfield/http_connection_manager.py +429 -0
- agentfield/litellm_adapters.py +140 -0
- agentfield/logger.py +249 -0
- agentfield/mcp_client.py +204 -0
- agentfield/mcp_manager.py +340 -0
- agentfield/mcp_stdio_bridge.py +550 -0
- agentfield/memory.py +723 -0
- agentfield/memory_events.py +489 -0
- agentfield/multimodal.py +173 -0
- agentfield/multimodal_response.py +403 -0
- agentfield/pydantic_utils.py +227 -0
- agentfield/rate_limiter.py +280 -0
- agentfield/result_cache.py +441 -0
- agentfield/router.py +190 -0
- agentfield/status.py +70 -0
- agentfield/types.py +710 -0
- agentfield/utils.py +26 -0
- agentfield/vc_generator.py +464 -0
- agentfield/vision.py +198 -0
- agentfield-0.1.22rc2.dist-info/METADATA +102 -0
- agentfield-0.1.22rc2.dist-info/RECORD +42 -0
- agentfield-0.1.22rc2.dist-info/WHEEL +5 -0
- agentfield-0.1.22rc2.dist-info/top_level.txt +1 -0
agentfield/memory_events.py
ADDED
@@ -0,0 +1,489 @@
import asyncio
import json
import re
from datetime import datetime
from functools import wraps
from typing import Any, Callable, Dict, List, Optional, Union

import websockets

from agentfield.logger import log_error, log_info
from .types import MemoryChangeEvent


class PatternMatcher:
    """Utility class for wildcard pattern matching."""

    @staticmethod
    def matches_pattern(pattern: str, key: str) -> bool:
        """
        Check if a key matches a wildcard pattern.

        Args:
            pattern: Pattern with wildcards (e.g., "customer_*", "user_*.preferences")
            key: Key to match against

        Returns:
            True if key matches pattern, False otherwise
        """
        # Convert wildcard pattern to regex
        regex_pattern = pattern.replace("*", ".*")
        regex_pattern = f"^{regex_pattern}$"

        try:
            return bool(re.match(regex_pattern, key))
        except re.error:
            # If regex is invalid, fall back to exact match
            return pattern == key


class EventSubscription:
    """Represents an event subscription with patterns and callback."""

    def __init__(
        self,
        patterns: List[str],
        callback: Callable,
        scope: Optional[str] = None,
        scope_id: Optional[str] = None,
    ):
        self.patterns = patterns
        self.callback = callback
        self.scope = scope
        self.scope_id = scope_id
        self.active = True

    def matches_event(self, event: MemoryChangeEvent) -> bool:
        """Check if this subscription matches the given event."""
        if not self.active:
            return False

        # Check scope if specified
        if self.scope and event.scope != self.scope:
            return False
        if self.scope_id and event.scope_id != self.scope_id:
            return False

        # Check if any pattern matches
        for pattern in self.patterns:
            if PatternMatcher.matches_pattern(pattern, event.key):
                return True

        return False

    def unsubscribe(self):
        """Mark this subscription as inactive."""
        self.active = False


class MemoryEventClient:
    """Enhanced memory event client with pattern-based subscriptions and event history."""

    def __init__(self, base_url: str, execution_context, api_key: Optional[str] = None):
        self.base_url = base_url.replace("http", "ws")
        self.execution_context = execution_context
        self.api_key = api_key
        self.websocket: Optional[websockets.WebSocketClientProtocol] = None
        self.is_listening = False
        # Lazily initialize the lock inside an active event loop to avoid
        # `RuntimeError: There is no current event loop` in synchronous contexts.
        self._connect_lock: Optional[asyncio.Lock] = None
        self.subscriptions: List[EventSubscription] = []
        self._reconnect_attempts = 0
        self._max_reconnect_attempts = 5
        self._reconnect_delay = 1.0

    def _is_connected(self) -> bool:
        """
        Safely determine if the WebSocket connection is open.

        Supports both legacy WebSocketClientProtocol (with `open`) and
        newer ClientConnection objects (with `closed`).
        """
        if not self.websocket:
            return False

        open_attr = getattr(self.websocket, "open", None)
        if isinstance(open_attr, bool):
            return open_attr

        closed_attr = getattr(self.websocket, "closed", None)
        if isinstance(closed_attr, bool):
            return not closed_attr

        # Fallback: assume connected if we have a websocket object
        return True

    async def connect(
        self,
        patterns: Optional[List[str]] = None,
        scope: Optional[str] = None,
        scope_id: Optional[str] = None,
    ):
        """
        Establishes a WebSocket connection with optional filtering.

        Args:
            patterns: List of patterns to subscribe to
            scope: Scope to filter events by
            scope_id: Scope ID to filter events by
        """
        if self._connect_lock is None:
            self._connect_lock = asyncio.Lock()

        async with self._connect_lock:
            if self._is_connected():
                return

            try:
                headers = self.execution_context.to_headers()
                if self.api_key:
                    headers["X-API-Key"] = self.api_key
                ws_url = f"{self.base_url}/api/v1/memory/events/ws"

                # Add query parameters for server-side filtering
                query_params = []
                if patterns:
                    query_params.append(f"patterns={','.join(patterns)}")
                if scope:
                    query_params.append(f"scope={scope}")
                if scope_id:
                    query_params.append(f"scope_id={scope_id}")

                if query_params:
                    ws_url += "?" + "&".join(query_params)

                self.websocket = await websockets.connect(
                    ws_url, additional_headers=headers
                )
                self.is_listening = True
                self._reconnect_attempts = 0
                asyncio.create_task(self._listen())

            except Exception as e:
                log_error(f"Failed to connect to memory events: {e}")
                await self._handle_reconnect()

    async def _listen(self):
        """Listens for incoming messages and dispatches them to subscribers."""
        if not self.websocket:
            return

        while self.is_listening:
            try:
                message = await self.websocket.recv()
                event_data = json.loads(message)
                event = MemoryChangeEvent.from_dict(event_data)

                # Dispatch to matching subscriptions
                for subscription in self.subscriptions:
                    if subscription.matches_event(event):
                        try:
                            asyncio.create_task(subscription.callback(event))
                        except Exception as e:
                            log_error(f"Error in event callback: {e}")

            except websockets.exceptions.ConnectionClosed:
                # Connection closed cleanly or unexpectedly; try to reconnect
                self.is_listening = False
                self.websocket = None
                if self._reconnect_attempts < self._max_reconnect_attempts:
                    await self._handle_reconnect()
                break
            except Exception as e:
                # Any unexpected error in the listener should reset the connection
                log_error(f"Error in event listener: {e}")
                self.is_listening = False
                if self.websocket:
                    try:
                        await self.websocket.close()
                    except Exception:
                        pass
                self.websocket = None
                if self._reconnect_attempts < self._max_reconnect_attempts:
                    await self._handle_reconnect()
                break

    async def _handle_reconnect(self):
        """Handle automatic reconnection with exponential backoff."""
        if self._reconnect_attempts >= self._max_reconnect_attempts:
            log_error(
                f"Max reconnection attempts reached ({self._max_reconnect_attempts})"
            )
            return

        self._reconnect_attempts += 1
        delay = self._reconnect_delay * (2 ** (self._reconnect_attempts - 1))

        log_info(
            f"Reconnecting to memory events (attempt {self._reconnect_attempts}/{self._max_reconnect_attempts}) in {delay}s..."
        )
        await asyncio.sleep(delay)

        try:
            await self.connect()
        except Exception as e:
            log_error(f"Reconnection failed: {e}")

    def subscribe(
        self,
        patterns: Union[str, List[str]],
        callback: Callable,
        scope: Optional[str] = None,
        scope_id: Optional[str] = None,
    ) -> EventSubscription:
        """
        Subscribe to memory change events with pattern matching.

        Args:
            patterns: Pattern(s) to match against memory keys
            callback: Function to call when matching events occur
            scope: Optional scope to filter by
            scope_id: Optional scope ID to filter by

        Returns:
            EventSubscription object that can be used to unsubscribe
        """
        if isinstance(patterns, str):
            patterns = [patterns]

        subscription = EventSubscription(patterns, callback, scope, scope_id)
        self.subscriptions.append(subscription)

        # If not connected, establish (or re-establish) the WebSocket connection.
        # We rely on client-side pattern matching, so we don't need to send
        # pattern filters to the server.
        if not self._is_connected():
            asyncio.create_task(self.connect())

        return subscription

    def on_change(self, patterns: Union[str, List[str]]):
        """
        Decorator for subscribing to memory change events.

        Args:
            patterns: Pattern(s) to match against memory keys

        Returns:
            Decorator function
        """

        def decorator(func: Callable) -> Callable:
            @wraps(func)
            async def wrapper(event: MemoryChangeEvent):
                return await func(event)

            # Subscribe to the patterns
            self.subscribe(patterns, wrapper)

            # Mark the function as a memory event listener using setattr to avoid type errors
            setattr(wrapper, "_memory_event_listener", True)
            setattr(
                wrapper,
                "_memory_event_patterns",
                patterns if isinstance(patterns, list) else [patterns],
            )

            return wrapper

        return decorator

    async def history(
        self,
        patterns: Optional[Union[str, List[str]]] = None,
        since: Optional[datetime] = None,
        limit: int = 100,
        scope: Optional[str] = None,
        scope_id: Optional[str] = None,
    ) -> List[MemoryChangeEvent]:
        """
        Get historical memory change events.

        Args:
            patterns: Pattern(s) to filter events by
            since: Only return events after this timestamp
            limit: Maximum number of events to return
            scope: Scope to filter by
            scope_id: Scope ID to filter by

        Returns:
            List of historical memory change events
        """
        try:
            import httpx

            async with httpx.AsyncClient() as client:
                headers = self.execution_context.to_headers()
                if self.api_key:
                    headers["X-API-Key"] = self.api_key

                # Build query parameters
                params: Dict[str, Any] = {"limit": limit}
                if patterns:
                    if isinstance(patterns, str):
                        patterns = [patterns]
                    params["patterns"] = ",".join(patterns)
                if since:
                    params["since"] = since.isoformat()
                if scope:
                    params["scope"] = scope
                if scope_id:
                    params["scope_id"] = scope_id

                # Make request to history endpoint
                http_url = self.base_url.replace("ws", "http")
                response = await client.get(
                    f"{http_url}/api/v1/memory/events/history",
                    params=params,
                    headers=headers,
                    timeout=10.0,
                )
                response.raise_for_status()

                # Parse response
                events_data = response.json()
                events = []

                if isinstance(events_data, list):
                    for event_data in events_data:
                        try:
                            event = MemoryChangeEvent.from_dict(event_data)
                            events.append(event)
                        except Exception as e:
                            log_error(f"Failed to parse event: {e}")

                return events

        except ImportError:
            # Fallback to synchronous requests
            import requests

            headers = self.execution_context.to_headers()
            if self.api_key:
                headers["X-API-Key"] = self.api_key

            # Build query parameters
            params = {"limit": limit}
            if patterns:
                if isinstance(patterns, str):
                    patterns = [patterns]
                params["patterns"] = ",".join(patterns)
            if since:
                params["since"] = since.isoformat()
            if scope:
                params["scope"] = scope
            if scope_id:
                params["scope_id"] = scope_id

            # Make request to history endpoint
            http_url = self.base_url.replace("ws", "http")
            response = requests.get(
                f"{http_url}/api/v1/memory/events/history",
                params=params,
                headers=headers,
                timeout=10.0,
            )
            response.raise_for_status()

            # Parse response
            events_data = response.json()
            events = []

            if isinstance(events_data, list):
                for event_data in events_data:
                    try:
                        event = MemoryChangeEvent.from_dict(event_data)
                        events.append(event)
                    except Exception as e:
                        log_error(f"Failed to parse event: {e}")

            return events

        except Exception as e:
            log_error(f"Failed to get event history: {e}")
            return []

    def unsubscribe_all(self):
        """Unsubscribe from all event subscriptions."""
        for subscription in self.subscriptions:
            subscription.unsubscribe()
        self.subscriptions.clear()

    async def close(self):
        """Closes the WebSocket connection and cleans up subscriptions."""
        self.is_listening = False
        self.unsubscribe_all()

        if self.websocket:
            await self.websocket.close()
            self.websocket = None


class ScopedMemoryEventClient:
    """Memory event client scoped to a specific context."""

    def __init__(self, event_client: MemoryEventClient, scope: str, scope_id: str):
        self.event_client = event_client
        self.scope = scope
        self.scope_id = scope_id

    def on_change(self, patterns: Union[str, List[str]]):
        """
        Decorator for subscribing to scoped memory change events.

        Args:
            patterns: Pattern(s) to match against memory keys

        Returns:
            Decorator function
        """

        def decorator(func: Callable) -> Callable:
            @wraps(func)
            async def wrapper(event: MemoryChangeEvent):
                return await func(event)

            # Subscribe to the patterns with scope filtering
            self.event_client.subscribe(
                patterns, wrapper, scope=self.scope, scope_id=self.scope_id
            )

            # Mark the function as a memory event listener using setattr to avoid type errors
            setattr(wrapper, "_memory_event_listener", True)
            setattr(
                wrapper,
                "_memory_event_patterns",
                patterns if isinstance(patterns, list) else [patterns],
            )
            setattr(wrapper, "_memory_event_scope", self.scope)
            setattr(wrapper, "_memory_event_scope_id", self.scope_id)

            return wrapper

        return decorator

    async def history(
        self,
        patterns: Optional[Union[str, List[str]]] = None,
        since: Optional[datetime] = None,
        limit: int = 100,
    ) -> List[MemoryChangeEvent]:
        """
        Get historical memory change events for this scope.

        Args:
            patterns: Pattern(s) to filter events by
            since: Only return events after this timestamp
            limit: Maximum number of events to return

        Returns:
            List of historical memory change events
        """
        return await self.event_client.history(
            patterns=patterns,
            since=since,
            limit=limit,
            scope=self.scope,
            scope_id=self.scope_id,
        )
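
Editor's note: a minimal usage sketch of the subscription API above, not part of the wheel. The server URL, the DummyContext stand-in for the execution context, and the user_*/customer_* keys are illustrative assumptions; only MemoryEventClient, subscribe, on_change, connect, and close come from the module shown.

# sketch_memory_events.py -- hypothetical example, assumes an agentfield server
# is reachable at the placeholder URL below.
import asyncio

from agentfield.memory_events import MemoryEventClient
from agentfield.types import MemoryChangeEvent


class DummyContext:
    """Illustrative stand-in for the execution context; the client only calls to_headers()."""

    def to_headers(self) -> dict:
        return {}  # real contexts would carry execution/trace headers


async def main():
    client = MemoryEventClient("http://localhost:8080", DummyContext())

    # Callback-style subscription: wildcards are matched client-side by
    # PatternMatcher, so "user_*" catches user_profile, user_settings, etc.
    async def on_user_change(event: MemoryChangeEvent):
        print("changed:", event.key)

    client.subscribe("user_*", on_user_change)

    # Decorator-style subscription over several patterns.
    @client.on_change(["customer_*", "order_*"])
    async def on_business_change(event: MemoryChangeEvent):
        print("business key changed:", event.key)

    await client.connect()   # opens the WebSocket and starts _listen()
    await asyncio.sleep(30)  # let events flow for a while
    await client.close()     # stops listening and clears subscriptions


if __name__ == "__main__":
    asyncio.run(main())
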
agentfield/multimodal.py
ADDED
@@ -0,0 +1,173 @@
import base64
from pathlib import Path
from typing import Literal, Optional, Union

from pydantic import BaseModel, Field


class Text(BaseModel):
    """Represents text content in a multimodal prompt."""

    type: Literal["text"] = "text"
    text: str = Field(..., description="The text content.")


class Image(BaseModel):
    """Represents image content in a multimodal prompt."""

    type: Literal["image_url"] = "image_url"
    image_url: Union[str, dict] = Field(
        ...,
        description="The URL of the image, or a dictionary with 'url' and optional 'detail' (e.g., {'url': 'https://example.com/image.jpg', 'detail': 'high'}).",
    )

    @classmethod
    def from_file(cls, file_path: Union[str, Path], detail: str = "high") -> "Image":
        """Create Image from local file by converting to base64 data URL."""
        file_path = Path(file_path)
        if not file_path.exists():
            raise FileNotFoundError(f"Image file not found: {file_path}")

        # Read and encode image
        with open(file_path, "rb") as f:
            image_data = base64.b64encode(f.read()).decode()

        # Determine MIME type from extension
        ext = file_path.suffix.lower()
        mime_types = {
            ".jpg": "image/jpeg",
            ".jpeg": "image/jpeg",
            ".png": "image/png",
            ".gif": "image/gif",
            ".webp": "image/webp",
            ".bmp": "image/bmp",
        }
        mime_type = mime_types.get(ext, "image/jpeg")

        data_url = f"data:{mime_type};base64,{image_data}"
        return cls(image_url={"url": data_url, "detail": detail})

    @classmethod
    def from_url(cls, url: str, detail: str = "high") -> "Image":
        """Create Image from URL."""
        return cls(image_url={"url": url, "detail": detail})


class Audio(BaseModel):
    """Represents audio content in a multimodal prompt."""

    type: Literal["input_audio"] = "input_audio"
    input_audio: dict = Field(
        ..., description="Audio input data with 'data' (base64) and 'format' fields."
    )

    @classmethod
    def from_file(
        cls, file_path: Union[str, Path], format: Optional[str] = None
    ) -> "Audio":
        """Create Audio from local file by converting to base64."""
        file_path = Path(file_path)
        if not file_path.exists():
            raise FileNotFoundError(f"Audio file not found: {file_path}")

        # Auto-detect format from extension if not provided
        if format is None:
            ext = file_path.suffix.lower().lstrip(".")
            format = ext if ext in ["wav", "mp3", "flac", "ogg"] else "wav"

        # Read and encode audio
        with open(file_path, "rb") as f:
            audio_data = base64.b64encode(f.read()).decode()

        return cls(input_audio={"data": audio_data, "format": format})

    @classmethod
    def from_url(cls, url: str, format: str = "wav") -> "Audio":
        """Create Audio from URL (downloads and converts to base64)."""
        try:
            import requests

            response = requests.get(url)
            response.raise_for_status()
            audio_data = base64.b64encode(response.content).decode()
            return cls(input_audio={"data": audio_data, "format": format})
        except ImportError:
            raise ImportError("URL download requires requests: pip install requests")


class File(BaseModel):
    """Represents a generic file content in a multimodal prompt."""

    type: Literal["file"] = "file"
    file: Union[str, dict] = Field(
        ...,
        description="The URL of the file, or a dictionary with 'url' and optional 'mime_type'.",
    )

    @classmethod
    def from_file(
        cls, file_path: Union[str, Path], mime_type: Optional[str] = None
    ) -> "File":
        """Create File from local file."""
        file_path = Path(file_path)
        if not file_path.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        # Auto-detect MIME type if not provided
        if mime_type is None:
            import mimetypes

            mime_type, _ = mimetypes.guess_type(str(file_path))
            mime_type = mime_type or "application/octet-stream"

        # For now, just store the file path - could be enhanced to base64 encode
        return cls(
            file={"url": f"file://{file_path.absolute()}", "mime_type": mime_type}
        )

    @classmethod
    def from_url(cls, url: str, mime_type: Optional[str] = None) -> "File":
        """Create File from URL."""
        return cls(file={"url": url, "mime_type": mime_type})


# Union type for all multimodal content types
MultimodalContent = Union[Text, Image, Audio, File]


# Convenience functions for creating multimodal content
def text(content: str) -> Text:
    """Create text content."""
    return Text(text=content)


def image_from_file(file_path: Union[str, Path], detail: str = "high") -> Image:
    """Create image content from local file."""
    return Image.from_file(file_path, detail)


def image_from_url(url: str, detail: str = "high") -> Image:
    """Create image content from URL."""
    return Image.from_url(url, detail)


def audio_from_file(file_path: Union[str, Path], format: Optional[str] = None) -> Audio:
    """Create audio content from local file."""
    return Audio.from_file(file_path, format)


def audio_from_url(url: str, format: str = "wav") -> Audio:
    """Create audio content from URL."""
    return Audio.from_url(url, format)


def file_from_path(
    file_path: Union[str, Path], mime_type: Optional[str] = None
) -> File:
    """Create file content from local file."""
    return File.from_file(file_path, mime_type)


def file_from_url(url: str, mime_type: Optional[str] = None) -> File:
    """Create file content from URL."""
    return File.from_url(url, mime_type)
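
Editor's note: a short sketch of how these helpers might be combined into a content list, not part of the wheel. The file paths and URL are placeholders, and the serialization call depends on the installed pydantic major version.

# sketch_multimodal.py -- hypothetical example using the convenience helpers above.
from typing import List

from agentfield.multimodal import (
    MultimodalContent,
    audio_from_file,
    image_from_file,
    image_from_url,
    text,
)

# Each helper returns a pydantic model whose serialized form carries a "type"
# discriminator ("text", "image_url", "input_audio", "file"), resembling
# OpenAI-style content parts.
content: List[MultimodalContent] = [
    text("Describe what you see and hear."),
    image_from_file("examples/photo.png", detail="low"),  # base64 data URL
    image_from_url("https://example.com/image.jpg"),      # plain URL reference
    audio_from_file("examples/clip.mp3"),                 # format auto-detected
]

for part in content:
    print(part.model_dump())  # pydantic v2; use part.dict() on pydantic v1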