basion_agent-0.4.0-py3-none-any.whl
- basion_agent/__init__.py +62 -0
- basion_agent/agent.py +360 -0
- basion_agent/agent_state_client.py +149 -0
- basion_agent/app.py +502 -0
- basion_agent/artifact.py +58 -0
- basion_agent/attachment_client.py +153 -0
- basion_agent/checkpoint_client.py +169 -0
- basion_agent/checkpointer.py +16 -0
- basion_agent/cli.py +139 -0
- basion_agent/conversation.py +103 -0
- basion_agent/conversation_client.py +86 -0
- basion_agent/conversation_message.py +48 -0
- basion_agent/exceptions.py +36 -0
- basion_agent/extensions/__init__.py +1 -0
- basion_agent/extensions/langgraph.py +526 -0
- basion_agent/extensions/pydantic_ai.py +180 -0
- basion_agent/gateway_client.py +531 -0
- basion_agent/gateway_pb2.py +73 -0
- basion_agent/gateway_pb2_grpc.py +101 -0
- basion_agent/heartbeat.py +84 -0
- basion_agent/loki_handler.py +355 -0
- basion_agent/memory.py +73 -0
- basion_agent/memory_client.py +155 -0
- basion_agent/message.py +333 -0
- basion_agent/py.typed +0 -0
- basion_agent/streamer.py +184 -0
- basion_agent/structural/__init__.py +6 -0
- basion_agent/structural/artifact.py +94 -0
- basion_agent/structural/base.py +71 -0
- basion_agent/structural/stepper.py +125 -0
- basion_agent/structural/surface.py +90 -0
- basion_agent/structural/text_block.py +96 -0
- basion_agent/tools/__init__.py +19 -0
- basion_agent/tools/container.py +46 -0
- basion_agent/tools/knowledge_graph.py +306 -0
- basion_agent-0.4.0.dist-info/METADATA +880 -0
- basion_agent-0.4.0.dist-info/RECORD +41 -0
- basion_agent-0.4.0.dist-info/WHEEL +5 -0
- basion_agent-0.4.0.dist-info/entry_points.txt +2 -0
- basion_agent-0.4.0.dist-info/licenses/LICENSE +21 -0
- basion_agent-0.4.0.dist-info/top_level.txt +1 -0
basion_agent/app.py
ADDED
@@ -0,0 +1,502 @@
"""Main application class for Basion AI Agent framework (async)."""

import asyncio
import logging
import signal
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from typing import Optional, Dict, Any, List, Set

from .agent import Agent
from .gateway_client import GatewayClient
from .conversation_client import ConversationClient
from .memory_client import MemoryClient
from .attachment_client import AttachmentClient
from .exceptions import RegistrationError, ConfigurationError

logger = logging.getLogger(__name__)


class BasionAgentApp:
    """
    Main application class for Basion AI Agent framework.

    Example:
        from basion_agent import BasionAgentApp

        app = BasionAgentApp(
            gateway_url="agent-gateway:8080",
            api_key="agent-key-1"
        )

        # Register agents
        medical_agent = app.register_me(
            name="medical-qa-agent",
            about="Medical Q&A",
            document="Answers medical questions"
        )

        @medical_agent.on_message
        def handle_message(message, sender):
            # Access conversation history
            history = message.conversation.get_history(limit=5)

            # Stream response
            with medical_agent.streamer(message) as s:
                s.stream("Processing...")
                s.stream("Done!")

        app.run()
    """

    def __init__(
        self,
        gateway_url: str,
        api_key: str,
        heartbeat_interval: int = 60,
        max_concurrent_tasks: int = 100,
        error_message_template: str = (
            "I encountered an error while processing your message. "
            "Please try again or contact support if the issue persists."
        ),
        secure: bool = False,
        enable_remote_logging: bool = False,
        remote_log_level: int = logging.INFO,
        remote_log_batch_size: int = 100,
        remote_log_flush_interval: float = 5.0,
    ):
        """
        Initialize Basion Agent App.

        Args:
            gateway_url: Gateway endpoint (e.g., "agent-gateway:8080")
            api_key: API key for gateway authentication
            heartbeat_interval: Heartbeat interval in seconds
            max_concurrent_tasks: Maximum number of concurrent message handlers (default: 100)
            error_message_template: Template for error messages sent to users
            secure: If True, use TLS for gRPC and HTTPS for HTTP requests
            enable_remote_logging: Send logs to Loki via gateway (default: False)
            remote_log_level: Minimum log level for remote logging (default: INFO)
            remote_log_batch_size: Logs per batch (default: 100)
            remote_log_flush_interval: Seconds between flushes (default: 5.0)
        """
        self.gateway_url = gateway_url
        self.api_key = api_key
        self.heartbeat_interval = heartbeat_interval
        self.max_concurrent_tasks = max_concurrent_tasks
        self.error_message_template = error_message_template
        self.secure = secure

        # Create gateway client
        self.gateway_client = GatewayClient(gateway_url, api_key, secure=secure)

        # Create conversation client using gateway proxy
        self.conversation_client = ConversationClient(
            self.gateway_client.conversation_store_url
        )

        # Create memory client using gateway proxy
        self.memory_client = MemoryClient(
            self.gateway_client.ai_memory_url
        )

        # Create attachment client using gateway proxy
        self.attachment_client = AttachmentClient(
            self.gateway_client.attachment_url
        )

        self._agents: List[Agent] = []
        self._running = False
        self._connected = False
        self._active_tasks: Set[asyncio.Task] = set()
        self._executor = ThreadPoolExecutor(max_workers=4, thread_name_prefix="consume")
        self._task_start_times: Dict[asyncio.Task, float] = {}
        self._hung_task_threshold = 300.0  # 5 minutes

        # Remote logging setup
        self._loki_handler: Optional[Any] = None
        self._pending_remote_logging: Optional[dict] = None

        if enable_remote_logging:
            self._pending_remote_logging = {
                "log_level": remote_log_level,
                "batch_size": remote_log_batch_size,
                "flush_interval": remote_log_flush_interval,
            }
            logger.info("Remote logging will be enabled after agent registration")

        logger.info(f"BasionAgentApp initialized (gateway: {gateway_url}, max_concurrent: {max_concurrent_tasks})")

    def register_me(
        self,
        name: str,
        about: str,
        document: str,
        representation_name: Optional[str] = None,
        base_url: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
        related_pages: Optional[List[Dict[str, str]]] = None,
        category_name: Optional[str] = None,
        tag_names: Optional[List[str]] = None,
        example_prompts: Optional[List[str]] = None,
        is_experimental: Optional[bool] = None,
        force_update: Optional[bool] = None,
    ) -> Agent:
        """
        Register an agent with the backend.

        Example:
            medical_agent = app.register_me(
                name="medical-qa",
                about="Medical question answering",
                document="Answers medical questions using AI",
                representation_name="Medical Q&A Bot",
                base_url="http://my-agent-frontend:3000",
                related_pages=[
                    {"name": "Dashboard", "endpoint": "/dashboard"},
                    {"name": "Reports", "endpoint": "/reports"},
                ]
            )

            @medical_agent.on_message
            def handle_message(message, sender):
                with medical_agent.streamer(message) as s:
                    s.stream("Processing...")

        Args:
            name: Unique agent name (used for routing)
            about: Short description
            document: Detailed documentation
            representation_name: Display name (optional)
            base_url: Base URL for agent's frontend service (optional)
                Used for iframe artifact URLs
            metadata: Additional metadata (optional)
            related_pages: List of related pages with 'name' and 'endpoint' keys (optional)
                Example: [{"name": "Dashboard", "endpoint": "/dashboard"}]
            category_name: Category for the agent in kebab-case (optional, auto-created if needed)
            tag_names: List of tags in kebab-case (optional, auto-created if needed)
            example_prompts: List of example prompts for users (optional)
            is_experimental: Whether the agent is experimental (optional, defaults to false)
            force_update: Bypass content hash check and force update (optional, defaults to false)

        Returns:
            Agent instance
        """
        # Register agent with backend via gateway proxy
        logger.info(f"Registering agent '{name}'...")

        payload = {
            "name": name,
            "about": about,
            "document": document,
            "representationName": representation_name or name,
        }
        if base_url:
            payload["baseUrl"] = base_url
        if metadata:
            payload["metadata"] = metadata
        if related_pages:
            payload["relatedPages"] = related_pages
        if category_name:
            payload["categoryName"] = category_name
        if tag_names:
            payload["tagNames"] = tag_names
        if example_prompts:
            payload["examplePrompts"] = example_prompts
        if is_experimental is not None:
            payload["isExperimental"] = is_experimental
        if force_update is not None:
            payload["forceUpdate"] = force_update

        try:
            response = self.gateway_client.http_post(
                "ai-inventory",
                "/api/v1/agents/register",
                json=payload,
                timeout=30
            )
            if response.status_code == 201:
                agent_data = response.json()
                logger.info(f"Agent '{name}' registered successfully")
            else:
                raise RegistrationError(f"Registration failed: {response.text}")
        except Exception as e:
            raise RegistrationError(f"Failed to register agent '{name}': {e}")

        # Set up remote logging now that we have agent name
        if self._pending_remote_logging and not self._loki_handler:
            from .loki_handler import LokiLogHandler

            config = self._pending_remote_logging
            self._loki_handler = LokiLogHandler(
                gateway_client=self.gateway_client,
                agent_name=name,
                batch_size=config["batch_size"],
                flush_interval=config["flush_interval"],
                level=config["log_level"],
            )

            # Add to root logger to capture all logs
            logging.getLogger().addHandler(self._loki_handler)
            logger.info(f"Remote logging enabled for agent '{name}'")

            # Clear pending config
            self._pending_remote_logging = None

        # Create agent instance
        agent = Agent(
            name=name,
            gateway_client=self.gateway_client,
            agent_data=agent_data,
            heartbeat_interval=self.heartbeat_interval,
            conversation_client=self.conversation_client,
            memory_client=self.memory_client,
            attachment_client=self.attachment_client,
            error_message_template=self.error_message_template,
        )

        self._agents.append(agent)
        logger.info(f"Agent '{name}' ready")

        return agent

    def run(self):
        """Start all registered agents and block until shutdown."""
        if not self._agents:
            raise ConfigurationError(
                "No agents registered. Use app.register_me() to register agents."
            )

        logger.info(f"Starting {len(self._agents)} agent(s)...")
        self._running = True

        # Connect to gateway with all agent names
        agent_names = [agent.name for agent in self._agents]
        try:
            self.gateway_client.connect(agent_names)
            self._connected = True
        except Exception as e:
            raise ConfigurationError(f"Failed to connect to gateway: {e}")

        # Setup signal handlers
        signal.signal(signal.SIGINT, self._shutdown_handler)
        signal.signal(signal.SIGTERM, self._shutdown_handler)

        # Initialize all agents with the connected gateway
        for agent in self._agents:
            agent._initialize_with_gateway()

        # Start all agents in background threads (they just stay alive)
        threads = []
        for agent in self._agents:
            thread = threading.Thread(target=agent.start_consuming, daemon=True)
            thread.start()
            threads.append(thread)

        logger.info("All agents started. Press Ctrl+C to stop.")

        # Run the single consume loop in the main thread
        try:
            self._consume_loop()
        except KeyboardInterrupt:
            logger.info("Interrupted by user")
        finally:
            self._shutdown()

    def _consume_loop(self):
        """Main consume loop that dispatches messages to agents concurrently."""
        # Run the async consume loop
        asyncio.run(self._consume_loop_async())

    async def _consume_loop_async(self):
        """Async consume loop with concurrent message processing."""
        # Build topic -> agent mapping
        topic_to_agent = {
            f"{agent.name}.inbox": agent for agent in self._agents
        }

        logger.info(
            f"Starting consume loop for topics: {list(topic_to_agent.keys())} "
            f"(max_concurrent: {self.max_concurrent_tasks})"
        )

        loop = asyncio.get_event_loop()

        try:
            while self._running:
                try:
                    # Run sync consume_one in executor to not block the event loop
                    msg = await loop.run_in_executor(
                        self._executor,
                        lambda: self.gateway_client.consume_one(timeout=0.5)
                    )

                    if msg:
                        agent = topic_to_agent.get(msg.topic)
                        if agent:
                            # Wait if we've hit max concurrency
                            while len(self._active_tasks) >= self.max_concurrent_tasks:
                                if not self._active_tasks:
                                    break

                                # Wait with timeout to detect hung tasks
                                done, self._active_tasks = await asyncio.wait(
                                    self._active_tasks,
                                    return_when=asyncio.FIRST_COMPLETED,
                                    timeout=30.0  # 30 second timeout
                                )

                                # Check for errors in completed tasks
                                for task in done:
                                    self._task_start_times.pop(task, None)
                                    if task.exception():
                                        logger.error(
                                            f"Task failed with exception: {task.exception()}"
                                        )

                                # If timeout with no completions, check for hung tasks
                                if not done:
                                    current_time = time.time()
                                    hung_tasks = [
                                        task for task in self._active_tasks
                                        if current_time - self._task_start_times.get(task, current_time) > self._hung_task_threshold
                                    ]

                                    if hung_tasks:
                                        logger.error(
                                            f"Detected {len(hung_tasks)} hung tasks (>{self._hung_task_threshold}s). "
                                            f"Total active: {len(self._active_tasks)}"
                                        )
                                        # Optional: cancel hung tasks
                                        # for task in hung_tasks:
                                        #     task.cancel()
                                        #     self._task_start_times.pop(task, None)
                                    else:
                                        logger.warning(
                                            f"No tasks completed in 30s. Active tasks: {len(self._active_tasks)} "
                                            f"(may indicate slow handlers or blocking code)"
                                        )

                            # Create task for concurrent processing
                            task = asyncio.create_task(
                                self._handle_message_safe(agent, msg)
                            )
                            self._active_tasks.add(task)
                            self._task_start_times[task] = time.time()
                            # Auto-remove from set when done
                            task.add_done_callback(self._active_tasks.discard)

                            logger.debug(
                                f"Dispatched message to {agent.name}, "
                                f"active tasks: {len(self._active_tasks)}"
                            )
                        else:
                            logger.warning(f"No agent for topic: {msg.topic}")

                except Exception as e:
                    if self._running:
                        logger.error(f"Error in consume loop: {e}")
                        # Try to reconnect if connection lost
                        await asyncio.sleep(1.0)
                        try:
                            self._reconnect()
                        except Exception as re:
                            logger.error(f"Reconnect failed: {re}")
                            await asyncio.sleep(5.0)

        finally:
            # Wait for all active tasks to complete on shutdown
            if self._active_tasks:
                logger.info(
                    f"Waiting for {len(self._active_tasks)} active tasks to complete..."
                )
                await asyncio.gather(*self._active_tasks, return_exceptions=True)
                logger.info("All tasks completed")

    async def _handle_message_safe(self, agent: Agent, msg) -> None:
        """Wrapper that handles errors from message handlers gracefully."""
        try:
            await agent._handle_message_async(msg)
        except Exception as e:
            logger.error(
                f"Error handling message for {agent.name}: {e}",
                exc_info=True
            )

    def _reconnect(self):
        """Attempt to reconnect to the gateway."""
        logger.info("Attempting to reconnect to gateway...")
        agent_names = [agent.name for agent in self._agents]
        self.gateway_client.close()
        self.gateway_client.connect(agent_names)
        logger.info("Reconnected to gateway successfully")

    def _shutdown_handler(self, signum, frame):
        """Handle shutdown signals."""
        logger.info(f"Received signal {signum}, initiating shutdown...")
        self._running = False

    def _shutdown(self):
        """Shutdown all agents gracefully."""
        logger.info("Shutting down all agents...")
        self._running = False

        # Shutdown the executor
        try:
            self._executor.shutdown(wait=False)
        except Exception as e:
            logger.error(f"Error shutting down executor: {e}")

        # Close async sessions - need to create a new loop since asyncio.run closed its loop
        try:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                loop.run_until_complete(self._close_async_sessions())
            finally:
                loop.close()
        except Exception as e:
            logger.error(f"Error closing async sessions: {e}")

        for agent in self._agents:
            try:
                agent.shutdown()
            except Exception as e:
                logger.error(f"Error shutting down agent {agent.name}: {e}")

        # Close Loki handler
        if self._loki_handler:
            try:
                logger.info("Flushing and closing remote logging handler...")
                self._loki_handler.close()
            except Exception as e:
                logger.error(f"Error closing Loki handler: {e}")

        # Close gateway connection
        if self._connected:
            self.gateway_client.close()

        logger.info("All agents shut down successfully")

    async def _close_async_sessions(self):
        """Close all async HTTP sessions."""
        try:
            await self.gateway_client.close_async()
        except Exception as e:
            logger.error(f"Error closing gateway async session: {e}")

        try:
            await self.conversation_client.close()
        except Exception as e:
            logger.error(f"Error closing conversation client: {e}")

        try:
            await self.memory_client.close()
        except Exception as e:
            logger.error(f"Error closing memory client: {e}")

        try:
            await self.attachment_client.close()
        except Exception as e:
            logger.error(f"Error closing attachment client: {e}")
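A minimal end-to-end usage sketch, assembled from the docstring examples in app.py above. The gateway address, API key, and agent details are placeholders, not values shipped with the package:

from basion_agent import BasionAgentApp

app = BasionAgentApp(gateway_url="agent-gateway:8080", api_key="agent-key-1")

# Register a (hypothetical) demo agent with the backend
demo_agent = app.register_me(
    name="demo-agent",
    about="Demo agent",
    document="Echoes incoming messages",
)

@demo_agent.on_message
def handle_message(message, sender):
    # Stream a reply back to the sender over the gateway
    with demo_agent.streamer(message) as s:
        s.stream("Processing...")
        s.stream("Done!")

app.run()  # blocks until SIGINT/SIGTERM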
basion_agent/artifact.py
ADDED
@@ -0,0 +1,58 @@
"""Artifact class for attaching files/URLs to messages."""

from dataclasses import dataclass
from typing import Optional, Dict, Any
from enum import Enum


class ArtifactType(str, Enum):
    """Artifact type enum for categorizing attachments."""

    DOCUMENT = "document"  # PDF, DOC, etc.
    IMAGE = "image"  # PNG, JPG, GIF, etc.
    IFRAME = "iframe"  # Embeddable content (maps, charts)
    VIDEO = "video"  # Video files or embeds
    AUDIO = "audio"  # Audio files
    CODE = "code"  # Code snippets with syntax highlighting
    LINK = "link"  # External links with preview
    FILE = "file"  # Generic file download


@dataclass
class Artifact:
    """Represents an artifact (URL attachment) for a message.

    Artifacts are URLs to files, images, iframes, or other resources
    that agents can attach to their responses.

    Example:
        artifact = Artifact(
            url="https://example.com/report.pdf",
            type=ArtifactType.DOCUMENT,
            title="Monthly Report",
            metadata={"size": 1024000, "mimeType": "application/pdf"}
        )
    """

    url: str
    type: ArtifactType
    title: Optional[str] = None
    description: Optional[str] = None
    metadata: Optional[Dict[str, Any]] = None
    position: Optional[int] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dict for JSON serialization."""
        d: Dict[str, Any] = {
            "url": self.url,
            "type": self.type.value,
        }
        if self.title:
            d["title"] = self.title
        if self.description:
            d["description"] = self.description
        if self.metadata:
            d["metadata"] = self.metadata
        if self.position is not None:
            d["position"] = self.position
        return d
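For reference, a small sketch of how an Artifact serializes via to_dict(), based on the class definition above. The URL and metadata are illustrative values only:

from basion_agent.artifact import Artifact, ArtifactType

artifact = Artifact(
    url="https://example.com/report.pdf",
    type=ArtifactType.DOCUMENT,
    title="Monthly Report",
    metadata={"size": 1024000, "mimeType": "application/pdf"},
)

# Optional fields left unset (description, position) are omitted from the payload
print(artifact.to_dict())
# {'url': 'https://example.com/report.pdf', 'type': 'document',
#  'title': 'Monthly Report', 'metadata': {'size': 1024000, 'mimeType': 'application/pdf'}}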