fast-agent-mcp 0.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fast-agent-mcp might be problematic; see the package registry's advisory page for more details.
- fast_agent_mcp-0.0.7.dist-info/METADATA +322 -0
- fast_agent_mcp-0.0.7.dist-info/RECORD +100 -0
- fast_agent_mcp-0.0.7.dist-info/WHEEL +4 -0
- fast_agent_mcp-0.0.7.dist-info/entry_points.txt +5 -0
- fast_agent_mcp-0.0.7.dist-info/licenses/LICENSE +201 -0
- mcp_agent/__init__.py +0 -0
- mcp_agent/agents/__init__.py +0 -0
- mcp_agent/agents/agent.py +277 -0
- mcp_agent/app.py +303 -0
- mcp_agent/cli/__init__.py +0 -0
- mcp_agent/cli/__main__.py +4 -0
- mcp_agent/cli/commands/bootstrap.py +221 -0
- mcp_agent/cli/commands/config.py +11 -0
- mcp_agent/cli/commands/setup.py +229 -0
- mcp_agent/cli/main.py +68 -0
- mcp_agent/cli/terminal.py +24 -0
- mcp_agent/config.py +334 -0
- mcp_agent/console.py +28 -0
- mcp_agent/context.py +251 -0
- mcp_agent/context_dependent.py +48 -0
- mcp_agent/core/fastagent.py +1013 -0
- mcp_agent/eval/__init__.py +0 -0
- mcp_agent/event_progress.py +88 -0
- mcp_agent/executor/__init__.py +0 -0
- mcp_agent/executor/decorator_registry.py +120 -0
- mcp_agent/executor/executor.py +293 -0
- mcp_agent/executor/task_registry.py +34 -0
- mcp_agent/executor/temporal.py +405 -0
- mcp_agent/executor/workflow.py +197 -0
- mcp_agent/executor/workflow_signal.py +325 -0
- mcp_agent/human_input/__init__.py +0 -0
- mcp_agent/human_input/handler.py +49 -0
- mcp_agent/human_input/types.py +58 -0
- mcp_agent/logging/__init__.py +0 -0
- mcp_agent/logging/events.py +123 -0
- mcp_agent/logging/json_serializer.py +163 -0
- mcp_agent/logging/listeners.py +216 -0
- mcp_agent/logging/logger.py +365 -0
- mcp_agent/logging/rich_progress.py +120 -0
- mcp_agent/logging/tracing.py +140 -0
- mcp_agent/logging/transport.py +461 -0
- mcp_agent/mcp/__init__.py +0 -0
- mcp_agent/mcp/gen_client.py +85 -0
- mcp_agent/mcp/mcp_activity.py +18 -0
- mcp_agent/mcp/mcp_agent_client_session.py +242 -0
- mcp_agent/mcp/mcp_agent_server.py +56 -0
- mcp_agent/mcp/mcp_aggregator.py +394 -0
- mcp_agent/mcp/mcp_connection_manager.py +330 -0
- mcp_agent/mcp/stdio.py +104 -0
- mcp_agent/mcp_server_registry.py +275 -0
- mcp_agent/progress_display.py +10 -0
- mcp_agent/resources/examples/decorator/main.py +26 -0
- mcp_agent/resources/examples/decorator/optimizer.py +78 -0
- mcp_agent/resources/examples/decorator/orchestrator.py +68 -0
- mcp_agent/resources/examples/decorator/parallel.py +81 -0
- mcp_agent/resources/examples/decorator/router.py +56 -0
- mcp_agent/resources/examples/decorator/tiny.py +22 -0
- mcp_agent/resources/examples/mcp_researcher/main-evalopt.py +53 -0
- mcp_agent/resources/examples/mcp_researcher/main.py +38 -0
- mcp_agent/telemetry/__init__.py +0 -0
- mcp_agent/telemetry/usage_tracking.py +18 -0
- mcp_agent/workflows/__init__.py +0 -0
- mcp_agent/workflows/embedding/__init__.py +0 -0
- mcp_agent/workflows/embedding/embedding_base.py +61 -0
- mcp_agent/workflows/embedding/embedding_cohere.py +49 -0
- mcp_agent/workflows/embedding/embedding_openai.py +46 -0
- mcp_agent/workflows/evaluator_optimizer/__init__.py +0 -0
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +359 -0
- mcp_agent/workflows/intent_classifier/__init__.py +0 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +120 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +134 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +161 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +60 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +60 -0
- mcp_agent/workflows/llm/__init__.py +0 -0
- mcp_agent/workflows/llm/augmented_llm.py +645 -0
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +539 -0
- mcp_agent/workflows/llm/augmented_llm_openai.py +615 -0
- mcp_agent/workflows/llm/llm_selector.py +345 -0
- mcp_agent/workflows/llm/model_factory.py +175 -0
- mcp_agent/workflows/orchestrator/__init__.py +0 -0
- mcp_agent/workflows/orchestrator/orchestrator.py +407 -0
- mcp_agent/workflows/orchestrator/orchestrator_models.py +154 -0
- mcp_agent/workflows/orchestrator/orchestrator_prompts.py +113 -0
- mcp_agent/workflows/parallel/__init__.py +0 -0
- mcp_agent/workflows/parallel/fan_in.py +350 -0
- mcp_agent/workflows/parallel/fan_out.py +187 -0
- mcp_agent/workflows/parallel/parallel_llm.py +141 -0
- mcp_agent/workflows/router/__init__.py +0 -0
- mcp_agent/workflows/router/router_base.py +276 -0
- mcp_agent/workflows/router/router_embedding.py +240 -0
- mcp_agent/workflows/router/router_embedding_cohere.py +59 -0
- mcp_agent/workflows/router/router_embedding_openai.py +59 -0
- mcp_agent/workflows/router/router_llm.py +301 -0
- mcp_agent/workflows/swarm/__init__.py +0 -0
- mcp_agent/workflows/swarm/swarm.py +320 -0
- mcp_agent/workflows/swarm/swarm_anthropic.py +42 -0
- mcp_agent/workflows/swarm/swarm_openai.py +41 -0
|
@@ -0,0 +1,461 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Transports for the Logger module for MCP Agent, including:
|
|
3
|
+
- Local + optional remote event transport
|
|
4
|
+
- Async event bus
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import json
|
|
9
|
+
from abc import ABC, abstractmethod
|
|
10
|
+
from typing import Dict, List, Protocol
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
import aiohttp
|
|
14
|
+
from opentelemetry import trace
|
|
15
|
+
from rich.json import JSON
|
|
16
|
+
from rich.text import Text
|
|
17
|
+
|
|
18
|
+
from mcp_agent.config import LoggerSettings
|
|
19
|
+
from mcp_agent.console import console
|
|
20
|
+
from mcp_agent.logging.events import Event, EventFilter
|
|
21
|
+
from mcp_agent.logging.json_serializer import JSONSerializer
|
|
22
|
+
from mcp_agent.logging.listeners import EventListener, LifecycleAwareListener
|
|
23
|
+
from rich import print
|
|
24
|
+
import traceback
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class EventTransport(Protocol):
    """
    Pluggable interface for sending events to a remote or external system
    (Kafka, RabbitMQ, REST, etc.).

    Implementations are free to buffer, batch, or drop events; callers only
    await `send_event` and do not inspect a result.
    """

    async def send_event(self, event: Event):
        """
        Send an event to the external system.

        Args:
            event: Event to send.
        """
        ...
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class FilteredEventTransport(EventTransport, ABC):
    """
    Base transport that applies an optional EventFilter before delivery.

    Subclasses implement `send_matched_event`; events rejected by the
    filter are silently dropped.
    """

    def __init__(self, event_filter: EventFilter | None = None):
        # A falsy/absent filter means "deliver everything".
        self.filter = event_filter

    async def send_event(self, event: Event):
        """Forward the event unless a filter is set and rejects it."""
        if self.filter and not self.filter.matches(event):
            return
        await self.send_matched_event(event)

    @abstractmethod
    async def send_matched_event(self, event: Event):
        """Send an event to the external system."""
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class NoOpTransport(FilteredEventTransport):
    """Default transport that does nothing (purely local)."""

    async def send_matched_event(self, event):
        """Intentionally discard the event; logging stays in-process."""
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class ConsoleTransport(FilteredEventTransport):
    """Transport that renders matching events on the shared rich console."""

    def __init__(self, event_filter: EventFilter | None = None):
        super().__init__(event_filter=event_filter)
        # Use shared console instances
        self._serializer = JSONSerializer()
        # Rich style per log level; unknown levels fall back to plain white.
        self.log_level_styles: Dict[str, str] = {
            "info": "bold green",
            "debug": "dim white",
            "warning": "bold yellow",
            "error": "bold red",
        }

    async def send_matched_event(self, event: Event):
        """Pretty-print one event line, plus its data payload as JSON if present."""
        level_style = self.log_level_styles.get(event.type, "white")
        output_console = console

        # Append the event name to the namespace when one is set.
        full_namespace = (
            f"{event.namespace}.{event.name}" if event.name else event.namespace
        )

        output_console.print(
            Text.assemble(
                (f"[{event.type.upper()}] ", level_style),
                (f"{event.timestamp.replace(microsecond=0).isoformat()} ", "cyan"),
                (f"{full_namespace} ", "magenta"),
                (f"- {event.message}", "white"),
            )
        )

        # Render structured payload beneath the log line.
        if event.data:
            output_console.print(JSON.from_data(self._serializer(event.data)))
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class FileTransport(FilteredEventTransport):
    """Transport that writes events to a file with proper formatting (JSONL)."""

    def __init__(
        self,
        filepath: str | Path,
        event_filter: EventFilter | None = None,
        mode: str = "a",
        encoding: str = "utf-8",
    ):
        """Initialize FileTransport.

        Args:
            filepath: Path to the log file. If relative, the current working directory will be used
            event_filter: Optional filter for events
            mode: File open mode ('a' for append, 'w' to truncate once, then append)
            encoding: File encoding to use
        """
        super().__init__(event_filter=event_filter)
        self.filepath = Path(filepath)
        self.mode = mode
        self.encoding = encoding
        self._serializer = JSONSerializer()

        # Create directory if it doesn't exist
        self.filepath.parent.mkdir(parents=True, exist_ok=True)

    async def send_matched_event(self, event: Event) -> None:
        """Write matched event to log file as one compact JSON line.

        NOTE(review): the open/write is synchronous, so it blocks the event
        loop for the duration of the write — acceptable for local logs, but
        worth confirming for high-volume use.

        Args:
            event: Event to write to file
        """
        # Format the log entry: level, timestamp, dotted namespace, message.
        namespace = event.namespace
        if event.name:
            namespace = f"{namespace}.{event.name}"

        log_entry = {
            "level": event.type.upper(),
            "timestamp": event.timestamp.isoformat(),
            "namespace": namespace,
            "message": event.message,
        }

        # Add event data if present
        if event.data:
            log_entry["data"] = self._serializer(event.data)

        try:
            with open(self.filepath, mode=self.mode, encoding=self.encoding) as f:
                # Write the log entry as compact JSON (JSONL format)
                f.write(json.dumps(log_entry, separators=(",", ":")) + "\n")
                f.flush()  # Ensure writing to disk
        except IOError as e:
            # Log error without recursion
            print(f"Error writing to log file {self.filepath}: {e}")
        else:
            # Bug fix: the file is reopened for EVERY event, so a truncating
            # mode such as "w" would wipe the file on each write and keep only
            # the most recent entry. Honor the truncating mode for the first
            # successful write only, then switch to append.
            if "a" not in self.mode:
                self.mode = "a"

    async def close(self) -> None:
        """Clean up resources if needed."""
        pass  # File handles are automatically closed after each write

    @property
    def is_closed(self) -> bool:
        """Check if transport is closed."""
        return False  # Since we open/close per write
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
class HTTPTransport(FilteredEventTransport):
    """
    Sends events to an HTTP endpoint in batches.
    Useful for sending to remote logging services like Elasticsearch, etc.
    """

    def __init__(
        self,
        endpoint: str,
        headers: Dict[str, str] | None = None,  # fix: was annotated Dict but defaulted to None
        batch_size: int = 100,
        timeout: float = 5.0,
        event_filter: EventFilter | None = None,
    ):
        """Initialize HTTPTransport.

        Args:
            endpoint: URL that receives batched events via POST.
            headers: Optional HTTP headers sent with every request.
            batch_size: Number of buffered events that triggers a flush.
            timeout: Total request timeout in seconds.
            event_filter: Optional filter for events.
        """
        super().__init__(event_filter=event_filter)
        self.endpoint = endpoint
        self.headers = headers or {}
        self.batch_size = batch_size
        self.timeout = timeout

        self.batch: List[Event] = []
        # Serializes all access to self.batch across concurrent senders.
        self.lock = asyncio.Lock()
        self._session: aiohttp.ClientSession | None = None
        self._serializer = JSONSerializer()

    async def start(self):
        """Initialize HTTP session."""
        if not self._session:
            self._session = aiohttp.ClientSession(
                headers=self.headers, timeout=aiohttp.ClientTimeout(total=self.timeout)
            )

    async def stop(self):
        """Close HTTP session and flush any remaining events."""
        # Fix: hold the batch lock while flushing so a concurrent
        # send_matched_event cannot mutate self.batch mid-flush.
        async with self.lock:
            if self.batch:
                await self._flush()
        if self._session:
            await self._session.close()
            self._session = None

    async def send_matched_event(self, event: Event):
        """Add event to batch, flush if batch is full."""
        async with self.lock:
            self.batch.append(event)
            if len(self.batch) >= self.batch_size:
                await self._flush()

    async def _flush(self):
        """Send batch of events to HTTP endpoint.

        Callers must hold self.lock. The batch is cleared even on failure,
        so a persistently failing endpoint cannot grow the buffer unbounded
        (events are dropped after one attempt).
        """
        if not self.batch:
            return

        if not self._session:
            await self.start()

        try:
            # Convert events to JSON-serializable dicts
            events_data = [
                {
                    "timestamp": event.timestamp.isoformat(),
                    "type": event.type,
                    "name": event.name,
                    "namespace": event.namespace,
                    "message": event.message,
                    "data": self._serializer(event.data),
                    "trace_id": event.trace_id,
                    "span_id": event.span_id,
                    # NOTE(review): .dict() is the pydantic v1 API — confirm the
                    # context model still exposes it (v2 renamed it model_dump()).
                    "context": event.context.dict() if event.context else None,
                }
                for event in self.batch
            ]

            async with self._session.post(self.endpoint, json=events_data) as response:
                if response.status >= 400:
                    text = await response.text()
                    print(
                        f"Error sending log events to {self.endpoint}. "
                        f"Status: {response.status}, Response: {text}"
                    )
        except Exception as e:
            print(f"Error sending log events to {self.endpoint}: {e}")
        finally:
            self.batch.clear()
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
class AsyncEventBus:
    """
    Async event bus with local in-process listeners + optional remote transport.
    Also injects distributed tracing (trace_id, span_id) if there's a current span.

    Events flow two ways on emit(): immediately to the transport, and through
    an internal queue to named listeners, drained by a background task.
    """

    # Process-wide singleton, managed through AsyncEventBus.get().
    _instance = None

    def __init__(self, transport: EventTransport | None = None):
        # Remote/external delivery; defaults to a no-op (purely local).
        self.transport: EventTransport = transport or NoOpTransport()
        # Listeners keyed by name so they can be removed individually.
        self.listeners: Dict[str, EventListener] = {}
        # Decouples emit() from listener processing in _process_events().
        self._queue = asyncio.Queue()
        self._task: asyncio.Task | None = None
        self._running = False
        # NOTE(review): _stop_event is cleared/set below but never awaited in
        # this class — presumably for external observers; confirm before removing.
        self._stop_event = asyncio.Event()

        # Store the loop we're created on
        try:
            self._loop = asyncio.get_running_loop()
        except RuntimeError:
            # Constructed from synchronous code: create and install a loop.
            self._loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self._loop)

    @classmethod
    def get(cls, transport: EventTransport | None = None) -> "AsyncEventBus":
        """Get the singleton instance of the event bus."""
        if cls._instance is None:
            cls._instance = cls(transport=transport)
        elif transport is not None:
            # Update transport if provided
            cls._instance.transport = transport
        return cls._instance

    async def start(self):
        """Start the event bus and all lifecycle-aware listeners."""
        if self._running:
            return

        # Start each lifecycle-aware listener
        for listener in self.listeners.values():
            if isinstance(listener, LifecycleAwareListener):
                await listener.start()

        # Clear stop event and start processing
        self._stop_event.clear()
        self._running = True
        self._task = asyncio.create_task(self._process_events())

    async def stop(self):
        """Stop the event bus and all lifecycle-aware listeners.

        Best-effort shutdown: waits up to 5s for the queue to drain, then
        discards remaining events; waits up to 5s for the processing task
        and 3s per listener. Errors are printed, never raised.
        """
        if not self._running:
            return

        # Signal processing to stop
        self._running = False
        self._stop_event.set()

        # Try to process remaining items with a timeout
        if not self._queue.empty():
            try:
                # Give some time for remaining items to be processed
                await asyncio.wait_for(self._queue.join(), timeout=5.0)
            except asyncio.TimeoutError:
                # If we timeout, drain the queue to prevent deadlock
                while not self._queue.empty():
                    try:
                        self._queue.get_nowait()
                        self._queue.task_done()
                    except asyncio.QueueEmpty:
                        break
            except Exception as e:
                print(f"Error during queue cleanup: {e}")

        # Cancel and wait for task with timeout
        if self._task and not self._task.done():
            self._task.cancel()
            try:
                # Wait for task to complete with timeout
                await asyncio.wait_for(self._task, timeout=5.0)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                pass  # Task was cancelled or timed out
            except Exception as e:
                print(f"Error cancelling process task: {e}")
            finally:
                self._task = None

        # Stop each lifecycle-aware listener
        for listener in self.listeners.values():
            if isinstance(listener, LifecycleAwareListener):
                try:
                    await asyncio.wait_for(listener.stop(), timeout=3.0)
                except asyncio.TimeoutError:
                    print(f"Timeout stopping listener: {listener}")
                except Exception as e:
                    print(f"Error stopping listener: {e}")

    async def emit(self, event: Event):
        """Emit an event to all listeners and transport."""
        # Inject current tracing info if available
        span = trace.get_current_span()
        if span.is_recording():
            ctx = span.get_span_context()
            # Hex-format per W3C Trace Context: 32-char trace id, 16-char span id.
            event.trace_id = f"{ctx.trace_id:032x}"
            event.span_id = f"{ctx.span_id:016x}"

        # Forward to transport first (immediate processing)
        try:
            await self.transport.send_event(event)
        except Exception as e:
            print(f"Error in transport.send_event: {e}")

        # Then queue for listeners
        await self._queue.put(event)

    def add_listener(self, name: str, listener: EventListener):
        """Add a listener to the event bus."""
        self.listeners[name] = listener

    def remove_listener(self, name: str):
        """Remove a listener from the event bus."""
        self.listeners.pop(name, None)

    async def _process_events(self):
        """Process events from the queue until stopped.

        Runs as a background task started by start(). Listener exceptions are
        printed with a stack trace but never propagate; after the running flag
        drops, any queued events get one final best-effort delivery pass.
        """
        while self._running:
            try:
                # Use wait_for with a timeout to allow checking running state
                try:
                    event = await asyncio.wait_for(self._queue.get(), timeout=0.1)
                except asyncio.TimeoutError:
                    continue

                # Process the event through all listeners
                tasks = []
                for listener in self.listeners.values():
                    try:
                        tasks.append(listener.handle_event(event))
                    except Exception as e:
                        print(f"Error creating listener task: {e}")

                if tasks:
                    # gather with return_exceptions so one failing listener
                    # cannot prevent the others from seeing the event.
                    results = await asyncio.gather(*tasks, return_exceptions=True)
                    for r in results:
                        if isinstance(r, Exception):
                            print(f"Error in listener: {r}")
                            print(
                                f"Stacktrace: {''.join(traceback.format_exception(type(r), r, r.__traceback__))}"
                            )

                self._queue.task_done()

            except asyncio.CancelledError:
                break
            except Exception as e:
                print(f"Error in event processing loop: {e}")
                continue

        # Process remaining events in queue
        while not self._queue.empty():
            try:
                event = self._queue.get_nowait()
                tasks = []
                for listener in self.listeners.values():
                    try:
                        tasks.append(listener.handle_event(event))
                    except Exception:
                        pass
                if tasks:
                    await asyncio.gather(*tasks, return_exceptions=True)
                self._queue.task_done()
            except asyncio.QueueEmpty:
                break
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
def create_transport(
    settings: LoggerSettings, event_filter: EventFilter | None = None
) -> EventTransport:
    """Build the EventTransport described by *settings*.

    Raises:
        ValueError: for an unknown transport type, or when a required
            setting (file path / HTTP endpoint) is missing.
    """
    transport_type = settings.type

    if transport_type == "none":
        return NoOpTransport(event_filter=event_filter)

    if transport_type == "console":
        return ConsoleTransport(event_filter=event_filter)

    if transport_type == "file":
        if not settings.path:
            raise ValueError("File path required for file transport")
        return FileTransport(
            filepath=settings.path,
            event_filter=event_filter,
        )

    if transport_type == "http":
        if not settings.http_endpoint:
            raise ValueError("HTTP endpoint required for HTTP transport")
        return HTTPTransport(
            endpoint=settings.http_endpoint,
            headers=settings.http_headers,
            batch_size=settings.batch_size,
            timeout=settings.http_timeout,
            event_filter=event_filter,
        )

    raise ValueError(f"Unsupported transport type: {settings.type}")
|
|
File without changes
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
from contextlib import asynccontextmanager
|
|
2
|
+
from datetime import timedelta
|
|
3
|
+
from typing import AsyncGenerator, Callable
|
|
4
|
+
|
|
5
|
+
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
|
|
6
|
+
from mcp import ClientSession
|
|
7
|
+
|
|
8
|
+
from mcp_agent.logging.logger import get_logger
|
|
9
|
+
from mcp_agent.mcp_server_registry import ServerRegistry
|
|
10
|
+
from mcp_agent.mcp.mcp_agent_client_session import MCPAgentClientSession
|
|
11
|
+
|
|
12
|
+
logger = get_logger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@asynccontextmanager
async def gen_client(
    server_name: str,
    server_registry: ServerRegistry,
    client_session_factory: Callable[
        [MemoryObjectReceiveStream, MemoryObjectSendStream, timedelta | None],
        ClientSession,
    ] = MCPAgentClientSession,
) -> AsyncGenerator[ClientSession, None]:
    """
    Yield a temporary client session to the named server.

    Startup, initialization, and the message receive loop are handled by the
    registry's context manager; the session is torn down when the `async with`
    block exits. For persistent connections, use connect() or
    MCPConnectionManager instead.
    """
    if not server_registry:
        raise ValueError(
            "Server registry not found in the context. Please specify one either on this method, or in the context."
        )

    server_cm = server_registry.initialize_server(
        server_name=server_name,
        client_session_factory=client_session_factory,
    )
    async with server_cm as session:
        yield session
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
async def connect(
    server_name: str,
    server_registry: ServerRegistry,
    client_session_factory: Callable[
        [MemoryObjectReceiveStream, MemoryObjectSendStream, timedelta | None],
        ClientSession,
    ] = MCPAgentClientSession,
) -> ClientSession:
    """
    Create a persistent client session to the named server.

    The registry's connection manager owns the connection lifetime; callers
    should later release it via disconnect(). A custom ClientSession factory
    may be supplied to customize session behavior.
    """
    if not server_registry:
        raise ValueError(
            "Server registry not found in the context. Please specify one either on this method, or in the context."
        )

    connection = await server_registry.connection_manager.get_server(
        server_name=server_name,
        client_session_factory=client_session_factory,
    )
    return connection.session
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
async def disconnect(
    server_name: str | None,
    server_registry: ServerRegistry,
) -> None:
    """
    Disconnect from the specified server. If server_name is None (or empty),
    disconnect from all servers.
    """
    if not server_registry:
        raise ValueError(
            "Server registry not found in the context. Please specify one either on this method, or in the context."
        )

    manager = server_registry.connection_manager
    if not server_name:
        await manager.disconnect_all_servers()
    else:
        await manager.disconnect_server(server_name=server_name)
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# import functools
|
|
2
|
+
# from temporalio import activity
|
|
3
|
+
# from typing import Dict, Any, List, Callable, Awaitable
|
|
4
|
+
# from .gen_client import gen_client
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# def mcp_activity(server_name: str, mcp_call: Callable):
|
|
8
|
+
# def decorator(func):
|
|
9
|
+
# @activity.defn
|
|
10
|
+
# @functools.wraps(func)
|
|
11
|
+
# async def wrapper(*activity_args, **activity_kwargs):
|
|
12
|
+
# params = await func(*activity_args, **activity_kwargs)
|
|
13
|
+
# async with gen_client(server_name) as client:
|
|
14
|
+
# return await mcp_call(client, params)
|
|
15
|
+
|
|
16
|
+
# return wrapper
|
|
17
|
+
|
|
18
|
+
# return decorator
|