fast-agent-mcp 0.0.7 (fast_agent_mcp-0.0.7-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fast-agent-mcp might be problematic.
- fast_agent_mcp-0.0.7.dist-info/METADATA +322 -0
- fast_agent_mcp-0.0.7.dist-info/RECORD +100 -0
- fast_agent_mcp-0.0.7.dist-info/WHEEL +4 -0
- fast_agent_mcp-0.0.7.dist-info/entry_points.txt +5 -0
- fast_agent_mcp-0.0.7.dist-info/licenses/LICENSE +201 -0
- mcp_agent/__init__.py +0 -0
- mcp_agent/agents/__init__.py +0 -0
- mcp_agent/agents/agent.py +277 -0
- mcp_agent/app.py +303 -0
- mcp_agent/cli/__init__.py +0 -0
- mcp_agent/cli/__main__.py +4 -0
- mcp_agent/cli/commands/bootstrap.py +221 -0
- mcp_agent/cli/commands/config.py +11 -0
- mcp_agent/cli/commands/setup.py +229 -0
- mcp_agent/cli/main.py +68 -0
- mcp_agent/cli/terminal.py +24 -0
- mcp_agent/config.py +334 -0
- mcp_agent/console.py +28 -0
- mcp_agent/context.py +251 -0
- mcp_agent/context_dependent.py +48 -0
- mcp_agent/core/fastagent.py +1013 -0
- mcp_agent/eval/__init__.py +0 -0
- mcp_agent/event_progress.py +88 -0
- mcp_agent/executor/__init__.py +0 -0
- mcp_agent/executor/decorator_registry.py +120 -0
- mcp_agent/executor/executor.py +293 -0
- mcp_agent/executor/task_registry.py +34 -0
- mcp_agent/executor/temporal.py +405 -0
- mcp_agent/executor/workflow.py +197 -0
- mcp_agent/executor/workflow_signal.py +325 -0
- mcp_agent/human_input/__init__.py +0 -0
- mcp_agent/human_input/handler.py +49 -0
- mcp_agent/human_input/types.py +58 -0
- mcp_agent/logging/__init__.py +0 -0
- mcp_agent/logging/events.py +123 -0
- mcp_agent/logging/json_serializer.py +163 -0
- mcp_agent/logging/listeners.py +216 -0
- mcp_agent/logging/logger.py +365 -0
- mcp_agent/logging/rich_progress.py +120 -0
- mcp_agent/logging/tracing.py +140 -0
- mcp_agent/logging/transport.py +461 -0
- mcp_agent/mcp/__init__.py +0 -0
- mcp_agent/mcp/gen_client.py +85 -0
- mcp_agent/mcp/mcp_activity.py +18 -0
- mcp_agent/mcp/mcp_agent_client_session.py +242 -0
- mcp_agent/mcp/mcp_agent_server.py +56 -0
- mcp_agent/mcp/mcp_aggregator.py +394 -0
- mcp_agent/mcp/mcp_connection_manager.py +330 -0
- mcp_agent/mcp/stdio.py +104 -0
- mcp_agent/mcp_server_registry.py +275 -0
- mcp_agent/progress_display.py +10 -0
- mcp_agent/resources/examples/decorator/main.py +26 -0
- mcp_agent/resources/examples/decorator/optimizer.py +78 -0
- mcp_agent/resources/examples/decorator/orchestrator.py +68 -0
- mcp_agent/resources/examples/decorator/parallel.py +81 -0
- mcp_agent/resources/examples/decorator/router.py +56 -0
- mcp_agent/resources/examples/decorator/tiny.py +22 -0
- mcp_agent/resources/examples/mcp_researcher/main-evalopt.py +53 -0
- mcp_agent/resources/examples/mcp_researcher/main.py +38 -0
- mcp_agent/telemetry/__init__.py +0 -0
- mcp_agent/telemetry/usage_tracking.py +18 -0
- mcp_agent/workflows/__init__.py +0 -0
- mcp_agent/workflows/embedding/__init__.py +0 -0
- mcp_agent/workflows/embedding/embedding_base.py +61 -0
- mcp_agent/workflows/embedding/embedding_cohere.py +49 -0
- mcp_agent/workflows/embedding/embedding_openai.py +46 -0
- mcp_agent/workflows/evaluator_optimizer/__init__.py +0 -0
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +359 -0
- mcp_agent/workflows/intent_classifier/__init__.py +0 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +120 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +134 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +161 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +60 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +60 -0
- mcp_agent/workflows/llm/__init__.py +0 -0
- mcp_agent/workflows/llm/augmented_llm.py +645 -0
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +539 -0
- mcp_agent/workflows/llm/augmented_llm_openai.py +615 -0
- mcp_agent/workflows/llm/llm_selector.py +345 -0
- mcp_agent/workflows/llm/model_factory.py +175 -0
- mcp_agent/workflows/orchestrator/__init__.py +0 -0
- mcp_agent/workflows/orchestrator/orchestrator.py +407 -0
- mcp_agent/workflows/orchestrator/orchestrator_models.py +154 -0
- mcp_agent/workflows/orchestrator/orchestrator_prompts.py +113 -0
- mcp_agent/workflows/parallel/__init__.py +0 -0
- mcp_agent/workflows/parallel/fan_in.py +350 -0
- mcp_agent/workflows/parallel/fan_out.py +187 -0
- mcp_agent/workflows/parallel/parallel_llm.py +141 -0
- mcp_agent/workflows/router/__init__.py +0 -0
- mcp_agent/workflows/router/router_base.py +276 -0
- mcp_agent/workflows/router/router_embedding.py +240 -0
- mcp_agent/workflows/router/router_embedding_cohere.py +59 -0
- mcp_agent/workflows/router/router_embedding_openai.py +59 -0
- mcp_agent/workflows/router/router_llm.py +301 -0
- mcp_agent/workflows/swarm/__init__.py +0 -0
- mcp_agent/workflows/swarm/swarm.py +320 -0
- mcp_agent/workflows/swarm/swarm_anthropic.py +42 -0
- mcp_agent/workflows/swarm/swarm_openai.py +41 -0
mcp_agent/logging/logger.py
@@ -0,0 +1,365 @@
"""
Logger module for the MCP Agent, which provides:
- Local + optional remote event transport
- Async event bus
- OpenTelemetry tracing decorators (for distributed tracing)
- Automatic injection of trace_id/span_id into events
- Developer-friendly Logger that can be used anywhere
"""

import asyncio
import threading
import time

from typing import Any, Dict

from contextlib import asynccontextmanager, contextmanager

from mcp_agent.logging.events import Event, EventContext, EventFilter, EventType
from mcp_agent.logging.listeners import (
    BatchingListener,
    LoggingListener,
    ProgressListener,
)
from mcp_agent.logging.transport import AsyncEventBus, EventTransport


class Logger:
    """
    Developer-friendly logger that sends events to the AsyncEventBus.
    - `type` is a broad category (INFO, ERROR, etc.).
    - `name` can be a custom domain-specific event name, e.g. "ORDER_PLACED".
    """

    def __init__(self, namespace: str):
        self.namespace = namespace
        self.event_bus = AsyncEventBus.get()

    def _ensure_event_loop(self):
        """Ensure we have an event loop we can use."""
        try:
            return asyncio.get_running_loop()
        except RuntimeError:
            # If no loop is running, create a new one
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            return loop

    def _emit_event(self, event: Event):
        """Emit an event by running it in the event loop."""
        loop = self._ensure_event_loop()
        if loop.is_running():
            # If we're in a thread with a running loop, schedule the coroutine
            asyncio.create_task(self.event_bus.emit(event))
        else:
            # If no loop is running, run it until the emit completes
            loop.run_until_complete(self.event_bus.emit(event))

    def event(
        self,
        etype: EventType,
        ename: str | None,
        message: str,
        context: EventContext | None,
        data: dict,
    ):
        """Create and emit an event."""
        evt = Event(
            type=etype,
            name=ename,
            namespace=self.namespace,
            message=message,
            context=context,
            data=data,
        )
        self._emit_event(evt)

    def debug(
        self,
        message: str,
        name: str | None = None,
        context: EventContext = None,
        **data,
    ):
        """Log a debug message."""
        self.event("debug", name, message, context, data)

    def info(
        self,
        message: str,
        name: str | None = None,
        context: EventContext = None,
        **data,
    ):
        """Log an info message."""
        self.event("info", name, message, context, data)

    def warning(
        self,
        message: str,
        name: str | None = None,
        context: EventContext = None,
        **data,
    ):
        """Log a warning message."""
        self.event("warning", name, message, context, data)

    def error(
        self,
        message: str,
        name: str | None = None,
        context: EventContext = None,
        **data,
    ):
        """Log an error message."""
        self.event("error", name, message, context, data)

    def progress(
        self,
        message: str,
        name: str | None = None,
        percentage: float = None,
        context: EventContext = None,
        **data,
    ):
        """Log a progress message."""
        merged_data = dict(percentage=percentage, **data)
        self.event("progress", name, message, context, merged_data)


@contextmanager
def event_context(
    logger: Logger,
    message: str,
    event_type: EventType = "info",
    name: str | None = None,
    **data,
):
    """
    Times a synchronous block, logs an event after completion.
    Because logger methods are async, we schedule the final log.
    """
    start_time = time.time()
    try:
        yield
    finally:
        duration = time.time() - start_time

        logger.event(
            event_type,
            name,
            f"{message} finished in {duration:.3f}s",
            None,
            {"duration": duration, **data},
        )


# TODO: saqadri - check if we need this
@asynccontextmanager
async def async_event_context(
    logger: Logger,
    message: str,
    event_type: EventType = "info",
    name: str | None = None,
    **data,
):
    """
    Times an asynchronous block, logs an event after completion.
    Because logger methods are async, we schedule the final log.
    """
    start_time = time.time()
    try:
        yield
    finally:
        duration = time.time() - start_time
        logger.event(
            event_type,
            name,
            f"{message} finished in {duration:.3f}s",
            None,
            {"duration": duration, **data},
        )


class LoggingConfig:
    """Global configuration for the logging system."""

    _initialized = False

    @classmethod
    async def configure(
        cls,
        event_filter: EventFilter | None = None,
        transport: EventTransport | None = None,
        batch_size: int = 100,
        flush_interval: float = 2.0,
        **kwargs: Any,
    ):
        """
        Configure the logging system.

        Args:
            event_filter: Default filter for all loggers
            transport: Transport for sending events to external systems
            batch_size: Default batch size for batching listener
            flush_interval: Default flush interval for batching listener
            **kwargs: Additional configuration options
        """
        if cls._initialized:
            return

        bus = AsyncEventBus.get(transport=transport)

        # Add standard listeners
        if "logging" not in bus.listeners:
            bus.add_listener("logging", LoggingListener(event_filter=event_filter))

        # Only add progress listener if enabled in settings
        if "progress" not in bus.listeners and kwargs.get("progress_display", True):
            bus.add_listener("progress", ProgressListener())

        if "batching" not in bus.listeners:
            bus.add_listener(
                "batching",
                BatchingListener(
                    event_filter=event_filter,
                    batch_size=batch_size,
                    flush_interval=flush_interval,
                ),
            )

        await bus.start()
        cls._initialized = True

    @classmethod
    async def shutdown(cls):
        """Shutdown the logging system gracefully."""
        if not cls._initialized:
            return
        bus = AsyncEventBus.get()
        await bus.stop()
        cls._initialized = False

    @classmethod
    @asynccontextmanager
    async def managed(cls, **config_kwargs):
        """Context manager for the logging system lifecycle."""
        try:
            await cls.configure(**config_kwargs)
            yield
        finally:
            await cls.shutdown()


_logger_lock = threading.Lock()
_loggers: Dict[str, Logger] = {}


def get_logger(namespace: str) -> Logger:
    """
    Get a logger instance for a given namespace.
    Creates a new logger if one doesn't exist for this namespace.

    Args:
        namespace: The namespace for the logger (e.g. "agent.helper", "workflow.demo")

    Returns:
        A Logger instance for the given namespace
    """

    with _logger_lock:
        if namespace not in _loggers:
            _loggers[namespace] = Logger(namespace)
        return _loggers[namespace]


##########
# Example
##########


# class Agent:
#     """Shows how to combine Logger with OTel's @telemetry.traced decorator."""

#     def __init__(self, name: str):
#         self.logger = Logger(f"agent.{name}")

#     @telemetry.traced("agent.call_tool", kind=SpanKind.CLIENT)
#     async def call_tool(self, tool_name: str, **kwargs):
#         await self.logger.info(
#             f"Calling tool '{tool_name}'", name="TOOL_CALL_START", **kwargs
#         )
#         await asyncio.sleep(random.uniform(0.1, 0.3))
#         # Possibly do real logic here
#         await self.logger.debug(
#             f"Completed tool call '{tool_name}'", name="TOOL_CALL_END"
#         )


# class Workflow:
#     """Example workflow that logs multiple steps, also with optional tracing."""

#     def __init__(self, name: str, steps: List[str]):
#         self.logger = Logger(f"workflow.{name}")
#         self.steps = steps

#     @telemetry.traced("workflow.run", kind=SpanKind.INTERNAL)
#     async def run(self):
#         await self.logger.info(
#             "Workflow started", name="WORKFLOW_START", steps=len(self.steps)
#         )
#         for i, step_name in enumerate(self.steps, start=1):
#             pct = round((i / len(self.steps)) * 100, 2)
#             await self.logger.progress(
#                 f"Executing {step_name}", name="WORKFLOW_STEP", percentage=pct
#             )
#             await asyncio.sleep(random.uniform(0.1, 0.3))
#             await self.logger.milestone(
#                 f"Completed {step_name}", name="WORKFLOW_MILESTONE", step_index=i
#             )
#         await self.logger.status("Workflow complete", name="WORKFLOW_DONE")


# ###############################################################################
# # 10) Demo Main
# ###############################################################################


# async def main():
#     # 1) Configure Python logging
#     logging.basicConfig(level=logging.INFO)

#     # 2) Get the event bus and add local listeners
#     bus = AsyncEventBus.get()
#     bus.add_listener("logging", LoggingListener())
#     bus.add_listener("batching", BatchingListener(batch_size=3, flush_interval=2.0))

#     # 3) Optionally set up distributed transport
#     # configure_distributed("https://my-remote-logger.example.com")

#     # 4) Start the event bus
#     await bus.start()

#     # 5) Run example tasks
#     agent = Agent("assistant")
#     workflow = Workflow("demo_flow", ["init", "process", "cleanup"])

#     agent_task = asyncio.create_task(agent.call_tool("my-tool", foo="bar"))
#     workflow_task = asyncio.create_task(workflow.run())

#     # Also demonstrate timed context manager
#     logger = Logger("misc")
#     with event_context(
#         logger, "SynchronousBlock", event_type="info", name="SYNCHRONOUS_BLOCK"
#     ):
#         time.sleep(0.5)  # do a blocking operation

#     # Wait for tasks
#     await asyncio.gather(agent_task, workflow_task)

#     # 6) Stop the bus (flush & close)
#     await bus.stop()


# if __name__ == "__main__":
#     asyncio.run(main())
mcp_agent/logging/rich_progress.py
@@ -0,0 +1,120 @@
"""Rich-based progress display for MCP Agent."""

import time
from typing import Optional
from rich.console import Console
from mcp_agent.console import console as default_console
from mcp_agent.event_progress import ProgressEvent, ProgressAction
from rich.progress import Progress, SpinnerColumn, TextColumn
from contextlib import contextmanager


class RichProgressDisplay:
    """Rich-based display for progress events."""

    def __init__(self, console: Optional[Console] = None):
        """Initialize the progress display."""
        self.console = console or default_console
        self._taskmap = {}
        self._progress = Progress(
            SpinnerColumn(spinner_name="simpleDotsScrolling"),
            TextColumn(
                "[progress.description]{task.description}|",
                # table_column=Column(max_width=16),
            ),
            TextColumn(text_format="{task.fields[target]:<16}", style="Bold Blue"),
            TextColumn(text_format="{task.fields[details]}", style="dim white"),
            console=self.console,
            transient=False,
        )
        self._paused = False

    def start(self):
        """start"""
        self._progress.start()

    def stop(self):
        """stop"""
        self._progress.stop()

    def pause(self):
        """Pause the progress display."""
        if not self._paused:
            self._paused = True

            for task in self._progress.tasks:
                task.visible = False
            self._progress.stop()

    def resume(self):
        """Resume the progress display."""
        if self._paused:
            for task in self._progress.tasks:
                task.visible = True
            self._paused = False
            self._progress.start()

    @contextmanager
    def paused(self):
        """Context manager for temporarily pausing the display."""
        self.pause()
        try:
            yield
        finally:
            self.resume()

    def _get_action_style(self, action: ProgressAction) -> str:
        """Map actions to appropriate styles."""
        return {
            ProgressAction.STARTING: "bold yellow",
            ProgressAction.INITIALIZED: "dim green",
            ProgressAction.CHATTING: "bold blue",
            ProgressAction.READY: "dim green",
            ProgressAction.ROUTING: "bold blue",
            ProgressAction.CALLING_TOOL: "bold magenta",
            ProgressAction.FINISHED: "black on green",
            ProgressAction.SHUTDOWN: "black on red",
            ProgressAction.AGGREGATOR_INITIALIZED: "bold green",
        }.get(action, "white")

    def update(self, event: ProgressEvent) -> None:
        """Update the progress display with a new event."""
        task_name = event.agent_name or "default"
        # Create new task if needed
        if task_name not in self._taskmap:
            task_id = self._progress.add_task(
                "",
                total=None,
                target=f"{event.target}",
                details=f"{event.agent_name}",
            )
            self._taskmap[task_name] = task_id
        else:
            task_id = self._taskmap[task_name]

        self._progress.update(
            task_id,
            description=f"[{self._get_action_style(event.action)}]{event.action.value:<15}",
            target=event.target,
            details=event.details if event.details else "",
            task_name=task_name,
        )

        if (
            event.action == ProgressAction.INITIALIZED
            or event.action == ProgressAction.READY
        ):
            self._progress.update(task_id, completed=100, total=100)
        elif event.action == ProgressAction.FINISHED:
            self._progress.update(
                task_id,
                completed=100,
                total=100,
                details=f" / Elapsed Time {time.strftime('%H:%M:%S', time.gmtime(self._progress.tasks[task_id].elapsed))}",
            )
            for task in self._progress.tasks:
                if task.id != task_id:
                    task.visible = False
        else:
            self._progress.reset(task_id)
mcp_agent/logging/tracing.py
@@ -0,0 +1,140 @@
"""
Telemetry manager that defines distributed tracing decorators for OpenTelemetry traces/spans
for the Logger module for MCP Agent
"""

import asyncio
import functools
from typing import Any, Dict, Callable, Optional, Tuple, TYPE_CHECKING

from opentelemetry import trace
from opentelemetry.context import Context as OtelContext
from opentelemetry.propagate import extract as otel_extract
from opentelemetry.trace import set_span_in_context
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

from opentelemetry.trace import SpanKind, Status, StatusCode

from mcp_agent.context_dependent import ContextDependent

if TYPE_CHECKING:
    from mcp_agent.context import Context


class TelemetryManager(ContextDependent):
    """
    Simple manager for creating OpenTelemetry spans automatically.
    Decorator usage: @telemetry.traced("SomeSpanName")
    """

    def __init__(self, context: Optional["Context"] = None, **kwargs):
        # If needed, configure resources, exporters, etc.
        # E.g.: from opentelemetry.sdk.trace import TracerProvider
        # trace.set_tracer_provider(TracerProvider(...))
        super().__init__(context=context, **kwargs)

    def traced(
        self,
        name: str | None = None,
        kind: SpanKind = SpanKind.INTERNAL,
        attributes: Dict[str, Any] = None,
    ) -> Callable:
        """
        Decorator that automatically creates and manages a span for a function.
        Works for both async and sync functions.
        """

        def decorator(func):
            span_name = name or f"{func.__module__}.{func.__qualname__}"

            tracer = self.context.tracer or trace.get_tracer("mcp_agent")

            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                with tracer.start_as_current_span(span_name, kind=kind) as span:
                    if attributes:
                        for k, v in attributes.items():
                            span.set_attribute(k, v)
                    # Record simple args
                    self._record_args(span, args, kwargs)
                    try:
                        res = await func(*args, **kwargs)
                        return res
                    except Exception as e:
                        span.record_exception(e)
                        span.set_status(Status(StatusCode.ERROR))
                        raise

            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs):
                with tracer.start_as_current_span(span_name, kind=kind) as span:
                    if attributes:
                        for k, v in attributes.items():
                            span.set_attribute(k, v)
                    # Record simple args
                    self._record_args(span, args, kwargs)
                    try:
                        res = func(*args, **kwargs)
                        return res
                    except Exception as e:
                        span.record_exception(e)
                        span.set_status(Status(StatusCode.ERROR))
                        raise

            if asyncio.iscoroutinefunction(func):
                return async_wrapper
            else:
                return sync_wrapper

        return decorator

    def _record_args(self, span, args, kwargs):
        """Optionally record primitive args as span attributes."""
        for i, arg in enumerate(args):
            if isinstance(arg, (str, int, float, bool)):
                span.set_attribute(f"arg_{i}", str(arg))
        for k, v in kwargs.items():
            if isinstance(v, (str, int, float, bool)):
                span.set_attribute(k, str(v))


class MCPRequestTrace:
    """Helper class for trace context propagation in MCP"""

    @staticmethod
    def start_span_from_mcp_request(
        method: str, params: Dict[str, Any]
    ) -> Tuple[trace.Span, OtelContext]:
        """Extract trace context from incoming MCP request and start a new span"""
        # Extract trace context from _meta if present
        carrier = {}
        _meta = params.get("_meta", {})
        if "traceparent" in _meta:
            carrier["traceparent"] = _meta["traceparent"]
        if "tracestate" in _meta:
            carrier["tracestate"] = _meta["tracestate"]

        # Extract context and start span
        ctx = otel_extract(carrier, context=OtelContext())
        tracer = trace.get_tracer(__name__)
        span = tracer.start_span(method, context=ctx, kind=SpanKind.SERVER)
        return span, set_span_in_context(span)

    @staticmethod
    def inject_trace_context(arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Inject current trace context into outgoing MCP request arguments"""
        carrier = {}
        TraceContextTextMapPropagator().inject(carrier)

        # Create or update _meta with trace context
        _meta = arguments.get("_meta", {})
        if "traceparent" in carrier:
            _meta["traceparent"] = carrier["traceparent"]
        if "tracestate" in carrier:
            _meta["tracestate"] = carrier["tracestate"]
        arguments["_meta"] = _meta

        return arguments


telemetry = TelemetryManager()