fast-agent-mcp 0.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fast-agent-mcp might be problematic.
- fast_agent_mcp-0.0.7.dist-info/METADATA +322 -0
- fast_agent_mcp-0.0.7.dist-info/RECORD +100 -0
- fast_agent_mcp-0.0.7.dist-info/WHEEL +4 -0
- fast_agent_mcp-0.0.7.dist-info/entry_points.txt +5 -0
- fast_agent_mcp-0.0.7.dist-info/licenses/LICENSE +201 -0
- mcp_agent/__init__.py +0 -0
- mcp_agent/agents/__init__.py +0 -0
- mcp_agent/agents/agent.py +277 -0
- mcp_agent/app.py +303 -0
- mcp_agent/cli/__init__.py +0 -0
- mcp_agent/cli/__main__.py +4 -0
- mcp_agent/cli/commands/bootstrap.py +221 -0
- mcp_agent/cli/commands/config.py +11 -0
- mcp_agent/cli/commands/setup.py +229 -0
- mcp_agent/cli/main.py +68 -0
- mcp_agent/cli/terminal.py +24 -0
- mcp_agent/config.py +334 -0
- mcp_agent/console.py +28 -0
- mcp_agent/context.py +251 -0
- mcp_agent/context_dependent.py +48 -0
- mcp_agent/core/fastagent.py +1013 -0
- mcp_agent/eval/__init__.py +0 -0
- mcp_agent/event_progress.py +88 -0
- mcp_agent/executor/__init__.py +0 -0
- mcp_agent/executor/decorator_registry.py +120 -0
- mcp_agent/executor/executor.py +293 -0
- mcp_agent/executor/task_registry.py +34 -0
- mcp_agent/executor/temporal.py +405 -0
- mcp_agent/executor/workflow.py +197 -0
- mcp_agent/executor/workflow_signal.py +325 -0
- mcp_agent/human_input/__init__.py +0 -0
- mcp_agent/human_input/handler.py +49 -0
- mcp_agent/human_input/types.py +58 -0
- mcp_agent/logging/__init__.py +0 -0
- mcp_agent/logging/events.py +123 -0
- mcp_agent/logging/json_serializer.py +163 -0
- mcp_agent/logging/listeners.py +216 -0
- mcp_agent/logging/logger.py +365 -0
- mcp_agent/logging/rich_progress.py +120 -0
- mcp_agent/logging/tracing.py +140 -0
- mcp_agent/logging/transport.py +461 -0
- mcp_agent/mcp/__init__.py +0 -0
- mcp_agent/mcp/gen_client.py +85 -0
- mcp_agent/mcp/mcp_activity.py +18 -0
- mcp_agent/mcp/mcp_agent_client_session.py +242 -0
- mcp_agent/mcp/mcp_agent_server.py +56 -0
- mcp_agent/mcp/mcp_aggregator.py +394 -0
- mcp_agent/mcp/mcp_connection_manager.py +330 -0
- mcp_agent/mcp/stdio.py +104 -0
- mcp_agent/mcp_server_registry.py +275 -0
- mcp_agent/progress_display.py +10 -0
- mcp_agent/resources/examples/decorator/main.py +26 -0
- mcp_agent/resources/examples/decorator/optimizer.py +78 -0
- mcp_agent/resources/examples/decorator/orchestrator.py +68 -0
- mcp_agent/resources/examples/decorator/parallel.py +81 -0
- mcp_agent/resources/examples/decorator/router.py +56 -0
- mcp_agent/resources/examples/decorator/tiny.py +22 -0
- mcp_agent/resources/examples/mcp_researcher/main-evalopt.py +53 -0
- mcp_agent/resources/examples/mcp_researcher/main.py +38 -0
- mcp_agent/telemetry/__init__.py +0 -0
- mcp_agent/telemetry/usage_tracking.py +18 -0
- mcp_agent/workflows/__init__.py +0 -0
- mcp_agent/workflows/embedding/__init__.py +0 -0
- mcp_agent/workflows/embedding/embedding_base.py +61 -0
- mcp_agent/workflows/embedding/embedding_cohere.py +49 -0
- mcp_agent/workflows/embedding/embedding_openai.py +46 -0
- mcp_agent/workflows/evaluator_optimizer/__init__.py +0 -0
- mcp_agent/workflows/evaluator_optimizer/evaluator_optimizer.py +359 -0
- mcp_agent/workflows/intent_classifier/__init__.py +0 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_base.py +120 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding.py +134 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_cohere.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_embedding_openai.py +45 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm.py +161 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_anthropic.py +60 -0
- mcp_agent/workflows/intent_classifier/intent_classifier_llm_openai.py +60 -0
- mcp_agent/workflows/llm/__init__.py +0 -0
- mcp_agent/workflows/llm/augmented_llm.py +645 -0
- mcp_agent/workflows/llm/augmented_llm_anthropic.py +539 -0
- mcp_agent/workflows/llm/augmented_llm_openai.py +615 -0
- mcp_agent/workflows/llm/llm_selector.py +345 -0
- mcp_agent/workflows/llm/model_factory.py +175 -0
- mcp_agent/workflows/orchestrator/__init__.py +0 -0
- mcp_agent/workflows/orchestrator/orchestrator.py +407 -0
- mcp_agent/workflows/orchestrator/orchestrator_models.py +154 -0
- mcp_agent/workflows/orchestrator/orchestrator_prompts.py +113 -0
- mcp_agent/workflows/parallel/__init__.py +0 -0
- mcp_agent/workflows/parallel/fan_in.py +350 -0
- mcp_agent/workflows/parallel/fan_out.py +187 -0
- mcp_agent/workflows/parallel/parallel_llm.py +141 -0
- mcp_agent/workflows/router/__init__.py +0 -0
- mcp_agent/workflows/router/router_base.py +276 -0
- mcp_agent/workflows/router/router_embedding.py +240 -0
- mcp_agent/workflows/router/router_embedding_cohere.py +59 -0
- mcp_agent/workflows/router/router_embedding_openai.py +59 -0
- mcp_agent/workflows/router/router_llm.py +301 -0
- mcp_agent/workflows/swarm/__init__.py +0 -0
- mcp_agent/workflows/swarm/swarm.py +320 -0
- mcp_agent/workflows/swarm/swarm_anthropic.py +42 -0
- mcp_agent/workflows/swarm/swarm_openai.py +41 -0
mcp_agent/config.py
ADDED
@@ -0,0 +1,334 @@
+"""
+Reading settings from environment variables and providing a settings object
+for the application configuration.
+"""
+
+from pathlib import Path
+from typing import Dict, List, Literal, Optional
+
+from pydantic import BaseModel, ConfigDict, field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class MCPServerAuthSettings(BaseModel):
+    """Represents authentication configuration for a server."""
+
+    api_key: str | None = None
+
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class MCPRootSettings(BaseModel):
+    """Represents a root directory configuration for an MCP server."""
+
+    uri: str
+    """The URI identifying the root. Must start with file://"""
+
+    name: Optional[str] = None
+    """Optional name for the root."""
+
+    server_uri_alias: Optional[str] = None
+    """Optional URI alias for presentation to the server"""
+
+    @field_validator("uri", "server_uri_alias")
+    @classmethod
+    def validate_uri(cls, v: str) -> str:
+        """Validate that the URI starts with file:// (required by specification 2024-11-05)"""
+        if not v.startswith("file://"):
+            raise ValueError("Root URI must start with file://")
+        return v
+
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class MCPServerSettings(BaseModel):
+    """
+    Represents the configuration for an individual server.
+    """
+
+    # TODO: saqadri - server name should be something a server can provide itself during initialization
+    name: str | None = None
+    """The name of the server."""
+
+    # TODO: saqadri - server description should be something a server can provide itself during initialization
+    description: str | None = None
+    """The description of the server."""
+
+    transport: Literal["stdio", "sse"] = "stdio"
+    """The transport mechanism."""
+
+    command: str | None = None
+    """The command to execute the server (e.g. npx)."""
+
+    args: List[str] | None = None
+    """The arguments for the server command."""
+
+    read_timeout_seconds: int | None = None
+    """The timeout in seconds for the server connection."""
+
+    url: str | None = None
+    """The URL for the server (e.g. for SSE transport)."""
+
+    auth: MCPServerAuthSettings | None = None
+    """The authentication configuration for the server."""
+
+    roots: Optional[List[MCPRootSettings]] = None
+    """Root directories this server has access to."""
+
+    env: Dict[str, str] | None = None
+    """Environment variables to pass to the server process."""
+
+    env: Dict[str, str] | None = None
+    """Environment variables to pass to the server process."""
+
+
+class MCPSettings(BaseModel):
+    """Configuration for all MCP servers."""
+
+    servers: Dict[str, MCPServerSettings] = {}
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class AnthropicSettings(BaseModel):
+    """
+    Settings for using Anthropic models in the MCP Agent application.
+    """
+
+    api_key: str | None = None
+
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class CohereSettings(BaseModel):
+    """
+    Settings for using Cohere models in the MCP Agent application.
+    """
+
+    api_key: str | None = None
+
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class OpenAISettings(BaseModel):
+    """
+    Settings for using OpenAI models in the MCP Agent application.
+    """
+
+    api_key: str | None = None
+    reasoning_effort: Literal["low", "medium", "high"] = "medium"
+
+    base_url: str | None = None
+
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class TemporalSettings(BaseModel):
+    """
+    Temporal settings for the MCP Agent application.
+    """
+
+    host: str
+    namespace: str = "default"
+    task_queue: str
+    api_key: str | None = None
+
+
+class UsageTelemetrySettings(BaseModel):
+    """
+    Settings for usage telemetry in the MCP Agent application.
+    Anonymized usage metrics are sent to a telemetry server to help improve the product.
+    """
+
+    enabled: bool = True
+    """Enable usage telemetry in the MCP Agent application."""
+
+    enable_detailed_telemetry: bool = False
+    """If enabled, detailed telemetry data, including prompts and agents, will be sent to the telemetry server."""
+
+
+class OpenTelemetrySettings(BaseModel):
+    """
+    OTEL settings for the MCP Agent application.
+    """
+
+    enabled: bool = True
+
+    service_name: str = "mcp-agent"
+    service_instance_id: str | None = None
+    service_version: str | None = None
+
+    otlp_endpoint: str | None = None
+    """OTLP endpoint for OpenTelemetry tracing"""
+
+    console_debug: bool = False
+    """Log spans to console"""
+
+    sample_rate: float = 1.0
+    """Sample rate for tracing (1.0 = sample everything)"""
+
+
+class LoggerSettings(BaseModel):
+    """
+    Logger settings for the MCP Agent application.
+    """
+
+    type: Literal["none", "console", "file", "http"] = "file"
+
+    level: Literal["debug", "info", "warning", "error"] = "warning"
+    """Minimum logging level"""
+
+    progress_display: bool = True
+    """Enable or disable the progress display"""
+
+    path: str = "fastagent.jsonl"
+    """Path to log file, if logger 'type' is 'file'."""
+
+    batch_size: int = 100
+    """Number of events to accumulate before processing"""
+
+    flush_interval: float = 2.0
+    """How often to flush events in seconds"""
+
+    max_queue_size: int = 2048
+    """Maximum queue size for event processing"""
+
+    # HTTP transport settings
+    http_endpoint: str | None = None
+    """HTTP endpoint for event transport"""
+
+    http_headers: dict[str, str] | None = None
+    """HTTP headers for event transport"""
+
+    http_timeout: float = 5.0
+    """HTTP timeout seconds for event transport"""
+
+    show_chat: bool = True
+    """Show chat User/Assistant on the console"""
+    show_tools: bool = True
+    """Show MCP Sever tool calls on the console"""
+    truncate_tools: bool = True
+    """Truncate display of long tool calls"""
+
+
+class Settings(BaseSettings):
+    """
+    Settings class for the MCP Agent application.
+    """
+
+    model_config = SettingsConfigDict(
+        env_nested_delimiter="__",
+        env_file=".env",
+        env_file_encoding="utf-8",
+        extra="allow",
+        nested_model_default_partial_update=True,
+    )  # Customize the behavior of settings here
+
+    mcp: MCPSettings | None = MCPSettings()
+    """MCP config, such as MCP servers"""
+
+    execution_engine: Literal["asyncio", "temporal"] = "asyncio"
+    """Execution engine for the MCP Agent application"""
+
+    default_model: str | None = "haiku"
+    """
+    Default model for agents. Format is provider.model_name.<reasoning_effort>, for example openai.o3-mini.low
+    Aliases are provided for common models e.g. sonnet, haiku, gpt-4o, o3-mini etc.
+    """
+    temporal: TemporalSettings | None = None
+    """Settings for Temporal workflow orchestration"""
+
+    anthropic: AnthropicSettings | None = None
+    """Settings for using Anthropic models in the MCP Agent application"""
+
+    cohere: CohereSettings | None = None
+    """Settings for using Cohere models in the MCP Agent application"""
+
+    openai: OpenAISettings | None = None
+    """Settings for using OpenAI models in the MCP Agent application"""
+
+    otel: OpenTelemetrySettings | None = OpenTelemetrySettings()
+    """OpenTelemetry logging settings for the MCP Agent application"""
+
+    logger: LoggerSettings | None = LoggerSettings()
+    """Logger settings for the MCP Agent application"""
+
+    usage_telemetry: UsageTelemetrySettings | None = UsageTelemetrySettings()
+    """Usage tracking settings for the MCP Agent application"""
+
+    @classmethod
+    def find_config(cls) -> Path | None:
+        """Find the config file in the current directory or parent directories."""
+        current_dir = Path.cwd()
+
+        # Check current directory and parent directories
+        while current_dir != current_dir.parent:
+            for filename in [
+                "mcp-agent.config.yaml",
+                "mcp_agent.config.yaml",
+                "fastagent.config.yaml",
+            ]:
+                config_path = current_dir / filename
+                if config_path.exists():
+                    return config_path
+            current_dir = current_dir.parent
+
+        return None
+
+
+# Global settings object
+_settings: Settings | None = None
+
+
+def get_settings(config_path: str | None = None) -> Settings:
+    """Get settings instance, automatically loading from config file if available."""
+
+    def deep_merge(base: dict, update: dict) -> dict:
+        """Recursively merge two dictionaries, preserving nested structures."""
+        merged = base.copy()
+        for key, value in update.items():
+            if (
+                key in merged
+                and isinstance(merged[key], dict)
+                and isinstance(value, dict)
+            ):
+                merged[key] = deep_merge(merged[key], value)
+            else:
+                merged[key] = value
+        return merged
+
+    global _settings
+    if _settings:
+        return _settings
+
+    config_file = Path(config_path) if config_path else Settings.find_config()
+    merged_settings = {}
+
+    if config_file:
+        if not config_file.exists():
+            pass
+        else:
+            import yaml  # pylint: disable=C0415
+
+            # Load main config
+            with open(config_file, "r", encoding="utf-8") as f:
+                yaml_settings = yaml.safe_load(f) or {}
+                merged_settings = yaml_settings
+
+            # Look for secrets file in the same directory
+            for secrets_file in [
+                config_file.parent / "mcp-agent.secrets.yaml",
+                config_file.parent / "mcp_agent.secrets.yaml",
+                config_file.parent / "fastagent.secrets.yaml",
+            ]:
+                if secrets_file.exists():
+                    with open(secrets_file, "r", encoding="utf-8") as f:
+                        yaml_secrets = yaml.safe_load(f) or {}
+                        merged_settings = deep_merge(merged_settings, yaml_secrets)
+
+            _settings = Settings(**merged_settings)
+            return _settings
+    else:
+        pass
+
+    _settings = Settings()
+    return _settings
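
Editor's note: to make the loading flow above concrete, here is a small illustrative sketch (not part of the package) of how get_settings() layers a secrets file over a config file before building the Settings model. The dictionary contents and key values are invented for the example; in normal use the same merge happens via deep_merge() when fastagent.config.yaml and fastagent.secrets.yaml sit side by side.

from mcp_agent.config import Settings

# What fastagent.config.yaml might contain (example values only)
config_data = {
    "default_model": "sonnet",
    "mcp": {"servers": {"fetch": {"command": "uvx", "args": ["mcp-server-fetch"]}}},
}

# What fastagent.secrets.yaml might contain (example values only)
secrets_data = {"anthropic": {"api_key": "sk-ant-example"}}

# get_settings() deep-merges the two dicts and calls Settings(**merged);
# the top-level keys do not collide here, so a plain dict merge shows the result
merged = {**config_data, **secrets_data}
settings = Settings(**merged)

assert settings.default_model == "sonnet"
assert settings.mcp.servers["fetch"].command == "uvx"
assert settings.anthropic.api_key == "sk-ant-example"

# In normal use the same result comes from the cached loader, which also walks
# parent directories to locate the config file:
#   settings = get_settings()
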
mcp_agent/console.py
ADDED
@@ -0,0 +1,28 @@
+"""
+Centralized console configuration for MCP Agent.
+
+This module provides shared console instances for consistent output handling:
+- console: Main console for general output
+- error_console: Error console for application errors (writes to stderr)
+- server_console: Special console for MCP server output
+"""
+
+from rich.console import Console
+
+# Main console for general output
+console = Console(
+    color_system="auto",
+)
+
+# Error console for application errors
+error_console = Console(
+    stderr=True,
+    style="bold red",
+)
+
+# Special console for MCP server output
+# This could have custom styling to distinguish server messages
+server_console = Console(
+    # Not stderr since we want to maintain output ordering with other messages
+    style="dim blue",  # Or whatever style makes server output distinct
+)
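
Editor's note: a brief usage sketch (assumed, not shipped in the package) showing how the three shared Rich consoles are intended to be used; the messages are placeholders.

from mcp_agent.console import console, error_console, server_console

console.print("Starting agent run...")              # general output on stdout
error_console.print("Could not reach MCP server")   # bold red, written to stderr
server_console.print("fetch server: tool call completed")  # dim blue server-side output
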
mcp_agent/context.py
ADDED
@@ -0,0 +1,251 @@
+"""
+A central context object to store global state that is shared across the application.
+"""
+
+import asyncio
+import concurrent.futures
+from typing import Any, Optional, Union, TYPE_CHECKING
+
+from pydantic import BaseModel, ConfigDict
+
+from mcp import ServerSession
+
+from opentelemetry import trace
+from opentelemetry.propagate import set_global_textmap
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+
+from mcp_agent.config import get_settings
+from mcp_agent.config import Settings
+from mcp_agent.executor.executor import Executor
+from mcp_agent.executor.decorator_registry import (
+    DecoratorRegistry,
+    register_asyncio_decorators,
+    register_temporal_decorators,
+)
+from mcp_agent.executor.task_registry import ActivityRegistry
+from mcp_agent.executor.executor import AsyncioExecutor
+
+from mcp_agent.logging.events import EventFilter
+from mcp_agent.logging.logger import LoggingConfig
+from mcp_agent.logging.transport import create_transport
+from mcp_agent.mcp_server_registry import ServerRegistry
+from mcp_agent.workflows.llm.llm_selector import ModelSelector
+from mcp_agent.logging.logger import get_logger
+
+
+if TYPE_CHECKING:
+    from mcp_agent.human_input.types import HumanInputCallback
+    from mcp_agent.executor.workflow_signal import SignalWaitCallback
+else:
+    # Runtime placeholders for the types
+    HumanInputCallback = Any
+    SignalWaitCallback = Any
+
+logger = get_logger(__name__)
+
+
+class Context(BaseModel):
+    """
+    Context that is passed around through the application.
+    This is a global context that is shared across the application.
+    """
+
+    config: Optional[Settings] = None
+    executor: Optional[Executor] = None
+    human_input_handler: Optional[HumanInputCallback] = None
+    signal_notification: Optional[SignalWaitCallback] = None
+    upstream_session: Optional[ServerSession] = None  # TODO: saqadri - figure this out
+    model_selector: Optional[ModelSelector] = None
+
+    # Registries
+    server_registry: Optional[ServerRegistry] = None
+    task_registry: Optional[ActivityRegistry] = None
+    decorator_registry: Optional[DecoratorRegistry] = None
+
+    tracer: Optional[trace.Tracer] = None
+
+    model_config = ConfigDict(
+        extra="allow",
+        arbitrary_types_allowed=True,  # Tell Pydantic to defer type evaluation
+    )
+
+
+async def configure_otel(config: "Settings"):
+    """
+    Configure OpenTelemetry based on the application config.
+    """
+    if not config.otel.enabled:
+        return
+
+    # Check if a provider is already set to avoid re-initialization
+    if trace.get_tracer_provider().__class__.__name__ != "NoOpTracerProvider":
+        return
+
+    # Set up global textmap propagator first
+    set_global_textmap(TraceContextTextMapPropagator())
+
+    service_name = config.otel.service_name
+    service_instance_id = config.otel.service_instance_id
+    service_version = config.otel.service_version
+
+    # Create resource identifying this service
+    resource = Resource.create(
+        attributes={
+            key: value
+            for key, value in {
+                "service.name": service_name,
+                "service.instance.id": service_instance_id,
+                "service.version": service_version,
+            }.items()
+            if value is not None
+        }
+    )
+
+    # Create provider with resource
+    tracer_provider = TracerProvider(resource=resource)
+
+    # Add exporters based on config
+    otlp_endpoint = config.otel.otlp_endpoint
+    if otlp_endpoint:
+        exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
+        tracer_provider.add_span_processor(BatchSpanProcessor(exporter))
+
+        if config.otel.console_debug:
+            tracer_provider.add_span_processor(
+                BatchSpanProcessor(ConsoleSpanExporter())
+            )
+    else:
+        # Default to console exporter in development
+        tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))
+
+    # Set as global tracer provider
+    trace.set_tracer_provider(tracer_provider)
+
+
+async def configure_logger(config: "Settings"):
+    """
+    Configure logging and tracing based on the application config.
+    """
+    event_filter: EventFilter = EventFilter(min_level=config.logger.level)
+    logger.info(f"Configuring logger with level: {config.logger.level}")
+    transport = create_transport(settings=config.logger, event_filter=event_filter)
+    await LoggingConfig.configure(
+        event_filter=event_filter,
+        transport=transport,
+        batch_size=config.logger.batch_size,
+        flush_interval=config.logger.flush_interval,
+        progress_display=config.logger.progress_display,
+    )
+
+
+async def configure_usage_telemetry(_config: "Settings"):
+    """
+    Configure usage telemetry based on the application config.
+    TODO: saqadri - implement usage tracking
+    """
+    pass
+
+
+async def configure_executor(config: "Settings"):
+    """
+    Configure the executor based on the application config.
+    """
+    if config.execution_engine == "asyncio":
+        return AsyncioExecutor()
+    elif config.execution_engine == "temporal":
+        # Configure Temporal executor
+        from mcp_agent.executor.temporal import TemporalExecutor
+
+        executor = TemporalExecutor(config=config.temporal)
+        return executor
+    else:
+        # Default to asyncio executor
+        executor = AsyncioExecutor()
+        return executor
+
+
+async def initialize_context(
+    config: Optional[Union["Settings", str]] = None, store_globally: bool = False
+):
+    """
+    Initialize the global application context.
+    """
+    if config is None:
+        config = get_settings()
+    elif isinstance(config, str):
+        config = get_settings(config_path=config)
+
+    context = Context()
+    context.config = config
+    context.server_registry = ServerRegistry(config=config)
+
+    # Configure logging and telemetry
+    await configure_otel(config)
+    await configure_logger(config)
+    await configure_usage_telemetry(config)
+
+    # Configure the executor
+    context.executor = await configure_executor(config)
+    context.task_registry = ActivityRegistry()
+
+    context.decorator_registry = DecoratorRegistry()
+    register_asyncio_decorators(context.decorator_registry)
+    register_temporal_decorators(context.decorator_registry)
+
+    # Store the tracer in context if needed
+    context.tracer = trace.get_tracer(config.otel.service_name)
+
+    if store_globally:
+        global _global_context
+        _global_context = context
+
+    return context
+
+
+async def cleanup_context():
+    """
+    Cleanup the global application context.
+    """
+
+    # Shutdown logging and telemetry
+    await LoggingConfig.shutdown()
+
+
+_global_context: Context | None = None
+
+
+def get_current_context() -> Context:
+    """
+    Synchronous initializer/getter for global application context.
+    For async usage, use aget_current_context instead.
+    """
+    global _global_context
+    if _global_context is None:
+        try:
+            # Try to get the current event loop
+            loop = asyncio.get_event_loop()
+            if loop.is_running():
+                # Create a new loop in a separate thread
+                def run_async():
+                    new_loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(new_loop)
+                    return new_loop.run_until_complete(initialize_context())
+
+                with concurrent.futures.ThreadPoolExecutor() as pool:
+                    _global_context = pool.submit(run_async).result()
+            else:
+                _global_context = loop.run_until_complete(initialize_context())
+        except RuntimeError:
+            _global_context = asyncio.run(initialize_context())
+    return _global_context
+
+
+def get_current_config():
+    """
+    Get the current application config.
+    """
+    return get_current_context().config or get_settings()
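
Editor's note: a rough sketch (assumed usage, not taken from the package; the config filename is a placeholder) of driving the context lifecycle defined above explicitly.

import asyncio

from mcp_agent.context import cleanup_context, initialize_context


async def main():
    # Builds Settings (here from an explicit path), configures OTEL, logging,
    # telemetry and the executor; store_globally makes get_current_context() return it
    context = await initialize_context("fastagent.config.yaml", store_globally=True)
    try:
        print(context.config.execution_engine)   # "asyncio" unless overridden
        print(type(context.executor).__name__)   # AsyncioExecutor by default
    finally:
        await cleanup_context()                  # shuts down the logging pipeline


asyncio.run(main())
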
mcp_agent/context_dependent.py
ADDED
@@ -0,0 +1,48 @@
+from contextlib import contextmanager
+from typing import Optional, TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from mcp_agent.context import Context
+
+
+class ContextDependent:
+    """
+    Mixin class for components that need context access.
+    Provides both global fallback and instance-specific context support.
+    """
+
+    def __init__(self, context: Optional["Context"] = None, **kwargs):
+        self._context = context
+        super().__init__(**kwargs)
+
+    @property
+    def context(self) -> "Context":
+        """
+        Get context, with graceful fallback to global context if needed.
+        Raises clear error if no context is available.
+        """
+        # First try instance context
+        if self._context is not None:
+            return self._context
+
+        try:
+            # Fall back to global context if available
+            from mcp_agent.context import get_current_context
+
+            return get_current_context()
+        except Exception as e:
+            raise RuntimeError(
+                f"No context available for {self.__class__.__name__}. "
+                "Either initialize MCPApp first or pass context explicitly."
+            ) from e
+
+    @contextmanager
+    def use_context(self, context: "Context"):
+        """Temporarily use a different context."""
+        old_context = self._context
+        self._context = context
+        try:
+            yield
+        finally:
+            self._context = old_context
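
Editor's note: a minimal sketch (assumed, not part of the release) of a component built on the ContextDependent mixin; MyComponent is a made-up name and the scoped Context is constructed empty just to demonstrate use_context().

from mcp_agent.context import Context
from mcp_agent.context_dependent import ContextDependent


class MyComponent(ContextDependent):
    def engine(self) -> str:
        # Resolves the instance context, or falls back to the global context
        return self.context.config.execution_engine


component = MyComponent()          # no explicit context: global fallback is used
print(component.engine())

scoped = Context()                 # an empty Context, purely for demonstration
with component.use_context(scoped):
    assert component.context is scoped
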