fast-agent-mcp 0.2.58__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of fast-agent-mcp might be problematic.
- fast_agent/__init__.py +127 -0
- fast_agent/agents/__init__.py +36 -0
- {mcp_agent/core → fast_agent/agents}/agent_types.py +2 -1
- fast_agent/agents/llm_agent.py +217 -0
- fast_agent/agents/llm_decorator.py +486 -0
- mcp_agent/agents/base_agent.py → fast_agent/agents/mcp_agent.py +377 -385
- fast_agent/agents/tool_agent.py +168 -0
- {mcp_agent → fast_agent}/agents/workflow/chain_agent.py +43 -33
- {mcp_agent → fast_agent}/agents/workflow/evaluator_optimizer.py +31 -35
- {mcp_agent → fast_agent}/agents/workflow/iterative_planner.py +56 -47
- {mcp_agent → fast_agent}/agents/workflow/orchestrator_models.py +4 -4
- {mcp_agent → fast_agent}/agents/workflow/parallel_agent.py +34 -41
- {mcp_agent → fast_agent}/agents/workflow/router_agent.py +54 -39
- {mcp_agent → fast_agent}/cli/__main__.py +5 -3
- {mcp_agent → fast_agent}/cli/commands/check_config.py +95 -66
- {mcp_agent → fast_agent}/cli/commands/go.py +20 -11
- {mcp_agent → fast_agent}/cli/commands/quickstart.py +4 -4
- {mcp_agent → fast_agent}/cli/commands/server_helpers.py +1 -1
- {mcp_agent → fast_agent}/cli/commands/setup.py +75 -134
- {mcp_agent → fast_agent}/cli/commands/url_parser.py +9 -8
- {mcp_agent → fast_agent}/cli/main.py +36 -16
- {mcp_agent → fast_agent}/cli/terminal.py +2 -2
- {mcp_agent → fast_agent}/config.py +10 -2
- fast_agent/constants.py +8 -0
- {mcp_agent → fast_agent}/context.py +24 -19
- {mcp_agent → fast_agent}/context_dependent.py +9 -5
- fast_agent/core/__init__.py +52 -0
- {mcp_agent → fast_agent}/core/agent_app.py +39 -36
- fast_agent/core/core_app.py +135 -0
- {mcp_agent → fast_agent}/core/direct_decorators.py +12 -26
- {mcp_agent → fast_agent}/core/direct_factory.py +95 -73
- {mcp_agent → fast_agent/core}/executor/executor.py +4 -5
- {mcp_agent → fast_agent}/core/fastagent.py +32 -32
- fast_agent/core/logging/__init__.py +5 -0
- {mcp_agent → fast_agent/core}/logging/events.py +3 -3
- {mcp_agent → fast_agent/core}/logging/json_serializer.py +1 -1
- {mcp_agent → fast_agent/core}/logging/listeners.py +85 -7
- {mcp_agent → fast_agent/core}/logging/logger.py +7 -7
- {mcp_agent → fast_agent/core}/logging/transport.py +10 -11
- fast_agent/core/prompt.py +9 -0
- {mcp_agent → fast_agent}/core/validation.py +4 -4
- fast_agent/event_progress.py +61 -0
- fast_agent/history/history_exporter.py +44 -0
- {mcp_agent → fast_agent}/human_input/__init__.py +9 -12
- {mcp_agent → fast_agent}/human_input/elicitation_handler.py +26 -8
- {mcp_agent → fast_agent}/human_input/elicitation_state.py +7 -7
- {mcp_agent → fast_agent}/human_input/simple_form.py +6 -4
- {mcp_agent → fast_agent}/human_input/types.py +1 -18
- fast_agent/interfaces.py +228 -0
- fast_agent/llm/__init__.py +9 -0
- mcp_agent/llm/augmented_llm.py → fast_agent/llm/fastagent_llm.py +127 -218
- fast_agent/llm/internal/passthrough.py +137 -0
- mcp_agent/llm/augmented_llm_playback.py → fast_agent/llm/internal/playback.py +29 -25
- mcp_agent/llm/augmented_llm_silent.py → fast_agent/llm/internal/silent.py +10 -17
- fast_agent/llm/internal/slow.py +38 -0
- {mcp_agent → fast_agent}/llm/memory.py +40 -30
- {mcp_agent → fast_agent}/llm/model_database.py +35 -2
- {mcp_agent → fast_agent}/llm/model_factory.py +103 -77
- fast_agent/llm/model_info.py +126 -0
- {mcp_agent/llm/providers → fast_agent/llm/provider/anthropic}/anthropic_utils.py +7 -7
- fast_agent/llm/provider/anthropic/llm_anthropic.py +603 -0
- {mcp_agent/llm/providers → fast_agent/llm/provider/anthropic}/multipart_converter_anthropic.py +79 -86
- {mcp_agent/llm/providers → fast_agent/llm/provider/bedrock}/bedrock_utils.py +3 -1
- mcp_agent/llm/providers/augmented_llm_bedrock.py → fast_agent/llm/provider/bedrock/llm_bedrock.py +833 -717
- {mcp_agent/llm/providers → fast_agent/llm/provider/google}/google_converter.py +66 -14
- fast_agent/llm/provider/google/llm_google_native.py +431 -0
- mcp_agent/llm/providers/augmented_llm_aliyun.py → fast_agent/llm/provider/openai/llm_aliyun.py +6 -7
- mcp_agent/llm/providers/augmented_llm_azure.py → fast_agent/llm/provider/openai/llm_azure.py +4 -4
- mcp_agent/llm/providers/augmented_llm_deepseek.py → fast_agent/llm/provider/openai/llm_deepseek.py +10 -11
- mcp_agent/llm/providers/augmented_llm_generic.py → fast_agent/llm/provider/openai/llm_generic.py +4 -4
- mcp_agent/llm/providers/augmented_llm_google_oai.py → fast_agent/llm/provider/openai/llm_google_oai.py +4 -4
- mcp_agent/llm/providers/augmented_llm_groq.py → fast_agent/llm/provider/openai/llm_groq.py +14 -16
- mcp_agent/llm/providers/augmented_llm_openai.py → fast_agent/llm/provider/openai/llm_openai.py +133 -207
- mcp_agent/llm/providers/augmented_llm_openrouter.py → fast_agent/llm/provider/openai/llm_openrouter.py +6 -6
- mcp_agent/llm/providers/augmented_llm_tensorzero_openai.py → fast_agent/llm/provider/openai/llm_tensorzero_openai.py +17 -16
- mcp_agent/llm/providers/augmented_llm_xai.py → fast_agent/llm/provider/openai/llm_xai.py +6 -6
- {mcp_agent/llm/providers → fast_agent/llm/provider/openai}/multipart_converter_openai.py +125 -63
- {mcp_agent/llm/providers → fast_agent/llm/provider/openai}/openai_multipart.py +12 -12
- {mcp_agent/llm/providers → fast_agent/llm/provider/openai}/openai_utils.py +18 -16
- {mcp_agent → fast_agent}/llm/provider_key_manager.py +2 -2
- {mcp_agent → fast_agent}/llm/provider_types.py +2 -0
- {mcp_agent → fast_agent}/llm/sampling_converter.py +15 -12
- {mcp_agent → fast_agent}/llm/usage_tracking.py +23 -5
- fast_agent/mcp/__init__.py +54 -0
- {mcp_agent → fast_agent}/mcp/elicitation_factory.py +3 -3
- {mcp_agent → fast_agent}/mcp/elicitation_handlers.py +19 -10
- {mcp_agent → fast_agent}/mcp/gen_client.py +3 -3
- fast_agent/mcp/helpers/__init__.py +36 -0
- fast_agent/mcp/helpers/content_helpers.py +183 -0
- {mcp_agent → fast_agent}/mcp/helpers/server_config_helpers.py +8 -8
- {mcp_agent → fast_agent}/mcp/hf_auth.py +25 -23
- fast_agent/mcp/interfaces.py +93 -0
- {mcp_agent → fast_agent}/mcp/logger_textio.py +4 -4
- {mcp_agent → fast_agent}/mcp/mcp_agent_client_session.py +49 -44
- {mcp_agent → fast_agent}/mcp/mcp_aggregator.py +66 -115
- {mcp_agent → fast_agent}/mcp/mcp_connection_manager.py +16 -23
- {mcp_agent/core → fast_agent/mcp}/mcp_content.py +23 -15
- {mcp_agent → fast_agent}/mcp/mime_utils.py +39 -0
- fast_agent/mcp/prompt.py +159 -0
- mcp_agent/mcp/prompt_message_multipart.py → fast_agent/mcp/prompt_message_extended.py +27 -20
- {mcp_agent → fast_agent}/mcp/prompt_render.py +21 -19
- {mcp_agent → fast_agent}/mcp/prompt_serialization.py +46 -46
- fast_agent/mcp/prompts/__main__.py +7 -0
- {mcp_agent → fast_agent}/mcp/prompts/prompt_helpers.py +31 -30
- {mcp_agent → fast_agent}/mcp/prompts/prompt_load.py +8 -8
- {mcp_agent → fast_agent}/mcp/prompts/prompt_server.py +11 -19
- {mcp_agent → fast_agent}/mcp/prompts/prompt_template.py +18 -18
- {mcp_agent → fast_agent}/mcp/resource_utils.py +1 -1
- {mcp_agent → fast_agent}/mcp/sampling.py +31 -26
- {mcp_agent/mcp_server → fast_agent/mcp/server}/__init__.py +1 -1
- {mcp_agent/mcp_server → fast_agent/mcp/server}/agent_server.py +5 -6
- fast_agent/mcp/ui_agent.py +48 -0
- fast_agent/mcp/ui_mixin.py +209 -0
- fast_agent/mcp_server_registry.py +90 -0
- {mcp_agent → fast_agent}/resources/examples/data-analysis/analysis-campaign.py +5 -4
- {mcp_agent → fast_agent}/resources/examples/data-analysis/analysis.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/forms_demo.py +3 -3
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/game_character.py +2 -2
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/game_character_handler.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/tool_call.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/mcp/state-transfer/agent_one.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/mcp/state-transfer/agent_two.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/researcher/researcher-eval.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/researcher/researcher-imp.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/researcher/researcher.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/tensorzero/agent.py +2 -2
- {mcp_agent → fast_agent}/resources/examples/tensorzero/image_demo.py +3 -3
- {mcp_agent → fast_agent}/resources/examples/tensorzero/simple_agent.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/workflows/chaining.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/workflows/evaluator.py +3 -3
- {mcp_agent → fast_agent}/resources/examples/workflows/human_input.py +5 -3
- {mcp_agent → fast_agent}/resources/examples/workflows/orchestrator.py +1 -1
- {mcp_agent → fast_agent}/resources/examples/workflows/parallel.py +2 -2
- {mcp_agent → fast_agent}/resources/examples/workflows/router.py +5 -2
- fast_agent/resources/setup/.gitignore +24 -0
- fast_agent/resources/setup/agent.py +18 -0
- fast_agent/resources/setup/fastagent.config.yaml +44 -0
- fast_agent/resources/setup/fastagent.secrets.yaml.example +38 -0
- fast_agent/resources/setup/pyproject.toml.tmpl +17 -0
- fast_agent/tools/elicitation.py +369 -0
- fast_agent/types/__init__.py +32 -0
- fast_agent/types/llm_stop_reason.py +77 -0
- fast_agent/ui/__init__.py +38 -0
- fast_agent/ui/console_display.py +1005 -0
- {mcp_agent/human_input → fast_agent/ui}/elicitation_form.py +17 -12
- mcp_agent/human_input/elicitation_forms.py → fast_agent/ui/elicitation_style.py +1 -1
- {mcp_agent/core → fast_agent/ui}/enhanced_prompt.py +96 -25
- {mcp_agent/core → fast_agent/ui}/interactive_prompt.py +330 -125
- fast_agent/ui/mcp_ui_utils.py +224 -0
- {mcp_agent → fast_agent/ui}/progress_display.py +2 -2
- {mcp_agent/logging → fast_agent/ui}/rich_progress.py +4 -4
- {mcp_agent/core → fast_agent/ui}/usage_display.py +3 -8
- {fast_agent_mcp-0.2.58.dist-info → fast_agent_mcp-0.3.1.dist-info}/METADATA +7 -7
- fast_agent_mcp-0.3.1.dist-info/RECORD +203 -0
- fast_agent_mcp-0.3.1.dist-info/entry_points.txt +5 -0
- fast_agent_mcp-0.2.58.dist-info/RECORD +0 -193
- fast_agent_mcp-0.2.58.dist-info/entry_points.txt +0 -6
- mcp_agent/__init__.py +0 -114
- mcp_agent/agents/agent.py +0 -92
- mcp_agent/agents/workflow/__init__.py +0 -1
- mcp_agent/agents/workflow/orchestrator_agent.py +0 -597
- mcp_agent/app.py +0 -175
- mcp_agent/core/__init__.py +0 -26
- mcp_agent/core/prompt.py +0 -191
- mcp_agent/event_progress.py +0 -134
- mcp_agent/human_input/handler.py +0 -81
- mcp_agent/llm/__init__.py +0 -2
- mcp_agent/llm/augmented_llm_passthrough.py +0 -232
- mcp_agent/llm/augmented_llm_slow.py +0 -53
- mcp_agent/llm/providers/__init__.py +0 -8
- mcp_agent/llm/providers/augmented_llm_anthropic.py +0 -718
- mcp_agent/llm/providers/augmented_llm_google_native.py +0 -496
- mcp_agent/llm/providers/sampling_converter_anthropic.py +0 -57
- mcp_agent/llm/providers/sampling_converter_openai.py +0 -26
- mcp_agent/llm/sampling_format_converter.py +0 -37
- mcp_agent/logging/__init__.py +0 -0
- mcp_agent/mcp/__init__.py +0 -50
- mcp_agent/mcp/helpers/__init__.py +0 -25
- mcp_agent/mcp/helpers/content_helpers.py +0 -187
- mcp_agent/mcp/interfaces.py +0 -266
- mcp_agent/mcp/prompts/__init__.py +0 -0
- mcp_agent/mcp/prompts/__main__.py +0 -10
- mcp_agent/mcp_server_registry.py +0 -343
- mcp_agent/tools/tool_definition.py +0 -14
- mcp_agent/ui/console_display.py +0 -790
- mcp_agent/ui/console_display_legacy.py +0 -401
- {mcp_agent → fast_agent}/agents/workflow/orchestrator_prompts.py +0 -0
- {mcp_agent/agents → fast_agent/cli}/__init__.py +0 -0
- {mcp_agent → fast_agent}/cli/constants.py +0 -0
- {mcp_agent → fast_agent}/core/error_handling.py +0 -0
- {mcp_agent → fast_agent}/core/exceptions.py +0 -0
- {mcp_agent/cli → fast_agent/core/executor}/__init__.py +0 -0
- {mcp_agent → fast_agent/core}/executor/task_registry.py +0 -0
- {mcp_agent → fast_agent/core}/executor/workflow_signal.py +0 -0
- {mcp_agent → fast_agent}/human_input/form_fields.py +0 -0
- {mcp_agent → fast_agent}/llm/prompt_utils.py +0 -0
- {mcp_agent/core → fast_agent/llm}/request_params.py +0 -0
- {mcp_agent → fast_agent}/mcp/common.py +0 -0
- {mcp_agent/executor → fast_agent/mcp/prompts}/__init__.py +0 -0
- {mcp_agent → fast_agent}/mcp/prompts/prompt_constants.py +0 -0
- {mcp_agent → fast_agent}/py.typed +0 -0
- {mcp_agent → fast_agent}/resources/examples/data-analysis/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/data-analysis/mount-point/WA_Fn-UseC_-HR-Employee-Attrition.csv +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/elicitation_account_server.py +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/elicitation_forms_server.py +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/elicitation_game_server.py +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/elicitations/fastagent.secrets.yaml.example +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/state-transfer/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/mcp/state-transfer/fastagent.secrets.yaml.example +0 -0
- {mcp_agent → fast_agent}/resources/examples/researcher/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/.env.sample +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/Makefile +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/README.md +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/demo_images/clam.jpg +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/demo_images/crab.png +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/demo_images/shrimp.png +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/docker-compose.yml +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/mcp_server/Dockerfile +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/mcp_server/entrypoint.sh +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/mcp_server/mcp_server.py +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/mcp_server/pyproject.toml +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/tensorzero_config/system_schema.json +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/tensorzero_config/system_template.minijinja +0 -0
- {mcp_agent → fast_agent}/resources/examples/tensorzero/tensorzero_config/tensorzero.toml +0 -0
- {mcp_agent → fast_agent}/resources/examples/workflows/fastagent.config.yaml +0 -0
- {mcp_agent → fast_agent}/resources/examples/workflows/graded_report.md +0 -0
- {mcp_agent → fast_agent}/resources/examples/workflows/short_story.md +0 -0
- {mcp_agent → fast_agent}/resources/examples/workflows/short_story.txt +0 -0
- {mcp_agent → fast_agent/ui}/console.py +0 -0
- {mcp_agent/core → fast_agent/ui}/mermaid_utils.py +0 -0
- {fast_agent_mcp-0.2.58.dist-info → fast_agent_mcp-0.3.1.dist-info}/WHEEL +0 -0
- {fast_agent_mcp-0.2.58.dist-info → fast_agent_mcp-0.3.1.dist-info}/licenses/LICENSE +0 -0
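The dominant change in this release is the move of the entire package namespace from mcp_agent to fast_agent, as the rename entries above show. A minimal migration sketch for downstream code, assuming the old import paths mirrored the old file layout (the new paths below are taken from the example diffs further down):

    # 0.2.x (assumed from the old mcp_agent/... file paths above)
    # from mcp_agent.core.fastagent import FastAgent

    # 0.3.x
    from fast_agent.core.fastagent import FastAgent
    from fast_agent.llm.fastagent_llm import RequestParams

    fast = FastAgent("Data Analysis & Campaign Generator")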
{mcp_agent → fast_agent}/mcp/prompts/prompt_template.py

@@ -16,16 +16,16 @@ from mcp.types import (
 )
 from pydantic import BaseModel, field_validator

-from
-from mcp_agent.mcp.prompt_serialization import (
+from fast_agent.mcp.prompt_serialization import (
     multipart_messages_to_delimited_format,
 )
-from
+from fast_agent.mcp.prompts.prompt_constants import (
     ASSISTANT_DELIMITER,
     DEFAULT_DELIMITER_MAP,
     RESOURCE_DELIMITER,
     USER_DELIMITER,
 )
+from fast_agent.types import PromptMessageExtended


 class PromptMetadata(BaseModel):

@@ -108,14 +108,14 @@ class PromptTemplate:
     @classmethod
     def from_multipart_messages(
         cls,
-        messages: List[
+        messages: List[PromptMessageExtended],
         delimiter_map: Optional[Dict[str, str]] = None,
     ) -> "PromptTemplate":
         """
-        Create a PromptTemplate from a list of
+        Create a PromptTemplate from a list of PromptMessageExtended objects.

         Args:
-            messages: List of
+            messages: List of PromptMessageExtended objects
             delimiter_map: Optional map of delimiters to roles

         Returns:

@@ -165,17 +165,17 @@ class PromptTemplate:
         # Create a new list with substitutions applied to each section
         return [section.apply_substitutions(context) for section in self._parsed_content]

-    def
+    def apply_substitutions_to_extended(
         self, context: Dict[str, Any]
-    ) -> List[
+    ) -> List[PromptMessageExtended]:
         """
-        Apply variable substitutions to the template and return
+        Apply variable substitutions to the template and return PromptMessageExtended objects.

         Args:
            context: Dictionary of variable names to values

         Returns:
-            List of
+            List of PromptMessageExtended objects with substitutions applied
         """
         # First create a substituted template
         content_sections = self.apply_substitutions(context)

@@ -201,7 +201,7 @@ class PromptTemplate:
                 )
             )

-            multiparts.append(
+            multiparts.append(PromptMessageExtended(role=section.role, content=content_items))

         return multiparts

@@ -211,12 +211,12 @@ class PromptTemplate:
         matches = re.findall(variable_pattern, text)
         return set(matches)

-    def
+    def to_extended_messages(self) -> List[PromptMessageExtended]:
         """
-        Convert this template to a list of
+        Convert this template to a list of PromptMessageExtended objects.

         Returns:
-            List of
+            List of PromptMessageExtended objects
         """
         multiparts = []

@@ -239,7 +239,7 @@ class PromptTemplate:
                 )
             )

-            multiparts.append(
+            multiparts.append(PromptMessageExtended(role=section.role, content=content_items))

         return multiparts

@@ -348,12 +348,12 @@ class PromptTemplateLoader:

         return PromptTemplate(content, self.delimiter_map, template_file_path=file_path)

-    def load_from_multipart(self, messages: List[
+    def load_from_multipart(self, messages: List[PromptMessageExtended]) -> PromptTemplate:
         """
-        Create a PromptTemplate from a list of
+        Create a PromptTemplate from a list of PromptMessageExtended objects.

         Args:
-            messages: List of
+            messages: List of PromptMessageExtended objects

         Returns:
             A PromptTemplate object
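The prompt_template changes are a mechanical rename of the old multipart-message API onto the new PromptMessageExtended type. A minimal sketch of the renamed surface, assuming an already-constructed PromptTemplate instance (loaded via PromptTemplateLoader in practice) and a hypothetical template variable name:

    from fast_agent.types import PromptMessageExtended

    # template: an existing PromptTemplate instance (construction elided here)
    messages: list[PromptMessageExtended] = template.to_extended_messages()

    # Variable substitution now also returns PromptMessageExtended objects
    rendered = template.apply_substitutions_to_extended({"topic": "quarterly report"})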
{mcp_agent → fast_agent}/mcp/sampling.py

@@ -1,5 +1,5 @@
 """
-This simplified implementation directly converts between MCP types and
+This simplified implementation directly converts between MCP types and PromptMessageExtended.
 """

 from typing import TYPE_CHECKING

@@ -7,21 +7,23 @@ from typing import TYPE_CHECKING
 from mcp import ClientSession
 from mcp.types import CreateMessageRequestParams, CreateMessageResult, TextContent

-from
-from
-from
-from
-from
+from fast_agent.agents.agent_types import AgentConfig
+from fast_agent.agents.llm_agent import LlmAgent
+from fast_agent.core.logging.logger import get_logger
+from fast_agent.interfaces import FastAgentLLMProtocol
+from fast_agent.llm.sampling_converter import SamplingConverter
+from fast_agent.mcp.helpers.server_config_helpers import get_server_config
+from fast_agent.types.llm_stop_reason import LlmStopReason

 if TYPE_CHECKING:
-    from
+    from fast_agent.types import PromptMessageExtended

 logger = get_logger(__name__)


 def create_sampling_llm(
     params: CreateMessageRequestParams, model_string: str, api_key: str | None
-) ->
+) -> FastAgentLLMProtocol:
     """
     Create an LLM instance for sampling without tools support.
     This utility function creates a minimal LLM instance based on the model string.

@@ -33,21 +35,19 @@ def create_sampling_llm(
     Returns:
         An initialized LLM instance ready to use
     """
-    from
-    from mcp_agent.llm.model_factory import ModelFactory
+    from fast_agent.llm.model_factory import ModelFactory

     app_context = None
     try:
-        from
+        from fast_agent.context import get_current_context

         app_context = get_current_context()
     except Exception:
         logger.warning("App context not available for sampling call")

-    agent =
+    agent = LlmAgent(
         config=sampling_agent_config(params),
         context=app_context,
-        connection_persistence=False,
     )

     # Create the LLM using the factory

@@ -82,30 +82,33 @@ async def sample(mcp_ctx: ClientSession, params: CreateMessageRequestParams) ->
     try:
         # Extract model from server config using type-safe helper
         server_config = get_server_config(mcp_ctx)

         # First priority: explicitly configured sampling model
         if server_config and hasattr(server_config, "sampling") and server_config.sampling:
             model = server_config.sampling.model

         # Second priority: auto_sampling fallback (if enabled at application level)
         if model is None:
             # Check if auto_sampling is enabled
             auto_sampling_enabled = False
             try:
-                from
+                from fast_agent.context import get_current_context
+
                 app_context = get_current_context()
                 if app_context and app_context.config:
-                    auto_sampling_enabled = getattr(app_context.config,
+                    auto_sampling_enabled = getattr(app_context.config, "auto_sampling", True)
             except Exception as e:
                 logger.debug(f"Could not get application config: {e}")
                 auto_sampling_enabled = True  # Default to enabled

             if auto_sampling_enabled:
                 # Import here to avoid circular import
-                from
+                from fast_agent.mcp.mcp_agent_client_session import MCPAgentClientSession

                 # Try agent's model first (from the session)
-                if hasattr(mcp_ctx, "session") and isinstance(
+                if hasattr(mcp_ctx, "session") and isinstance(
+                    mcp_ctx.session, MCPAgentClientSession
+                ):
                     if mcp_ctx.session.agent_model:
                         model = mcp_ctx.session.agent_model
                         logger.debug(f"Using agent's model for sampling: {model}")

@@ -123,7 +126,9 @@ async def sample(mcp_ctx: ClientSession, params: CreateMessageRequestParams) ->
                 logger.debug(f"Could not get system default model: {e}")

         if model is None:
-            raise ValueError(
+            raise ValueError(
+                "No model configured for sampling (server config, agent model, or system default)"
+            )

         # Create an LLM instance
         llm = create_sampling_llm(params, model, api_key)

@@ -132,20 +137,20 @@
         if not params.messages:
             raise ValueError("No messages provided")

-        # Convert all SamplingMessages to
+        # Convert all SamplingMessages to PromptMessageExtended objects
         conversation = SamplingConverter.convert_messages(params.messages)

         # Extract request parameters using our converter
         request_params = SamplingConverter.extract_request_params(params)

-        llm_response:
+        llm_response: PromptMessageExtended = await llm.generate(conversation, request_params)
         logger.info(f"Complete sampling request : {llm_response.first_text()[:50]}...")

         return CreateMessageResult(
             role=llm_response.role,
             content=TextContent(type="text", text=llm_response.first_text()),
             model=model,
-            stopReason=
+            stopReason=LlmStopReason.END_TURN.value,
         )
     except Exception as e:
         logger.error(f"Error in sampling: {str(e)}")

@@ -171,4 +176,4 @@ def sampling_agent_config(
     if params and params.systemPrompt is not None:
         instruction = params.systemPrompt

-    return AgentConfig(name="sampling_agent", instruction=instruction
+    return AgentConfig(name="sampling_agent", instruction=instruction)
{mcp_agent/mcp_server → fast_agent/mcp/server}/agent_server.py

@@ -11,11 +11,10 @@ from typing import Set
 from mcp.server.fastmcp import Context as MCPContext
 from mcp.server.fastmcp import FastMCP

-import
-import
-
-from
-from mcp_agent.logging.logger import get_logger
+import fast_agent.core
+import fast_agent.core.prompt
+from fast_agent.core.agent_app import AgentApp
+from fast_agent.core.logging.logger import get_logger

 logger = get_logger(__name__)

@@ -96,7 +95,7 @@ class AgentMCPServer:

         # Convert the multipart message history to standard PromptMessages
         multipart_history = agent._llm.message_history
-        prompt_messages =
+        prompt_messages = fast_agent.core.prompt.Prompt.from_multipart(multipart_history)

         # In FastMCP, we need to return the raw list of messages
         # that matches the structure that FastMCP expects (list of dicts with role/content)
fast_agent/mcp/ui_agent.py

@@ -0,0 +1,48 @@
+"""
+MCP Agent with UI support using mixin pattern.
+
+This module provides a concrete agent class that combines McpAgent
+with UI functionality through the mixin pattern.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from fast_agent.agents.mcp_agent import McpAgent
+from fast_agent.mcp.ui_mixin import McpUIMixin
+
+if TYPE_CHECKING:
+    from fast_agent.context import Context
+
+
+class McpAgentWithUI(McpUIMixin, McpAgent):
+    """
+    MCP Agent with UI resource handling capabilities.
+
+    This class combines the base McpAgent functionality with UI resource
+    processing using the mixin pattern. It's a clean, type-safe way to add
+    UI functionality without the complexity of wrapper classes.
+
+    Usage:
+        agent = McpAgentWithUI(config, context=context, ui_mode="auto")
+    """
+
+    def __init__(
+        self,
+        config,
+        context: "Context | None" = None,
+        ui_mode: str = "auto",
+        **kwargs: Any,
+    ) -> None:
+        """
+        Initialize the agent with UI capabilities.
+
+        Args:
+            config: Agent configuration
+            context: Application context
+            ui_mode: UI mode - "disabled", "enabled", or "auto"
+            **kwargs: Additional arguments passed to parent classes
+        """
+        # Initialize both parent classes with the ui_mode parameter
+        super().__init__(config=config, context=context, ui_mode=ui_mode, **kwargs)
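Listing McpUIMixin before McpAgent in the bases means the mixin's run_tools and show_assistant_message overrides run first under Python's MRO; each delegates back to the base agent via super(), so UI extraction wraps the normal tool flow rather than replacing it.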
fast_agent/mcp/ui_mixin.py

@@ -0,0 +1,209 @@
+"""
+MCP UI Mixin - Clean mixin pattern for MCP UI functionality.
+
+This module provides a mixin class that can be combined with McpAgent
+to add UI resource handling without modifying the base agent implementation.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple
+
+from mcp.types import CallToolResult, ContentBlock, EmbeddedResource
+
+from fast_agent.constants import MCP_UI
+from fast_agent.ui.mcp_ui_utils import open_links_in_browser, ui_links_from_channel
+
+if TYPE_CHECKING:
+    from rich.text import Text
+
+    from fast_agent.types import PromptMessageExtended
+
+
+class McpUIMixin:
+    """
+    Mixin that adds MCP-UI resource handling to any agent.
+
+    This mixin can be combined with any agent class to add UI resource
+    extraction and display functionality. It overrides run_tools to
+    intercept tool results and extract UI resources.
+
+    Usage:
+        class MyAgentWithUI(McpUIMixin, McpAgent):
+            def __init__(self, *args, ui_mode: str = "auto", **kwargs):
+                super().__init__(*args, **kwargs)
+                self._ui_mode = ui_mode
+    """
+
+    def __init__(self, *args, ui_mode: str = "auto", **kwargs):
+        """Initialize the mixin with UI mode configuration."""
+        super().__init__(*args, **kwargs)
+        self._ui_mode: str = ui_mode
+        self._pending_ui_resources: List[ContentBlock] = []
+
+    def set_ui_mode(self, mode: str) -> None:
+        """
+        Set the UI mode for handling MCP-UI resources.
+
+        Args:
+            mode: One of "disabled", "enabled", or "auto"
+        """
+        if mode not in ("disabled", "enabled", "auto"):
+            mode = "auto"
+        self._ui_mode = mode
+
+    async def run_tools(self, request: "PromptMessageExtended") -> "PromptMessageExtended":
+        """
+        Override run_tools to extract and handle UI resources.
+
+        This method intercepts tool results, extracts any UI resources,
+        and adds them to the message channels for later display.
+        """
+        # If UI is disabled, just pass through to parent
+        if self._ui_mode == "disabled":
+            return await super().run_tools(request)  # type: ignore
+
+        # Run the tools normally via parent implementation
+        result = await super().run_tools(request)  # type: ignore
+
+        # Extract UI resources from tool results
+        if result and result.tool_results:
+            processed_results, ui_blocks = self._extract_ui_from_tool_results(result.tool_results)
+
+            # For mode 'auto', only act when we actually extracted something
+            if self._ui_mode == "enabled" or (self._ui_mode == "auto" and ui_blocks):
+                # Update tool_results with UI resources removed
+                result.tool_results = processed_results
+
+                # Add UI resources to channels
+                channels = result.channels or {}
+                current = channels.get(MCP_UI, [])
+                channels[MCP_UI] = current + ui_blocks
+                result.channels = channels
+
+                # Store for display after assistant message
+                self._pending_ui_resources = ui_blocks
+
+        return result
+
+    async def show_assistant_message(
+        self,
+        message: "PromptMessageExtended",
+        bottom_items: List[str] | None = None,
+        highlight_items: str | List[str] | None = None,
+        max_item_length: int | None = None,
+        name: str | None = None,
+        model: str | None = None,
+        additional_message: Optional["Text"] = None,
+    ) -> None:
+        """Override to display UI resources after showing assistant message."""
+        # Show the assistant message normally via parent
+        await super().show_assistant_message(  # type: ignore
+            message,
+            bottom_items,
+            highlight_items,
+            max_item_length,
+            name=name,
+            model=model,
+            additional_message=additional_message,
+        )
+
+        # Handle any pending UI resources from the previous user message
+        if self._ui_mode != "disabled":
+            await self._display_ui_resources_from_history()
+
+    async def _display_ui_resources_from_history(self) -> None:
+        """
+        Check message history for UI resources and display them.
+
+        This looks at the previous user message for any UI resources
+        that should be displayed after the assistant's response.
+        """
+        try:
+            history = self.message_history  # type: ignore
+            if history and len(history) >= 2:
+                prev = history[-2]
+                if prev and prev.role == "user":
+                    channels = prev.channels or {}
+                    ui_resources = channels.get(MCP_UI, []) if isinstance(channels, dict) else []
+                    if ui_resources:
+                        await self._display_ui_resources(ui_resources)
+        except Exception:
+            # Silently handle any errors in UI display
+            pass
+
+    async def _display_ui_resources(self, resources: Sequence[ContentBlock]) -> None:
+        """
+        Display UI resources by creating links and optionally opening in browser.
+
+        Args:
+            resources: List of UI resource content blocks
+        """
+        links = ui_links_from_channel(resources)
+        if links:
+            # Display links in console
+            await self.display.show_mcp_ui_links(links)  # type: ignore
+
+            # Auto-open in browser if in auto mode
+            if self._ui_mode == "auto":
+                open_links_in_browser(links, mcp_ui_mode=self._ui_mode)
+
+    def _extract_ui_from_tool_results(
+        self,
+        tool_results: Dict[str, CallToolResult],
+    ) -> Tuple[Dict[str, CallToolResult], List[ContentBlock]]:
+        """
+        Extract UI resources from tool results.
+
+        Returns a tuple of (cleaned_tool_results, extracted_ui_blocks).
+        """
+        if not tool_results:
+            return tool_results, []
+
+        extracted_ui: List[ContentBlock] = []
+        new_results: Dict[str, CallToolResult] = {}
+
+        for key, result in tool_results.items():
+            try:
+                ui_blocks, other_blocks = self._split_ui_blocks(list(result.content or []))
+                if ui_blocks:
+                    extracted_ui.extend(ui_blocks)
+
+                # Recreate CallToolResult without UI blocks
+                new_results[key] = CallToolResult(content=other_blocks, isError=result.isError)
+            except Exception:
+                # Pass through untouched on any error
+                new_results[key] = result
+
+        return new_results, extracted_ui
+
+    def _split_ui_blocks(
+        self, blocks: List[ContentBlock]
+    ) -> Tuple[List[ContentBlock], List[ContentBlock]]:
+        """
+        Split content blocks into UI and non-UI blocks.
+
+        Returns tuple of (ui_blocks, other_blocks).
+        """
+        ui_blocks: List[ContentBlock] = []
+        other_blocks: List[ContentBlock] = []
+
+        for block in blocks or []:
+            if self._is_ui_embedded_resource(block):
+                ui_blocks.append(block)
+            else:
+                other_blocks.append(block)
+
+        return ui_blocks, other_blocks
+
+    def _is_ui_embedded_resource(self, block: ContentBlock) -> bool:
+        """Check if a content block is a UI embedded resource."""
+        try:
+            if isinstance(block, EmbeddedResource):
+                res = block.resource
+                uri = res.uri if res else None
+                if uri is not None:
+                    return str(uri).startswith("ui://")
+        except Exception:
+            pass
+        return False
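The mixin's split logic keys entirely on embedded resources whose URI uses the ui:// scheme. A small, self-contained sketch of that behaviour, assuming a standalone McpUIMixin instance and hand-built mcp.types objects purely for illustration (the tool name and URI are hypothetical):

    from mcp.types import CallToolResult, EmbeddedResource, TextContent, TextResourceContents

    from fast_agent.mcp.ui_mixin import McpUIMixin

    # One ordinary text block and one MCP-UI embedded resource
    text_block = TextContent(type="text", text="plain tool output")
    ui_block = EmbeddedResource(
        type="resource",
        resource=TextResourceContents(uri="ui://demo/panel", mimeType="text/html", text="<p>hi</p>"),
    )

    mixin = McpUIMixin(ui_mode="auto")  # standalone instance, only to exercise the helpers
    ui, other = mixin._split_ui_blocks([text_block, ui_block])
    assert ui == [ui_block] and other == [text_block]

    # _extract_ui_from_tool_results rebuilds each CallToolResult without the ui:// blocks
    cleaned, extracted = mixin._extract_ui_from_tool_results(
        {"demo_tool": CallToolResult(content=[text_block, ui_block], isError=False)}
    )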
fast_agent/mcp_server_registry.py

@@ -0,0 +1,90 @@
+"""
+This module defines a `ServerRegistry` class for managing MCP server configurations
+and initialization logic.
+
+The class loads server configurations from a YAML file,
+supports dynamic registration of initialization hooks, and provides methods for
+server initialization.
+"""
+
+from typing import Dict
+
+from fast_agent.config import (
+    MCPServerSettings,
+    Settings,
+    get_settings,
+)
+from fast_agent.core.logging.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+class ServerRegistry:
+    """
+    Maps MCP Server configurations to names; can be populated from a YAML file (other formats soon)
+
+    Attributes:
+        config_path (str): Path to the YAML configuration file.
+        registry (Dict[str, MCPServerSettings]): Loaded server configurations.
+    """
+
+    registry: Dict[str, MCPServerSettings] = {}
+
+    def __init__(
+        self,
+        config: Settings | None = None,
+    ) -> None:
+        """
+        Initialize the ServerRegistry with a configuration file.
+
+        Args:
+            config (Settings): The Settings object containing the server configurations.
+            config_path (str): Path to the YAML configuration file.
+        """
+        if config is not None and config.mcp is not None:
+            self.registry = config.mcp.servers or {}
+
+    ## TODO-- leaving this here to support more file formats to add servers
+    def load_registry_from_file(
+        self, config_path: str | None = None
+    ) -> Dict[str, MCPServerSettings]:
+        """
+        Load the YAML configuration file and validate it.
+
+        Returns:
+            Dict[str, MCPServerSettings]: A dictionary of server configurations.
+
+        Raises:
+            ValueError: If the configuration is invalid.
+        """
+        servers = {}
+
+        settings = get_settings(config_path)
+
+        if (
+            settings.mcp is not None
+            and hasattr(settings.mcp, "servers")
+            and settings.mcp.servers is not None
+        ):
+            return settings.mcp.servers
+
+        return servers
+
+    def get_server_config(self, server_name: str) -> MCPServerSettings | None:
+        """
+        Get the configuration for a specific server.
+
+        Args:
+            server_name (str): The name of the server.
+
+        Returns:
+            MCPServerSettings: The server configuration.
+        """
+
+        server_config = self.registry.get(server_name)
+        if server_config is None:
+            logger.warning(f"Server '{server_name}' not found in registry.")
+            return None
+        elif server_config.name is None:
+            server_config.name = server_name
+        return server_config
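The new top-level registry is a thin lookup over the mcp.servers section of the Settings object. A short usage sketch, assuming a fastagent.config.yaml with a server entry named "fetch" (both hypothetical):

    from fast_agent.config import get_settings
    from fast_agent.mcp_server_registry import ServerRegistry

    settings = get_settings(None)  # same call load_registry_from_file makes when no path is given
    registry = ServerRegistry(config=settings)

    # Returns MCPServerSettings (with .name backfilled) or None, logging a warning on a miss
    fetch_config = registry.get_server_config("fetch")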
{mcp_agent → fast_agent}/resources/examples/data-analysis/analysis-campaign.py

@@ -1,7 +1,7 @@
 import asyncio

-from
-from
+from fast_agent.core.fastagent import FastAgent
+from fast_agent.llm.fastagent_llm import RequestParams

 # Create the application
 fast = FastAgent("Data Analysis & Campaign Generator")

@@ -179,8 +179,9 @@ async def main() -> None:
     )

     async with fast.run() as agent:
-        await agent.
-
+        await agent.interactive(
+            "research_campaign_creator",
+            default_prompt="Analyze the CSV file in the current directory and create a comprehensive multi-lingual social media campaign based on the findings. Save all campaign elements as separate files.",
         )