mcp-mesh 0.8.0b9__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _mcp_mesh/__init__.py +1 -1
- _mcp_mesh/engine/dependency_injector.py +9 -0
- _mcp_mesh/engine/mesh_llm_agent.py +36 -14
- _mcp_mesh/engine/mesh_llm_agent_injector.py +189 -35
- _mcp_mesh/pipeline/api_heartbeat/rust_api_heartbeat.py +5 -1
- _mcp_mesh/pipeline/mcp_heartbeat/rust_heartbeat.py +12 -1
- {mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/METADATA +4 -2
- {mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/RECORD +12 -12
- mesh/decorators.py +174 -92
- mesh/helpers.py +52 -0
- {mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/WHEEL +0 -0
- {mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/licenses/LICENSE +0 -0
_mcp_mesh/__init__.py
CHANGED
_mcp_mesh/engine/dependency_injector.py
CHANGED
@@ -348,6 +348,15 @@ class DependencyInjector:
         logger.debug(f"🤖 Creating LLM injection wrapper for {function_id}")
         return self._llm_injector.create_injection_wrapper(func, function_id)
 
+    def initialize_direct_llm_agents(self) -> None:
+        """
+        Initialize LLM agents that use direct LiteLLM (no mesh delegation).
+
+        This should be called during agent startup to initialize agents that
+        don't need to wait for registry response.
+        """
+        self._llm_injector.initialize_direct_llm_agents()
+
     def create_injection_wrapper(
         self, func: Callable, dependencies: list[str]
     ) -> Callable:
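The new method is a thin facade over the LLM injector. A minimal sketch of how it is expected to be driven at agent startup, mirroring the `_handle_mesh_event` change in rust_heartbeat.py further down (the hook name here is illustrative, not part of the package):

from _mcp_mesh.engine.dependency_injector import get_global_injector

def on_agent_registered() -> None:
    # Direct-LiteLLM agents (string provider, no tool filter) need nothing from
    # the registry, so they can be wired up as soon as registration succeeds.
    injector = get_global_injector()
    injector.initialize_direct_llm_agents()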
_mcp_mesh/engine/mesh_llm_agent.py
CHANGED
@@ -14,8 +14,12 @@ from typing import Any, Dict, List, Literal, Optional, Union
 from pydantic import BaseModel
 
 from .llm_config import LLMConfig
-from .llm_errors import (
-
+from .llm_errors import (
+    LLMAPIError,
+    MaxIterationsError,
+    ResponseParseError,
+    ToolExecutionError,
+)
 from .provider_handlers import ProviderHandlerRegistry
 from .response_parser import ResponseParser
 from .tool_executor import ToolExecutor
@@ -23,8 +27,7 @@ from .tool_schema_builder import ToolSchemaBuilder
 
 # Import Jinja2 for template rendering
 try:
-    from jinja2 import
-    TemplateSyntaxError)
+    from jinja2 import Environment, FileSystemLoader, Template, TemplateSyntaxError
 except ImportError:
     Environment = None
     FileSystemLoader = None
@@ -633,12 +636,14 @@ IMPORTANT TOOL CALLING RULES:
             # Multi-turn conversation - use provided messages array
             messages = message.copy()
 
-            #
-
-
-
-
-
+            # Only add/update system message if we have non-empty content
+            # (Claude API rejects empty system messages - though decorator provides default)
+            if system_content:
+                if not messages or messages[0].get("role") != "system":
+                    messages.insert(0, {"role": "system", "content": system_content})
+                else:
+                    # Replace existing system message with our constructed one
+                    messages[0] = {"role": "system", "content": system_content}
 
             # Log conversation history
             logger.info(
@@ -646,10 +651,17 @@ IMPORTANT TOOL CALLING RULES:
             )
         else:
             # Single-turn - build messages array from string
-
-
-
-
+            # Only include system message if non-empty (Claude API rejects empty system messages)
+            if system_content:
+                messages = [
+                    {"role": "system", "content": system_content},
+                    {"role": "user", "content": message},
+                ]
+            else:
+                # Fallback for edge case where system_content is explicitly empty
+                messages = [
+                    {"role": "user", "content": message},
+                ]
 
         logger.info(f"🚀 Starting agentic loop for message: {message[:100]}...")
 
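Taken together, the two hunks above change how the messages array is assembled so that an empty system prompt is never sent (Claude's API rejects empty system messages). A standalone sketch of the resulting behavior, with an illustrative function name rather than the library's actual method:

def build_messages(message, system_content):
    """Sketch of the new single-turn / multi-turn message assembly."""
    if isinstance(message, list):
        # Multi-turn: prepend or replace the system message only when non-empty
        messages = message.copy()
        if system_content:
            if not messages or messages[0].get("role") != "system":
                messages.insert(0, {"role": "system", "content": system_content})
            else:
                messages[0] = {"role": "system", "content": system_content}
        return messages
    # Single-turn: include the system message only when non-empty
    if system_content:
        return [
            {"role": "system", "content": system_content},
            {"role": "user", "content": message},
        ]
    return [{"role": "user", "content": message}]

print(build_messages("What is 2 + 2?", "You are a helpful assistant."))
print(build_messages([{"role": "user", "content": "hi"}], ""))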
@@ -705,6 +717,16 @@ IMPORTANT TOOL CALLING RULES:
         if self.model:
             model_params["model"] = self.model
 
+        # Issue #459: Include output_schema for provider to apply vendor-specific handling
+        # (e.g., OpenAI needs response_format, not prompt-based JSON instructions)
+        if self.output_type is not str and hasattr(
+            self.output_type, "model_json_schema"
+        ):
+            model_params["output_schema"] = (
+                self.output_type.model_json_schema()
+            )
+            model_params["output_type_name"] = self.output_type.__name__
+
         logger.debug(
             f"📤 Delegating to mesh provider with handler-prepared params: "
             f"keys={list(model_params.keys())}"
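The check targets Pydantic output types: anything other than plain str that exposes model_json_schema() is forwarded as a JSON schema plus its class name. A standalone illustration of the payload (the model below is made up for the example):

from pydantic import BaseModel

class WeatherReport(BaseModel):
    city: str
    temperature_c: float

model_params: dict = {}
output_type = WeatherReport
if output_type is not str and hasattr(output_type, "model_json_schema"):
    # Same shape the agent now forwards to the mesh provider
    model_params["output_schema"] = output_type.model_json_schema()
    model_params["output_type_name"] = output_type.__name__

print(model_params["output_type_name"])                      # WeatherReport
print(sorted(model_params["output_schema"]["properties"]))   # ['city', 'temperature_c']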
_mcp_mesh/engine/mesh_llm_agent_injector.py
CHANGED
@@ -65,6 +65,75 @@ class MeshLlmAgentInjector(BaseInjector):
         super().__init__()
         self._llm_agents: dict[str, dict[str, Any]] = {}
 
+    def initialize_direct_llm_agents(self) -> None:
+        """
+        Initialize LLM agents that use direct LiteLLM (no mesh delegation).
+
+        This handles the case where:
+        - provider is a string (e.g., "claude") - direct LiteLLM call
+        - filter is None or empty - no mesh tools needed
+
+        These agents don't need to wait for registry response since all
+        information is available at decorator time.
+        """
+        llm_agents = DecoratorRegistry.get_mesh_llm_agents()
+
+        for function_id, llm_metadata in llm_agents.items():
+            config = llm_metadata.config
+            provider = config.get("provider")
+            filter_config = config.get("filter")
+
+            # Check if this is a direct LiteLLM agent (provider is string, not dict)
+            is_direct_llm = isinstance(provider, str)
+
+            # Check if no tools needed (filter is None or empty)
+            has_no_filter = filter_config is None or (
+                isinstance(filter_config, list) and len(filter_config) == 0
+            )
+
+            if is_direct_llm and has_no_filter:
+                # Skip if already initialized
+                if function_id in self._llm_agents:
+                    continue
+
+                logger.info(
+                    f"🔧 Initializing direct LiteLLM agent for '{function_id}' "
+                    f"(provider={provider}, no filter)"
+                )
+
+                # Initialize empty tools data for direct LiteLLM
+                self._llm_agents[function_id] = {
+                    "config": config,
+                    "output_type": llm_metadata.output_type,
+                    "param_name": llm_metadata.param_name,
+                    "tools_metadata": [],  # No tools for direct LiteLLM
+                    "tools_proxies": {},  # No tool proxies needed
+                    "function": llm_metadata.function,
+                    "provider_proxy": None,  # No mesh delegation
+                }
+
+                # Get the wrapper and update it with LLM agent
+                wrapper = llm_metadata.function
+                if wrapper and hasattr(wrapper, "_mesh_update_llm_agent"):
+                    llm_agent = self._create_llm_agent(function_id)
+                    wrapper._mesh_update_llm_agent(llm_agent)
+                    logger.info(
+                        f"🔄 Updated wrapper with MeshLlmAgent for '{function_id}' (direct LiteLLM mode)"
+                    )
+
+                    # Set factory for per-call context agent creation (template support)
+                    if config.get("is_template", False):
+                        def create_context_agent(
+                            context_value: Any, _func_id: str = function_id
+                        ) -> MeshLlmAgent:
+                            """Factory to create MeshLlmAgent with context for template rendering."""
+                            return self._create_llm_agent(_func_id, context_value=context_value)
+
+                        wrapper._mesh_create_context_agent = create_context_agent
+                        logger.info(
+                            f"🎯 Set context agent factory for template-based function '{function_id}' (direct LiteLLM mode)"
+                        )
+
     def _build_function_name_to_id_mapping(self) -> dict[str, str]:
         """
         Build mapping from function_name to function_id.
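From the user's side, an agent takes this fast path when `provider` is a plain string and no tool `filter` is given. A hedged usage sketch (it assumes mcp-mesh 0.8.1 is installed; the function body is illustrative, the model string comes from the warning text in mesh/decorators.py below, and the injected parameter defaults to the name `llm` as in the injector code above):

import mesh

@mesh.llm(provider="claude", model="anthropic/claude-sonnet-4-5")  # no filter -> direct LiteLLM
def summarize(text: str, llm=None):
    # `llm` is replaced with a MeshLlmAgent as soon as the agent registers,
    # without waiting for tool information from the registry.
    ...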
@@ -176,16 +245,41 @@ class MeshLlmAgentInjector(BaseInjector):
             f"✅ Set provider proxy for '{function_id}': {provider_proxy.function_name} at {provider_proxy.endpoint} (vendor={vendor})"
         )
 
-        # Re-create and update MeshLlmAgent with new provider
-        # Get the function wrapper from DecoratorRegistry
+        # Re-create and update MeshLlmAgent with new provider
+        # Get the function wrapper and metadata from DecoratorRegistry
        llm_agents = DecoratorRegistry.get_mesh_llm_agents()
        wrapper = None
+        llm_metadata = None
        for agent_func_id, metadata in llm_agents.items():
            if metadata.function_id == function_id:
                wrapper = metadata.function
+                llm_metadata = metadata
                break
 
-        #
+        # Check if tools are required (filter is specified)
+        has_filter = False
+        if llm_metadata and llm_metadata.config:
+            filter_config = llm_metadata.config.get("filter")
+            has_filter = filter_config is not None and len(filter_config) > 0
+
+        # If no filter specified, initialize empty tools data so we can create LLM agent without tools
+        # This supports simple LLM calls (text generation) that don't need tool calling
+        if not has_filter and "tools_metadata" not in self._llm_agents[function_id]:
+            self._llm_agents[function_id].update(
+                {
+                    "config": llm_metadata.config if llm_metadata else {},
+                    "output_type": llm_metadata.output_type if llm_metadata else None,
+                    "param_name": llm_metadata.param_name if llm_metadata else "llm",
+                    "tools_metadata": [],  # No tools for simple LLM calls
+                    "tools_proxies": {},  # No tool proxies needed
+                    "function": llm_metadata.function if llm_metadata else None,
+                }
+            )
+            logger.info(
+                f"✅ Initialized empty tools for '{function_id}' (no filter specified - simple LLM mode)"
+            )
+
+        # Update wrapper if we have tools data (either from filter matching or initialized empty)
        if (
            wrapper
            and hasattr(wrapper, "_mesh_update_llm_agent")
@@ -194,9 +288,26 @@ class MeshLlmAgentInjector(BaseInjector):
             llm_agent = self._create_llm_agent(function_id)
             wrapper._mesh_update_llm_agent(llm_agent)
             logger.info(
-                f"🔄 Updated wrapper with
+                f"🔄 Updated wrapper with MeshLlmAgent for '{function_id}'"
+                + (" (with tools)" if has_filter else " (simple LLM mode)")
             )
-
+
+            # Set factory for per-call context agent creation (template support)
+            # This is critical for filter=None cases where _process_function_tools isn't called
+            config_dict = llm_metadata.config if llm_metadata else {}
+            if config_dict.get("is_template", False):
+                # Capture function_id by value using default argument to avoid closure issues
+                def create_context_agent(
+                    context_value: Any, _func_id: str = function_id
+                ) -> MeshLlmAgent:
+                    """Factory to create MeshLlmAgent with context for template rendering."""
+                    return self._create_llm_agent(_func_id, context_value=context_value)
+
+                wrapper._mesh_create_context_agent = create_context_agent
+                logger.info(
+                    f"🎯 Set context agent factory for template-based function '{function_id}' (simple LLM mode)"
+                )
+        elif wrapper and hasattr(wrapper, "_mesh_update_llm_agent") and has_filter:
            logger.debug(
                f"⏳ Provider set for '{function_id}', waiting for tools before updating wrapper"
            )
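The `_func_id: str = function_id` default argument in the factory is the standard trick for capturing an outer/loop variable by value; without it, every factory created in a loop would see only the last value. A standalone illustration of the pitfall being avoided:

# Late binding: both closures see the final value of `name`.
late = [lambda: name for name in ("a", "b")]
print([f() for f in late])      # ['b', 'b']

# Default-argument binding captures the value at definition time.
bound = [lambda _name=name: _name for name in ("a", "b")]
print([f() for f in bound])     # ['a', 'b']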
@@ -432,36 +543,60 @@ class MeshLlmAgentInjector(BaseInjector):
         def inject_llm_agent(func: Callable, args: tuple, kwargs: dict) -> tuple:
             """Inject LLM agent into kwargs if not provided."""
             if param_name not in kwargs or kwargs.get(param_name) is None:
-                #
+                # Get config from runtime data or fallback to decorator registry.
+                # Runtime data (self._llm_agents) is populated during heartbeat and has
+                # tools/provider info. Decorator registry is populated at decorator time
+                # and always has config/context_param. For self-dependency calls that
+                # happen before heartbeat, we need the decorator registry fallback.
+                agent_data = None
+                config_dict = None
+
+                # Try runtime data first (has tools, provider from heartbeat)
                 if function_id in self._llm_agents:
                     agent_data = self._llm_agents[function_id]
-                    config_dict = agent_data
-
-
-
-
-
-
-
-
-
-
-                        func, explicit_name=context_param_name
+                    config_dict = agent_data.get("config")
+
+                # Fallback to decorator registry (always available, has context_param)
+                # This is critical for self-dependency calls that happen before heartbeat
+                if config_dict is None:
+                    llm_agents_registry = DecoratorRegistry.get_mesh_llm_agents()
+                    if function_id in llm_agents_registry:
+                        llm_metadata = llm_agents_registry[function_id]
+                        config_dict = llm_metadata.config
+                        logger.debug(
+                            f"🔄 Using DecoratorRegistry fallback for '{function_id}' config (self-dependency before heartbeat)"
                         )
 
-
-
-
-
+                # Check if templates are enabled
+                is_template = config_dict.get("is_template", False) if config_dict else False
+
+                if is_template and config_dict:
+                    # Templates enabled - create per-call agent with context
+                    # Import signature analyzer for context detection
+                    from .signature_analyzer import get_context_parameter_name
 
-
-
-
-
-
-                    context_value = args[ctx_index]
+                    # Detect context parameter
+                    context_param_name = config_dict.get("context_param")
+                    context_info = get_context_parameter_name(
+                        func, explicit_name=context_param_name
+                    )
 
-
+                    # Extract context value from call
+                    context_value = None
+                    if context_info is not None:
+                        ctx_name, ctx_index = context_info
+
+                        # Try kwargs first
+                        if ctx_name in kwargs:
+                            context_value = kwargs[ctx_name]
+                        # Then try positional args
+                        elif ctx_index < len(args):
+                            context_value = args[ctx_index]
+
+                    # Create agent with context for this call
+                    # Note: _create_llm_agent requires function_id in self._llm_agents
+                    # If not available yet, use cached agent with context_value set directly
+                    if function_id in self._llm_agents:
                        current_agent = self._create_llm_agent(
                            function_id, context_value=context_value
                        )
@@ -469,22 +604,41 @@ class MeshLlmAgentInjector(BaseInjector):
                             f"🤖 Created MeshLlmAgent with context for {func.__name__}.{param_name}"
                         )
                     else:
-                        #
+                        # Runtime data not yet available - use cached agent but log warning
+                        # The cached agent may have been created without context
                         current_agent = wrapper._mesh_llm_agent
                         if current_agent is not None:
-
-
-
+                            # Update context on the cached agent if possible
+                            if hasattr(current_agent, "_context_value"):
+                                current_agent._context_value = context_value
+                                logger.debug(
+                                    f"🤖 Updated context on cached MeshLlmAgent for {func.__name__}.{param_name}"
+                                )
+                            else:
+                                logger.debug(
+                                    f"🤖 Injected cached MeshLlmAgent into {func.__name__}.{param_name} (context may not be applied)"
+                                )
                         else:
                             logger.warning(
                                 f"⚠️ MeshLlmAgent for {func.__name__}.{param_name} is None (tools not yet received from registry)"
                             )
+                elif config_dict:
+                    # No template - use cached agent (existing behavior)
+                    current_agent = wrapper._mesh_llm_agent
+                    if current_agent is not None:
+                        logger.debug(
+                            f"🤖 Injected MeshLlmAgent into {func.__name__}.{param_name}"
+                        )
+                    else:
+                        logger.warning(
+                            f"⚠️ MeshLlmAgent for {func.__name__}.{param_name} is None (tools not yet received from registry)"
+                        )
                 else:
-                    # No
+                    # No config found anywhere - use cached (backward compatibility)
                     current_agent = wrapper._mesh_llm_agent
                     if current_agent is None:
                         logger.warning(
-                            f"⚠️ MeshLlmAgent for {func.__name__}.{param_name} is None (
+                            f"⚠️ MeshLlmAgent for {func.__name__}.{param_name} is None (no config found)"
                         )
 
                kwargs[param_name] = current_agent
_mcp_mesh/pipeline/api_heartbeat/rust_api_heartbeat.py
CHANGED
@@ -104,9 +104,10 @@ def _build_api_agent_spec(context: dict[str, Any], service_id: str = None) -> An
     # Build dependency specs
     deps = []
     for dep_cap in dependencies:
+        # Tags must be serialized to JSON string (Rust core expects string, not list)
         dep_spec = core.DependencySpec(
             capability=dep_cap,
-            tags=[],
+            tags=json.dumps([]),
             version=None,
         )
         deps.append(dep_spec)
@@ -136,6 +137,7 @@ def _build_api_agent_spec(context: dict[str, Any], service_id: str = None) -> An
         http_port=http_port,
         http_host=http_host,
         namespace=namespace,
+        agent_type="api",  # API services only consume capabilities, not provide them
         tools=tools if tools else None,
         llm_agents=None,  # API services don't have LLM agents
         heartbeat_interval=heartbeat_interval,
@@ -272,6 +274,8 @@ async def _handle_api_dependency_change(
     )
     if not current_service_id:
         # Use config resolver for consistent env var handling
+        from ...shared.config_resolver import get_config_value
+
         current_service_id = get_config_value("MCP_MESH_AGENT_ID")
 
     is_self_dependency = (
_mcp_mesh/pipeline/mcp_heartbeat/rust_heartbeat.py
CHANGED
@@ -117,9 +117,12 @@ def _build_agent_spec(context: dict[str, Any]) -> Any:
     # Build dependency specs
     deps = []
     for dep_info in tool_metadata.get("dependencies", []):
+        # Serialize tags to JSON to support nested arrays for OR alternatives
+        # e.g., ["addition", ["python", "typescript"]] -> addition AND (python OR typescript)
+        tags_json = json.dumps(dep_info.get("tags", []))
         dep_spec = core.DependencySpec(
             capability=dep_info.get("capability", ""),
-            tags=
+            tags=tags_json,
             version=dep_info.get("version"),
         )
         deps.append(dep_spec)
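Because the Rust core now receives tags as a JSON string, a dependency can mix plain tags with nested OR groups. A standalone illustration of the serialized form:

import json

# "addition" AND ("python" OR "typescript")
tags = ["addition", ["python", "typescript"]]
print(json.dumps(tags))  # '["addition", ["python", "typescript"]]'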
@@ -269,6 +272,14 @@ async def _handle_mesh_event(event: Any, context: dict[str, Any]) -> None:
     if event_type == "agent_registered":
         logger.info(f"Agent registered with ID: {event.agent_id}")
 
+        # Initialize direct LiteLLM agents that don't need mesh delegation
+        # These agents have provider="string" and filter=None, so all info is
+        # available at decorator time - no need to wait for registry response
+        from ...engine.dependency_injector import get_global_injector
+
+        injector = get_global_injector()
+        injector.initialize_direct_llm_agents()
+
     elif event_type == "registration_failed":
         logger.error(f"Agent registration failed: {event.error}")
 
{mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-mesh
-Version: 0.8.0b9
+Version: 0.8.1
 Summary: Kubernetes-native platform for distributed MCP applications
 Project-URL: Homepage, https://github.com/dhyansraj/mcp-mesh
 Project-URL: Documentation, https://github.com/dhyansraj/mcp-mesh/tree/main/docs
@@ -18,6 +18,8 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: System :: Distributed Computing
@@ -30,7 +32,7 @@ Requires-Dist: fastmcp<3.0.0,>=2.8.0
 Requires-Dist: httpx<1.0.0,>=0.25.0
 Requires-Dist: jinja2>=3.1.0
 Requires-Dist: litellm>=1.30.0
-Requires-Dist: mcp-mesh-core>=0.8.
+Requires-Dist: mcp-mesh-core>=0.8.1
 Requires-Dist: mcp<2.0.0,>=1.9.0
 Requires-Dist: prometheus-client<1.0.0,>=0.19.0
 Requires-Dist: pydantic<3.0.0,>=2.4.0
{mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/RECORD
CHANGED
@@ -1,16 +1,16 @@
-_mcp_mesh/__init__.py,sha256=
+_mcp_mesh/__init__.py,sha256=lXD1eCAZirQON6iPLZyBM5AagyH7bXFrcvB9EUDQv4w,2719
 _mcp_mesh/reload.py,sha256=5Yll9n0bqxM7pmTjfAaKWg-WT_Vi0YTh0_UNWbCNCIQ,6217
 _mcp_mesh/reload_runner.py,sha256=SgQKzzO2yHfSUBq8s3SpAnovWA0rveimVNaxeLCEo_0,1310
 _mcp_mesh/engine/__init__.py,sha256=U_6Kw3vA_3RiNK0Oln5c5C7WvA9lSONV22wWzfxYHNw,2975
 _mcp_mesh/engine/async_mcp_client.py,sha256=Sz-rXTkb1Mng_f0SpLqLuOdPJ8vZjv3DFy0i8yYOqYk,8792
 _mcp_mesh/engine/base_injector.py,sha256=qzRLZqFP2VvEFagVovkpdldvDmm3VwPHm6tHwV58a2k,5648
 _mcp_mesh/engine/decorator_registry.py,sha256=cch2QdQ6bKjHKEGi1XWp1YcLLO3uI2YlxwWBO7Np65E,28229
-_mcp_mesh/engine/dependency_injector.py,sha256=
+_mcp_mesh/engine/dependency_injector.py,sha256=p_W7pJNnGNGvixiv14CsjRV5If7Ll1VdV9BEU9cde1g,31923
 _mcp_mesh/engine/http_wrapper.py,sha256=Simd6IEsLO2FXQOuf1WEx57SBN6DSr5RzphXnk0asHM,24152
 _mcp_mesh/engine/llm_config.py,sha256=95bOsGWro5E1JGq7oZtEYhVdrzcIJqjht_r5vEdJVz4,2049
 _mcp_mesh/engine/llm_errors.py,sha256=h7BiI14u-jL8vtvBfFbFDDrN7gIw8PQjXIl5AP1SBuA,3276
-_mcp_mesh/engine/mesh_llm_agent.py,sha256=
-_mcp_mesh/engine/mesh_llm_agent_injector.py,sha256=
+_mcp_mesh/engine/mesh_llm_agent.py,sha256=am4TG2Dd0KkgtZe0EJsVkkRi9srfbANEpO0cPS8m4w4,35320
+_mcp_mesh/engine/mesh_llm_agent_injector.py,sha256=nukBt-xE6XoWVwUY9OFjFFhoBDSmh6OVn6wKeAkgaEA,37031
 _mcp_mesh/engine/response_parser.py,sha256=g3VNoFJotaLrOAS0pL_OTCrv9t9XQe9Iiz1plsm28bQ,10280
 _mcp_mesh/engine/self_dependency_proxy.py,sha256=OkKt0-B_ADnJlWtHiHItoZCBZ7Su0iz2unEPFfXvrs4,3302
 _mcp_mesh/engine/session_aware_client.py,sha256=QejKag5zYNos5BVffQvNXFMECHFMLNOv78By4e_JzQE,10589
@@ -29,7 +29,7 @@ _mcp_mesh/engine/provider_handlers/provider_handler_registry.py,sha256=klBZW8iX6
 _mcp_mesh/pipeline/__init__.py,sha256=MgPwpwbiD62ND4HXKKNGcnreDk-TvPmQOs5WmjtHQ3M,1263
 _mcp_mesh/pipeline/api_heartbeat/__init__.py,sha256=qGjEgxbGJFSl9Qm3bwu3X5yizAMbN4WpFtIUekDSFuU,690
 _mcp_mesh/pipeline/api_heartbeat/api_lifespan_integration.py,sha256=h0mTmLyPlGDqomSHpbW7S-AZNz1Tyvg1kpy9aeWkQsU,3879
-_mcp_mesh/pipeline/api_heartbeat/rust_api_heartbeat.py,sha256=
+_mcp_mesh/pipeline/api_heartbeat/rust_api_heartbeat.py,sha256=7Dv3lGTn2n2WrDpXoWqxUlqi6NwjPRH17-GVbsUckgE,15843
 _mcp_mesh/pipeline/api_startup/__init__.py,sha256=eivolkSKot2bJTWP2BV8-RKRT1Zm7SGQYuEUiTxusOQ,577
 _mcp_mesh/pipeline/api_startup/api_pipeline.py,sha256=I9-Q0o2py5oAHZO2DJOeTD1uZo1-Dpn258k5Tr0dv9o,2474
 _mcp_mesh/pipeline/api_startup/api_server_setup.py,sha256=72oCMkCzRfxYrE5sfFJbr57BYJwRSyKxBMISTOHmKyc,14919
@@ -38,7 +38,7 @@ _mcp_mesh/pipeline/api_startup/middleware_integration.py,sha256=J7Ux_nJ1VsMqVzl5
 _mcp_mesh/pipeline/api_startup/route_collection.py,sha256=WPr4hRPLIWnNIJCoRHZ141ph9tAa_-Pm_j2TiCuWS4k,2002
 _mcp_mesh/pipeline/api_startup/route_integration.py,sha256=qq1AVaWna-CWEXyehyDL3EyeYKgo5aMtei8uBNdvkZ8,12448
 _mcp_mesh/pipeline/mcp_heartbeat/__init__.py,sha256=mhDcSquoHkhRItqgbM8iFfAKC2m7qMW_0smqtUgSl-w,389
-_mcp_mesh/pipeline/mcp_heartbeat/rust_heartbeat.py,sha256=
+_mcp_mesh/pipeline/mcp_heartbeat/rust_heartbeat.py,sha256=G4BTGrEaeX3c_PHKqgV-A76dBbAnEbMeoAcHkNXIuXc,27010
 _mcp_mesh/pipeline/mcp_startup/__init__.py,sha256=qy960dnAoHLXMcL_y_rcro9Km2AoCVzC7_CxMwao564,1166
 _mcp_mesh/pipeline/mcp_startup/configuration.py,sha256=OnumIPRVBTne2OEU2VWLZovLKvWcNF9iJVQtlVwuim0,2805
 _mcp_mesh/pipeline/mcp_startup/decorator_collection.py,sha256=RHC6MHtfP9aP0hZ-IJjISZu72e0Pml3LU0qr7dc284w,2294
@@ -76,10 +76,10 @@ _mcp_mesh/tracing/trace_context_helper.py,sha256=A0UipvDExePaX-E-4SAp4M8n8uwed9P
 _mcp_mesh/tracing/utils.py,sha256=GWwfvab0tYGr9QAe_zgZjZxgDKTTs0p5Mf8w6WJeWC0,4486
 _mcp_mesh/utils/fastmcp_schema_extractor.py,sha256=fttO1EABbf4GWKjE9V5DimwbhzGY9DbfGWQ2ak4SRnE,17264
 mesh/__init__.py,sha256=avMnUHkNAK7VgON2OhXkrFB290gr1HErghmTZpOXr-U,4207
-mesh/decorators.py,sha256=
-mesh/helpers.py,sha256=
+mesh/decorators.py,sha256=Xru9NoOolmdm-awGuuQkUgBb-s5bq9UF4p5QdVidAvI,71374
+mesh/helpers.py,sha256=UrYclIZzpOgoMQO-qWjeSshCdHCLokpByzuIUt5L7KM,15551
 mesh/types.py,sha256=vr0CKyPbP6lHgxj9kh_GMSLo3xkJ66PFPV_opfRb1H4,17772
-mcp_mesh-0.8.
-mcp_mesh-0.8.
-mcp_mesh-0.8.
-mcp_mesh-0.8.
+mcp_mesh-0.8.1.dist-info/METADATA,sha256=xcCC3wocWG7g8uNDNDNzm2_NIrqIA_jmXBtbv4xQmKY,5138
+mcp_mesh-0.8.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+mcp_mesh-0.8.1.dist-info/licenses/LICENSE,sha256=_EBQHRQThv9FPOLc5eFOUdeeRO0mYwChC7cx60dM1tM,1078
+mcp_mesh-0.8.1.dist-info/RECORD,,
mesh/decorators.py
CHANGED
@@ -26,6 +26,25 @@ _runtime_processor: Any | None = None
 _SHARED_AGENT_ID: str | None = None
 
 
+def _find_available_port() -> int:
+    """
+    Find an available port by binding to port 0 and getting the OS-assigned port.
+
+    This is used when http_port=0 is specified to auto-assign a port.
+    Works reliably on all platforms (macOS, Linux, Windows) without external tools.
+
+    Returns:
+        int: An available port number
+    """
+    import socket
+
+    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+        s.bind(("127.0.0.1", 0))
+        s.listen(1)
+        port = s.getsockname()[1]
+        return port
+
+
 def _start_uvicorn_immediately(http_host: str, http_port: int):
     """
     Start basic uvicorn server immediately to prevent Python interpreter shutdown.
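The helper relies on the standard bind-to-port-0 trick: the OS assigns a free ephemeral port, which is read back with getsockname(). A standalone demonstration; note the small inherent race, since the socket is closed before uvicorn re-binds the port, which is the usual trade-off of this approach and the reason the port is logged up front.

import socket

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind(("127.0.0.1", 0))   # port 0 -> the OS picks a free port
    s.listen(1)
    port = s.getsockname()[1]

print(f"OS-assigned free port: {port}")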
@@ -76,8 +95,75 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
     app = FastAPI(title="MCP Mesh Agent (Starting)")
     logger.debug("📦 IMMEDIATE UVICORN: Created minimal FastAPI app")
 
-    # Add trace
-    # This must be done
+    # Add middleware to strip trace arguments from tool calls BEFORE app starts
+    # This must be done unconditionally because meshctl --trace sends trace args
+    # regardless of agent's tracing configuration
+    try:
+        import json as json_module
+
+        class TraceArgumentStripperMiddleware:
+            """Pure ASGI middleware to strip trace arguments from tool calls.
+
+            This middleware ALWAYS runs to strip _trace_id and _parent_span from
+            MCP tool arguments, preventing Pydantic validation errors when
+            meshctl --trace is used with agents that don't have tracing enabled.
+            """
+
+            def __init__(self, app):
+                self.app = app
+
+            async def __call__(self, scope, receive, send):
+                if scope["type"] != "http":
+                    await self.app(scope, receive, send)
+                    return
+
+                async def receive_with_trace_stripping():
+                    message = await receive()
+                    if message["type"] == "http.request":
+                        body = message.get("body", b"")
+                        if body:
+                            try:
+                                payload = json_module.loads(body.decode("utf-8"))
+                                if payload.get("method") == "tools/call":
+                                    arguments = payload.get("params", {}).get(
+                                        "arguments", {}
+                                    )
+                                    # Strip trace context fields from arguments
+                                    if (
+                                        "_trace_id" in arguments
+                                        or "_parent_span" in arguments
+                                    ):
+                                        arguments.pop("_trace_id", None)
+                                        arguments.pop("_parent_span", None)
+                                        modified_body = json_module.dumps(
+                                            payload
+                                        ).encode("utf-8")
+                                        logger.debug(
+                                            "[TRACE] Stripped trace fields from arguments"
+                                        )
+                                        return {
+                                            **message,
+                                            "body": modified_body,
+                                        }
+                            except Exception as e:
+                                logger.debug(
+                                    f"[TRACE] Failed to process body for stripping: {e}"
+                                )
+                    return message
+
+                await self.app(scope, receive_with_trace_stripping, send)
+
+        app.add_middleware(TraceArgumentStripperMiddleware)
+        logger.debug(
+            "📦 IMMEDIATE UVICORN: Added trace argument stripper middleware"
+        )
+    except Exception as e:
+        logger.warning(
+            f"⚠️ IMMEDIATE UVICORN: Failed to add trace argument stripper middleware: {e}"
+        )
+
+    # Add trace context middleware for distributed tracing (optional)
+    # This handles trace propagation and header injection when tracing is enabled
     try:
         import os
 
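In isolation, what the stripper does: meshctl --trace attaches `_trace_id` and `_parent_span` to MCP `tools/call` arguments, and removing them keeps Pydantic tool signatures from rejecting the call. A standalone illustration of the transformation (payload values are made up):

import json

payload = {
    "jsonrpc": "2.0",
    "id": 1,
    "method": "tools/call",
    "params": {
        "name": "add",
        "arguments": {"a": 1, "b": 2, "_trace_id": "abc123", "_parent_span": "span42"},
    },
}

if payload.get("method") == "tools/call":
    arguments = payload.get("params", {}).get("arguments", {})
    arguments.pop("_trace_id", None)
    arguments.pop("_parent_span", None)

print(json.dumps(payload))  # arguments now contain only {"a": 1, "b": 2}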
@@ -152,11 +238,11 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
                 except Exception as e:
                     logger.warning(f"Failed to set trace context: {e}")
 
-                # Create
-                #
+                # Create receive wrapper to extract trace context from arguments
+                # Note: Argument stripping is handled by TraceArgumentStripperMiddleware
                 import json as json_module
 
-                async def
+                async def receive_with_trace_extraction():
                     message = await receive()
                     if message["type"] == "http.request":
                         body = message.get("body", b"")
@@ -207,29 +293,14 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
                                         parent_span = (
                                             current_trace.parent_span
                                         )
+                                        logger.debug(
+                                            f"[TRACE] Extracted trace context from arguments: trace_id={arg_trace_id}"
+                                        )
                                 except Exception:
                                     pass
-
-                                # Strip trace context fields from arguments
-                                if (
-                                    "_trace_id" in arguments
-                                    or "_parent_span" in arguments
-                                ):
-                                    arguments.pop("_trace_id", None)
-                                    arguments.pop("_parent_span", None)
-                                    modified_body = json_module.dumps(
-                                        payload
-                                    ).encode("utf-8")
-                                    logger.debug(
-                                        "[TRACE] Stripped trace fields from arguments"
-                                    )
-                                    return {
-                                        **message,
-                                        "body": modified_body,
-                                    }
                             except Exception as e:
                                 logger.debug(
-                                    f"[TRACE] Failed to process body: {e}"
+                                    f"[TRACE] Failed to process body for extraction: {e}"
                                 )
                     return message
 
@@ -249,7 +320,7 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
                     await send(message)
 
                 await self.app(
-                    scope,
+                    scope, receive_with_trace_extraction, send_with_trace_headers
                 )
 
        app.add_middleware(TraceContextMiddleware)
@@ -306,6 +377,13 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
     # which is handled upstream in the @mesh.agent decorator
     port = http_port
 
+    # Handle http_port=0: find an available port BEFORE starting uvicorn
+    # This is more reliable than detecting the port after uvicorn starts
+    # and works on all platforms (Linux containers don't have lsof installed)
+    if port == 0:
+        port = _find_available_port()
+        logger.info(f"🎯 IMMEDIATE UVICORN: Auto-assigned port {port} for agent")
+
     logger.debug(
         f"🚀 IMMEDIATE UVICORN: Starting uvicorn server on {http_host}:{port}"
     )
@@ -370,53 +448,8 @@ def _start_uvicorn_immediately(http_host: str, http_port: int):
     # Give server a moment to start
     time.sleep(1)
 
-    # Detect actual port if port=0 (auto-assign)
-    actual_port = port
-    if port == 0:
-        import socket
-
-        # Try to detect actual port by scanning for listening sockets
-        try:
-            import subprocess
-
-            # Use lsof to find the port bound by this process
-            result = subprocess.run(
-                ["lsof", "-i", "-P", "-n", f"-p{os.getpid()}"],
-                capture_output=True,
-                text=True,
-                timeout=5,
-            )
-            for line in result.stdout.split("\n"):
-                if "LISTEN" in line and "python" in line.lower():
-                    # Parse port from line like "python 1234 user 5u IPv4 ... TCP *:54321 (LISTEN)"
-                    parts = line.split()
-                    for part in parts:
-                        if ":" in part and "(LISTEN)" not in part:
-                            try:
-                                port_str = part.split(":")[-1]
-                                detected_port = int(port_str)
-                                if detected_port > 0:
-                                    actual_port = detected_port
-                                    logger.info(
-                                        f"🎯 IMMEDIATE UVICORN: Detected auto-assigned port {actual_port}"
-                                    )
-                                    # Update server_info with actual port
-                                    server_info["port"] = actual_port
-                                    server_info["requested_port"] = (
-                                        0  # Remember original request
-                                    )
-                                    break
-                            except (ValueError, IndexError):
-                                pass
-                    if actual_port > 0:
-                        break
-        except Exception as e:
-            logger.warning(
-                f"⚠️ IMMEDIATE UVICORN: Could not detect auto-assigned port: {e}"
-            )
-
     logger.debug(
-        f"✅ IMMEDIATE UVICORN: Uvicorn server running on {http_host}:{
+        f"✅ IMMEDIATE UVICORN: Uvicorn server running on {http_host}:{port} (daemon thread)"
     )
 
     # Set up registry context for shutdown cleanup (use defaults initially)
@@ -585,12 +618,25 @@ def tool(
                raise ValueError("dependency capability must be a string")
 
            # Validate optional dependency fields
+            # Tags can be strings or arrays of strings (OR alternatives)
+            # e.g., ["required", ["python", "typescript"]] = required AND (python OR typescript)
            dep_tags = dep.get("tags", [])
            if not isinstance(dep_tags, list):
                raise ValueError("dependency tags must be a list")
            for tag in dep_tags:
-                if
-
+                if isinstance(tag, str):
+                    continue  # Simple tag - OK
+                elif isinstance(tag, list):
+                    # OR alternative - validate inner tags are all strings
+                    for inner_tag in tag:
+                        if not isinstance(inner_tag, str):
+                            raise ValueError(
+                                "OR alternative tags must be strings"
+                            )
+                else:
+                    raise ValueError(
+                        "tags must be strings or arrays of strings (OR alternatives)"
+                    )
 
            dep_version = dep.get("version")
            if dep_version is not None and not isinstance(dep_version, str):
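A hedged usage sketch of the relaxed validation (it assumes mcp-mesh 0.8.1 is installed; the dependency dict keys match the validation code above, while the capability name and tool body are illustrative):

import mesh

@mesh.tool(
    dependencies=[
        {
            "capability": "date_service",
            # "required" AND ("python" OR "typescript"): nested lists are OR groups
            "tags": ["required", ["python", "typescript"]],
        }
    ]
)
def greet(name: str) -> str:
    ...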
@@ -708,7 +754,7 @@ def agent(
    http_port: int = 0,
    enable_http: bool = True,
    namespace: str = "default",
-
+    heartbeat_interval: int = 5,
    health_check: Callable[[], Awaitable[Any]] | None = None,
    health_check_ttl: int = 15,
    auto_run: bool = True,  # Changed to True by default!
@@ -733,7 +779,7 @@ def agent(
            Environment variable: MCP_MESH_HTTP_ENABLED (takes precedence)
        namespace: Agent namespace (default: "default")
            Environment variable: MCP_MESH_NAMESPACE (takes precedence)
-
+        heartbeat_interval: Heartbeat interval in seconds (default: 5)
            Environment variable: MCP_MESH_HEALTH_INTERVAL (takes precedence)
        health_check: Optional async function that returns HealthStatus
            Called before heartbeat and on /health endpoint with TTL caching
@@ -750,7 +796,7 @@ def agent(
        MCP_MESH_HTTP_PORT: Override http_port parameter (integer, 0-65535)
        MCP_MESH_HTTP_ENABLED: Override enable_http parameter (boolean: true/false)
        MCP_MESH_NAMESPACE: Override namespace parameter (string)
-        MCP_MESH_HEALTH_INTERVAL: Override
+        MCP_MESH_HEALTH_INTERVAL: Override heartbeat_interval parameter (integer, ≥1)
        MCP_MESH_AUTO_RUN: Override auto_run parameter (boolean: true/false)
        MCP_MESH_AUTO_RUN_INTERVAL: Override auto_run_interval parameter (integer, ≥1)
 
@@ -801,10 +847,10 @@ def agent(
    if not isinstance(namespace, str):
        raise ValueError("namespace must be a string")
 
-    if not isinstance(
-        raise ValueError("
-    if
-        raise ValueError("
+    if not isinstance(heartbeat_interval, int):
+        raise ValueError("heartbeat_interval must be an integer")
+    if heartbeat_interval < 1:
+        raise ValueError("heartbeat_interval must be at least 1 second")
 
    if not isinstance(auto_run, bool):
        raise ValueError("auto_run must be a boolean")
@@ -859,9 +905,9 @@ def agent(
    # Import centralized defaults
    from _mcp_mesh.shared.defaults import MeshDefaults
 
-
+    final_heartbeat_interval = get_config_value(
        "MCP_MESH_HEALTH_INTERVAL",
-        override=
+        override=heartbeat_interval,
        default=MeshDefaults.HEALTH_INTERVAL,
        rule=ValidationRule.NONZERO_RULE,
    )
@@ -892,7 +938,7 @@ def agent(
        "http_port": final_http_port,
        "enable_http": final_enable_http,
        "namespace": final_namespace,
-        "
+        "heartbeat_interval": final_heartbeat_interval,
        "health_check": health_check,
        "health_check_ttl": health_check_ttl,
        "auto_run": final_auto_run,
@@ -1063,12 +1109,25 @@ def route(
                raise ValueError("dependency capability must be a string")
 
            # Validate optional dependency fields
+            # Tags can be strings or arrays of strings (OR alternatives)
+            # e.g., ["required", ["python", "typescript"]] = required AND (python OR typescript)
            dep_tags = dep.get("tags", [])
            if not isinstance(dep_tags, list):
                raise ValueError("dependency tags must be a list")
            for tag in dep_tags:
-                if
-
+                if isinstance(tag, str):
+                    continue  # Simple tag - OK
+                elif isinstance(tag, list):
+                    # OR alternative - validate inner tags are all strings
+                    for inner_tag in tag:
+                        if not isinstance(inner_tag, str):
+                            raise ValueError(
+                                "OR alternative tags must be strings"
+                            )
+                else:
+                    raise ValueError(
+                        "tags must be strings or arrays of strings (OR alternatives)"
+                    )
 
            dep_version = dep.get("version")
            if dep_version is not None and not isinstance(dep_version, str):
@@ -1354,6 +1413,34 @@ def llm(
        rule=ValidationRule.STRING_RULE,
    )
 
+    # Resolve model with env var override
+    resolved_model = get_config_value(
+        "MESH_LLM_MODEL",
+        override=model,
+        default=None,
+        rule=ValidationRule.STRING_RULE,
+    )
+
+    # Warn about missing configuration parameters
+    if not system_prompt and not system_prompt_file:
+        logger.warning(
+            f"⚠️ @mesh.llm: No 'system_prompt' specified for function '{func.__name__}'. "
+            f"Using default: 'You are a helpful assistant.' "
+            f"Consider adding a custom system_prompt for better results."
+        )
+
+    if isinstance(provider, str) and provider == "claude" and not resolved_model:
+        logger.warning(
+            f"⚠️ @mesh.llm: No 'model' specified for function '{func.__name__}'. "
+            f"The LLM provider will use its default model. "
+            f"Consider specifying a model explicitly (e.g., model='anthropic/claude-sonnet-4-5')."
+        )
+
+    # Use default system prompt if not provided
+    effective_system_prompt = (
+        system_prompt if system_prompt else "You are a helpful assistant."
+    )
+
    resolved_config = {
        "filter": filter,
        "filter_mode": get_config_value(
@@ -1363,12 +1450,7 @@ def llm(
            rule=ValidationRule.STRING_RULE,
        ),
        "provider": resolved_provider,
-        "model":
-            "MESH_LLM_MODEL",
-            override=model,
-            default=None,
-            rule=ValidationRule.STRING_RULE,
-        ),
+        "model": resolved_model,
        "api_key": api_key,  # Will be resolved from provider-specific env vars later
        "max_iterations": get_config_value(
            "MESH_LLM_MAX_ITERATIONS",
@@ -1376,7 +1458,7 @@ def llm(
            default=10,
            rule=ValidationRule.NONZERO_RULE,
        ),
-        "system_prompt":
+        "system_prompt": effective_system_prompt,
        "system_prompt_file": system_prompt_file,
        # Phase 1: Template metadata
        "is_template": is_template,
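Net effect of these three hunks on @mesh.llm configuration: the model is resolved once up front (the MESH_LLM_MODEL environment variable takes precedence over the decorator argument, matching the other documented env overrides), a default system prompt is substituted when none is given, and a warning is logged in both cases. A simplified standalone sketch of that resolution (the real code goes through get_config_value with validation rules):

import os

def resolve_llm_defaults(system_prompt: str | None, model: str | None) -> tuple[str, str | None]:
    # The environment variable takes precedence over the decorator argument
    resolved_model = os.environ.get("MESH_LLM_MODEL") or model
    # Fall back to a generic system prompt rather than sending an empty one
    effective_system_prompt = system_prompt if system_prompt else "You are a helpful assistant."
    return effective_system_prompt, resolved_model

print(resolve_llm_defaults(None, None))
# ('You are a helpful assistant.', None) -> provider default model, with a warning logged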
mesh/helpers.py
CHANGED
@@ -214,6 +214,58 @@ def llm_provider(
        f"(requested by consumer)"
    )
 
+    # Issue #459: Handle output_schema for vendor-specific structured output
+    # Convert to response_format for vendors that support it
+    output_schema = model_params_copy.pop("output_schema", None)
+    output_type_name = model_params_copy.pop("output_type_name", None)
+
+    # Vendors that support structured output via response_format
+    supported_structured_output_vendors = (
+        "openai",
+        "azure",  # Azure OpenAI uses same format as OpenAI
+        "gemini",
+        "vertex_ai",  # Vertex AI Gemini uses same format as Gemini
+        "anthropic",
+    )
+
+    if output_schema:
+        if vendor in supported_structured_output_vendors:
+            # Apply vendor-specific response_format for structured output
+            from _mcp_mesh.engine.provider_handlers import make_schema_strict
+
+            if vendor == "anthropic":
+                # Claude: doesn't require all properties in 'required', uses strict=False
+                schema = make_schema_strict(
+                    output_schema, add_all_required=False
+                )
+                strict_mode = False
+            else:
+                # OpenAI/Azure/Gemini/Vertex: require all properties in 'required', uses strict=True
+                schema = make_schema_strict(
+                    output_schema, add_all_required=True
+                )
+                strict_mode = True
+
+            model_params_copy["response_format"] = {
+                "type": "json_schema",
+                "json_schema": {
+                    "name": output_type_name or "Response",
+                    "schema": schema,
+                    "strict": strict_mode,
+                },
+            }
+            logger.debug(
+                f"🎯 Applied {vendor} response_format for structured output: "
+                f"{output_type_name} (strict={strict_mode})"
+            )
+        else:
+            # Vendor doesn't support structured output - warn user
+            logger.warning(
+                f"⚠️ Structured output schema '{output_type_name or 'Response'}' "
+                f"was provided but vendor '{vendor}' does not support response_format. "
+                f"The schema will be ignored and the LLM may return unstructured output."
+            )
+
    # Build litellm.completion arguments
    completion_args: dict[str, Any] = {
        "model": effective_model,
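For vendors on the supported list, the provider ends up passing litellm a response_format of the json_schema flavor. A standalone illustration of the resulting structure; the schema contents here are illustrative (the exact output of make_schema_strict is not shown in this diff), and strict is True for OpenAI/Azure/Gemini/Vertex AI and False for Anthropic:

response_format = {
    "type": "json_schema",
    "json_schema": {
        "name": "WeatherReport",
        "schema": {
            "type": "object",
            "properties": {
                "city": {"type": "string"},
                "temperature_c": {"type": "number"},
            },
            "required": ["city", "temperature_c"],
        },
        "strict": True,
    },
}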
{mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/WHEEL
File without changes
{mcp_mesh-0.8.0b9.dist-info → mcp_mesh-0.8.1.dist-info}/licenses/LICENSE
File without changes