mcp-mesh 0.7.21__py3-none-any.whl → 0.8.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. _mcp_mesh/__init__.py +1 -1
  2. _mcp_mesh/engine/dependency_injector.py +4 -6
  3. _mcp_mesh/engine/http_wrapper.py +69 -10
  4. _mcp_mesh/engine/mesh_llm_agent.py +4 -7
  5. _mcp_mesh/engine/mesh_llm_agent_injector.py +2 -1
  6. _mcp_mesh/engine/provider_handlers/__init__.py +14 -1
  7. _mcp_mesh/engine/provider_handlers/base_provider_handler.py +114 -8
  8. _mcp_mesh/engine/provider_handlers/claude_handler.py +15 -57
  9. _mcp_mesh/engine/provider_handlers/gemini_handler.py +181 -0
  10. _mcp_mesh/engine/provider_handlers/openai_handler.py +8 -63
  11. _mcp_mesh/engine/provider_handlers/provider_handler_registry.py +16 -10
  12. _mcp_mesh/engine/response_parser.py +61 -15
  13. _mcp_mesh/engine/unified_mcp_proxy.py +18 -34
  14. _mcp_mesh/pipeline/__init__.py +9 -20
  15. _mcp_mesh/pipeline/api_heartbeat/__init__.py +12 -7
  16. _mcp_mesh/pipeline/api_heartbeat/api_lifespan_integration.py +23 -49
  17. _mcp_mesh/pipeline/api_heartbeat/rust_api_heartbeat.py +425 -0
  18. _mcp_mesh/pipeline/api_startup/api_pipeline.py +7 -9
  19. _mcp_mesh/pipeline/api_startup/api_server_setup.py +91 -70
  20. _mcp_mesh/pipeline/api_startup/fastapi_discovery.py +22 -23
  21. _mcp_mesh/pipeline/api_startup/middleware_integration.py +32 -24
  22. _mcp_mesh/pipeline/api_startup/route_collection.py +2 -4
  23. _mcp_mesh/pipeline/mcp_heartbeat/__init__.py +5 -17
  24. _mcp_mesh/pipeline/mcp_heartbeat/rust_heartbeat.py +695 -0
  25. _mcp_mesh/pipeline/mcp_startup/__init__.py +2 -5
  26. _mcp_mesh/pipeline/mcp_startup/configuration.py +1 -1
  27. _mcp_mesh/pipeline/mcp_startup/fastapiserver_setup.py +5 -6
  28. _mcp_mesh/pipeline/mcp_startup/heartbeat_loop.py +6 -7
  29. _mcp_mesh/pipeline/mcp_startup/startup_orchestrator.py +21 -9
  30. _mcp_mesh/pipeline/mcp_startup/startup_pipeline.py +3 -8
  31. _mcp_mesh/pipeline/shared/mesh_pipeline.py +0 -2
  32. _mcp_mesh/reload.py +1 -3
  33. _mcp_mesh/shared/__init__.py +2 -8
  34. _mcp_mesh/shared/config_resolver.py +124 -80
  35. _mcp_mesh/shared/defaults.py +89 -14
  36. _mcp_mesh/shared/fastapi_middleware_manager.py +149 -91
  37. _mcp_mesh/shared/host_resolver.py +8 -46
  38. _mcp_mesh/shared/server_discovery.py +115 -86
  39. _mcp_mesh/shared/simple_shutdown.py +44 -86
  40. _mcp_mesh/tracing/execution_tracer.py +2 -6
  41. _mcp_mesh/tracing/redis_metadata_publisher.py +24 -79
  42. _mcp_mesh/tracing/trace_context_helper.py +3 -13
  43. _mcp_mesh/tracing/utils.py +29 -15
  44. _mcp_mesh/utils/fastmcp_schema_extractor.py +2 -1
  45. {mcp_mesh-0.7.21.dist-info → mcp_mesh-0.8.0b1.dist-info}/METADATA +2 -1
  46. mcp_mesh-0.8.0b1.dist-info/RECORD +85 -0
  47. mesh/__init__.py +2 -1
  48. mesh/decorators.py +89 -5
  49. _mcp_mesh/generated/.openapi-generator/FILES +0 -50
  50. _mcp_mesh/generated/.openapi-generator/VERSION +0 -1
  51. _mcp_mesh/generated/.openapi-generator-ignore +0 -15
  52. _mcp_mesh/generated/mcp_mesh_registry_client/__init__.py +0 -90
  53. _mcp_mesh/generated/mcp_mesh_registry_client/api/__init__.py +0 -6
  54. _mcp_mesh/generated/mcp_mesh_registry_client/api/agents_api.py +0 -1088
  55. _mcp_mesh/generated/mcp_mesh_registry_client/api/health_api.py +0 -764
  56. _mcp_mesh/generated/mcp_mesh_registry_client/api/tracing_api.py +0 -303
  57. _mcp_mesh/generated/mcp_mesh_registry_client/api_client.py +0 -798
  58. _mcp_mesh/generated/mcp_mesh_registry_client/api_response.py +0 -21
  59. _mcp_mesh/generated/mcp_mesh_registry_client/configuration.py +0 -577
  60. _mcp_mesh/generated/mcp_mesh_registry_client/exceptions.py +0 -217
  61. _mcp_mesh/generated/mcp_mesh_registry_client/models/__init__.py +0 -55
  62. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_info.py +0 -158
  63. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_metadata.py +0 -126
  64. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_metadata_dependencies_inner.py +0 -139
  65. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_metadata_dependencies_inner_one_of.py +0 -92
  66. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_registration.py +0 -103
  67. _mcp_mesh/generated/mcp_mesh_registry_client/models/agent_registration_metadata.py +0 -136
  68. _mcp_mesh/generated/mcp_mesh_registry_client/models/agents_list_response.py +0 -100
  69. _mcp_mesh/generated/mcp_mesh_registry_client/models/capability_info.py +0 -107
  70. _mcp_mesh/generated/mcp_mesh_registry_client/models/decorator_agent_metadata.py +0 -112
  71. _mcp_mesh/generated/mcp_mesh_registry_client/models/decorator_agent_request.py +0 -103
  72. _mcp_mesh/generated/mcp_mesh_registry_client/models/decorator_info.py +0 -105
  73. _mcp_mesh/generated/mcp_mesh_registry_client/models/dependency_info.py +0 -103
  74. _mcp_mesh/generated/mcp_mesh_registry_client/models/dependency_resolution_info.py +0 -106
  75. _mcp_mesh/generated/mcp_mesh_registry_client/models/error_response.py +0 -91
  76. _mcp_mesh/generated/mcp_mesh_registry_client/models/health_response.py +0 -103
  77. _mcp_mesh/generated/mcp_mesh_registry_client/models/heartbeat_request.py +0 -101
  78. _mcp_mesh/generated/mcp_mesh_registry_client/models/heartbeat_request_metadata.py +0 -111
  79. _mcp_mesh/generated/mcp_mesh_registry_client/models/heartbeat_response.py +0 -117
  80. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_provider.py +0 -93
  81. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_provider_resolution_info.py +0 -106
  82. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_tool_filter.py +0 -109
  83. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_tool_filter_filter_inner.py +0 -139
  84. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_tool_filter_filter_inner_one_of.py +0 -91
  85. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_tool_info.py +0 -101
  86. _mcp_mesh/generated/mcp_mesh_registry_client/models/llm_tool_resolution_info.py +0 -120
  87. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_agent_register_metadata.py +0 -112
  88. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_agent_registration.py +0 -129
  89. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_registration_response.py +0 -153
  90. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_registration_response_dependencies_resolved_value_inner.py +0 -101
  91. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_tool_dependency_registration.py +0 -93
  92. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_tool_register_metadata.py +0 -107
  93. _mcp_mesh/generated/mcp_mesh_registry_client/models/mesh_tool_registration.py +0 -117
  94. _mcp_mesh/generated/mcp_mesh_registry_client/models/registration_response.py +0 -119
  95. _mcp_mesh/generated/mcp_mesh_registry_client/models/resolved_llm_provider.py +0 -110
  96. _mcp_mesh/generated/mcp_mesh_registry_client/models/rich_dependency.py +0 -93
  97. _mcp_mesh/generated/mcp_mesh_registry_client/models/root_response.py +0 -92
  98. _mcp_mesh/generated/mcp_mesh_registry_client/models/standardized_dependency.py +0 -93
  99. _mcp_mesh/generated/mcp_mesh_registry_client/models/trace_event.py +0 -106
  100. _mcp_mesh/generated/mcp_mesh_registry_client/py.typed +0 -0
  101. _mcp_mesh/generated/mcp_mesh_registry_client/rest.py +0 -259
  102. _mcp_mesh/pipeline/api_heartbeat/api_dependency_resolution.py +0 -418
  103. _mcp_mesh/pipeline/api_heartbeat/api_fast_heartbeat_check.py +0 -117
  104. _mcp_mesh/pipeline/api_heartbeat/api_health_check.py +0 -140
  105. _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_orchestrator.py +0 -243
  106. _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_pipeline.py +0 -311
  107. _mcp_mesh/pipeline/api_heartbeat/api_heartbeat_send.py +0 -386
  108. _mcp_mesh/pipeline/api_heartbeat/api_registry_connection.py +0 -104
  109. _mcp_mesh/pipeline/mcp_heartbeat/dependency_resolution.py +0 -396
  110. _mcp_mesh/pipeline/mcp_heartbeat/fast_heartbeat_check.py +0 -116
  111. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_orchestrator.py +0 -311
  112. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_pipeline.py +0 -282
  113. _mcp_mesh/pipeline/mcp_heartbeat/heartbeat_send.py +0 -98
  114. _mcp_mesh/pipeline/mcp_heartbeat/lifespan_integration.py +0 -84
  115. _mcp_mesh/pipeline/mcp_heartbeat/llm_tools_resolution.py +0 -264
  116. _mcp_mesh/pipeline/mcp_heartbeat/registry_connection.py +0 -79
  117. _mcp_mesh/pipeline/shared/registry_connection.py +0 -80
  118. _mcp_mesh/shared/registry_client_wrapper.py +0 -515
  119. mcp_mesh-0.7.21.dist-info/RECORD +0 -152
  120. {mcp_mesh-0.7.21.dist-info → mcp_mesh-0.8.0b1.dist-info}/WHEEL +0 -0
  121. {mcp_mesh-0.7.21.dist-info → mcp_mesh-0.8.0b1.dist-info}/licenses/LICENSE +0 -0
@@ -1,84 +0,0 @@
1
- """
2
- FastAPI lifespan integration for heartbeat pipeline.
3
-
4
- Handles the execution of heartbeat pipeline as a background task
5
- during FastAPI application lifespan.
6
- """
7
-
8
- import asyncio
9
- import logging
10
- from typing import Any
11
-
12
- logger = logging.getLogger(__name__)
13
-
14
-
15
- async def heartbeat_lifespan_task(heartbeat_config: dict[str, Any]) -> None:
16
- """
17
- Heartbeat task that runs in FastAPI lifespan using pipeline architecture.
18
-
19
- Args:
20
- heartbeat_config: Configuration containing registry_wrapper, agent_id,
21
- interval, and context for heartbeat execution
22
- """
23
- registry_wrapper = heartbeat_config[
24
- "registry_wrapper"
25
- ] # May be None in standalone mode
26
- agent_id = heartbeat_config["agent_id"]
27
- interval = heartbeat_config["interval"]
28
- context = heartbeat_config["context"]
29
- standalone_mode = heartbeat_config.get("standalone_mode", False)
30
-
31
- # Check if running in standalone mode
32
- if standalone_mode:
33
- logger.info(
34
- f"💓 Starting heartbeat pipeline in standalone mode for agent '{agent_id}' (no registry communication)"
35
- )
36
- return # For now, skip heartbeat in standalone mode
37
-
38
- # Create heartbeat orchestrator for pipeline execution
39
- from .heartbeat_orchestrator import HeartbeatOrchestrator
40
-
41
- heartbeat_orchestrator = HeartbeatOrchestrator()
42
-
43
- logger.info(f"💓 Starting heartbeat pipeline task for agent '{agent_id}'")
44
-
45
- try:
46
- while True:
47
- # Check if shutdown is complete before executing heartbeat
48
- try:
49
- from ...shared.simple_shutdown import should_stop_heartbeat
50
-
51
- if should_stop_heartbeat():
52
- logger.info(
53
- f"🛑 Heartbeat stopped for agent '{agent_id}' due to shutdown"
54
- )
55
- break
56
- except ImportError:
57
- # If simple_shutdown is not available, continue normally
58
- pass
59
-
60
- try:
61
- # Execute heartbeat pipeline
62
- success = await heartbeat_orchestrator.execute_heartbeat(
63
- agent_id, context
64
- )
65
-
66
- if not success:
67
- # Log failure but continue to next cycle (pipeline handles detailed logging)
68
- logger.debug(
69
- f"💔 Heartbeat pipeline failed for agent '{agent_id}' - continuing to next cycle"
70
- )
71
-
72
- except Exception as e:
73
- # Log pipeline execution error but continue to next cycle for resilience
74
- logger.error(
75
- f"❌ Heartbeat pipeline execution error for agent '{agent_id}': {e}"
76
- )
77
- # Continue to next cycle - heartbeat should be resilient
78
-
79
- # Wait for next heartbeat interval
80
- await asyncio.sleep(interval)
81
-
82
- except asyncio.CancelledError:
83
- logger.info(f"🛑 Heartbeat pipeline task cancelled for agent '{agent_id}'")
84
- raise
@@ -1,264 +0,0 @@
1
- """
2
- LLM tools resolution step for MCP Mesh pipeline.
3
-
4
- Handles processing llm_tools from registry response and updating
5
- the LLM agent injection system.
6
- """
7
-
8
- import json
9
- import logging
10
- from typing import Any
11
-
12
- from ...engine.dependency_injector import get_global_injector
13
- from ..shared import PipelineResult, PipelineStatus, PipelineStep
14
-
15
- logger = logging.getLogger(__name__)
16
-
17
- # Global state for LLM tools hash tracking across heartbeat cycles
18
- _last_llm_tools_hash = None
19
-
20
-
21
- class LLMToolsResolutionStep(PipelineStep):
22
- """
23
- Processes LLM tools from registry response.
24
-
25
- Takes the llm_tools data from the heartbeat response and updates
26
- the LLM agent injection system. This enables LLM agents to receive
27
- auto-filtered, up-to-date tool lists based on their llm_filter configuration.
28
-
29
- The registry applies filtering logic and returns matching tools with
30
- full schemas that can be used by LLM agents.
31
- """
32
-
33
- def __init__(self):
34
- super().__init__(
35
- name="llm-tools-resolution",
36
- required=False, # Optional - only needed for LLM agents
37
- description="Process LLM tools resolution from registry",
38
- )
39
-
40
- async def execute(self, context: dict[str, Any]) -> PipelineResult:
41
- """Process LLM tools resolution with hash-based change detection."""
42
- self.logger.trace("Processing LLM tools resolution...")
43
-
44
- result = PipelineResult(message="LLM tools resolution processed")
45
-
46
- try:
47
- # Get heartbeat response
48
- heartbeat_response = context.get("heartbeat_response")
49
-
50
- if heartbeat_response is None:
51
- result.status = PipelineStatus.SUCCESS
52
- result.message = "No heartbeat response - completed successfully"
53
- self.logger.trace("ℹ️ No heartbeat response to process - this is normal")
54
- return result
55
-
56
- # Use hash-based change detection and processing logic
57
- await self.process_llm_tools_from_heartbeat(heartbeat_response)
58
-
59
- # Extract LLM tools and providers count for context
60
- llm_tools = heartbeat_response.get("llm_tools", {})
61
- llm_providers = heartbeat_response.get("llm_providers", {})
62
- function_count = len(llm_tools)
63
- tool_count = sum(
64
- len(tools) if isinstance(tools, list) else 0
65
- for tools in llm_tools.values()
66
- )
67
- provider_count = len(llm_providers)
68
-
69
- # Store processed LLM tools and providers info for context
70
- result.add_context("llm_function_count", function_count)
71
- result.add_context("llm_tool_count", tool_count)
72
- result.add_context("llm_provider_count", provider_count)
73
- result.add_context("llm_tools", llm_tools)
74
- result.add_context("llm_providers", llm_providers)
75
-
76
- result.message = (
77
- "LLM tools and providers resolution completed (efficient hash-based)"
78
- )
79
-
80
- if function_count > 0 or provider_count > 0:
81
- self.logger.info(
82
- f"🤖 LLM state resolved: {function_count} functions, {tool_count} tools, {provider_count} providers"
83
- )
84
-
85
- self.logger.trace(
86
- "🤖 LLM tools and providers resolution step completed using hash-based change detection"
87
- )
88
-
89
- except Exception as e:
90
- result.status = PipelineStatus.FAILED
91
- result.message = f"LLM tools resolution failed: {e}"
92
- result.add_error(str(e))
93
- self.logger.error(f"❌ LLM tools resolution failed: {e}")
94
-
95
- return result
96
-
97
- def _extract_llm_tools_state(
98
- self, heartbeat_response: dict[str, Any]
99
- ) -> dict[str, Any]:
100
- """Extract LLM tools and providers state structure from heartbeat response.
101
-
102
- Preserves array structure and order from registry.
103
-
104
- Returns:
105
- {
106
- "llm_tools": {function_id: [{function_name, capability, endpoint, input_schema, ...}, ...]},
107
- "llm_providers": {function_id: {name, endpoint, agent_id, capability, tags, ...}}
108
- }
109
- """
110
- llm_tools = heartbeat_response.get("llm_tools", {})
111
- llm_providers = heartbeat_response.get("llm_providers", {})
112
-
113
- if not isinstance(llm_tools, dict):
114
- self.logger.warning(f"llm_tools is not a dict, type={type(llm_tools)}")
115
- llm_tools = {}
116
-
117
- if not isinstance(llm_providers, dict):
118
- self.logger.warning(
119
- f"llm_providers is not a dict, type={type(llm_providers)}"
120
- )
121
- llm_providers = {}
122
-
123
- # Build state with both llm_tools and llm_providers
124
- # This ensures hash changes when EITHER tools OR providers change
125
- state = {
126
- "llm_tools": {},
127
- "llm_providers": llm_providers, # Include providers directly
128
- }
129
-
130
- # Filter out non-list values for llm_tools
131
- for function_id, tools in llm_tools.items():
132
- if isinstance(tools, list):
133
- state["llm_tools"][function_id] = tools
134
-
135
- return state
136
-
137
- def _hash_llm_tools_state(self, state: dict) -> str:
138
- """Create hash of LLM tools and providers state structure.
139
-
140
- This hash includes BOTH llm_tools and llm_providers to ensure
141
- rewiring happens when either changes.
142
- """
143
- import hashlib
144
-
145
- # Convert to sorted JSON string for consistent hashing
146
- state_json = json.dumps(state, sort_keys=True)
147
-
148
- hash_value = hashlib.sha256(state_json.encode()).hexdigest()[:16]
149
-
150
- return hash_value
151
-
152
- async def process_llm_tools_from_heartbeat(
153
- self, heartbeat_response: dict[str, Any]
154
- ) -> None:
155
- """Process heartbeat response to update LLM agent injection.
156
-
157
- Uses hash-based comparison to efficiently detect when ANY LLM tools OR providers change
158
- and then updates ALL affected LLM agents in one operation.
159
-
160
- Resilience logic:
161
- - No response (connection error, 5xx) → Skip entirely (keep existing state)
162
- - 2xx response with empty llm_tools/llm_providers → Clear all LLM state
163
- - 2xx response with partial llm_tools/llm_providers → Update to match registry exactly
164
-
165
- The hash includes both llm_tools and llm_providers to ensure rewiring happens
166
- when either changes (e.g., provider failover from Claude to OpenAI).
167
- """
168
- try:
169
- if not heartbeat_response:
170
- # No response from registry (connection error, timeout, 5xx)
171
- # → Skip entirely for resilience (keep existing LLM tools and providers)
172
- self.logger.trace(
173
- "No heartbeat response - skipping LLM state processing for resilience"
174
- )
175
- return
176
-
177
- # Extract current LLM tools and providers state
178
- current_state = self._extract_llm_tools_state(heartbeat_response)
179
-
180
- # IMPORTANT: Empty state from successful response means "no LLM tools or providers"
181
- # This is different from "no response" which means "keep existing for resilience"
182
-
183
- # Hash the current state (including both llm_tools and llm_providers)
184
- current_hash = self._hash_llm_tools_state(current_state)
185
-
186
- # Compare with previous state (use global variable)
187
- global _last_llm_tools_hash
188
- if current_hash == _last_llm_tools_hash:
189
- self.logger.trace(
190
- f"🔄 LLM state unchanged (hash: {current_hash}), skipping processing"
191
- )
192
- return
193
-
194
- # State changed - determine what changed
195
- llm_tools = current_state.get("llm_tools", {})
196
- llm_providers = current_state.get("llm_providers", {})
197
-
198
- function_count = len(llm_tools)
199
- total_tools = sum(len(tools) for tools in llm_tools.values())
200
- provider_count = len(llm_providers)
201
-
202
- if _last_llm_tools_hash is None:
203
- if function_count > 0 or provider_count > 0:
204
- self.logger.info(
205
- f"🤖 Initial LLM state detected: {function_count} functions, {total_tools} tools, {provider_count} providers"
206
- )
207
- else:
208
- self.logger.info(
209
- "🤖 Initial LLM state detected: no LLM tools or providers"
210
- )
211
- else:
212
- self.logger.info(
213
- f"🤖 LLM state changed (hash: {_last_llm_tools_hash} → {current_hash})"
214
- )
215
- if function_count > 0 or provider_count > 0:
216
- self.logger.info(
217
- f"🤖 Updating LLM state: {function_count} functions ({total_tools} tools), {provider_count} providers"
218
- )
219
- else:
220
- self.logger.info(
221
- "🤖 Registry reports no LLM tools or providers - clearing all existing state"
222
- )
223
-
224
- injector = get_global_injector()
225
-
226
- # Determine if this is initial processing or an update
227
- if _last_llm_tools_hash is None:
228
- # Initial processing - use process_llm_tools
229
- self.logger.trace(
230
- "🤖 Initial LLM tools processing - calling process_llm_tools()"
231
- )
232
- injector.process_llm_tools(llm_tools)
233
- else:
234
- # Update - use update_llm_tools
235
- self.logger.trace("🤖 LLM tools update - calling update_llm_tools()")
236
- injector.update_llm_tools(llm_tools)
237
-
238
- # Process LLM providers (v0.6.1 mesh delegation)
239
- # Now part of hash-based change detection, so this always runs when state changes
240
- if llm_providers:
241
- self.logger.info(
242
- f"🔌 Processing LLM providers for {len(llm_providers)} functions"
243
- )
244
- injector.process_llm_providers(llm_providers)
245
- else:
246
- self.logger.trace("🔌 No llm_providers in current state")
247
-
248
- # Store new hash for next comparison (use global variable)
249
- _last_llm_tools_hash = current_hash
250
-
251
- if function_count > 0 or provider_count > 0:
252
- self.logger.info(
253
- f"✅ Successfully processed LLM state: {function_count} functions ({total_tools} tools), {provider_count} providers (hash: {current_hash})"
254
- )
255
- else:
256
- self.logger.info(
257
- f"✅ LLM state synchronized (no tools or providers, hash: {current_hash})"
258
- )
259
-
260
- except Exception as e:
261
- self.logger.error(
262
- f"❌ Failed to process LLM tools from heartbeat: {e}", exc_info=True
263
- )
264
- # Don't raise - this should not break the heartbeat loop
@@ -1,79 +0,0 @@
1
- """
2
- Registry connection step for MCP Mesh pipeline.
3
-
4
- Handles establishing connection to the mesh registry service.
5
- """
6
-
7
- import logging
8
- import os
9
- from typing import Any
10
-
11
- from ...generated.mcp_mesh_registry_client.api_client import ApiClient
12
- from ...generated.mcp_mesh_registry_client.configuration import Configuration
13
- from ...shared.registry_client_wrapper import RegistryClientWrapper
14
- from ..shared import PipelineResult, PipelineStatus, PipelineStep
15
-
16
- logger = logging.getLogger(__name__)
17
-
18
-
19
- class RegistryConnectionStep(PipelineStep):
20
- """
21
- Establishes connection to the mesh registry.
22
-
23
- Creates and configures the registry client for subsequent
24
- communication steps.
25
- """
26
-
27
- def __init__(self):
28
- super().__init__(
29
- name="registry-connection",
30
- required=True,
31
- description="Connect to mesh registry service",
32
- )
33
-
34
- async def execute(self, context: dict[str, Any]) -> PipelineResult:
35
- """Establish registry connection or reuse existing one."""
36
- self.logger.trace("Checking registry connection...")
37
-
38
- result = PipelineResult(message="Registry connection ready")
39
-
40
- try:
41
- # Check if registry wrapper already exists in context (for heartbeat pipeline)
42
- existing_wrapper = context.get("registry_wrapper")
43
-
44
- if existing_wrapper:
45
- # Reuse existing connection for efficiency
46
- result.add_context("registry_wrapper", existing_wrapper)
47
- result.message = "Reusing existing registry connection"
48
- self.logger.trace("🔄 Reusing existing registry connection")
49
- return result
50
-
51
- # Create new connection if none exists
52
- registry_url = self._get_registry_url()
53
-
54
- # Create registry client configuration
55
- config = Configuration(host=registry_url)
56
- registry_client = ApiClient(config)
57
-
58
- # Create wrapper for type-safe operations
59
- registry_wrapper = RegistryClientWrapper(registry_client)
60
-
61
- # Store in context
62
- result.add_context("registry_url", registry_url)
63
- result.add_context("registry_client", registry_client)
64
- result.add_context("registry_wrapper", registry_wrapper)
65
-
66
- result.message = f"Connected to registry at {registry_url}"
67
- self.logger.trace(f"🔗 Registry connection established: {registry_url}")
68
-
69
- except Exception as e:
70
- result.status = PipelineStatus.FAILED
71
- result.message = f"Registry connection failed: {e}"
72
- result.add_error(str(e))
73
- self.logger.error(f"❌ Registry connection failed: {e}")
74
-
75
- return result
76
-
77
- def _get_registry_url(self) -> str:
78
- """Get registry URL from environment."""
79
- return os.getenv("MCP_MESH_REGISTRY_URL", "http://localhost:8000")
@@ -1,80 +0,0 @@
1
- """
2
- Registry connection step for MCP Mesh pipeline.
3
-
4
- Handles establishing connection to the mesh registry service.
5
- """
6
-
7
- import logging
8
- import os
9
- from typing import Any
10
-
11
- from ...generated.mcp_mesh_registry_client.api_client import ApiClient
12
- from ...generated.mcp_mesh_registry_client.configuration import Configuration
13
- from ...shared.registry_client_wrapper import RegistryClientWrapper
14
- from .base_step import PipelineStep
15
- from .pipeline_types import PipelineResult, PipelineStatus
16
-
17
- logger = logging.getLogger(__name__)
18
-
19
-
20
- class RegistryConnectionStep(PipelineStep):
21
- """
22
- Establishes connection to the mesh registry.
23
-
24
- Creates and configures the registry client for subsequent
25
- communication steps.
26
- """
27
-
28
- def __init__(self):
29
- super().__init__(
30
- name="registry-connection",
31
- required=True,
32
- description="Connect to mesh registry service",
33
- )
34
-
35
- async def execute(self, context: dict[str, Any]) -> PipelineResult:
36
- """Establish registry connection or reuse existing one."""
37
- self.logger.debug("Checking registry connection...")
38
-
39
- result = PipelineResult(message="Registry connection ready")
40
-
41
- try:
42
- # Check if registry wrapper already exists in context (for heartbeat pipeline)
43
- existing_wrapper = context.get("registry_wrapper")
44
-
45
- if existing_wrapper:
46
- # Reuse existing connection for efficiency
47
- result.add_context("registry_wrapper", existing_wrapper)
48
- result.message = "Reusing existing registry connection"
49
- self.logger.debug("🔄 Reusing existing registry connection")
50
- return result
51
-
52
- # Create new connection if none exists
53
- registry_url = self._get_registry_url()
54
-
55
- # Create registry client configuration
56
- config = Configuration(host=registry_url)
57
- registry_client = ApiClient(config)
58
-
59
- # Create wrapper for type-safe operations
60
- registry_wrapper = RegistryClientWrapper(registry_client)
61
-
62
- # Store in context
63
- result.add_context("registry_url", registry_url)
64
- result.add_context("registry_client", registry_client)
65
- result.add_context("registry_wrapper", registry_wrapper)
66
-
67
- result.message = f"Connected to registry at {registry_url}"
68
- self.logger.trace(f"🔗 Registry connection established: {registry_url}")
69
-
70
- except Exception as e:
71
- result.status = PipelineStatus.FAILED
72
- result.message = f"Registry connection failed: {e}"
73
- result.add_error(str(e))
74
- self.logger.error(f"❌ Registry connection failed: {e}")
75
-
76
- return result
77
-
78
- def _get_registry_url(self) -> str:
79
- """Get registry URL from environment."""
80
- return os.getenv("MCP_MESH_REGISTRY_URL", "http://localhost:8000")