flock-core 0.4.2 (py3-none-any.whl) → 0.4.5 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry flags this version of flock-core as possibly problematic.

Files changed (38)
  1. flock/core/__init__.py +11 -0
  2. flock/core/flock.py +144 -42
  3. flock/core/flock_agent.py +117 -4
  4. flock/core/flock_evaluator.py +1 -1
  5. flock/core/flock_factory.py +290 -2
  6. flock/core/flock_module.py +101 -0
  7. flock/core/flock_registry.py +39 -2
  8. flock/core/flock_server_manager.py +136 -0
  9. flock/core/logging/telemetry.py +1 -1
  10. flock/core/mcp/__init__.py +1 -0
  11. flock/core/mcp/flock_mcp_server.py +614 -0
  12. flock/core/mcp/flock_mcp_tool_base.py +201 -0
  13. flock/core/mcp/mcp_client.py +658 -0
  14. flock/core/mcp/mcp_client_manager.py +201 -0
  15. flock/core/mcp/mcp_config.py +237 -0
  16. flock/core/mcp/types/__init__.py +1 -0
  17. flock/core/mcp/types/callbacks.py +86 -0
  18. flock/core/mcp/types/factories.py +111 -0
  19. flock/core/mcp/types/handlers.py +240 -0
  20. flock/core/mcp/types/types.py +157 -0
  21. flock/core/mcp/util/__init__.py +0 -0
  22. flock/core/mcp/util/helpers.py +23 -0
  23. flock/core/mixin/dspy_integration.py +45 -12
  24. flock/core/serialization/flock_serializer.py +52 -1
  25. flock/core/util/spliter.py +4 -0
  26. flock/evaluators/declarative/declarative_evaluator.py +4 -3
  27. flock/mcp/servers/sse/__init__.py +1 -0
  28. flock/mcp/servers/sse/flock_sse_server.py +139 -0
  29. flock/mcp/servers/stdio/__init__.py +1 -0
  30. flock/mcp/servers/stdio/flock_stdio_server.py +138 -0
  31. flock/mcp/servers/websockets/__init__.py +1 -0
  32. flock/mcp/servers/websockets/flock_websocket_server.py +119 -0
  33. flock/modules/performance/metrics_module.py +159 -1
  34. {flock_core-0.4.2.dist-info → flock_core-0.4.5.dist-info}/METADATA +278 -64
  35. {flock_core-0.4.2.dist-info → flock_core-0.4.5.dist-info}/RECORD +38 -18
  36. {flock_core-0.4.2.dist-info → flock_core-0.4.5.dist-info}/WHEEL +0 -0
  37. {flock_core-0.4.2.dist-info → flock_core-0.4.5.dist-info}/entry_points.txt +0 -0
  38. {flock_core-0.4.2.dist-info → flock_core-0.4.5.dist-info}/licenses/LICENSE +0 -0

flock/core/mcp/types/handlers.py (new file)
@@ -0,0 +1,240 @@
+ """Handler functions."""
+
+ from collections.abc import Callable
+
+ from mcp import CreateMessageRequest
+ from mcp.client.session import ClientResponse
+ from mcp.shared.context import RequestContext
+ from mcp.shared.session import RequestResponder
+ from mcp.types import (
+     INTERNAL_ERROR,
+     ClientResult,
+     ErrorData,
+     ListRootsRequest,
+     ServerNotification as _MCPServerNotification,
+     ServerRequest,
+ )
+
+ from flock.core.logging.logging import FlockLogger
+ from flock.core.mcp.mcp_client import Any
+ from flock.core.mcp.types.types import (
+     CancelledNotification,
+     FlockLoggingMessageNotificationParams,
+     LoggingMessageNotification,
+     ProgressNotification,
+     ResourceListChangedNotification,
+     ResourceUpdatedNotification,
+     ServerNotification,
+     ToolListChangedNotification,
+ )
+
+
+ async def handle_incoming_exception(
+     e: Exception,
+     logger_to_use: FlockLogger,
+     associated_client: Any,
+ ) -> None:
+     """Process an incoming exception Message."""
+     server_name = await associated_client.config.name
+
+     # For now, simply log it
+     logger_to_use.error(
+         f"Encountered Exception while communicating with server '{server_name}': {e}"
+     )
+
+
+ async def handle_progress_notification(
+     n: ProgressNotification,
+     logger_to_use: FlockLogger,
+     server_name: str,
+ ) -> None:
+     """Process an incoming progress Notification."""
+     params = n.params
+     progress = params.progress
+     total = params.total or "Unknown"
+     progress_token = params.progressToken
+     metadata = params.meta or {}
+
+     message = f"PROGRESS_NOTIFICATION: Server '{server_name}' reports Progress: {progress}/{total}. (Token: {progress_token}) (Meta Data: {metadata})"
+
+     logger_to_use.info(message)
+
+
+ async def handle_cancellation_notification(
+     n: CancelledNotification,
+     logger_to_use: FlockLogger,
+     server_name: str,
+ ) -> None:
+     """Process an incoming Cancellation Notification."""
+     params = n.params
+     request_id_to_cancel = params.requestId
+     reason = params.reason or "no reason given"
+     metadata = params.meta or {}
+
+     message = f"CANCELLATION_REQUEST: Server '{server_name}' requests to cancel request with id: {request_id_to_cancel}. Reason: {reason}. (Metadata: {metadata})"
+
+     logger_to_use.warning(message)
+
+
+ async def handle_resource_update_notification(
+     n: ResourceUpdatedNotification,
+     logger_to_use: FlockLogger,
+     associated_client: Any,
+ ) -> None:
+     """Handle an incoming ResourceUpdatedNotification."""
+     # This also means that the associated client needs to invalidate
+     # its resource_contents_cache
+
+     params = n.params
+     metadata = params.meta or {}
+     uri = params.uri
+
+     message = f"RESOURCE_UPDATE: Server '{associated_client.config.name}' reports change on resoure at: {uri}. (Meta Data: {metadata})"
+
+     logger_to_use.info(message)
+
+     await associated_client.invalidate_resource_contents_cache_entry(key=uri)
+
+
+ async def handle_resource_list_changed_notification(
+     n: ResourceListChangedNotification,
+     logger_to_use: FlockLogger,
+     associated_client: Any,
+ ) -> None:
+     """Handle an incoming ResourecListChangedNotification."""
+     # This also means that the associated client needs to invalidate
+     # its resource_contents_cache
+
+     params = n.params or {}
+     metadata = params.meta or {}
+
+     message = f"TOOLS_LIST_CHANGED: Server '{associated_client.config.name}' reports a change in their tools list: {metadata}. Resetting Tools Cache for associated clients."
+
+     logger_to_use.info(message)
+     await associated_client.invalidate_resource_list_cache()
+
+
+ async def handle_tool_list_changed_notification(
+     n: ToolListChangedNotification,
+     logger_to_use: FlockLogger,
+     associated_client: Any,
+ ) -> None:
+     """Handle an incoming ToolListChangedNotification."""
+     params = n.params or {}
+     metadata = params.meta or {}
+
+     message = f"TOOLS_LIST_CHANGED: Server '{associated_client.config.name}' reports a change in their tools list: {metadata}. Resetting Tools Cache for associated clients."
+
+     logger_to_use.info(message)
+     await associated_client.invalidate_tool_cache()
+
+
+ _SERVER_NOTIFICATION_MAP: dict[type[_MCPServerNotification], Callable] = {
+     ResourceListChangedNotification: handle_resource_list_changed_notification,
+     ResourceUpdatedNotification: handle_resource_update_notification,
+     LoggingMessageNotification: lambda n, log, client: handle_logging_message(
+         params=n,
+         logger=log,
+         server_name=client.config.name,
+     ),
+     ProgressNotification: handle_progress_notification,
+     CancelledNotification: handle_cancellation_notification,
+ }
+
+
+ async def handle_incoming_server_notification(
+     n: ServerNotification,
+     logger: FlockLogger,
+     client: Any,
+ ) -> None:
+     """Process an incoming server notification."""
+     handler = _SERVER_NOTIFICATION_MAP.get(type(n.root))
+     if handler:
+         await handler(n.root, logger, client)
+
+
+ async def handle_logging_message(
+     params: FlockLoggingMessageNotificationParams,
+     logger: FlockLogger,
+     server_name: str,
+ ) -> None:
+     """Handle a logging request."""
+     level = params.level
+     method = logger.debug
+     logger_name = params.logger if params.logger else "unknown_remote_logger"
+     metadata = params.meta or {}
+
+     str_level = "DEBUG"
+     prefix = f"Message from Remote MCP Logger '{logger_name}' for server '{server_name}': "
+
+     match level:
+         case "info":
+             method = logger.info
+             str_level = "INFO: "
+         case "notice":
+             method = logger.info
+             str_level = "NOTICE: "
+         case "alert":
+             method = logger.warning
+             str_level = "WARNING: "
+         case "critical":
+             method = logger.error
+             str_level = "CRITICAL: "
+         case "error":
+             method = logger.error
+             str_level = "ERROR: "
+         case "emergency":
+             method = logger.error
+             str_level = "EMERGENCY: "
+
+     full_msg = f"{prefix}{str_level}{params.data} (Meta Data: {metadata})"
+     method(full_msg)
+
+
+ async def handle_incoming_request(
+     req: RequestResponder[ServerRequest, ClientResult],
+     logger_to_use: FlockLogger,
+     associated_client: Any,
+ ) -> None:
+     """Handle generic request."""
+     ctx = RequestContext(
+         request_id=req.request_id,
+         meta=req.request_meta,
+         session=req._session,
+         lifespan_context=None,
+     )
+
+     try:
+         match req.request.root:
+             case CreateMessageRequest(params=req.request.root.params):
+                 with req:
+                     # invoke user's sampling callback
+                     # type: ignore
+                     response = await associated_client.sampling_callback(
+                         ctx, req.request.root.params
+                     )
+                     client_resp = ClientResponse.validate_python(response)
+                     await req.respond(client_resp)
+             case ListRootsRequest():
+                 with req:
+                     # type: ignore
+                     response = await associated_client.list_roots_callback(ctx)
+                     client_resp = ClientResponse.validate_python(response)
+                     await req.respond(client_resp)
+             case _:
+                 # unrecognized -> no-op
+                 return
+     except Exception as e:
+         # 1) Log the error and stacktrace
+         logger_to_use.error(
+             f"Error in fallback handle_incoming_request (id={req.request_id}): {e}"
+         )
+         # 2) If the request wasn't already completed, send a JSON-RPC error back
+         if not getattr(req, "_completed", False):
+             with req:
+                 err = ErrorData(
+                     code=INTERNAL_ERROR, message=f"Client-side error: {e}"
+                 )
+                 client_err = ClientResponse.validate_python(err)
+                 await req.respond(client_err)
+         return
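
The dispatch table above keys on the concrete type of the notification's root object and silently ignores types it has no handler for. A minimal stand-alone sketch of that pattern (the classes below are illustrative stand-ins, not types from flock-core or mcp):

import asyncio
from collections.abc import Callable

# Stand-ins for the real notification types; only the .root / .params shape matters here.
class _ProgressNote:
    params = {"progress": 3, "total": 10}

class _Envelope:
    def __init__(self, root):
        self.root = root

async def _on_progress(note, logger, client) -> None:
    logger.info(f"progress: {note.params}")

# Same idea as _SERVER_NOTIFICATION_MAP: concrete root type -> async handler.
_MAP: dict[type, Callable] = {_ProgressNote: _on_progress}

async def route(envelope, logger, client) -> None:
    handler = _MAP.get(type(envelope.root))
    if handler:                       # unknown notification types are ignored
        await handler(envelope.root, logger, client)

class _PrintLogger:
    def info(self, msg: str) -> None:
        print(msg)

asyncio.run(route(_Envelope(_ProgressNote()), _PrintLogger(), client=None))
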

flock/core/mcp/types/types.py (new file)
@@ -0,0 +1,157 @@
+ """Types for Flock's MCP functionality."""
+
+ from collections.abc import Awaitable, Callable
+ from contextlib import AbstractAsyncContextManager
+ from typing import Any
+
+ from anyio.streams.memory import (
+     MemoryObjectReceiveStream,
+     MemoryObjectSendStream,
+ )
+ from mcp import (
+     ClientSession,
+     CreateMessageResult,
+     StdioServerParameters as _MCPStdioServerParameters,
+ )
+ from mcp.shared.context import RequestContext
+ from mcp.shared.session import RequestResponder
+ from mcp.types import (
+     CancelledNotification as _MCPCancelledNotification,
+     ClientResult,
+     CreateMessageRequestParams,
+     ErrorData,
+     JSONRPCMessage,
+     ListRootsResult,
+     LoggingMessageNotification as _MCPLoggingMessageNotification,
+     LoggingMessageNotificationParams as _MCPLoggingMessageNotificationParams,
+     ProgressNotification as _MCPProgressNotification,
+     PromptListChangedNotification as _MCPPromptListChangedNotification,
+     ResourceListChangedNotification as _MCPResourceListChangedNotification,
+     ResourceUpdatedNotification as _MCPResourceUpdateNotification,
+     Root as _MCPRoot,
+     ServerNotification as _MCPServerNotification,
+     ServerRequest,
+     ToolListChangedNotification as _MCPToolListChangedNotification,
+ )
+ from pydantic import AnyUrl, BaseModel, ConfigDict, Field
+
+ from flock.core.mcp.util.helpers import get_default_env
+
+
+ class ServerNotification(_MCPServerNotification):
+     """A notification message sent by the server side."""
+
+
+ class CancelledNotification(_MCPCancelledNotification):
+     """Notification, which can be sent bei either side to indicate that it is cancelling a previously issued request."""
+
+
+ class ProgressNotification(_MCPProgressNotification):
+     """An out-of band notification used to inform the receiver of a progress update for a long-running request."""
+
+
+ class LoggingMessageNotification(_MCPLoggingMessageNotification):
+     """A notification message sent by the server side containing a logging message."""
+
+
+ class ResourceUpdatedNotification(_MCPResourceUpdateNotification):
+     """A notification message sent by the server side informing a client about a change in a resource."""
+
+
+ class ResourceListChangedNotification(_MCPResourceListChangedNotification):
+     """A notification message sent by the server side informing a client about a change in the list of resources."""
+
+
+ class ToolListChangedNotification(_MCPToolListChangedNotification):
+     """A notification message sent by the server side informing a client about a change in the offered tools."""
+
+
+ class PromptListChangedNotification(_MCPPromptListChangedNotification):
+     """A notification message sent by the server side informing a client about a change in the list of offered Prompts."""
+
+
+ class FlockLoggingMessageNotificationParams(
+     _MCPLoggingMessageNotificationParams
+ ):
+     """Parameters contained within a Logging Message Notification."""
+
+
+ class MCPRoot(_MCPRoot):
+     """Wrapper for mcp.types.Root."""
+
+
+ class ServerParameters(BaseModel):
+     """Base Type for server parameters."""
+
+     model_config = ConfigDict(
+         arbitrary_types_allowed=True,
+     )
+
+
+ class StdioServerParameters(_MCPStdioServerParameters, ServerParameters):
+     """Base Type for Stdio Server parameters."""
+
+     env: dict[str, str] | None = Field(
+         default_factory=get_default_env,
+         description="Environment for the MCP Server.",
+     )
+
+
+ class WebsocketServerParameters(ServerParameters):
+     """Base Type for Websocket Server params."""
+
+     url: str | AnyUrl = Field(..., description="Url the server listens at.")
+
+
+ class SseServerParameters(ServerParameters):
+     """Base Type for SSE Server params."""
+
+     url: str | AnyUrl = Field(..., description="The url the server listens at.")
+
+     headers: dict[str, Any] | None = Field(
+         default=None, description="Additional Headers to pass to the client."
+     )
+
+     timeout: float | int = Field(default=5, description="Http Timeout")
+
+     sse_read_timeout: float | int = Field(
+         default=60 * 5,
+         description="How long the client will wait before disconnecting from the server.",
+     )
+
+
+ MCPCLientInitFunction = Callable[
+     ...,
+     AbstractAsyncContextManager[
+         tuple[
+             MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+             MemoryObjectSendStream[JSONRPCMessage],
+         ]
+     ],
+ ]
+
+
+ FlockSamplingMCPCallback = Callable[
+     [RequestContext, CreateMessageRequestParams],
+     Awaitable[CreateMessageResult | ErrorData],
+ ]
+
+
+ FlockListRootsMCPCallback = Callable[
+     [RequestContext[ClientSession, Any]],
+     Awaitable[ListRootsResult | ErrorData],
+ ]
+
+ FlockLoggingMCPCallback = Callable[
+     [FlockLoggingMessageNotificationParams],
+     Awaitable[None],
+ ]
+
+ FlockMessageHandlerMCPCallback = Callable[
+     [
+         RequestResponder[ServerRequest, ClientResult]
+         | ServerNotification
+         | Exception
+     ],
+     Awaitable[None],
+ ]
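
A short usage sketch for the new parameter models (assumes flock-core 0.4.5 and the mcp package are installed; the field names are the ones visible in the hunk above, command/args come from the upstream mcp StdioServerParameters model, and the concrete values are made up):

from flock.core.mcp.types.types import (
    SseServerParameters,
    StdioServerParameters,
)

# Stdio transport: env defaults to get_default_env(), i.e. a filtered copy of the environment.
stdio_params = StdioServerParameters(command="uvx", args=["example-mcp-server"])

# SSE transport: timeout defaults to 5 seconds, sse_read_timeout to 300 seconds.
sse_params = SseServerParameters(url="http://localhost:8000/sse", timeout=10)

print(stdio_params.env is not None)   # True - a default environment is injected
print(sse_params.sse_read_timeout)    # 300
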
flock/core/mcp/util/__init__.py — file without changes

flock/core/mcp/util/helpers.py (new file)
@@ -0,0 +1,23 @@
+ """Helper functions for Flock MCP Functionality."""
+
+ import hashlib
+ import json
+
+ from mcp.client.stdio import get_default_environment
+
+
+ def get_default_env() -> dict[str, str]:
+     """Returns a default environment object.
+
+     Including only environment-variables
+     deemed safe to inherit.
+     """
+     return get_default_environment()
+
+
+ def cache_key_generator(agent_id: str, run_id: str, *args, **kwargs) -> str:
+     """Helper function to generate cache keys for Flock MCP caches."""
+     args_digest = hashlib.md5(
+         json.dumps(kwargs, sort_keys=True).encode()
+     ).hexdigest()
+     return f"{agent_id}:{run_id}:{args_digest}"

flock/core/mixin/dspy_integration.py
@@ -1,10 +1,13 @@
  # src/flock/core/mixin/dspy_integration.py
  """Mixin class for integrating with the dspy library."""

+ import ast
  import re # Import re for parsing
  import typing
  from typing import Any, Literal

+ from dspy import Tool
+
  from flock.core.logging.logging import get_logger
  from flock.core.util.spliter import split_top_level

@@ -68,12 +71,15 @@ def _resolve_type_string(type_str: str) -> type:
      # Special handling for Literal
      if BaseType is typing.Literal:
          # Split literal values, remove quotes, strip whitespace
-         literal_args_raw = split_top_level(args_str)
-         literal_args = tuple(
-             s.strip().strip("'\"") for s in literal_args_raw
-         )
+         def parse_literal_args(args_str: str) -> tuple[str, ...]:
+             try:
+                 return tuple(ast.literal_eval(f"[{args_str}]"))
+             except (SyntaxError, ValueError) as exc:
+                 raise ValueError(f"Cannot parse {args_str!r} as literals") from exc
+
+         literal_args = parse_literal_args(args_str)
          logger.debug(
-             f"Parsing Literal arguments: {literal_args_raw} -> {literal_args}"
+             f"Parsing Literal arguments: {args_str} -> {literal_args}"
          )
          resolved_type = typing.Literal[literal_args] # type: ignore
          logger.debug(f"Constructed Literal type: {resolved_type}")
@@ -95,7 +101,8 @@ def _resolve_type_string(type_str: str) -> type:
      if BaseType is typing.Optional:
          if len(resolved_arg_types) != 1:
              raise ValueError("Optional requires exactly one argument.")
-         resolved_type = typing.Union[resolved_arg_types[0], type(None)] # type: ignore
+         # type: ignore
+         resolved_type = typing.Union[resolved_arg_types[0], type(None)]
          logger.debug(
              f"Constructed Optional type as Union: {resolved_type}"
          )
@@ -103,7 +110,8 @@ def _resolve_type_string(type_str: str) -> type:
      elif BaseType is typing.Union:
          if not resolved_arg_types:
              raise ValueError("Union requires at least one argument.")
-         resolved_type = typing.Union[resolved_arg_types] # type: ignore
+         # type: ignore
+         resolved_type = typing.Union[resolved_arg_types]
          logger.debug(f"Constructed Union type: {resolved_type}")
          return resolved_type
      elif hasattr(
@@ -242,7 +250,8 @@ class DSPyIntegrationMixin:
              f"Failed to create dynamic type 'dspy_{agent_name}': {e}",
              exc_info=True,
          )
-         raise TypeError(f"Could not create DSPy signature type: {e}") from e
+         raise TypeError(
+             f"Could not create DSPy signature type: {e}") from e

      def _configure_language_model(
          self,
@@ -292,12 +301,15 @@ class DSPyIntegrationMixin:
              f"Failed to configure DSPy language model '{model}': {e}",
              exc_info=True,
          )
+         # We need to raise this exception, otherwise Flock will trundle on until it needs dspy.settings.lm and can't find it.
+         raise

      def _select_task(
          self,
          signature: Any,
          override_evaluator_type: AgentType,
          tools: list[Any] | None = None,
+         mcp_tools: list[Any] | None = None,
          kwargs: dict[str, Any] = {},
      ) -> Any:
          """Select and instantiate the appropriate DSPy Program/Module."""
@@ -320,19 +332,39 @@ class DSPyIntegrationMixin:
                      f"Item '{tool}' in tools list is not callable, skipping."
                  )

+         processed_mcp_tools = []
+         if mcp_tools:
+             for mcp_tool in mcp_tools:
+                 if isinstance(mcp_tool, Tool): # Basic check
+                     processed_mcp_tools.append(mcp_tool)
+                 else:
+                     logger.warning(
+                         f"Item '{mcp_tool}' is not a dspy.primitives.Tool, skipping."
+                     )
+
          dspy_program = None
          selected_type = override_evaluator_type

          # Determine type if not overridden
          if not selected_type:
              selected_type = (
-                 "ReAct" if processed_tools else "Predict"
+                 "ReAct" if processed_tools or processed_mcp_tools else "Predict"
              ) # Default logic

          logger.debug(
-             f"Selecting DSPy program type: {selected_type} (Tools provided: {bool(processed_tools)})"
+             f"Selecting DSPy program type: {selected_type} (Tools provided: {bool(processed_tools)}) (MCP Tools: {bool(processed_mcp_tools)}"
          )

+         # Merge list of native tools and processed tools.
+         # This makes mcp tools appear as native code functions to the llm of the agent.
+         merged_tools = []
+
+         if processed_tools:
+             merged_tools = merged_tools + processed_tools
+
+         if processed_mcp_tools:
+             merged_tools = merged_tools + processed_mcp_tools
+
          try:
              if selected_type == "ChainOfThought":
                  dspy_program = dspy.ChainOfThought(signature, **kwargs)
@@ -340,7 +372,7 @@
                  if not kwargs:
                      kwargs = {"max_iters": 10}
                  dspy_program = dspy.ReAct(
-                     signature, tools=processed_tools or [], **kwargs
+                     signature, tools=merged_tools or [], **kwargs
                  )
              elif selected_type == "Predict": # Default or explicitly Completion
                  dspy_program = dspy.Predict(signature)
@@ -395,7 +427,8 @@
          final_result = {**inputs, **output_dict}

          lm = dspy.settings.get("lm")
-         cost = sum([x["cost"] for x in lm.history if x["cost"] is not None])
+         cost = sum([x["cost"]
+                     for x in lm.history if x["cost"] is not None])
          lm_history = lm.history

          return final_result, cost, lm_history
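
Taken together, these hunks make MCP tools travel the same path as native callables: both lists are concatenated into merged_tools and handed to dspy.ReAct, and the presence of either kind is enough to default the program type to ReAct. A rough sketch of that selection logic in isolation (plain Python, no dspy objects; the placeholder tool is made up):

from collections.abc import Callable
from typing import Any

def select_program_type(
    tools: list[Callable] | None,
    mcp_tools: list[Any] | None,
    override: str | None = None,
) -> tuple[str, list[Any]]:
    processed = [t for t in (tools or []) if callable(t)]
    processed_mcp = list(mcp_tools or [])       # would be dspy Tool instances in Flock
    merged = processed + processed_mcp          # MCP tools look like native tools downstream
    selected = override or ("ReAct" if merged else "Predict")
    return selected, merged

def web_search(query: str) -> str:
    return f"results for {query}"

print(select_program_type([web_search], mcp_tools=None)[0])   # ReAct
print(select_program_type(None, mcp_tools=None)[0])           # Predict
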

flock/core/serialization/flock_serializer.py
@@ -17,7 +17,8 @@ from pydantic import BaseModel, create_model
  from flock.core.flock_registry import get_registry
  from flock.core.logging.logging import get_logger
  from flock.core.serialization.serialization_utils import (
-     extract_pydantic_models_from_type_string, # Assuming this handles basic serialization needs
+     # Assuming this handles basic serialization needs
+     extract_pydantic_models_from_type_string,
  )

  if TYPE_CHECKING:
@@ -52,9 +53,40 @@ class FlockSerializer:
          )

          data["agents"] = {}
+         data["mcp_servers"] = {}
          custom_types = {}
          components = {}

+         for name, server_instance in flock_instance._servers.items():
+             try:
+                 # Servers handle their own serialization via their to_dict method
+                 server_data = server_instance.to_dict(path_type=path_type)
+                 data["mcp_servers"][name] = server_data
+
+                 # --- Extract Component Information ---
+
+                 # Modules
+                 if "modules" in server_data:
+                     for module_name, module_data in server_data[
+                         "modules"
+                     ].items():
+                         if module_data and "type" in module_data:
+                             component_type = module_data["type"]
+                             if component_type not in components:
+                                 logger.debug(
+                                     f"Adding module component '{component_type}' from module '{module_name}' in server '{name}'"
+                                 )
+                                 components[component_type] = (
+                                     FlockSerializer._get_component_definition(
+                                         component_type, path_type
+                                     )
+                                 )
+             except Exception as e:
+                 logger.error(
+                     f"Failed to serialize server '{name}' within Flock: {e}",
+                     exc_info=True,
+                 )
+
          for name, agent_instance in flock_instance._agents.items():
              try:
                  logger.debug(f"Serializing agent '{name}'")
@@ -260,6 +292,9 @@
          # Import concrete types needed for instantiation
          from flock.core.flock import Flock # Import the actual class
          from flock.core.flock_agent import FlockAgent as ConcreteFlockAgent
+         from flock.core.mcp.flock_mcp_server import (
+             FlockMCPServerBase as ConcreteFlockMCPServer,
+         )

          logger.debug(
              f"Deserializing Flock from dict. Provided keys: {list(data.keys())}"
@@ -290,6 +325,8 @@
          FlockSerializer._check_dependencies(data.pop("dependencies"))

          agents_data = data.pop("agents", {})
+         server_data = data.pop("mcp_servers", {})
+         logger.info(f"Found {len(server_data)} servers to deserialize")
          logger.info(f"Found {len(agents_data)} agents to deserialize")

          try:
@@ -309,6 +346,20 @@
                  f"Failed to initialize Flock from dict: {e}"
              ) from e

+         # Deserialize and add server AFTER Flock instance exists and BEFORE Agents have been added
+         for name, server_data in server_data.items():
+             try:
+                 logger.debug(f"Deserializing server '{name}'")
+                 server_data.setdefault("name", name)
+                 server_instance = ConcreteFlockMCPServer.from_dict(server_data)
+                 flock_instance.add_server(server_instance)
+                 logger.debug(f"Successfully added server '{name}' to Flock")
+             except Exception as e:
+                 logger.error(
+                     f"Failed to deserialize/add server '{name}': {e}",
+                     exc_info=True,
+                 )
+
          # Deserialize and add agents AFTER Flock instance exists
          for name, agent_data in agents_data.items():
              try:
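
The net effect on the serialized form is an mcp_servers mapping alongside agents, and on deserialization servers are re-attached via add_server() before any agents are added. A hypothetical sketch of the shape; only the presence and position of the mcp_servers key is taken from the diff, the field names inside each entry are placeholders:

serialized_flock = {
    "name": "my_flock",
    "mcp_servers": {
        # each value is whatever the server's own to_dict() produced
        "docs_server": {"name": "docs_server", "...": "..."},
    },
    "agents": {
        "scraper": {"name": "scraper", "...": "..."},
    },
}

# Deserialization order mirrors FlockSerializer.from_dict above:
for name, server_data in serialized_flock["mcp_servers"].items():
    server_data.setdefault("name", name)   # same defaulting as in the hunk
    # flock.add_server(FlockMCPServerBase.from_dict(server_data)) would run here
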

flock/core/util/spliter.py
@@ -188,6 +188,9 @@ if __name__ == "__main__":


      SAMPLE_18 = "ticket_info : str, reasoning : str, search_queries : list[str], relevant_documents: dict[str, float] | dict of pdf_ids as keys and scores as values"
+
+
+     SAMPLE_19 = "title, headings: list[str], entities_and_metadata: list[dict[str, str]], type:Literal['news', 'blog', 'opinion piece', 'tweet']"
      # ➜ [('münze', 'str', "Deutsche Münzbezeichnung, engl. 'coin'")]

      for title, spec in [
@@ -209,6 +212,7 @@
          ("Sample-16", SAMPLE_16),
          ("Sample-17", SAMPLE_17),
          ("Sample-18", SAMPLE_18),
+         ("Sample-19", SAMPLE_19),
      ]:
          print(f"\n{title}")
          for row in parse_schema(spec):
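
Going by the (name, type, description) tuple format shown in the module's own ➜ comment, the new sample would be expected to parse into rows roughly like the following; this is a hedged sketch of the expected shape, not captured output from parse_schema:

expected_rows = [
    ("title", "str", None),   # no annotation, so assumed to fall back to str
    ("headings", "list[str]", None),
    ("entities_and_metadata", "list[dict[str, str]]", None),
    ("type", "Literal['news', 'blog', 'opinion piece', 'tweet']", None),
]

for name, type_str, description in expected_rows:
    print(f"{name}: {type_str}")
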