flock-core 0.3.23__py3-none-any.whl → 0.3.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flock-core might be problematic.
- flock/__init__.py +23 -11
- flock/cli/constants.py +2 -4
- flock/cli/create_flock.py +220 -1
- flock/cli/execute_flock.py +200 -0
- flock/cli/load_flock.py +27 -7
- flock/cli/loaded_flock_cli.py +202 -0
- flock/cli/manage_agents.py +443 -0
- flock/cli/view_results.py +29 -0
- flock/cli/yaml_editor.py +283 -0
- flock/core/__init__.py +2 -2
- flock/core/api/__init__.py +11 -0
- flock/core/api/endpoints.py +222 -0
- flock/core/api/main.py +237 -0
- flock/core/api/models.py +34 -0
- flock/core/api/run_store.py +72 -0
- flock/core/api/ui/__init__.py +0 -0
- flock/core/api/ui/routes.py +271 -0
- flock/core/api/ui/utils.py +119 -0
- flock/core/flock.py +509 -388
- flock/core/flock_agent.py +384 -121
- flock/core/flock_registry.py +532 -0
- flock/core/logging/logging.py +97 -23
- flock/core/mixin/dspy_integration.py +363 -158
- flock/core/serialization/__init__.py +7 -1
- flock/core/serialization/callable_registry.py +52 -0
- flock/core/serialization/serializable.py +259 -37
- flock/core/serialization/serialization_utils.py +199 -0
- flock/evaluators/declarative/declarative_evaluator.py +2 -0
- flock/modules/memory/memory_module.py +17 -4
- flock/modules/output/output_module.py +9 -3
- flock/workflow/activities.py +2 -2
- {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/METADATA +6 -3
- {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/RECORD +36 -22
- flock/core/flock_api.py +0 -214
- flock/core/registry/agent_registry.py +0 -120
- {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/WHEEL +0 -0
- {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/entry_points.txt +0 -0
- {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/licenses/LICENSE +0 -0
flock/core/mixin/dspy_integration.py

@@ -1,17 +1,176 @@
+# src/flock/core/mixin/dspy_integration.py
 """Mixin class for integrating with the dspy library."""

-import
-import
+import re  # Import re for parsing
+import typing
 from typing import Any, Literal

 from flock.core.logging.logging import get_logger
-from flock.core.util.input_resolver import get_callable_members, split_top_level

-
+# Import split_top_level (assuming it's moved or copied appropriately)
+# Option 1: If moved to a shared util
+# from flock.core.util.parsing_utils import split_top_level
+# Option 2: If kept within this file (as in previous example)
+# Define split_top_level here or ensure it's imported
+
+logger = get_logger("mixin.dspy")
+
+# Type definition for agent type override
+AgentType = Literal["ReAct", "Completion", "ChainOfThought"] | None
+
+
+# Helper function needed by _resolve_type_string (copied from input_resolver.py/previous response)
+def split_top_level(s: str) -> list[str]:
+    """Split a string on commas that are not enclosed within brackets, parentheses, or quotes."""
+    parts = []
+    current = []
+    level = 0
+    in_quote = False
+    quote_char = ""
+    i = 0
+    while i < len(s):
+        char = s[i]
+        # Handle escapes within quotes
+        if in_quote and char == "\\" and i + 1 < len(s):
+            current.append(char)
+            current.append(s[i + 1])
+            i += 1  # Skip next char
+        elif in_quote:
+            current.append(char)
+            if char == quote_char:
+                in_quote = False
+        elif char in ('"', "'"):
+            in_quote = True
+            quote_char = char
+            current.append(char)
+        elif char in "([{":
+            level += 1
+            current.append(char)
+        elif char in ")]}":
+            level -= 1
+            current.append(char)
+        elif char == "," and level == 0:
+            parts.append("".join(current).strip())
+            current = []
+        else:
+            current.append(char)
+        i += 1
+    if current:
+        parts.append("".join(current).strip())
+    # Filter out empty strings that might result from trailing commas etc.
+    return [part for part in parts if part]
+
+
+# Helper function to resolve type strings (can be static or module-level)
+def _resolve_type_string(type_str: str) -> type:
+    """Resolves a type string into a Python type object.
+
+    Handles built-ins, registered types, and common typing generics like
+    List, Dict, Optional, Union, Literal.
+    """
+    # Import registry here to avoid circular imports
+    from flock.core.flock_registry import get_registry
+
+    FlockRegistry = get_registry()
+
+    type_str = type_str.strip()
+    logger.debug(f"Attempting to resolve type string: '{type_str}'")
+
+    # 1. Check built-ins and registered types directly
+    try:
+        # This covers str, int, bool, Any, and types registered by name
+        resolved_type = FlockRegistry.get_type(type_str)
+        logger.debug(f"Resolved '{type_str}' via registry to: {resolved_type}")
+        return resolved_type
+    except KeyError:
+        logger.debug(
+            f"'{type_str}' not found directly in registry, attempting generic parsing."
+        )
+        pass  # Not found, continue parsing generics
+
+    # 2. Handle typing generics (List, Dict, Optional, Union, Literal)
+    # Use regex to match pattern like Generic[InnerType1, InnerType2, ...]
+    generic_match = re.fullmatch(r"(\w+)\s*\[(.*)\]", type_str)
+    if generic_match:
+        base_name = generic_match.group(1).strip()
+        args_str = generic_match.group(2).strip()
+        logger.debug(
+            f"Detected generic pattern: Base='{base_name}', Args='{args_str}'"
+        )
+
+        try:
+            # Get the base generic type (e.g., list, dict, Optional) from registry/builtins
+            BaseType = FlockRegistry.get_type(
+                base_name
+            )  # Expects List, Dict etc. to be registered
+            logger.debug(
+                f"Resolved base generic type '{base_name}' to: {BaseType}"
+            )
+
+            # Special handling for Literal
+            if BaseType is typing.Literal:
+                # Split literal values, remove quotes, strip whitespace
+                literal_args_raw = split_top_level(args_str)
+                literal_args = tuple(
+                    s.strip().strip("'\"") for s in literal_args_raw
+                )
+                logger.debug(
+                    f"Parsing Literal arguments: {literal_args_raw} -> {literal_args}"
+                )
+                resolved_type = typing.Literal[literal_args]  # type: ignore
+                logger.debug(f"Constructed Literal type: {resolved_type}")
+                return resolved_type
+
+            # Recursively resolve arguments for other generics
+            logger.debug(f"Splitting generic arguments: '{args_str}'")
+            arg_strs = split_top_level(args_str)
+            logger.debug(f"Split arguments: {arg_strs}")
+            if not arg_strs:
+                raise ValueError("Generic type has no arguments.")
+
+            resolved_arg_types = tuple(
+                _resolve_type_string(arg) for arg in arg_strs
+            )
+            logger.debug(f"Resolved generic arguments: {resolved_arg_types}")
+
+            # Construct the generic type hint
+            if BaseType is typing.Optional:
+                if len(resolved_arg_types) != 1:
+                    raise ValueError("Optional requires exactly one argument.")
+                resolved_type = typing.Union[resolved_arg_types[0], type(None)]  # type: ignore
+                logger.debug(
+                    f"Constructed Optional type as Union: {resolved_type}"
+                )
+                return resolved_type
+            elif BaseType is typing.Union:
+                if not resolved_arg_types:
+                    raise ValueError("Union requires at least one argument.")
+                resolved_type = typing.Union[resolved_arg_types]  # type: ignore
+                logger.debug(f"Constructed Union type: {resolved_type}")
+                return resolved_type
+            elif hasattr(
+                BaseType, "__getitem__"
+            ):  # Check if subscriptable (like list, dict, List, Dict)
+                resolved_type = BaseType[resolved_arg_types]  # type: ignore
+                logger.debug(
+                    f"Constructed subscripted generic type: {resolved_type}"
+                )
+                return resolved_type
+            else:
+                # Base type found but cannot be subscripted
+                logger.warning(
+                    f"Base type '{base_name}' found but is not a standard subscriptable generic. Returning base type."
+                )
+                return BaseType

-
-
-
+        except (KeyError, ValueError, IndexError, TypeError) as e:
+            logger.warning(
+                f"Failed to parse generic type '{type_str}': {e}. Falling back."
+            )
+            # Fall through to raise KeyError below if base type itself wasn't found or parsing failed
+
+    # 3. If not resolved by now, raise error
+    logger.error(f"Type string '{type_str}' could not be resolved.")
+    raise KeyError(f"Type '{type_str}' could not be resolved.")


 class DSPyIntegrationMixin:
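For reference, the new module-level `split_top_level` helper only splits on commas that sit outside brackets and quotes. A minimal usage sketch (illustrative only; the import path assumes the helper stays module-level in flock/core/mixin/dspy_integration.py as defined in the hunk above):

```python
from flock.core.mixin.dspy_integration import split_top_level

# Commas inside quotes or brackets are preserved; only top-level commas split.
fields = split_top_level("query: str | 'the question, verbatim', tags: list[str], score: float")
print(fields)
# ["query: str | 'the question, verbatim'", 'tags: list[str]', 'score: float']
```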
@@ -20,142 +179,159 @@ class DSPyIntegrationMixin:
     def create_dspy_signature_class(
         self, agent_name, description_spec, fields_spec
     ) -> Any:
-        """
-
-
-
-
+        """Creates a dynamic DSPy Signature class from string specifications,
+        resolving types using the FlockRegistry.
+        """
+        try:
+            import dspy
+        except ImportError:
+            logger.error(
+                "DSPy library is not installed. Cannot create DSPy signature. "
+                "Install with: pip install dspy-ai"
+            )
+            raise ImportError("DSPy is required for this functionality.")

         base_class = dspy.Signature
-
-        # Start building the class dictionary with a docstring and annotations dict.
         class_dict = {"__doc__": description_spec, "__annotations__": {}}

-        # ---------------------------
-        # 2. Split the fields specification into inputs and outputs.
-        # ---------------------------
         if "->" in fields_spec:
             inputs_spec, outputs_spec = fields_spec.split("->", 1)
         else:
-            inputs_spec, outputs_spec =
+            inputs_spec, outputs_spec = (
+                fields_spec,
+                "",
+            )  # Assume only inputs if no '->'

-        # ---------------------------
-        # 3. Draw the rest of the owl.
-        # ---------------------------
         def parse_field(field_str):
-            """
-
-            Parse a field of the form:
-                <name> [ : <type> ] [ | <desc> ]
-            Returns a tuple: (name, field_type, desc)
-            """
+            """Parses 'name: type_str | description' using _resolve_type_string."""
             field_str = field_str.strip()
             if not field_str:
                 return None

             parts = field_str.split("|", 1)
-            main_part = parts[0].strip()
+            main_part = parts[0].strip()
             desc = parts[1].strip() if len(parts) > 1 else None

             if ":" in main_part:
                 name, type_str = [s.strip() for s in main_part.split(":", 1)]
             else:
                 name = main_part
-                type_str = "str"  #
+                type_str = "str"  # Default type

-            # Evaluate the type. Since type can be any valid expression (including custom types),
-            # we use eval. (Be cautious if using eval with untrusted input.)
             try:
-
-
-
-
-                # import dspy
-
-                # field_type = dspy.PythonInterpreter(
-                #     sys.modules[__name__].__dict__ | sys.modules["__main__"].__dict__
-                # ).execute(type_str)
-
-                try:
-                    field_type = eval(type_str, sys.modules[__name__].__dict__)
-                except Exception as e:
-                    logger.warning(
-                        "Failed to evaluate type_str in __name__" + str(e)
-                    )
-                    field_type = eval(
-                        type_str, sys.modules["__main__"].__dict__
-                    )
-
-            except Exception as ex:
-                # AREPL fix - var
-                logger.warning(
-                    "Failed to evaluate type_str in __main__" + str(ex)
+                field_type = _resolve_type_string(type_str)
+            except Exception as e:  # Catch resolution errors
+                logger.error(
+                    f"Failed to resolve type '{type_str}' for field '{name}': {e}. Defaulting to str."
                 )
-
-                field_type = eval(
-                    f"exec_locals.get('{type_str}')",
-                    sys.modules["__main__"].__dict__,
-                )
-            except Exception as ex_arepl:
-                logger.warning(
-                    "Failed to evaluate type_str in exec_locals"
-                    + str(ex_arepl)
-                )
-                field_type = str
+                field_type = str

             return name, field_type, desc

         def process_fields(fields_string, field_kind):
-            """Process
-
-            field_kind: "input" or "output" determines which Field constructor to use.
-            """
-            if not fields_string.strip():
+            """Process fields and add to class_dict."""
+            if not fields_string or not fields_string.strip():
                 return

-            # Split on commas.
             for field in split_top_level(fields_string):
                 if field.strip():
                     parsed = parse_field(field)
                     if not parsed:
                         continue
                     name, field_type, desc = parsed
-                    class_dict["__annotations__"][name] =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    class_dict["__annotations__"][name] = (
+                        field_type  # Use resolved type
+                    )
+
+                    FieldClass = (
+                        dspy.InputField
+                        if field_kind == "input"
+                        else dspy.OutputField
+                    )
+                    # DSPy Fields use 'desc' for description
+                    class_dict[name] = (
+                        FieldClass(desc=desc)
+                        if desc is not None
+                        else FieldClass()
+                    )
+
+        try:
+            process_fields(inputs_spec, "input")
+            process_fields(outputs_spec, "output")
+        except Exception as e:
+            logger.error(
+                f"Error processing fields for DSPy signature '{agent_name}': {e}",
+                exc_info=True,
+            )
+            raise ValueError(
+                f"Could not process fields for signature: {e}"
+            ) from e
+
+        # Create and return the dynamic class
+        try:
+            DynamicSignature = type(
+                "dspy_" + agent_name, (base_class,), class_dict
+            )
+            logger.info(
+                f"Successfully created DSPy Signature: {DynamicSignature.__name__} "
+                f"with fields: {DynamicSignature.__annotations__}"
+            )
+            return DynamicSignature
+        except Exception as e:
+            logger.error(
+                f"Failed to create dynamic type 'dspy_{agent_name}': {e}",
+                exc_info=True,
+            )
+            raise TypeError(f"Could not create DSPy signature type: {e}") from e

     def _configure_language_model(
-        self,
+        self,
+        model: str | None,
+        use_cache: bool,
+        temperature: float,
+        max_tokens: int,
     ) -> None:
-        import dspy
-
         """Initialize and configure the language model using dspy."""
-
-
-
-
-
-
-
+        if model is None:
+            logger.warning(
+                "No model specified for DSPy configuration. Using DSPy default."
+            )
+            # Rely on DSPy's global default or raise error if none configured
+            # import dspy
+            # if dspy.settings.lm is None:
+            #     raise ValueError("No model specified for agent and no global DSPy LM configured.")
+            return
+
+        try:
+            import dspy
+        except ImportError:
+            logger.error(
+                "DSPy library is not installed. Cannot configure language model."
+            )
+            return  # Or raise
+
+        try:
+            # Ensure 'cache' parameter is handled correctly (might not exist on dspy.LM directly)
+            # DSPy handles caching globally or via specific optimizers typically.
+            # We'll configure the LM without explicit cache control here.
+            lm_instance = dspy.LM(
+                model=model,
+                temperature=temperature,
+                max_tokens=max_tokens,
+                cache=use_cache,
+                # Add other relevant parameters if needed, e.g., API keys via dspy.settings
+            )
+            dspy.settings.configure(lm=lm_instance)
+            logger.info(
+                f"DSPy LM configured with model: {model}, temp: {temperature}, max_tokens: {max_tokens}"
+            )
+            # Note: DSPy caching is usually configured globally, e.g., dspy.settings.configure(cache=...)
+            # or handled by optimizers. Setting `cache=use_cache` on dspy.LM might not be standard.
+        except Exception as e:
+            logger.error(
+                f"Failed to configure DSPy language model '{model}': {e}",
+                exc_info=True,
+            )

     def _select_task(
         self,
@@ -163,75 +339,104 @@ class DSPyIntegrationMixin:
         agent_type_override: AgentType,
         tools: list[Any] | None = None,
     ) -> Any:
-        """Select and instantiate the appropriate
-
-
-
-
-
-
-
-            An instance of a dspy task (either ReAct or Predict).
-        """
-        import dspy
+        """Select and instantiate the appropriate DSPy Program/Module."""
+        try:
+            import dspy
+        except ImportError:
+            logger.error(
+                "DSPy library is not installed. Cannot select DSPy task."
+            )
+            raise ImportError("DSPy is required for this functionality.")

         processed_tools = []
         if tools:
             for tool in tools:
-                if
-                    processed_tools.extend(get_callable_members(tool))
-                else:
+                if callable(tool):  # Basic check
                     processed_tools.append(tool)
+                    # Could add more sophisticated tool wrapping/validation here if needed
+                else:
+                    logger.warning(
+                        f"Item '{tool}' in tools list is not callable, skipping."
+                    )

-
+        dspy_program = None
+        selected_type = agent_type_override

-        if
-
-
-
-
-
-
-
-
-
-            if
-
-
-
-
-
-                dspy_solver = dspy.ReAct(
-                    signature,
-                    tools=processed_tools,
-                    max_iters=10,
+        # Determine type if not overridden
+        if not selected_type:
+            selected_type = (
+                "ReAct" if processed_tools else "Predict"
+            )  # Default logic
+
+        logger.debug(
+            f"Selecting DSPy program type: {selected_type} (Tools provided: {bool(processed_tools)})"
+        )
+
+        try:
+            if selected_type == "ChainOfThought":
+                dspy_program = dspy.ChainOfThought(signature)
+            elif selected_type == "ReAct":
+                # ReAct requires tools, even if empty list
+                dspy_program = dspy.ReAct(
+                    signature, tools=processed_tools or [], max_iters=10
                 )
-
-
-
+            elif selected_type == "Predict":  # Default or explicitly Completion
+                dspy_program = dspy.Predict(signature)
+            else:  # Fallback or handle unknown type
+                logger.warning(
+                    f"Unknown or unsupported agent_type_override '{selected_type}'. Defaulting to dspy.Predict."
                 )
+                dspy_program = dspy.Predict(signature)

-
+            logger.info(
+                f"Instantiated DSPy program: {type(dspy_program).__name__}"
+            )
+            return dspy_program
+        except Exception as e:
+            logger.error(
+                f"Failed to instantiate DSPy program of type '{selected_type}': {e}",
+                exc_info=True,
+            )
+            raise RuntimeError(f"Could not create DSPy program: {e}") from e

     def _process_result(
         self, result: Any, inputs: dict[str, Any]
     ) -> dict[str, Any]:
-        """Convert the result to a dictionary
+        """Convert the DSPy result object to a dictionary."""
+        if result is None:
+            logger.warning("DSPy program returned None result.")
+            return {}
+        try:
+            # DSPy Prediction objects often behave like dicts or have .keys() / items()
+            if hasattr(result, "items") and callable(result.items):
+                output_dict = dict(result.items())
+            elif hasattr(result, "__dict__"):  # Fallback for other object types
+                output_dict = {
+                    k: v
+                    for k, v in result.__dict__.items()
+                    if not k.startswith("_")
+                }
+            else:
+                # If it's already a dict (less common for DSPy results directly)
+                if isinstance(result, dict):
+                    output_dict = result
+                else:  # Final fallback
+                    logger.warning(
+                        f"Could not reliably convert DSPy result of type {type(result)} to dict. Returning as is."
+                    )
+                    output_dict = {"raw_result": result}

-
-
-
+            logger.debug(f"Processed DSPy result to dict: {output_dict}")
+            # Optionally merge inputs back if desired (can make result dict large)
+            final_result = {**inputs, **output_dict}
+            return final_result

-            Returns:
-                A dictionary containing the processed output.
-        """
-        try:
-            result = result.toDict()
-            for key in inputs:
-                result.setdefault(key, inputs.get(key))
         except Exception as conv_error:
-            logger.
-                f"
+            logger.error(
+                f"Failed to process DSPy result into dictionary: {conv_error}",
+                exc_info=True,
             )
-
+            return {
+                "error": "Failed to process result",
+                "raw_result": str(result),
+            }
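Taken together, the rewritten mixin builds a DSPy signature class dynamically, wraps it in a DSPy program, and configures the LM. A rough sketch of the equivalent hand-written construction; the agent name, field names, descriptions, and model id below are placeholders, not values taken from flock-core:

```python
# Sketch of what create_dspy_signature_class(), _select_task(), and
# _configure_language_model() amount to for one agent (illustrative names).
import dspy

class_dict = {
    "__doc__": "Answer the user's question.",          # description_spec
    "__annotations__": {"query": str, "answer": str},  # types via _resolve_type_string
    "query": dspy.InputField(desc="the question"),
    "answer": dspy.OutputField(desc="a short reply"),
}
QASignature = type("dspy_my_agent", (dspy.Signature,), class_dict)

# No tools -> _select_task() defaults to dspy.Predict; with tools it builds dspy.ReAct.
program = dspy.Predict(QASignature)

# _configure_language_model() boils down to configuring a dspy.LM instance:
dspy.settings.configure(
    lm=dspy.LM(model="openai/gpt-4o-mini", temperature=0.0, max_tokens=1024, cache=True)
)
```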
flock/core/serialization/__init__.py

@@ -1,7 +1,13 @@
 """Serialization utilities for Flock objects."""

+from flock.core.serialization.callable_registry import CallableRegistry
 from flock.core.serialization.json_encoder import FlockJSONEncoder
 from flock.core.serialization.secure_serializer import SecureSerializer
 from flock.core.serialization.serializable import Serializable

-__all__ = [
+__all__ = [
+    "CallableRegistry",
+    "FlockJSONEncoder",
+    "SecureSerializer",
+    "Serializable",
+]
flock/core/serialization/callable_registry.py

@@ -0,0 +1,52 @@
+"""Registry system for callable objects to support serialization."""
+
+from collections.abc import Callable
+
+
+class CallableRegistry:
+    """Registry for callable objects.
+
+    This class serves as a central registry for callable objects (functions, methods)
+    that can be referenced by name in serialized formats.
+
+    This is a placeholder implementation that will be fully implemented in task US007-T004.
+    """
+
+    _registry: dict[str, Callable] = {}
+
+    @classmethod
+    def register(cls, name: str, callable_obj: Callable) -> None:
+        """Register a callable object with the given name.
+
+        Args:
+            name: Unique name for the callable
+            callable_obj: Function or method to register
+        """
+        cls._registry[name] = callable_obj
+
+    @classmethod
+    def get(cls, name: str) -> Callable:
+        """Get a callable object by name.
+
+        Args:
+            name: Name of the callable to retrieve
+
+        Returns:
+            The registered callable
+
+        Raises:
+            KeyError: If no callable with the given name is registered
+        """
+        return cls._registry[name]
+
+    @classmethod
+    def contains(cls, name: str) -> bool:
+        """Check if a callable with the given name is registered.
+
+        Args:
+            name: Name to check
+
+        Returns:
+            True if registered, False otherwise
+        """
+        return name in cls._registry
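The new CallableRegistry placeholder is a plain name-to-callable map. A minimal usage sketch; the registered function below is illustrative, and the import path follows the updated flock/core/serialization/__init__.py above:

```python
from flock.core.serialization import CallableRegistry

def to_upper(text: str) -> str:  # example callable, not part of flock-core
    return text.upper()

CallableRegistry.register("to_upper", to_upper)
assert CallableRegistry.contains("to_upper")
print(CallableRegistry.get("to_upper")("flock"))  # FLOCK; get() raises KeyError for unknown names
```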