flock-core 0.3.23__py3-none-any.whl → 0.3.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flock-core might be problematic; see the registry's release details for more information.

Files changed (38)
  1. flock/__init__.py +23 -11
  2. flock/cli/constants.py +2 -4
  3. flock/cli/create_flock.py +220 -1
  4. flock/cli/execute_flock.py +200 -0
  5. flock/cli/load_flock.py +27 -7
  6. flock/cli/loaded_flock_cli.py +202 -0
  7. flock/cli/manage_agents.py +443 -0
  8. flock/cli/view_results.py +29 -0
  9. flock/cli/yaml_editor.py +283 -0
  10. flock/core/__init__.py +2 -2
  11. flock/core/api/__init__.py +11 -0
  12. flock/core/api/endpoints.py +222 -0
  13. flock/core/api/main.py +237 -0
  14. flock/core/api/models.py +34 -0
  15. flock/core/api/run_store.py +72 -0
  16. flock/core/api/ui/__init__.py +0 -0
  17. flock/core/api/ui/routes.py +271 -0
  18. flock/core/api/ui/utils.py +119 -0
  19. flock/core/flock.py +509 -388
  20. flock/core/flock_agent.py +384 -121
  21. flock/core/flock_registry.py +532 -0
  22. flock/core/logging/logging.py +97 -23
  23. flock/core/mixin/dspy_integration.py +363 -158
  24. flock/core/serialization/__init__.py +7 -1
  25. flock/core/serialization/callable_registry.py +52 -0
  26. flock/core/serialization/serializable.py +259 -37
  27. flock/core/serialization/serialization_utils.py +199 -0
  28. flock/evaluators/declarative/declarative_evaluator.py +2 -0
  29. flock/modules/memory/memory_module.py +17 -4
  30. flock/modules/output/output_module.py +9 -3
  31. flock/workflow/activities.py +2 -2
  32. {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/METADATA +6 -3
  33. {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/RECORD +36 -22
  34. flock/core/flock_api.py +0 -214
  35. flock/core/registry/agent_registry.py +0 -120
  36. {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/WHEEL +0 -0
  37. {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/entry_points.txt +0 -0
  38. {flock_core-0.3.23.dist-info → flock_core-0.3.31.dist-info}/licenses/LICENSE +0 -0
@@ -1,12 +1,33 @@
1
- """Module for serializable objects in the system."""
2
-
1
+ # src/flock/core/serialization/serializable.py
3
2
  import json
4
3
  from abc import ABC, abstractmethod
5
4
  from pathlib import Path
6
5
  from typing import Any, TypeVar
7
6
 
8
- import cloudpickle
9
- import msgpack
7
+ # Use yaml if available, otherwise skip yaml methods
8
+ try:
9
+ import yaml
10
+
11
+ YAML_AVAILABLE = True
12
+ except ImportError:
13
+ YAML_AVAILABLE = False
14
+
15
+ # Use msgpack if available
16
+ try:
17
+ import msgpack
18
+
19
+ MSGPACK_AVAILABLE = True
20
+ except ImportError:
21
+ MSGPACK_AVAILABLE = False
22
+
23
+ # Use cloudpickle
24
+ try:
25
+ import cloudpickle
26
+
27
+ PICKLE_AVAILABLE = True
28
+ except ImportError:
29
+ PICKLE_AVAILABLE = False
30
+
10
31
 
11
32
  T = TypeVar("T", bound="Serializable")
12
33
 
@@ -15,79 +36,280 @@ class Serializable(ABC):
15
36
  """Base class for all serializable objects in the system.
16
37
 
17
38
  Provides methods for serializing/deserializing objects to various formats.
39
+ Subclasses MUST implement to_dict and from_dict.
18
40
  """
19
41
 
20
42
  @abstractmethod
21
43
  def to_dict(self) -> dict[str, Any]:
22
- """Convert instance to dictionary representation."""
44
+ """Convert instance to a dictionary representation suitable for serialization.
45
+ This method should handle converting nested Serializable objects and callables.
46
+ """
23
47
  pass
24
48
 
25
49
  @classmethod
26
50
  @abstractmethod
27
51
  def from_dict(cls: type[T], data: dict[str, Any]) -> T:
28
- """Create instance from dictionary representation."""
52
+ """Create instance from a dictionary representation.
53
+ This method should handle reconstructing nested Serializable objects and callables.
54
+ """
29
55
  pass
30
56
 
31
- def to_json(self) -> str:
57
+ # --- JSON Methods ---
58
+ def to_json(self, indent: int | None = 2) -> str:
32
59
  """Serialize to JSON string."""
60
+ # Import encoder locally to avoid making it a hard dependency if JSON isn't used
61
+ from .json_encoder import FlockJSONEncoder
62
+
33
63
  try:
34
- return json.dumps(self.to_dict())
35
- except Exception:
36
- raise
64
+ # Note: to_dict should ideally prepare the structure fully.
65
+ # FlockJSONEncoder is a fallback for types missed by to_dict.
66
+ return json.dumps(
67
+ self.to_dict(), cls=FlockJSONEncoder, indent=indent
68
+ )
69
+ except Exception as e:
70
+ raise RuntimeError(
71
+ f"Failed to serialize {self.__class__.__name__} to JSON: {e}"
72
+ ) from e
37
73
 
38
74
  @classmethod
39
75
  def from_json(cls: type[T], json_str: str) -> T:
40
76
  """Create instance from JSON string."""
41
77
  try:
42
- return cls.from_dict(json.loads(json_str))
43
- except Exception:
78
+ data = json.loads(json_str)
79
+ return cls.from_dict(data)
80
+ except json.JSONDecodeError as e:
81
+ raise ValueError(f"Invalid JSON string: {e}") from e
82
+ except Exception as e:
83
+ raise RuntimeError(
84
+ f"Failed to deserialize {cls.__name__} from JSON: {e}"
85
+ ) from e
86
+
87
+ # --- YAML Methods ---
88
+ def to_yaml(self, sort_keys=False, default_flow_style=False) -> str:
89
+ """Serialize to YAML string."""
90
+ if not YAML_AVAILABLE:
91
+ raise NotImplementedError(
92
+ "YAML support requires PyYAML: pip install pyyaml"
93
+ )
94
+ try:
95
+ # to_dict should prepare a structure suitable for YAML dumping
96
+ return yaml.dump(
97
+ self.to_dict(),
98
+ sort_keys=sort_keys,
99
+ default_flow_style=default_flow_style,
100
+ allow_unicode=True,
101
+ )
102
+ except Exception as e:
103
+ raise RuntimeError(
104
+ f"Failed to serialize {self.__class__.__name__} to YAML: {e}"
105
+ ) from e
106
+
107
+ @classmethod
108
+ def from_yaml(cls: type[T], yaml_str: str) -> T:
109
+ """Create instance from YAML string."""
110
+ if not YAML_AVAILABLE:
111
+ raise NotImplementedError(
112
+ "YAML support requires PyYAML: pip install pyyaml"
113
+ )
114
+ try:
115
+ data = yaml.safe_load(yaml_str)
116
+ if not isinstance(data, dict):
117
+ raise TypeError(
118
+ f"YAML did not yield a dictionary for {cls.__name__}"
119
+ )
120
+ return cls.from_dict(data)
121
+ except yaml.YAMLError as e:
122
+ raise ValueError(f"Invalid YAML string: {e}") from e
123
+ except Exception as e:
124
+ raise RuntimeError(
125
+ f"Failed to deserialize {cls.__name__} from YAML: {e}"
126
+ ) from e
127
+
128
+ def to_yaml_file(self, path: Path | str, **yaml_dump_kwargs) -> None:
129
+ """Serialize to YAML file."""
130
+ if not YAML_AVAILABLE:
131
+ raise NotImplementedError(
132
+ "YAML support requires PyYAML: pip install pyyaml"
133
+ )
134
+ path = Path(path)
135
+ try:
136
+ path.parent.mkdir(parents=True, exist_ok=True)
137
+ yaml_str = self.to_yaml(**yaml_dump_kwargs)
138
+ path.write_text(yaml_str, encoding="utf-8")
139
+ except Exception as e:
140
+ raise RuntimeError(
141
+ f"Failed to write {self.__class__.__name__} to YAML file {path}: {e}"
142
+ ) from e
143
+
144
+ @classmethod
145
+ def from_yaml_file(cls: type[T], path: Path | str) -> T:
146
+ """Create instance from YAML file."""
147
+ if not YAML_AVAILABLE:
148
+ raise NotImplementedError(
149
+ "YAML support requires PyYAML: pip install pyyaml"
150
+ )
151
+ path = Path(path)
152
+ try:
153
+ yaml_str = path.read_text(encoding="utf-8")
154
+ return cls.from_yaml(yaml_str)
155
+ except FileNotFoundError:
44
156
  raise
157
+ except Exception as e:
158
+ raise RuntimeError(
159
+ f"Failed to read {cls.__name__} from YAML file {path}: {e}"
160
+ ) from e
45
161
 
46
- def to_msgpack(self, path: Path | None = None) -> bytes:
162
+ # --- MsgPack Methods ---
163
+ def to_msgpack(self) -> bytes:
47
164
  """Serialize to msgpack bytes."""
165
+ if not MSGPACK_AVAILABLE:
166
+ raise NotImplementedError(
167
+ "MsgPack support requires msgpack: pip install msgpack"
168
+ )
48
169
  try:
49
- msgpack_bytes = msgpack.packb(self.to_dict())
50
- if path:
51
- path.write_bytes(msgpack_bytes)
52
- return msgpack_bytes
53
- except Exception:
54
- raise
170
+ # Use default hook for complex types if needed, or rely on to_dict
171
+ return msgpack.packb(self.to_dict(), use_bin_type=True)
172
+ except Exception as e:
173
+ raise RuntimeError(
174
+ f"Failed to serialize {self.__class__.__name__} to MsgPack: {e}"
175
+ ) from e
55
176
 
56
177
  @classmethod
57
178
  def from_msgpack(cls: type[T], msgpack_bytes: bytes) -> T:
58
179
  """Create instance from msgpack bytes."""
180
+ if not MSGPACK_AVAILABLE:
181
+ raise NotImplementedError(
182
+ "MsgPack support requires msgpack: pip install msgpack"
183
+ )
59
184
  try:
60
- return cls.from_dict(msgpack.unpackb(msgpack_bytes))
61
- except Exception:
62
- raise
185
+ # Use object_hook if custom deserialization is needed beyond from_dict
186
+ data = msgpack.unpackb(msgpack_bytes, raw=False)
187
+ if not isinstance(data, dict):
188
+ raise TypeError(
189
+ f"MsgPack did not yield a dictionary for {cls.__name__}"
190
+ )
191
+ return cls.from_dict(data)
192
+ except Exception as e:
193
+ raise RuntimeError(
194
+ f"Failed to deserialize {cls.__name__} from MsgPack: {e}"
195
+ ) from e
196
+
197
+ def to_msgpack_file(self, path: Path | str) -> None:
198
+ """Serialize to msgpack file."""
199
+ if not MSGPACK_AVAILABLE:
200
+ raise NotImplementedError(
201
+ "MsgPack support requires msgpack: pip install msgpack"
202
+ )
203
+ path = Path(path)
204
+ try:
205
+ path.parent.mkdir(parents=True, exist_ok=True)
206
+ msgpack_bytes = self.to_msgpack()
207
+ path.write_bytes(msgpack_bytes)
208
+ except Exception as e:
209
+ raise RuntimeError(
210
+ f"Failed to write {self.__class__.__name__} to MsgPack file {path}: {e}"
211
+ ) from e
63
212
 
64
213
  @classmethod
65
- def from_msgpack_file(cls: type[T], path: Path) -> T:
214
+ def from_msgpack_file(cls: type[T], path: Path | str) -> T:
66
215
  """Create instance from msgpack file."""
216
+ if not MSGPACK_AVAILABLE:
217
+ raise NotImplementedError(
218
+ "MsgPack support requires msgpack: pip install msgpack"
219
+ )
220
+ path = Path(path)
67
221
  try:
68
- return cls.from_msgpack(path.read_bytes())
69
- except Exception:
222
+ msgpack_bytes = path.read_bytes()
223
+ return cls.from_msgpack(msgpack_bytes)
224
+ except FileNotFoundError:
70
225
  raise
226
+ except Exception as e:
227
+ raise RuntimeError(
228
+ f"Failed to read {cls.__name__} from MsgPack file {path}: {e}"
229
+ ) from e
71
230
 
231
+ # --- Pickle Methods (Use with caution due to security risks) ---
72
232
  def to_pickle(self) -> bytes:
73
- """Serialize to pickle bytes."""
233
+ """Serialize to pickle bytes using cloudpickle."""
234
+ if not PICKLE_AVAILABLE:
235
+ raise NotImplementedError(
236
+ "Pickle support requires cloudpickle: pip install cloudpickle"
237
+ )
74
238
  try:
75
239
  return cloudpickle.dumps(self)
76
- except Exception:
77
- raise
240
+ except Exception as e:
241
+ raise RuntimeError(
242
+ f"Failed to serialize {self.__class__.__name__} to Pickle: {e}"
243
+ ) from e
78
244
 
79
245
  @classmethod
80
- def from_pickle(cls, pickle_bytes: bytes) -> T:
81
- """Create instance from pickle bytes."""
246
+ def from_pickle(cls: type[T], pickle_bytes: bytes) -> T:
247
+ """Create instance from pickle bytes using cloudpickle."""
248
+ if not PICKLE_AVAILABLE:
249
+ raise NotImplementedError(
250
+ "Pickle support requires cloudpickle: pip install cloudpickle"
251
+ )
82
252
  try:
83
- return cloudpickle.loads(pickle_bytes)
84
- except Exception:
85
- raise
253
+ instance = cloudpickle.loads(pickle_bytes)
254
+ if not isinstance(instance, cls):
255
+ raise TypeError(
256
+ f"Deserialized object is not of type {cls.__name__}"
257
+ )
258
+ return instance
259
+ except Exception as e:
260
+ raise RuntimeError(
261
+ f"Failed to deserialize {cls.__name__} from Pickle: {e}"
262
+ ) from e
263
+
264
+ def to_pickle_file(self, path: Path | str) -> None:
265
+ """Serialize to pickle file using cloudpickle."""
266
+ if not PICKLE_AVAILABLE:
267
+ raise NotImplementedError(
268
+ "Pickle support requires cloudpickle: pip install cloudpickle"
269
+ )
270
+ path = Path(path)
271
+ try:
272
+ path.parent.mkdir(parents=True, exist_ok=True)
273
+ pickle_bytes = self.to_pickle()
274
+ path.write_bytes(pickle_bytes)
275
+ except Exception as e:
276
+ raise RuntimeError(
277
+ f"Failed to write {self.__class__.__name__} to Pickle file {path}: {e}"
278
+ ) from e
86
279
 
87
280
  @classmethod
88
- def from_pickle_file(cls: type[T], path: Path) -> T:
89
- """Create instance from pickle file."""
281
+ def from_pickle_file(cls: type[T], path: Path | str) -> T:
282
+ """Create instance from pickle file using cloudpickle."""
283
+ if not PICKLE_AVAILABLE:
284
+ raise NotImplementedError(
285
+ "Pickle support requires cloudpickle: pip install cloudpickle"
286
+ )
287
+ path = Path(path)
90
288
  try:
91
- return cls.from_pickle(path.read_bytes())
92
- except Exception:
289
+ pickle_bytes = path.read_bytes()
290
+ return cls.from_pickle(pickle_bytes)
291
+ except FileNotFoundError:
93
292
  raise
293
+ except Exception as e:
294
+ raise RuntimeError(
295
+ f"Failed to read {cls.__name__} from Pickle file {path}: {e}"
296
+ ) from e
297
+
298
+ # _filter_none_values remains unchanged
299
+ @staticmethod
300
+ def _filter_none_values(data: Any) -> Any:
301
+ """Filter out None values from dictionaries and lists recursively."""
302
+ if isinstance(data, dict):
303
+ return {
304
+ k: Serializable._filter_none_values(v)
305
+ for k, v in data.items()
306
+ if v is not None
307
+ }
308
+ elif isinstance(data, list):
309
+ # Filter None from list items AND recursively filter within items
310
+ return [
311
+ Serializable._filter_none_values(item)
312
+ for item in data
313
+ if item is not None
314
+ ]
315
+ return data
@@ -0,0 +1,199 @@
1
+ # src/flock/core/serialization/serialization_utils.py
2
+ """Utilities for recursive serialization/deserialization with callable handling."""
3
+
4
+ import importlib
5
+ from collections.abc import Mapping, Sequence
6
+ from typing import TYPE_CHECKING, Any
7
+
8
+ from pydantic import BaseModel
9
+
10
+ # Use TYPE_CHECKING to avoid circular imports
11
+ if TYPE_CHECKING:
12
+ pass
13
+
14
+ from flock.core.logging.logging import get_logger
15
+
16
+ logger = get_logger("serialization.utils")
17
+
18
+ # Remove this line to avoid circular import at module level
19
+ # FlockRegistry = get_registry() # Get singleton instance
20
+
21
+ # --- Serialization Helper ---
22
+
23
+
24
+ def serialize_item(item: Any) -> Any:
25
+ """Recursively prepares an item for serialization (e.g., to dict for YAML/JSON).
26
+ Converts known callables to their path strings using FlockRegistry.
27
+ Converts Pydantic models using model_dump.
28
+ """
29
+ # Import the registry lazily when needed
30
+ from flock.core.flock_registry import get_registry
31
+
32
+ FlockRegistry = get_registry()
33
+
34
+ if isinstance(item, BaseModel):
35
+ dumped = item.model_dump(mode="json", exclude_none=True)
36
+ return serialize_item(dumped)
37
+ elif callable(item) and not isinstance(item, type):
38
+ path_str = FlockRegistry.get_callable_path_string(
39
+ item
40
+ ) # Use registry helper
41
+ if path_str:
42
+ return {"__callable_ref__": path_str}
43
+ else:
44
+ logger.warning(
45
+ f"Could not get path string for callable {item}, storing as string."
46
+ )
47
+ return str(item)
48
+ elif isinstance(item, Mapping):
49
+ return {key: serialize_item(value) for key, value in item.items()}
50
+ elif isinstance(item, Sequence) and not isinstance(item, str):
51
+ return [serialize_item(sub_item) for sub_item in item]
52
+ elif isinstance(
53
+ item, type
54
+ ): # Handle type objects themselves (e.g. if stored directly)
55
+ type_name = FlockRegistry.get_component_type_name(
56
+ item
57
+ ) # Check components first
58
+ if type_name:
59
+ return {"__component_ref__": type_name}
60
+ type_name = FlockRegistry._get_path_string(
61
+ item
62
+ ) # Check regular types/classes by path
63
+ if type_name:
64
+ return {"__type_ref__": type_name}
65
+ logger.warning(
66
+ f"Could not serialize type object {item}, storing as string."
67
+ )
68
+ return str(item)
69
+ else:
70
+ # Return basic types as is
71
+ return item
72
+
73
+
74
+ # --- Deserialization Helper ---
75
+
76
+
77
+ def deserialize_item(item: Any) -> Any:
78
+ """Recursively processes a deserialized item (e.g., from YAML/JSON dict).
79
+ Converts reference dicts back to actual callables or types using FlockRegistry.
80
+ Handles nested lists and dicts.
81
+ """
82
+ # Import the registry lazily when needed
83
+ from flock.core.flock_registry import get_registry
84
+
85
+ FlockRegistry = get_registry()
86
+
87
+ if isinstance(item, Mapping):
88
+ if "__callable_ref__" in item and len(item) == 1:
89
+ path_str = item["__callable_ref__"]
90
+ try:
91
+ return FlockRegistry.get_callable(path_str)
92
+ except KeyError:
93
+ logger.error(
94
+ f"Callable reference '{path_str}' not found during deserialization."
95
+ )
96
+ return None
97
+ elif "__component_ref__" in item and len(item) == 1:
98
+ type_name = item["__component_ref__"]
99
+ try:
100
+ return FlockRegistry.get_component(type_name)
101
+ except KeyError:
102
+ logger.error(
103
+ f"Component reference '{type_name}' not found during deserialization."
104
+ )
105
+ return None
106
+ elif "__type_ref__" in item and len(item) == 1:
107
+ type_name = item["__type_ref__"]
108
+ try:
109
+ # For general types, use get_type or fallback to dynamic import like get_callable
110
+ # Using get_type for now, assuming it needs registration
111
+ return FlockRegistry.get_type(type_name)
112
+ except KeyError:
113
+ # Attempt dynamic import as fallback if get_type fails (similar to get_callable)
114
+ try:
115
+ if "." not in type_name: # Builtins?
116
+ mod = importlib.import_module("builtins")
117
+ else:
118
+ module_name, class_name = type_name.rsplit(".", 1)
119
+ mod = importlib.import_module(module_name)
120
+ type_obj = getattr(mod, class_name)
121
+ if isinstance(type_obj, type):
122
+ FlockRegistry.register_type(
123
+ type_obj, type_name
124
+ ) # Cache it
125
+ return type_obj
126
+ else:
127
+ raise TypeError()
128
+ except Exception:
129
+ logger.error(
130
+ f"Type reference '{type_name}' not found in registry or via dynamic import."
131
+ )
132
+ return None
133
+
134
+ else:
135
+ # Recursively deserialize dictionary values
136
+ return {key: deserialize_item(value) for key, value in item.items()}
137
+ elif isinstance(item, Sequence) and not isinstance(item, str):
138
+ return [deserialize_item(sub_item) for sub_item in item]
139
+ else:
140
+ # Return basic types as is
141
+ return item
142
+
143
+
144
+ # --- Component Deserialization Helper ---
145
+ def deserialize_component(
146
+ data: dict | None, expected_base_type: type
147
+ ) -> Any | None:
148
+ """Deserializes a component (Module, Evaluator, Router) from its dict representation.
149
+ Uses the 'type' field to find the correct class via FlockRegistry.
150
+ """
151
+ # Import the registry and COMPONENT_BASE_TYPES lazily when needed
152
+ from flock.core.flock_registry import COMPONENT_BASE_TYPES, get_registry
153
+
154
+ FlockRegistry = get_registry()
155
+
156
+ if data is None:
157
+ return None
158
+ if not isinstance(data, dict):
159
+ logger.error(
160
+ f"Expected dict for component deserialization, got {type(data)}"
161
+ )
162
+ return None
163
+
164
+ type_name = data.get(
165
+ "type"
166
+ ) # Assuming 'type' key holds the class name string
167
+ if not type_name:
168
+ logger.error(f"Component data missing 'type' field: {data}")
169
+ return None
170
+
171
+ try:
172
+ ComponentClass = FlockRegistry.get_component(type_name) # Use registry
173
+ # Optional: Keep the base type check
174
+ if COMPONENT_BASE_TYPES and not issubclass(
175
+ ComponentClass, expected_base_type
176
+ ):
177
+ raise TypeError(
178
+ f"Deserialized class {type_name} is not a subclass of {expected_base_type.__name__}"
179
+ )
180
+
181
+ # Recursively deserialize the data *before* passing to Pydantic constructor
182
+ # This handles nested callables/types within the component's config/data
183
+ deserialized_data_for_init = {}
184
+ for k, v in data.items():
185
+ # Don't pass the 'type' field itself to the constructor if it matches class name
186
+ if k == "type" and v == ComponentClass.__name__:
187
+ continue
188
+ deserialized_data_for_init[k] = deserialize_item(v)
189
+
190
+ # Use Pydantic constructor directly. Assumes keys match field names.
191
+ # from_dict could be added to components for more complex logic if needed.
192
+ return ComponentClass(**deserialized_data_for_init)
193
+
194
+ except (KeyError, TypeError, Exception) as e:
195
+ logger.error(
196
+ f"Failed to deserialize component of type '{type_name}': {e}",
197
+ exc_info=True,
198
+ )
199
+ return None
@@ -9,6 +9,8 @@ from flock.core.mixin.prompt_parser import PromptParserMixin
9
9
 
10
10
 
11
11
  class DeclarativeEvaluatorConfig(FlockEvaluatorConfig):
12
+ """Configuration for the DeclarativeEvaluator."""
13
+
12
14
  agent_type_override: str | None = None
13
15
  model: str | None = "openai/gpt-4o"
14
16
  use_cache: bool = True
@@ -6,8 +6,12 @@ from typing import Any, Literal
6
6
  from pydantic import Field
7
7
  from tqdm import tqdm
8
8
 
9
- from flock.core import FlockAgent, FlockModule, FlockModuleConfig
10
9
  from flock.core.context.context import FlockContext
10
+
11
+ # if TYPE_CHECKING:
12
+ # from flock.core import FlockAgent
13
+ from flock.core.flock_agent import FlockAgent
14
+ from flock.core.flock_module import FlockModule, FlockModuleConfig
11
15
  from flock.core.logging.logging import get_logger
12
16
  from flock.modules.memory.memory_parser import MemoryMappingParser
13
17
  from flock.modules.memory.memory_storage import FlockMemoryStore, MemoryEntry
@@ -282,7 +286,10 @@ class MemoryModule(FlockModule):
282
286
  return set(concept_list)
283
287
 
284
288
  async def _summarize_mode(
285
- self, agent: FlockAgent, inputs: dict[str, Any], result: dict[str, Any]
289
+ self,
290
+ agent: FlockAgent,
291
+ inputs: dict[str, Any],
292
+ result: dict[str, Any],
286
293
  ) -> str:
287
294
  """Extract information chunks using summary mode."""
288
295
  split_signature = agent.create_dspy_signature_class(
@@ -300,7 +307,10 @@ class MemoryModule(FlockModule):
300
307
  return "\n".join(split_result.chunks)
301
308
 
302
309
  async def _semantic_splitter_mode(
303
- self, agent: FlockAgent, inputs: dict[str, Any], result: dict[str, Any]
310
+ self,
311
+ agent: FlockAgent,
312
+ inputs: dict[str, Any],
313
+ result: dict[str, Any],
304
314
  ) -> str | list[dict[str, str]]:
305
315
  """Extract information chunks using semantic mode."""
306
316
  split_signature = agent.create_dspy_signature_class(
@@ -318,7 +328,10 @@ class MemoryModule(FlockModule):
318
328
  return split_result.chunks
319
329
 
320
330
  async def _character_splitter_mode(
321
- self, agent: FlockAgent, inputs: dict[str, Any], result: dict[str, Any]
331
+ self,
332
+ agent: FlockAgent,
333
+ inputs: dict[str, Any],
334
+ result: dict[str, Any],
322
335
  ) -> list[str]:
323
336
  """Extract information chunks by splitting text into fixed character lengths."""
324
337
  full_text = json.dumps(inputs) + (json.dumps(result) if result else "")
@@ -3,11 +3,13 @@
3
3
  import json
4
4
  import os
5
5
  from datetime import datetime
6
- from typing import Any
6
+ from typing import TYPE_CHECKING, Any
7
7
 
8
8
  from pydantic import Field
9
9
 
10
- from flock.core import FlockAgent
10
+ if TYPE_CHECKING:
11
+ from flock.core import FlockAgent
12
+
11
13
  from flock.core.context.context import FlockContext
12
14
  from flock.core.flock_module import FlockModule, FlockModuleConfig
13
15
  from flock.core.logging.formatters.themed_formatter import (
@@ -17,6 +19,10 @@ from flock.core.logging.formatters.themes import OutputTheme
17
19
  from flock.core.logging.logging import get_logger
18
20
  from flock.core.serialization.json_encoder import FlockJSONEncoder
19
21
 
22
+ # from flock.core.logging.formatters.themes import OutputTheme
23
+ # from flock.core.logging.logging import get_logger
24
+ # from flock.core.serialization.json_encoder import FlockJSONEncoder
25
+
20
26
  logger = get_logger("module.output")
21
27
 
22
28
 
@@ -168,7 +174,7 @@ class OutputModule(FlockModule):
168
174
 
169
175
  async def post_evaluate(
170
176
  self,
171
- agent: FlockAgent,
177
+ agent: "FlockAgent",
172
178
  inputs: dict[str, Any],
173
179
  result: dict[str, Any],
174
180
  context: FlockContext | None = None,
@@ -8,9 +8,9 @@ from temporalio import activity
8
8
  from flock.core.context.context import FlockContext
9
9
  from flock.core.context.context_vars import FLOCK_CURRENT_AGENT, FLOCK_MODEL
10
10
  from flock.core.flock_agent import FlockAgent
11
+ from flock.core.flock_registry import get_registry
11
12
  from flock.core.flock_router import HandOffRequest
12
13
  from flock.core.logging.logging import get_logger
13
- from flock.core.registry.agent_registry import Registry
14
14
  from flock.core.util.input_resolver import resolve_inputs
15
15
 
16
16
  logger = get_logger("activities")
@@ -26,7 +26,7 @@ async def run_agent(context: FlockContext) -> dict:
26
26
  """
27
27
  # Start a top-level span for the entire run_agent activity.
28
28
  with tracer.start_as_current_span("run_agent") as span:
29
- registry = Registry()
29
+ registry = get_registry()
30
30
  previous_agent_name = ""
31
31
  if isinstance(context, dict):
32
32
  context = FlockContext.from_dict(context)