flock-core 0.4.528__py3-none-any.whl → 0.5.0b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flock-core might be problematic.
- flock/cli/execute_flock.py +1 -1
- flock/cli/manage_agents.py +6 -6
- flock/components/__init__.py +30 -0
- flock/components/evaluation/__init__.py +9 -0
- flock/components/evaluation/declarative_evaluation_component.py +222 -0
- flock/components/routing/__init__.py +15 -0
- flock/{routers/conditional/conditional_router.py → components/routing/conditional_routing_component.py} +61 -53
- flock/components/routing/default_routing_component.py +103 -0
- flock/components/routing/llm_routing_component.py +206 -0
- flock/components/utility/__init__.py +15 -0
- flock/{modules/enterprise_memory/enterprise_memory_module.py → components/utility/memory_utility_component.py} +195 -173
- flock/{modules/performance/metrics_module.py → components/utility/metrics_utility_component.py} +110 -95
- flock/{modules/output/output_module.py → components/utility/output_utility_component.py} +47 -45
- flock/core/__init__.py +26 -18
- flock/core/agent/__init__.py +16 -0
- flock/core/agent/flock_agent_components.py +104 -0
- flock/core/agent/flock_agent_execution.py +101 -0
- flock/core/agent/flock_agent_integration.py +206 -0
- flock/core/agent/flock_agent_lifecycle.py +177 -0
- flock/core/agent/flock_agent_serialization.py +381 -0
- flock/core/api/endpoints.py +2 -2
- flock/core/api/service.py +2 -2
- flock/core/component/__init__.py +15 -0
- flock/core/{flock_module.py → component/agent_component_base.py} +136 -34
- flock/core/component/evaluation_component.py +56 -0
- flock/core/component/routing_component.py +74 -0
- flock/core/component/utility_component.py +69 -0
- flock/core/config/flock_agent_config.py +49 -2
- flock/core/evaluation/utils.py +3 -2
- flock/core/execution/batch_executor.py +1 -1
- flock/core/execution/evaluation_executor.py +2 -2
- flock/core/execution/opik_executor.py +1 -1
- flock/core/flock.py +147 -493
- flock/core/flock_agent.py +195 -1032
- flock/core/flock_factory.py +114 -90
- flock/core/flock_scheduler.py +1 -1
- flock/core/flock_server_manager.py +8 -8
- flock/core/logging/logging.py +1 -0
- flock/core/mcp/flock_mcp_server.py +53 -48
- flock/core/mcp/{flock_mcp_tool_base.py → flock_mcp_tool.py} +2 -2
- flock/core/mcp/mcp_client.py +9 -9
- flock/core/mcp/mcp_client_manager.py +9 -9
- flock/core/mcp/mcp_config.py +24 -24
- flock/core/mixin/dspy_integration.py +5 -5
- flock/core/orchestration/__init__.py +18 -0
- flock/core/orchestration/flock_batch_processor.py +94 -0
- flock/core/orchestration/flock_evaluator.py +113 -0
- flock/core/orchestration/flock_execution.py +288 -0
- flock/core/orchestration/flock_initialization.py +125 -0
- flock/core/orchestration/flock_server_manager.py +67 -0
- flock/core/orchestration/flock_web_server.py +117 -0
- flock/core/registry/__init__.py +45 -0
- flock/core/registry/agent_registry.py +69 -0
- flock/core/registry/callable_registry.py +139 -0
- flock/core/registry/component_discovery.py +142 -0
- flock/core/registry/component_registry.py +64 -0
- flock/core/registry/config_mapping.py +64 -0
- flock/core/registry/decorators.py +137 -0
- flock/core/registry/registry_hub.py +205 -0
- flock/core/registry/server_registry.py +57 -0
- flock/core/registry/type_registry.py +86 -0
- flock/core/serialization/flock_serializer.py +36 -32
- flock/core/serialization/serialization_utils.py +28 -25
- flock/core/util/hydrator.py +1 -1
- flock/core/util/input_resolver.py +29 -2
- flock/mcp/servers/sse/flock_sse_server.py +10 -10
- flock/mcp/servers/stdio/flock_stdio_server.py +10 -10
- flock/mcp/servers/streamable_http/flock_streamable_http_server.py +10 -10
- flock/mcp/servers/websockets/flock_websocket_server.py +10 -10
- flock/platform/docker_tools.py +3 -3
- flock/webapp/app/chat.py +1 -1
- flock/webapp/app/main.py +9 -5
- flock/webapp/app/services/flock_service.py +1 -1
- flock/webapp/app/services/sharing_store.py +1 -0
- flock/workflow/activities.py +67 -92
- flock/workflow/agent_execution_activity.py +6 -6
- flock/workflow/flock_workflow.py +1 -1
- flock_core-0.5.0b0.dist-info/METADATA +272 -0
- {flock_core-0.4.528.dist-info → flock_core-0.5.0b0.dist-info}/RECORD +82 -95
- flock/core/flock_evaluator.py +0 -60
- flock/core/flock_registry.py +0 -702
- flock/core/flock_router.py +0 -83
- flock/evaluators/__init__.py +0 -1
- flock/evaluators/declarative/__init__.py +0 -1
- flock/evaluators/declarative/declarative_evaluator.py +0 -217
- flock/evaluators/memory/memory_evaluator.py +0 -90
- flock/evaluators/test/test_case_evaluator.py +0 -38
- flock/evaluators/zep/zep_evaluator.py +0 -59
- flock/modules/__init__.py +0 -1
- flock/modules/assertion/__init__.py +0 -1
- flock/modules/assertion/assertion_module.py +0 -286
- flock/modules/callback/__init__.py +0 -1
- flock/modules/callback/callback_module.py +0 -91
- flock/modules/enterprise_memory/README.md +0 -99
- flock/modules/mem0/__init__.py +0 -1
- flock/modules/mem0/mem0_module.py +0 -126
- flock/modules/mem0_async/__init__.py +0 -1
- flock/modules/mem0_async/async_mem0_module.py +0 -126
- flock/modules/memory/__init__.py +0 -1
- flock/modules/memory/memory_module.py +0 -429
- flock/modules/memory/memory_parser.py +0 -125
- flock/modules/memory/memory_storage.py +0 -736
- flock/modules/output/__init__.py +0 -1
- flock/modules/performance/__init__.py +0 -1
- flock/modules/zep/__init__.py +0 -1
- flock/modules/zep/zep_module.py +0 -192
- flock/routers/__init__.py +0 -1
- flock/routers/agent/__init__.py +0 -1
- flock/routers/agent/agent_router.py +0 -236
- flock/routers/agent/handoff_agent.py +0 -58
- flock/routers/default/__init__.py +0 -1
- flock/routers/default/default_router.py +0 -80
- flock/routers/feedback/feedback_router.py +0 -114
- flock/routers/list_generator/list_generator_router.py +0 -166
- flock/routers/llm/__init__.py +0 -1
- flock/routers/llm/llm_router.py +0 -365
- flock/tools/__init__.py +0 -0
- flock/tools/azure_tools.py +0 -781
- flock/tools/code_tools.py +0 -167
- flock/tools/file_tools.py +0 -149
- flock/tools/github_tools.py +0 -157
- flock/tools/markdown_tools.py +0 -205
- flock/tools/system_tools.py +0 -9
- flock/tools/text_tools.py +0 -810
- flock/tools/web_tools.py +0 -90
- flock/tools/zendesk_tools.py +0 -147
- flock_core-0.4.528.dist-info/METADATA +0 -675
- {flock_core-0.4.528.dist-info → flock_core-0.5.0b0.dist-info}/WHEEL +0 -0
- {flock_core-0.4.528.dist-info → flock_core-0.5.0b0.dist-info}/entry_points.txt +0 -0
- {flock_core-0.4.528.dist-info → flock_core-0.5.0b0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,94 @@
+# src/flock/core/orchestration/flock_batch_processor.py
+"""Batch processing functionality for Flock orchestrator."""
+
+from typing import TYPE_CHECKING, Any
+
+from box import Box
+from pandas import DataFrame
+
+from flock.core.logging.logging import get_logger
+
+if TYPE_CHECKING:
+    from flock.core.flock import Flock
+    from flock.core.flock_agent import FlockAgent
+
+logger = get_logger("flock.batch_processor")
+
+
+class FlockBatchProcessor:
+    """Handles batch processing functionality for Flock orchestrator."""
+
+    def __init__(self, flock: "Flock"):
+        self.flock = flock
+
+    async def run_batch_async(
+        self,
+        start_agent: "FlockAgent | str",
+        batch_inputs: list[dict[str, Any]] | DataFrame | str,
+        input_mapping: dict[str, str] | None = None,
+        static_inputs: dict[str, Any] | None = None,
+        parallel: bool = True,
+        max_workers: int = 5,
+        use_temporal: bool | None = None,
+        box_results: bool = True,
+        return_errors: bool = False,
+        silent_mode: bool = False,
+        write_to_csv: str | None = None,
+        hide_columns: list[str] | None = None,
+        delimiter: str = ",",
+    ) -> list[Box | dict | None | Exception]:
+        """Runs the specified agent/workflow for each item in a batch asynchronously (delegated)."""
+        # Import processor locally
+        from flock.core.execution.batch_executor import BatchProcessor
+
+        processor = BatchProcessor(self.flock)  # Pass flock instance
+        return await processor.run_batch_async(
+            start_agent=start_agent,
+            batch_inputs=batch_inputs,
+            input_mapping=input_mapping,
+            static_inputs=static_inputs,
+            parallel=parallel,
+            max_workers=max_workers,
+            use_temporal=use_temporal,
+            box_results=box_results,
+            return_errors=return_errors,
+            silent_mode=silent_mode,
+            write_to_csv=write_to_csv,
+            hide_columns=hide_columns,
+            delimiter=delimiter,
+        )
+
+    def run_batch(
+        self,
+        start_agent: "FlockAgent | str",
+        batch_inputs: list[dict[str, Any]] | DataFrame | str,
+        input_mapping: dict[str, str] | None = None,
+        static_inputs: dict[str, Any] | None = None,
+        parallel: bool = True,
+        max_workers: int = 5,
+        use_temporal: bool | None = None,
+        box_results: bool = True,
+        return_errors: bool = False,
+        silent_mode: bool = False,
+        write_to_csv: str | None = None,
+        hide_columns: list[str] | None = None,
+        delimiter: str = ",",
+    ) -> list[Box | dict | None | Exception]:
+        """Synchronous wrapper for batch processing."""
+        return self.flock._execution._run_sync(
+            self.run_batch_async(
+                start_agent=start_agent,
+                batch_inputs=batch_inputs,
+                input_mapping=input_mapping,
+                static_inputs=static_inputs,
+                parallel=parallel,
+                max_workers=max_workers,
+                use_temporal=use_temporal,
+                box_results=box_results,
+                return_errors=return_errors,
+                silent_mode=silent_mode,
+                write_to_csv=write_to_csv,
+                hide_columns=hide_columns,
+                delimiter=delimiter,
+            )
+        )
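Note: per the annotation above, `batch_inputs` accepts a list of dicts, a pandas `DataFrame`, or a string (presumably a CSV path). The sketch below only illustrates those accepted shapes; the exact semantics of `input_mapping` and `static_inputs` are assumptions, not confirmed by this diff, and all names are hypothetical.

```python
# Illustrative only: three shapes matching batch_inputs' annotation
# (list[dict[str, Any]] | DataFrame | str). Names are hypothetical.
import pandas as pd

rows = [
    {"headline": "Flock 0.5.0b0 released", "source": "changelog"},
    {"headline": "Routers become routing components", "source": "docs"},
]

as_records = rows                      # list[dict[str, Any]]
as_dataframe = pd.DataFrame(rows)      # DataFrame, one row per batch item
as_csv_path = "batch_inputs.csv"       # str, presumably a CSV file path

# Assumption: input_mapping renames dataset columns to agent input names,
# e.g. {"headline": "query"}, and static_inputs is merged into every row.
print(as_dataframe.to_dict(orient="records") == as_records)  # True
```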
@@ -0,0 +1,113 @@
+# src/flock/core/orchestration/flock_evaluator.py
+"""Evaluation functionality for Flock orchestrator."""
+
+from collections.abc import Callable
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Literal
+
+from datasets import Dataset
+from pandas import DataFrame
+
+from flock.core.flock_agent import FlockAgent
+from flock.core.logging.logging import get_logger
+
+if TYPE_CHECKING:
+    from flock.core.flock import Flock
+
+
+logger = get_logger("flock.evaluator")
+
+
+class FlockEvaluator:
+    """Handles evaluation functionality for Flock orchestrator."""
+
+    def __init__(self, flock: "Flock"):
+        self.flock = flock
+
+    async def evaluate_async(
+        self,
+        dataset: str | Path | list[dict[str, Any]] | DataFrame | Dataset,
+        start_agent: FlockAgent | str,
+        input_mapping: dict[str, str],
+        answer_mapping: dict[str, str],
+        metrics: list[
+            str
+            | Callable[[Any, Any], bool | float | dict[str, Any]]
+            | FlockAgent
+        ],
+        metric_configs: dict[str, dict[str, Any]] | None = None,
+        static_inputs: dict[str, Any] | None = None,
+        parallel: bool = True,
+        max_workers: int = 5,
+        use_temporal: bool | None = None,
+        error_handling: Literal["raise", "skip", "log"] = "log",
+        output_file: str | Path | None = None,
+        return_dataframe: bool = True,
+        silent_mode: bool = False,
+        metadata_columns: list[str] | None = None,
+    ) -> "DataFrame | list[dict[str, Any]]":
+        """Evaluates the Flock's performance against a dataset (delegated)."""
+        # Import processor locally
+        from flock.core.execution.evaluation_executor import EvaluationExecutor
+
+        processor = EvaluationExecutor(self.flock)  # Pass flock instance
+        return await processor.evaluate_async(
+            dataset=dataset,
+            start_agent=start_agent,
+            input_mapping=input_mapping,
+            answer_mapping=answer_mapping,
+            metrics=metrics,
+            metric_configs=metric_configs,
+            static_inputs=static_inputs,
+            parallel=parallel,
+            max_workers=max_workers,
+            use_temporal=use_temporal,
+            error_handling=error_handling,
+            output_file=output_file,
+            return_dataframe=return_dataframe,
+            silent_mode=silent_mode,
+            metadata_columns=metadata_columns,
+        )
+
+    def evaluate(
+        self,
+        dataset: str | Path | list[dict[str, Any]] | DataFrame | Dataset,
+        start_agent: FlockAgent | str,
+        input_mapping: dict[str, str],
+        answer_mapping: dict[str, str],
+        metrics: list[
+            str
+            | Callable[[Any, Any], bool | float | dict[str, Any]]
+            | FlockAgent
+        ],
+        metric_configs: dict[str, dict[str, Any]] | None = None,
+        static_inputs: dict[str, Any] | None = None,
+        parallel: bool = True,
+        max_workers: int = 5,
+        use_temporal: bool | None = None,
+        error_handling: Literal["raise", "skip", "log"] = "log",
+        output_file: str | Path | None = None,
+        return_dataframe: bool = True,
+        silent_mode: bool = False,
+        metadata_columns: list[str] | None = None,
+    ) -> "DataFrame | list[dict[str, Any]]":
+        """Synchronous wrapper for evaluation."""
+        return self.flock._execution._run_sync(
+            self.evaluate_async(
+                dataset=dataset,
+                start_agent=start_agent,
+                input_mapping=input_mapping,
+                answer_mapping=answer_mapping,
+                metrics=metrics,
+                metric_configs=metric_configs,
+                static_inputs=static_inputs,
+                parallel=parallel,
+                max_workers=max_workers,
+                use_temporal=use_temporal,
+                error_handling=error_handling,
+                output_file=output_file,
+                return_dataframe=return_dataframe,
+                silent_mode=silent_mode,
+                metadata_columns=metadata_columns,
+            )
+        )
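Note: the `metrics` annotation above allows a string, a `FlockAgent`, or a callable taking `(prediction, answer)` and returning a bool, float, or dict. A minimal sketch of callable metrics in that shape; the function names and thresholds are illustrative, not part of the package.

```python
from typing import Any

def exact_match(prediction: Any, answer: Any) -> bool:
    # Strict string equality after whitespace normalization.
    return str(prediction).strip() == str(answer).strip()

def token_overlap(prediction: Any, answer: Any) -> dict[str, Any]:
    # Returns a dict, the third return shape the annotation allows.
    pred_tokens = set(str(prediction).lower().split())
    ans_tokens = set(str(answer).lower().split())
    overlap = len(pred_tokens & ans_tokens) / max(len(ans_tokens), 1)
    return {"overlap": overlap, "passed": overlap >= 0.5}

metrics = [exact_match, token_overlap]  # Callable[[Any, Any], bool | float | dict]
print(exact_match("42", " 42 "), token_overlap("the answer is 42", "42"))
```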
@@ -0,0 +1,288 @@
+# src/flock/core/orchestration/flock_execution.py
+"""Execution management functionality for Flock orchestrator."""
+
+import asyncio
+import contextvars
+import uuid
+from concurrent.futures import ThreadPoolExecutor
+from typing import TYPE_CHECKING, Any, TypeVar
+
+from box import Box
+from opentelemetry import trace
+from opentelemetry.baggage import set_baggage
+
+from flock.config import DEFAULT_MODEL
+from flock.core.context.context import FlockContext
+from flock.core.context.context_manager import initialize_context
+from flock.core.execution.local_executor import run_local_workflow
+from flock.core.execution.temporal_executor import run_temporal_workflow
+from flock.core.logging.logging import get_logger
+
+if TYPE_CHECKING:
+    from flock.core.flock import Flock
+    from flock.core.flock_agent import FlockAgent
+
+logger = get_logger("flock.execution")
+tracer = trace.get_tracer(__name__)
+_R = TypeVar("_R")
+
+
+class FlockExecution:
+    """Handles execution management for Flock including run, run_async, and execution coordination."""
+
+    def __init__(self, flock: "Flock"):
+        self.flock = flock
+
+    def _run_sync(self, coro) -> _R:
+        """Execute *coro* synchronously.
+
+        * If no loop is running → ``asyncio.run``.
+        * Otherwise run ``asyncio.run`` inside a fresh thread **with**
+          context-vars propagation.
+        """
+        try:
+            asyncio.get_running_loop()
+        except RuntimeError:  # no loop → simple
+            return asyncio.run(coro)
+
+        # A loop is already running – Jupyter / ASGI / etc.
+        ctx = contextvars.copy_context()  # propagate baggage
+        with ThreadPoolExecutor(max_workers=1) as pool:
+            future = pool.submit(ctx.run, asyncio.run, coro)
+            try:
+                return future.result()
+            finally:
+                if not future.done():
+                    future.cancel()
+
+    def run(
+        self,
+        agent: "FlockAgent | str | None" = None,
+        input: dict | None = None,
+        context: FlockContext | None = None,
+        run_id: str = "",
+        box_result: bool = True,
+        agents: list["FlockAgent"] | None = None,
+        servers: list[Any] | None = None,
+        memo: dict[str, Any] | None = None,
+    ) -> Box | dict:
+        """Synchronous execution wrapper."""
+        return self._run_sync(
+            self.run_async(
+                agent=agent,
+                input=input,
+                context=context,
+                run_id=run_id,
+                box_result=box_result,
+                agents=agents,
+                servers=servers,
+                memo=memo,
+            )
+        )
+
+    async def run_async(
+        self,
+        agent: "FlockAgent | str | None" = None,
+        input: dict | None = None,
+        context: FlockContext | None = None,
+        run_id: str = "",
+        box_result: bool = True,
+        agents: list["FlockAgent"] | None = None,
+        servers: list[Any] | None = None,
+        memo: dict[str, Any] | None = None,
+    ) -> Box | dict:
+        """Entry point for running an agent system asynchronously."""
+        # Import here to allow forward reference resolution
+        from flock.core.flock_agent import FlockAgent as ConcreteFlockAgent
+        from flock.core.mcp.flock_mcp_server import (
+            FlockMCPServer as ConcreteFlockServer,
+        )
+
+        with tracer.start_as_current_span("flock.run_async") as span:
+            # Add passed servers so that agents have access to them.
+            if servers:
+                for server_obj in servers:
+                    if isinstance(server_obj, ConcreteFlockServer):
+                        self.flock.add_server(server=server_obj)
+                    else:
+                        logger.warning(
+                            f"Item in 'servers' list is not a FlockMCPServer: {type(server_obj)}"
+                        )
+
+            # Add passed agents
+            if agents:
+                for agent_obj in agents:
+                    if isinstance(agent_obj, ConcreteFlockAgent):
+                        self.flock.add_agent(agent_obj)
+                    else:
+                        logger.warning(
+                            f"Item in 'agents' list is not a FlockAgent: {type(agent_obj)}"
+                        )
+
+            # Determine starting agent name
+            start_agent_name = self._resolve_start_agent(agent)
+
+            # Setup execution context and input
+            run_input = input if input is not None else self.flock._start_input
+            effective_run_id = run_id or f"flockrun_{uuid.uuid4().hex[:8]}"
+
+            # Set span attributes
+            span.set_attribute("start_agent", start_agent_name)
+            span.set_attribute("input", str(run_input))
+            span.set_attribute("run_id", effective_run_id)
+            span.set_attribute("enable_temporal", self.flock.enable_temporal)
+
+            logger.info(
+                f"Initiating Flock run '{self.flock.name}'. Start Agent: '{start_agent_name}'. Temporal: {self.flock.enable_temporal}."
+            )
+
+            try:
+                # Setup execution context
+                run_context = self._setup_execution_context(
+                    context, start_agent_name, run_input, effective_run_id
+                )
+
+                # Execute workflow with server management
+                async with self.flock._mgr:
+                    logger.info("Entering managed server context. Servers starting up.")
+                    logger.info(
+                        "Starting agent execution",
+                        agent=start_agent_name,
+                        enable_temporal=self.flock.enable_temporal,
+                    )
+
+                    # Execute workflow using appropriate engine
+                    result = await self._execute_workflow(run_context, memo)
+
+                    # Set result attributes on span
+                    span.set_attribute("result.type", str(type(result)))
+                    result_str = str(result)
+                    span.set_attribute(
+                        "result.preview",
+                        result_str[:1000] + ("..." if len(result_str) > 1000 else ""),
+                    )
+
+                    # Format and return result
+                    return self._format_result(result, box_result)
+
+            except Exception as e:
+                logger.error(f"Flock run '{self.flock.name}' failed: {e}", exc_info=True)
+                span.record_exception(e)
+                span.set_status(trace.Status(trace.StatusCode.ERROR, str(e)))
+
+                # Return a consistent error structure
+                error_output = {
+                    "error": str(e),
+                    "details": f"Flock run '{self.flock.name}' failed.",
+                    "run_id": effective_run_id,
+                    "start_agent": start_agent_name,
+                }
+                return Box(error_output) if box_result else error_output
+
+    def _resolve_start_agent(self, agent: "FlockAgent | str | None") -> str:
+        """Resolve the start agent name from various input types."""
+        from flock.core.flock_agent import FlockAgent as ConcreteFlockAgent
+        from flock.core.registry import get_registry
+
+        registry = get_registry()
+
+        # Determine starting agent name
+        start_agent_name: str | None = None
+        if isinstance(agent, ConcreteFlockAgent):
+            start_agent_name = agent.name
+            if start_agent_name not in self.flock._agents:  # Add if not already present
+                self.flock.add_agent(agent)
+        elif isinstance(agent, str):
+            start_agent_name = agent
+        else:  # start_agent is None
+            start_agent_name = self.flock._start_agent_name
+
+        # Default to first agent if only one exists and none specified
+        if not start_agent_name and len(self.flock._agents) == 1:
+            start_agent_name = next(iter(self.flock._agents.keys()))
+        elif not start_agent_name:
+            raise ValueError(
+                "No start_agent specified and multiple/no agents exist in the Flock instance."
+            )
+
+        # Check if start_agent is in agents
+        if start_agent_name not in self.flock._agents:
+            # Try loading from registry if not found locally yet
+            reg_agent = registry.get_agent(start_agent_name)
+            if reg_agent:
+                self.flock.add_agent(reg_agent)
+                logger.info(f"Loaded start agent '{start_agent_name}' from registry.")
+            else:
+                raise ValueError(
+                    f"Start agent '{start_agent_name}' not found locally or in registry."
+                )
+
+        return start_agent_name
+
+    def _setup_execution_context(
+        self,
+        context: FlockContext | None,
+        start_agent_name: str,
+        run_input: dict,
+        run_id: str,
+    ) -> FlockContext:
+        """Setup the execution context for the workflow."""
+        resolved_start_agent = self.flock._agents.get(start_agent_name)
+        if not resolved_start_agent:  # Should have been handled by now
+            raise ValueError(f"Start agent '{start_agent_name}' not found after checks.")
+
+        run_context = context if context else FlockContext()
+        set_baggage("run_id", run_id)  # Set for OpenTelemetry
+
+        initialize_context(
+            run_context,
+            start_agent_name,
+            run_input,
+            run_id,
+            not self.flock.enable_temporal,  # local_debug is inverse of enable_temporal
+            self.flock.model or resolved_start_agent.model or DEFAULT_MODEL,
+        )
+
+        # Add agent definitions to context for routing/serialization within workflow
+        for agent_name_iter, agent_instance_iter in self.flock.agents.items():
+            agent_dict_repr = agent_instance_iter.to_dict()  # Agents handle their own serialization
+            run_context.add_agent_definition(
+                agent_type=type(agent_instance_iter),
+                agent_name=agent_name_iter,
+                agent_data=agent_dict_repr,
+            )
+
+        # Add temporal config to context if enabled
+        if self.flock.enable_temporal and self.flock.temporal_config:
+            run_context.set_variable(
+                "flock.temporal_workflow_config",
+                self.flock.temporal_config.model_dump(mode="json"),
+            )
+
+        return run_context
+
+    async def _execute_workflow(
+        self, run_context: FlockContext, memo: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
+        """Execute the workflow using the appropriate execution engine."""
+        if not self.flock.enable_temporal:
+            return await run_local_workflow(run_context, box_result=False)
+        else:
+            return await run_temporal_workflow(
+                self.flock,  # Pass the Flock instance
+                run_context,
+                box_result=False,
+                memo=memo,
+            )
+
+    def _format_result(self, result: dict[str, Any], box_result: bool) -> Box | dict:
+        """Format the execution result."""
+        if box_result:
+            try:
+                logger.debug("Boxing final result.")
+                return Box(result)
+            except ImportError:
+                logger.warning("Box library not installed, returning raw dict.")
+                return result
+        else:
+            return result
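Note: `_run_sync` above bridges synchronous callers into asyncio: plain `asyncio.run` when no loop is active, otherwise `asyncio.run` inside a fresh thread with `contextvars` propagation so OpenTelemetry baggage survives the hop. A standalone sketch of that pattern, independent of Flock:

```python
import asyncio
import contextvars
from concurrent.futures import ThreadPoolExecutor

request_id = contextvars.ContextVar("request_id", default="unset")

def run_sync(coro):
    """Run *coro* from sync code, whether or not an event loop is running."""
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(coro)              # no loop: plain asyncio.run
    ctx = contextvars.copy_context()          # propagate context vars (baggage)
    with ThreadPoolExecutor(max_workers=1) as pool:
        return pool.submit(ctx.run, asyncio.run, coro).result()

async def whoami() -> str:
    return request_id.get()

request_id.set("req-123")
print(run_sync(whoami()))                     # no running loop -> "req-123"

async def main():
    # Called while a loop is running: takes the thread path, context intact.
    print(run_sync(whoami()))                 # -> "req-123"

asyncio.run(main())
```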
@@ -0,0 +1,125 @@
+# src/flock/core/orchestration/flock_initialization.py
+"""Initialization functionality for Flock orchestrator."""
+
+import os
+import uuid
+from typing import TYPE_CHECKING
+
+from opentelemetry.baggage import get_baggage, set_baggage
+
+from flock.core.logging.logging import get_logger
+from flock.core.registry import get_registry
+from flock.core.util.cli_helper import init_console
+
+if TYPE_CHECKING:
+    from flock.core.flock import Flock
+    from flock.core.flock_agent import FlockAgent
+    from flock.core.mcp.flock_mcp_server import FlockMCPServer
+
+logger = get_logger("flock.initialization")
+
+
+class FlockInitialization:
+    """Handles initialization logic for Flock orchestrator."""
+
+    def __init__(self, flock: "Flock"):
+        self.flock = flock
+
+    def setup(
+        self,
+        agents: list["FlockAgent"] | None = None,
+        servers: list["FlockMCPServer"] | None = None,
+    ) -> None:
+        """Handle all initialization side effects and setup."""
+        # Register passed servers first (agents may depend on them)
+        if servers:
+            self._register_servers(servers)
+
+        # Register passed agents
+        if agents:
+            self._register_agents(agents)
+
+        # Initialize console if needed for banner
+        if self.flock.show_flock_banner:
+            init_console(clear_screen=True, show_banner=self.flock.show_flock_banner)
+
+        # Set Temporal debug environment variable
+        self._set_temporal_debug_flag()
+
+        # Ensure session ID exists in baggage
+        self._ensure_session_id()
+
+        # Auto-discover components
+        registry = get_registry()
+        registry.discover_and_register_components()
+
+        # Setup Opik if enabled
+        if self.flock.enable_opik:
+            self._setup_opik()
+
+        logger.info(
+            "Flock instance initialized",
+            name=self.flock.name,
+            model=self.flock.model,
+            enable_temporal=self.flock.enable_temporal,
+        )
+
+    def _register_servers(self, servers: list["FlockMCPServer"]) -> None:
+        """Register servers with the Flock instance."""
+        from flock.core.mcp.flock_mcp_server import (
+            FlockMCPServer as ConcreteFlockMCPServer,
+        )
+
+        for server in servers:
+            if isinstance(server, ConcreteFlockMCPServer):
+                self.flock.add_server(server)
+            else:
+                logger.warning(
+                    f"Item provided in 'servers' list is not a FlockMCPServer: {type(server)}"
+                )
+
+    def _register_agents(self, agents: list["FlockAgent"]) -> None:
+        """Register agents with the Flock instance."""
+        from flock.core.flock_agent import FlockAgent as ConcreteFlockAgent
+
+        for agent in agents:
+            if isinstance(agent, ConcreteFlockAgent):
+                self.flock.add_agent(agent)
+            else:
+                logger.warning(
+                    f"Item provided in 'agents' list is not a FlockAgent: {type(agent)}"
+                )
+
+    def _set_temporal_debug_flag(self) -> None:
+        """Set or remove LOCAL_DEBUG env var based on enable_temporal."""
+        if not self.flock.enable_temporal:
+            if "LOCAL_DEBUG" not in os.environ:
+                os.environ["LOCAL_DEBUG"] = "1"
+                logger.debug("Set LOCAL_DEBUG environment variable for local execution.")
+        elif "LOCAL_DEBUG" in os.environ:
+            del os.environ["LOCAL_DEBUG"]
+            logger.debug("Removed LOCAL_DEBUG environment variable for Temporal execution.")
+
+    def _ensure_session_id(self) -> None:
+        """Ensure a session_id exists in the OpenTelemetry baggage."""
+        session_id = get_baggage("session_id")
+        if not session_id:
+            session_id = str(uuid.uuid4())
+            set_baggage("session_id", session_id)
+            logger.debug(f"Generated new session_id: {session_id}")
+
+    def _setup_opik(self) -> None:
+        """Setup Opik integration."""
+        try:
+            import dspy
+            import opik
+            from opik.integrations.dspy.callback import OpikCallback
+
+            opik.configure(use_local=True, automatic_approvals=True)
+            opik_callback = OpikCallback(project_name=self.flock.name, log_graph=True)
+            dspy.settings.configure(callbacks=[opik_callback])
+            logger.info("Opik integration enabled")
+        except ImportError as e:
+            logger.error(f"Failed to setup Opik integration: {e}")
+            logger.warning("Disabling Opik integration")
+            self.flock.enable_opik = False
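Note: `_ensure_session_id` above relies on OpenTelemetry baggage. A standalone sketch of that pattern (not the package's exact helper); in the OpenTelemetry Python API, `set_baggage` returns a new `Context` that must be attached for later `get_baggage` calls to see the value:

```python
import uuid

from opentelemetry import baggage
from opentelemetry import context as otel_context

def ensure_session_id() -> str:
    """Return the session_id from baggage, creating and attaching one if absent."""
    session_id = baggage.get_baggage("session_id")
    if not session_id:
        session_id = str(uuid.uuid4())
        # set_baggage returns a new Context; attach it so it becomes current.
        otel_context.attach(baggage.set_baggage("session_id", session_id))
    return str(session_id)

print(ensure_session_id())                 # generates and attaches a new id
print(baggage.get_baggage("session_id"))   # same id, read back from the attached context
```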
@@ -0,0 +1,67 @@
+# src/flock/core/orchestration/flock_server_manager.py
+"""Server management functionality for Flock orchestrator."""
+
+from typing import TYPE_CHECKING
+
+from flock.core.flock_server_manager import (
+    FlockServerManager as InternalServerManager,
+)
+from flock.core.logging.logging import get_logger
+
+if TYPE_CHECKING:
+    from flock.core.flock import Flock
+    from flock.core.mcp.flock_mcp_server import FlockMCPServer
+
+logger = get_logger("flock.server_manager")
+
+
+class FlockServerManager:
+    """Handles server lifecycle management for Flock orchestrator."""
+
+    def __init__(self, flock: "Flock"):
+        self.flock = flock
+        # Use the existing internal server manager
+        self._internal_mgr = InternalServerManager()
+
+    def add_server(self, server: "FlockMCPServer") -> "FlockMCPServer":
+        """Adds a server instance to this Flock configuration and registry as well as set it up to be managed by internal manager."""
+        from flock.core.mcp.flock_mcp_server import (
+            FlockMCPServer as ConcreteFlockMCPServer,
+        )
+        from flock.core.registry import get_registry
+
+        registry = get_registry()
+
+        if not isinstance(server, ConcreteFlockMCPServer):
+            raise TypeError("Provided object is not a FlockMCPServer instance.")
+        if not server.config.name:
+            raise ValueError("Server must have a name.")
+
+        if server.config.name in self.flock.servers:
+            raise ValueError(
+                f"Server with this name already exists. Name: '{server.config.name}'"
+            )
+
+        self.flock._servers[server.config.name] = server
+        registry.register_server(server)  # Register globally.
+
+        # Prepare server to be managed by the FlockServerManager
+        logger.info(f"Adding server '{server.config.name}' to managed list.")
+        self._internal_mgr.add_server_sync(server=server)
+        logger.info(f"Server '{server.config.name}' is now on managed list.")
+
+        logger.info(f"Server '{server.config.name}' added to Flock '{self.flock.name}'")
+        return server
+
+    async def __aenter__(self):
+        """Start all managed servers."""
+        return await self._internal_mgr.__aenter__()
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Cleanup all managed servers."""
+        return await self._internal_mgr.__aexit__(exc_type, exc_val, exc_tb)
+
+    @property
+    def servers(self) -> dict[str, "FlockMCPServer"]:
+        """Returns the dictionary of servers managed by this Flock instance."""
+        return self.flock._servers
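Note: this manager forwards `__aenter__`/`__aexit__` to the internal server manager, so the `async with self.flock._mgr:` block seen in flock_execution.py starts every registered server on entry and tears them down on exit. A minimal standalone sketch of that delegation pattern (class names here are illustrative, not from the package):

```python
import asyncio

class InternalManager:
    """Stand-in for the internal manager that owns the real server lifecycles."""
    async def __aenter__(self):
        print("servers starting up")
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        print("servers shutting down")
        return False  # do not swallow exceptions

class ManagerFacade:
    """Thin wrapper that simply delegates the async context-manager protocol."""
    def __init__(self):
        self._internal_mgr = InternalManager()

    async def __aenter__(self):
        return await self._internal_mgr.__aenter__()

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return await self._internal_mgr.__aexit__(exc_type, exc_val, exc_tb)

async def main():
    async with ManagerFacade():
        print("running workflow inside managed server context")

asyncio.run(main())
```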