flock-core 0.5.0b28__py3-none-any.whl → 0.5.56b0__py3-none-any.whl
This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
Potentially problematic release: this version of flock-core might be problematic.
- flock/__init__.py +12 -217
- flock/agent.py +678 -0
- flock/api/themes.py +71 -0
- flock/artifacts.py +79 -0
- flock/cli.py +75 -0
- flock/components.py +173 -0
- flock/dashboard/__init__.py +28 -0
- flock/dashboard/collector.py +283 -0
- flock/dashboard/events.py +182 -0
- flock/dashboard/launcher.py +230 -0
- flock/dashboard/service.py +537 -0
- flock/dashboard/websocket.py +235 -0
- flock/engines/__init__.py +6 -0
- flock/engines/dspy_engine.py +856 -0
- flock/examples.py +128 -0
- flock/{core/util → helper}/cli_helper.py +4 -3
- flock/{core/logging → logging}/__init__.py +2 -3
- flock/{core/logging → logging}/formatters/enum_builder.py +3 -4
- flock/{core/logging → logging}/formatters/theme_builder.py +19 -44
- flock/{core/logging → logging}/formatters/themed_formatter.py +69 -115
- flock/{core/logging → logging}/logging.py +77 -61
- flock/{core/logging → logging}/telemetry.py +20 -26
- flock/{core/logging → logging}/telemetry_exporter/base_exporter.py +2 -2
- flock/{core/logging → logging}/telemetry_exporter/file_exporter.py +6 -9
- flock/{core/logging → logging}/telemetry_exporter/sqlite_exporter.py +2 -3
- flock/{core/logging → logging}/trace_and_logged.py +20 -24
- flock/mcp/__init__.py +91 -0
- flock/{core/mcp/mcp_client.py → mcp/client.py} +103 -154
- flock/{core/mcp/mcp_config.py → mcp/config.py} +62 -117
- flock/mcp/manager.py +255 -0
- flock/mcp/servers/sse/__init__.py +1 -1
- flock/mcp/servers/sse/flock_sse_server.py +11 -53
- flock/mcp/servers/stdio/__init__.py +1 -1
- flock/mcp/servers/stdio/flock_stdio_server.py +8 -48
- flock/mcp/servers/streamable_http/flock_streamable_http_server.py +17 -62
- flock/mcp/servers/websockets/flock_websocket_server.py +7 -40
- flock/{core/mcp/flock_mcp_tool.py → mcp/tool.py} +16 -26
- flock/mcp/types/__init__.py +42 -0
- flock/{core/mcp → mcp}/types/callbacks.py +9 -15
- flock/{core/mcp → mcp}/types/factories.py +7 -6
- flock/{core/mcp → mcp}/types/handlers.py +13 -18
- flock/{core/mcp → mcp}/types/types.py +70 -74
- flock/{core/mcp → mcp}/util/helpers.py +1 -1
- flock/orchestrator.py +645 -0
- flock/registry.py +148 -0
- flock/runtime.py +262 -0
- flock/service.py +140 -0
- flock/store.py +69 -0
- flock/subscription.py +111 -0
- flock/themes/andromeda.toml +1 -1
- flock/themes/apple-system-colors.toml +1 -1
- flock/themes/arcoiris.toml +1 -1
- flock/themes/atomonelight.toml +1 -1
- flock/themes/ayu copy.toml +1 -1
- flock/themes/ayu-light.toml +1 -1
- flock/themes/belafonte-day.toml +1 -1
- flock/themes/belafonte-night.toml +1 -1
- flock/themes/blulocodark.toml +1 -1
- flock/themes/breeze.toml +1 -1
- flock/themes/broadcast.toml +1 -1
- flock/themes/brogrammer.toml +1 -1
- flock/themes/builtin-dark.toml +1 -1
- flock/themes/builtin-pastel-dark.toml +1 -1
- flock/themes/catppuccin-latte.toml +1 -1
- flock/themes/catppuccin-macchiato.toml +1 -1
- flock/themes/catppuccin-mocha.toml +1 -1
- flock/themes/cga.toml +1 -1
- flock/themes/chalk.toml +1 -1
- flock/themes/ciapre.toml +1 -1
- flock/themes/coffee-theme.toml +1 -1
- flock/themes/cyberpunkscarletprotocol.toml +1 -1
- flock/themes/dark+.toml +1 -1
- flock/themes/darkermatrix.toml +1 -1
- flock/themes/darkside.toml +1 -1
- flock/themes/desert.toml +1 -1
- flock/themes/django.toml +1 -1
- flock/themes/djangosmooth.toml +1 -1
- flock/themes/doomone.toml +1 -1
- flock/themes/dotgov.toml +1 -1
- flock/themes/dracula+.toml +1 -1
- flock/themes/duckbones.toml +1 -1
- flock/themes/encom.toml +1 -1
- flock/themes/espresso.toml +1 -1
- flock/themes/everblush.toml +1 -1
- flock/themes/fairyfloss.toml +1 -1
- flock/themes/fideloper.toml +1 -1
- flock/themes/fishtank.toml +1 -1
- flock/themes/flexoki-light.toml +1 -1
- flock/themes/floraverse.toml +1 -1
- flock/themes/framer.toml +1 -1
- flock/themes/galizur.toml +1 -1
- flock/themes/github.toml +1 -1
- flock/themes/grass.toml +1 -1
- flock/themes/grey-green.toml +1 -1
- flock/themes/gruvboxlight.toml +1 -1
- flock/themes/guezwhoz.toml +1 -1
- flock/themes/harper.toml +1 -1
- flock/themes/hax0r-blue.toml +1 -1
- flock/themes/hopscotch.256.toml +1 -1
- flock/themes/ic-green-ppl.toml +1 -1
- flock/themes/iceberg-dark.toml +1 -1
- flock/themes/japanesque.toml +1 -1
- flock/themes/jubi.toml +1 -1
- flock/themes/kibble.toml +1 -1
- flock/themes/kolorit.toml +1 -1
- flock/themes/kurokula.toml +1 -1
- flock/themes/materialdesigncolors.toml +1 -1
- flock/themes/matrix.toml +1 -1
- flock/themes/mellifluous.toml +1 -1
- flock/themes/midnight-in-mojave.toml +1 -1
- flock/themes/monokai-remastered.toml +1 -1
- flock/themes/monokai-soda.toml +1 -1
- flock/themes/neon.toml +1 -1
- flock/themes/neopolitan.toml +1 -1
- flock/themes/nord-light.toml +1 -1
- flock/themes/ocean.toml +1 -1
- flock/themes/onehalfdark.toml +1 -1
- flock/themes/onehalflight.toml +1 -1
- flock/themes/palenighthc.toml +1 -1
- flock/themes/paulmillr.toml +1 -1
- flock/themes/pencildark.toml +1 -1
- flock/themes/pnevma.toml +1 -1
- flock/themes/purple-rain.toml +1 -1
- flock/themes/purplepeter.toml +1 -1
- flock/themes/raycast-dark.toml +1 -1
- flock/themes/red-sands.toml +1 -1
- flock/themes/relaxed.toml +1 -1
- flock/themes/retro.toml +1 -1
- flock/themes/rose-pine.toml +1 -1
- flock/themes/royal.toml +1 -1
- flock/themes/ryuuko.toml +1 -1
- flock/themes/sakura.toml +1 -1
- flock/themes/scarlet-protocol.toml +1 -1
- flock/themes/seoulbones-dark.toml +1 -1
- flock/themes/shades-of-purple.toml +1 -1
- flock/themes/smyck.toml +1 -1
- flock/themes/softserver.toml +1 -1
- flock/themes/solarized-darcula.toml +1 -1
- flock/themes/square.toml +1 -1
- flock/themes/sugarplum.toml +1 -1
- flock/themes/thayer-bright.toml +1 -1
- flock/themes/tokyonight.toml +1 -1
- flock/themes/tomorrow.toml +1 -1
- flock/themes/ubuntu.toml +1 -1
- flock/themes/ultradark.toml +1 -1
- flock/themes/ultraviolent.toml +1 -1
- flock/themes/unikitty.toml +1 -1
- flock/themes/urple.toml +1 -1
- flock/themes/vesper.toml +1 -1
- flock/themes/vimbones.toml +1 -1
- flock/themes/wildcherry.toml +1 -1
- flock/themes/wilmersdorf.toml +1 -1
- flock/themes/wryan.toml +1 -1
- flock/themes/xcodedarkhc.toml +1 -1
- flock/themes/xcodelight.toml +1 -1
- flock/themes/zenbones-light.toml +1 -1
- flock/themes/zenwritten-dark.toml +1 -1
- flock/utilities.py +301 -0
- flock/{components/utility → utility}/output_utility_component.py +68 -53
- flock/visibility.py +107 -0
- flock_core-0.5.56b0.dist-info/METADATA +747 -0
- flock_core-0.5.56b0.dist-info/RECORD +398 -0
- flock_core-0.5.56b0.dist-info/entry_points.txt +2 -0
- {flock_core-0.5.0b28.dist-info → flock_core-0.5.56b0.dist-info}/licenses/LICENSE +1 -1
- flock/adapter/__init__.py +0 -14
- flock/adapter/azure_adapter.py +0 -68
- flock/adapter/chroma_adapter.py +0 -73
- flock/adapter/faiss_adapter.py +0 -97
- flock/adapter/pinecone_adapter.py +0 -51
- flock/adapter/vector_base.py +0 -47
- flock/cli/assets/release_notes.md +0 -140
- flock/cli/config.py +0 -8
- flock/cli/constants.py +0 -36
- flock/cli/create_agent.py +0 -1
- flock/cli/create_flock.py +0 -280
- flock/cli/execute_flock.py +0 -620
- flock/cli/load_agent.py +0 -1
- flock/cli/load_examples.py +0 -1
- flock/cli/load_flock.py +0 -192
- flock/cli/load_release_notes.py +0 -20
- flock/cli/loaded_flock_cli.py +0 -254
- flock/cli/manage_agents.py +0 -459
- flock/cli/registry_management.py +0 -889
- flock/cli/runner.py +0 -41
- flock/cli/settings.py +0 -857
- flock/cli/utils.py +0 -135
- flock/cli/view_results.py +0 -29
- flock/cli/yaml_editor.py +0 -396
- flock/components/__init__.py +0 -30
- flock/components/evaluation/__init__.py +0 -9
- flock/components/evaluation/declarative_evaluation_component.py +0 -606
- flock/components/routing/__init__.py +0 -15
- flock/components/routing/conditional_routing_component.py +0 -494
- flock/components/routing/default_routing_component.py +0 -103
- flock/components/routing/llm_routing_component.py +0 -206
- flock/components/utility/__init__.py +0 -22
- flock/components/utility/example_utility_component.py +0 -250
- flock/components/utility/feedback_utility_component.py +0 -206
- flock/components/utility/memory_utility_component.py +0 -550
- flock/components/utility/metrics_utility_component.py +0 -700
- flock/config.py +0 -61
- flock/core/__init__.py +0 -110
- flock/core/agent/__init__.py +0 -16
- flock/core/agent/default_agent.py +0 -216
- flock/core/agent/flock_agent_components.py +0 -104
- flock/core/agent/flock_agent_execution.py +0 -101
- flock/core/agent/flock_agent_integration.py +0 -260
- flock/core/agent/flock_agent_lifecycle.py +0 -186
- flock/core/agent/flock_agent_serialization.py +0 -381
- flock/core/api/__init__.py +0 -10
- flock/core/api/custom_endpoint.py +0 -45
- flock/core/api/endpoints.py +0 -254
- flock/core/api/main.py +0 -162
- flock/core/api/models.py +0 -97
- flock/core/api/run_store.py +0 -224
- flock/core/api/runner.py +0 -44
- flock/core/api/service.py +0 -214
- flock/core/component/__init__.py +0 -15
- flock/core/component/agent_component_base.py +0 -309
- flock/core/component/evaluation_component.py +0 -62
- flock/core/component/routing_component.py +0 -74
- flock/core/component/utility_component.py +0 -69
- flock/core/config/flock_agent_config.py +0 -58
- flock/core/config/scheduled_agent_config.py +0 -40
- flock/core/context/context.py +0 -213
- flock/core/context/context_manager.py +0 -37
- flock/core/context/context_vars.py +0 -10
- flock/core/evaluation/utils.py +0 -396
- flock/core/execution/batch_executor.py +0 -369
- flock/core/execution/evaluation_executor.py +0 -438
- flock/core/execution/local_executor.py +0 -31
- flock/core/execution/opik_executor.py +0 -103
- flock/core/execution/temporal_executor.py +0 -164
- flock/core/flock.py +0 -634
- flock/core/flock_agent.py +0 -336
- flock/core/flock_factory.py +0 -613
- flock/core/flock_scheduler.py +0 -166
- flock/core/flock_server_manager.py +0 -136
- flock/core/interpreter/python_interpreter.py +0 -689
- flock/core/mcp/__init__.py +0 -1
- flock/core/mcp/flock_mcp_server.py +0 -680
- flock/core/mcp/mcp_client_manager.py +0 -201
- flock/core/mcp/types/__init__.py +0 -1
- flock/core/mixin/dspy_integration.py +0 -403
- flock/core/mixin/prompt_parser.py +0 -125
- flock/core/orchestration/__init__.py +0 -15
- flock/core/orchestration/flock_batch_processor.py +0 -94
- flock/core/orchestration/flock_evaluator.py +0 -113
- flock/core/orchestration/flock_execution.py +0 -295
- flock/core/orchestration/flock_initialization.py +0 -149
- flock/core/orchestration/flock_server_manager.py +0 -67
- flock/core/orchestration/flock_web_server.py +0 -117
- flock/core/registry/__init__.py +0 -45
- flock/core/registry/agent_registry.py +0 -69
- flock/core/registry/callable_registry.py +0 -139
- flock/core/registry/component_discovery.py +0 -142
- flock/core/registry/component_registry.py +0 -64
- flock/core/registry/config_mapping.py +0 -64
- flock/core/registry/decorators.py +0 -137
- flock/core/registry/registry_hub.py +0 -205
- flock/core/registry/server_registry.py +0 -57
- flock/core/registry/type_registry.py +0 -86
- flock/core/serialization/__init__.py +0 -13
- flock/core/serialization/callable_registry.py +0 -52
- flock/core/serialization/flock_serializer.py +0 -832
- flock/core/serialization/json_encoder.py +0 -41
- flock/core/serialization/secure_serializer.py +0 -175
- flock/core/serialization/serializable.py +0 -342
- flock/core/serialization/serialization_utils.py +0 -412
- flock/core/util/file_path_utils.py +0 -223
- flock/core/util/hydrator.py +0 -309
- flock/core/util/input_resolver.py +0 -164
- flock/core/util/loader.py +0 -59
- flock/core/util/splitter.py +0 -219
- flock/di.py +0 -27
- flock/platform/docker_tools.py +0 -49
- flock/platform/jaeger_install.py +0 -86
- flock/webapp/__init__.py +0 -1
- flock/webapp/app/__init__.py +0 -0
- flock/webapp/app/api/__init__.py +0 -0
- flock/webapp/app/api/agent_management.py +0 -241
- flock/webapp/app/api/execution.py +0 -709
- flock/webapp/app/api/flock_management.py +0 -129
- flock/webapp/app/api/registry_viewer.py +0 -30
- flock/webapp/app/chat.py +0 -665
- flock/webapp/app/config.py +0 -104
- flock/webapp/app/dependencies.py +0 -117
- flock/webapp/app/main.py +0 -1070
- flock/webapp/app/middleware.py +0 -113
- flock/webapp/app/models_ui.py +0 -7
- flock/webapp/app/services/__init__.py +0 -0
- flock/webapp/app/services/feedback_file_service.py +0 -363
- flock/webapp/app/services/flock_service.py +0 -337
- flock/webapp/app/services/sharing_models.py +0 -81
- flock/webapp/app/services/sharing_store.py +0 -762
- flock/webapp/app/templates/theme_mapper.html +0 -326
- flock/webapp/app/theme_mapper.py +0 -812
- flock/webapp/app/utils.py +0 -85
- flock/webapp/run.py +0 -215
- flock/webapp/static/css/chat.css +0 -301
- flock/webapp/static/css/components.css +0 -167
- flock/webapp/static/css/header.css +0 -39
- flock/webapp/static/css/layout.css +0 -46
- flock/webapp/static/css/sidebar.css +0 -127
- flock/webapp/static/css/two-pane.css +0 -48
- flock/webapp/templates/base.html +0 -200
- flock/webapp/templates/chat.html +0 -152
- flock/webapp/templates/chat_settings.html +0 -19
- flock/webapp/templates/flock_editor.html +0 -16
- flock/webapp/templates/index.html +0 -12
- flock/webapp/templates/partials/_agent_detail_form.html +0 -93
- flock/webapp/templates/partials/_agent_list.html +0 -18
- flock/webapp/templates/partials/_agent_manager_view.html +0 -51
- flock/webapp/templates/partials/_agent_tools_checklist.html +0 -14
- flock/webapp/templates/partials/_chat_container.html +0 -15
- flock/webapp/templates/partials/_chat_messages.html +0 -57
- flock/webapp/templates/partials/_chat_settings_form.html +0 -85
- flock/webapp/templates/partials/_create_flock_form.html +0 -50
- flock/webapp/templates/partials/_dashboard_flock_detail.html +0 -17
- flock/webapp/templates/partials/_dashboard_flock_file_list.html +0 -16
- flock/webapp/templates/partials/_dashboard_flock_properties_preview.html +0 -28
- flock/webapp/templates/partials/_dashboard_upload_flock_form.html +0 -16
- flock/webapp/templates/partials/_dynamic_input_form_content.html +0 -22
- flock/webapp/templates/partials/_env_vars_table.html +0 -23
- flock/webapp/templates/partials/_execution_form.html +0 -118
- flock/webapp/templates/partials/_execution_view_container.html +0 -28
- flock/webapp/templates/partials/_flock_file_list.html +0 -23
- flock/webapp/templates/partials/_flock_properties_form.html +0 -52
- flock/webapp/templates/partials/_flock_upload_form.html +0 -16
- flock/webapp/templates/partials/_header_flock_status.html +0 -5
- flock/webapp/templates/partials/_load_manager_view.html +0 -49
- flock/webapp/templates/partials/_registry_table.html +0 -25
- flock/webapp/templates/partials/_registry_viewer_content.html +0 -70
- flock/webapp/templates/partials/_results_display.html +0 -78
- flock/webapp/templates/partials/_settings_env_content.html +0 -9
- flock/webapp/templates/partials/_settings_theme_content.html +0 -14
- flock/webapp/templates/partials/_settings_view.html +0 -36
- flock/webapp/templates/partials/_share_chat_link_snippet.html +0 -11
- flock/webapp/templates/partials/_share_link_snippet.html +0 -35
- flock/webapp/templates/partials/_sidebar.html +0 -74
- flock/webapp/templates/partials/_streaming_results_container.html +0 -195
- flock/webapp/templates/partials/_structured_data_view.html +0 -40
- flock/webapp/templates/partials/_theme_preview.html +0 -36
- flock/webapp/templates/registry_viewer.html +0 -84
- flock/webapp/templates/shared_run_page.html +0 -140
- flock/workflow/__init__.py +0 -0
- flock/workflow/activities.py +0 -196
- flock/workflow/agent_activities.py +0 -24
- flock/workflow/agent_execution_activity.py +0 -202
- flock/workflow/flock_workflow.py +0 -214
- flock/workflow/temporal_config.py +0 -96
- flock/workflow/temporal_setup.py +0 -68
- flock_core-0.5.0b28.dist-info/METADATA +0 -274
- flock_core-0.5.0b28.dist-info/RECORD +0 -561
- flock_core-0.5.0b28.dist-info/entry_points.txt +0 -2
- /flock/{core/logging → logging}/formatters/themes.py +0 -0
- /flock/{core/logging → logging}/span_middleware/baggage_span_processor.py +0 -0
- /flock/{core/mcp → mcp}/util/__init__.py +0 -0
- {flock_core-0.5.0b28.dist-info → flock_core-0.5.56b0.dist-info}/WHEEL +0 -0
flock/engines/dspy_engine.py (new file)

@@ -0,0 +1,856 @@
"""DSPy-powered engine component that mirrors the design implementation."""

from __future__ import annotations

import json
import os
from collections import OrderedDict, defaultdict
from collections.abc import Iterable, Mapping, Sequence
from contextlib import nullcontext
from typing import Any, Literal

from pydantic import BaseModel, Field

from flock.artifacts import Artifact
from flock.components import EngineComponent
from flock.dashboard.events import StreamingOutputEvent
from flock.logging.logging import get_logger
from flock.registry import type_registry
from flock.runtime import EvalInputs, EvalResult


logger = get_logger(__name__)


_live_patch_applied = False


# T071: Auto-detect test environment for streaming
def _default_stream_value() -> bool:
    """Return default stream value based on environment.

    Returns False in pytest (clean test output), True otherwise (rich streaming).
    """
    import sys

    return "pytest" not in sys.modules


# Apply the Rich Live patch immediately on module import
def _apply_live_patch_on_import() -> None:
    """Apply Rich Live crop_above patch when module is imported."""
    try:
        _ensure_live_crop_above()
    except Exception:
        pass  # Silently ignore if Rich is not available


def _ensure_live_crop_above() -> None:
    """Monkeypatch rich.live_render to support 'crop_above' overflow."""
    global _live_patch_applied
    if _live_patch_applied:
        return
    try:
        from typing import Literal as _Literal

        from rich import live_render as _lr
    except Exception:
        return

    # Extend the accepted literal at runtime so type checks don't block the new option.
    current_args = getattr(_lr.VerticalOverflowMethod, "__args__", ())
    if "crop_above" not in current_args:
        _lr.VerticalOverflowMethod = _Literal["crop", "crop_above", "ellipsis", "visible"]  # type: ignore[assignment]

    if getattr(_lr.LiveRender.__rich_console__, "_flock_crop_above", False):
        _live_patch_applied = True
        return

    Segment = _lr.Segment
    Text = _lr.Text
    loop_last = _lr.loop_last

    def _patched_rich_console(self, console, options):
        renderable = self.renderable
        style = console.get_style(self.style)
        lines = console.render_lines(renderable, options, style=style, pad=False)
        shape = Segment.get_shape(lines)

        _, height = shape
        max_height = options.size.height
        if height > max_height:
            if self.vertical_overflow == "crop":
                lines = lines[:max_height]
                shape = Segment.get_shape(lines)
            elif self.vertical_overflow == "crop_above":
                lines = lines[-max_height:]
                shape = Segment.get_shape(lines)
            elif self.vertical_overflow == "ellipsis" and max_height > 0:
                lines = lines[: (max_height - 1)]
                overflow_text = Text(
                    "...",
                    overflow="crop",
                    justify="center",
                    end="",
                    style="live.ellipsis",
                )
                lines.append(list(console.render(overflow_text)))
                shape = Segment.get_shape(lines)
        self._shape = shape

        new_line = Segment.line()
        for last, line in loop_last(lines):
            yield from line
            if not last:
                yield new_line

    _patched_rich_console._flock_crop_above = True  # type: ignore[attr-defined]
    _lr.LiveRender.__rich_console__ = _patched_rich_console
    _live_patch_applied = True


class DSPyEngine(EngineComponent):
    """Execute a minimal DSPy program backed by a hosted LLM.

    Behavior intentionally mirrors ``design/dspy_engine.py`` so that orchestration
    relies on the same model resolution, signature preparation, and result
    normalization logic.
    """

    name: str | None = "dspy"
    model: str | None = None
    instructions: str | None = None
    temperature: float = 1.0
    max_tokens: int = 32000
    max_tool_calls: int = 10
    max_retries: int = 0
    stream: bool = Field(
        default_factory=lambda: _default_stream_value(),
        description="Enable streaming output from the underlying DSPy program. Auto-disables in pytest.",
    )
    no_output: bool = Field(
        default=False,
        description="Disable output from the underlying DSPy program.",
    )
    stream_vertical_overflow: Literal["crop", "ellipsis", "crop_above", "visible"] = Field(
        default="crop_above",
        description=(
            "Rich Live vertical overflow strategy; select how tall output is handled; 'crop_above' keeps the most recent rows visible."
        ),
    )
    status_output_field: str = Field(
        default="_status_output",
        description="The field name for the status output.",
    )
    theme: str = Field(
        default="afterglow",
        description="Theme name for Rich output formatting.",
    )
    enable_cache: bool = Field(
        default=False,
        description="Enable caching of DSPy program results",
    )

    async def evaluate(self, agent, ctx, inputs: EvalInputs) -> EvalResult:  # type: ignore[override]
        if not inputs.artifacts:
            return EvalResult(artifacts=[], state=dict(inputs.state))

        model_name = self._resolve_model_name()
        dspy_mod = self._import_dspy()

        lm = dspy_mod.LM(
            model=model_name,
            temperature=self.temperature,
            max_tokens=self.max_tokens,
            cache=self.enable_cache,
            num_retries=self.max_retries,
        )

        primary_artifact = self._select_primary_artifact(inputs.artifacts)
        input_model = self._resolve_input_model(primary_artifact)
        validated_input = self._validate_input_payload(input_model, primary_artifact.payload)
        output_model = self._resolve_output_model(agent)

        # Fetch conversation context from blackboard
        context_history = await self.fetch_conversation_context(ctx)
        has_context = bool(context_history) and self.should_use_context(inputs)

        # Prepare signature with optional context field
        signature = self._prepare_signature_with_context(
            dspy_mod,
            description=self.instructions or agent.description,
            input_schema=input_model,
            output_schema=output_model,
            has_context=has_context,
        )

        sys_desc = self._system_description(self.instructions or agent.description)

        # Pre-generate the artifact ID so it's available from the start
        from uuid import uuid4

        pre_generated_artifact_id = uuid4()

        # Build execution payload with context
        if has_context:
            execution_payload = {
                "input": validated_input,
                "context": context_history,
            }
        else:
            # Backwards compatible - direct input
            execution_payload = validated_input

        # Merge native tools with MCP tools
        native_tools = list(agent.tools or [])

        # Lazy-load MCP tools for this agent
        try:
            mcp_tools = await agent._get_mcp_tools(ctx)
            logger.debug(f"Loaded {len(mcp_tools)} MCP tools for agent {agent.name}")
        except Exception as e:
            # Architecture Decision: AD007 - Graceful Degradation
            # If MCP loading fails, continue with native tools only
            logger.error(f"Failed to load MCP tools in engine: {e}", exc_info=True)
            mcp_tools = []

        # Combine both lists
        # Architecture Decision: AD003 - MCP tools are namespaced, so no conflicts
        combined_tools = native_tools + mcp_tools
        logger.debug(
            f"Total tools for agent {agent.name}: {len(combined_tools)} (native: {len(native_tools)}, mcp: {len(mcp_tools)})"
        )

        with dspy_mod.context(lm=lm):
            program = self._choose_program(dspy_mod, signature, combined_tools)

            # Detect if there's already an active Rich Live context
            should_stream = self.stream
            orchestrator = getattr(ctx, "orchestrator", None)
            if orchestrator:
                is_dashboard = getattr(orchestrator, "is_dashboard", False) if ctx else False
                # If dashboard, we always stream; the streaming queue is only for CLI output
                if should_stream and ctx and not is_dashboard:
                    if not hasattr(orchestrator, "_active_streams"):
                        orchestrator._active_streams = 0

                    if orchestrator._active_streams > 0:
                        should_stream = False
                    else:
                        orchestrator._active_streams += 1

            try:
                if should_stream:
                    (
                        raw_result,
                        _stream_final_display_data,
                    ) = await self._execute_streaming(
                        dspy_mod,
                        program,
                        signature,
                        description=sys_desc,
                        payload=execution_payload,
                        agent=agent,
                        ctx=ctx,
                        pre_generated_artifact_id=pre_generated_artifact_id,
                    )
                    if not self.no_output and ctx:
                        ctx.state["_flock_stream_live_active"] = True
                else:
                    orchestrator = getattr(ctx, "orchestrator", None) if ctx else None

                    raw_result = await self._execute_standard(
                        dspy_mod,
                        program,
                        description=sys_desc,
                        payload=execution_payload,
                    )
                    if ctx and orchestrator and getattr(orchestrator, "_active_streams", 0) > 0:
                        ctx.state["_flock_output_queued"] = True
            finally:
                if should_stream and ctx:
                    if orchestrator is None:
                        orchestrator = getattr(ctx, "orchestrator", None)
                    if orchestrator and hasattr(orchestrator, "_active_streams"):
                        orchestrator._active_streams = max(0, orchestrator._active_streams - 1)

        normalized_output = self._normalize_output_payload(getattr(raw_result, "output", None))
        artifacts, errors = self._materialize_artifacts(
            normalized_output,
            agent.outputs,
            agent.name,
            pre_generated_id=pre_generated_artifact_id,
        )

        state = dict(inputs.state)
        state.setdefault("dspy", {})
        state["dspy"].update({"model": model_name, "raw": normalized_output})

        logs: list[str] = []
        if normalized_output is not None:
            try:
                logs.append(f"dspy.output={json.dumps(normalized_output)}")
            except TypeError:
                logs.append(f"dspy.output={normalized_output!r}")
        logs.extend(f"dspy.error={message}" for message in errors)

        result_artifacts = artifacts if artifacts else list(inputs.artifacts)
        return EvalResult(artifacts=result_artifacts, state=state, logs=logs)

    # ------------------------------------------------------------------
    # Helpers mirroring the design engine

    def _resolve_model_name(self) -> str:
        model = self.model or os.getenv("TRELLIS_MODEL") or os.getenv("OPENAI_MODEL")
        if not model:
            raise NotImplementedError(
                "DSPyEngine requires a configured model (set TRELLIS_MODEL, OPENAI_MODEL, or pass model=...)."
            )
        return model

    def _import_dspy(self):  # pragma: no cover - import guarded by optional dependency
        try:
            import dspy
        except Exception as exc:
            raise NotImplementedError("DSPy is not installed or failed to import.") from exc
        return dspy

    def _select_primary_artifact(self, artifacts: Sequence[Artifact]) -> Artifact:
        return artifacts[-1]

    def _resolve_input_model(self, artifact: Artifact) -> type[BaseModel] | None:
        try:
            return type_registry.resolve(artifact.type)
        except KeyError:
            return None

    def _resolve_output_model(self, agent) -> type[BaseModel] | None:
        if not getattr(agent, "outputs", None):
            return None
        return agent.outputs[0].spec.model

    def _validate_input_payload(
        self,
        schema: type[BaseModel] | None,
        payload: Mapping[str, Any] | None,
    ) -> dict[str, Any]:
        data = dict(payload or {})
        if schema is None:
            return data
        try:
            return schema(**data).model_dump()
        except Exception:
            return data

    def _prepare_signature_with_context(
        self,
        dspy_mod,
        *,
        description: str | None,
        input_schema: type[BaseModel] | None,
        output_schema: type[BaseModel] | None,
        has_context: bool = False,
    ) -> Any:
        """Prepare DSPy signature, optionally including context field."""
        fields = {
            "description": (str, dspy_mod.InputField()),
        }

        # Add context field if we have conversation history
        if has_context:
            fields["context"] = (
                list,
                dspy_mod.InputField(
                    desc="Previous conversation artifacts providing context for this request"
                ),
            )

        fields["input"] = (input_schema or dict, dspy_mod.InputField())
        fields["output"] = (output_schema or dict, dspy_mod.OutputField())

        signature = dspy_mod.Signature(fields)

        instruction = description or "Produce a valid output that matches the 'output' schema."
        if has_context:
            instruction += " Consider the conversation context provided to inform your response."
        instruction += " Return only JSON."

        return signature.with_instructions(instruction)

    def _choose_program(self, dspy_mod, signature, tools: Iterable[Any]):
        tools_list = list(tools or [])
        try:
            if tools_list:
                return dspy_mod.ReAct(signature, tools=tools_list, max_iters=self.max_tool_calls)
            return dspy_mod.Predict(signature)
        except Exception:
            return dspy_mod.Predict(signature)

    def _system_description(self, description: str | None) -> str:
        if description:
            return description
        return "Produce a valid output that matches the 'output' schema. Return only JSON."

    def _normalize_output_payload(self, raw: Any) -> dict[str, Any]:
        if isinstance(raw, BaseModel):
            return raw.model_dump()
        if isinstance(raw, str):
            try:
                return json.loads(raw)
            except json.JSONDecodeError:
                return {"text": raw}
        if isinstance(raw, Mapping):
            return dict(raw)
        return {"value": raw}

    def _materialize_artifacts(
        self,
        payload: dict[str, Any],
        outputs: Iterable[Any],
        produced_by: str,
        pre_generated_id: Any = None,
    ):
        artifacts: list[Artifact] = []
        errors: list[str] = []
        for output in outputs or []:
            model_cls = output.spec.model
            data = self._select_output_payload(payload, model_cls, output.spec.type_name)
            try:
                instance = model_cls(**data)
            except Exception as exc:  # noqa: BLE001 - collect validation errors for logs
                errors.append(str(exc))
                continue

            # Use the pre-generated ID if provided (for streaming), otherwise let Artifact auto-generate
            artifact_kwargs = {
                "type": output.spec.type_name,
                "payload": instance.model_dump(),
                "produced_by": produced_by,
            }
            if pre_generated_id is not None:
                artifact_kwargs["id"] = pre_generated_id

            artifacts.append(Artifact(**artifact_kwargs))
        return artifacts, errors

    def _select_output_payload(
        self,
        payload: Mapping[str, Any],
        model_cls: type[BaseModel],
        type_name: str,
    ) -> dict[str, Any]:
        candidates = [
            payload.get(type_name),
            payload.get(model_cls.__name__),
            payload.get(model_cls.__name__.lower()),
        ]
        for candidate in candidates:
            if isinstance(candidate, Mapping):
                return dict(candidate)
        if isinstance(payload, Mapping):
            return dict(payload)
        return {}

    async def _execute_standard(
        self, dspy_mod, program, *, description: str, payload: dict[str, Any]
    ) -> Any:
        """Execute DSPy program in standard mode (no streaming)."""
        # Handle new format: {"input": ..., "context": ...}
        if isinstance(payload, dict) and "input" in payload:
            return program(
                description=description,
                input=payload["input"],
                context=payload.get("context", []),
            )

        # Handle old format: direct payload (backwards compatible)
        return program(description=description, input=payload, context=[])

    async def _execute_streaming(
        self,
        dspy_mod,
        program,
        signature,
        *,
        description: str,
        payload: dict[str, Any],
        agent: Any,
        ctx: Any = None,
        pre_generated_artifact_id: Any = None,
    ) -> Any:
        """Execute DSPy program in streaming mode with Rich table updates."""
        from rich.console import Console
        from rich.live import Live

        console = Console()

        # Get WebSocketManager for frontend streaming
        ws_manager = None
        if ctx:
            orchestrator = getattr(ctx, "orchestrator", None)
            if orchestrator:
                collector = getattr(orchestrator, "_dashboard_collector", None)
                if collector:
                    ws_manager = getattr(collector, "_websocket_manager", None)

        # Prepare stream listeners for output field
        listeners = []
        try:
            streaming_mod = getattr(dspy_mod, "streaming", None)
            if streaming_mod and hasattr(streaming_mod, "StreamListener"):
                for name, field in signature.output_fields.items():
                    if field.annotation is str:
                        listeners.append(streaming_mod.StreamListener(signature_field_name=name))
        except Exception:
            listeners = []

        streaming_task = dspy_mod.streamify(
            program,
            is_async_program=True,
            stream_listeners=listeners if listeners else None,
        )

        # Handle new format vs old format
        if isinstance(payload, dict) and "input" in payload:
            stream_generator = streaming_task(
                description=description,
                input=payload["input"],
                context=payload.get("context", []),
            )
        else:
            # Old format - backwards compatible
            stream_generator = streaming_task(description=description, input=payload, context=[])

        signature_order = []
        status_field = self.status_output_field
        try:
            signature_order = list(signature.output_fields.keys())
        except Exception:
            signature_order = []

        # Initialize display data in full artifact format (matching OutputUtilityComponent display)
        display_data: OrderedDict[str, Any] = OrderedDict()

        # Use the pre-generated artifact ID that was created before execution started
        display_data["id"] = str(pre_generated_artifact_id)

        # Get the output type from agent configuration
        output_type = "output"
        if hasattr(agent, "outputs") and agent.outputs:
            output_type = agent.outputs[0].spec.type_name

        display_data["type"] = output_type
        display_data["payload"] = OrderedDict()

        # Add output fields to payload section
        for field_name in signature_order:
            if field_name != "description":  # Skip description field
                display_data["payload"][field_name] = ""

        display_data["produced_by"] = agent.name
        display_data["correlation_id"] = (
            str(ctx.correlation_id) if ctx and ctx.correlation_id else None
        )
        display_data["partition_key"] = None
        display_data["tags"] = "set()"
        display_data["visibility"] = OrderedDict([("kind", "Public")])
        display_data["created_at"] = "streaming..."
        display_data["version"] = 1
        display_data["status"] = status_field

        stream_buffers: defaultdict[str, list[str]] = defaultdict(list)
        stream_buffers[status_field] = []
        stream_sequence = 0  # Monotonic sequence for ordering

        formatter = theme_dict = styles = agent_label = None
        live_cm = nullcontext()
        overflow_mode = self.stream_vertical_overflow

        if not self.no_output:
            _ensure_live_crop_above()
            (
                formatter,
                theme_dict,
                styles,
                agent_label,
            ) = self._prepare_stream_formatter(agent)
            initial_panel = formatter.format_result(display_data, agent_label, theme_dict, styles)
            live_cm = Live(
                initial_panel,
                console=console,
                refresh_per_second=4,
                transient=False,
                vertical_overflow=overflow_mode,
            )

        final_result: Any = None

        with live_cm as live:

            def _refresh_panel() -> None:
                if formatter is None or live is None:
                    return
                live.update(formatter.format_result(display_data, agent_label, theme_dict, styles))

            async for value in stream_generator:
                try:
                    from dspy.streaming import StatusMessage, StreamResponse
                    from litellm import ModelResponseStream
                except Exception:
                    StatusMessage = object  # type: ignore
                    StreamResponse = object  # type: ignore
                    ModelResponseStream = object  # type: ignore

                if isinstance(value, StatusMessage):
                    token = getattr(value, "message", "")
                    if token:
                        stream_buffers[status_field].append(str(token) + "\n")
                        display_data["status"] = "".join(stream_buffers[status_field])

                    # Emit to WebSocket
                    if ws_manager and token:
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
                                if ctx and ctx.correlation_id
                                else "",
                                agent_name=agent.name,
                                run_id=ctx.task_id if ctx else "",
                                output_type="llm_token",
                                content=str(token + "\n"),
                                sequence=stream_sequence,
                                is_final=False,
                            )
                            await ws_manager.broadcast(event)
                            stream_sequence += 1
                        except Exception as e:
                            logger.warning(f"Failed to emit streaming event: {e}")

                    if formatter is not None:
                        _refresh_panel()
                    continue

                if isinstance(value, StreamResponse):
                    token = getattr(value, "chunk", None)
                    signature_field = getattr(value, "signature_field_name", None)
                    if signature_field and signature_field != "description":
                        # Update payload section - accumulate in "output" buffer
                        buffer_key = f"_stream_{signature_field}"
                        if token:
                            stream_buffers[buffer_key].append(str(token))
                            # Show streaming text in payload
                            display_data["payload"]["_streaming"] = "".join(
                                stream_buffers[buffer_key]
                            )

                    # Emit to WebSocket
                    if ws_manager:
                        logger.info(
                            f"[STREAMING] Emitting StreamResponse token='{token}', sequence={stream_sequence}"
                        )
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
                                if ctx and ctx.correlation_id
                                else "",
                                agent_name=agent.name,
                                run_id=ctx.task_id if ctx else "",
                                output_type="llm_token",
                                content=str(token),
                                sequence=stream_sequence,
                                is_final=False,
                            )
                            await ws_manager.broadcast(event)
                            stream_sequence += 1
                        except Exception as e:
                            logger.warning(f"Failed to emit streaming event: {e}")

                    if formatter is not None:
                        _refresh_panel()
                    continue

                if isinstance(value, ModelResponseStream):
                    chunk = value
                    token = chunk.choices[0].delta.content or ""
                    signature_field = getattr(value, "signature_field_name", None)

                    # Determine output type based on signature field
                    output_type = "llm_token"  # if signature_field and signature_field != "description" else "log"

                    if signature_field and signature_field != "description":
                        # Update payload section - accumulate in buffer
                        buffer_key = f"_stream_{signature_field}"
                        if token:
                            stream_buffers[buffer_key].append(str(token))
                            # Show streaming text in payload
                            display_data["payload"]["_streaming"] = "".join(
                                stream_buffers[buffer_key]
                            )
                    elif token:
                        stream_buffers[status_field].append(str(token))
                        display_data["status"] = "".join(stream_buffers[status_field])

                    # Emit to WebSocket
                    if ws_manager and token:
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
                                if ctx and ctx.correlation_id
                                else "",
                                agent_name=agent.name,
                                run_id=ctx.task_id if ctx else "",
                                output_type=output_type,
                                content=str(token),
                                sequence=stream_sequence,
                                is_final=False,
                            )
                            await ws_manager.broadcast(event)
                            stream_sequence += 1
                        except Exception as e:
                            logger.warning(f"Failed to emit streaming event: {e}")

                    if formatter is not None:
                        _refresh_panel()
                    continue

                if isinstance(value, dspy_mod.Prediction):
                    final_result = value

                    # Emit final streaming event
                    if ws_manager:
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
                                if ctx and ctx.correlation_id
                                else "",
                                agent_name=agent.name,
                                run_id=ctx.task_id if ctx else "",
                                output_type="log",
                                content="\nAmount of output tokens: " + str(stream_sequence),
                                sequence=stream_sequence,
                                is_final=True,  # Mark as final
                            )
                            await ws_manager.broadcast(event)
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
                                if ctx and ctx.correlation_id
                                else "",
                                agent_name=agent.name,
                                run_id=ctx.task_id if ctx else "",
                                output_type="log",
                                content="--- End of output ---",
                                sequence=stream_sequence,
                                is_final=True,  # Mark as final
                            )
                            await ws_manager.broadcast(event)
                        except Exception as e:
                            logger.warning(f"Failed to emit final streaming event: {e}")

                    if formatter is not None:
                        # Update payload section with final values
                        payload_data = OrderedDict()
                        for field_name in signature_order:
                            if field_name != "description" and hasattr(final_result, field_name):
                                field_value = getattr(final_result, field_name)
                                # If the field is a BaseModel, unwrap it to dict
                                if isinstance(field_value, BaseModel):
                                    payload_data.update(field_value.model_dump())
                                else:
                                    payload_data[field_name] = field_value

                        # Update all fields with actual values
                        display_data["payload"].clear()
                        display_data["payload"].update(payload_data)

                        # Update timestamp
                        from datetime import datetime, timezone

                        display_data["created_at"] = datetime.now(timezone.utc).isoformat()

                        # Remove status field from display
                        display_data.pop("status", None)
                        _refresh_panel()

        if final_result is None:
            raise RuntimeError("Streaming did not yield a final prediction.")

        # Return both the result and the display data for final ID update
        return final_result, (formatter, display_data, theme_dict, styles, agent_label)

    def _prepare_stream_formatter(
        self, agent: Any
    ) -> tuple[Any, dict[str, Any], dict[str, Any], str]:
        """Build formatter + theme metadata for streaming tables."""
        import pathlib

        from flock.logging.formatters.themed_formatter import (
            ThemedAgentResultFormatter,
            create_pygments_syntax_theme,
            get_default_styles,
            load_syntax_theme_from_file,
            load_theme_from_file,
        )

        themes_dir = pathlib.Path(__file__).resolve().parents[1] / "themes"
        theme_filename = self.theme
        if not theme_filename.endswith(".toml"):
            theme_filename = f"{theme_filename}.toml"
        theme_path = themes_dir / theme_filename

        try:
            theme_dict = load_theme_from_file(theme_path)
        except Exception:
            fallback_path = themes_dir / "afterglow.toml"
            theme_dict = load_theme_from_file(fallback_path)
            theme_path = fallback_path

        from flock.logging.formatters.themes import OutputTheme

        formatter = ThemedAgentResultFormatter(theme=OutputTheme.afterglow)
        styles = get_default_styles(theme_dict)
        formatter.styles = styles

        try:
            syntax_theme = load_syntax_theme_from_file(theme_path)
            formatter.syntax_style = create_pygments_syntax_theme(syntax_theme)
        except Exception:
            formatter.syntax_style = None

        model_label = self.model or ""
        agent_label = agent.name if not model_label else f"{agent.name} - {model_label}"

        return formatter, theme_dict, styles, agent_label

    def _print_final_stream_display(
        self,
        stream_display_data: tuple[Any, OrderedDict, dict, dict, str],
        artifact_id: str,
        artifact: Artifact,
    ) -> None:
        """Print the final streaming display with the real artifact ID."""
        from rich.console import Console

        formatter, display_data, theme_dict, styles, agent_label = stream_display_data

        # Update display_data with the real artifact information
        display_data["id"] = artifact_id
        display_data["created_at"] = artifact.created_at.isoformat()

        # Update all artifact metadata
        display_data["correlation_id"] = (
            str(artifact.correlation_id) if artifact.correlation_id else None
        )
        display_data["partition_key"] = artifact.partition_key
        display_data["tags"] = "set()" if not artifact.tags else f"set({list(artifact.tags)})"

        # Print the final panel
        console = Console()
        final_panel = formatter.format_result(display_data, agent_label, theme_dict, styles)
        console.print(final_panel)


__all__ = ["DSPyEngine"]


# Apply the Rich Live patch when this module is imported
_apply_live_patch_on_import()
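
A minimal usage sketch: the values below are illustrative, based only on the fields and environment-variable fallbacks visible in the engine above; how the engine is wired into an agent is defined in flock/agent.py, which is not reproduced here.

# Hypothetical configuration sketch: field names come from the DSPyEngine definition above.
from flock.engines.dspy_engine import DSPyEngine

engine = DSPyEngine(
    model="openai/gpt-4o-mini",  # illustrative; if omitted, TRELLIS_MODEL or OPENAI_MODEL is read from the environment
    temperature=0.7,
    max_tokens=8000,
    stream=False,        # streaming defaults to on outside pytest
    enable_cache=True,   # forwarded to dspy.LM(cache=...)
)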