tactus 0.31.0__py3-none-any.whl → 0.34.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- tactus/__init__.py +1 -1
- tactus/adapters/__init__.py +18 -1
- tactus/adapters/broker_log.py +127 -34
- tactus/adapters/channels/__init__.py +153 -0
- tactus/adapters/channels/base.py +174 -0
- tactus/adapters/channels/broker.py +179 -0
- tactus/adapters/channels/cli.py +448 -0
- tactus/adapters/channels/host.py +225 -0
- tactus/adapters/channels/ipc.py +297 -0
- tactus/adapters/channels/sse.py +305 -0
- tactus/adapters/cli_hitl.py +223 -1
- tactus/adapters/control_loop.py +879 -0
- tactus/adapters/file_storage.py +35 -2
- tactus/adapters/ide_log.py +7 -1
- tactus/backends/http_backend.py +0 -1
- tactus/broker/client.py +31 -1
- tactus/broker/server.py +416 -92
- tactus/cli/app.py +270 -7
- tactus/cli/control.py +393 -0
- tactus/core/config_manager.py +33 -6
- tactus/core/dsl_stubs.py +102 -18
- tactus/core/execution_context.py +265 -8
- tactus/core/lua_sandbox.py +8 -9
- tactus/core/registry.py +19 -2
- tactus/core/runtime.py +235 -27
- tactus/docker/Dockerfile.pypi +49 -0
- tactus/docs/__init__.py +33 -0
- tactus/docs/extractor.py +326 -0
- tactus/docs/html_renderer.py +72 -0
- tactus/docs/models.py +121 -0
- tactus/docs/templates/base.html +204 -0
- tactus/docs/templates/index.html +58 -0
- tactus/docs/templates/module.html +96 -0
- tactus/dspy/agent.py +403 -22
- tactus/dspy/broker_lm.py +57 -6
- tactus/dspy/config.py +14 -3
- tactus/dspy/history.py +2 -1
- tactus/dspy/module.py +136 -11
- tactus/dspy/signature.py +0 -1
- tactus/ide/config_server.py +536 -0
- tactus/ide/server.py +345 -21
- tactus/primitives/human.py +619 -47
- tactus/primitives/system.py +0 -1
- tactus/protocols/__init__.py +25 -0
- tactus/protocols/control.py +427 -0
- tactus/protocols/notification.py +207 -0
- tactus/sandbox/container_runner.py +79 -11
- tactus/sandbox/docker_manager.py +23 -0
- tactus/sandbox/entrypoint.py +26 -0
- tactus/sandbox/protocol.py +3 -0
- tactus/stdlib/README.md +77 -0
- tactus/stdlib/__init__.py +27 -1
- tactus/stdlib/classify/__init__.py +165 -0
- tactus/stdlib/classify/classify.spec.tac +195 -0
- tactus/stdlib/classify/classify.tac +257 -0
- tactus/stdlib/classify/fuzzy.py +282 -0
- tactus/stdlib/classify/llm.py +319 -0
- tactus/stdlib/classify/primitive.py +287 -0
- tactus/stdlib/core/__init__.py +57 -0
- tactus/stdlib/core/base.py +320 -0
- tactus/stdlib/core/confidence.py +211 -0
- tactus/stdlib/core/models.py +161 -0
- tactus/stdlib/core/retry.py +171 -0
- tactus/stdlib/core/validation.py +274 -0
- tactus/stdlib/extract/__init__.py +125 -0
- tactus/stdlib/extract/llm.py +330 -0
- tactus/stdlib/extract/primitive.py +256 -0
- tactus/stdlib/tac/tactus/classify/base.tac +51 -0
- tactus/stdlib/tac/tactus/classify/fuzzy.tac +87 -0
- tactus/stdlib/tac/tactus/classify/index.md +77 -0
- tactus/stdlib/tac/tactus/classify/init.tac +29 -0
- tactus/stdlib/tac/tactus/classify/llm.tac +150 -0
- tactus/stdlib/tac/tactus/classify.spec.tac +191 -0
- tactus/stdlib/tac/tactus/extract/base.tac +138 -0
- tactus/stdlib/tac/tactus/extract/index.md +96 -0
- tactus/stdlib/tac/tactus/extract/init.tac +27 -0
- tactus/stdlib/tac/tactus/extract/llm.tac +201 -0
- tactus/stdlib/tac/tactus/extract.spec.tac +153 -0
- tactus/stdlib/tac/tactus/generate/base.tac +142 -0
- tactus/stdlib/tac/tactus/generate/index.md +195 -0
- tactus/stdlib/tac/tactus/generate/init.tac +28 -0
- tactus/stdlib/tac/tactus/generate/llm.tac +169 -0
- tactus/stdlib/tac/tactus/generate.spec.tac +210 -0
- tactus/testing/behave_integration.py +171 -7
- tactus/testing/context.py +0 -1
- tactus/testing/evaluation_runner.py +0 -1
- tactus/testing/gherkin_parser.py +0 -1
- tactus/testing/mock_hitl.py +0 -1
- tactus/testing/mock_tools.py +0 -1
- tactus/testing/models.py +0 -1
- tactus/testing/steps/builtin.py +0 -1
- tactus/testing/steps/custom.py +81 -22
- tactus/testing/steps/registry.py +0 -1
- tactus/testing/test_runner.py +7 -1
- tactus/validation/semantic_visitor.py +11 -5
- tactus/validation/validator.py +0 -1
- {tactus-0.31.0.dist-info → tactus-0.34.1.dist-info}/METADATA +16 -2
- {tactus-0.31.0.dist-info → tactus-0.34.1.dist-info}/RECORD +101 -49
- {tactus-0.31.0.dist-info → tactus-0.34.1.dist-info}/WHEEL +0 -0
- {tactus-0.31.0.dist-info → tactus-0.34.1.dist-info}/entry_points.txt +0 -0
- {tactus-0.31.0.dist-info → tactus-0.34.1.dist-info}/licenses/LICENSE +0 -0
tactus/ide/server.py
CHANGED
```diff
@@ -25,6 +25,17 @@ logger = logging.getLogger(__name__)
 # Workspace state
 WORKSPACE_ROOT = None
 
+# Global cache clearing function - set by create_app()
+_clear_runtime_caches_fn = None
+
+
+def clear_runtime_caches():
+    """Clear cached runtime instances. Must be called after create_app() initializes."""
+    if _clear_runtime_caches_fn:
+        _clear_runtime_caches_fn()
+    else:
+        logger.warning("clear_runtime_caches called but no implementation set")
+
 
 class TactusLSPHandler:
     """LSP handler for Tactus DSL."""
@@ -743,14 +754,57 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
         )
         storage_backend = FileStorage(storage_dir=storage_dir)
 
-        #
+        # Load configuration cascade for this procedure
+        from tactus.core.config_manager import ConfigManager
+
+        config_manager = ConfigManager()
+        merged_config = config_manager.load_cascade(path)
+
+        # Extract API keys and other config values
+        openai_api_key = (
+            merged_config.get("openai", {}).get("api_key")
+            if isinstance(merged_config.get("openai"), dict)
+            else merged_config.get("openai_api_key")
+        ) or os.environ.get("OPENAI_API_KEY")
+
+        tool_paths = merged_config.get("tool_paths")
+        mcp_servers = merged_config.get("mcp_servers", {})
+
+        # Create HITL handler with SSE channel for IDE integration
+        from tactus.adapters.control_loop import (
+            ControlLoopHandler,
+            ControlLoopHITLAdapter,
+        )
+        from tactus.adapters.channels import load_default_channels
+
+        # Load default channels (CLI + IPC) and add SSE channel
+        channels = load_default_channels(procedure_id=procedure_id)
+        sse_channel = get_sse_channel()
+
+        # Add SSE channel to the list
+        channels.append(sse_channel)
+
+        # Create control loop handler with all channels
+        control_handler = ControlLoopHandler(
+            channels=channels,
+            storage=storage_backend,
+        )
+
+        # Wrap in adapter for backward compatibility
+        hitl_handler = ControlLoopHITLAdapter(control_handler)
+
+        # Create runtime with log handler, run_id, and loaded config
         runtime = TactusRuntime(
            procedure_id=procedure_id,
            storage_backend=storage_backend,
-           hitl_handler=
+           hitl_handler=hitl_handler,  # Now includes SSE channel!
            log_handler=log_handler,
            run_id=run_id,
            source_file_path=str(path),
+           openai_api_key=openai_api_key,
+           tool_paths=tool_paths,
+           mcp_servers=mcp_servers,
+           external_config=merged_config,
         )
 
         # Read procedure source
@@ -799,6 +853,78 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
         # Capture inputs in closure scope for the thread
         procedure_inputs = inputs
 
+        async def handle_container_control_request(request_data: dict) -> dict:
+            """
+            Bridge container HITL requests to host's SSE channel.
+
+            This handler is called by the broker when the container sends
+            a control.request. It forwards the request to the SSE channel,
+            waits for the user response in the IDE, and returns the response
+            data back to the container.
+            """
+            import threading
+            from tactus.protocols.control import ControlRequest
+
+            # Parse the request
+            request = ControlRequest.model_validate(request_data)
+            logger.info(
+                f"[HITL] Container control request {request.request_id} "
+                f"for procedure {request.procedure_id}"
+            )
+
+            # Get SSE channel
+            sse_channel = get_sse_channel()
+
+            # Create a threading event to wait for response
+            response_event = threading.Event()
+            response_data = {}
+
+            # Register pending request
+            _pending_hitl_requests[request.request_id] = {
+                "event": response_event,
+                "response": response_data,
+            }
+
+            try:
+                # Send to SSE channel (delivers to IDE UI)
+                delivery = await sse_channel.send(request)
+                if not delivery.success:
+                    raise RuntimeError(
+                        f"Failed to deliver HITL request to IDE: {delivery.error_message}"
+                    )
+
+                logger.info(
+                    f"[HITL] Request {request.request_id} delivered to IDE, waiting for response..."
+                )
+
+                # Wait for response (with timeout) - run blocking wait in thread pool
+                timeout_seconds = request.timeout_seconds or 300  # 5 min default
+                logger.info(
+                    f"[HITL] Starting wait for response (timeout={timeout_seconds}s)..."
+                )
+                result = await asyncio.to_thread(
+                    response_event.wait, timeout=timeout_seconds
+                )
+                logger.info(f"[HITL] Wait completed, result={result}")
+
+                if result:
+                    logger.info(
+                        f"[HITL] Received response for {request.request_id}: "
+                        f"{response_data.get('value')}"
+                    )
+                    return response_data
+                else:
+                    # Timeout
+                    logger.warning(f"[HITL] Timeout for {request.request_id}")
+                    return {
+                        "value": request.default_value,
+                        "timed_out": True,
+                        "channel_id": "sse",
+                    }
+            finally:
+                # Clean up pending request
+                _pending_hitl_requests.pop(request.request_id, None)
+
         def run_procedure():
             try:
                 # Create new event loop for this thread
@@ -808,6 +934,13 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
                 if use_sandbox:
                     # Use sandbox execution (events streamed via broker over UDS)
                     runner = ContainerRunner(sandbox_config)
+
+                    # Pass async control handler directly (broker calls it in async context)
+                    # Build LLM backend config (provider-agnostic)
+                    llm_backend_config = {}
+                    if openai_api_key:
+                        llm_backend_config["openai_api_key"] = openai_api_key
+
                     exec_result = loop.run_until_complete(
                         runner.run(
                             source=source,
@@ -817,6 +950,9 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
                             event_handler=(
                                 sandbox_event_queue.put if sandbox_event_queue else None
                             ),
+                            run_id=run_id,
+                            control_handler=handle_container_control_request,
+                            llm_backend_config=llm_backend_config,
                         )
                     )
 
@@ -857,19 +993,31 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
                 yield f"data: {json.dumps(container_running_event)}\n\n"
 
                 # Stream events based on execution mode
+                # Poll aggressively to stream events in real-time
                 while not result_container["done"]:
+                    events_sent = False
+
                     if use_sandbox and sandbox_event_queue:
                         # Stream from sandbox callback queue
                         try:
-                            event_dict = sandbox_event_queue.get(timeout=0.
+                            event_dict = sandbox_event_queue.get(timeout=0.01)
                             all_events.append(event_dict)
                             yield f"data: {json.dumps(event_dict)}\n\n"
+                            events_sent = True
                         except queue.Empty:
                             pass
+
+                        # Also check for HITL events from SSE channel (container HITL)
+                        hitl_event = sse_channel.get_next_event(timeout=0.001)
+                        if hitl_event:
+                            all_events.append(hitl_event)
+                            yield f"data: {json.dumps(hitl_event)}\n\n"
+                            events_sent = True
                     else:
                         # Stream from IDELogHandler (direct execution)
-
-
+                        # Get one event at a time to stream immediately
+                        try:
+                            event = log_handler.events.get(timeout=0.001)
                             try:
                                 # Serialize with ISO format for datetime
                                 event_dict = event.model_dump(mode="json")
@@ -884,22 +1032,43 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
                                 event_dict["timestamp"] = iso_string
                                 all_events.append(event_dict)
                                 yield f"data: {json.dumps(event_dict)}\n\n"
+                                events_sent = True
                             except Exception as e:
                                 logger.error(f"Error serializing event: {e}", exc_info=True)
                                 logger.error(f"Event type: {type(event)}, Event: {event}")
+                        except queue.Empty:
+                            pass
 
-
+                        # Also check for HITL events from SSE channel
+                        hitl_event = sse_channel.get_next_event(timeout=0.001)
+                        if hitl_event:
+                            all_events.append(hitl_event)
+                            yield f"data: {json.dumps(hitl_event)}\n\n"
+                            events_sent = True
+
+                    # Only sleep if no events were sent to maintain responsiveness
+                    if not events_sent:
+                        time.sleep(0.01)
 
                 # Get any remaining events
                 if use_sandbox and sandbox_event_queue:
-                    # Drain sandbox event queue
-
+                    # Drain sandbox event queue with retries to catch late-arriving events
+                    # Agent streaming events and ExecutionSummaryEvent may still be in flight
+                    max_wait = 2.0  # Wait up to 2 seconds for final events
+                    poll_interval = 0.05  # Poll every 50ms
+                    elapsed = 0.0
+                    consecutive_empty = 0
+                    max_consecutive_empty = 4  # Stop after 4 empty polls (200ms of no events)
+
+                    while elapsed < max_wait and consecutive_empty < max_consecutive_empty:
                         try:
-                            event_dict = sandbox_event_queue.
+                            event_dict = sandbox_event_queue.get(timeout=poll_interval)
                             all_events.append(event_dict)
                             yield f"data: {json.dumps(event_dict)}\n\n"
+                            consecutive_empty = 0  # Reset counter when we get an event
                         except queue.Empty:
-
+                            consecutive_empty += 1
+                            elapsed += poll_interval
 
                 # Emit container stopped event
                 container_stopped_event = {
@@ -1832,6 +2001,34 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
             logger.error(f"Error getting checkpoint {run_id}@{position}: {e}", exc_info=True)
             return jsonify({"error": str(e)}), 500
 
+    @app.route("/api/procedures/<procedure_id>/checkpoints", methods=["DELETE"])
+    def clear_checkpoints(procedure_id: str):
+        """Clear all checkpoints for a procedure to force fresh execution."""
+        try:
+            from pathlib import Path as PathLib
+            import os
+
+            # Build the checkpoint file path
+            storage_dir = (
+                PathLib(WORKSPACE_ROOT) / ".tac" / "storage"
+                if WORKSPACE_ROOT
+                else PathLib.home() / ".tactus" / "storage"
+            )
+            checkpoint_file = storage_dir / f"{procedure_id}.json"
+
+            if checkpoint_file.exists():
+                os.remove(checkpoint_file)
+                logger.info(f"Cleared checkpoints for procedure: {procedure_id}")
+                return jsonify(
+                    {"success": True, "message": f"Checkpoints cleared for {procedure_id}"}
+                )
+            else:
+                return jsonify({"success": True, "message": "No checkpoints found"}), 200
+
+        except Exception as e:
+            logger.error(f"Error clearing checkpoints for {procedure_id}: {e}", exc_info=True)
+            return jsonify({"error": str(e)}), 500
+
     @app.route("/api/traces/runs/<run_id>/statistics", methods=["GET"])
     def get_run_statistics(run_id: str):
         """Get statistics for a run by filtering checkpoints by run_id."""
@@ -1940,6 +2137,17 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
             raise
         return coding_assistant
 
+    def _clear_caches_impl():
+        """Clear cached runtime instances (e.g., after config changes)."""
+        nonlocal coding_assistant
+        if coding_assistant is not None:
+            logger.info("Clearing coding assistant cache")
+            coding_assistant = None
+
+    # Set the global cache clearing function
+    global _clear_runtime_caches_fn
+    _clear_runtime_caches_fn = _clear_caches_impl
+
     @app.route("/api/chat", methods=["POST"])
     def chat_message():
         """Handle chat messages from the user."""
@@ -2147,22 +2355,138 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
 
     # Register config API routes
     try:
-        import
-
-        # Add tactus-ide/backend to path for imports
-        # Path from tactus/ide/server.py -> project root -> tactus-ide/backend
-        backend_dir = Path(__file__).parent.parent.parent / "tactus-ide" / "backend"
-        if backend_dir.exists():
-            sys.path.insert(0, str(backend_dir))
-            from config_server import register_config_routes
+        from tactus.ide.config_server import register_config_routes
 
-
-        else:
-            logger.warning(f"Config server backend directory not found: {backend_dir}")
+        register_config_routes(app)
     except ImportError as e:
         logger.warning(f"Could not register config routes: {e}")
 
     # Serve frontend if dist directory is provided
+    # =========================================================================
+    # HITL (Human-in-the-Loop) Control Channel Endpoints
+    # =========================================================================
+
+    # Global SSE channel instance (shared across requests)
+    _sse_channel = None
+    # Pending HITL requests (for container control handler)
+    _pending_hitl_requests: dict[str, dict] = {}
+
+    def get_sse_channel():
+        """Get or create the global SSE channel instance."""
+        nonlocal _sse_channel
+        if _sse_channel is None:
+            from tactus.adapters.channels.sse import SSEControlChannel
+
+            _sse_channel = SSEControlChannel()
+        return _sse_channel
+
+    @app.route("/api/hitl/response/<request_id>", methods=["POST"])
+    def hitl_response(request_id: str):
+        """
+        Handle HITL response from IDE.
+
+        Called when user responds to a HITL request in the IDE UI.
+        Pushes response to SSEControlChannel which forwards to control loop.
+
+        Request body:
+        - value: The response value (boolean, string, dict, etc.)
+        """
+        try:
+            data = request.json or {}
+            value = data.get("value")
+
+            logger.info(f"Received HITL response for {request_id}: {value}")
+
+            # Check if this is a container HITL request (pending in our dict)
+            if request_id in _pending_hitl_requests:
+                pending = _pending_hitl_requests[request_id]
+                pending["response"]["value"] = value
+                pending["response"]["timed_out"] = False
+                pending["response"]["channel_id"] = "sse"
+                pending["event"].set()  # Signal the waiting thread
+                logger.info(f"[HITL] Signaled container handler for {request_id}")
+            else:
+                # Push to SSE channel's response queue (for non-container HITL)
+                channel = get_sse_channel()
+                channel.handle_ide_response(request_id, value)
+
+            return jsonify({"status": "ok", "request_id": request_id})
+
+        except Exception as e:
+            logger.exception(f"Error handling HITL response for {request_id}")
+            return jsonify({"status": "error", "message": str(e)}), 400
+
+    @app.route("/api/hitl/stream", methods=["GET"])
+    def hitl_stream():
+        """
+        SSE stream for HITL requests.
+
+        Clients connect to this endpoint to receive hitl.request events
+        in real-time. Events include:
+        - hitl.request: New HITL request with full context
+        - hitl.cancel: Request cancelled (another channel responded)
+        """
+        logger.info("[HITL-SSE] Client connected to /api/hitl/stream")
+
+        def generate():
+            """Generator that yields SSE events from the channel."""
+            import asyncio
+            import json
+
+            channel = get_sse_channel()
+
+            # Create event loop for this thread
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
+            try:
+                # Send initial connection event
+                connection_event = {
+                    "type": "connection",
+                    "status": "connected",
+                    "timestamp": datetime.utcnow().isoformat() + "Z",
+                }
+                logger.info("[HITL-SSE] Sending connection event to client")
+                yield f"data: {json.dumps(connection_event)}\n\n"
+
+                # Stream events from channel
+                while True:
+                    # Get next event from channel (non-blocking with timeout)
+                    event = loop.run_until_complete(channel.get_next_event())
+
+                    if event:
+                        logger.info(
+                            f"[HITL-SSE] Sending event to client: {event.get('type', 'unknown')}"
+                        )
+                        yield f"data: {json.dumps(event)}\n\n"
+                    else:
+                        # Send keepalive comment every second if no events
+                        yield ": keepalive\n\n"
+                        import time
+
+                        time.sleep(1)
+
+            except GeneratorExit:
+                logger.info("HITL SSE client disconnected")
+            except Exception as e:
+                logger.error(f"Error in HITL SSE stream: {e}", exc_info=True)
+            finally:
+                loop.close()
+
+        return Response(
+            stream_with_context(generate()),
+            mimetype="text/event-stream",
+            headers={
+                "Cache-Control": "no-cache",
+                "X-Accel-Buffering": "no",
+                "Connection": "keep-alive",
+            },
+        )
+
+    # =========================================================================
+    # Frontend Serving (if enabled)
+    # =========================================================================
+
     if frontend_dist_dir:
 
         @app.route("/")
```