tactus 0.34.1-py3-none-any.whl → 0.35.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. tactus/__init__.py +1 -1
  2. tactus/adapters/broker_log.py +17 -14
  3. tactus/adapters/channels/__init__.py +17 -15
  4. tactus/adapters/channels/base.py +16 -7
  5. tactus/adapters/channels/broker.py +43 -13
  6. tactus/adapters/channels/cli.py +19 -15
  7. tactus/adapters/channels/host.py +40 -25
  8. tactus/adapters/channels/ipc.py +82 -31
  9. tactus/adapters/channels/sse.py +41 -23
  10. tactus/adapters/cli_hitl.py +19 -19
  11. tactus/adapters/cli_log.py +4 -4
  12. tactus/adapters/control_loop.py +138 -99
  13. tactus/adapters/cost_collector_log.py +9 -9
  14. tactus/adapters/file_storage.py +56 -52
  15. tactus/adapters/http_callback_log.py +23 -13
  16. tactus/adapters/ide_log.py +17 -9
  17. tactus/adapters/lua_tools.py +4 -5
  18. tactus/adapters/mcp.py +16 -19
  19. tactus/adapters/mcp_manager.py +46 -30
  20. tactus/adapters/memory.py +9 -9
  21. tactus/adapters/plugins.py +42 -42
  22. tactus/broker/client.py +75 -78
  23. tactus/broker/protocol.py +57 -57
  24. tactus/broker/server.py +252 -197
  25. tactus/cli/app.py +3 -1
  26. tactus/cli/control.py +2 -2
  27. tactus/core/config_manager.py +181 -135
  28. tactus/core/dependencies/registry.py +66 -48
  29. tactus/core/dsl_stubs.py +222 -163
  30. tactus/core/exceptions.py +10 -1
  31. tactus/core/execution_context.py +152 -112
  32. tactus/core/lua_sandbox.py +72 -64
  33. tactus/core/message_history_manager.py +138 -43
  34. tactus/core/mocking.py +41 -27
  35. tactus/core/output_validator.py +49 -44
  36. tactus/core/registry.py +94 -80
  37. tactus/core/runtime.py +211 -176
  38. tactus/core/template_resolver.py +16 -16
  39. tactus/core/yaml_parser.py +55 -45
  40. tactus/docs/extractor.py +7 -6
  41. tactus/ide/server.py +119 -78
  42. tactus/primitives/control.py +10 -6
  43. tactus/primitives/file.py +48 -46
  44. tactus/primitives/handles.py +47 -35
  45. tactus/primitives/host.py +29 -27
  46. tactus/primitives/human.py +154 -137
  47. tactus/primitives/json.py +22 -23
  48. tactus/primitives/log.py +26 -26
  49. tactus/primitives/message_history.py +285 -31
  50. tactus/primitives/model.py +15 -9
  51. tactus/primitives/procedure.py +86 -64
  52. tactus/primitives/procedure_callable.py +58 -51
  53. tactus/primitives/retry.py +31 -29
  54. tactus/primitives/session.py +42 -29
  55. tactus/primitives/state.py +54 -43
  56. tactus/primitives/step.py +9 -13
  57. tactus/primitives/system.py +34 -21
  58. tactus/primitives/tool.py +44 -31
  59. tactus/primitives/tool_handle.py +76 -54
  60. tactus/primitives/toolset.py +25 -22
  61. tactus/sandbox/config.py +4 -4
  62. tactus/sandbox/container_runner.py +161 -107
  63. tactus/sandbox/docker_manager.py +20 -20
  64. tactus/sandbox/entrypoint.py +16 -14
  65. tactus/sandbox/protocol.py +15 -15
  66. tactus/stdlib/classify/llm.py +1 -3
  67. tactus/stdlib/core/validation.py +0 -3
  68. tactus/testing/pydantic_eval_runner.py +1 -1
  69. tactus/utils/asyncio_helpers.py +27 -0
  70. tactus/utils/cost_calculator.py +7 -7
  71. tactus/utils/model_pricing.py +11 -12
  72. tactus/utils/safe_file_library.py +156 -132
  73. tactus/utils/safe_libraries.py +27 -27
  74. tactus/validation/error_listener.py +18 -5
  75. tactus/validation/semantic_visitor.py +392 -333
  76. tactus/validation/validator.py +89 -49
  77. {tactus-0.34.1.dist-info → tactus-0.35.1.dist-info}/METADATA +15 -3
  78. {tactus-0.34.1.dist-info → tactus-0.35.1.dist-info}/RECORD +81 -80
  79. {tactus-0.34.1.dist-info → tactus-0.35.1.dist-info}/WHEEL +0 -0
  80. {tactus-0.34.1.dist-info → tactus-0.35.1.dist-info}/entry_points.txt +0 -0
  81. {tactus-0.34.1.dist-info → tactus-0.35.1.dist-info}/licenses/LICENSE +0 -0
tactus/ide/server.py CHANGED
@@ -15,7 +15,7 @@ from datetime import datetime
  from pathlib import Path
  from flask import Flask, request, jsonify, Response, stream_with_context
  from flask_cors import CORS
- from typing import Dict, Any, List, Optional
+ from typing import Any, Optional

  from tactus.validation.validator import TactusValidator, ValidationMode
  from tactus.core.registry import ValidationMessage
@@ -42,10 +42,10 @@ class TactusLSPHandler:

  def __init__(self):
  self.validator = TactusValidator()
- self.documents: Dict[str, str] = {}
- self.registries: Dict[str, Any] = {}
+ self.documents: dict[str, str] = {}
+ self.registries: dict[str, Any] = {}

- def validate_document(self, uri: str, text: str) -> List[Dict[str, Any]]:
+ def validate_document(self, uri: str, text: str) -> list[dict[str, Any]]:
  """Validate document and return LSP diagnostics."""
  self.documents[uri] = text

@@ -55,7 +55,7 @@ class TactusLSPHandler:
  if result.registry:
  self.registries[uri] = result.registry

- diagnostics = []
+ diagnostics: list[dict[str, Any]] = []
  for error in result.errors:
  diagnostic = self._convert_to_diagnostic(error, "Error")
  if diagnostic:
@@ -68,12 +68,12 @@ class TactusLSPHandler:

  return diagnostics
  except Exception as e:
- logger.error(f"Error validating document {uri}: {e}", exc_info=True)
+ logger.error("Error validating document %s: %s", uri, e, exc_info=True)
  return []

  def _convert_to_diagnostic(
  self, message: ValidationMessage, severity_str: str
- ) -> Optional[Dict[str, Any]]:
+ ) -> Optional[dict[str, Any]]:
  """Convert ValidationMessage to LSP diagnostic."""
  severity = 1 if severity_str == "Error" else 2

@@ -104,7 +104,7 @@ class LSPServer:
  self.initialized = False
  self.client_capabilities = {}

- def handle_message(self, message: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+ def handle_message(self, message: dict[str, Any]) -> Optional[dict[str, Any]]:
  """Handle LSP JSON-RPC message."""
  method = message.get("method")
  params = message.get("params", {})
@@ -114,16 +114,16 @@ class LSPServer:
  if method == "initialize":
  result = self._handle_initialize(params)
  else:
- logger.warning(f"Unhandled LSP method: {method}")
+ logger.warning("Unhandled LSP method: %s", method)
  return self._error_response(msg_id, -32601, f"Method not found: {method}")

  if msg_id is not None:
  return {"jsonrpc": "2.0", "id": msg_id, "result": result}
  except Exception as e:
- logger.error(f"Error handling {method}: {e}", exc_info=True)
+ logger.error("Error handling %s: %s", method, e, exc_info=True)
  return self._error_response(msg_id, -32603, str(e))

- def _handle_initialize(self, params: Dict[str, Any]) -> Dict[str, Any]:
+ def _handle_initialize(self, params: dict[str, Any]) -> dict[str, Any]:
  """Handle initialize request."""
  self.client_capabilities = params.get("capabilities", {})
  self.initialized = True
@@ -139,7 +139,7 @@ class LSPServer:
  "serverInfo": {"name": "tactus-lsp-server", "version": "0.1.0"},
  }

- def _error_response(self, msg_id: Optional[int], code: int, message: str) -> Dict[str, Any]:
+ def _error_response(self, msg_id: Optional[int], code: int, message: str) -> dict[str, Any]:
  """Create LSP error response."""
  return {"jsonrpc": "2.0", "id": msg_id, "error": {"code": code, "message": message}}

@@ -252,11 +252,11 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  WORKSPACE_ROOT = str(root_path)
  os.chdir(WORKSPACE_ROOT)

- logger.info(f"Workspace set to: {WORKSPACE_ROOT}")
+ logger.info("Workspace set to: %s", WORKSPACE_ROOT)

  return jsonify({"success": True, "root": WORKSPACE_ROOT, "name": root_path.name})
  except Exception as e:
- logger.error(f"Error setting workspace {root}: {e}")
+ logger.error("Error setting workspace %s: %s", root, e)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/tree", methods=["GET"])
@@ -299,7 +299,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error listing directory {relative_path}: {e}")
+ logger.error("Error listing directory %s: %s", relative_path, e)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/file", methods=["GET", "POST"])
@@ -331,7 +331,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error reading file {file_path}: {e}")
+ logger.error("Error reading file %s: %s", file_path, e)
  return jsonify({"error": str(e)}), 500

  elif request.method == "POST":
@@ -353,7 +353,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error writing file {file_path}: {e}")
+ logger.error("Error writing file %s: %s", file_path, e)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/procedure/metadata", methods=["GET"])
@@ -496,7 +496,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  return jsonify({"success": True, "metadata": metadata})

  except Exception as e:
- logger.error(f"Error extracting procedure metadata: {e}", exc_info=True)
+ logger.error("Error extracting procedure metadata: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/validate", methods=["POST"])
@@ -536,7 +536,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  }
  )
  except Exception as e:
- logger.error(f"Error validating code: {e}")
+ logger.error("Error validating code: %s", e)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/validate/stream", methods=["GET"])
@@ -593,7 +593,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  yield f"data: {json.dumps(validation_event)}\n\n"

  except Exception as e:
- logger.error(f"Error in validation: {e}", exc_info=True)
+ logger.error("Error in validation: %s", e, exc_info=True)
  error_event = {
  "event_type": "execution",
  "lifecycle_stage": "error",
@@ -615,7 +615,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error setting up validation: {e}", exc_info=True)
+ logger.error("Error setting up validation: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/run", methods=["POST"])
@@ -663,7 +663,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error running procedure {file_path}: {e}")
+ logger.error("Error running procedure %s: %s", file_path, e)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/run/stream", methods=["GET", "POST"])
@@ -822,7 +822,9 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  logger.info("[SANDBOX] Docker available, using container execution")
  else:
  logger.info(
- f"[SANDBOX] Direct execution (Docker: {docker_available}, reason: {docker_reason})"
+ "[SANDBOX] Direct execution (Docker: %s, reason: %s)",
+ docker_available,
+ docker_reason,
  )

  # Create event queue for sandbox event streaming (if using sandbox)
@@ -868,8 +870,9 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  # Parse the request
  request = ControlRequest.model_validate(request_data)
  logger.info(
- f"[HITL] Container control request {request.request_id} "
- f"for procedure {request.procedure_id}"
+ "[HITL] Container control request %s for procedure %s",
+ request.request_id,
+ request.procedure_id,
  )

  # Get SSE channel
@@ -894,28 +897,31 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  )

  logger.info(
- f"[HITL] Request {request.request_id} delivered to IDE, waiting for response..."
+ "[HITL] Request %s delivered to IDE, waiting for response...",
+ request.request_id,
  )

  # Wait for response (with timeout) - run blocking wait in thread pool
  timeout_seconds = request.timeout_seconds or 300 # 5 min default
  logger.info(
- f"[HITL] Starting wait for response (timeout={timeout_seconds}s)..."
+ "[HITL] Starting wait for response (timeout=%ss)...",
+ timeout_seconds,
  )
  result = await asyncio.to_thread(
  response_event.wait, timeout=timeout_seconds
  )
- logger.info(f"[HITL] Wait completed, result={result}")
+ logger.info("[HITL] Wait completed, result=%s", result)

  if result:
  logger.info(
- f"[HITL] Received response for {request.request_id}: "
- f"{response_data.get('value')}"
+ "[HITL] Received response for %s: %s",
+ request.request_id,
+ response_data.get("value"),
  )
  return response_data
  else:
  # Timeout
- logger.warning(f"[HITL] Timeout for {request.request_id}")
+ logger.warning("[HITL] Timeout for %s", request.request_id)
  return {
  "value": request.default_value,
  "timed_out": True,
@@ -957,8 +963,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  )

  # Mark container as ready after first response
- if not result_container["container_ready"]:
- result_container["container_ready"] = True
+ result_container["container_ready"] = True

  # Extract result from ExecutionResult
  if exec_result.status.value == "success":
@@ -1034,8 +1039,16 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  yield f"data: {json.dumps(event_dict)}\n\n"
  events_sent = True
  except Exception as e:
- logger.error(f"Error serializing event: {e}", exc_info=True)
- logger.error(f"Event type: {type(event)}, Event: {event}")
+ logger.error(
+ "Error serializing event: %s",
+ e,
+ exc_info=True,
+ )
+ logger.error(
+ "Event type: %s, Event: %s",
+ type(event),
+ event,
+ )
  except queue.Empty:
  pass

@@ -1098,8 +1111,16 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  all_events.append(event_dict)
  yield f"data: {json.dumps(event_dict)}\n\n"
  except Exception as e:
- logger.error(f"Error serializing event: {e}", exc_info=True)
- logger.error(f"Event type: {type(event)}, Event: {event}")
+ logger.error(
+ "Error serializing event: %s",
+ e,
+ exc_info=True,
+ )
+ logger.error(
+ "Event type: %s, Event: %s",
+ type(event),
+ event,
+ )

  # Wait for thread to finish
  exec_thread.join(timeout=1)
@@ -1152,10 +1173,15 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  with open(events_file, "w") as f:
  json.dump(consolidated_events, f, indent=2)
  except Exception as e:
- logger.error(f"Failed to save events for run {run_id}: {e}", exc_info=True)
+ logger.error(
+ "Failed to save events for run %s: %s",
+ run_id,
+ e,
+ exc_info=True,
+ )

  except Exception as e:
- logger.error(f"Error in streaming execution: {e}", exc_info=True)
+ logger.error("Error in streaming execution: %s", e, exc_info=True)
  error_event = {
  "event_type": "execution",
  "lifecycle_stage": "error",
@@ -1178,7 +1204,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error setting up streaming execution: {e}", exc_info=True)
+ logger.error("Error setting up streaming execution: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/test/stream", methods=["GET"])
@@ -1273,7 +1299,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  setup_step_decorators()
  except Exception as e:
- logger.warning(f"Could not reset Behave step registry: {e}")
+ logger.warning("Could not reset Behave step registry: %s", e)

  # Setup test runner with mocks from registry
  mock_tools = None
@@ -1369,7 +1395,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  runner.cleanup()

  except Exception as e:
- logger.error(f"Error in test execution: {e}", exc_info=True)
+ logger.error("Error in test execution: %s", e, exc_info=True)
  error_event = {
  "event_type": "execution",
  "lifecycle_stage": "error",
@@ -1392,7 +1418,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error setting up test execution: {e}", exc_info=True)
+ logger.error("Error setting up test execution: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/evaluate/stream", methods=["GET"])
@@ -1529,7 +1555,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  evaluator.cleanup()

  except Exception as e:
- logger.error(f"Error in evaluation execution: {e}", exc_info=True)
+ logger.error("Error in evaluation execution: %s", e, exc_info=True)
  error_event = {
  "event_type": "execution",
  "lifecycle_stage": "error",
@@ -1552,7 +1578,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except ValueError as e:
  return jsonify({"error": str(e)}), 400
  except Exception as e:
- logger.error(f"Error setting up evaluation execution: {e}", exc_info=True)
+ logger.error("Error setting up evaluation execution: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/pydantic-eval/stream", methods=["GET"])
@@ -1564,7 +1590,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  - path: procedure file path (required)
  - runs: number of runs per case (optional, default 1)
  """
- logger.info(f"Pydantic eval stream request: args={request.args}")
+ logger.info("Pydantic eval stream request: args=%s", request.args)

  file_path = request.args.get("path")
  if not file_path:
@@ -1575,9 +1601,9 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  try:
  # Resolve path within workspace
- logger.info(f"Resolving path: {file_path}")
+ logger.info("Resolving path: %s", file_path)
  path = _resolve_workspace_path(file_path)
- logger.info(f"Resolved to: {path}")
+ logger.info("Resolved to: %s", path)

  if not path.exists():
  return jsonify({"error": f"File not found: {file_path}"}), 404
@@ -1736,7 +1762,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  }
  yield f"data: {json.dumps(error_event)}\n\n"
  except Exception as e:
- logger.error(f"Error running Pydantic Evals: {e}", exc_info=True)
+ logger.error("Error running Pydantic Evals: %s", e, exc_info=True)
  error_event = {
  "event_type": "execution",
  "lifecycle_stage": "error",
@@ -1757,7 +1783,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  )

  except Exception as e:
- logger.error(f"Error setting up Pydantic Evals: {e}", exc_info=True)
+ logger.error("Error setting up Pydantic Evals: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs", methods=["GET"])
@@ -1820,7 +1846,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify({"runs": runs_data})
  except Exception as e:
- logger.error(f"Error listing trace runs: {e}", exc_info=True)
+ logger.error("Error listing trace runs: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs/<run_id>", methods=["GET"])
@@ -1887,7 +1913,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify(run_dict)
  except Exception as e:
- logger.error(f"Error getting trace run {run_id}: {e}", exc_info=True)
+ logger.error("Error getting trace run %s: %s", run_id, e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs/<run_id>/checkpoints", methods=["GET"])
@@ -1941,7 +1967,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify({"checkpoints": checkpoints_dict})
  except Exception as e:
- logger.error(f"Error getting checkpoints for run {run_id}: {e}", exc_info=True)
+ logger.error("Error getting checkpoints for run %s: %s", run_id, e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs/<run_id>/checkpoints/<int:position>", methods=["GET"])
@@ -1998,7 +2024,13 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify(cp_dict)
  except Exception as e:
- logger.error(f"Error getting checkpoint {run_id}@{position}: {e}", exc_info=True)
+ logger.error(
+ "Error getting checkpoint %s@%s: %s",
+ run_id,
+ position,
+ e,
+ exc_info=True,
+ )
  return jsonify({"error": str(e)}), 500

  @app.route("/api/procedures/<procedure_id>/checkpoints", methods=["DELETE"])
@@ -2018,7 +2050,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  if checkpoint_file.exists():
  os.remove(checkpoint_file)
- logger.info(f"Cleared checkpoints for procedure: {procedure_id}")
+ logger.info("Cleared checkpoints for procedure: %s", procedure_id)
  return jsonify(
  {"success": True, "message": f"Checkpoints cleared for {procedure_id}"}
  )
@@ -2026,7 +2058,12 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  return jsonify({"success": True, "message": "No checkpoints found"}), 200

  except Exception as e:
- logger.error(f"Error clearing checkpoints for {procedure_id}: {e}", exc_info=True)
+ logger.error(
+ "Error clearing checkpoints for %s: %s",
+ procedure_id,
+ e,
+ exc_info=True,
+ )
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs/<run_id>/statistics", methods=["GET"])
@@ -2076,7 +2113,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify(stats)
  except Exception as e:
- logger.error(f"Error getting statistics for {run_id}: {e}", exc_info=True)
+ logger.error("Error getting statistics for %s: %s", run_id, e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/traces/runs/<run_id>/events", methods=["GET"])
@@ -2103,7 +2140,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify({"events": events})
  except Exception as e:
- logger.error(f"Error getting events for {run_id}: {e}", exc_info=True)
+ logger.error("Error getting events for %s: %s", run_id, e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  # Coding Assistant - persistent agent instance per session
@@ -2133,7 +2170,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  coding_assistant = CodingAssistantAgent(WORKSPACE_ROOT, config)
  logger.info("Coding assistant initialized")
  except Exception as e:
- logger.error(f"Failed to initialize coding assistant: {e}", exc_info=True)
+ logger.error("Failed to initialize coding assistant: %s", e, exc_info=True)
  raise
  return coding_assistant

@@ -2176,7 +2213,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  )

  except Exception as e:
- logger.error(f"Error handling chat message: {e}", exc_info=True)
+ logger.error("Error handling chat message: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/chat/stream", methods=["POST"])
@@ -2243,7 +2280,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  break

  except Exception as e:
- logger.error(f"Error streaming message: {e}", exc_info=True)
+ logger.error("Error streaming message: %s", e, exc_info=True)
  yield f"data: {json.dumps({'type': 'error', 'error': str(e)})}\n\n"
  finally:
  loop.close()
@@ -2259,7 +2296,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  )

  except Exception as e:
- logger.error(f"Error in stream endpoint: {e}", exc_info=True)
+ logger.error("Error in stream endpoint: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/chat/reset", methods=["POST"])
@@ -2272,7 +2309,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  return jsonify({"success": True})
  return jsonify({"error": "Assistant not initialized"}), 400
  except Exception as e:
- logger.error(f"Error resetting chat: {e}", exc_info=True)
+ logger.error("Error resetting chat: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/chat/tools", methods=["GET"])
@@ -2285,7 +2322,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  return jsonify({"tools": tools})
  return jsonify({"error": "Assistant not initialized"}), 400
  except Exception as e:
- logger.error(f"Error getting tools: {e}", exc_info=True)
+ logger.error("Error getting tools: %s", e, exc_info=True)
  return jsonify({"error": str(e)}), 500

  @app.route("/api/lsp", methods=["POST"])
@@ -2293,14 +2330,14 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  """Handle LSP requests via HTTP."""
  try:
  message = request.json
- logger.debug(f"Received LSP message: {message.get('method')}")
+ logger.debug("Received LSP message: %s", message.get("method"))
  response = lsp_server.handle_message(message)

  if response:
  return jsonify(response)
  return jsonify({"jsonrpc": "2.0", "id": message.get("id"), "result": None})
  except Exception as e:
- logger.error(f"Error handling LSP message: {e}")
+ logger.error("Error handling LSP message: %s", e)
  return (
  jsonify(
  {
@@ -2320,7 +2357,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  method = message.get("method")
  params = message.get("params", {})

- logger.debug(f"Received LSP notification: {method}")
+ logger.debug("Received LSP notification: %s", method)

  # Handle notifications that produce diagnostics
  diagnostics = []
@@ -2350,7 +2387,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify({"status": "ok"})
  except Exception as e:
- logger.error(f"Error handling LSP notification: {e}")
+ logger.error("Error handling LSP notification: %s", e)
  return jsonify({"error": str(e)}), 500

  # Register config API routes
@@ -2359,7 +2396,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  register_config_routes(app)
  except ImportError as e:
- logger.warning(f"Could not register config routes: {e}")
+ logger.warning("Could not register config routes: %s", e)

  # Serve frontend if dist directory is provided
  # =========================================================================
@@ -2395,7 +2432,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  data = request.json or {}
  value = data.get("value")

- logger.info(f"Received HITL response for {request_id}: {value}")
+ logger.info("Received HITL response for %s: %s", request_id, value)

  # Check if this is a container HITL request (pending in our dict)
  if request_id in _pending_hitl_requests:
@@ -2403,8 +2440,11 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  pending["response"]["value"] = value
  pending["response"]["timed_out"] = False
  pending["response"]["channel_id"] = "sse"
- pending["event"].set() # Signal the waiting thread
- logger.info(f"[HITL] Signaled container handler for {request_id}")
+ pending_event = pending.get("event")
+ if pending_event is None:
+ raise ValueError(f"Pending HITL request '{request_id}' missing event handle")
+ pending_event.set() # Signal the waiting thread
+ logger.info("[HITL] Signaled container handler for %s", request_id)
  else:
  # Push to SSE channel's response queue (for non-container HITL)
  channel = get_sse_channel()
@@ -2412,9 +2452,9 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  return jsonify({"status": "ok", "request_id": request_id})

- except Exception as e:
- logger.exception(f"Error handling HITL response for {request_id}")
- return jsonify({"status": "error", "message": str(e)}), 400
+ except Exception as exc:
+ logger.exception("Error handling HITL response for %s", request_id)
+ return jsonify({"status": "error", "message": str(exc)}), 400

  @app.route("/api/hitl/stream", methods=["GET"])
  def hitl_stream():
@@ -2456,7 +2496,8 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio

  if event:
  logger.info(
- f"[HITL-SSE] Sending event to client: {event.get('type', 'unknown')}"
+ "[HITL-SSE] Sending event to client: %s",
+ event.get("type", "unknown"),
  )
  yield f"data: {json.dumps(event)}\n\n"
  else:
@@ -2469,7 +2510,7 @@ def create_app(initial_workspace: Optional[str] = None, frontend_dist_dir: Optio
  except GeneratorExit:
  logger.info("HITL SSE client disconnected")
  except Exception as e:
- logger.error(f"Error in HITL SSE stream: {e}", exc_info=True)
+ logger.error("Error in HITL SSE stream: %s", e, exc_info=True)
  finally:
  loop.close()

@@ -2535,7 +2576,7 @@ def main() -> None:
  # Get initial workspace from environment or use current directory
  initial_workspace = os.environ.get("TACTUS_IDE_WORKSPACE")
  if initial_workspace:
- logger.info(f"Setting initial workspace to: {initial_workspace}")
+ logger.info("Setting initial workspace to: %s", initial_workspace)

  app = create_app(initial_workspace=initial_workspace)
  # NOTE: We intentionally disable Flask's reloader here; external watchers (e.g. watchdog)
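
Most of the server.py changes above swap f-string log messages for lazy %-style logging arguments. A minimal sketch of the difference, using only the standard `logging` module (the `save_events` helper below is hypothetical, not code from the package):

```python
import logging

logger = logging.getLogger(__name__)


def save_events(run_id: str) -> None:
    """Illustrative only: not a function from the tactus package."""
    try:
        raise OSError("disk full")  # stand-in failure so the except branch runs
    except Exception as e:
        # f-string form (before): the message is rendered even if ERROR logging
        # is disabled, and every run_id produces a distinct message string.
        logger.error(f"Failed to save events for run {run_id}: {e}", exc_info=True)

        # Lazy %-style form (after): interpolation is deferred to the handler,
        # and the constant format string is easier to filter and aggregate.
        logger.error("Failed to save events for run %s: %s", run_id, e, exc_info=True)


save_events("run-123")
```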
tactus/primitives/control.py CHANGED
@@ -55,10 +55,14 @@ class IterationsPrimitive:
  return {success = false, reason = "Max iterations exceeded"}
  end
  """
- exceeded = self._current_iteration >= max_iterations
- if exceeded:
- logger.warning(f"Iterations exceeded: {self._current_iteration} >= {max_iterations}")
- return exceeded
+ has_exceeded_limit = self._current_iteration >= max_iterations
+ if has_exceeded_limit:
+ logger.warning(
+ "Iterations exceeded: %s >= %s",
+ self._current_iteration,
+ max_iterations,
+ )
+ return has_exceeded_limit

  def increment(self) -> int:
  """
@@ -70,7 +74,7 @@ class IterationsPrimitive:
  Note: This is called internally by the runtime, not from Lua
  """
  self._current_iteration += 1
- logger.debug(f"Iteration incremented to {self._current_iteration}")
+ logger.debug("Iteration incremented to %s", self._current_iteration)
  return self._current_iteration

  def reset(self) -> None:
@@ -155,7 +159,7 @@ class StopPrimitive:
  self._success = success

  log_level = logging.INFO if success else logging.WARNING
- logger.log(log_level, f"Stop requested: {reason} (success={success})")
+ logger.log(log_level, "Stop requested: %s (success=%s)", reason, success)

  def reset(self) -> None:
  """Reset stop state (mainly for testing)."""