tactus 0.36.0-py3-none-any.whl → 0.38.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. tactus/__init__.py +1 -1
  2. tactus/adapters/channels/base.py +22 -2
  3. tactus/adapters/channels/broker.py +1 -0
  4. tactus/adapters/channels/host.py +3 -1
  5. tactus/adapters/channels/ipc.py +18 -3
  6. tactus/adapters/channels/sse.py +2 -0
  7. tactus/adapters/mcp_manager.py +24 -7
  8. tactus/backends/http_backend.py +2 -2
  9. tactus/backends/pytorch_backend.py +2 -2
  10. tactus/broker/client.py +3 -3
  11. tactus/broker/server.py +17 -5
  12. tactus/cli/app.py +212 -57
  13. tactus/core/compaction.py +17 -0
  14. tactus/core/context_assembler.py +73 -0
  15. tactus/core/context_models.py +41 -0
  16. tactus/core/dsl_stubs.py +560 -20
  17. tactus/core/exceptions.py +8 -0
  18. tactus/core/execution_context.py +24 -24
  19. tactus/core/message_history_manager.py +2 -2
  20. tactus/core/mocking.py +12 -0
  21. tactus/core/output_validator.py +6 -6
  22. tactus/core/registry.py +171 -29
  23. tactus/core/retrieval.py +317 -0
  24. tactus/core/retriever_tasks.py +30 -0
  25. tactus/core/runtime.py +431 -117
  26. tactus/dspy/agent.py +143 -82
  27. tactus/dspy/broker_lm.py +13 -7
  28. tactus/dspy/config.py +23 -4
  29. tactus/dspy/module.py +12 -1
  30. tactus/ide/coding_assistant.py +2 -2
  31. tactus/primitives/handles.py +79 -7
  32. tactus/primitives/model.py +1 -1
  33. tactus/primitives/procedure.py +1 -1
  34. tactus/primitives/state.py +2 -2
  35. tactus/sandbox/config.py +1 -1
  36. tactus/sandbox/container_runner.py +13 -6
  37. tactus/sandbox/entrypoint.py +51 -8
  38. tactus/sandbox/protocol.py +5 -0
  39. tactus/stdlib/README.md +10 -1
  40. tactus/stdlib/biblicus/__init__.py +3 -0
  41. tactus/stdlib/biblicus/text.py +189 -0
  42. tactus/stdlib/tac/biblicus/text.tac +32 -0
  43. tactus/stdlib/tac/tactus/biblicus.spec.tac +179 -0
  44. tactus/stdlib/tac/tactus/corpora/base.tac +42 -0
  45. tactus/stdlib/tac/tactus/corpora/filesystem.tac +5 -0
  46. tactus/stdlib/tac/tactus/retrievers/base.tac +37 -0
  47. tactus/stdlib/tac/tactus/retrievers/embedding_index_file.tac +6 -0
  48. tactus/stdlib/tac/tactus/retrievers/embedding_index_inmemory.tac +6 -0
  49. tactus/stdlib/tac/tactus/retrievers/index.md +137 -0
  50. tactus/stdlib/tac/tactus/retrievers/init.tac +11 -0
  51. tactus/stdlib/tac/tactus/retrievers/sqlite_full_text_search.tac +6 -0
  52. tactus/stdlib/tac/tactus/retrievers/tf_vector.tac +6 -0
  53. tactus/testing/behave_integration.py +2 -0
  54. tactus/testing/context.py +10 -6
  55. tactus/testing/evaluation_runner.py +5 -5
  56. tactus/testing/steps/builtin.py +2 -2
  57. tactus/testing/test_runner.py +6 -4
  58. tactus/utils/asyncio_helpers.py +2 -1
  59. tactus/validation/semantic_visitor.py +357 -6
  60. tactus/validation/validator.py +142 -2
  61. {tactus-0.36.0.dist-info → tactus-0.38.0.dist-info}/METADATA +9 -6
  62. {tactus-0.36.0.dist-info → tactus-0.38.0.dist-info}/RECORD +65 -47
  63. {tactus-0.36.0.dist-info → tactus-0.38.0.dist-info}/WHEEL +0 -0
  64. {tactus-0.36.0.dist-info → tactus-0.38.0.dist-info}/entry_points.txt +0 -0
  65. {tactus-0.36.0.dist-info → tactus-0.38.0.dist-info}/licenses/LICENSE +0 -0
tactus/dspy/agent.py CHANGED
@@ -64,6 +64,7 @@ class DSPyAgentHandle:
  log_handler: Any = None,
  disable_streaming: bool = False,
  execution_context: Any = None,
+ context_name: Optional[str] = None,
  **kwargs: Any,
  ):
  """
@@ -100,6 +101,7 @@ class DSPyAgentHandle:
  self.tools = tools or []
  self.toolsets = toolsets or []
  self.execution_context = execution_context
+ self.context_name = context_name
  self._dspy_tools_cache = None # Cache for converted DSPy tools
  # Default input schema: {message: string}
  self.input_schema = input_schema or {"message": {"type": "string", "required": False}}
@@ -514,37 +516,44 @@ class DSPyAgentHandle:
  True if streaming should be enabled
  """
  # CRITICAL DEBUG: Always log entry
- logger.info(f"[STREAMING] Agent '{self.name}': _should_stream() called")
+ # logger.info(f"[STREAMING] Agent '{self.name}': _should_stream() called")

  # Must have log_handler to emit streaming events
  if self.log_handler is None:
- logger.info(f"[STREAMING] Agent '{self.name}': no log_handler, streaming disabled")
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}': no log_handler, streaming disabled"
+ # )
  return False

  # Allow log handlers to opt out of streaming (e.g., cost-only collectors)
  supports_streaming = getattr(self.log_handler, "supports_streaming", True)
- logger.info(
- f"[STREAMING] Agent '{self.name}': log_handler.supports_streaming={supports_streaming}"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}': "
+ # f"log_handler.supports_streaming={supports_streaming}"
+ # )
  if not supports_streaming:
- logger.info(
- f"[STREAMING] Agent '{self.name}': log_handler supports_streaming=False, streaming disabled"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}': "
+ # "log_handler supports_streaming=False, streaming disabled"
+ # )
  return False

  # Respect explicit disable flag
- logger.info(f"[STREAMING] Agent '{self.name}': disable_streaming={self.disable_streaming}")
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}': disable_streaming={self.disable_streaming}"
+ # )
  if self.disable_streaming:
- logger.info(
- f"[STREAMING] Agent '{self.name}': disable_streaming=True, streaming disabled"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}': "
+ # "disable_streaming=True, streaming disabled"
+ # )
  return False

  # Note: We intentionally allow streaming even with output_schema.
  # Streaming (UI feedback) and validation (post-processing) are orthogonal.
  # Stream raw text to UI during generation, then validate after completion.

- logger.info(f"[STREAMING] Agent '{self.name}': streaming ENABLED")
+ # logger.info(f"[STREAMING] Agent '{self.name}': streaming ENABLED")
  return True

  def _emit_cost_event(self) -> None:
@@ -664,7 +673,7 @@ class DSPyAgentHandle:
  import queue
  from tactus.protocols.models import AgentTurnEvent, AgentStreamChunkEvent

- logger.info(f"[STREAMING] Agent '{self.name}' starting streaming turn")
+ # logger.info(f"[STREAMING] Agent '{self.name}' starting streaming turn")

  # Emit turn started event so the UI shows a loading indicator
  self.log_handler.log(
@@ -673,7 +682,7 @@ class DSPyAgentHandle:
  stage="started",
  )
  )
- logger.info(f"[STREAMING] Agent '{self.name}' emitted AgentTurnEvent(started)")
+ # logger.info(f"[STREAMING] Agent '{self.name}' emitted AgentTurnEvent(started)")

  # Queue for passing chunks from streaming thread to main thread
  chunk_queue = queue.Queue()
@@ -690,7 +699,9 @@ class DSPyAgentHandle:
  # NOTE: streamify() automatically enables streaming on the LM
  # We do NOT need to use settings.context(stream=True) - that actually breaks it!
  streaming_module = dspy_thread.streamify(self._module.module)
- logger.info(f"[STREAMING] Agent '{self.name}' created streaming module")
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' created streaming module"
+ # )

  # Call the streaming module - it returns an async generator
  stream = streaming_module(**prompt_context)
@@ -698,41 +709,43 @@ class DSPyAgentHandle:
  chunk_count = 0
  async for value in stream:
  chunk_count += 1
- value_type = type(value).__name__
-
  # Check for final Prediction first
  if isinstance(value, dspy_thread.Prediction):
  # Final prediction - this is the result
- logger.info(
- f"[STREAMING] Agent '{self.name}' received final Prediction"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' received final Prediction"
+ # )
  result_holder["result"] = value
  # Check for ModelResponseStream (the actual streaming chunks!)
  elif hasattr(value, "choices") and value.choices:
  delta = value.choices[0].delta
  if hasattr(delta, "content") and delta.content:
- logger.info(
- f"[STREAMING] Agent '{self.name}' chunk #{chunk_count}: '{delta.content}'"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' "
+ # f"chunk #{chunk_count}: '{delta.content}'"
+ # )
  chunk_queue.put(("chunk", delta.content))
  # String chunks (shouldn't happen with DSPy but handle it anyway)
  elif isinstance(value, str):
- logger.info(
- f"[STREAMING] Agent '{self.name}' got STRING chunk, len={len(value)}"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' "
+ # f"got STRING chunk, len={len(value)}"
+ # )
  if value:
  chunk_queue.put(("chunk", value))
  else:
- logger.warning(
- f"[STREAMING] Agent '{self.name}' got unexpected type: {value_type}"
- )
+ pass

- logger.info(
- f"[STREAMING] Agent '{self.name}' stream finished, processed {chunk_count} values"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' "
+ # f"stream finished, processed {chunk_count} values"
+ # )

  except Exception as e:
- logger.error(f"[STREAMING] Agent '{self.name}' error: {e}", exc_info=True)
+ # logger.error(
+ # f"[STREAMING] Agent '{self.name}' error: {e}",
+ # exc_info=True,
+ # )
  result_holder["error"] = e
  finally:
  # Signal end of stream
@@ -748,7 +761,7 @@ class DSPyAgentHandle:
  # Consume chunks from the queue and emit events in the main thread
  accumulated_text = ""
  emitted_count = 0
- logger.info(f"[STREAMING] Agent '{self.name}' consuming chunks from queue")
+ # logger.info(f"[STREAMING] Agent '{self.name}' consuming chunks from queue")

  while True:
  try:
@@ -763,18 +776,23 @@ class DSPyAgentHandle:
  chunk_text=msg_data,
  accumulated_text=accumulated_text,
  )
- logger.info(
- f"[STREAMING] Agent '{self.name}' emitting chunk {emitted_count}, len={len(msg_data)}"
- )
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' emitting chunk "
+ # f"{emitted_count}, len={len(msg_data)}"
+ # )
  self.log_handler.log(event)
  except queue.Empty:
- logger.warning(f"[STREAMING] Agent '{self.name}' timeout waiting for chunks")
+ # logger.warning(
+ # f"[STREAMING] Agent '{self.name}' timeout waiting for chunks"
+ # )
  break

  # Wait for thread to complete
  streaming_thread.join(timeout=5.0)

- logger.info(f"[STREAMING] Agent '{self.name}' finished, emitted {emitted_count} events")
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' finished, emitted {emitted_count} events"
+ # )

  # Check for errors
  if result_holder["error"] is not None:
@@ -825,31 +843,22 @@ class DSPyAgentHandle:
  assistant_msg = {"role": "assistant", "content": result_holder["result"].response}

  # Include tool calls in the message if present (before wrapping)
- has_tc = hasattr(result_holder["result"], "tool_calls")
- tc_value = getattr(result_holder["result"], "tool_calls", None)
- logger.info(
- f"[ASYNC_STREAMING] Agent '{self.name}' result: has_tool_calls={has_tc}, tool_calls={tc_value}"
- )
  if (
  hasattr(result_holder["result"], "tool_calls")
  and result_holder["result"].tool_calls
  ):
  # Convert tool calls to JSON-serializable format
- logger.info("[ASYNC_STREAMING] Converting tool_calls to dict format")
+ # logger.info("[ASYNC_STREAMING] Converting tool_calls to dict format")
  tool_calls_list = []
- tc_obj = result_holder["result"].tool_calls
- has_tc_attr = hasattr(tc_obj, "tool_calls")
- logger.info(
- f"[ASYNC_STREAMING] tool_calls object: type={type(tc_obj)}, has_tool_calls_attr={has_tc_attr}"
- )
  for tc in (
  result_holder["result"].tool_calls.tool_calls
  if hasattr(result_holder["result"].tool_calls, "tool_calls")
  else []
  ):
- logger.info(
- f"[ASYNC_STREAMING] Processing tool call: name={tc.name} args={tc.args}"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Processing tool call: "
+ # f"name={tc.name} args={tc.args}"
+ # )
  tool_calls_list.append(
  {
  "id": f"call_{tc.name}", # Generate a simple ID
@@ -862,21 +871,23 @@ class DSPyAgentHandle:
  },
  }
  )
- logger.info(
- f"[ASYNC_STREAMING] Built tool_calls_list with {len(tool_calls_list)} items"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Built tool_calls_list with "
+ # f"{len(tool_calls_list)} items"
+ # )
  if tool_calls_list:
  assistant_msg["tool_calls"] = tool_calls_list
- logger.info("[ASYNC_STREAMING] Added tool_calls to assistant_msg")
+ # logger.info("[ASYNC_STREAMING] Added tool_calls to assistant_msg")

  new_messages.append(assistant_msg)
  self._history.add(assistant_msg)

  # Execute tool calls and add tool result messages to history
  if assistant_msg.get("tool_calls"):
- logger.info(
- f"[ASYNC_STREAMING] Agent '{self.name}' executing {len(assistant_msg['tool_calls'])} tool calls"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Agent '{self.name}' executing "
+ # f"{len(assistant_msg['tool_calls'])} tool calls"
+ # )
  for tc in assistant_msg["tool_calls"]:
  tool_name = tc["function"]["name"]
  tool_args_str = tc["function"]["arguments"]
@@ -887,13 +898,16 @@ class DSPyAgentHandle:
  )
  tool_id = tc["id"]

- logger.info(
- f"[ASYNC_STREAMING] Executing tool: {tool_name} with args: {tool_args}"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Executing tool: {tool_name} "
+ # f"with args: {tool_args}"
+ # )

  # Execute the tool using toolsets
  tool_result = self._execute_tool(tool_name, tool_args)
- logger.info(f"[ASYNC_STREAMING] Tool executed successfully: {tool_result}")
+ # logger.info(
+ # f"[ASYNC_STREAMING] Tool executed successfully: {tool_result}"
+ # )

  # Record the tool call so Lua can check if it was called
  tool_primitive = getattr(self, "_tool_primitive", None)
@@ -904,7 +918,9 @@ class DSPyAgentHandle:
  tool_primitive.record_call(
  clean_tool_name, tool_args, tool_result, agent_name=self.name
  )
- logger.info(f"[ASYNC_STREAMING] Recorded tool call: {clean_tool_name}")
+ # logger.info(
+ # f"[ASYNC_STREAMING] Recorded tool call: {clean_tool_name}"
+ # )

  # Add tool result to history in OpenAI's expected format
  # OpenAI requires: role="tool", tool_call_id=<id>, content=<result>
@@ -919,15 +935,19 @@ class DSPyAgentHandle:
  "name": tool_name,
  "content": tool_result_str,
  }
- logger.info(f"[ASYNC_STREAMING] Created tool result message: {tool_result_msg}")
+ # logger.info(
+ # f"[ASYNC_STREAMING] Created tool result message: {tool_result_msg}"
+ # )
  new_messages.append(tool_result_msg)
- logger.info(
- f"[ASYNC_STREAMING] Added tool result to new_messages, count={len(new_messages)}"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Added tool result to new_messages, "
+ # f"count={len(new_messages)}"
+ # )
  self._history.add(tool_result_msg)
- logger.info(
- f"[ASYNC_STREAMING] Added tool result to history for tool_call_id={tool_id}, history size={len(self._history)}"
- )
+ # logger.info(
+ # f"[ASYNC_STREAMING] Added tool result to history for "
+ # f"tool_call_id={tool_id}, history size={len(self._history)}"
+ # )

  # Wrap the result with message tracking
  wrapped_result = wrap_prediction(
@@ -960,7 +980,9 @@ class DSPyAgentHandle:
  stage="completed",
  )
  )
- logger.info(f"[STREAMING] Agent '{self.name}' emitted AgentTurnEvent(completed)")
+ # logger.info(
+ # f"[STREAMING] Agent '{self.name}' emitted AgentTurnEvent(completed)"
+ # )

  # Extract usage and cost stats
  usage_stats, cost_stats = self._extract_last_call_stats()
@@ -1223,14 +1245,49 @@ class DSPyAgentHandle:
  if self._turn_count == 1 and not user_message and self.initial_message:
  user_message = self.initial_message

- context = opts.get("context")
+ context = opts.get("context") or {}
+
+ if self.context_name:
+ if not self.registry or not hasattr(self.registry, "contexts"):
+ raise RuntimeError("Context assembly requires a registry with contexts")
+
+ from tactus.core.context_assembler import ContextAssembler
+ from tactus.dspy.history import TactusHistory
+
+ template_context = {
+ "input": context,
+ "context": getattr(self, "_context", {}) or {},
+ }
+ template_context.setdefault("input", {})
+ if user_message:
+ template_context["input"].setdefault("message", user_message)
+ template_context["input"].setdefault("question", user_message)
+ assembler = ContextAssembler(
+ self.registry.contexts,
+ retriever_registry=getattr(self.registry, "retrievers", None),
+ corpus_registry=getattr(self.registry, "corpora", None),
+ compactor_registry=getattr(self.registry, "compactors", None),
+ )
+ assembly = assembler.assemble(
+ context_name=self.context_name,
+ base_system_prompt=self.system_prompt,
+ history_messages=self._history.get(),
+ user_message=user_message or "",
+ template_context=template_context,
+ )

- # Build the prompt context
- prompt_context = {
- "system_prompt": self.system_prompt,
- "history": self._history.to_dspy(),
- "user_message": user_message or "",
- }
+ prompt_context = {
+ "system_prompt": assembly.system_prompt,
+ "history": TactusHistory(messages=assembly.history).to_dspy(),
+ "user_message": assembly.user_message,
+ }
+ else:
+ # Build the prompt context
+ prompt_context = {
+ "system_prompt": self.system_prompt,
+ "history": self._history.to_dspy(),
+ "user_message": user_message or "",
+ }

  # Add tools as structured DSPy Tool objects if agent has them
  # DSPy's adapter will convert these to OpenAI function call format
@@ -1254,7 +1311,9 @@ class DSPyAgentHandle:
  try:
  return self._turn_without_streaming(opts, prompt_context)
  except Exception as e:
- logger.error(f"Agent '{self.name}' turn failed: {e}")
+ # Avoid double-logging provider/auth errors in test runs; callers already get the
+ # raised exception and can decide how to surface it.
+ logger.debug("Agent '%s' turn failed: %s", self.name, e, exc_info=True)
  raise

  def _get_mock_response(self, opts: Dict[str, Any]) -> Optional[TactusPrediction]:
@@ -1488,6 +1547,7 @@ def create_dspy_agent(
  system_prompt=config.get("system_prompt", ""),
  model=config.get("model"),
  provider=config.get("provider"),
+ context_name=config.get("context"),
  tools=config.get("tools", []),
  toolsets=config.get("toolsets", []),
  output_schema=config.get("output_schema") or config.get("output"),
@@ -1509,6 +1569,7 @@
  "system_prompt",
  "model",
  "provider",
+ "context",
  "tools",
  "toolsets",
  "output_schema",
tactus/dspy/broker_lm.py CHANGED
@@ -11,7 +11,7 @@ while still supporting streaming via DSPy's `streamify()` mechanism.
  from __future__ import annotations

  import logging
- from typing import Any
+ from typing import Any, Dict, List, Optional

  import dspy
  import litellm
@@ -42,10 +42,10 @@ class BrokeredLM(dspy.BaseLM):
  model: str,
  *,
  model_type: str = "chat",
- temperature: float | None = None,
- max_tokens: int | None = None,
- cache: bool | None = None,
- socket_path: str | None = None,
+ temperature: Optional[float] = None,
+ max_tokens: Optional[int] = None,
+ cache: Optional[bool] = None,
+ socket_path: Optional[str] = None,
  **kwargs: Any,
  ):
  if model_type != "chat":
@@ -70,12 +70,18 @@ class BrokeredLM(dspy.BaseLM):
  self._client = env_client

  def forward(
- self, prompt: str | None = None, messages: list[dict[str, Any]] | None = None, **kwargs: Any
+ self,
+ prompt: Optional[str] = None,
+ messages: Optional[List[Dict[str, Any]]] = None,
+ **kwargs: Any,
  ):
  return syncify(self.aforward)(prompt=prompt, messages=messages, **kwargs)

  async def aforward(
- self, prompt: str | None = None, messages: list[dict[str, Any]] | None = None, **kwargs: Any
+ self,
+ prompt: Optional[str] = None,
+ messages: Optional[List[Dict[str, Any]]] = None,
+ **kwargs: Any,
  ):
  provider, model_id = _split_provider_model(self.model)

tactus/dspy/config.py CHANGED
@@ -65,6 +65,14 @@ def configure_lm(
  f"Invalid model format: {model}. Expected format like 'provider/model-name'"
  )

+ try:
+ import litellm
+
+ litellm.disable_aiohttp_transport = True
+ litellm.use_aiohttp_transport = False
+ except Exception:
+ pass
+
  # Build configuration
  lm_kwargs = {
  "temperature": temperature,
@@ -103,10 +111,13 @@

  logger = logging.getLogger(__name__)

- adapter = ChatAdapter(use_native_function_calling=True)
- logger.info(
- f"[ADAPTER] Created ChatAdapter with use_native_function_calling={adapter.use_native_function_calling}"
- )
+ try:
+ adapter = ChatAdapter(use_native_function_calling=True)
+ except TypeError:
+ adapter = ChatAdapter()
+
+ use_native = getattr(adapter, "use_native_function_calling", None)
+ logger.info(f"[ADAPTER] Created ChatAdapter with use_native_function_calling={use_native}")

  # Set as global default with adapter
  dspy.configure(lm=lm, adapter=adapter)
@@ -202,6 +213,14 @@ def create_lm(
  f"Invalid model format: {model}. Expected format like 'provider/model-name'"
  )

+ try:
+ import litellm
+
+ litellm.disable_aiohttp_transport = True
+ litellm.use_aiohttp_transport = False
+ except Exception:
+ pass
+
  # Build configuration
  lm_kwargs = {
  "temperature": temperature,
tactus/dspy/module.py CHANGED
@@ -5,7 +5,9 @@ This module provides the Module primitive that maps to DSPy modules,
  supporting various prediction strategies like Predict, ChainOfThought, etc.
  """

+ import json
  import logging
+ import os
  from typing import Any, Dict, Optional, Union

  import dspy
@@ -188,6 +190,16 @@ class RawModule(dspy.Module):

  # Log summary of messages being sent
  logger.debug(f"[RAWMODULE] Sending {len(messages)} messages to LM")
+ if os.environ.get("TACTUS_TRACE_LLM_MESSAGES") == "1":
+ try:
+ payload = json.dumps(messages, indent=2, ensure_ascii=False)
+ logger.debug("[RAWMODULE] LLM messages payload:\n%s", payload)
+ except TypeError:
+ logger.debug(
+ "[RAWMODULE] LLM messages payload (non-JSON serializable): %r", messages
+ )
+
+ kwargs.pop("context", None)

  # Call LM directly - streamify() will intercept this call if streaming is enabled
  response = lm(messages=messages, **kwargs)
@@ -223,7 +235,6 @@
  # Convert to DSPy ToolCalls format
  # tool_calls_from_lm is a list of ChatCompletionMessageToolCall objects from LiteLLM
  from dspy.adapters.types.tool import ToolCalls
- import json

  tool_calls_list = []
  for tc in tool_calls_from_lm:
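
Usage note for the trace switch added above: with TACTUS_TRACE_LLM_MESSAGES=1 and DEBUG logging enabled, RawModule dumps the exact message list sent to the LM. The setup below is one plausible way to surface it in a local run; only the variable name and the DEBUG level come from the diff.

import logging
import os

# Set before the agent/procedure runs in this process.
os.environ["TACTUS_TRACE_LLM_MESSAGES"] = "1"
logging.basicConfig(level=logging.DEBUG)  # the payload is emitted via logger.debug(...)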
tactus/ide/coding_assistant.py CHANGED
@@ -273,10 +273,10 @@ class CodingAssistantAgent:
  entries = []
  for item in sorted(target_path.iterdir(), key=lambda x: (not x.is_dir(), x.name)):
  if item.is_dir():
- entries.append(f"📁 {item.name}/")
+ entries.append(f"[DIR] {item.name}/")
  else:
  size = item.stat().st_size
- entries.append(f"📄 {item.name} ({size} bytes)")
+ entries.append(f"[FILE] {item.name} ({size} bytes)")

  if not entries:
  return f"Directory is empty: {path}"
tactus/primitives/handles.py CHANGED
@@ -222,7 +222,7 @@ class AgentHandle:
  return f"AgentHandle('{self.name}', {connected})"


- class ModelHandle:
+ class ModelHandle:
  """
  Lightweight handle returned by model() DSL function.

@@ -303,12 +303,84 @@ class ModelHandle:
  self._primitive = primitive
  logger.debug("ModelHandle '%s' connected to primitive", self.name)

- def __repr__(self) -> str:
- connected = "connected" if self._primitive else "disconnected"
- return f"ModelHandle('{self.name}', {connected})"
-
-
- class AgentLookup:
+ def __repr__(self) -> str:
+ connected = "connected" if self._primitive else "disconnected"
+ return f"ModelHandle('{self.name}', {connected})"
+
+
+ class ContextHandle:
+ """
+ Lightweight handle returned by Context DSL declarations.
+ """
+
+ def __init__(self, name: str):
+ """
+ Initialize context handle.
+
+ Args:
+ name: Context name (string identifier)
+ """
+ self.name = name
+
+ def __repr__(self) -> str:
+ return f"ContextHandle('{self.name}')"
+
+
+ class CorpusHandle:
+ """
+ Lightweight handle returned by Corpus DSL declarations.
+ """
+
+ def __init__(self, name: str):
+ """
+ Initialize corpus handle.
+
+ Args:
+ name: Corpus name (string identifier)
+ """
+ self.name = name
+
+ def __repr__(self) -> str:
+ return f"CorpusHandle('{self.name}')"
+
+
+ class RetrieverHandle:
+ """
+ Lightweight handle returned by Retriever DSL declarations.
+ """
+
+ def __init__(self, name: str):
+ """
+ Initialize retriever handle.
+
+ Args:
+ name: Retriever name (string identifier)
+ """
+ self.name = name
+
+ def __repr__(self) -> str:
+ return f"RetrieverHandle('{self.name}')"
+
+
+ class CompactorHandle:
+ """
+ Lightweight handle returned by Compactor DSL declarations.
+ """
+
+ def __init__(self, name: str):
+ """
+ Initialize compactor handle.
+
+ Args:
+ name: Compactor name (string identifier)
+ """
+ self.name = name
+
+ def __repr__(self) -> str:
+ return f"CompactorHandle('{self.name}')"
+
+
+ class AgentLookup:
  """
  Agent lookup primitive - provides Agent("name") lookup functionality.

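The four handle classes added above share one deliberately small shape: a stored name plus a readable repr. A short illustration, assuming they are importable from the handles module listed in this release; the declaration names passed in are made up.

from tactus.primitives.handles import (
    CompactorHandle,
    ContextHandle,
    CorpusHandle,
    RetrieverHandle,
)

# Hypothetical declaration names, purely illustrative.
handles = [
    ContextHandle("qa_context"),
    CorpusHandle("docs"),
    RetrieverHandle("tf_vector"),
    CompactorHandle("summarizer"),
]
print(handles)
# [ContextHandle('qa_context'), CorpusHandle('docs'), RetrieverHandle('tf_vector'), CompactorHandle('summarizer')]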
tactus/primitives/model.py CHANGED
@@ -27,7 +27,7 @@ class ModelPrimitive:
  self,
  model_name: str,
  config: dict,
- context: ExecutionContext | None = None,
+ context: Optional[ExecutionContext] = None,
  mock_manager: Optional[Any] = None,
  ):
  """