tactus 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. tactus/__init__.py +1 -1
  2. tactus/adapters/broker_log.py +55 -0
  3. tactus/adapters/cli_log.py +0 -25
  4. tactus/broker/__init__.py +12 -0
  5. tactus/broker/client.py +260 -0
  6. tactus/broker/server.py +505 -0
  7. tactus/broker/stdio.py +12 -0
  8. tactus/cli/app.py +38 -2
  9. tactus/core/dsl_stubs.py +2 -1
  10. tactus/core/output_validator.py +6 -3
  11. tactus/core/registry.py +8 -1
  12. tactus/core/runtime.py +15 -1
  13. tactus/core/yaml_parser.py +1 -11
  14. tactus/dspy/agent.py +190 -102
  15. tactus/dspy/broker_lm.py +181 -0
  16. tactus/dspy/config.py +21 -8
  17. tactus/dspy/prediction.py +71 -5
  18. tactus/ide/server.py +37 -142
  19. tactus/primitives/__init__.py +2 -0
  20. tactus/primitives/handles.py +34 -7
  21. tactus/primitives/host.py +94 -0
  22. tactus/primitives/log.py +4 -0
  23. tactus/primitives/model.py +20 -2
  24. tactus/primitives/procedure.py +106 -51
  25. tactus/primitives/tool.py +0 -2
  26. tactus/protocols/__init__.py +0 -7
  27. tactus/protocols/log_handler.py +2 -2
  28. tactus/protocols/models.py +1 -1
  29. tactus/sandbox/config.py +33 -5
  30. tactus/sandbox/container_runner.py +498 -60
  31. tactus/sandbox/entrypoint.py +30 -17
  32. tactus/sandbox/protocol.py +0 -9
  33. tactus/testing/README.md +0 -4
  34. tactus/testing/mock_agent.py +80 -23
  35. tactus/testing/test_runner.py +0 -18
  36. {tactus-0.26.0.dist-info → tactus-0.27.0.dist-info}/METADATA +1 -1
  37. {tactus-0.26.0.dist-info → tactus-0.27.0.dist-info}/RECORD +40 -33
  38. {tactus-0.26.0.dist-info → tactus-0.27.0.dist-info}/WHEEL +0 -0
  39. {tactus-0.26.0.dist-info → tactus-0.27.0.dist-info}/entry_points.txt +0 -0
  40. {tactus-0.26.0.dist-info → tactus-0.27.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,505 @@
1
+ """
2
+ Host-side broker server (local UDS transport).
3
+
4
+ This is intentionally narrow: it exposes only allowlisted operations required
5
+ by the runtime container.
6
+ """
7
+
8
+ import asyncio
9
+ import json
10
+ import logging
11
+ import os
12
+ import ssl
13
+ from collections.abc import Callable
14
+ from dataclasses import dataclass
15
+ from pathlib import Path
16
+ from typing import Any, Optional
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ def _json_dumps(obj: Any) -> str:
22
+ return json.dumps(obj, ensure_ascii=False, separators=(",", ":"))
23
+
24
+
25
+ async def _write_event(writer: asyncio.StreamWriter, event: dict[str, Any]) -> None:
26
+ writer.write((_json_dumps(event) + "\n").encode("utf-8"))
27
+ await writer.drain()
28
+
29
+
30
@dataclass(frozen=True)
class OpenAIChatConfig:
    """Configuration for the broker's OpenAI backend (credential lookup only)."""

    # Name of the environment variable (on the host/broker process) that
    # holds the OpenAI API key; read lazily when the first chat call happens.
    api_key_env: str = "OPENAI_API_KEY"
33
+
34
+
35
class OpenAIChatBackend:
    """
    Thin async wrapper over the OpenAI chat-completions API for broker use.

    The API key is resolved from the broker process environment, never from
    inside the sandboxed container.
    """

    def __init__(self, config: Optional[OpenAIChatConfig] = None):
        self._config = config if config is not None else OpenAIChatConfig()
        # Client construction is deferred to first use so that importing or
        # instantiating this class never requires the openai package.
        self._client = None

    def _get_client(self):
        """Return the cached AsyncOpenAI client, creating it on first call."""
        if self._client is None:
            # Lazy import: the openai dependency is only needed once a chat
            # request actually happens.
            from openai import AsyncOpenAI

            key_name = self._config.api_key_env
            api_key = os.environ.get(key_name)
            if not api_key:
                raise RuntimeError(f"Missing OpenAI API key in environment: {key_name}")

            self._client = AsyncOpenAI(api_key=api_key)
        return self._client

    async def chat(
        self,
        *,
        model: str,
        messages: list[dict[str, Any]],
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        stream: bool,
    ):
        """
        Issue one chat-completions request.

        Returns the full response object, or an async chunk iterator when
        *stream* is true. Optional sampling parameters are only forwarded
        when explicitly provided.
        """
        client = self._get_client()

        request: dict[str, Any] = {"model": model, "messages": messages}
        for key, value in (("temperature", temperature), ("max_tokens", max_tokens)):
            if value is not None:
                request[key] = value

        if stream:
            return await client.chat.completions.create(**request, stream=True)
        return await client.chat.completions.create(**request)
82
+
83
+
84
class HostToolRegistry:
    """
    Minimal deny-by-default registry for broker-executed host tools.

    Phase 1B starts with a tiny allowlist and expands deliberately.
    """

    def __init__(self, tools: Optional[dict[str, Callable[[dict[str, Any]], Any]]] = None):
        self._tools = tools or {}

    @classmethod
    def default(cls) -> "HostToolRegistry":
        """Build the baseline registry containing only the ping/echo tools."""

        def _ping(args: dict[str, Any]) -> dict[str, Any]:
            return {"ok": True, "echo": args}

        def _echo(args: dict[str, Any]) -> dict[str, Any]:
            return {"echo": args}

        allowlist: dict[str, Callable[[dict[str, Any]], Any]] = {
            "host.ping": _ping,
            "host.echo": _echo,
        }
        return cls(allowlist)

    def call(self, name: str, args: dict[str, Any]) -> Any:
        """Invoke allowlisted tool *name* with *args*; KeyError if not allowlisted."""
        handler = self._tools.get(name)
        if handler is None:
            raise KeyError(f"Tool not allowlisted: {name}")
        return handler(args)
108
+
109
+
110
class _BaseBrokerServer:
    """
    Shared request-dispatch logic for the UDS and TCP broker servers.

    Subclasses implement start() to bind a transport; this base class owns the
    per-connection NDJSON protocol (exactly one request per connection) and the
    three allowlisted methods: events.emit, llm.chat, and tool.call.
    """

    def __init__(
        self,
        *,
        openai_backend: Optional[OpenAIChatBackend] = None,
        tool_registry: Optional[HostToolRegistry] = None,
        event_handler: Optional[Callable[[dict[str, Any]], None]] = None,
    ):
        # Assigned by subclasses in start(); aclose() is a no-op until then.
        self._server: Optional[asyncio.AbstractServer] = None
        self._openai = openai_backend or OpenAIChatBackend()
        self._tools = tool_registry or HostToolRegistry.default()
        self._event_handler = event_handler

    async def start(self) -> None:
        # Transport-specific: subclasses must bind and set self._server.
        raise NotImplementedError

    async def aclose(self) -> None:
        # Idempotent: safe to call before start() or more than once.
        if self._server is not None:
            self._server.close()
            await self._server.wait_closed()
            self._server = None

    async def __aenter__(self) -> "_BaseBrokerServer":
        await self.start()
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        await self.aclose()

    async def _handle_connection(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        """
        Serve exactly one NDJSON request on this connection, then close it.

        Unexpected exceptions are reported back as a best-effort error event
        (with an empty id, since the request may not have parsed) instead of
        propagating into the event loop.
        """
        try:
            line = await reader.readline()
            if not line:
                # Peer connected and closed without sending a request.
                return

            req = json.loads(line.decode("utf-8"))
            req_id = req.get("id")
            method = req.get("method")
            params = req.get("params") or {}

            if not req_id or not method:
                await _write_event(
                    writer,
                    {
                        "id": req_id or "",
                        "event": "error",
                        "error": {"type": "BadRequest", "message": "Missing id/method"},
                    },
                )
                return

            # Deny-by-default dispatch: only these three methods are exposed.
            if method == "events.emit":
                await self._handle_events_emit(req_id, params, writer)
                return

            if method == "llm.chat":
                await self._handle_llm_chat(req_id, params, writer)
                return

            if method == "tool.call":
                await self._handle_tool_call(req_id, params, writer)
                return

            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "MethodNotFound", "message": f"Unknown method: {method}"},
                },
            )

        except Exception as e:
            logger.debug("[BROKER] Connection handler error", exc_info=True)
            try:
                await _write_event(
                    writer,
                    {
                        "id": "",
                        "event": "error",
                        "error": {"type": type(e).__name__, "message": str(e)},
                    },
                )
            except Exception:
                # The connection may already be broken; nothing more to report.
                pass
        finally:
            # Single-request protocol: always close the connection when done.
            try:
                writer.close()
                await writer.wait_closed()
            except Exception:
                pass

    async def _handle_events_emit(
        self, req_id: str, params: dict[str, Any], writer: asyncio.StreamWriter
    ) -> None:
        """
        Forward params.event to the configured event handler.

        Handler exceptions are logged and swallowed: emitting is fire-and-forget,
        so the caller still receives a "done" acknowledgement.
        """
        event = params.get("event")
        if not isinstance(event, dict):
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "BadRequest", "message": "params.event must be an object"},
                },
            )
            return

        try:
            if self._event_handler is not None:
                self._event_handler(event)
        except Exception:
            logger.debug("[BROKER] event_handler raised", exc_info=True)

        await _write_event(writer, {"id": req_id, "event": "done", "data": {"ok": True}})

    async def _handle_llm_chat(
        self, req_id: str, params: dict[str, Any], writer: asyncio.StreamWriter
    ) -> None:
        """
        Proxy a chat request to the OpenAI backend.

        Streaming requests emit one "delta" event per content chunk followed by
        a "done" event containing the accumulated text. Usage counters are
        currently hard-coded zeros (placeholder, not real token accounting).
        """
        provider = params.get("provider") or "openai"
        if provider != "openai":
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {
                        "type": "UnsupportedProvider",
                        "message": f"Unsupported provider: {provider}",
                    },
                },
            )
            return

        model = params.get("model")
        messages = params.get("messages")
        stream = bool(params.get("stream", False))
        temperature = params.get("temperature")
        max_tokens = params.get("max_tokens")

        if not isinstance(model, str) or not model:
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "BadRequest", "message": "params.model must be a string"},
                },
            )
            return
        if not isinstance(messages, list):
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "BadRequest", "message": "params.messages must be a list"},
                },
            )
            return

        try:
            if stream:
                stream_iter = await self._openai.chat(
                    model=model,
                    messages=messages,
                    temperature=temperature,
                    max_tokens=max_tokens,
                    stream=True,
                )

                full_text = ""
                async for chunk in stream_iter:
                    # Defensive extraction: chunk shapes vary (e.g. role-only or
                    # empty deltas), so any malformed chunk is treated as no text.
                    try:
                        delta = chunk.choices[0].delta
                        text = getattr(delta, "content", None)
                    except Exception:
                        text = None

                    if not text:
                        continue

                    full_text += text
                    await _write_event(
                        writer, {"id": req_id, "event": "delta", "data": {"text": text}}
                    )

                await _write_event(
                    writer,
                    {
                        "id": req_id,
                        "event": "done",
                        "data": {
                            "text": full_text,
                            "usage": {
                                "prompt_tokens": 0,
                                "completion_tokens": 0,
                                "total_tokens": 0,
                            },
                        },
                    },
                )
                return

            resp = await self._openai.chat(
                model=model,
                messages=messages,
                temperature=temperature,
                max_tokens=max_tokens,
                stream=False,
            )
            text = ""
            # Defensive extraction mirrors the streaming path: a missing or
            # malformed choice degrades to an empty string, not an error.
            try:
                text = resp.choices[0].message.content or ""
            except Exception:
                text = ""

            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "done",
                    "data": {
                        "text": text,
                        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
                    },
                },
            )
        except Exception as e:
            logger.debug("[BROKER] llm.chat error", exc_info=True)
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": type(e).__name__, "message": str(e)},
                },
            )

    async def _handle_tool_call(
        self, req_id: str, params: dict[str, Any], writer: asyncio.StreamWriter
    ) -> None:
        """
        Execute an allowlisted host tool and return its result.

        Non-allowlisted names map to a distinct "ToolNotAllowed" error. Tools
        run synchronously on the event loop, so handlers should stay fast.
        """
        name = params.get("name")
        args = params.get("args") or {}

        if not isinstance(name, str) or not name:
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "BadRequest", "message": "params.name must be a string"},
                },
            )
            return
        if not isinstance(args, dict):
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": "BadRequest", "message": "params.args must be an object"},
                },
            )
            return

        try:
            result = self._tools.call(name, args)
        except KeyError:
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {
                        "type": "ToolNotAllowed",
                        "message": f"Tool not allowlisted: {name}",
                    },
                },
            )
            return
        except Exception as e:
            logger.debug("[BROKER] tool.call error", exc_info=True)
            await _write_event(
                writer,
                {
                    "id": req_id,
                    "event": "error",
                    "error": {"type": type(e).__name__, "message": str(e)},
                },
            )
            return

        await _write_event(writer, {"id": req_id, "event": "done", "data": {"result": result}})
405
+
406
+
407
class BrokerServer(_BaseBrokerServer):
    """
    Local broker server that listens on a Unix domain socket.

    Protocol (NDJSON):
        request: {"id":"...","method":"llm.chat","params":{...}}
        response stream:
            {"id":"...","event":"delta","data":{"text":"..."}}
            {"id":"...","event":"done","data":{...}}
        or:
            {"id":"...","event":"error","error":{"message":"...","type":"..."}}
    """

    def __init__(
        self,
        socket_path: Path,
        *,
        openai_backend: Optional[OpenAIChatBackend] = None,
        tool_registry: Optional[HostToolRegistry] = None,
        event_handler: Optional[Callable[[dict[str, Any]], None]] = None,
    ):
        super().__init__(
            openai_backend=openai_backend, tool_registry=tool_registry, event_handler=event_handler
        )
        self.socket_path = Path(socket_path)

    async def start(self) -> None:
        """Bind the UDS listener, replacing any stale socket file at the path."""
        # Most platforms enforce a short maximum length for AF_UNIX socket paths.
        # Keep a conservative bound to avoid opaque "AF_UNIX path too long" errors.
        path_str = str(self.socket_path)
        if len(path_str) > 90:
            raise ValueError(
                f"Broker socket path too long for AF_UNIX: {self.socket_path} "
                f"(len={len(path_str)})"
            )

        self.socket_path.parent.mkdir(parents=True, exist_ok=True)
        # unlink(missing_ok=True) avoids the exists()/unlink() TOCTOU race and
        # also clears a dangling-symlink stale path, for which exists() would
        # report False while bind() would still fail.
        self.socket_path.unlink(missing_ok=True)

        self._server = await asyncio.start_unix_server(
            self._handle_connection, path=str(self.socket_path)
        )
        logger.info(f"[BROKER] Listening on UDS: {self.socket_path}")

    async def aclose(self) -> None:
        """Stop the listener and best-effort remove the socket file."""
        await super().aclose()

        try:
            self.socket_path.unlink(missing_ok=True)
        except Exception:
            logger.debug("[BROKER] Failed to unlink socket path", exc_info=True)
459
+
460
+
461
class TcpBrokerServer(_BaseBrokerServer):
    """
    Broker server bound to a TCP socket, optionally wrapped in TLS.

    Framing and allowlisted methods are identical to the UDS broker (NDJSON).
    """

    def __init__(
        self,
        *,
        host: str = "127.0.0.1",
        port: int = 0,
        ssl_context: ssl.SSLContext | None = None,
        openai_backend: Optional[OpenAIChatBackend] = None,
        tool_registry: Optional[HostToolRegistry] = None,
        event_handler: Optional[Callable[[dict[str, Any]], None]] = None,
    ):
        super().__init__(
            openai_backend=openai_backend, tool_registry=tool_registry, event_handler=event_handler
        )
        self.host = host
        self.port = port
        self.ssl_context = ssl_context
        # Filled in by start(); stays None if the OS-assigned port can't be read.
        self.bound_port: int | None = None

    async def start(self) -> None:
        """Bind the listener and record the kernel-assigned port (port=0 case)."""
        self._server = await asyncio.start_server(
            self._handle_connection,
            host=self.host,
            port=self.port,
            ssl=self.ssl_context,
        )

        listening = self._server.sockets or []
        if listening:
            try:
                self.bound_port = int(listening[0].getsockname()[1])
            except Exception:
                self.bound_port = None

        scheme = "tls" if self.ssl_context is not None else "tcp"
        shown_port = self.port if self.bound_port is None else self.bound_port
        logger.info(f"[BROKER] Listening on {scheme}: {self.host}:{shown_port}")
tactus/broker/stdio.py ADDED
@@ -0,0 +1,12 @@
1
+ """
2
+ Shared constants for broker-over-stdio transport.
3
+
4
+ Docker Desktop (macOS/Windows) containers cannot connect to a host AF_UNIX socket
5
+ bind-mounted from the host OS. For sandboxed runs we therefore use a broker RPC
6
+ channel over the container process stdio.
7
+ """
8
+
9
+ STDIO_TRANSPORT_VALUE = "stdio"
10
+
11
+ # Container → host requests are written to stderr with this prefix, followed by a JSON object.
12
+ STDIO_REQUEST_PREFIX = "<<<TACTUS_BROKER>>>"
tactus/cli/app.py CHANGED
@@ -455,6 +455,21 @@ def run(
455
455
  help="Run in Docker sandbox (default: required unless --no-sandbox). "
456
456
  "Use --no-sandbox to run without isolation (security risk).",
457
457
  ),
458
+ sandbox_broker: str = typer.Option(
459
+ "stdio",
460
+ "--sandbox-broker",
461
+ help="Broker transport for sandbox runtime: stdio (default, --network none) or tcp/tls (remote-mode spike).",
462
+ ),
463
+ sandbox_network: Optional[str] = typer.Option(
464
+ None,
465
+ "--sandbox-network",
466
+ help="Docker network mode for sandbox container (default: none for stdio; bridge for tcp/tls).",
467
+ ),
468
+ sandbox_broker_host: Optional[str] = typer.Option(
469
+ None,
470
+ "--sandbox-broker-host",
471
+ help="Broker hostname from inside the sandbox container (tcp/tls only).",
472
+ ),
458
473
  ):
459
474
  """
460
475
  Run a Tactus workflow.
@@ -611,6 +626,19 @@ def run(
611
626
  if sandbox is not None:
612
627
  # CLI flag overrides config
613
628
  sandbox_config_dict["enabled"] = sandbox
629
+ if sandbox_network is not None:
630
+ sandbox_config_dict["network"] = sandbox_network
631
+ if sandbox_broker_host is not None:
632
+ sandbox_config_dict["broker_host"] = sandbox_broker_host
633
+
634
+ sandbox_config_dict["broker_transport"] = sandbox_broker
635
+ if (
636
+ sandbox_network is None
637
+ and sandbox_broker in ("tcp", "tls")
638
+ and "network" not in sandbox_config_dict
639
+ ):
640
+ # Remote-mode requires container networking; default to bridge if user didn't specify.
641
+ sandbox_config_dict["network"] = "bridge"
614
642
  sandbox_config = SandboxConfig(**sandbox_config_dict)
615
643
 
616
644
  # Pass logging preferences through to the sandbox container so container stderr matches CLI UX.
@@ -632,6 +660,11 @@ def run(
632
660
  "[yellow][SANDBOX] Container isolation disabled (--no-sandbox or config).[/yellow]"
633
661
  )
634
662
  console.print("[yellow][SANDBOX] Proceeding without Docker isolation.[/yellow]")
663
+ elif not docker_available and not sandbox_config.should_error_if_unavailable():
664
+ # Sandbox is auto-mode (default): fall back when Docker is unavailable
665
+ console.print(
666
+ f"[yellow][SANDBOX] Docker not available ({docker_reason}); running without container isolation.[/yellow]"
667
+ )
635
668
  elif sandbox_config.should_error_if_unavailable() and not docker_available:
636
669
  # Sandbox required but Docker unavailable - ERROR
637
670
  console.print(f"[red][SANDBOX ERROR] Docker not available: {docker_reason}[/red]")
@@ -731,14 +764,17 @@ def run(
731
764
 
732
765
  try:
733
766
  if use_sandbox:
767
+ # Host-side broker reads OpenAI credentials from the host process environment.
768
+ # Keep secrets OUT of the sandbox container by setting the env var only on the host.
769
+ if api_key:
770
+ os.environ["OPENAI_API_KEY"] = api_key
771
+
734
772
  # Execute in Docker sandbox
735
773
  runner = ContainerRunner(sandbox_config)
736
774
  sandbox_result = asyncio.run(
737
775
  runner.run(
738
776
  source=source_content,
739
777
  params=context,
740
- config=merged_config,
741
- mcp_servers=mcp_servers,
742
778
  source_file_path=str(workflow_file),
743
779
  format=file_format,
744
780
  )
tactus/core/dsl_stubs.py CHANGED
@@ -951,7 +951,8 @@ def create_dsl_stubs(
951
951
  agent_config = {
952
952
  "tool_calls": mock_config.get("tool_calls", []),
953
953
  "message": mock_config.get("message", ""),
954
- "data": mock_config.get("data"),
954
+ "data": mock_config.get("data", {}),
955
+ "usage": mock_config.get("usage", {}),
955
956
  }
956
957
  builder.register_agent_mock(name, agent_config)
957
958
  continue
@@ -75,10 +75,13 @@ class OutputValidator:
75
75
  # If no schema defined, accept any output
76
76
  if not self.schema:
77
77
  logger.debug("No output schema defined, skipping validation")
78
- # Convert Lua table to dict if needed, otherwise return as-is
79
- if hasattr(output, "items") and not isinstance(output, dict):
78
+ if isinstance(output, dict):
79
+ return output
80
+ elif hasattr(output, "items"):
81
+ # Lua table - convert to dict
80
82
  return dict(output.items())
81
- return output
83
+ else:
84
+ return {"result": output}
82
85
 
83
86
  # Convert Lua tables to dicts recursively
84
87
  if hasattr(output, "items") or isinstance(output, dict):
tactus/core/registry.py CHANGED
@@ -119,7 +119,14 @@ class AgentMockConfig(BaseModel):
119
119
  tool_calls: list[dict[str, Any]] = Field(default_factory=list)
120
120
  # List of tool calls to simulate: [{"tool": "done", "args": {"reason": "..."}}, ...]
121
121
  message: str = "" # The agent's final message response
122
- data: Optional[dict[str, Any]] = None # Structured output data (for result.value)
122
+ data: dict[str, Any] = Field(
123
+ default_factory=dict,
124
+ description="Optional structured response payload (exposed as result.data in Lua)",
125
+ )
126
+ usage: dict[str, Any] = Field(
127
+ default_factory=dict,
128
+ description="Optional token usage payload (exposed as result.usage in Lua)",
129
+ )
123
130
 
124
131
 
125
132
  class ProcedureRegistry(BaseModel):