tactus 0.35.1-py3-none-any.whl → 0.37.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. tactus/__init__.py +1 -1
  2. tactus/adapters/channels/base.py +20 -2
  3. tactus/adapters/channels/broker.py +1 -0
  4. tactus/adapters/channels/host.py +3 -1
  5. tactus/adapters/channels/ipc.py +18 -3
  6. tactus/adapters/channels/sse.py +13 -5
  7. tactus/adapters/control_loop.py +44 -30
  8. tactus/adapters/mcp_manager.py +24 -7
  9. tactus/backends/http_backend.py +2 -2
  10. tactus/backends/pytorch_backend.py +2 -2
  11. tactus/broker/client.py +3 -3
  12. tactus/broker/server.py +17 -5
  13. tactus/core/dsl_stubs.py +3 -3
  14. tactus/core/execution_context.py +32 -27
  15. tactus/core/lua_sandbox.py +42 -34
  16. tactus/core/message_history_manager.py +51 -28
  17. tactus/core/output_validator.py +65 -51
  18. tactus/core/registry.py +29 -29
  19. tactus/core/runtime.py +69 -61
  20. tactus/dspy/broker_lm.py +13 -7
  21. tactus/dspy/config.py +7 -4
  22. tactus/ide/server.py +63 -33
  23. tactus/primitives/host.py +19 -16
  24. tactus/primitives/message_history.py +11 -14
  25. tactus/primitives/model.py +1 -1
  26. tactus/primitives/procedure.py +11 -8
  27. tactus/primitives/session.py +9 -9
  28. tactus/primitives/state.py +2 -2
  29. tactus/primitives/tool_handle.py +27 -24
  30. tactus/sandbox/container_runner.py +11 -6
  31. tactus/testing/context.py +6 -6
  32. tactus/testing/evaluation_runner.py +5 -5
  33. tactus/testing/mock_hitl.py +2 -2
  34. tactus/testing/models.py +2 -0
  35. tactus/testing/steps/builtin.py +2 -2
  36. tactus/testing/test_runner.py +6 -4
  37. tactus/utils/asyncio_helpers.py +2 -1
  38. tactus/utils/safe_libraries.py +2 -2
  39. {tactus-0.35.1.dist-info → tactus-0.37.0.dist-info}/METADATA +11 -5
  40. {tactus-0.35.1.dist-info → tactus-0.37.0.dist-info}/RECORD +43 -43
  41. {tactus-0.35.1.dist-info → tactus-0.37.0.dist-info}/WHEEL +0 -0
  42. {tactus-0.35.1.dist-info → tactus-0.37.0.dist-info}/entry_points.txt +0 -0
  43. {tactus-0.35.1.dist-info → tactus-0.37.0.dist-info}/licenses/LICENSE +0 -0
tactus/__init__.py CHANGED
@@ -5,7 +5,7 @@ Tactus provides a declarative workflow engine for AI agents with pluggable
 backends for storage, HITL, and chat recording.
 """
 
-__version__ = "0.35.1"
+__version__ = "0.37.0"
 
 # Core exports
 from tactus.core.runtime import TactusRuntime
tactus/adapters/channels/base.py CHANGED
@@ -9,7 +9,7 @@ requiring separate processes (e.g., Discord WebSocket gateway).
 import asyncio
 import logging
 from abc import ABC, abstractmethod
-from typing import AsyncIterator
+from typing import AsyncIterator, Optional
 
 from tactus.protocols.control import (
     ControlRequest,
@@ -54,7 +54,20 @@ class InProcessChannel(ABC):
 
     def __init__(self):
         """Initialize the channel with an internal response queue."""
-        self._response_queue: asyncio.Queue[ControlResponse] = asyncio.Queue()
+        self._response_queue: Optional[asyncio.Queue[ControlResponse]] = None
+        self._shutdown_event: Optional[asyncio.Event] = None
+
+    def _ensure_asyncio_primitives(self) -> None:
+        if self._response_queue is not None and self._shutdown_event is not None:
+            return
+        try:
+            asyncio.get_running_loop()
+        except RuntimeError as error:
+            raise RuntimeError(
+                "InProcessChannel requires a running event loop before use. "
+                "Initialize it from within an async context."
+            ) from error
+        self._response_queue = asyncio.Queue()
         self._shutdown_event = asyncio.Event()
 
     @property
@@ -105,6 +118,7 @@ class InProcessChannel(ABC):
         Yields:
             ControlResponse as they are received
         """
+        self._ensure_asyncio_primitives()
         while not self._shutdown_event.is_set():
             try:
                 # Use wait_for with timeout to check shutdown periodically
@@ -149,6 +163,7 @@ class InProcessChannel(ABC):
         Override for additional cleanup (close connections, etc.).
         """
         logger.info("%s: shutting down", self.channel_id)
+        self._ensure_asyncio_primitives()
         self._shutdown_event.set()
 
     def push_response(self, response: ControlResponse) -> None:
@@ -164,6 +179,7 @@ class InProcessChannel(ABC):
             response: ControlResponse to add to queue
         """
         try:
+            self._ensure_asyncio_primitives()
            self._response_queue.put_nowait(response)
        except Exception as error:
            logger.error("%s: failed to queue response: %s", self.channel_id, error)
@@ -180,4 +196,6 @@ class InProcessChannel(ABC):
            response: ControlResponse to add to queue
            loop: The event loop to use for thread-safe call
        """
+        if self._response_queue is None:
+            loop.call_soon_threadsafe(self._ensure_asyncio_primitives)
        loop.call_soon_threadsafe(self._response_queue.put_nowait, response)
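Note: the base-channel change above is a lazy-initialization pattern. The `asyncio.Queue` and `asyncio.Event` are no longer created in `__init__`, so a channel can be constructed from synchronous code and binds its primitives to whichever loop is actually running when it is first used. A minimal standalone sketch of the same idea (class and method names here are illustrative, not from the package):

```python
import asyncio
from typing import Optional


class LazyLoopChannel:
    """Sketch: defer creation of loop-bound primitives until a loop is running."""

    def __init__(self) -> None:
        # Nothing loop-bound is created here, so the object can be built
        # from plain synchronous code before any event loop exists.
        self._queue: Optional[asyncio.Queue] = None
        self._shutdown: Optional[asyncio.Event] = None

    def _ensure_primitives(self) -> None:
        if self._queue is not None and self._shutdown is not None:
            return
        try:
            asyncio.get_running_loop()  # raises RuntimeError outside a loop
        except RuntimeError as error:
            raise RuntimeError("use this channel from within an async context") from error
        self._queue = asyncio.Queue()
        self._shutdown = asyncio.Event()

    async def push(self, item: str) -> None:
        self._ensure_primitives()
        await self._queue.put(item)

    async def pop(self) -> str:
        self._ensure_primitives()
        return await self._queue.get()


channel = LazyLoopChannel()  # constructed before any loop exists


async def main() -> None:
    await channel.push("hello")
    print(await channel.pop())


asyncio.run(main())
```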
tactus/adapters/channels/broker.py CHANGED
@@ -91,6 +91,7 @@ class BrokerControlChannel(InProcessChannel):
         )
 
         try:
+            self._ensure_asyncio_primitives()
             # Serialize request to JSON-compatible dict
             request_data = request.model_dump(mode="json")
 
tactus/adapters/channels/host.py CHANGED
@@ -90,12 +90,14 @@ class HostControlChannel(InProcessChannel):
             request.request_id,
         )
 
+        self._ensure_asyncio_primitives()
+
         # Store for background thread access
         self._current_request = request
         self._cancel_event.clear()
 
         # Capture event loop for thread-safe response pushing
-        self._event_loop = asyncio.get_event_loop()
+        self._event_loop = asyncio.get_running_loop()
 
         # Display the request (synchronous, before starting thread)
         self._display_request(request)
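Note: the host channel now captures the loop with `asyncio.get_running_loop()` rather than `asyncio.get_event_loop()`. Inside a coroutine both return the same loop, but `get_running_loop()` fails loudly when called outside async code instead of possibly creating or returning an unrelated loop, so the captured loop is guaranteed to be the one the background thread must post back to. A small illustration (hypothetical helper, not from the package):

```python
import asyncio
import threading


def capture_loop() -> asyncio.AbstractEventLoop:
    # get_running_loop() only works while a loop is driving this code,
    # which is exactly the loop a worker thread should post results to.
    return asyncio.get_running_loop()


async def main() -> None:
    loop = capture_loop()

    def worker() -> None:
        # Thread-safe hand-off back to the captured loop.
        loop.call_soon_threadsafe(print, "result posted from worker thread")

    thread = threading.Thread(target=worker)
    thread.start()
    thread.join()
    await asyncio.sleep(0)  # let the scheduled callback run


asyncio.run(main())

try:
    capture_loop()  # outside any loop: fails fast instead of guessing
except RuntimeError as error:
    print(f"RuntimeError: {error}")
```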
tactus/adapters/channels/ipc.py CHANGED
@@ -46,10 +46,22 @@ class IPCControlChannel:
 
         self._server: Optional[asyncio.Server] = None
         self._clients: dict[str, asyncio.StreamWriter] = {}  # client_id -> writer
-        self._response_queue: asyncio.Queue[ControlResponse] = asyncio.Queue()
+        self._response_queue: Optional[asyncio.Queue[ControlResponse]] = None
         self._pending_requests: dict[str, ControlRequest] = {}  # request_id -> request
         self._initialized = False
 
+    def _ensure_response_queue(self) -> asyncio.Queue[ControlResponse]:
+        if self._response_queue is None:
+            try:
+                asyncio.get_running_loop()
+            except RuntimeError as error:
+                raise RuntimeError(
+                    "IPCControlChannel requires a running event loop before use. "
+                    "Initialize it from within an async context."
+                ) from error
+            self._response_queue = asyncio.Queue()
+        return self._response_queue
+
     @property
     def capabilities(self) -> ChannelCapabilities:
         """IPC supports all request types and can respond synchronously."""
@@ -68,6 +80,7 @@ class IPCControlChannel:
             return
 
         logger.info("%s: initializing...", self.channel_id)
+        self._ensure_response_queue()
 
         # Remove old socket file if it exists
         if os.path.exists(self.socket_path):
@@ -168,8 +181,9 @@ class IPCControlChannel:
         Yields:
             ControlResponse objects
         """
+        response_queue = self._ensure_response_queue()
         while True:
-            response = await self._response_queue.get()
+            response = await response_queue.get()
             logger.info(
                 "%s: received response for %s",
                 self.channel_id,
@@ -264,6 +278,7 @@ class IPCControlChannel:
         self._clients[client_id] = writer
 
         try:
+            response_queue = self._ensure_response_queue()
             # Send any pending requests to the new client
             for request_id, request_data in self._pending_requests.items():
                 try:
@@ -302,7 +317,7 @@ class IPCControlChannel:
                     timed_out=message.get("timed_out", False),
                     channel_id=self.channel_id,
                 )
-                await self._response_queue.put(response)
+                await response_queue.put(response)
                 logger.info(
                     "%s: received response for %s",
                     self.channel_id,
tactus/adapters/channels/sse.py CHANGED
@@ -248,7 +248,14 @@ class SSEControlChannel(InProcessChannel):
         """
         logger.info("%s: received response for %s", self.channel_id, request_id)
 
-        response = ControlResponse(
+        response = self._build_response(request_id, value)
+
+        # Push to queue from sync context (Flask thread)
+        # Get the running event loop and schedule the put operation
+        self._enqueue_response_from_sync_context(request_id, response)
+
+    def _build_response(self, request_id: str, value: Any) -> ControlResponse:
+        return ControlResponse(
             request_id=request_id,
             value=value,
             responded_at=datetime.now(timezone.utc),
@@ -256,15 +263,15 @@ class SSEControlChannel(InProcessChannel):
             channel_id=self.channel_id,
         )
 
-        # Push to queue from sync context (Flask thread)
-        # Get the running event loop and schedule the put operation
+    def _enqueue_response_from_sync_context(
+        self, request_id: str, response: ControlResponse
+    ) -> None:
         try:
+            self._ensure_asyncio_primitives()
             event_loop = asyncio.get_event_loop()
             if event_loop.is_running():
-                # Schedule the coroutine in the running loop
                 asyncio.run_coroutine_threadsafe(self._response_queue.put(response), event_loop)
             else:
-                # If no loop is running, use put_nowait (shouldn't happen)
                 self._response_queue.put_nowait(response)
         except Exception as error:
             logger.error(
@@ -320,4 +327,5 @@ class SSEControlChannel(InProcessChannel):
     async def shutdown(self) -> None:
         """Shutdown SSE channel."""
         logger.info("%s: shutting down", self.channel_id)
+        self._ensure_asyncio_primitives()
        self._shutdown_event.set()
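Note: the SSE refactor isolates the tricky part. The HTTP handler runs on a plain worker thread (the Flask side), while the response queue belongs to the asyncio loop, so the put has to be scheduled onto that loop rather than called directly. A self-contained sketch of that hand-off using `asyncio.run_coroutine_threadsafe` (names are illustrative):

```python
import asyncio
import threading


def post_from_worker_thread(
    loop: asyncio.AbstractEventLoop, queue: asyncio.Queue, item: str
) -> None:
    # A non-async thread must not touch a running loop's queue directly;
    # instead it schedules the put() coroutine on that loop.
    future = asyncio.run_coroutine_threadsafe(queue.put(item), loop)
    future.result(timeout=5)  # optionally wait until the loop has accepted it


async def main() -> None:
    loop = asyncio.get_running_loop()
    queue: asyncio.Queue = asyncio.Queue()

    worker = threading.Thread(
        target=post_from_worker_thread,
        args=(loop, queue, "response from the HTTP thread"),
    )
    worker.start()

    print(await queue.get())  # consumed back on the asyncio side
    worker.join()


asyncio.run(main())
```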
tactus/adapters/control_loop.py CHANGED
@@ -201,37 +201,12 @@ class ControlLoopHandler:
             message[:50],
         )
 
-        # Run the async request flow
-        # Check if we're already in an async context
-        try:
-            event_loop = asyncio.get_running_loop()
-            if event_loop.is_closed():
-                raise RuntimeError("Running event loop is closed")
+        # Run the async request flow.
+        running_event_loop = self._get_running_event_loop()
+        if running_event_loop is not None:
+            return self._run_request_in_running_loop(running_event_loop, request)
 
-            # Already in async context - create task and run it
-            # This shouldn't normally happen since request_interaction is sync
-            import nest_asyncio
-
-            nest_asyncio.apply()
-            return event_loop.run_until_complete(self._request_interaction_async(request))
-        except RuntimeError:
-            # Not in async context - create a temporary event loop.
-            previous_event_loop: asyncio.AbstractEventLoop | None = None
-            try:
-                previous_event_loop = asyncio.get_event_loop()
-            except RuntimeError:
-                previous_event_loop = None
-            else:
-                if getattr(previous_event_loop, "is_closed", lambda: False)():
-                    previous_event_loop = None
-
-            event_loop = asyncio.new_event_loop()
-            try:
-                asyncio.set_event_loop(event_loop)
-                return event_loop.run_until_complete(self._request_interaction_async(request))
-            finally:
-                event_loop.close()
-                asyncio.set_event_loop(previous_event_loop)
+        return self._run_request_in_new_loop(request)
 
     async def _request_interaction_async(self, request: ControlRequest) -> ControlResponse:
         """
@@ -312,6 +287,45 @@
 
         raise ProcedureWaitingForHuman(request.procedure_id, request.request_id)
 
+    def _get_running_event_loop(self) -> Optional[asyncio.AbstractEventLoop]:
+        try:
+            event_loop = asyncio.get_running_loop()
+        except RuntimeError:
+            return None
+
+        if event_loop.is_closed():
+            return None
+        return event_loop
+
+    def _run_request_in_running_loop(
+        self, event_loop: asyncio.AbstractEventLoop, request: ControlRequest
+    ) -> ControlResponse:
+        # Already in async context - create task and run it.
+        # This shouldn't normally happen since request_interaction is sync.
+        import nest_asyncio
+
+        nest_asyncio.apply()
+        return event_loop.run_until_complete(self._request_interaction_async(request))
+
+    def _run_request_in_new_loop(self, request: ControlRequest) -> ControlResponse:
+        # Not in async context - create a temporary event loop.
+        previous_event_loop: Optional[asyncio.AbstractEventLoop] = None
+        try:
+            previous_event_loop = asyncio.get_event_loop()
+        except RuntimeError:
+            previous_event_loop = None
+        else:
+            if getattr(previous_event_loop, "is_closed", lambda: False)():
+                previous_event_loop = None
+
+        event_loop = asyncio.new_event_loop()
+        try:
+            asyncio.set_event_loop(event_loop)
+            return event_loop.run_until_complete(self._request_interaction_async(request))
+        finally:
+            event_loop.close()
+            asyncio.set_event_loop(previous_event_loop)
+
     async def _fanout(
         self,
         request: ControlRequest,
        self,
tactus/adapters/mcp_manager.py CHANGED
@@ -5,18 +5,32 @@ Manages multiple MCP server connections using Pydantic AI's native MCPServerStdi
 Handles lifecycle, tool prefixing, and tool call tracking.
 """
 
+from __future__ import annotations
+
 import logging
 import os
 import re
 import asyncio
 from contextlib import AsyncExitStack
-from typing import Any
-
-from pydantic_ai.mcp import MCPServerStdio
+from typing import Any, Optional
 
 logger = logging.getLogger(__name__)
 
 
+MCPServerStdio: Optional[Any] = None
+
+
+def _require_mcp_server_stdio():
+    try:
+        from pydantic_ai.mcp import MCPServerStdio
+    except ImportError as import_error:
+        raise RuntimeError(
+            "MCP support requires optional dependencies. "
+            'Install with `pip install "pydantic-ai-slim[mcp]"`.'
+        ) from import_error
+    return MCPServerStdio
+
+
 def substitute_env_vars(value: Any) -> Any:
     """
     Replace ${VAR} with environment variable values.
@@ -55,8 +69,8 @@ class MCPServerManager:
         """
         self.configs = server_configs
         self.tool_primitive = tool_primitive
-        self.servers: list[MCPServerStdio] = []
-        self.server_toolsets: dict[str, MCPServerStdio] = {}  # Map server names to toolsets
+        self.servers: list[Any] = []
+        self.server_toolsets: dict[str, Any] = {}  # Map server names to toolsets
         self._exit_stack = AsyncExitStack()
         logger.info("MCPServerManager initialized with %s server(s)", len(server_configs))
 
@@ -64,7 +78,7 @@
         """Connect to all configured MCP servers."""
         for name, config in self.configs.items():
             # Retry a few times for transient stdio startup issues.
-            last_error: Exception | None = None
+            last_error: Optional[Exception] = None
             for attempt in range(1, 4):
                 try:
                     logger.info(
@@ -77,6 +91,9 @@
                     resolved_config = substitute_env_vars(config)
 
                     # Create base server
+                    MCPServerStdio = globals().get("MCPServerStdio")
+                    if MCPServerStdio is None:
+                        MCPServerStdio = _require_mcp_server_stdio()
                     server = MCPServerStdio(
                         command=resolved_config["command"],
                         args=resolved_config.get("args", []),
@@ -190,7 +207,7 @@ class MCPServerManager:
 
         return trace_tool_call
 
-    def get_toolsets(self) -> list[MCPServerStdio]:
+    def get_toolsets(self) -> list[Any]:
         """
         Return list of connected servers as toolsets.
 
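Note: the mcp_manager change is a lazy optional-dependency import. The module now imports cleanly without `pydantic_ai`, keeps a module-level `MCPServerStdio` placeholder that can be monkeypatched, and only raises an actionable error when MCP is actually used. A generic sketch of the pattern with hypothetical names (`heavy_optional_pkg` is not a real dependency of tactus):

```python
from typing import Any, Optional

# Placeholder that stays None until the real class is needed; tests can
# monkeypatch it with a fake to avoid installing the optional extra.
HeavyClient: Optional[Any] = None


def _require_heavy_client() -> Any:
    """Import the optional dependency on first use, with an actionable message."""
    try:
        from heavy_optional_pkg import HeavyClient as _HeavyClient  # hypothetical package
    except ImportError as import_error:
        raise RuntimeError(
            "This feature requires an optional dependency. "
            'Install it with `pip install "heavy-optional-pkg"`.'
        ) from import_error
    return _HeavyClient


def get_client_class() -> Any:
    # Prefer an injected/patched global; fall back to the lazy import.
    client_cls = globals().get("HeavyClient")
    if client_cls is None:
        client_cls = _require_heavy_client()
    return client_cls
```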
tactus/backends/http_backend.py CHANGED
@@ -3,7 +3,7 @@ HTTP model backend for REST endpoint inference.
 """
 
 import logging
-from typing import Any
+from typing import Any, Dict, Optional
 
 import httpx
 
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
 class HTTPModelBackend:
     """Model backend that calls HTTP REST endpoints."""
 
-    def __init__(self, endpoint: str, timeout: float = 30.0, headers: dict | None = None):
+    def __init__(self, endpoint: str, timeout: float = 30.0, headers: Optional[Dict] = None):
         """
         Initialize HTTP model backend.
 
tactus/backends/pytorch_backend.py CHANGED
@@ -4,7 +4,7 @@ PyTorch model backend for .pt file inference.
 
 import logging
 from pathlib import Path
-from typing import Any
+from typing import Any, List, Optional
 
 logger = logging.getLogger(__name__)
 
@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
 class PyTorchModelBackend:
     """Model backend that loads and runs PyTorch models."""
 
-    def __init__(self, path: str, device: str = "cpu", labels: list[str] | None = None):
+    def __init__(self, path: str, device: str = "cpu", labels: Optional[List[str]] = None):
         """
         Initialize PyTorch model backend.
 
tactus/broker/client.py CHANGED
@@ -16,7 +16,7 @@ import sys
 import threading
 import uuid
 from pathlib import Path
-from typing import Any, AsyncIterator, Optional
+from typing import Any, AsyncIterator, Optional, Union
 
 from tactus.broker.protocol import read_message, write_message
 from tactus.broker.stdio import STDIO_REQUEST_PREFIX, STDIO_TRANSPORT_VALUE
@@ -122,7 +122,7 @@ async def close_stdio_transport() -> None:
 
 
 class BrokerClient:
-    def __init__(self, socket_path: str | Path):
+    def __init__(self, socket_path: Union[str, Path]):
         self.socket_path = str(socket_path)
 
     @classmethod
@@ -158,7 +158,7 @@ class BrokerClient:
         except ValueError as error:
             raise ValueError(f"Invalid broker port in endpoint: {self.socket_path}") from error
 
-        ssl_context: ssl.SSLContext | None = None
+        ssl_context: Optional[ssl.SSLContext] = None
         if use_tls:
             ssl_context = ssl.create_default_context()
             cafile = os.environ.get("TACTUS_BROKER_TLS_CA_FILE")
tactus/broker/server.py CHANGED
@@ -29,6 +29,18 @@ from tactus.broker.protocol import (
 logger = logging.getLogger(__name__)
 
 
+try:
+    from builtins import BaseExceptionGroup as BaseExceptionGroup
+except ImportError:  # pragma: no cover - Python < 3.11 fallback
+
+    class BaseExceptionGroup(Exception):
+        """Minimal BaseExceptionGroup fallback for Python < 3.11."""
+
+        def __init__(self, message: str, exceptions: list[BaseException]):
+            super().__init__(message)
+            self.exceptions = exceptions
+
+
 def _json_dumps(obj: Any) -> str:
     return json.dumps(obj, ensure_ascii=False, separators=(",", ":"))
 
@@ -170,7 +182,7 @@ class _BaseBrokerServer:
         control_handler: Optional[Callable[[dict], Awaitable[dict]]] = None,
     ):
         self._listener = None
-        self._serve_task: asyncio.Task[None] | None = None
+        self._serve_task: Optional[asyncio.Task[None]] = None
         self._openai = openai_backend or OpenAIChatBackend()
         self._tools = tool_registry or HostToolRegistry.default()
         self._event_handler = event_handler
@@ -1012,7 +1024,7 @@ class BrokerServer(_BaseBrokerServer):
             openai_backend=openai_backend, tool_registry=tool_registry, event_handler=event_handler
         )
         self.socket_path = Path(socket_path)
-        self._server: asyncio.AbstractServer | None = None
+        self._server: Optional[asyncio.AbstractServer] = None
 
     async def start(self) -> None:
         # Most platforms enforce a short maximum length for AF_UNIX socket paths.
@@ -1445,7 +1457,7 @@ class TcpBrokerServer(_BaseBrokerServer):
         *,
         host: str = "127.0.0.1",
         port: int = 0,
-        ssl_context: ssl.SSLContext | None = None,
+        ssl_context: Optional[ssl.SSLContext] = None,
         openai_backend: Optional[OpenAIChatBackend] = None,
         tool_registry: Optional[HostToolRegistry] = None,
         event_handler: Optional[Callable[[dict[str, Any]], None]] = None,
@@ -1460,8 +1472,8 @@
         self.host = host
         self.port = port
         self.ssl_context = ssl_context
-        self.bound_port: int | None = None
-        self._serve_task: asyncio.Task[None] | None = None
+        self.bound_port: Optional[int] = None
+        self._serve_task: Optional[asyncio.Task[None]] = None
 
     async def start(self) -> None:
         # Create AnyIO TCP listener (doesn't block, just binds to port)
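Note: the shim at the top of broker/server.py backfills `BaseExceptionGroup`, which became a builtin only in Python 3.11, so code that catches or inspects grouped task failures still imports on older interpreters. A small sketch of how such a fallback behaves (the `flatten` helper is illustrative, not part of tactus):

```python
import sys

try:
    from builtins import BaseExceptionGroup  # builtin on Python 3.11+
except ImportError:  # pragma: no cover - older interpreters
    class BaseExceptionGroup(Exception):  # minimal stand-in, mirrors the shim above
        def __init__(self, message: str, exceptions):
            super().__init__(message)
            self.exceptions = list(exceptions)


def flatten(error: BaseException) -> list:
    """Unwrap grouped exceptions into a flat list, recursing into nested groups."""
    if isinstance(error, BaseExceptionGroup):
        flat = []
        for sub in error.exceptions:
            flat.extend(flatten(sub))
        return flat
    return [error]


group = BaseExceptionGroup("broker tasks failed", [ValueError("a"), KeyError("b")])
print([type(e).__name__ for e in flatten(group)], sys.version_info[:2])
```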
tactus/core/dsl_stubs.py CHANGED
@@ -31,7 +31,7 @@ Agent/Tool calls use direct variable access:
     done.last_result() -- Get last tool result
 """
 
-from typing import Any, Callable
+from typing import Any, Callable, Dict, Optional
 
 from .registry import RegistryBuilder
 from tactus.primitives.handles import AgentHandle, ModelHandle, AgentLookup, ModelLookup
@@ -112,7 +112,7 @@ def create_dsl_stubs(
     builder: RegistryBuilder,
     tool_primitive: Any = None,
     mock_manager: Any = None,
-    runtime_context: dict[str, Any] | None = None,
+    runtime_context: Optional[Dict[str, Any]] = None,
 ) -> dict[str, Callable]:
     """
     Create DSL stub functions that populate the registry.
@@ -144,7 +144,7 @@
     _procedure_registry = {}
 
     def _process_procedure_config(
-        name: str | None, config: Any, procedure_registry: dict[str, Any]
+        name: Optional[str], config: Any, procedure_registry: Dict[str, Any]
     ):
         """
         Process procedure config and register the procedure.