onetool-mcp 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. bench/__init__.py +5 -0
  2. bench/cli.py +69 -0
  3. bench/harness/__init__.py +66 -0
  4. bench/harness/client.py +692 -0
  5. bench/harness/config.py +397 -0
  6. bench/harness/csv_writer.py +109 -0
  7. bench/harness/evaluate.py +512 -0
  8. bench/harness/metrics.py +283 -0
  9. bench/harness/runner.py +899 -0
  10. bench/py.typed +0 -0
  11. bench/reporter.py +629 -0
  12. bench/run.py +487 -0
  13. bench/secrets.py +101 -0
  14. bench/utils.py +16 -0
  15. onetool/__init__.py +4 -0
  16. onetool/cli.py +391 -0
  17. onetool/py.typed +0 -0
  18. onetool_mcp-1.0.0b1.dist-info/METADATA +163 -0
  19. onetool_mcp-1.0.0b1.dist-info/RECORD +132 -0
  20. onetool_mcp-1.0.0b1.dist-info/WHEEL +4 -0
  21. onetool_mcp-1.0.0b1.dist-info/entry_points.txt +3 -0
  22. onetool_mcp-1.0.0b1.dist-info/licenses/LICENSE.txt +687 -0
  23. onetool_mcp-1.0.0b1.dist-info/licenses/NOTICE.txt +64 -0
  24. ot/__init__.py +37 -0
  25. ot/__main__.py +6 -0
  26. ot/_cli.py +107 -0
  27. ot/_tui.py +53 -0
  28. ot/config/__init__.py +46 -0
  29. ot/config/defaults/bench.yaml +4 -0
  30. ot/config/defaults/diagram-templates/api-flow.mmd +33 -0
  31. ot/config/defaults/diagram-templates/c4-context.puml +30 -0
  32. ot/config/defaults/diagram-templates/class-diagram.mmd +87 -0
  33. ot/config/defaults/diagram-templates/feature-mindmap.mmd +70 -0
  34. ot/config/defaults/diagram-templates/microservices.d2 +81 -0
  35. ot/config/defaults/diagram-templates/project-gantt.mmd +37 -0
  36. ot/config/defaults/diagram-templates/state-machine.mmd +42 -0
  37. ot/config/defaults/onetool.yaml +25 -0
  38. ot/config/defaults/prompts.yaml +97 -0
  39. ot/config/defaults/servers.yaml +7 -0
  40. ot/config/defaults/snippets.yaml +4 -0
  41. ot/config/defaults/tool_templates/__init__.py +7 -0
  42. ot/config/defaults/tool_templates/extension.py +52 -0
  43. ot/config/defaults/tool_templates/isolated.py +61 -0
  44. ot/config/dynamic.py +121 -0
  45. ot/config/global_templates/__init__.py +2 -0
  46. ot/config/global_templates/bench-secrets-template.yaml +6 -0
  47. ot/config/global_templates/bench.yaml +9 -0
  48. ot/config/global_templates/onetool.yaml +27 -0
  49. ot/config/global_templates/secrets-template.yaml +44 -0
  50. ot/config/global_templates/servers.yaml +18 -0
  51. ot/config/global_templates/snippets.yaml +235 -0
  52. ot/config/loader.py +1087 -0
  53. ot/config/mcp.py +145 -0
  54. ot/config/secrets.py +190 -0
  55. ot/config/tool_config.py +125 -0
  56. ot/decorators.py +116 -0
  57. ot/executor/__init__.py +35 -0
  58. ot/executor/base.py +16 -0
  59. ot/executor/fence_processor.py +83 -0
  60. ot/executor/linter.py +142 -0
  61. ot/executor/pack_proxy.py +260 -0
  62. ot/executor/param_resolver.py +140 -0
  63. ot/executor/pep723.py +288 -0
  64. ot/executor/result_store.py +369 -0
  65. ot/executor/runner.py +496 -0
  66. ot/executor/simple.py +163 -0
  67. ot/executor/tool_loader.py +396 -0
  68. ot/executor/validator.py +398 -0
  69. ot/executor/worker_pool.py +388 -0
  70. ot/executor/worker_proxy.py +189 -0
  71. ot/http_client.py +145 -0
  72. ot/logging/__init__.py +37 -0
  73. ot/logging/config.py +315 -0
  74. ot/logging/entry.py +213 -0
  75. ot/logging/format.py +188 -0
  76. ot/logging/span.py +349 -0
  77. ot/meta.py +1555 -0
  78. ot/paths.py +453 -0
  79. ot/prompts.py +218 -0
  80. ot/proxy/__init__.py +21 -0
  81. ot/proxy/manager.py +396 -0
  82. ot/py.typed +0 -0
  83. ot/registry/__init__.py +189 -0
  84. ot/registry/models.py +57 -0
  85. ot/registry/parser.py +269 -0
  86. ot/registry/registry.py +413 -0
  87. ot/server.py +315 -0
  88. ot/shortcuts/__init__.py +15 -0
  89. ot/shortcuts/aliases.py +87 -0
  90. ot/shortcuts/snippets.py +258 -0
  91. ot/stats/__init__.py +35 -0
  92. ot/stats/html.py +250 -0
  93. ot/stats/jsonl_writer.py +283 -0
  94. ot/stats/reader.py +354 -0
  95. ot/stats/timing.py +57 -0
  96. ot/support.py +63 -0
  97. ot/tools.py +114 -0
  98. ot/utils/__init__.py +81 -0
  99. ot/utils/batch.py +161 -0
  100. ot/utils/cache.py +120 -0
  101. ot/utils/deps.py +403 -0
  102. ot/utils/exceptions.py +23 -0
  103. ot/utils/factory.py +179 -0
  104. ot/utils/format.py +65 -0
  105. ot/utils/http.py +202 -0
  106. ot/utils/platform.py +45 -0
  107. ot/utils/sanitize.py +130 -0
  108. ot/utils/truncate.py +69 -0
  109. ot_tools/__init__.py +4 -0
  110. ot_tools/_convert/__init__.py +12 -0
  111. ot_tools/_convert/excel.py +279 -0
  112. ot_tools/_convert/pdf.py +254 -0
  113. ot_tools/_convert/powerpoint.py +268 -0
  114. ot_tools/_convert/utils.py +358 -0
  115. ot_tools/_convert/word.py +283 -0
  116. ot_tools/brave_search.py +604 -0
  117. ot_tools/code_search.py +736 -0
  118. ot_tools/context7.py +495 -0
  119. ot_tools/convert.py +614 -0
  120. ot_tools/db.py +415 -0
  121. ot_tools/diagram.py +1604 -0
  122. ot_tools/diagram.yaml +167 -0
  123. ot_tools/excel.py +1372 -0
  124. ot_tools/file.py +1348 -0
  125. ot_tools/firecrawl.py +732 -0
  126. ot_tools/grounding_search.py +646 -0
  127. ot_tools/package.py +604 -0
  128. ot_tools/py.typed +0 -0
  129. ot_tools/ripgrep.py +544 -0
  130. ot_tools/scaffold.py +471 -0
  131. ot_tools/transform.py +213 -0
  132. ot_tools/web_fetch.py +384 -0
@@ -0,0 +1,388 @@
1
+ """Worker pool for managing persistent tool subprocesses.
2
+
3
+ Manages worker lifecycle (spawn, call, reap) for external tools that run
4
+ in isolated processes with their own dependencies via PEP 723.
5
+
6
+ Workers communicate via JSON-RPC over stdin/stdout.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ import atexit
12
+ import json
13
+ import os
14
+ import subprocess
15
+ import threading
16
+ import time
17
+ from concurrent.futures import ThreadPoolExecutor
18
+ from dataclasses import dataclass, field
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ from loguru import logger
23
+
24
# Shared thread pool for non-blocking I/O operations.
# Created lazily by _get_io_executor(); _io_executor_lock guards creation.
_io_executor: ThreadPoolExecutor | None = None
_io_executor_lock = threading.Lock()
27
+
28
+
29
def _get_io_executor() -> ThreadPoolExecutor:
    """Return the shared I/O thread pool, creating it on first use.

    Double-checked locking: the common already-created path returns
    without touching the lock; creation itself is serialized.
    """
    global _io_executor
    if _io_executor is not None:
        return _io_executor
    with _io_executor_lock:
        if _io_executor is None:
            _io_executor = ThreadPoolExecutor(
                max_workers=4, thread_name_prefix="worker-io"
            )
    return _io_executor
39
+
40
+
41
@dataclass
class Worker:
    """A persistent worker subprocess.

    Tracks the subprocess handle plus bookkeeping used by the pool's
    idle-reaping logic (last-use timestamp and call counter).
    """

    tool_path: Path
    process: subprocess.Popen[str]
    last_used: float = field(default_factory=time.time)
    call_count: int = 0

    def is_alive(self) -> bool:
        """Return True while the worker process has not exited."""
        return self.process.poll() is None

    def refresh(self) -> None:
        """Mark the worker as just used (bump call count and timestamp)."""
        self.call_count += 1
        self.last_used = time.time()

    def drain_stderr(self, timeout: float = 0.5) -> str:
        """Read any available stderr output from the worker.

        Args:
            timeout: Maximum time to wait for stderr data

        Returns:
            Stderr content (last 20 lines if very long), or "" when no
            stderr pipe exists or nothing could be read in time.
        """
        stream = self.process.stderr
        if stream is None:
            return ""
        try:
            # Read on the shared pool so a blocked pipe cannot hang the caller.
            pending = _get_io_executor().submit(stream.read)
            try:
                output = pending.result(timeout=timeout)
            except TimeoutError:
                pending.cancel()
                return ""
            if output:
                # Keep only the tail — the most recent lines are the
                # most relevant for diagnosing a crash.
                tail = output.strip().split("\n")
                if len(tail) > 20:
                    output = "\n".join(["...(truncated)", *tail[-20:]])
            return output.strip()
        except Exception:
            # Best-effort diagnostics: never let stderr capture raise.
            return ""
90
+
91
+
92
class WorkerPool:
    """Manages a pool of persistent worker processes.

    Workers are spawned on first call and reused for subsequent calls.
    Idle workers are reaped after a configurable timeout.

    Isolated tools communicate via JSON-RPC over stdin/stdout and are
    fully standalone (no onetool imports).

    NOTE(review): the internal lock guards only the worker dict, not the
    per-worker pipes — two threads calling the same tool concurrently
    could interleave writes on one worker's stdin. Confirm callers
    serialize per-tool access or that this is acceptable.
    """

    def __init__(
        self,
        idle_timeout: float = 600.0,
    ) -> None:
        """Initialize the worker pool.

        Args:
            idle_timeout: Seconds of inactivity before reaping worker (default: 10 min)
        """
        self.idle_timeout = idle_timeout
        # Resolved tool path -> live Worker
        self._workers: dict[Path, Worker] = {}
        # Guards _workers; deliberately NOT held during worker I/O
        self._lock = threading.Lock()
        self._reaper_thread: threading.Thread | None = None
        self._shutdown = threading.Event()

    def _start_reaper(self) -> None:
        """Start the background reaper thread if not already running."""
        if self._reaper_thread is not None and self._reaper_thread.is_alive():
            return

        self._shutdown.clear()
        # Daemon thread: never blocks interpreter exit
        self._reaper_thread = threading.Thread(
            target=self._reaper_loop,
            daemon=True,
            name="worker-reaper",
        )
        self._reaper_thread.start()

    def _reaper_loop(self) -> None:
        """Background loop that reaps idle workers.

        Event.wait doubles as both the shutdown signal and the poll
        interval: returns True (and exits) as soon as shutdown is set.
        """
        while not self._shutdown.wait(timeout=60.0):  # Check every minute
            self._reap_idle_workers()

    def _reap_idle_workers(self) -> None:
        """Terminate workers that have been idle too long.

        Also removes workers whose process already exited (crashed).
        """
        now = time.time()
        to_reap: list[Path] = []

        with self._lock:
            for tool_path, worker in self._workers.items():
                idle_time = now - worker.last_used
                if idle_time > self.idle_timeout:
                    to_reap.append(tool_path)
                elif not worker.is_alive():
                    # Worker crashed, remove from pool
                    to_reap.append(tool_path)
                    logger.warning(f"Worker for {tool_path.name} crashed, removing")

            for tool_path in to_reap:
                worker = self._workers.pop(tool_path)
                if worker.is_alive():
                    logger.info(
                        f"Reaping idle worker {tool_path.name} "
                        f"(idle {now - worker.last_used:.0f}s, {worker.call_count} calls)"
                    )
                    # Graceful terminate first; escalate to kill after 5s
                    worker.process.terminate()
                    try:
                        worker.process.wait(timeout=5.0)
                    except subprocess.TimeoutExpired:
                        worker.process.kill()

    def _spawn_worker(
        self,
        tool_path: Path,
        _config: dict[str, Any],
        _secrets: dict[str, str],
    ) -> Worker:
        """Spawn a new worker process for a tool.

        Args:
            tool_path: Path to the tool Python file
            _config: Configuration dict (reserved for future use)
            _secrets: Secrets dict (reserved for future use)

        Returns:
            New Worker instance
        """
        # Build uv run command — uv resolves the tool's PEP 723 deps
        cmd = [
            "uv",
            "run",
            str(tool_path),
        ]

        logger.debug(f"Spawning worker: {' '.join(cmd)}")

        # Minimal env: PATH only (isolated tools are fully standalone)
        env = {
            "PATH": os.environ.get("PATH", ""),
        }
        # Pass through OT_CWD for path resolution in tool code
        if ot_cwd := os.environ.get("OT_CWD"):
            env["OT_CWD"] = ot_cwd

        process = subprocess.Popen(
            cmd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1,  # Line buffered — matches the line-based JSON-RPC framing
            env=env,
        )

        worker = Worker(tool_path=tool_path, process=process)
        return worker

    def call(
        self,
        tool_path: Path,
        function: str,
        kwargs: dict[str, Any],
        config: dict[str, Any] | None = None,
        secrets: dict[str, str] | None = None,
        timeout: float = 60.0,
    ) -> Any:
        """Call a function in a worker process.

        Spawns a new worker if needed, or reuses an existing one.
        Handles worker crashes by respawning.

        Args:
            tool_path: Path to the tool Python file
            function: Function name to call
            kwargs: Keyword arguments for the function
            config: Configuration dict to pass to worker
            secrets: Secrets dict to pass to worker
            timeout: Call timeout in seconds

        Returns:
            Result from the function

        Raises:
            RuntimeError: If worker fails or returns an error
            TimeoutError: If the worker does not respond within `timeout`
        """
        # Resolve so dict keys are canonical regardless of caller's cwd
        tool_path = tool_path.resolve()
        config = config or {}
        secrets = secrets or {}

        # Ensure reaper is running
        self._start_reaper()

        with self._lock:
            worker = self._workers.get(tool_path)

            # Check if we need a new worker
            if worker is None or not worker.is_alive():
                if worker is not None:
                    logger.warning(f"Worker for {tool_path.name} died, respawning")
                worker = self._spawn_worker(tool_path, config, secrets)
                self._workers[tool_path] = worker

            worker.refresh()

        # Build JSON-RPC request (one JSON object per line)
        request = {
            "function": function,
            "kwargs": kwargs,
            "config": config,
            "secrets": secrets,
        }
        request_line = json.dumps(request) + "\n"

        # Send request
        try:
            if worker.process.stdin is None:
                raise RuntimeError("Worker stdin is None")
            worker.process.stdin.write(request_line)
            worker.process.stdin.flush()
        except (BrokenPipeError, OSError) as e:
            # Worker died during write - capture stderr for debugging
            stderr = worker.drain_stderr()
            with self._lock:
                self._workers.pop(tool_path, None)
            error_msg = f"Worker for {tool_path.name} died: {e}"
            if stderr:
                error_msg += f"\nStderr:\n{stderr}"
            raise RuntimeError(error_msg) from e

        # Read response with timeout using thread pool (non-blocking, cross-platform)
        try:
            if worker.process.stdout is None:
                raise RuntimeError("Worker stdout is None")

            # Use thread pool for readline to avoid blocking main thread
            # This is more portable than select.select() and works on Windows
            executor = _get_io_executor()
            future = executor.submit(worker.process.stdout.readline)
            try:
                response_line = future.result(timeout=timeout)
            except TimeoutError:
                # NOTE(review): cancel() is a no-op on a running future; the
                # pool thread stays blocked on readline until the worker is
                # killed below. Also, catching builtin TimeoutError for a
                # futures timeout assumes Python 3.11+ — confirm min version.
                future.cancel()
                raise TimeoutError(f"Worker call timed out after {timeout}s") from None

            if not response_line:
                # Worker closed stdout (crashed) - capture stderr for debugging
                stderr = worker.drain_stderr()
                with self._lock:
                    self._workers.pop(tool_path, None)
                error_msg = f"Worker for {tool_path.name} closed unexpectedly"
                if stderr:
                    error_msg += f"\nStderr:\n{stderr}"
                raise RuntimeError(error_msg)

            response = json.loads(response_line)

        except json.JSONDecodeError as e:
            raise RuntimeError(f"Invalid JSON from worker: {e}") from e
        except TimeoutError:
            # Kill the worker and remove from pool — its stdout stream is
            # now desynchronized from the request stream and unusable
            with self._lock:
                w = self._workers.pop(tool_path, None)
                if w:
                    w.process.kill()
            raise

        # Check for error in response
        if response.get("error"):
            raise RuntimeError(response["error"])

        return response.get("result")

    def shutdown(self) -> None:
        """Shut down all workers and stop the reaper thread."""
        self._shutdown.set()

        with self._lock:
            for tool_path, worker in list(self._workers.items()):
                if worker.is_alive():
                    logger.info(f"Shutting down worker {tool_path.name}")
                    # Graceful terminate first; escalate to kill after 5s
                    worker.process.terminate()
                    try:
                        worker.process.wait(timeout=5.0)
                    except subprocess.TimeoutExpired:
                        worker.process.kill()
            self._workers.clear()

    def get_stats(self) -> dict[str, Any]:
        """Get pool statistics.

        Returns:
            Dict with pool stats (worker count, total calls, etc.)
        """
        with self._lock:
            workers_info = []
            for tool_path, worker in self._workers.items():
                workers_info.append(
                    {
                        "tool": tool_path.name,
                        "alive": worker.is_alive(),
                        "calls": worker.call_count,
                        "idle_seconds": time.time() - worker.last_used,
                    }
                )

            return {
                "worker_count": len(self._workers),
                "idle_timeout": self.idle_timeout,
                "workers": workers_info,
            }
362
+
363
+
364
# Global worker pool instance (lazy initialized by get_worker_pool)
_pool: WorkerPool | None = None
366
+
367
+
368
def get_worker_pool() -> WorkerPool:
    """Return the process-wide worker pool, creating it on first use."""
    global _pool
    pool = _pool
    if pool is None:
        pool = _pool = WorkerPool()
    return pool
374
+
375
+
376
def shutdown_worker_pool() -> None:
    """Shut down the global worker pool and I/O executor.

    Safe to call repeatedly: both globals are reset to None, and the
    lazy getters will recreate them on the next use.
    """
    global _pool, _io_executor
    pool, _pool = _pool, None
    if pool is not None:
        pool.shutdown()
    executor, _io_executor = _io_executor, None
    if executor is not None:
        executor.shutdown(wait=False)
385
+
386
+
387
# Register cleanup on process exit to prevent orphaned workers
# (terminates live workers and releases the shared I/O executor)
atexit.register(shutdown_worker_pool)
@@ -0,0 +1,189 @@
1
+ """Worker proxy for routing tool calls to persistent workers.
2
+
3
+ Creates proxy objects that can be added to the execution namespace,
4
+ allowing dot notation access (e.g., brave.search()) to route to workers.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from collections.abc import Callable
10
+ from pathlib import Path
11
+ from typing import Any
12
+
13
+ from ot.executor.param_resolver import get_tool_param_names, resolve_kwargs
14
+ from ot.executor.worker_pool import get_worker_pool
15
+ from ot.stats import timed_tool_call
16
+
17
+
18
class WorkerFunctionProxy:
    """Callable proxy that forwards a single tool function to a worker."""

    def __init__(
        self,
        tool_path: Path,
        function_name: str,
        config: dict[str, Any],
        secrets: dict[str, str],
    ) -> None:
        """Initialize function proxy.

        Args:
            tool_path: Path to the tool Python file
            function_name: Name of the function to call
            config: Configuration dict to pass to worker
            secrets: Secrets dict to pass to worker
        """
        self.tool_path = tool_path
        self.function_name = function_name
        self.config = config
        self.secrets = secrets

    def __call__(self, **kwargs: Any) -> Any:
        """Invoke the proxied function in its worker process.

        Args:
            **kwargs: Keyword arguments for the function

        Returns:
            Result from the function
        """
        qualified = f"{self.tool_path.stem}.{self.function_name}"

        # Expand abbreviated keyword names using the cached parameter list
        if kwargs:
            known = get_tool_param_names(qualified)
            if known:
                kwargs = resolve_kwargs(kwargs, known)

        # Record call timing under the qualified tool name
        with timed_tool_call(qualified):
            return get_worker_pool().call(
                tool_path=self.tool_path,
                function=self.function_name,
                kwargs=kwargs,
                config=self.config,
                secrets=self.secrets,
            )

    def __repr__(self) -> str:
        return f"<WorkerFunctionProxy {self.tool_path.stem}.{self.function_name}>"
70
+
71
+
72
class WorkerPackProxy:
    """Attribute-access proxy over a tool pack's functions.

    Provides dot notation access: pack.function(**kwargs)
    """

    def __init__(
        self,
        tool_path: Path,
        functions: list[str],
        config: dict[str, Any],
        secrets: dict[str, str],
    ) -> None:
        """Initialize pack proxy.

        Args:
            tool_path: Path to the tool Python file
            functions: List of function names available in the tool
            config: Configuration dict to pass to worker
            secrets: Secrets dict to pass to worker
        """
        self.tool_path = tool_path
        self.functions = set(functions)
        self.config = config
        self.secrets = secrets
        self._function_cache: dict[str, WorkerFunctionProxy] = {}

    def __getattr__(self, name: str) -> WorkerFunctionProxy:
        """Resolve *name* to a (cached) function proxy.

        Args:
            name: Function name

        Returns:
            WorkerFunctionProxy for the function

        Raises:
            AttributeError: If the name is private or not exported by the tool
        """
        # Refuse private names so dunder probing (pickle, copy, ...) fails fast
        if name.startswith("_"):
            raise AttributeError(f"Cannot access private attribute '{name}'")

        if name not in self.functions:
            available = ", ".join(sorted(self.functions))
            raise AttributeError(
                f"Tool '{self.tool_path.stem}' has no function '{name}'. "
                f"Available: {available}"
            )

        proxy = self._function_cache.get(name)
        if proxy is None:
            proxy = WorkerFunctionProxy(
                tool_path=self.tool_path,
                function_name=name,
                config=self.config,
                secrets=self.secrets,
            )
            self._function_cache[name] = proxy
        return proxy

    def __repr__(self) -> str:
        listing = ", ".join(sorted(self.functions))
        return f"<WorkerPackProxy {self.tool_path.stem}: {listing}>"

    def __dir__(self) -> list[str]:
        """Return available function names for introspection."""
        return list(self.functions)
138
+
139
+
140
def create_worker_proxy(
    tool_path: Path,
    functions: list[str],
    config: dict[str, Any] | None = None,
    secrets: dict[str, str] | None = None,
) -> WorkerPackProxy:
    """Build a pack proxy exposing a tool's functions via dot notation.

    Args:
        tool_path: Path to the tool Python file
        functions: List of function names available in the tool
        config: Configuration dict to pass to worker (empty if None)
        secrets: Secrets dict to pass to worker (empty if None)

    Returns:
        WorkerPackProxy for the tool
    """
    proxy = WorkerPackProxy(
        tool_path=tool_path,
        functions=functions,
        config=config or {},
        secrets=secrets or {},
    )
    return proxy
163
+
164
+
165
def create_worker_function(
    tool_path: Path,
    function_name: str,
    config: dict[str, Any] | None = None,
    secrets: dict[str, str] | None = None,
) -> Callable[..., Any]:
    """Build a standalone callable that routes one function to a worker.

    Use this when you need a single function rather than a whole pack.

    Args:
        tool_path: Path to the tool Python file
        function_name: Name of the function to call
        config: Configuration dict to pass to worker (empty if None)
        secrets: Secrets dict to pass to worker (empty if None)

    Returns:
        Callable that routes to the worker
    """
    proxy = WorkerFunctionProxy(
        tool_path=tool_path,
        function_name=function_name,
        config=config or {},
        secrets=secrets or {},
    )
    return proxy