onetool-mcp 1.0.0b1__py3-none-any.whl → 1.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. onetool/cli.py +63 -4
  2. onetool_mcp-1.0.0rc2.dist-info/METADATA +266 -0
  3. onetool_mcp-1.0.0rc2.dist-info/RECORD +129 -0
  4. {onetool_mcp-1.0.0b1.dist-info → onetool_mcp-1.0.0rc2.dist-info}/licenses/LICENSE.txt +1 -1
  5. {onetool_mcp-1.0.0b1.dist-info → onetool_mcp-1.0.0rc2.dist-info}/licenses/NOTICE.txt +54 -64
  6. ot/__main__.py +6 -6
  7. ot/config/__init__.py +48 -46
  8. ot/config/global_templates/__init__.py +2 -2
  9. ot/config/{defaults → global_templates}/diagram-templates/api-flow.mmd +33 -33
  10. ot/config/{defaults → global_templates}/diagram-templates/c4-context.puml +30 -30
  11. ot/config/{defaults → global_templates}/diagram-templates/class-diagram.mmd +87 -87
  12. ot/config/{defaults → global_templates}/diagram-templates/feature-mindmap.mmd +70 -70
  13. ot/config/{defaults → global_templates}/diagram-templates/microservices.d2 +81 -81
  14. ot/config/{defaults → global_templates}/diagram-templates/project-gantt.mmd +37 -37
  15. ot/config/{defaults → global_templates}/diagram-templates/state-machine.mmd +42 -42
  16. ot/config/global_templates/diagram.yaml +167 -0
  17. ot/config/global_templates/onetool.yaml +3 -1
  18. ot/config/{defaults → global_templates}/prompts.yaml +102 -97
  19. ot/config/global_templates/security.yaml +31 -0
  20. ot/config/global_templates/servers.yaml +93 -12
  21. ot/config/global_templates/snippets.yaml +5 -26
  22. ot/config/{defaults → global_templates}/tool_templates/__init__.py +7 -7
  23. ot/config/loader.py +221 -105
  24. ot/config/mcp.py +5 -1
  25. ot/config/secrets.py +192 -190
  26. ot/decorators.py +116 -116
  27. ot/executor/__init__.py +35 -35
  28. ot/executor/base.py +16 -16
  29. ot/executor/fence_processor.py +83 -83
  30. ot/executor/linter.py +142 -142
  31. ot/executor/pep723.py +288 -288
  32. ot/executor/runner.py +20 -6
  33. ot/executor/simple.py +163 -163
  34. ot/executor/validator.py +603 -164
  35. ot/http_client.py +145 -145
  36. ot/logging/__init__.py +37 -37
  37. ot/logging/entry.py +213 -213
  38. ot/logging/format.py +191 -188
  39. ot/logging/span.py +349 -349
  40. ot/meta.py +236 -14
  41. ot/paths.py +32 -49
  42. ot/prompts.py +218 -218
  43. ot/proxy/manager.py +14 -2
  44. ot/registry/__init__.py +189 -189
  45. ot/registry/parser.py +269 -269
  46. ot/server.py +330 -315
  47. ot/shortcuts/__init__.py +15 -15
  48. ot/shortcuts/aliases.py +87 -87
  49. ot/shortcuts/snippets.py +258 -258
  50. ot/stats/__init__.py +35 -35
  51. ot/stats/html.py +2 -2
  52. ot/stats/reader.py +354 -354
  53. ot/stats/timing.py +57 -57
  54. ot/support.py +63 -63
  55. ot/tools.py +1 -1
  56. ot/utils/batch.py +161 -161
  57. ot/utils/cache.py +120 -120
  58. ot/utils/exceptions.py +23 -23
  59. ot/utils/factory.py +178 -179
  60. ot/utils/format.py +65 -65
  61. ot/utils/http.py +202 -202
  62. ot/utils/platform.py +45 -45
  63. ot/utils/truncate.py +69 -69
  64. ot_tools/__init__.py +4 -4
  65. ot_tools/_convert/__init__.py +12 -12
  66. ot_tools/_convert/pdf.py +254 -254
  67. ot_tools/diagram.yaml +167 -167
  68. ot_tools/scaffold.py +2 -2
  69. ot_tools/transform.py +124 -19
  70. ot_tools/web_fetch.py +94 -43
  71. onetool_mcp-1.0.0b1.dist-info/METADATA +0 -163
  72. onetool_mcp-1.0.0b1.dist-info/RECORD +0 -132
  73. ot/config/defaults/bench.yaml +0 -4
  74. ot/config/defaults/onetool.yaml +0 -25
  75. ot/config/defaults/servers.yaml +0 -7
  76. ot/config/defaults/snippets.yaml +0 -4
  77. ot_tools/firecrawl.py +0 -732
  78. {onetool_mcp-1.0.0b1.dist-info → onetool_mcp-1.0.0rc2.dist-info}/WHEEL +0 -0
  79. {onetool_mcp-1.0.0b1.dist-info → onetool_mcp-1.0.0rc2.dist-info}/entry_points.txt +0 -0
  80. /ot/config/{defaults → global_templates}/tool_templates/extension.py +0 -0
  81. /ot/config/{defaults → global_templates}/tool_templates/isolated.py +0 -0
ot/executor/pep723.py CHANGED
@@ -1,288 +1,288 @@
(file rewritten: all 288 lines removed and re-added unchanged; content shown once)
"""PEP 723 inline script metadata detection and parsing.

PEP 723 defines inline script metadata for Python scripts, allowing them
to declare dependencies and Python version requirements.

Example:
    # /// script
    # requires-python = ">=3.11"
    # dependencies = [
    #     "httpx>=0.27.0",
    #     "trafilatura>=2.0.0",
    # ]
    # ///

This module detects such headers and extracts tool functions for worker routing.
"""

from __future__ import annotations

import ast
import re
import tomllib
from dataclasses import dataclass, field
from pathlib import Path

# Regex to match PEP 723 script block
# Matches: # /// script ... # ///
PEP723_PATTERN = re.compile(
    r"^# /// script\s*$"
    r"(.*?)"
    r"^# ///$",
    re.MULTILINE | re.DOTALL,
)


@dataclass
class ScriptMetadata:
    """Parsed PEP 723 script metadata."""

    requires_python: str | None = None
    dependencies: list[str] = field(default_factory=list)
    raw_content: str = ""

    @property
    def has_dependencies(self) -> bool:
        """Check if script declares any dependencies."""
        return bool(self.dependencies)


@dataclass
class ToolFileInfo:
    """Information about a tool file.

    Attributes:
        path: Path to the tool file.
        pack: Pack name (e.g., "brave" for brave.search).
        functions: List of public function names.
        is_worker: True if tool uses worker subprocess (PEP 723 with deps).
        is_internal: True if tool is bundled with OneTool (from ot_tools package).
        metadata: Parsed PEP 723 metadata if present.
        config_class_source: Source code of Config class if present.
    """

    path: Path
    pack: str | None = None
    functions: list[str] = field(default_factory=list)
    is_worker: bool = False
    is_internal: bool = False
    metadata: ScriptMetadata | None = None
    config_class_source: str | None = None


def parse_pep723_metadata(content: str) -> ScriptMetadata | None:
    """Parse PEP 723 inline script metadata from file content.

    Args:
        content: File content to parse

    Returns:
        ScriptMetadata if found, None otherwise
    """
    match = PEP723_PATTERN.search(content)
    if not match:
        return None

    raw_content = match.group(1).strip()

    # Strip "# " prefix from each line to get valid TOML
    toml_lines = [
        line[2:] if line.startswith("# ") else line.lstrip("#")
        for line in raw_content.split("\n")
    ]
    toml_content = "\n".join(toml_lines)

    try:
        data = tomllib.loads(toml_content)
    except tomllib.TOMLDecodeError:
        return None

    return ScriptMetadata(
        requires_python=data.get("requires-python"),
        dependencies=data.get("dependencies", []),
        raw_content=raw_content,
    )


def has_pep723_header(path: Path) -> bool:
    """Check if a file has a PEP 723 script header.

    Args:
        path: Path to Python file

    Returns:
        True if file has PEP 723 header
    """
    try:
        content = path.read_text()
        return PEP723_PATTERN.search(content) is not None
    except OSError:
        return False


def _extract_functions_from_ast(tree: ast.Module) -> list[str]:
    """Extract public function names from a parsed AST.

    Args:
        tree: Parsed AST module

    Returns:
        List of public function names
    """
    functions: list[str] = []

    # Check for __all__ definition
    all_names: list[str] | None = None
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if (
                    isinstance(target, ast.Name)
                    and target.id == "__all__"
                    and isinstance(node.value, ast.List)
                ):
                    all_names = []
                    for elt in node.value.elts:
                        if isinstance(elt, ast.Constant) and isinstance(elt.value, str):
                            all_names.append(elt.value)

    # Extract function definitions
    for node in tree.body:
        if isinstance(node, ast.FunctionDef):
            name = node.name
            # Skip private functions
            if name.startswith("_"):
                continue
            # If __all__ is defined, only include those
            if all_names is not None and name not in all_names:
                continue
            functions.append(name)

    return functions


def _extract_pack_from_ast(tree: ast.Module) -> str | None:
    """Extract the pack declaration from a parsed AST.

    Looks for: pack = "name" at the top of the file.

    Args:
        tree: Parsed AST module

    Returns:
        Pack string, or None if not declared
    """
    for node in tree.body:
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if (
                    isinstance(target, ast.Name)
                    and target.id == "pack"
                    and isinstance(node.value, ast.Constant)
                    and isinstance(node.value.value, str)
                ):
                    return node.value.value
    return None


def _extract_config_from_ast(tree: ast.Module, content: str) -> str | None:
    """Extract the Config class source from a parsed AST.

    Looks for: class Config(BaseModel): in the module body.
    The class must inherit from BaseModel (pydantic).

    Args:
        tree: Parsed AST module
        content: Original file content (needed for source extraction)

    Returns:
        Config class source code as string, or None if not found
    """
    for node in tree.body:
        if isinstance(node, ast.ClassDef) and node.name == "Config":
            # Verify it inherits from BaseModel
            for base in node.bases:
                base_name = None
                if isinstance(base, ast.Name):
                    base_name = base.id
                elif isinstance(base, ast.Attribute):
                    base_name = base.attr

                if base_name == "BaseModel":
                    # Extract source code using line numbers
                    lines = content.split("\n")
                    start_line = node.lineno - 1  # 0-indexed
                    end_line = node.end_lineno or node.lineno
                    config_source = "\n".join(lines[start_line:end_line])
                    return config_source

    return None


def analyze_tool_file(path: Path) -> ToolFileInfo:
    """Analyze a tool file for metadata, pack, functions, and config.

    Reads the file once and extracts all information in a single pass.

    Args:
        path: Path to Python file

    Returns:
        ToolFileInfo with all extracted information
    """
    info = ToolFileInfo(path=path)

    try:
        content = path.read_text()
    except OSError:
        return info

    # Check for PEP 723 metadata
    info.metadata = parse_pep723_metadata(content)
    info.is_worker = info.metadata is not None and info.metadata.has_dependencies

    # Parse AST once for all extractions
    try:
        tree = ast.parse(content)
    except SyntaxError:
        return info

    # Extract pack, functions, and config class from pre-parsed AST
    info.pack = _extract_pack_from_ast(tree)
    info.functions = _extract_functions_from_ast(tree)
    info.config_class_source = _extract_config_from_ast(tree, content)

    return info


def categorize_tools(
    tool_files: list[Path],
    internal_paths: set[Path] | None = None,
) -> tuple[list[ToolFileInfo], list[ToolFileInfo]]:
    """Categorize tool files into extension tools and internal tools.

    Internal tools (bundled with OneTool) run in-process.
    Extension tools (user-created with PEP 723) run in worker subprocesses.

    Args:
        tool_files: List of tool file paths.
        internal_paths: Set of paths that are internal tools (from ot_tools package).
            If provided, tools in this set are marked as is_internal=True.

    Returns:
        Tuple of (worker_tools, inprocess_tools)
    """
    worker_tools: list[ToolFileInfo] = []
    inprocess_tools: list[ToolFileInfo] = []
    internal_paths = internal_paths or set()

    for path in tool_files:
        info = analyze_tool_file(path)
        # Mark internal tools (bundled with OneTool)
        info.is_internal = path.resolve() in internal_paths
        if info.is_worker:
            worker_tools.append(info)
        else:
            inprocess_tools.append(info)

    return worker_tools, inprocess_tools
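Taken together, these helpers form a small pipeline: parse the PEP 723 header, analyze the file, then route it to a worker or in-process executor. A minimal sketch of that flow follows, assuming the wheel's ot/executor/pep723.py is importable as ot.executor.pep723; the sample tool file, its contents, and the printed values are illustrative only.

# Hedged sketch of the detection/categorization flow defined above.
# "demo_tool.py" and its contents are hypothetical; the import path assumes
# the wheel layout maps ot/executor/pep723.py to ot.executor.pep723.
from pathlib import Path

from ot.executor.pep723 import analyze_tool_file, categorize_tools

sample = Path("demo_tool.py")
sample.write_text(
    "# /// script\n"
    '# dependencies = ["httpx>=0.27.0"]\n'
    "# ///\n"
    'pack = "demo"\n'
    "\n"
    "def fetch(url: str) -> str:\n"
    "    return url\n"
)

info = analyze_tool_file(sample)
# pack comes from the module-level `pack = "demo"` assignment, functions from
# the public defs, and is_worker from the PEP 723 dependency list.
print(info.pack, info.functions, info.is_worker)  # demo ['fetch'] True

# Files with PEP 723 dependencies go to worker subprocesses; the rest run in-process.
worker_tools, inprocess_tools = categorize_tools([sample])
assert worker_tools and not inprocess_tools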
ot/executor/runner.py CHANGED
@@ -17,7 +17,7 @@ import ast
 import asyncio
 import io
 from contextlib import redirect_stdout
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any
 
 from loguru import logger
@@ -320,6 +320,7 @@ class PreparedCommand:
     code: str
     original: str
     error: str | None = None
+    warnings: list[str] = field(default_factory=list)
 
 
 def prepare_command(command: str) -> PreparedCommand:
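The new warnings field carries non-fatal validator findings through to callers; the next hunk shows prepare_command populating it from validation.warnings. Below is a minimal sketch of how a caller might read it, assuming prepare_command can be called without additional setup; the command string and the print-based handling are illustrative only.

# Hedged sketch: reading PreparedCommand.warnings after preparation.
# prepare_command() and the field names come from the hunks around this
# example; the command string below is illustrative.
from ot.executor.runner import prepare_command

prepared = prepare_command('brave.search(query="python mcp servers")')
if prepared.error:
    # Validation failed outright; nothing to execute.
    raise RuntimeError(prepared.error)
for warning in prepared.warnings:
    # Non-fatal findings: validation passed, execution can still proceed.
    print(f"validation warning: {warning}")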
@@ -380,9 +381,14 @@ def prepare_command(command: str) -> PreparedCommand:
             error=f"Code validation failed: {errors}",
         )
 
+    # Log warnings (validation passed but has warnings)
+    for warning in validation.warnings:
+        logger.warning(f"Code validation warning: {warning}")
+
     return PreparedCommand(
         code=stripped,
         original=command,
+        warnings=validation.warnings,
     )
 
 
@@ -393,8 +399,8 @@ def prepare_command(command: str) -> PreparedCommand:
 
 async def execute_command(
     command: str,
-    registry: ToolRegistry,  # noqa: ARG001
-    executor: SimpleExecutor,  # noqa: ARG001
+    registry: ToolRegistry,  # noqa: ARG001 - kept for API compatibility
+    executor: SimpleExecutor,  # noqa: ARG001 - kept for API compatibility
     tools_dir: Path | None = None,
     *,
     skip_validation: bool = False,
@@ -411,8 +417,8 @@ async def execute_command(
 
     Args:
        command: Raw command from LLM (may have fences)
-        registry: Tool registry for looking up functions
-        executor: Executor for running tool functions
+        registry: Tool registry (unused, kept for API compatibility)
+        executor: Executor (unused, kept for API compatibility)
        tools_dir: Path to tools directory
        skip_validation: If True, skip validation (use when already validated)
        prepared_code: Pre-processed code to execute (bypasses preparation steps)
@@ -450,7 +456,15 @@ async def execute_command(
     # Determine validation behavior
     should_validate = not skip_validation and prepared_code is None
 
-    with LogSpan(span="runner.execute", mode="code", command=stripped[:200]) as span:
+    # Extract tool name from command (e.g., "brave.search(query=...)" -> "brave.search")
+    # Only extract for single-line commands to avoid misleading results for code blocks
+    tool_name = None
+    if "(" in stripped:
+        prefix = stripped.split("(")[0].strip()
+        if "\n" not in prefix:
+            tool_name = prefix
+
+    with LogSpan(span="runner.execute", command=stripped, tool=tool_name) as span:
         try:
             if use_thread_pool:
                 # Run in thread pool so event loop can process proxy calls
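The last hunk replaces the truncated command field on the log span with the full command plus a derived tool name. The snippet below restates that extraction rule on its own, for illustration; the helper function name is not part of the package.

# Standalone restatement of the tool-name extraction added to execute_command
# above, for illustration only; _tool_name_for_span is a hypothetical name.
def _tool_name_for_span(stripped: str) -> str | None:
    """Return "pack.func" for single-line calls like brave.search(query=...)."""
    if "(" not in stripped:
        return None
    prefix = stripped.split("(")[0].strip()
    # Multi-line code blocks put newlines before the first "(", which makes the
    # prefix meaningless as a tool name, so those are skipped.
    return prefix if "\n" not in prefix else None

assert _tool_name_for_span('brave.search(query="python")') == "brave.search"
assert _tool_name_for_span("x = 1\nbrave.search(query=x)") is None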