onetool-mcp 1.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bench/__init__.py +5 -0
- bench/cli.py +69 -0
- bench/harness/__init__.py +66 -0
- bench/harness/client.py +692 -0
- bench/harness/config.py +397 -0
- bench/harness/csv_writer.py +109 -0
- bench/harness/evaluate.py +512 -0
- bench/harness/metrics.py +283 -0
- bench/harness/runner.py +899 -0
- bench/py.typed +0 -0
- bench/reporter.py +629 -0
- bench/run.py +487 -0
- bench/secrets.py +101 -0
- bench/utils.py +16 -0
- onetool/__init__.py +4 -0
- onetool/cli.py +391 -0
- onetool/py.typed +0 -0
- onetool_mcp-1.0.0b1.dist-info/METADATA +163 -0
- onetool_mcp-1.0.0b1.dist-info/RECORD +132 -0
- onetool_mcp-1.0.0b1.dist-info/WHEEL +4 -0
- onetool_mcp-1.0.0b1.dist-info/entry_points.txt +3 -0
- onetool_mcp-1.0.0b1.dist-info/licenses/LICENSE.txt +687 -0
- onetool_mcp-1.0.0b1.dist-info/licenses/NOTICE.txt +64 -0
- ot/__init__.py +37 -0
- ot/__main__.py +6 -0
- ot/_cli.py +107 -0
- ot/_tui.py +53 -0
- ot/config/__init__.py +46 -0
- ot/config/defaults/bench.yaml +4 -0
- ot/config/defaults/diagram-templates/api-flow.mmd +33 -0
- ot/config/defaults/diagram-templates/c4-context.puml +30 -0
- ot/config/defaults/diagram-templates/class-diagram.mmd +87 -0
- ot/config/defaults/diagram-templates/feature-mindmap.mmd +70 -0
- ot/config/defaults/diagram-templates/microservices.d2 +81 -0
- ot/config/defaults/diagram-templates/project-gantt.mmd +37 -0
- ot/config/defaults/diagram-templates/state-machine.mmd +42 -0
- ot/config/defaults/onetool.yaml +25 -0
- ot/config/defaults/prompts.yaml +97 -0
- ot/config/defaults/servers.yaml +7 -0
- ot/config/defaults/snippets.yaml +4 -0
- ot/config/defaults/tool_templates/__init__.py +7 -0
- ot/config/defaults/tool_templates/extension.py +52 -0
- ot/config/defaults/tool_templates/isolated.py +61 -0
- ot/config/dynamic.py +121 -0
- ot/config/global_templates/__init__.py +2 -0
- ot/config/global_templates/bench-secrets-template.yaml +6 -0
- ot/config/global_templates/bench.yaml +9 -0
- ot/config/global_templates/onetool.yaml +27 -0
- ot/config/global_templates/secrets-template.yaml +44 -0
- ot/config/global_templates/servers.yaml +18 -0
- ot/config/global_templates/snippets.yaml +235 -0
- ot/config/loader.py +1087 -0
- ot/config/mcp.py +145 -0
- ot/config/secrets.py +190 -0
- ot/config/tool_config.py +125 -0
- ot/decorators.py +116 -0
- ot/executor/__init__.py +35 -0
- ot/executor/base.py +16 -0
- ot/executor/fence_processor.py +83 -0
- ot/executor/linter.py +142 -0
- ot/executor/pack_proxy.py +260 -0
- ot/executor/param_resolver.py +140 -0
- ot/executor/pep723.py +288 -0
- ot/executor/result_store.py +369 -0
- ot/executor/runner.py +496 -0
- ot/executor/simple.py +163 -0
- ot/executor/tool_loader.py +396 -0
- ot/executor/validator.py +398 -0
- ot/executor/worker_pool.py +388 -0
- ot/executor/worker_proxy.py +189 -0
- ot/http_client.py +145 -0
- ot/logging/__init__.py +37 -0
- ot/logging/config.py +315 -0
- ot/logging/entry.py +213 -0
- ot/logging/format.py +188 -0
- ot/logging/span.py +349 -0
- ot/meta.py +1555 -0
- ot/paths.py +453 -0
- ot/prompts.py +218 -0
- ot/proxy/__init__.py +21 -0
- ot/proxy/manager.py +396 -0
- ot/py.typed +0 -0
- ot/registry/__init__.py +189 -0
- ot/registry/models.py +57 -0
- ot/registry/parser.py +269 -0
- ot/registry/registry.py +413 -0
- ot/server.py +315 -0
- ot/shortcuts/__init__.py +15 -0
- ot/shortcuts/aliases.py +87 -0
- ot/shortcuts/snippets.py +258 -0
- ot/stats/__init__.py +35 -0
- ot/stats/html.py +250 -0
- ot/stats/jsonl_writer.py +283 -0
- ot/stats/reader.py +354 -0
- ot/stats/timing.py +57 -0
- ot/support.py +63 -0
- ot/tools.py +114 -0
- ot/utils/__init__.py +81 -0
- ot/utils/batch.py +161 -0
- ot/utils/cache.py +120 -0
- ot/utils/deps.py +403 -0
- ot/utils/exceptions.py +23 -0
- ot/utils/factory.py +179 -0
- ot/utils/format.py +65 -0
- ot/utils/http.py +202 -0
- ot/utils/platform.py +45 -0
- ot/utils/sanitize.py +130 -0
- ot/utils/truncate.py +69 -0
- ot_tools/__init__.py +4 -0
- ot_tools/_convert/__init__.py +12 -0
- ot_tools/_convert/excel.py +279 -0
- ot_tools/_convert/pdf.py +254 -0
- ot_tools/_convert/powerpoint.py +268 -0
- ot_tools/_convert/utils.py +358 -0
- ot_tools/_convert/word.py +283 -0
- ot_tools/brave_search.py +604 -0
- ot_tools/code_search.py +736 -0
- ot_tools/context7.py +495 -0
- ot_tools/convert.py +614 -0
- ot_tools/db.py +415 -0
- ot_tools/diagram.py +1604 -0
- ot_tools/diagram.yaml +167 -0
- ot_tools/excel.py +1372 -0
- ot_tools/file.py +1348 -0
- ot_tools/firecrawl.py +732 -0
- ot_tools/grounding_search.py +646 -0
- ot_tools/package.py +604 -0
- ot_tools/py.typed +0 -0
- ot_tools/ripgrep.py +544 -0
- ot_tools/scaffold.py +471 -0
- ot_tools/transform.py +213 -0
- ot_tools/web_fetch.py +384 -0
ot/http_client.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"""Shared HTTP client utilities.
|
|
2
|
+
|
|
3
|
+
Provides a unified http_get() function for making HTTP GET requests with:
|
|
4
|
+
- Consistent error handling and message format
|
|
5
|
+
- Optional headers (for auth tokens)
|
|
6
|
+
- Optional timeout (defaults from config)
|
|
7
|
+
- Optional LogSpan integration for observability
|
|
8
|
+
- Content-type aware response parsing (JSON or text)
|
|
9
|
+
- Connection pooling via shared client singleton
|
|
10
|
+
|
|
11
|
+
Usage:
|
|
12
|
+
from ot.http_client import http_get
|
|
13
|
+
|
|
14
|
+
# Basic GET
|
|
15
|
+
success, result = http_get("https://api.example.com/data")
|
|
16
|
+
|
|
17
|
+
# With headers and params
|
|
18
|
+
success, result = http_get(
|
|
19
|
+
"https://api.example.com/search",
|
|
20
|
+
params={"q": "test"},
|
|
21
|
+
headers={"Authorization": "Bearer token"},
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
# With LogSpan for observability
|
|
25
|
+
success, result = http_get(
|
|
26
|
+
url,
|
|
27
|
+
log_span="api.fetch",
|
|
28
|
+
log_data={"query": query},
|
|
29
|
+
)
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
from __future__ import annotations
|
|
33
|
+
|
|
34
|
+
import atexit
|
|
35
|
+
import contextlib
|
|
36
|
+
import threading
|
|
37
|
+
from typing import Any
|
|
38
|
+
|
|
39
|
+
import httpx
|
|
40
|
+
|
|
41
|
+
# Process-wide shared HTTP client (lazily created) with connection pooling.
# Creation is guarded by _client_lock; see _get_shared_client() / _shutdown_client().
_client: httpx.Client | None = None
_client_lock = threading.Lock()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _get_shared_client() -> httpx.Client:
    """Get or create the shared HTTP client with connection pooling.

    Returns:
        The process-wide ``httpx.Client`` singleton.
    """
    global _client
    # Double-checked locking: the unlocked first read is the fast path once
    # the client exists. NOTE(review): safe under CPython's GIL for a simple
    # attribute read; confirm if this ever runs on a free-threaded build.
    if _client is None:
        with _client_lock:
            if _client is None:
                _client = httpx.Client(
                    timeout=30.0,
                    limits=httpx.Limits(
                        max_keepalive_connections=20,
                        max_connections=100,
                        keepalive_expiry=30.0,
                    ),
                )
                # Registered only once, inside the locked branch, so pooled
                # connections are closed at interpreter exit.
                atexit.register(_shutdown_client)
    return _client
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _shutdown_client() -> None:
    """Close the shared client on exit.

    Registered via ``atexit`` by :func:`_get_shared_client`. Errors during
    close are deliberately ignored — this is best-effort cleanup at
    interpreter shutdown.
    """
    global _client
    if _client is not None:
        with contextlib.suppress(Exception):
            _client.close()
        # Reset so a later call to _get_shared_client() would recreate it.
        _client = None
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def http_get(
    url: str,
    *,
    params: dict[str, Any] | None = None,
    headers: dict[str, str] | None = None,
    timeout: float | None = None,
    log_span: str | None = None,
    log_data: dict[str, Any] | None = None,
) -> tuple[bool, dict[str, Any] | str]:
    """Make HTTP GET request with unified error handling.

    Args:
        url: Full URL to request
        params: Optional query parameters
        headers: Optional HTTP headers (e.g., auth tokens)
        timeout: Request timeout in seconds (defaults to 30.0)
        log_span: Optional LogSpan name for observability
        log_data: Optional data to include in LogSpan

    Returns:
        Tuple of (success, result). If success, result is parsed JSON dict
        or response text. If failure, result is error message string.
    """
    # Local import — presumably avoids a circular dependency between
    # ot.http_client and ot.logging; TODO confirm.
    from ot.logging import LogSpan as LogSpanClass

    # Default timeout
    if timeout is None:
        timeout = 30.0

    # Optional LogSpan wrapper, entered manually so the span covers the
    # whole request and is always closed in the finally block below.
    span: LogSpanClass | None = None
    if log_span:
        span = LogSpanClass(span=log_span, **(log_data or {}))
        span.__enter__()

    try:
        client = _get_shared_client()
        # Per-call timeout overrides the shared client's default.
        response = client.get(url, params=params, headers=headers, timeout=timeout)
        response.raise_for_status()

        # Parse based on content type
        content_type = response.headers.get("content-type", "")
        if "application/json" in content_type:
            result = response.json()
        else:
            result = response.text

        if span:
            span.add("status", response.status_code)

        return True, result

    # raise_for_status() failures (4xx/5xx): include a truncated body excerpt.
    except httpx.HTTPStatusError as e:
        error_msg = f"HTTP error ({e.response.status_code}): {e.response.text[:200]}"
        if span:
            span.add("error", f"HTTP {e.response.status_code}")
        return False, error_msg

    # Transport-level failures (DNS, connect, read timeout, ...).
    except httpx.RequestError as e:
        error_msg = f"Request failed: {e}"
        if span:
            span.add("error", str(e))
        return False, error_msg

    # Catch-all so callers always get a (False, message) rather than a raise.
    except Exception as e:
        error_msg = f"Error: {e}"
        if span:
            span.add("error", str(e))
        return False, error_msg

    finally:
        # All exceptions are converted to return values above, so the span
        # exits cleanly; errors were already recorded via span.add().
        if span:
            span.__exit__(None, None, None)
|
ot/logging/__init__.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"""Structured logging for OneTool MCP server.

Provides JSON-structured logging with:
- LogEntry: Fluent API for building log entries with auto-timing
- LogSpan: Context manager for auto-logging operations
- File-only JSON output
"""

from loguru import logger

# Remove Loguru's default console handler immediately, before any submodule
# import can emit a record. This prevents logs from appearing on console
# before configure_logging() is called. The E402 suppressions below exist
# because these imports must come after this side effect.
logger.remove()

from ot.logging.config import (  # noqa: E402
    configure_logging,
    configure_test_logging,
)
from ot.logging.entry import LogEntry  # noqa: E402
from ot.logging.format import (  # noqa: E402
    format_log_entry,
    format_value,
    sanitize_for_output,
    sanitize_url,
)
from ot.logging.span import LogSpan  # noqa: E402

__all__ = [
    "LogEntry",
    "LogSpan",
    "configure_logging",
    "configure_test_logging",
    "format_log_entry",
    "format_value",
    "sanitize_for_output",
    "sanitize_url",
]
|
ot/logging/config.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
"""Loguru-based logging configuration.
|
|
2
|
+
|
|
3
|
+
Outputs structured JSON logs to file only.
|
|
4
|
+
|
|
5
|
+
Settings from onetool.yaml:
|
|
6
|
+
- log_level: INFO (default), DEBUG, WARNING, ERROR
|
|
7
|
+
- log_dir: Directory for log files (default: ../logs, relative to config dir)
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import logging
|
|
14
|
+
from datetime import UTC, datetime
|
|
15
|
+
from decimal import Decimal
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import Any
|
|
18
|
+
|
|
19
|
+
from loguru import logger
|
|
20
|
+
|
|
21
|
+
from ot.config.loader import get_config
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class InterceptHandler(logging.Handler):
    """Bridge handler that forwards stdlib ``logging`` records to Loguru."""

    def emit(self, record: logging.LogRecord) -> None:
        """Forward *record* to Loguru, preserving level and caller location."""
        # Prefer Loguru's named level; fall back to the numeric level when
        # Loguru does not know a level of that name.
        level: str | int
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Stash the original caller info from the LogRecord in extra so the
        # json_serializer can report the true source location.
        bound = logger.bind(
            _intercepted_file=record.filename,
            _intercepted_func=record.funcName,
            _intercepted_line=record.lineno,
        )
        bound.opt(exception=record.exc_info).log(level, record.getMessage())
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class JSONEncoder(json.JSONEncoder):
|
|
44
|
+
"""Custom JSON encoder for log fields."""
|
|
45
|
+
|
|
46
|
+
def default(self, o: Any) -> Any:
|
|
47
|
+
"""Handle non-serializable types."""
|
|
48
|
+
if isinstance(o, datetime):
|
|
49
|
+
if o.tzinfo is None:
|
|
50
|
+
o = o.replace(tzinfo=UTC)
|
|
51
|
+
return o.isoformat().replace("+00:00", "Z")
|
|
52
|
+
elif isinstance(o, Decimal):
|
|
53
|
+
return float(o)
|
|
54
|
+
elif isinstance(o, Path):
|
|
55
|
+
return str(o)
|
|
56
|
+
elif hasattr(o, "__dict__"):
|
|
57
|
+
return o.__dict__
|
|
58
|
+
return super().default(o)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def json_serializer(record: dict[str, Any]) -> str:
    """Serialize a Loguru record to one compact JSON log line.

    The message is expected to be JSON from LogEntry.__str__, so we parse
    and merge it into the log data.

    Args:
        record: Loguru record dict (``time``, ``level``, ``file``,
            ``message``, ``extra``, ``exception``, ...)

    Returns:
        Compact JSON string (no spaces), encoded with :class:`JSONEncoder`.
    """
    extra = record["extra"]

    # Use intercepted caller info if available (from standard logging redirect
    # via InterceptHandler); otherwise use Loguru's own caller info.
    if "_intercepted_file" in extra:
        source = f"{extra['_intercepted_file']}:{extra['_intercepted_func']}:{extra['_intercepted_line']}"
    else:
        source = f"{record['file'].name}:{record['function']}:{record['line']}"

    # Parse source into file, func, line
    src_parts = source.split(":")
    src_file = src_parts[0] if len(src_parts) > 0 else ""
    src_func = src_parts[1] if len(src_parts) > 1 else ""
    src_line = int(src_parts[2]) if len(src_parts) > 2 and src_parts[2].isdigit() else 0

    # NOTE(review): the timestamp is labelled "Z" (UTC) regardless of the
    # timezone of record["time"] — Loguru record times are typically local;
    # confirm this suffix is intentional.
    log_data: dict[str, Any] = {
        "timestamp": record["time"].strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z",
        "level": record["level"].name,
        "src_file": src_file,
        "src_func": src_func,
        "src_line": src_line,
    }

    msg = record["message"]
    # Try to parse message as JSON (from LogEntry.__str__); on success its
    # fields are merged flat into the log line.
    if msg.startswith("{") and msg.endswith("}"):
        try:
            parsed = json.loads(msg)
            log_data.update(parsed)
        except json.JSONDecodeError:
            log_data["message"] = msg
    # Drop known placeholder messages; keep anything else as plain text.
    elif msg and msg not in ("Structured log entry", "MCP stage", "MCP tool executed"):
        log_data["message"] = msg

    # Add any extra fields (excluding internal keys, which start with "_")
    public_extra = {k: v for k, v in extra.items() if not k.startswith("_")}
    if public_extra:
        log_data.update(public_extra)

    if record["exception"] is not None:
        log_data["exc_info"] = str(record["exception"])

    return json.dumps(log_data, separators=(",", ":"), cls=JSONEncoder)
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def dev_formatter(record: dict[str, Any]) -> str:
    """Format a Loguru record as a dev-friendly single line.

    Format: ``HH:MM:SS.mmm | LEVL | file:line | span | key=value | ...``

    Args:
        record: Loguru record dict (``time``, ``level``, ``file``,
            ``message``, ``extra``, ...)

    Returns:
        Pipe-separated single-line string.
    """
    extra = record["extra"]

    # Short timestamp (time only, date is in filename)
    timestamp = record["time"].strftime("%H:%M:%S.%f")[:-3]

    # 6-char level with padding so columns line up across lines.
    # NOTE(review): padding widths reconstructed to 6 chars per the comment
    # and the ljust(6) fallback — confirm against the original file.
    level_map = {
        "DEBUG": "DEBUG ",
        "INFO": "INFO  ",
        "WARNING": "WARN  ",
        "ERROR": "ERROR ",
        "CRITICAL": "CRIT  ",
        "TRACE": "TRACE ",
        "SUCCESS": "OK    ",
    }
    level = level_map.get(record["level"].name, record["level"].name[:6].ljust(6))

    # Short source: file:line (skip function name); prefer the original
    # caller captured by InterceptHandler when present.
    if "_intercepted_file" in extra:
        src = f"{extra['_intercepted_file'].replace('.py', '')}:{extra['_intercepted_line']}"
    else:
        src = f"{record['file'].name.replace('.py', '')}:{record['line']}"

    parts = [timestamp, level, src]

    # Parse message if it's JSON from LogEntry
    msg = record["message"]
    fields: dict[str, Any] = {}

    if msg.startswith("{") and msg.endswith("}"):
        try:
            fields = json.loads(msg)
        except json.JSONDecodeError:
            if msg.strip():
                fields["message"] = msg
    elif msg.strip():
        fields["message"] = msg

    # Add extra fields (excluding internal keys and the patcher's own
    # "serialized"/"dev" entries, which would recurse into this output)
    for k, v in extra.items():
        if not k.startswith("_") and k not in ("serialized", "dev"):
            fields[k] = v

    # Extract span first (most important context)
    span = fields.pop("span", None)
    if span:
        parts.append(str(span))

    # Format remaining fields
    for k, v in fields.items():
        # Zero durations are noise — omit them.
        if k == "duration" and v == 0.0:
            continue
        if isinstance(v, list):
            # Truncate long lists to the first 10 items.
            if len(v) > 10:
                list_items = ", ".join(str(x) for x in v[:10])
                parts.append(f"{k}=[{list_items}, ...]")
            else:
                parts.append(f"{k}={v}")
        elif isinstance(v, dict):
            # Show full dict: key={k1=v1, k2=v2, ...}, capped at 10 entries
            dict_items: list[str] = [f"{dk}={dv}" for dk, dv in list(v.items())[:10]]
            if len(v) > 10:
                dict_items.append("...")
            parts.append(f"{k}={{{', '.join(dict_items)}}}")
        elif k == "message":
            # Plain message without key=
            parts.append(str(v))
        else:
            parts.append(f"{k}={v}")

    return " | ".join(parts)
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def patching(record: Any) -> None:
    """Loguru patcher: precompute both output renderings for *record*.

    Stores the JSON line under ``extra["serialized"]`` and the human-readable
    line under ``extra["dev"]`` so sinks can select either via their
    ``format`` string (``{extra[serialized]}`` / ``{extra[dev]}``).
    """
    record["extra"]["serialized"] = json_serializer(record)
    record["extra"]["dev"] = dev_formatter(record)
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def configure_logging(log_name: str = "onetool", level: str | None = None) -> None:
    """Configure Loguru for file-only output with dev-friendly format.

    Args:
        log_name: Name for the log file (e.g., "serve" -> logs/serve.log)
        level: Optional log level override. If None, uses config value.

    Settings from onetool.yaml:
        - log_level: Log level (default: INFO)
        - log_dir: Directory for log files (default: ../logs, relative to config dir)
    """
    # Drop any previously configured sinks so reconfiguration is idempotent.
    logger.remove()

    config = get_config()
    level = (level or config.log_level).upper()
    log_dir = config.get_log_dir_path()
    log_dir.mkdir(parents=True, exist_ok=True)
    log_file = log_dir / f"{log_name}.log"

    # Patcher precomputes extra["dev"] / extra["serialized"] for every record.
    logger.configure(patcher=patching)

    # Dev-friendly output to log file, with size-based rotation.
    logger.add(
        log_file,
        level=level,
        format="{extra[dev]}",
        colorize=False,
        backtrace=True,
        diagnose=True,
        rotation="10 MB",
        retention="5 days",
    )

    # Intercept standard logging (force=True replaces existing root handlers).
    logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)

    # Intercept FastMCP and related loggers directly; propagate=False keeps
    # their records from reaching the root handler twice.
    for logger_name in ["fastmcp", "mcp", "uvicorn"]:
        logging.getLogger(logger_name).handlers = [InterceptHandler()]
        logging.getLogger(logger_name).propagate = False

    # Silence noisy HTTP/network loggers - set to WARNING to suppress DEBUG spam
    for logger_name in ["httpcore", "httpx", "hpack"]:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    logger.debug("Logging configured", level=level, file=str(log_file))
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
def configure_test_logging(
    module_name: str,
    dev_output: bool = True,
    dev_file: bool = False,
) -> None:
    """Configure Loguru for test file logging with optional dev-friendly output.

    Creates a separate log file for each test module in logs/.

    Args:
        module_name: Test module name (e.g., "test_tools")
        dev_output: If True, output dev-friendly logs to stderr
        dev_file: If True, also write dev-friendly logs to {module_name}.dev.log
    """
    import sys

    # Drop any previously configured sinks so repeated calls are idempotent.
    logger.remove()

    config = get_config()
    # Tests default to DEBUG unless the config explicitly overrides INFO.
    level = config.log_level.upper() if config.log_level != "INFO" else "DEBUG"

    # Create logs directory (resolved relative to config dir)
    log_dir = config.get_log_dir_path()
    log_dir.mkdir(parents=True, exist_ok=True)

    # Test-specific log file - append mode
    log_file = log_dir / f"{module_name}.log"

    # Patcher precomputes extra["dev"] / extra["serialized"] for every record.
    logger.configure(patcher=patching)

    # JSON output to file
    logger.add(
        str(log_file),
        level=level,
        format="{extra[serialized]}",
        colorize=False,
        backtrace=True,
        diagnose=True,
    )

    # Dev-friendly output to stderr
    if dev_output:
        logger.add(
            sys.stderr,
            level=level,
            format="{extra[dev]}",
            colorize=False,
        )

    # Dev-friendly output to file
    if dev_file:
        dev_log_file = log_dir / f"{module_name}.dev.log"
        logger.add(
            str(dev_log_file),
            level=level,
            format="{extra[dev]}",
            colorize=False,
        )

    # Intercept standard logging (force=True replaces existing root handlers).
    logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)

    # Silence noisy HTTP/network/client loggers - set to WARNING to suppress DEBUG/INFO spam
    for logger_name in [
        "httpcore",
        "httpx",
        "mcp",
        "anyio",
        "hpack",
        "openai",
        "openai._base_client",
    ]:
        logging.getLogger(logger_name).setLevel(logging.WARNING)
|
ot/logging/entry.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
"""LogEntry class for structured logging.
|
|
2
|
+
|
|
3
|
+
A simple struct for building log entries with automatic timing.
|
|
4
|
+
Supports fluent API, dict-style access, and lazy duration calculation.
|
|
5
|
+
|
|
6
|
+
Example:
|
|
7
|
+
# Inline - all fields in constructor
|
|
8
|
+
logger.debug(LogEntry(event="command.received", command=command))
|
|
9
|
+
|
|
10
|
+
# Fluent - chain adds
|
|
11
|
+
logger.debug(LogEntry(event="tool.lookup")
|
|
12
|
+
.add("function", func_name)
|
|
13
|
+
.add("found", True))
|
|
14
|
+
|
|
15
|
+
# Multiple logs show increasing duration (no caching)
|
|
16
|
+
entry = LogEntry(event="multi_step")
|
|
17
|
+
do_step_1()
|
|
18
|
+
logger.debug(entry) # duration: 0.1s
|
|
19
|
+
do_step_2()
|
|
20
|
+
logger.info(entry) # duration: 0.3s
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
from __future__ import annotations
|
|
24
|
+
|
|
25
|
+
import json
|
|
26
|
+
import time
|
|
27
|
+
from typing import Any
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class LogEntry:
    """Structured log entry with automatic timing.

    Timing starts automatically on creation. Duration is calculated
    lazily (never cached), so logging the same entry repeatedly shows
    an increasing duration.
    """

    def __init__(self, **initial_fields: Any) -> None:
        """Initialize a log entry with optional initial fields.

        Args:
            **initial_fields: Initial fields for the log entry
        """
        # perf_counter is monotonic, so durations are immune to clock changes.
        self._start_time = time.perf_counter()
        self._fields: dict[str, Any] = dict(initial_fields)
        self._status: str | None = None
        self._status_code: int | None = None
        self._error_type: str | None = None
        self._error_message: str | None = None

    def add(self, key: str | None = None, value: Any = None, **kwargs: Any) -> LogEntry:
        """Add one or more fields to the entry.

        Can be called with a single key-value pair or with keyword arguments.

        Args:
            key: Field name (optional if using kwargs)
            value: Field value (required if key is provided)
            **kwargs: Bulk field additions

        Returns:
            Self for method chaining

        Example:
            entry.add("function", func_name)
            entry.add(function=func_name, found=True)
        """
        if key is not None:
            self._fields[key] = value
        self._fields.update(kwargs)
        return self

    def success(self, status_code: int | None = None) -> LogEntry:
        """Mark the entry as successful.

        Args:
            status_code: Optional HTTP status code

        Returns:
            Self for method chaining
        """
        self._status = "SUCCESS"
        self._status_code = status_code
        return self

    def failure(
        self,
        error: Exception | None = None,
        error_type: str | None = None,
        error_message: str | None = None,
    ) -> LogEntry:
        """Mark the entry as failed.

        Explicit ``error_type`` / ``error_message`` take precedence over
        values derived from ``error``.

        Args:
            error: Exception that caused the failure
            error_type: Type name of the error
            error_message: Error message

        Returns:
            Self for method chaining
        """
        self._status = "FAILED"
        if error is not None:
            self._error_type = type(error).__name__
            self._error_message = str(error)
        if error_type is not None:
            self._error_type = error_type
        if error_message is not None:
            self._error_message = error_message
        return self

    def __setitem__(self, key: str, value: Any) -> None:
        """Set a field using dict-style access.

        Args:
            key: Field name
            value: Field value
        """
        self._fields[key] = value

    def __getitem__(self, key: str) -> Any:
        """Get a field using dict-style access.

        Args:
            key: Field name

        Returns:
            Field value

        Raises:
            KeyError: If field doesn't exist
        """
        return self._fields[key]

    def __contains__(self, key: str) -> bool:
        """Check if a field exists.

        Args:
            key: Field name

        Returns:
            True if field exists
        """
        return key in self._fields

    @property
    def fields(self) -> dict[str, Any]:
        """Return a copy of the fields for testing access.

        Returns:
            Copy of internal fields dictionary
        """
        return dict(self._fields)

    @property
    def duration(self) -> float:
        """Return current duration since entry creation.

        Returns:
            Duration in seconds (not cached, calculated fresh each call)
        """
        return round(time.perf_counter() - self._start_time, 3)

    def to_dict(self) -> dict[str, Any]:
        """Return all fields with duration for output.

        Returns:
            Dict with all fields, duration, and status info
        """
        output = dict(self._fields)
        output["duration"] = self.duration

        if self._status is not None:
            output["status"] = self._status
        if self._status_code is not None:
            output["statusCode"] = self._status_code
        if self._error_type is not None:
            output["errorType"] = self._error_type
        if self._error_message is not None:
            output["errorMessage"] = self._error_message

        return output

    def __str__(self) -> str:
        """Serialize to JSON with duration.

        Duration is calculated lazily (not cached) so multiple calls show
        increasing duration. Delegates to :meth:`to_dict` so the JSON and
        dict representations can never drift apart.

        Returns:
            JSON string with fields and duration
        """
        # Fix: previously duplicated the entire status/error/duration
        # serialization from to_dict(); delegate instead (DRY, identical output).
        return json.dumps(self.to_dict(), separators=(",", ":"), default=str)

    def __repr__(self) -> str:
        """Return a debug representation.

        Returns:
            String showing LogEntry fields
        """
        return f"LogEntry({self._fields!r})"
|