kicad-sch-api 0.4.1__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kicad_sch_api/__init__.py +67 -2
- kicad_sch_api/cli/kicad_to_python.py +169 -0
- kicad_sch_api/collections/__init__.py +23 -8
- kicad_sch_api/collections/base.py +369 -59
- kicad_sch_api/collections/components.py +1376 -187
- kicad_sch_api/collections/junctions.py +129 -289
- kicad_sch_api/collections/labels.py +391 -287
- kicad_sch_api/collections/wires.py +202 -316
- kicad_sch_api/core/__init__.py +37 -2
- kicad_sch_api/core/component_bounds.py +34 -12
- kicad_sch_api/core/components.py +146 -7
- kicad_sch_api/core/config.py +25 -12
- kicad_sch_api/core/connectivity.py +692 -0
- kicad_sch_api/core/exceptions.py +175 -0
- kicad_sch_api/core/factories/element_factory.py +3 -1
- kicad_sch_api/core/formatter.py +24 -7
- kicad_sch_api/core/geometry.py +94 -5
- kicad_sch_api/core/managers/__init__.py +4 -0
- kicad_sch_api/core/managers/base.py +76 -0
- kicad_sch_api/core/managers/file_io.py +3 -1
- kicad_sch_api/core/managers/format_sync.py +3 -2
- kicad_sch_api/core/managers/graphics.py +3 -2
- kicad_sch_api/core/managers/hierarchy.py +661 -0
- kicad_sch_api/core/managers/metadata.py +4 -2
- kicad_sch_api/core/managers/sheet.py +52 -14
- kicad_sch_api/core/managers/text_elements.py +3 -2
- kicad_sch_api/core/managers/validation.py +3 -2
- kicad_sch_api/core/managers/wire.py +112 -54
- kicad_sch_api/core/parsing_utils.py +63 -0
- kicad_sch_api/core/pin_utils.py +103 -9
- kicad_sch_api/core/schematic.py +343 -29
- kicad_sch_api/core/types.py +79 -7
- kicad_sch_api/exporters/__init__.py +10 -0
- kicad_sch_api/exporters/python_generator.py +610 -0
- kicad_sch_api/exporters/templates/default.py.jinja2 +65 -0
- kicad_sch_api/geometry/__init__.py +15 -3
- kicad_sch_api/geometry/routing.py +211 -0
- kicad_sch_api/parsers/elements/label_parser.py +30 -8
- kicad_sch_api/parsers/elements/symbol_parser.py +255 -83
- kicad_sch_api/utils/logging.py +555 -0
- kicad_sch_api/utils/logging_decorators.py +587 -0
- kicad_sch_api/utils/validation.py +16 -22
- kicad_sch_api/wrappers/__init__.py +14 -0
- kicad_sch_api/wrappers/base.py +89 -0
- kicad_sch_api/wrappers/wire.py +198 -0
- kicad_sch_api-0.5.1.dist-info/METADATA +540 -0
- kicad_sch_api-0.5.1.dist-info/RECORD +114 -0
- kicad_sch_api-0.5.1.dist-info/entry_points.txt +4 -0
- {kicad_sch_api-0.4.1.dist-info → kicad_sch_api-0.5.1.dist-info}/top_level.txt +1 -0
- mcp_server/__init__.py +34 -0
- mcp_server/example_logging_integration.py +506 -0
- mcp_server/models.py +252 -0
- mcp_server/server.py +357 -0
- mcp_server/tools/__init__.py +32 -0
- mcp_server/tools/component_tools.py +516 -0
- mcp_server/tools/connectivity_tools.py +532 -0
- mcp_server/tools/consolidated_tools.py +1216 -0
- mcp_server/tools/pin_discovery.py +333 -0
- mcp_server/utils/__init__.py +38 -0
- mcp_server/utils/logging.py +127 -0
- mcp_server/utils.py +36 -0
- kicad_sch_api-0.4.1.dist-info/METADATA +0 -491
- kicad_sch_api-0.4.1.dist-info/RECORD +0 -87
- kicad_sch_api-0.4.1.dist-info/entry_points.txt +0 -2
- {kicad_sch_api-0.4.1.dist-info → kicad_sch_api-0.5.1.dist-info}/WHEEL +0 -0
- {kicad_sch_api-0.4.1.dist-info → kicad_sch_api-0.5.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,555 @@
|
|
|
1
|
+
"""Production-ready structured logging framework for kicad-sch-api MCP server.
|
|
2
|
+
|
|
3
|
+
This module provides:
|
|
4
|
+
- Structured JSON logging for production
|
|
5
|
+
- Separate debug/error file handling
|
|
6
|
+
- File rotation with size limits
|
|
7
|
+
- Context tracking for operations
|
|
8
|
+
- No stdout contamination (stderr only)
|
|
9
|
+
- Performance monitoring decorators
|
|
10
|
+
- Exception logging helpers
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import logging
|
|
14
|
+
import logging.handlers
|
|
15
|
+
import json
|
|
16
|
+
import functools
|
|
17
|
+
import time
|
|
18
|
+
import traceback
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from contextlib import contextmanager
|
|
21
|
+
from typing import Any, Optional, Dict, List, Callable, TypeVar
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
from dataclasses import dataclass, field, asdict
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# Type definitions
|
|
27
|
+
T = TypeVar("T")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class OperationContext:
    """Tracks timing, status, and metadata for one logged operation."""

    operation_name: str
    start_time: float = field(default_factory=time.time)
    component: Optional[str] = None
    details: Dict[str, Any] = field(default_factory=dict)
    status: str = "in_progress"
    end_time: Optional[float] = None

    def elapsed_ms(self) -> float:
        """Return milliseconds from start until end_time (or now, if unfinished)."""
        finished_at = self.end_time or time.time()
        return (finished_at - self.start_time) * 1000

    def to_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable summary of this operation."""
        summary = {
            "operation": self.operation_name,
            "component": self.component,
            "status": self.status,
            "elapsed_ms": self.elapsed_ms(),
            "details": self.details,
        }
        return summary
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class StructuredFormatter(logging.Formatter):
    """Log formatter emitting either structured JSON or human-readable text.

    JSON mode is intended for production (one machine-parseable object per
    line); text mode is intended for development consoles.
    """

    def __init__(self, json_mode: bool = True):
        """Initialize formatter.

        Args:
            json_mode: If True, output JSON. If False, output human-readable text.
        """
        self.json_mode = json_mode
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        """Format a log record according to the configured mode."""
        if self.json_mode:
            return self._format_json(record)
        return self._format_text(record)

    def _format_json(self, record: logging.LogRecord) -> str:
        """Format as a single-line JSON object for structured logging."""
        log_data = {
            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }

        # Add exception info if present on the record itself.
        if record.exc_info and record.exc_info[0] is not None:
            log_data["exception"] = {
                "type": record.exc_info[0].__name__,
                "message": str(record.exc_info[1]),
                "traceback": traceback.format_exception(*record.exc_info),
            }

        # Attach operation context injected via the logger's `extra=` kwarg.
        if hasattr(record, "operation_context"):
            log_data["context"] = record.operation_context.to_dict()

        return json.dumps(log_data)

    def _format_text(self, record: logging.LogRecord) -> str:
        """Format as human-readable text for development."""
        timestamp = datetime.fromtimestamp(record.created).strftime(
            "%Y-%m-%d %H:%M:%S"
        )
        level = record.levelname
        logger = record.name
        msg = record.getMessage()

        if record.exc_info and record.exc_info[0] is not None:
            # BUG FIX: traceback.format_exc() only works inside an active
            # `except` block; records formatted later (queued or deferred
            # handlers) would render "NoneType: None". Format the record's
            # own exc_info instead, matching _format_json above.
            msg += "\n" + "".join(traceback.format_exception(*record.exc_info))

        return f"{timestamp} [{level:8}] {logger}: {msg}"
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def configure_logging(
    log_dir: Path = Path("logs"),
    debug_level: bool = False,
    json_format: bool = True,
    max_bytes: int = 10 * 1024 * 1024,  # 10MB
    backup_count: int = 5,
) -> None:
    """Configure production-ready logging.

    This function sets up:
    - Main log file (all levels)
    - Error log file (ERROR and CRITICAL only)
    - DEBUG level for all loggers (if debug_level=True)
    - No stdout contamination (console handler, dev mode only, defaults to stderr)
    - File rotation based on size

    Args:
        log_dir: Directory to store log files
        debug_level: If True, set DEBUG level for all loggers
        json_format: If True, use JSON format for structured logging
        max_bytes: Maximum log file size before rotation (default 10MB)
        backup_count: Number of backup log files to keep (default 5)

    Example:
        # Development with debug output
        configure_logging(debug_level=True, json_format=False)

        # Production with JSON
        configure_logging(debug_level=False, json_format=True)
    """
    # Create logs directory
    log_dir.mkdir(parents=True, exist_ok=True)

    # Get root logger and set the baseline level
    root = logging.getLogger()
    root.setLevel(logging.DEBUG if debug_level else logging.INFO)

    # BUG FIX: close existing handlers before discarding them. A bare
    # handlers.clear() leaks open file descriptors every time logging is
    # reconfigured (e.g. tests, server restarts-in-process).
    for old_handler in list(root.handlers):
        root.removeHandler(old_handler)
        try:
            old_handler.close()
        except Exception:
            pass  # best-effort cleanup; never fail reconfiguration

    # Shared formatter for both file handlers
    formatter = StructuredFormatter(json_mode=json_format)
    file_level = logging.DEBUG if debug_level else logging.INFO

    def _add_rotating_handler(filename: str, level: int) -> None:
        # Attach a size-rotating file handler at the given level.
        handler = logging.handlers.RotatingFileHandler(
            log_dir / filename,
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding="utf-8",
        )
        handler.setLevel(level)
        handler.setFormatter(formatter)
        root.addHandler(handler)

    # Main log file handler (all levels) and error-only log file handler
    _add_rotating_handler("mcp_server.log", file_level)
    _add_rotating_handler("mcp_server.error.log", logging.ERROR)

    # Console handler (development only); StreamHandler defaults to stderr,
    # so stdout stays clean for MCP protocol traffic.
    if debug_level:
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(StructuredFormatter(json_mode=False))
        root.addHandler(console)

    # Log startup
    logger = logging.getLogger(__name__)
    logger.info(
        f"Logging configured: log_dir={log_dir}, "
        f"debug={debug_level}, json={json_format}"
    )
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
@contextmanager
def operation_context(
    operation_name: str, component: Optional[str] = None, **details: Any
):
    """Context manager that logs an operation's start, outcome, and duration.

    Yields the OperationContext so callers can inspect or extend it; logs
    DEBUG on entry, INFO on success, and ERROR (with traceback) on failure.

    Example:
        with operation_context("create_schematic", details={"name": "My Circuit"}):
            sch = ksa.create_schematic("My Circuit")
            # Operation logged automatically on exit
    """
    ctx = OperationContext(
        operation_name=operation_name,
        component=component,
        details=details,
    )

    log = logging.getLogger(__name__)

    # Announce the operation before the body runs.
    log.debug(f"START: {operation_name}", extra={"operation_context": ctx})

    try:
        yield ctx
    except Exception as exc:
        # Record failure, log with traceback, then re-raise unchanged.
        ctx.status = "failed"
        ctx.end_time = time.time()
        log.error(
            f"FAILED: {operation_name} - {exc.__class__.__name__}: {exc}",
            exc_info=True,
            extra={"operation_context": ctx},
        )
        raise
    else:
        # Body completed normally: stamp success and log the duration.
        ctx.status = "success"
        ctx.end_time = time.time()
        log.info(
            f"COMPLETE: {operation_name} ({ctx.elapsed_ms():.1f}ms)",
            extra={"operation_context": ctx},
        )
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def timer_decorator(logger_obj: Optional[logging.Logger] = None) -> Callable:
    """Decorator that logs a function's execution time.

    Duration is logged at DEBUG level on success, and at ERROR level (with
    traceback) when the wrapped function raises.

    Example:
        @timer_decorator()
        def calculate_pin_position(component, pin_num):
            # Time automatically logged
            return position
    """

    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        # Fall back to a logger named after the function's module.
        log = logger_obj or logging.getLogger(func.__module__)

        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> T:
            started = time.time()
            try:
                value = func(*args, **kwargs)
            except Exception as exc:
                duration = (time.time() - started) * 1000
                log.error(
                    f"{func.__name__} failed after {duration:.2f}ms: {exc}",
                    exc_info=True,
                )
                raise
            duration = (time.time() - started) * 1000
            log.debug(f"{func.__name__} completed in {duration:.2f}ms")
            return value

        return wrapper

    return decorator
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def log_exception(
    logger_obj: logging.Logger,
    exception: Exception,
    context: Optional[str] = None,
    **extra_info: Any,
) -> None:
    """Log an exception at ERROR level with context and key=value details.

    Args:
        logger_obj: Logger instance to use
        exception: The exception to log
        context: Optional context string describing what was happening
        **extra_info: Additional information to include in log

    Example:
        try:
            pin_pos = get_pin_position(comp, pin)
        except ValueError as e:
            log_exception(logger, e, context="get_pin_position",
                          component="R1", pin="2")
    """
    message = f"Exception: {exception.__class__.__name__}: {exception}"

    # Prefix the caller-supplied context, if any.
    if context:
        message = f"{context}: {message}"

    # Append extra details as a bracketed key=value list.
    if extra_info:
        pairs = ", ".join(f"{key}={value}" for key, value in extra_info.items())
        message = f"{message} [{pairs}]"

    logger_obj.error(message, exc_info=True)
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def setup_component_logging(
    component_ref: str,
) -> logging.LoggerAdapter:
    """Return a LoggerAdapter that prefixes messages with the component ref.

    All logs emitted through the adapter automatically carry the component
    reference in the message text.

    Example:
        logger = setup_component_logging("R1")
        logger.debug("Setting value to 10k")  # Logs with component=R1
    """
    base_logger = logging.getLogger(__name__)

    class ComponentAdapter(logging.LoggerAdapter):
        # Prepend "[<ref>] " to every message passing through the adapter.
        def process(self, msg: str, kwargs: Any) -> tuple:
            return f"[{component_ref}] {msg}", kwargs

    return ComponentAdapter(base_logger, {})
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
def get_log_statistics(log_path: Path) -> Dict[str, Any]:
    """Parse a JSON-lines log file and return summary statistics.

    Returns per-level counts, per-operation counts, the components seen
    (sorted list), and message/timestamp pairs for each ERROR record.

    Example:
        stats = get_log_statistics(Path("logs/mcp_server.log"))
        print(f"Errors: {stats['error_count']}")
    """
    if not log_path.exists():
        return {"error": "Log file not found"}

    stats: Dict[str, Any] = {
        "debug_count": 0,
        "info_count": 0,
        "warning_count": 0,
        "error_count": 0,
        "critical_count": 0,
        "operations": {},
        "components": set(),
        "errors": [],
    }
    known_levels = ("debug", "info", "warning", "error", "critical")

    try:
        with open(log_path, "r", encoding="utf-8") as f:
            for line in f:
                # Only JSON-object lines are statistics candidates.
                if not line.strip().startswith("{"):
                    continue
                try:
                    record = json.loads(line)
                except json.JSONDecodeError:
                    continue

                level = record.get("level", "").lower()
                if level in known_levels:
                    stats[f"{level}_count"] += 1
                if level == "error":
                    stats["errors"].append(
                        {
                            "message": record.get("message"),
                            "timestamp": record.get("timestamp"),
                        }
                    )

                # Track operation/component frequencies from attached context.
                if "context" in record:
                    op = record["context"].get("operation")
                    if op:
                        stats["operations"][op] = stats["operations"].get(op, 0) + 1

                    comp = record["context"].get("component")
                    if comp:
                        stats["components"].add(comp)

    except Exception as e:
        stats["parse_error"] = str(e)

    # Sets are not JSON-serializable; expose components as a sorted list.
    stats["components"] = sorted(stats["components"])

    return stats
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
def search_logs(
    log_path: Path,
    pattern: Optional[str] = None,
    level: Optional[str] = None,
    operation: Optional[str] = None,
    component: Optional[str] = None,
    limit: int = 100,
) -> List[Dict[str, Any]]:
    """Search a JSON-lines log file for entries matching the given criteria.

    Example:
        # Find all errors related to R1
        errors = search_logs(
            Path("logs/mcp_server.log"),
            level="ERROR",
            component="R1"
        )

        # Find all component creation operations
        operations = search_logs(
            Path("logs/mcp_server.log"),
            operation="add_component"
        )

        # Search by message pattern
        results = search_logs(
            Path("logs/mcp_server.log"),
            pattern="pin.*position"
        )
    """
    import re

    if not log_path.exists():
        return []

    compiled = re.compile(pattern) if pattern else None
    matches: List[Dict[str, Any]] = []

    def _accepts(record: Dict[str, Any]) -> bool:
        # A record must satisfy every active filter (case-insensitive level).
        if level and record.get("level", "").upper() != level.upper():
            return False
        if compiled and not compiled.search(record.get("message", "")):
            return False
        if operation and record.get("context", {}).get("operation") != operation:
            return False
        if component and record.get("context", {}).get("component") != component:
            return False
        return True

    try:
        with open(log_path, "r", encoding="utf-8") as f:
            for line in f:
                if not line.strip().startswith("{"):
                    continue
                try:
                    record = json.loads(line)
                except json.JSONDecodeError:
                    continue
                if _accepts(record):
                    matches.append(record)
                    if len(matches) >= limit:
                        break
    except Exception:
        # Best-effort: return whatever was collected before the failure.
        pass

    return matches
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
class LogQuery:
    """Fluent builder for log-file searches backed by search_logs()."""

    def __init__(self, log_path: Path):
        """Initialize with the path of the log file to query."""
        self.log_path = log_path
        self.filters = {
            "pattern": None,
            "level": None,
            "operation": None,
            "component": None,
            "limit": 100,
        }

    def _set(self, key: str, value: Any) -> "LogQuery":
        # Shared setter backing the fluent by_*/limit methods below.
        self.filters[key] = value
        return self

    def by_pattern(self, pattern: str) -> "LogQuery":
        """Filter by message pattern (regex)."""
        return self._set("pattern", pattern)

    def by_level(self, level: str) -> "LogQuery":
        """Filter by log level."""
        return self._set("level", level)

    def by_operation(self, operation: str) -> "LogQuery":
        """Filter by operation name."""
        return self._set("operation", operation)

    def by_component(self, component: str) -> "LogQuery":
        """Filter by component reference."""
        return self._set("component", component)

    def limit(self, limit: int) -> "LogQuery":
        """Limit number of results."""
        return self._set("limit", limit)

    def execute(self) -> List[Dict[str, Any]]:
        """Run the query and return matching records."""
        return search_logs(self.log_path, **self.filters)

    def summary(self) -> Dict[str, Any]:
        """Execute the query and return a count/level/time-range summary."""
        records = self.execute()
        return {
            "count": len(records),
            "levels": self._count_by_key(records, "level"),
            "latest": records[-1]["timestamp"] if records else None,
            "oldest": records[0]["timestamp"] if records else None,
        }

    @staticmethod
    def _count_by_key(items: List[Dict], key: str) -> Dict[str, int]:
        """Tally items by the value of *key* ("unknown" when absent)."""
        tally: Dict[str, int] = {}
        for entry in items:
            value = entry.get(key, "unknown")
            tally[value] = tally.get(value, 0) + 1
        return tally
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
# Module-level logger for this utilities module; its output is governed by
# the root-logger handlers installed via configure_logging().
logger = logging.getLogger(__name__)
|