pwndoc-mcp-server 1.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pwndoc-mcp-server might be problematic.
- pwndoc_mcp_server/__init__.py +57 -0
- pwndoc_mcp_server/cli.py +441 -0
- pwndoc_mcp_server/client.py +870 -0
- pwndoc_mcp_server/config.py +411 -0
- pwndoc_mcp_server/logging_config.py +329 -0
- pwndoc_mcp_server/server.py +950 -0
- pwndoc_mcp_server/version.py +26 -0
- pwndoc_mcp_server-1.0.2.dist-info/METADATA +110 -0
- pwndoc_mcp_server-1.0.2.dist-info/RECORD +11 -0
- pwndoc_mcp_server-1.0.2.dist-info/WHEEL +4 -0
- pwndoc_mcp_server-1.0.2.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,329 @@
"""
PwnDoc MCP Server - Logging Configuration.

This module provides comprehensive logging setup with support for:
- Console output with colors
- File logging with rotation
- JSON structured logging
- Log level configuration
- Performance metrics logging

Environment Variables:
    PWNDOC_LOG_LEVEL - Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
    PWNDOC_LOG_FILE - Path to log file
    PWNDOC_LOG_FORMAT - Log format (text, json)
    PWNDOC_LOG_MAX_SIZE - Max log file size in MB (default: 10)
    PWNDOC_LOG_BACKUPS - Number of backup files to keep (default: 5)

Log Levels:
    DEBUG - Detailed information for debugging
    INFO - General operational messages
    WARNING - Warning messages for potential issues
    ERROR - Error messages for failures
    CRITICAL - Critical errors that may cause shutdown

Example:
    >>> from pwndoc_mcp_server.logging_config import setup_logging
    >>> setup_logging(level="DEBUG", log_file="/var/log/pwndoc-mcp.log")
"""

import json
import logging
import logging.handlers
import os
import sys
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Any, Dict, Optional


class LogLevel(Enum):
    """Log level enumeration."""

    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL


# Custom log format
DEFAULT_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
DETAILED_FORMAT = (
    "%(asctime)s - %(name)s - %(levelname)s - %(filename)s:%(lineno)d - %(funcName)s - %(message)s"
)
SIMPLE_FORMAT = "%(levelname)s: %(message)s"


class ColoredFormatter(logging.Formatter):
    """Colored log formatter for console output."""

    COLORS = {
        "DEBUG": "\033[36m",  # Cyan
        "INFO": "\033[32m",  # Green
        "WARNING": "\033[33m",  # Yellow
        "ERROR": "\033[31m",  # Red
        "CRITICAL": "\033[35m",  # Magenta
    }
    RESET = "\033[0m"

    def format(self, record):
        color = self.COLORS.get(record.levelname, "")
        record.levelname = f"{color}{record.levelname}{self.RESET}"
        return super().format(record)


class JSONFormatter(logging.Formatter):
    """JSON log formatter for structured logging."""

    def format(self, record) -> str:
        log_obj = {
            "timestamp": datetime.utcnow().isoformat() + "Z",
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }

        # Add exception info if present
        if record.exc_info:
            log_obj["exception"] = self.formatException(record.exc_info)

        # Add extra fields
        if hasattr(record, "extra"):
            log_obj.update(record.extra)

        return json.dumps(log_obj)
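For illustration only, and not part of the packaged file: a minimal sketch of attaching JSONFormatter to a plain StreamHandler, assuming the wheel is installed and the module is importable as pwndoc_mcp_server.logging_config. The logger name "json-demo" is made up for the example.

# Illustrative sketch; assumes the pwndoc_mcp_server wheel is installed.
import logging
import sys

from pwndoc_mcp_server.logging_config import JSONFormatter

handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(JSONFormatter())

logger = logging.getLogger("json-demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Each record is emitted as a single JSON object per line, roughly:
# {"timestamp": "...", "level": "INFO", "logger": "json-demo", "message": "report generated", ...}
logger.info("report generated")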
class SafeStreamHandler(logging.StreamHandler):
    """StreamHandler that handles Unicode encoding errors gracefully on Windows."""

    def emit(self, record):
        """Emit a record, handling Unicode encoding errors."""
        try:
            super().emit(record)
        except UnicodeEncodeError:
            # If we get an encoding error, try to encode with 'replace' errors
            try:
                msg = self.format(record)
                stream = self.stream
                # Encode with errors='replace' to substitute unmappable characters
                if hasattr(stream, "encoding") and stream.encoding:
                    # Encode and decode with replace to substitute unmappable chars
                    safe_msg = msg.encode(stream.encoding, errors="replace").decode(
                        stream.encoding, errors="replace"
                    )
                    stream.write(safe_msg)
                    stream.write(self.terminator)
                    self.flush()
                else:
                    # If no encoding info, try with utf-8
                    safe_msg = msg.encode("utf-8", errors="replace").decode(
                        "utf-8", errors="replace"
                    )
                    stream.write(safe_msg)
                    stream.write(self.terminator)
                    self.flush()
            except Exception:
                self.handleError(record)


class PerformanceLogger:
    """Logger for performance metrics."""

    def __init__(self, logger: logging.Logger):
        self.logger = logger
        self._metrics: Dict[str, Any] = {}

    def start_timer(self, name: str):
        """Start a performance timer."""
        import time

        self._metrics[name] = {"start": time.time()}

    def stop_timer(self, name: str) -> float:
        """Stop timer and log duration."""
        import time

        if name in self._metrics:
            duration: float = time.time() - float(self._metrics[name]["start"])
            self.logger.debug(f"Performance: {name} took {duration:.3f}s")
            return duration
        return 0.0

    def log_metric(self, name: str, value: Any):
        """Log a performance metric."""
        self.logger.debug(f"Metric: {name} = {value}")
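As an illustrative sketch, not part of the released file: PerformanceLogger wraps a standard logger and reports timings and metrics at DEBUG level. The timer name and metric below are made up; the sketch assumes the wheel is installed.

# Illustrative sketch; assumes the pwndoc_mcp_server wheel is installed.
import logging
import time

from pwndoc_mcp_server.logging_config import PerformanceLogger

logging.basicConfig(level=logging.DEBUG)
perf = PerformanceLogger(logging.getLogger("perf-demo"))

perf.start_timer("audit_fetch")            # records a start timestamp under this name
time.sleep(0.1)                            # stand-in for real work
elapsed = perf.stop_timer("audit_fetch")   # logs roughly "Performance: audit_fetch took 0.100s"
perf.log_metric("audits_fetched", 12)      # logs "Metric: audits_fetched = 12"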
def setup_logging(
    level: str = "INFO",
    log_file: Optional[str] = None,
    log_format: str = "text",
    max_size_mb: int = 10,
    max_bytes: Optional[int] = None,
    backup_count: int = 5,
    json_output: bool = False,
    colored: bool = True,
    console: bool = True,
    name: Optional[str] = None,
) -> logging.Logger:
    """
    Configure logging for the application.

    Args:
        level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        log_file: Path to log file (optional)
        log_format: Format type ('text', 'json', 'detailed')
        max_size_mb: Maximum log file size in MB
        max_bytes: Maximum log file size in bytes (overrides max_size_mb)
        backup_count: Number of backup files to keep
        json_output: Use JSON format for all output
        colored: Use colored output for console
        console: Enable console output (default: True)
        name: Logger name (default: root logger)

    Returns:
        Logger configured for the application

    Example:
        >>> logger = setup_logging(level="DEBUG", log_file="app.log")
        >>> logger.info("Application started")
    """
    # Check environment variables for overrides
    env_level = os.environ.get("PWNDOC_LOG_LEVEL")
    if env_level:
        level = env_level

    env_file = os.environ.get("PWNDOC_LOG_FILE")
    if env_file:
        log_file = env_file

    # Get logger (root or named)
    if name:
        root_logger = logging.getLogger(name)
    else:
        root_logger = logging.getLogger()

    root_logger.setLevel(getattr(logging, level.upper()))

    # Clear existing handlers (close them first to avoid resource warnings)
    for handler in root_logger.handlers[:]:
        handler.close()
        root_logger.removeHandler(handler)
    root_logger.handlers = []

    # Select format
    formatter: logging.Formatter
    if json_output or log_format == "json":
        formatter = JSONFormatter()
    elif log_format == "detailed":
        if colored and sys.stdout.isatty():
            formatter = ColoredFormatter(DETAILED_FORMAT)
        else:
            formatter = logging.Formatter(DETAILED_FORMAT)
    else:
        if colored and sys.stdout.isatty():
            formatter = ColoredFormatter(DEFAULT_FORMAT)
        else:
            formatter = logging.Formatter(DEFAULT_FORMAT)

    # Console handler (if enabled)
    if console:
        # On Windows, use SafeStreamHandler to handle Unicode encoding errors
        if sys.platform == "win32":
            console_handler = SafeStreamHandler(sys.stderr)
        else:
            console_handler = logging.StreamHandler(sys.stderr)
        console_handler.setFormatter(formatter)
        root_logger.addHandler(console_handler)

    # File handler (if specified)
    if log_file:
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)

        # Calculate max bytes
        if max_bytes is None:
            max_bytes = max_size_mb * 1024 * 1024

        # Use rotating file handler with UTF-8 encoding
        file_handler = logging.handlers.RotatingFileHandler(
            log_path,
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding="utf-8",
        )

        # Always use non-colored formatter for files
        if json_output:
            file_handler.setFormatter(JSONFormatter())
        else:
            file_handler.setFormatter(logging.Formatter(DEFAULT_FORMAT))

        root_logger.addHandler(file_handler)

    # Configure library loggers
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("httpcore").setLevel(logging.WARNING)

    return root_logger
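A hedged usage sketch, not part of the released file, showing setup_logging with console output plus a rotating log file; it assumes the wheel is installed and uses a placeholder log path. Note that the environment variables PWNDOC_LOG_LEVEL and PWNDOC_LOG_FILE would override the level and log_file arguments if set.

# Illustrative sketch; assumes the pwndoc_mcp_server wheel is installed.
from pwndoc_mcp_server.logging_config import setup_logging

logger = setup_logging(
    level="DEBUG",
    log_file="logs/pwndoc-mcp.log",   # placeholder path; parent directories are created
    log_format="detailed",            # console records include filename:line and function name
    max_size_mb=10,                   # rotate the file after roughly 10 MB
    backup_count=5,                   # keep 5 rotated backups
)
logger.info("PwnDoc MCP server logging initialised")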
def get_logger(name: Optional[str] = None) -> logging.Logger:
    """
    Get a logger for a specific module.

    Args:
        name: Logger name (typically __name__), if None returns root logger

    Returns:
        Configured logger instance

    Example:
        >>> logger = get_logger(__name__)
        >>> logger.info("Module initialized")
        >>> logger = get_logger()  # Get root logger
    """
    if name is None:
        return logging.getLogger()
    return logging.getLogger(name)


def log_request(logger: logging.Logger, method: str, endpoint: str, duration: float = 0):
    """Log an API request."""
    logger.debug(f"API Request: {method} {endpoint} ({duration:.3f}s)")


def log_error(logger: logging.Logger, error: Exception, context: Optional[str] = None):
    """Log an error with context."""
    msg = f"Error: {type(error).__name__}: {error}"
    if context:
        msg = f"{context} - {msg}"
    logger.error(msg, exc_info=True)
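For completeness, a small hedged sketch, not part of the released file, of the helper functions above; the endpoint, exception, and context string are made up for illustration, and the sketch assumes the wheel is installed.

# Illustrative sketch; assumes the pwndoc_mcp_server wheel is installed.
from pwndoc_mcp_server.logging_config import get_logger, log_error, log_request

logger = get_logger(__name__)

# Logged at DEBUG as "API Request: GET /api/audits (0.123s)"
log_request(logger, "GET", "/api/audits", duration=0.123)

try:
    raise ConnectionError("PwnDoc API unreachable")   # made-up failure for the example
except ConnectionError as exc:
    # Logged at ERROR with traceback: "Fetching audits - Error: ConnectionError: ..."
    log_error(logger, exc, context="Fetching audits")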
# Environment-based configuration
def setup_from_env() -> logging.Logger:
    """
    Configure logging from environment variables.

    Environment Variables:
        PWNDOC_LOG_LEVEL - Log level
        PWNDOC_LOG_FILE - Log file path
        PWNDOC_LOG_FORMAT - Format (text, json, detailed)
        PWNDOC_LOG_MAX_SIZE - Max file size in MB
        PWNDOC_LOG_BACKUPS - Backup file count
    """
    return setup_logging(
        level=os.environ.get("PWNDOC_LOG_LEVEL", "INFO"),
        log_file=os.environ.get("PWNDOC_LOG_FILE"),
        log_format=os.environ.get("PWNDOC_LOG_FORMAT", "text"),
        max_size_mb=int(os.environ.get("PWNDOC_LOG_MAX_SIZE", "10")),
        backup_count=int(os.environ.get("PWNDOC_LOG_BACKUPS", "5")),
        json_output=os.environ.get("PWNDOC_LOG_FORMAT") == "json",
    )
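Finally, a hedged sketch, not part of the released file, of environment-driven configuration via setup_from_env; the environment values are placeholders and the sketch assumes the wheel is installed.

# Illustrative sketch; assumes the pwndoc_mcp_server wheel is installed.
import os

from pwndoc_mcp_server.logging_config import setup_from_env

# Placeholder values; in practice these would be set in the service environment.
os.environ["PWNDOC_LOG_LEVEL"] = "DEBUG"
os.environ["PWNDOC_LOG_FILE"] = "logs/pwndoc-mcp.log"
os.environ["PWNDOC_LOG_FORMAT"] = "json"    # JSON records on console and in the file
os.environ["PWNDOC_LOG_MAX_SIZE"] = "20"    # rotate after roughly 20 MB
os.environ["PWNDOC_LOG_BACKUPS"] = "3"

logger = setup_from_env()
logger.info("configured from environment")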