mcp_code_indexer-3.1.4-py3-none-any.whl → mcp_code_indexer-3.1.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_code_indexer/__init__.py +8 -6
- mcp_code_indexer/ask_handler.py +105 -75
- mcp_code_indexer/claude_api_handler.py +125 -82
- mcp_code_indexer/cleanup_manager.py +107 -81
- mcp_code_indexer/database/connection_health.py +212 -161
- mcp_code_indexer/database/database.py +529 -415
- mcp_code_indexer/database/exceptions.py +167 -118
- mcp_code_indexer/database/models.py +54 -19
- mcp_code_indexer/database/retry_executor.py +139 -103
- mcp_code_indexer/deepask_handler.py +178 -140
- mcp_code_indexer/error_handler.py +88 -76
- mcp_code_indexer/file_scanner.py +163 -141
- mcp_code_indexer/git_hook_handler.py +352 -261
- mcp_code_indexer/logging_config.py +76 -94
- mcp_code_indexer/main.py +406 -320
- mcp_code_indexer/middleware/error_middleware.py +106 -71
- mcp_code_indexer/query_preprocessor.py +40 -40
- mcp_code_indexer/server/mcp_server.py +785 -470
- mcp_code_indexer/token_counter.py +54 -47
- {mcp_code_indexer-3.1.4.dist-info → mcp_code_indexer-3.1.5.dist-info}/METADATA +3 -3
- mcp_code_indexer-3.1.5.dist-info/RECORD +37 -0
- mcp_code_indexer-3.1.4.dist-info/RECORD +0 -37
- {mcp_code_indexer-3.1.4.dist-info → mcp_code_indexer-3.1.5.dist-info}/WHEEL +0 -0
- {mcp_code_indexer-3.1.4.dist-info → mcp_code_indexer-3.1.5.dist-info}/entry_points.txt +0 -0
- {mcp_code_indexer-3.1.4.dist-info → mcp_code_indexer-3.1.5.dist-info}/licenses/LICENSE +0 -0
- {mcp_code_indexer-3.1.4.dist-info → mcp_code_indexer-3.1.5.dist-info}/top_level.txt +0 -0
mcp_code_indexer/main.py
CHANGED

Aside from dropping two unused imports, the changes below are predominantly mechanical Black-style reformatting: long lines wrapped in parentheses, trailing commas added, single quotes normalized to double quotes, and trailing whitespace stripped (whitespace-only changes appear as paired blank `-`/`+` lines).
```diff
@@ -8,10 +8,8 @@ Entry point for the mcp-code-indexer package when installed via pip.
 import argparse
 import asyncio
 import json
-import logging
 import sys
 from pathlib import Path
-from typing import Any, Dict
 
 from . import __version__
 from .logging_config import setup_logging
```
```diff
@@ -22,86 +20,96 @@ def parse_arguments() -> argparse.Namespace:
     """Parse command line arguments."""
     parser = argparse.ArgumentParser(
         description="MCP Code Index Server - Track file descriptions across codebases",
-        prog="mcp-code-indexer"
+        prog="mcp-code-indexer",
     )
-
+
     parser.add_argument(
-        "--version",
-        action="version",
-        version=f"mcp-code-indexer {__version__}"
+        "--version", action="version", version=f"mcp-code-indexer {__version__}"
     )
-
+
     parser.add_argument(
         "--token-limit",
         type=int,
         default=32000,
-        help="Maximum tokens before recommending search instead of full overview (default: 32000)"
+        help=(
+            "Maximum tokens before recommending search instead of full overview "
+            "(default: 32000)"
+        ),
     )
-
+
     parser.add_argument(
         "--db-path",
         type=str,
         default="~/.mcp-code-index/tracker.db",
-        help="Path to SQLite database (default: ~/.mcp-code-index/tracker.db)"
+        help="Path to SQLite database (default: ~/.mcp-code-index/tracker.db)",
     )
-
+
     parser.add_argument(
         "--cache-dir",
         type=str,
         default="~/.mcp-code-index/cache",
-        help="Directory for caching token counts (default: ~/.mcp-code-index/cache)"
+        help="Directory for caching token counts (default: ~/.mcp-code-index/cache)",
     )
-
+
     parser.add_argument(
         "--log-level",
         type=str,
         choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
         default="INFO",
-        help="Logging level (default: INFO)"
+        help="Logging level (default: INFO)",
     )
-
+
     # Utility commands
     parser.add_argument(
         "--getprojects",
         action="store_true",
-        help="List all projects with IDs, branches, and description counts"
+        help="List all projects with IDs, branches, and description counts",
     )
-
+
     parser.add_argument(
         "--runcommand",
         type=str,
-        help="Execute a command using JSON in MCP format (single or multi-line)"
+        help="Execute a command using JSON in MCP format (single or multi-line)",
     )
-
+
     parser.add_argument(
         "--dumpdescriptions",
         nargs="+",
         metavar=("PROJECT_ID", "BRANCH"),
-        help="Export descriptions for a project. Usage: --dumpdescriptions PROJECT_ID [BRANCH]"
+        help=(
+            "Export descriptions for a project. Usage: "
+            "--dumpdescriptions PROJECT_ID [BRANCH]"
+        ),
     )
-
+
     parser.add_argument(
         "--githook",
         nargs="*",
         metavar="COMMIT_HASH",
-        help="Git hook mode: auto-update descriptions based on git diff using OpenRouter API. "
-        "Usage: --githook (current changes), --githook HASH (specific commit), "
-        "--githook HASH1 HASH2 (commit range from HASH1 to HASH2)"
+        help=(
+            "Git hook mode: auto-update descriptions based on git diff using "
+            "OpenRouter API. Usage: --githook (current changes), --githook HASH "
+            "(specific commit), --githook HASH1 HASH2 (commit range from "
+            "HASH1 to HASH2)"
+        ),
     )
-
+
     parser.add_argument(
         "--cleanup",
         action="store_true",
-        help="Remove empty projects (no descriptions and no project overview)"
+        help="Remove empty projects (no descriptions and no project overview)",
     )
-
+
     parser.add_argument(
         "--map",
         type=str,
         metavar="PROJECT_NAME_OR_ID",
-        help="Generate a markdown project map for the specified project (by name or ID)"
+        help=(
+            "Generate a markdown project map for the specified project "
+            "(by name or ID)"
+        ),
     )
-
+
     return parser.parse_args()
```
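Most of the churn in the hunk above is Black wrapping long `help=` strings into parenthesized, implicitly concatenated string literals, which leaves the visible help text unchanged. A minimal sketch of that equivalence (the variable names here are illustrative only):

```python
# Adjacent string literals are concatenated at compile time, so wrapping a
# long help string across two lines produces the exact same value.
wrapped = (
    "Maximum tokens before recommending search instead of full overview "
    "(default: 32000)"
)
single = "Maximum tokens before recommending search instead of full overview (default: 32000)"
assert wrapped == single
```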
```diff
@@ -110,26 +118,26 @@ async def handle_getprojects(args: argparse.Namespace) -> None:
     db_manager = None
     try:
         from .database.database import DatabaseManager
-
+
         # Initialize database
         db_path = Path(args.db_path).expanduser()
         db_manager = DatabaseManager(db_path)
         await db_manager.initialize()
-
+
         # Get all projects
         projects = await db_manager.get_all_projects()
-
+
         if not projects:
             print("No projects found.")
             return
-
+
         print("Projects:")
         print("-" * 80)
-
+
         for project in projects:
             print(f"ID: {project.id}")
             print(f"Name: {project.name}")
-
+
             # Get branch information
             try:
                 branch_counts = await db_manager.get_branch_file_counts(project.id)
```
```diff
@@ -141,9 +149,9 @@ async def handle_getprojects(args: argparse.Namespace) -> None:
                 print("Branches: No descriptions found")
             except Exception as e:
                 print(f"Branches: Error loading branch info - {e}")
-
+
             print("-" * 80)
-
+
     except Exception as e:
         print(f"Error: {e}", file=sys.stderr)
         sys.exit(1)
```
```diff
@@ -157,103 +165,122 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
     """Handle --runcommand command."""
     from .server.mcp_server import MCPCodeIndexServer
     from .logging_config import setup_command_logger
-
+
     # Set up dedicated logging for runcommand
     cache_dir = Path(args.cache_dir).expanduser()
     logger = setup_command_logger("runcommand", cache_dir)
-
-    logger.info("Starting runcommand execution", extra={
-        "structured_data": {
-            "command": args.runcommand,
-            "args": {
-                "token_limit": args.token_limit,
-                "db_path": str(args.db_path),
-                "cache_dir": str(args.cache_dir)
-            }
-        }
-    })
-
+
+    logger.info(
+        "Starting runcommand execution",
+        extra={
+            "structured_data": {
+                "command": args.runcommand,
+                "args": {
+                    "token_limit": args.token_limit,
+                    "db_path": str(args.db_path),
+                    "cache_dir": str(args.cache_dir),
+                },
+            }
+        },
+    )
+
     try:
         # Parse JSON (handle both single-line and multi-line)
         logger.debug("Parsing JSON command")
         json_data = json.loads(args.runcommand)
-        logger.debug("JSON parsed successfully", extra={"structured_data": {"parsed_json": json_data}})
+        logger.debug(
+            "JSON parsed successfully",
+            extra={"structured_data": {"parsed_json": json_data}},
+        )
     except json.JSONDecodeError as e:
-        logger.warning("Initial JSON parse failed", extra={"structured_data": {"error": str(e)}})
+        logger.warning(
+            "Initial JSON parse failed", extra={"structured_data": {"error": str(e)}}
+        )
         print(f"Initial JSON parse failed: {e}", file=sys.stderr)
-
+
         # Try to repair the JSON
         logger.debug("Attempting JSON repair")
         try:
             import re
+
             repaired = args.runcommand
-
+
             # Fix common issues
             # Quote unquoted URLs and paths
             url_pattern = r'("[\w]+"):\s*([a-zA-Z][a-zA-Z0-9+.-]*://[^\s,}]+|/[^\s,}]*)'
             repaired = re.sub(url_pattern, r'\1: "\2"', repaired)
-
+
             # Quote unquoted values
             unquoted_pattern = r'("[\w]+"):\s*([a-zA-Z0-9_-]+)(?=\s*[,}])'
             repaired = re.sub(unquoted_pattern, r'\1: "\2"', repaired)
-
+
             # Remove trailing commas
-            repaired = re.sub(r',(\s*[}\]])', r'\1', repaired)
-
+            repaired = re.sub(r",(\s*[}\]])", r"\1", repaired)
+
             json_data = json.loads(repaired)
-            logger.info("JSON repaired successfully", extra={
-                "structured_data": {
-                    "original": args.runcommand,
-                    "repaired": repaired
-                }
-            })
+            logger.info(
+                "JSON repaired successfully",
+                extra={
+                    "structured_data": {
+                        "original": args.runcommand,
+                        "repaired": repaired,
+                    }
+                },
+            )
+            print("JSON repaired successfully", file=sys.stderr)
             print(f"Original: {args.runcommand}", file=sys.stderr)
             print(f"Repaired: {repaired}", file=sys.stderr)
         except json.JSONDecodeError as repair_error:
-            logger.error("JSON repair failed", extra={
-                "structured_data": {
-                    "repair_error": str(repair_error),
-                    "original_json": args.runcommand
-                }
-            })
+            logger.error(
+                "JSON repair failed",
+                extra={
+                    "structured_data": {
+                        "repair_error": str(repair_error),
+                        "original_json": args.runcommand,
+                    }
+                },
+            )
             print(f"JSON repair also failed: {repair_error}", file=sys.stderr)
             print(f"Original JSON: {args.runcommand}", file=sys.stderr)
             sys.exit(1)
-
+
     # Initialize server
     db_path = Path(args.db_path).expanduser()
     cache_dir = Path(args.cache_dir).expanduser()
-
-    logger.info("Initializing MCP server", extra={
-        "structured_data": {
-            "db_path": str(db_path),
-            "cache_dir": str(cache_dir),
-            "token_limit": args.token_limit
-        }
-    })
+
+    logger.info(
+        "Initializing MCP server",
+        extra={
+            "structured_data": {
+                "db_path": str(db_path),
+                "cache_dir": str(cache_dir),
+                "token_limit": args.token_limit,
+            }
+        },
+    )
+
     server = MCPCodeIndexServer(
-        token_limit=args.token_limit,
-        db_path=db_path,
-        cache_dir=cache_dir
+        token_limit=args.token_limit, db_path=db_path, cache_dir=cache_dir
     )
-
+
     try:
         logger.debug("Initializing server database connection")
         await server.initialize()
         logger.debug("Server initialized successfully")
-
+
         # Extract the tool call information from the JSON
         if "method" in json_data and json_data["method"] == "tools/call":
             tool_name = json_data["params"]["name"]
             tool_arguments = json_data["params"]["arguments"]
-            logger.info("JSON-RPC format detected", extra={
-                "structured_data": {
-                    "tool_name": tool_name,
-                    "arguments_keys": list(tool_arguments.keys())
-                }
-            })
+            logger.info(
+                "JSON-RPC format detected",
+                extra={
+                    "structured_data": {
+                        "tool_name": tool_name,
+                        "arguments_keys": list(tool_arguments.keys()),
+                    }
+                },
+            )
         elif "projectName" in json_data and "folderPath" in json_data:
             # Auto-detect: user provided just arguments, try to infer the tool
             if "filePath" in json_data and "description" in json_data:
```
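The repair path in the hunk above applies three regex substitutions before retrying `json.loads`. A standalone sketch of those same patterns; the malformed input string is invented for demonstration:

```python
import json
import re

# Hypothetical malformed input: a bare value, an unquoted URL, a trailing comma.
broken = '{"command": git-status, "url": https://example.com/repo, "args": [1, 2,]}'

# Quote unquoted URLs and paths
broken = re.sub(
    r'("[\w]+"):\s*([a-zA-Z][a-zA-Z0-9+.-]*://[^\s,}]+|/[^\s,}]*)', r'\1: "\2"', broken
)
# Quote bare word values
broken = re.sub(r'("[\w]+"):\s*([a-zA-Z0-9_-]+)(?=\s*[,}])', r'\1: "\2"', broken)
# Drop trailing commas before a closing brace or bracket
broken = re.sub(r",(\s*[}\]])", r"\1", broken)

print(json.loads(broken))
# {'command': 'git-status', 'url': 'https://example.com/repo', 'args': [1, 2]}
```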
```diff
@@ -267,19 +294,31 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
                 logger.info("Auto-detected tool: check_codebase_size")
                 print("Auto-detected tool: check_codebase_size", file=sys.stderr)
             else:
-                logger.error("Could not auto-detect tool from arguments", extra={
-                    "structured_data": {"provided_keys": list(json_data.keys())}
-                })
-                print("Error: Could not auto-detect tool from arguments. Please use full MCP format:", file=sys.stderr)
-                print('{"method": "tools/call", "params": {"name": "TOOL_NAME", "arguments": {...}}}', file=sys.stderr)
+                logger.error(
+                    "Could not auto-detect tool from arguments",
+                    extra={
+                        "structured_data": {"provided_keys": list(json_data.keys())}
+                    },
+                )
+                print(
+                    "Error: Could not auto-detect tool from arguments. "
+                    "Please use full MCP format:",
+                    file=sys.stderr,
+                )
+                print(
+                    '{"method": "tools/call", "params": '
+                    '{"name": "TOOL_NAME", "arguments": {...}}}',
+                    file=sys.stderr,
+                )
                 sys.exit(1)
         else:
-            logger.error("Invalid JSON format", extra={
-                "structured_data": {"provided_keys": list(json_data.keys())}
-            })
+            logger.error(
+                "Invalid JSON format",
+                extra={"structured_data": {"provided_keys": list(json_data.keys())}},
+            )
             print("Error: JSON must contain a valid MCP tool call", file=sys.stderr)
             sys.exit(1)
-
+
         # Map tool names to handler methods - use the same mapping as MCP server
         tool_handlers = {
             "get_file_description": server._handle_get_file_description,
```
```diff
@@ -293,30 +332,31 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
             "get_word_frequency": server._handle_get_word_frequency,
             "search_codebase_overview": server._handle_search_codebase_overview,
         }
-
+
         if tool_name not in tool_handlers:
-            logger.error("Unknown tool requested", extra={
-                "structured_data": {
-                    "tool_name": tool_name,
-                    "available_tools": list(tool_handlers.keys())
-                }
-            })
+            logger.error(
+                "Unknown tool requested",
+                extra={
+                    "structured_data": {
+                        "tool_name": tool_name,
+                        "available_tools": list(tool_handlers.keys()),
+                    }
+                },
+            )
             error_result = {
-                "error": {
-                    "code": -32601,
-                    "message": f"Unknown tool: {tool_name}"
-                }
+                "error": {"code": -32601, "message": f"Unknown tool: {tool_name}"}
             }
             print(json.dumps(error_result, indent=2))
             return
-
+
         # Clean HTML entities from arguments before execution
         def clean_html_entities(text: str) -> str:
             if not text:
                 return text
             import html
+
             return html.unescape(text)
-
+
         def clean_arguments(arguments: dict) -> dict:
             cleaned = {}
             for key, value in arguments.items():
```
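The `clean_html_entities` helper defined above is a thin wrapper over the standard library: `html.unescape` turns entities such as `&quot;` and `&amp;` back into literal characters, so arguments that arrive HTML-escaped are normalized before the tool runs. For example:

```python
import html

# Entities produced by an HTML-escaping client are decoded back to plain text.
print(html.unescape("Tracks &quot;file descriptions&quot; &amp; token counts"))
# Tracks "file descriptions" & token counts
```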
```diff
@@ -332,56 +372,67 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
                 else:
                     cleaned[key] = value
             return cleaned
-
+
         cleaned_tool_arguments = clean_arguments(tool_arguments)
-
-        logger.info("Executing tool", extra={
-            "structured_data": {
-                "tool_name": tool_name,
-                "arguments": {k: v for k, v in cleaned_tool_arguments.items() if k not in ["description"]}  # Exclude long descriptions
-            }
-        })
+
+        logger.info(
+            "Executing tool",
+            extra={
+                "structured_data": {
+                    "tool_name": tool_name,
+                    "arguments": {
+                        k: v
+                        for k, v in cleaned_tool_arguments.items()
+                        if k not in ["description"]
+                    },  # Exclude long descriptions
+                }
+            },
+        )
+
         # Execute the tool handler directly
         import time
+
         start_time = time.time()
         result = await tool_handlers[tool_name](cleaned_tool_arguments)
         execution_time = time.time() - start_time
-
-        logger.info("Tool execution completed", extra={
-            "structured_data": {
-                "tool_name": tool_name,
-                "execution_time_seconds": execution_time,
-                "result_type": type(result).__name__,
-                "result_size": len(json.dumps(result, default=str)) if result else 0
-            }
-        })
+
+        logger.info(
+            "Tool execution completed",
+            extra={
+                "structured_data": {
+                    "tool_name": tool_name,
+                    "execution_time_seconds": execution_time,
+                    "result_type": type(result).__name__,
+                    "result_size": (
+                        len(json.dumps(result, default=str)) if result else 0
+                    ),
+                }
+            },
+        )
+
         print(json.dumps(result, indent=2, default=str))
-
+
     except Exception as e:
-        logger.error("Tool execution failed", extra={
-            "structured_data": {
-                "tool_name": tool_name if "tool_name" in locals() else "unknown",
-                "error_type": type(e).__name__,
-                "error_message": str(e)
-            }
-        })
+        logger.error(
+            "Tool execution failed",
+            extra={
+                "structured_data": {
+                    "tool_name": tool_name if "tool_name" in locals() else "unknown",
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            },
+        )
         error_result = {"error": {"code": -32603, "message": str(e)}}
         print(json.dumps(error_result, indent=2))
     finally:
         # Clean up database connections
-        if hasattr(server, 'db_manager') and server.db_manager:
+        if hasattr(server, "db_manager") and server.db_manager:
             logger.debug("Closing database connections")
             await server.db_manager.close_pool()
             logger.debug("Database connections closed")
         logger.info("=== RUNCOMMAND SESSION ENDED ===")
-
+
         # Close logger handlers to flush any remaining logs
         for handler in logger.handlers[:]:
             handler.close()
```
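Throughout this file, log calls attach context via `extra={"structured_data": ...}`, which the `logging` module copies onto the record as a `structured_data` attribute. The formatter that consumes it lives in `logging_config.py` and is not part of this diff; the sketch below shows one way such a formatter could pick the attribute up — the class name and output layout are assumptions, not the package's actual implementation:

```python
import json
import logging


class StructuredFormatter(logging.Formatter):
    """Append the record's structured_data attribute, when present, as JSON."""

    def format(self, record: logging.LogRecord) -> str:
        base = super().format(record)
        data = getattr(record, "structured_data", None)  # set via extra={...}
        return f"{base} {json.dumps(data, default=str)}" if data else base


logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(StructuredFormatter("%(levelname)s %(message)s"))
logger.addHandler(handler)

logger.info("Executing tool", extra={"structured_data": {"tool_name": "check_codebase_size"}})
# INFO Executing tool {"tool_name": "check_codebase_size"}
```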
```diff
@@ -392,28 +443,27 @@ async def handle_dumpdescriptions(args: argparse.Namespace) -> None:
     """Handle --dumpdescriptions command."""
     from .database.database import DatabaseManager
     from .token_counter import TokenCounter
-
+
     if len(args.dumpdescriptions) < 1:
         print("Error: Project ID is required", file=sys.stderr)
         sys.exit(1)
-
+
     project_id = args.dumpdescriptions[0]
     branch = args.dumpdescriptions[1] if len(args.dumpdescriptions) > 1 else None
-
+
     db_manager = None
     try:
         # Initialize database and token counter
         db_path = Path(args.db_path).expanduser()
         db_manager = DatabaseManager(db_path)
         await db_manager.initialize()
-
+
         token_counter = TokenCounter(args.token_limit)
-
+
         # Get file descriptions
         if branch:
             file_descriptions = await db_manager.get_all_file_descriptions(
-                project_id=project_id,
-                branch=branch
+                project_id=project_id, branch=branch
             )
             print(f"File descriptions for project {project_id}, branch {branch}:")
         else:
```
```diff
@@ -421,9 +471,9 @@ async def handle_dumpdescriptions(args: argparse.Namespace) -> None:
                 project_id=project_id
             )
             print(f"File descriptions for project {project_id} (all branches):")
-
+
         print("=" * 80)
-
+
         if not file_descriptions:
             print("No descriptions found.")
             total_tokens = 0
```
```diff
@@ -435,71 +485,76 @@ async def handle_dumpdescriptions(args: argparse.Namespace) -> None:
             print(f"Branch: {desc.branch}")
             print(f"Description: {desc.description}")
             print("-" * 40)
-
+
             # Count tokens for this description
             desc_tokens = token_counter.count_file_description_tokens(desc)
             total_tokens += desc_tokens
-
+
         print("=" * 80)
         print(f"Total descriptions: {len(file_descriptions)}")
         print(f"Total tokens: {total_tokens}")
-
+
     finally:
         # Clean up database connections
         if db_manager:
             await db_manager.close_pool()
 
 
-
 async def handle_githook(args: argparse.Namespace) -> None:
     """Handle --githook command."""
     from .logging_config import setup_command_logger
-
+
     # Set up dedicated logging for githook
     cache_dir = Path(args.cache_dir).expanduser()
     logger = setup_command_logger("githook", cache_dir)
-
+
     try:
         from .database.database import DatabaseManager
         from .git_hook_handler import GitHookHandler
-
+
         # Process commit hash arguments
         commit_hashes = args.githook if args.githook else []
-
-        logger.info("Starting git hook execution", extra={
-            "structured_data": {
-                "args": {
-                    "db_path": str(args.db_path),
-                    "cache_dir": str(args.cache_dir),
-                    "token_limit": args.token_limit,
-                    "commit_hashes": commit_hashes
-                }
-            }
-        })
-
+
+        logger.info(
+            "Starting git hook execution",
+            extra={
+                "structured_data": {
+                    "args": {
+                        "db_path": str(args.db_path),
+                        "cache_dir": str(args.cache_dir),
+                        "token_limit": args.token_limit,
+                        "commit_hashes": commit_hashes,
+                    }
+                }
+            },
+        )
+
         # Initialize database
         db_path = Path(args.db_path).expanduser()
         cache_dir = Path(args.cache_dir).expanduser()
-
-        logger.info("Setting up directories and database", extra={
-            "structured_data": {
-                "db_path": str(db_path),
-                "cache_dir": str(cache_dir)
-            }
-        })
+
+        logger.info(
+            "Setting up directories and database",
+            extra={
+                "structured_data": {
+                    "db_path": str(db_path),
+                    "cache_dir": str(cache_dir),
+                }
+            },
+        )
+
         # Create directories if they don't exist
         db_path.parent.mkdir(parents=True, exist_ok=True)
         cache_dir.mkdir(parents=True, exist_ok=True)
-
+
         db_manager = DatabaseManager(db_path)
         await db_manager.initialize()
         logger.debug("Database initialized successfully")
-
+
         # Initialize git hook handler
         git_handler = GitHookHandler(db_manager, cache_dir, logger)
         logger.debug("Git hook handler initialized")
-
+
         # Run git hook analysis
         logger.info("Starting git hook analysis")
         if len(commit_hashes) == 0:
```
```diff
@@ -510,31 +565,36 @@ async def handle_githook(args: argparse.Namespace) -> None:
             await git_handler.run_githook_mode(commit_hash=commit_hashes[0])
         elif len(commit_hashes) == 2:
             # Process commit range
-            await git_handler.run_githook_mode(commit_range=(commit_hashes[0], commit_hashes[1]))
+            await git_handler.run_githook_mode(
+                commit_range=(commit_hashes[0], commit_hashes[1])
+            )
         else:
             raise ValueError("--githook accepts 0, 1, or 2 commit hashes")
         logger.info("Git hook analysis completed successfully")
-
+
     except Exception as e:
-        logger.error("Git hook execution failed", extra={
-            "structured_data": {
-                "error_type": type(e).__name__,
-                "error_message": str(e)
-            }
-        })
+        logger.error(
+            "Git hook execution failed",
+            extra={
+                "structured_data": {
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            },
+        )
         print(f"Git hook error: {e}", file=sys.stderr)
         sys.exit(1)
     finally:
         # Clean up database connections
-        if 'db_manager' in locals():
+        if "db_manager" in locals():
             try:
                 await db_manager.close_pool()
                 logger.debug("Database connections closed")
             except Exception as e:
                 logger.warning(f"Error closing database connections: {e}")
-
+
         logger.info("=== GITHOOK SESSION ENDED ===")
-
+
         # Close logger handlers to flush any remaining logs
         for handler in logger.handlers[:]:
             handler.close()
```
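The zero/one/two-hash dispatch above leans on `argparse`'s `nargs="*"` semantics: when `--githook` is absent, `args.githook` is `None`; when it is passed with no values, it is an empty list — which is why `main()` tests `args.githook is not None` rather than truthiness. A quick illustration:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--githook", nargs="*", metavar="COMMIT_HASH")

print(parser.parse_args([]).githook)                     # None     -> githook mode off
print(parser.parse_args(["--githook"]).githook)          # []       -> analyze current changes
print(parser.parse_args(["--githook", "a1b2"]).githook)  # ['a1b2'] -> one specific commit
```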
```diff
@@ -544,50 +604,57 @@ async def handle_githook(args: argparse.Namespace) -> None:
 async def handle_cleanup(args: argparse.Namespace) -> None:
     """Handle --cleanup command."""
     from .logging_config import setup_command_logger
-
+
     # Set up dedicated logging for cleanup
     cache_dir = Path(args.cache_dir).expanduser()
     logger = setup_command_logger("cleanup", cache_dir)
-
+
     db_manager = None
     try:
         from .database.database import DatabaseManager
-
-        logger.info("Starting database cleanup", extra={
-            "structured_data": {
-                "args": {
-                    "db_path": str(args.db_path),
-                    "cache_dir": str(args.cache_dir)
-                }
-            }
-        })
-
+
+        logger.info(
+            "Starting database cleanup",
+            extra={
+                "structured_data": {
+                    "args": {
+                        "db_path": str(args.db_path),
+                        "cache_dir": str(args.cache_dir),
+                    }
+                }
+            },
+        )
+
         # Initialize database
         db_path = Path(args.db_path).expanduser()
         db_manager = DatabaseManager(db_path)
         await db_manager.initialize()
         logger.debug("Database initialized successfully")
-
+
         # Perform cleanup
         logger.info("Removing empty projects")
         removed_count = await db_manager.cleanup_empty_projects()
-
+
         if removed_count > 0:
             print(f"Removed {removed_count} empty project(s)")
-            logger.info("Cleanup completed", extra={"structured_data": {"removed_projects": removed_count}})
+            logger.info(
+                "Cleanup completed",
+                extra={"structured_data": {"removed_projects": removed_count}},
+            )
         else:
             print("No empty projects found")
             logger.info("No empty projects found")
-
+
     except Exception as e:
-        logger.error("Cleanup failed", extra={
-            "structured_data": {
-                "error_type": type(e).__name__,
-                "error_message": str(e)
-            }
-        })
+        logger.error(
+            "Cleanup failed",
+            extra={
+                "structured_data": {
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            },
+        )
         print(f"Cleanup error: {e}", file=sys.stderr)
         sys.exit(1)
     finally:
```
```diff
@@ -597,7 +664,7 @@ async def handle_cleanup(args: argparse.Namespace) -> None:
             await db_manager.close_pool()
             logger.debug("Database connections closed")
         logger.info("=== CLEANUP SESSION ENDED ===")
-
+
         # Close logger handlers to flush any remaining logs
         for handler in logger.handlers[:]:
             handler.close()
```
```diff
@@ -607,72 +674,82 @@ async def handle_cleanup(args: argparse.Namespace) -> None:
 async def handle_map(args: argparse.Namespace) -> None:
     """Handle --map command."""
     from .logging_config import setup_command_logger
-
-    from collections import defaultdict
-    from pathlib import Path as PathLib
-
+
     # Set up dedicated logging for map
     cache_dir = Path(args.cache_dir).expanduser()
     logger = setup_command_logger("map", cache_dir)
-
+
     db_manager = None
     try:
         from .database.database import DatabaseManager
-
-        logger.info("Starting project map generation", extra={
-            "structured_data": {
-                "project_identifier": args.map,
-                "args": {
-                    "db_path": str(args.db_path),
-                    "cache_dir": str(args.cache_dir)
-                }
-            }
-        })
-
+
+        logger.info(
+            "Starting project map generation",
+            extra={
+                "structured_data": {
+                    "project_identifier": args.map,
+                    "args": {
+                        "db_path": str(args.db_path),
+                        "cache_dir": str(args.cache_dir),
+                    },
+                }
+            },
+        )
+
         # Initialize database
         db_path = Path(args.db_path).expanduser()
         db_manager = DatabaseManager(db_path)
         await db_manager.initialize()
         logger.debug("Database initialized successfully")
-
+
         # Get project data
         logger.info("Retrieving project data")
        project_data = await db_manager.get_project_map_data(args.map)
-
+
         if not project_data:
             print(f"Error: Project '{args.map}' not found", file=sys.stderr)
-            logger.error("Project not found", extra={"structured_data": {"identifier": args.map}})
+            logger.error(
+                "Project not found", extra={"structured_data": {"identifier": args.map}}
+            )
             sys.exit(1)
-
-        project = project_data['project']
-        branch = project_data['branch']
-        overview = project_data['overview']
-        files = project_data['files']
-
-        logger.info("Generating markdown map", extra={
-            "structured_data": {
-                "project_name": project.name,
-                "branch": branch,
-                "file_count": len(files),
-                "has_overview": overview is not None
-            }
-        })
+
+        project = project_data["project"]
+        branch = project_data["branch"]
+        overview = project_data["overview"]
+        files = project_data["files"]
+
+        logger.info(
+            "Generating markdown map",
+            extra={
+                "structured_data": {
+                    "project_name": project.name,
+                    "branch": branch,
+                    "file_count": len(files),
+                    "has_overview": overview is not None,
+                }
+            },
+        )
+
         # Generate markdown
-        markdown_content = generate_project_markdown(project, branch, overview, files, logger)
-
+        markdown_content = generate_project_markdown(
+            project, branch, overview, files, logger
+        )
+
         # Output the markdown
         print(markdown_content)
-
+
         logger.info("Project map generated successfully")
-
+
     except Exception as e:
-        logger.error("Map generation failed", extra={
-            "structured_data": {
-                "error_type": type(e).__name__,
-                "error_message": str(e)
-            }
-        })
+        logger.error(
+            "Map generation failed",
+            extra={
+                "structured_data": {
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            },
+        )
         print(f"Map generation error: {e}", file=sys.stderr)
         sys.exit(1)
     finally:
```
```diff
@@ -682,7 +759,7 @@ async def handle_map(args: argparse.Namespace) -> None:
             await db_manager.close_pool()
             logger.debug("Database connections closed")
         logger.info("=== MAP SESSION ENDED ===")
-
+
         # Close logger handlers to flush any remaining logs
         for handler in logger.handlers[:]:
             handler.close()
```
```diff
@@ -694,38 +771,40 @@ def generate_project_markdown(project, branch, overview, files, logger):
     import re
     from collections import defaultdict
     from pathlib import Path as PathLib
-
+
     markdown_lines = []
-
+
     # Project header with sentence case
     project_name = project.name.title() if project.name.islower() else project.name
     markdown_lines.append(f"# {project_name}")
     markdown_lines.append("")
-
+
     # Project metadata
     markdown_lines.append(f"**Branch:** {branch}")
     markdown_lines.append("")
-
+
     # Project overview (with header demotion if needed)
     if overview and overview.overview:
         markdown_lines.append("## Project Overview")
         markdown_lines.append("")
-
+
         # Check if overview contains H1 headers and demote if needed
         overview_content = overview.overview
-        if re.search(r'^#\s', overview_content, re.MULTILINE):
+        if re.search(r"^#\s", overview_content, re.MULTILINE):
             logger.debug("H1 headers found in overview, demoting all headers")
             # Demote all headers by one level
-            overview_content = re.sub(r'^(#{1,6})', r'#\1', overview_content, flags=re.MULTILINE)
+            overview_content = re.sub(
+                r"^(#{1,6})", r"#\1", overview_content, flags=re.MULTILINE
+            )
+
         markdown_lines.append(overview_content)
         markdown_lines.append("")
-
+
     # File structure
     if files:
         markdown_lines.append("## Codebase Structure")
         markdown_lines.append("")
-
+
         # Organize files by directory
         directories = defaultdict(list)
         for file_desc in files:
```
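The header-demotion step above prefixes one extra `#` to every existing markdown header, so an H1 inside a stored overview cannot collide with the map's own top-level title. In isolation:

```python
import re

overview = "# Overview\nSome text\n## Details"
if re.search(r"^#\s", overview, re.MULTILINE):
    # Each run of 1-6 leading hashes gains one more: H1 -> H2, H2 -> H3, ...
    overview = re.sub(r"^(#{1,6})", r"#\1", overview, flags=re.MULTILINE)
print(overview)
# ## Overview
# Some text
# ### Details
```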
```diff
@@ -737,13 +816,15 @@ def generate_project_markdown(project, branch, overview, files, logger):
                 # File in subdirectory
                 directory = str(file_path.parent)
                 directories[directory].append(file_desc)
-
+
         # Sort directories (root first, then alphabetically)
-        sorted_dirs = sorted(directories.keys(), key=lambda x: ("" if x == "(root)" else x))
+        sorted_dirs = sorted(
+            directories.keys(), key=lambda x: ("" if x == "(root)" else x)
+        )
+
         for directory in sorted_dirs:
             dir_files = directories[directory]
-
+
             # Directory header
             if directory == "(root)":
                 markdown_lines.append("### Root Directory")
```
```diff
@@ -752,104 +833,108 @@ def generate_project_markdown(project, branch, overview, files, logger):
                 depth = len(PathLib(directory).parts)
                 header_level = "#" * min(depth + 2, 6)  # Cap at H6
                 markdown_lines.append(f"{header_level} {directory}/")
-
+
             markdown_lines.append("")
-
+
             # Files table
             markdown_lines.append("| File | Description |")
             markdown_lines.append("|------|-------------|")
-
+
             for file_desc in sorted(dir_files, key=lambda x: x.file_path):
                 file_name = PathLib(file_desc.file_path).name
                 # Escape pipe characters in descriptions for markdown table
-                description = file_desc.description.replace("|", "\\|").replace("\n", " ").strip()
+                description = (
+                    file_desc.description.replace("|", "\\|").replace("\n", " ").strip()
+                )
                 markdown_lines.append(f"| `{file_name}` | {description} |")
-
+
             markdown_lines.append("")
-
+
     # Footer with generation info
     from datetime import datetime
+
     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     markdown_lines.append("---")
     markdown_lines.append(f"*Generated by MCP Code Indexer on {timestamp}*")
-
+
     return "\n".join(markdown_lines)
 
 
 async def main() -> None:
     """Main entry point for the MCP server."""
     args = parse_arguments()
-
-    # Handle git hook command
+
+    # Handle git hook command
     if args.githook is not None:
         await handle_githook(args)
         return
-
+
     # Handle utility commands
     if args.getprojects:
         await handle_getprojects(args)
         return
-
+
     if args.runcommand:
         await handle_runcommand(args)
         return
-
+
     if args.dumpdescriptions:
         await handle_dumpdescriptions(args)
         return
-
+
     if args.cleanup:
         await handle_cleanup(args)
         return
-
+
     if args.map:
         await handle_map(args)
         return
-
+
     # Setup structured logging
-    log_file = Path(args.cache_dir).expanduser() / "server.log" if args.cache_dir else None
+    log_file = (
+        Path(args.cache_dir).expanduser() / "server.log" if args.cache_dir else None
+    )
     logger = setup_logging(
-        log_level=args.log_level,
-        log_file=log_file,
-        enable_file_logging=True
+        log_level=args.log_level, log_file=log_file, enable_file_logging=True
     )
-
+
     # Setup error handling
     error_handler = setup_error_handling(logger)
-
+
     # Expand user paths
     db_path = Path(args.db_path).expanduser()
     cache_dir = Path(args.cache_dir).expanduser()
-
+
     # Create directories if they don't exist
     db_path.parent.mkdir(parents=True, exist_ok=True)
     cache_dir.mkdir(parents=True, exist_ok=True)
-
+
     # Log startup information to stderr (stdout reserved for MCP JSON-RPC)
-    logger.info("Starting MCP Code Index Server", extra={
-        "structured_data": {
-            "startup": {
-                "version": __version__,
-                "token_limit": args.token_limit,
-                "db_path": str(db_path),
-                "cache_dir": str(cache_dir),
-                "log_level": args.log_level
-            }
-        }
-    })
-
+
+    logger.info(
+        "Starting MCP Code Index Server",
+        extra={
+            "structured_data": {
+                "startup": {
+                    "version": __version__,
+                    "token_limit": args.token_limit,
+                    "db_path": str(db_path),
+                    "cache_dir": str(cache_dir),
+                    "log_level": args.log_level,
+                }
+            }
+        },
+    )
+
     try:
         # Import and run the MCP server
         from .server.mcp_server import MCPCodeIndexServer
-
+
         server = MCPCodeIndexServer(
-            token_limit=args.token_limit,
-            db_path=db_path,
-            cache_dir=cache_dir
+            token_limit=args.token_limit, db_path=db_path, cache_dir=cache_dir
         )
-
+
         await server.run()
-
+
     except Exception as e:
         error_handler.log_error(e, context={"phase": "startup"})
         raise
```
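The description cleanup in the table loop above escapes `|` and flattens newlines so that a multi-line description cannot break the two-column markdown table. For instance:

```python
# Hypothetical description containing both a pipe and a newline.
description = "Parses a | b alternation patterns\nacross multiple lines"
cleaned = description.replace("|", "\\|").replace("\n", " ").strip()
print(f"| `parser.py` | {cleaned} |")
# | `parser.py` | Parses a \| b alternation patterns across multiple lines |
```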
```diff
@@ -866,6 +951,7 @@ def cli_main():
     except Exception as e:
         # Log critical errors to stderr, not stdout
         import traceback
+
         print(f"Server failed to start: {e}", file=sys.stderr)
         print(f"Traceback: {traceback.format_exc()}", file=sys.stderr)
         sys.exit(1)
```