awslabs.dynamodb-mcp-server 2.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- awslabs/__init__.py +17 -0
- awslabs/dynamodb_mcp_server/__init__.py +17 -0
- awslabs/dynamodb_mcp_server/cdk_generator/__init__.py +19 -0
- awslabs/dynamodb_mcp_server/cdk_generator/generator.py +276 -0
- awslabs/dynamodb_mcp_server/cdk_generator/models.py +521 -0
- awslabs/dynamodb_mcp_server/cdk_generator/templates/README.md +57 -0
- awslabs/dynamodb_mcp_server/cdk_generator/templates/stack.ts.j2 +70 -0
- awslabs/dynamodb_mcp_server/common.py +94 -0
- awslabs/dynamodb_mcp_server/db_analyzer/__init__.py +30 -0
- awslabs/dynamodb_mcp_server/db_analyzer/analyzer_utils.py +394 -0
- awslabs/dynamodb_mcp_server/db_analyzer/base_plugin.py +355 -0
- awslabs/dynamodb_mcp_server/db_analyzer/mysql.py +450 -0
- awslabs/dynamodb_mcp_server/db_analyzer/plugin_registry.py +73 -0
- awslabs/dynamodb_mcp_server/db_analyzer/postgresql.py +215 -0
- awslabs/dynamodb_mcp_server/db_analyzer/sqlserver.py +255 -0
- awslabs/dynamodb_mcp_server/markdown_formatter.py +513 -0
- awslabs/dynamodb_mcp_server/model_validation_utils.py +845 -0
- awslabs/dynamodb_mcp_server/prompts/dynamodb_architect.md +851 -0
- awslabs/dynamodb_mcp_server/prompts/json_generation_guide.md +185 -0
- awslabs/dynamodb_mcp_server/prompts/transform_model_validation_result.md +168 -0
- awslabs/dynamodb_mcp_server/server.py +524 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/METADATA +306 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/RECORD +27 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/WHEEL +4 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/entry_points.txt +2 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/licenses/LICENSE +175 -0
- awslabs_dynamodb_mcp_server-2.0.10.dist-info/licenses/NOTICE +2 -0
awslabs/dynamodb_mcp_server/markdown_formatter.py (new file, 513 lines)

@@ -0,0 +1,513 @@

```python
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
from awslabs.dynamodb_mcp_server.db_analyzer.base_plugin import DatabasePlugin
from datetime import datetime
from typing import Any, Dict, List, Tuple


# Configure logger
logger = logging.getLogger(__name__)


class MarkdownFormatter:
    """Formats database analysis results into LLM-optimized Markdown files."""

    def __init__(
        self,
        results: Dict[str, Any],
        metadata: Dict[str, Any],
        output_dir: str,
        plugin: DatabasePlugin,
    ):
        """Initialize formatter with analysis results.

        Args:
            results: Dictionary of query results from DatabaseAnalyzer
            metadata: Analysis metadata (database name, dates, etc.)
            output_dir: Directory where Markdown files will be saved
            plugin: DatabasePlugin instance for getting query definitions (required)
        """
        if plugin is None:
            raise ValueError('plugin parameter is required and cannot be None')

        self.results = results
        self.metadata = metadata
        self.output_dir = output_dir
        self.plugin = plugin
        self.file_registry: List[str] = []  # Track generated files for manifest
        self.skipped_queries: Dict[str, str] = {}  # Track skipped queries and reasons
        self.errors: List[Tuple[str, str]] = []  # Track errors (query_name, error_message)
```
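For orientation between the hunks below: a minimal sketch of the inputs the constructor expects, inferred from how the methods further down consume them. The query name, description, and row values are invented for illustration and are not part of the package.

```python
# Illustrative input shapes, inferred from how MarkdownFormatter reads them.
results = {
    'comprehensive_table_analysis': {
        'description': 'Table-level row counts and sizes',  # hypothetical text
        'data': [
            {'table_name': 'orders', 'row_count': 120000, 'size_mb': 512.25},
            {'table_name': 'customers', 'row_count': 45000, 'size_mb': 96.5},
        ],
    },
}

metadata = {
    'database': 'shop',            # rendered in the manifest header
    'analysis_period': '30 days',  # rendered as-is in the manifest
    'performance_enabled': False,  # performance_schema queries get stub files
    'skipped_queries': [],         # query names the analyzer explicitly skipped
}
```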
```python
    def _format_as_markdown_table(self, data: List[Dict[str, Any]]) -> str:
        """Format query result data as Markdown table.

        Args:
            data: List of row dictionaries

        Returns:
            Markdown table string
        """
        try:
            # Handle empty data gracefully (catches None, empty list, etc.)
            if not data:
                logger.warning('No data provided to format as Markdown table')
                return 'No data returned'

            # Ensure data is a list
            if not isinstance(data, list):
                logger.error(f'Data is not a list, got type: {type(data)}')
                return 'Error: Invalid data format'

            # Get column names from first row
            first_row = data[0]
            if not isinstance(first_row, dict):
                logger.error(f'First row is not a dictionary, got type: {type(first_row)}')
                return 'Error: Invalid data structure'

            if not first_row:
                logger.warning('First row is empty dictionary')
                return 'No columns available'

            columns = list(first_row.keys())

            # Build header row
            header = '| ' + ' | '.join(columns) + ' |'
            separator = '|' + '|'.join([' --- ' for _ in columns]) + '|'

            # Build data rows
            rows = []
            for row_idx, row in enumerate(data):
                try:
                    if not isinstance(row, dict):
                        logger.warning(f'Row {row_idx} is not a dictionary, skipping')
                        continue

                    formatted_values = []
                    for col in columns:
                        value = row.get(col)

                        # Handle null values
                        if value is None:
                            formatted_values.append('NULL')
                        # Format numbers with appropriate precision
                        elif isinstance(value, float):
                            # Use 2 decimal places for floats
                            formatted_values.append(f'{value:.2f}')
                        elif isinstance(value, (int, bool)):
                            formatted_values.append(str(value))
                        else:
                            # Convert to string and escape pipe characters
                            formatted_values.append(str(value).replace('|', '\\|'))

                    rows.append('| ' + ' | '.join(formatted_values) + ' |')
                except Exception as e:
                    logger.error(f'Error formatting row {row_idx}: {str(e)}')
                    # Continue processing remaining rows
                    continue

            # If no rows were successfully formatted
            if not rows:
                logger.error('No rows could be formatted successfully')
                return 'Error: Unable to format data rows'

            # Combine all parts
            table = '\n'.join([header, separator] + rows)
            return table

        except Exception as e:
            logger.error(f'Unexpected error in _format_as_markdown_table: {str(e)}')
            return f'Error: Unable to format data - {str(e)}'
```
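A quick worked example of the formatting rules above (None becomes `NULL`, floats get two decimal places, pipes in strings are escaped); the row data is invented, and the throwaway `object()` plugin works only because the constructor merely rejects None:

```python
from awslabs.dynamodb_mcp_server.markdown_formatter import MarkdownFormatter

# The table formatter never touches the plugin, so any non-None object passes.
fmt = MarkdownFormatter(results={}, metadata={}, output_dir='.', plugin=object())

rows = [
    {'table_name': 'orders', 'size_mb': 512.256, 'notes': 'hot | path', 'owner': None},
]
print(fmt._format_as_markdown_table(rows))
# | table_name | size_mb | notes | owner |
# | --- | --- | --- | --- |
# | orders | 512.26 | hot \| path | NULL |
```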
```python
    def _generate_query_file(self, query_name: str, query_result: Dict[str, Any]) -> str:
        """Generate Markdown file for a single query result.

        Args:
            query_name: Name of the query
            query_result: Query result data

        Returns:
            Path to generated file, or empty string if file generation failed
        """
        try:
            # Create filename from query name
            filename = f'{query_name}.md'
            file_path = os.path.join(self.output_dir, filename)

            # Extract query description and data
            description = query_result.get('description', 'No description available')
            data = query_result.get('data', [])

            # Build file content
            content_parts = []

            # Add query description header
            title = query_name.replace('_', ' ').title()
            content_parts.append(f'# {title}\n')
            content_parts.append(f'**Query Description**: {description}\n')

            # Add generation timestamp
            timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            content_parts.append(f'**Generated**: {timestamp}\n')

            # Add results section
            content_parts.append('## Results\n')

            # Format data as Markdown table
            table = self._format_as_markdown_table(data)
            content_parts.append(table)

            # Add row count footer
            row_count = len(data) if data and isinstance(data, list) else 0
            content_parts.append(f'\n**Total Rows**: {row_count}')

            # Combine all parts
            content = '\n'.join(content_parts)

            # Save file to output directory
            try:
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(content)
                return file_path
            except OSError as e:
                error_msg = f'Failed to write file {file_path}: {str(e)}'
                logger.error(error_msg)
                self.errors.append((query_name, error_msg))
                return ''

        except Exception as e:
            error_msg = f'Unexpected error generating file for {query_name}: {str(e)}'
            logger.error(error_msg)
            self.errors.append((query_name, error_msg))
            return ''
```
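Putting the pieces together, a per-query file comes out shaped roughly like this. The sketch assumes a hypothetical `foreign_key_analysis` result with one row; the description text and timestamp are placeholders:

```python
# Expected on-disk content of foreign_key_analysis.md (timestamp will differ):
expected = """# Foreign Key Analysis

**Query Description**: Foreign key relationships between tables

**Generated**: 2025-01-01 12:00:00

## Results

| table_name | referenced_table |
| --- | --- |
| orders | customers |

**Total Rows**: 1"""
```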
```python
    def _generate_skipped_query_file(self, query_name: str, reason: str) -> str:
        """Generate informational file for a skipped query.

        Args:
            query_name: Name of the skipped query
            reason: Reason why the query was skipped

        Returns:
            Path to generated file, or empty string if file generation failed
        """
        try:
            # Create filename from query name
            filename = f'{query_name}.md'
            file_path = os.path.join(self.output_dir, filename)

            # Build file content
            content_parts = []

            # Add query description header
            title = query_name.replace('_', ' ').title()
            content_parts.append(f'# {title}\n')

            # Add generation timestamp
            timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            content_parts.append(f'**Generated**: {timestamp}\n')

            # Add skipped status
            content_parts.append('## Status\n')
            content_parts.append('**Query Skipped**\n')

            # Add reason
            content_parts.append('## Reason\n')
            content_parts.append(f'{reason}\n')

            # Add informational note
            content_parts.append('## Note\n')
            content_parts.append('This query was not executed during the analysis. ')
            content_parts.append('No data is available for this query result.')

            # Combine all parts
            content = '\n'.join(content_parts)

            # Save file to output directory
            try:
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(content)
                return file_path
            except OSError as e:
                error_msg = f'Failed to write skipped query file {file_path}: {str(e)}'
                logger.error(error_msg)
                self.errors.append((query_name, error_msg))
                return ''

        except Exception as e:
            error_msg = (
                f'Unexpected error generating skipped query file for {query_name}: {str(e)}'
            )
            logger.error(error_msg)
            self.errors.append((query_name, error_msg))
            return ''
```
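The skipped-query stub is the same idea without data. For a performance query skipped because performance_schema was off, the file would look roughly like this; the timestamp is a placeholder, and note that the `'\n'.join` puts the two Note sentences on separate lines (the first keeps a trailing space):

```python
# Expected on-disk content of query_performance_stats.md when skipped:
expected = """# Query Performance Stats

**Generated**: 2025-01-01 12:00:00

## Status

**Query Skipped**

## Reason

Performance schema is disabled. This query requires performance_schema to be enabled.

## Note

This query was not executed during the analysis.
No data is available for this query result."""
```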
```python
    def _generate_manifest(self) -> None:
        """Generate manifest.md with links to all files."""
        try:
            manifest_path = os.path.join(self.output_dir, 'manifest.md')

            content_parts = []

            # Add title
            content_parts.append('# Database Analysis Manifest\n')

            # Add metadata section
            content_parts.append('## Metadata')
            database_name = self.metadata.get('database', 'Unknown')
            content_parts.append(f'- **Database**: {database_name}')

            timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            content_parts.append(f'- **Generated**: {timestamp}')

            analysis_period = self.metadata.get('analysis_period', 'N/A')
            content_parts.append(f'- **Analysis Period**: {analysis_period}')

            performance_enabled = self.metadata.get('performance_enabled', True)
            performance_status = 'Enabled' if performance_enabled else 'Disabled'
            content_parts.append(f'- **Performance Schema**: {performance_status}\n')

            # Get query categories and descriptions from plugin
            schema_queries = self.plugin.get_queries_by_category('information_schema')
            performance_queries = self.plugin.get_queries_by_category('performance_schema')
            query_descriptions = self.plugin.get_query_descriptions()

            # Add Query Results Files section
            content_parts.append('## Query Results Files\n')

            # Add Schema Queries section
            content_parts.append('### Schema Queries')
            for query_name in schema_queries:
                filename = f'{query_name}.md'
                description = query_descriptions.get(query_name, 'No description')

                # Check if query was skipped
                if query_name in self.skipped_queries:
                    reason = self.skipped_queries[query_name]
                    content_parts.append(
                        f'- [{query_name.replace("_", " ").title()}](./{filename}) - **SKIPPED**: {reason}'
                    )
                else:
                    content_parts.append(
                        f'- [{query_name.replace("_", " ").title()}](./{filename}) - {description}'
                    )

            content_parts.append('')  # Empty line between sections

            # Add Performance Queries section
            content_parts.append('### Performance Queries')
            for query_name in performance_queries:
                filename = f'{query_name}.md'
                description = query_descriptions.get(query_name, 'No description')

                # Check if query was skipped
                if query_name in self.skipped_queries:
                    reason = self.skipped_queries[query_name]
                    content_parts.append(
                        f'- [{query_name.replace("_", " ").title()}](./{filename}) - **SKIPPED**: {reason}'
                    )
                else:
                    content_parts.append(
                        f'- [{query_name.replace("_", " ").title()}](./{filename}) - {description}'
                    )

            content_parts.append('')  # Empty line before skipped queries

            # Add skipped queries section if any
            if self.skipped_queries:
                content_parts.append('## Skipped Queries\n')
                content_parts.append('The following queries were not executed:\n')
                for query_name, reason in self.skipped_queries.items():
                    content_parts.append(f'- **{query_name.replace("_", " ").title()}**: {reason}')
                content_parts.append('')  # Empty line after skipped queries

            # Add summary statistics
            content_parts.append('## Summary Statistics')

            # Calculate statistics from results
            total_tables = 0
            total_columns = 0
            total_indexes = 0
            total_foreign_keys = 0
            total_queries = 0
            total_procedures = 0
            total_triggers = 0

            # Extract statistics from query results
            if 'comprehensive_table_analysis' in self.results:
                table_data = self.results['comprehensive_table_analysis'].get('data', [])
                total_tables = len(table_data) if table_data else 0

            if 'column_analysis' in self.results:
                column_data = self.results['column_analysis'].get('data', [])
                total_columns = len(column_data) if column_data else 0

            if 'comprehensive_index_analysis' in self.results:
                index_data = self.results['comprehensive_index_analysis'].get('data', [])
                total_indexes = len(index_data) if index_data else 0

            if 'foreign_key_analysis' in self.results:
                fk_data = self.results['foreign_key_analysis'].get('data', [])
                total_foreign_keys = len(fk_data) if fk_data else 0

            if 'query_performance_stats' in self.results:
                query_data = self.results['query_performance_stats'].get('data', [])
                total_queries = len(query_data) if query_data else 0
                # Count stored procedures - check if source_type column exists (MySQL-specific)
                if query_data and len(query_data) > 0 and 'source_type' in query_data[0]:
                    total_procedures = sum(
                        1 for row in query_data if row.get('source_type') == 'PROCEDURE'
                    )
                else:
                    total_procedures = 0

            if 'triggers_stats' in self.results:
                trigger_data = self.results['triggers_stats'].get('data', [])
                total_triggers = len(trigger_data) if trigger_data else 0

            # Add statistics
            content_parts.append(f'- **Total Tables**: {total_tables}')
            content_parts.append(f'- **Total Columns**: {total_columns}')
            content_parts.append(f'- **Total Indexes**: {total_indexes}')
            content_parts.append(f'- **Total Foreign Keys**: {total_foreign_keys}')
            content_parts.append(f'- **Query Patterns Analyzed**: {total_queries}')

            # Only show procedures/triggers if they exist in the results
            if total_procedures > 0:
                content_parts.append(f'- **Stored Procedures**: {total_procedures}')
            if total_triggers > 0:
                content_parts.append(f'- **Triggers**: {total_triggers}')

            # Add errors section if any errors occurred
            if self.errors:
                content_parts.append('\n## Errors')
                content_parts.append(
                    f'\n{len(self.errors)} error(s) occurred during file generation:\n'
                )
                for query_name, error_msg in self.errors:
                    content_parts.append(f'- **{query_name}**: {error_msg}')

            # Combine all parts
            content = '\n'.join(content_parts)

            # Save manifest file
            try:
                with open(manifest_path, 'w', encoding='utf-8') as f:
                    f.write(content)
            except OSError as e:
                error_msg = f'Failed to write manifest file {manifest_path}: {str(e)}'
                logger.error(error_msg)
                self.errors.append(('manifest', error_msg))

        except Exception as e:
            error_msg = f'Unexpected error generating manifest: {str(e)}'
            logger.error(error_msg)
            self.errors.append(('manifest', error_msg))
```
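Under the sketch inputs from earlier (database `shop`, two table rows, performance schema disabled), the manifest's metadata and statistics sections would come out roughly as below. The query link lists and any skipped-queries section are elided because the actual names come from the plugin and the run state:

```python
# Rough shape of manifest.md for the earlier sketch inputs:
manifest_excerpt = """# Database Analysis Manifest

## Metadata
- **Database**: shop
- **Generated**: <timestamp>
- **Analysis Period**: 30 days
- **Performance Schema**: Disabled

## Query Results Files
...

## Summary Statistics
- **Total Tables**: 2
- **Total Columns**: 0
- **Total Indexes**: 0
- **Total Foreign Keys**: 0
- **Query Patterns Analyzed**: 0"""
```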
```python
    def generate_all_files(self) -> Tuple[List[str], List[Tuple[str, str]]]:
        """Generate all Markdown files and manifest.

        Returns:
            Tuple of (list of generated file paths, list of errors)
            Errors are tuples of (query_name, error_message)
        """
        try:
            # Create output directory structure
            try:
                os.makedirs(self.output_dir, exist_ok=True)
            except OSError as e:
                error_msg = f'Failed to create output directory {self.output_dir}: {str(e)}'
                logger.error(error_msg)
                self.errors.append(('directory_creation', error_msg))
                return [], self.errors

            # Get all expected queries from plugin
            schema_queries = self.plugin.get_queries_by_category('information_schema')
            performance_queries = self.plugin.get_queries_by_category('performance_schema')
            expected_queries = schema_queries + performance_queries

            # Check if performance schema is disabled
            performance_enabled = self.metadata.get('performance_enabled', True)

            # Get list of skipped queries from metadata
            metadata_skipped_queries = self.metadata.get('skipped_queries', [])

            # Iterate through all expected queries
            for query_name in expected_queries:
                try:
                    # Check if query result exists in results dictionary
                    if query_name in self.results:
                        query_result = self.results[query_name]

                        # Check if the result has data or is valid
                        if query_result and isinstance(query_result, dict):
                            # Generate one file per query result
                            file_path = self._generate_query_file(query_name, query_result)
                            # Only add to registry if file was successfully created
                            if file_path:
                                self.file_registry.append(file_path)
                        else:
                            # Result exists but is invalid
                            reason = 'Query result is invalid or empty'
                            self.skipped_queries[query_name] = reason
                            file_path = self._generate_skipped_query_file(query_name, reason)
                            # Only add to registry if file was successfully created
                            if file_path:
                                self.file_registry.append(file_path)
                    else:
                        # Query result does not exist
                        # Determine reason for skipping
                        if query_name in metadata_skipped_queries:
                            # Query was explicitly marked as skipped by analyzer
                            if query_name in performance_queries and not performance_enabled:
                                reason = 'Performance schema is disabled. This query requires performance_schema to be enabled.'
                            else:
                                reason = 'Query was skipped during analysis'
                        elif query_name in performance_queries and not performance_enabled:
                            reason = 'Performance schema is disabled. This query requires performance_schema to be enabled.'
                        else:
                            reason = 'Query was not executed or failed during analysis'

                        self.skipped_queries[query_name] = reason
                        file_path = self._generate_skipped_query_file(query_name, reason)
                        # Only add to registry if file was successfully created
                        if file_path:
                            self.file_registry.append(file_path)

                except Exception as e:
                    # Log error and continue processing remaining files
                    error_msg = f'Error processing query {query_name}: {str(e)}'
                    logger.error(error_msg)
                    self.errors.append((query_name, error_msg))
                    # Continue to next query
                    continue

            # Generate manifest file
            self._generate_manifest()

            # Log summary
            logger.info(
                f'File generation complete. Generated {len(self.file_registry)} files with {len(self.errors)} errors'
            )

            # Return list of generated file paths and errors
            return self.file_registry, self.errors

        except Exception as e:
            error_msg = f'Critical error in generate_all_files: {str(e)}'
            logger.error(error_msg)
            self.errors.append(('generate_all_files', error_msg))
            return self.file_registry, self.errors
```
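Finally, an end-to-end usage sketch. `StubPlugin` is a hypothetical stand-in for a real `DatabasePlugin` (such as the MySQL plugin in `db_analyzer/mysql.py`); duck typing suffices because the constructor only rejects None and the formatter calls just two plugin methods:

```python
from awslabs.dynamodb_mcp_server.markdown_formatter import MarkdownFormatter


class StubPlugin:
    # Only the methods MarkdownFormatter actually calls are provided.
    def get_queries_by_category(self, category):
        return {
            'information_schema': ['comprehensive_table_analysis'],
            'performance_schema': ['query_performance_stats'],
        }.get(category, [])

    def get_query_descriptions(self):
        return {'comprehensive_table_analysis': 'Table-level statistics'}


results = {
    'comprehensive_table_analysis': {
        'description': 'Table-level statistics',
        'data': [{'table_name': 'orders', 'row_count': 120000}],
    }
    # query_performance_stats is absent, so a skipped-query stub gets written
}
metadata = {'database': 'shop', 'performance_enabled': False}

formatter = MarkdownFormatter(results, metadata, output_dir='./analysis', plugin=StubPlugin())
files, errors = formatter.generate_all_files()
# files -> ['./analysis/comprehensive_table_analysis.md',
#           './analysis/query_performance_stats.md']
# manifest.md is also written to ./analysis but not added to the registry.
print(files, errors)
```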