d365fo-client 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51):
  1. d365fo_client/__init__.py +305 -0
  2. d365fo_client/auth.py +93 -0
  3. d365fo_client/cli.py +700 -0
  4. d365fo_client/client.py +1454 -0
  5. d365fo_client/config.py +304 -0
  6. d365fo_client/crud.py +200 -0
  7. d365fo_client/exceptions.py +49 -0
  8. d365fo_client/labels.py +528 -0
  9. d365fo_client/main.py +502 -0
  10. d365fo_client/mcp/__init__.py +16 -0
  11. d365fo_client/mcp/client_manager.py +276 -0
  12. d365fo_client/mcp/main.py +98 -0
  13. d365fo_client/mcp/models.py +371 -0
  14. d365fo_client/mcp/prompts/__init__.py +43 -0
  15. d365fo_client/mcp/prompts/action_execution.py +480 -0
  16. d365fo_client/mcp/prompts/sequence_analysis.py +349 -0
  17. d365fo_client/mcp/resources/__init__.py +15 -0
  18. d365fo_client/mcp/resources/database_handler.py +555 -0
  19. d365fo_client/mcp/resources/entity_handler.py +176 -0
  20. d365fo_client/mcp/resources/environment_handler.py +132 -0
  21. d365fo_client/mcp/resources/metadata_handler.py +283 -0
  22. d365fo_client/mcp/resources/query_handler.py +135 -0
  23. d365fo_client/mcp/server.py +432 -0
  24. d365fo_client/mcp/tools/__init__.py +17 -0
  25. d365fo_client/mcp/tools/connection_tools.py +175 -0
  26. d365fo_client/mcp/tools/crud_tools.py +579 -0
  27. d365fo_client/mcp/tools/database_tools.py +813 -0
  28. d365fo_client/mcp/tools/label_tools.py +189 -0
  29. d365fo_client/mcp/tools/metadata_tools.py +766 -0
  30. d365fo_client/mcp/tools/profile_tools.py +706 -0
  31. d365fo_client/metadata_api.py +793 -0
  32. d365fo_client/metadata_v2/__init__.py +59 -0
  33. d365fo_client/metadata_v2/cache_v2.py +1372 -0
  34. d365fo_client/metadata_v2/database_v2.py +585 -0
  35. d365fo_client/metadata_v2/global_version_manager.py +573 -0
  36. d365fo_client/metadata_v2/search_engine_v2.py +423 -0
  37. d365fo_client/metadata_v2/sync_manager_v2.py +819 -0
  38. d365fo_client/metadata_v2/version_detector.py +439 -0
  39. d365fo_client/models.py +862 -0
  40. d365fo_client/output.py +181 -0
  41. d365fo_client/profile_manager.py +342 -0
  42. d365fo_client/profiles.py +178 -0
  43. d365fo_client/query.py +162 -0
  44. d365fo_client/session.py +60 -0
  45. d365fo_client/utils.py +196 -0
  46. d365fo_client-0.1.0.dist-info/METADATA +1084 -0
  47. d365fo_client-0.1.0.dist-info/RECORD +51 -0
  48. d365fo_client-0.1.0.dist-info/WHEEL +5 -0
  49. d365fo_client-0.1.0.dist-info/entry_points.txt +3 -0
  50. d365fo_client-0.1.0.dist-info/licenses/LICENSE +21 -0
  51. d365fo_client-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,813 @@
1
+ """Database analysis and query tools for MCP server."""
2
+
3
import asyncio
import json
import logging
import re
import sqlite3
import time
from typing import Any, Dict, List, Optional, Tuple

import aiosqlite
from mcp import Tool
from mcp.types import TextContent

from ..client_manager import D365FOClientManager
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
class DatabaseQuerySafetyError(Exception):
    """Signals that a submitted database query failed safety validation.

    Raised when a query is not a plain SELECT statement, matches a known
    SQL-injection pattern, or references a restricted table.
    """
22
+
23
+
24
+ class DatabaseTools:
25
+ """Database analysis and query tools for the MCP server."""
26
+
27
+ def __init__(self, client_manager: D365FOClientManager):
28
+ """Initialize database tools.
29
+
30
+ Args:
31
+ client_manager: D365FO client manager instance
32
+ """
33
+ self.client_manager = client_manager
34
+
35
+ # Query safety configuration
36
+ self.max_results = 1000
37
+ self.query_timeout_seconds = 30
38
+ self.allowed_operations = {'SELECT'}
39
+ self.blocked_tables = {'labels_cache'} # Tables with potentially sensitive data
40
+
41
+ # SQL injection protection patterns
42
+ self.dangerous_patterns = [
43
+ r';\s*(DROP|DELETE|UPDATE|INSERT|ALTER|CREATE|TRUNCATE)',
44
+ r'UNION\s+SELECT',
45
+ r'--\s*[^\r\n]*',
46
+ r'/\*.*?\*/',
47
+ r'exec\s*\(',
48
+ r'sp_\w+',
49
+ r'xp_\w+',
50
+ ]
51
+
52
+ def get_tools(self) -> List[Tool]:
53
+ """Get list of database tools.
54
+
55
+ Returns:
56
+ List of Tool definitions
57
+ """
58
+ return [
59
+ self._get_execute_sql_query_tool(),
60
+ self._get_database_schema_tool(),
61
+ self._get_table_info_tool(),
62
+ self._get_database_statistics_tool(),
63
+ ]
64
+
65
    def _get_execute_sql_query_tool(self) -> Tool:
        """Build the Tool definition for d365fo_execute_sql_query.

        The description documents the safety limits enforced by
        _validate_query_safety / _execute_safe_query, the known metadata
        tables, and example queries.
        """
        return Tool(
            name="d365fo_execute_sql_query",
            description="""Execute a SELECT query against the D365FO metadata database to get insights from cached metadata.

IMPORTANT SAFETY NOTES:
- Only SELECT queries are allowed (no INSERT, UPDATE, DELETE, DROP, etc.)
- Query results are limited to 1000 rows maximum
- Queries timeout after 30 seconds
- Some sensitive tables may be restricted

AVAILABLE TABLES AND THEIR PURPOSE:
- metadata_environments: D365FO environments and their details
- global_versions: Global version registry with hash and reference counts
- environment_versions: Links between environments and global versions
- data_entities: D365FO data entities metadata
- public_entities: Public entity schemas and configurations
- entity_properties: Detailed property information for entities
- entity_actions: Available OData actions for entities
- enumerations: System enumerations and their metadata
- enumeration_members: Individual enumeration values and labels
- metadata_search_v2: FTS5 search index for metadata

EXAMPLE QUERIES:
1. Get most used entities by category:
SELECT entity_category, COUNT(*) as count FROM data_entities GROUP BY entity_category ORDER BY count DESC

2. Find entities with most properties:
SELECT pe.name, COUNT(ep.id) as property_count FROM public_entities pe LEFT JOIN entity_properties ep ON pe.id = ep.entity_id GROUP BY pe.id ORDER BY property_count DESC LIMIT 10

3. Analyze environment versions:
SELECT me.environment_name, gv.version_hash, ev.detected_at FROM metadata_environments me JOIN environment_versions ev ON me.id = ev.environment_id JOIN global_versions gv ON ev.global_version_id = gv.id

Use this tool to analyze metadata patterns, generate reports, and gain insights into D365FO structure.""",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "SQL SELECT query to execute. Must be a SELECT statement only. Query will be validated for safety before execution.",
                    },
                    # Hard cap mirrors self.max_results set in __init__.
                    "limit": {
                        "type": "integer",
                        "minimum": 1,
                        "maximum": 1000,
                        "default": 100,
                        "description": "Maximum number of rows to return. Default is 100, maximum is 1000.",
                    },
                    "format": {
                        "type": "string",
                        "enum": ["table", "json", "csv"],
                        "default": "table",
                        "description": "Output format for results. 'table' for human-readable format, 'json' for structured data, 'csv' for spreadsheet-compatible format.",
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "required": ["query"],
            },
        )
128
+
129
    def _get_database_schema_tool(self) -> Tool:
        """Build the Tool definition for d365fo_get_database_schema.

        Backed at runtime by execute_get_database_schema / _get_schema_info.
        """
        return Tool(
            name="d365fo_get_database_schema",
            description="""Get comprehensive schema information for the D365FO metadata database.

This tool provides detailed information about:
- All database tables and their structures
- Column definitions with types and constraints
- Indexes and their purposes
- Foreign key relationships
- Table statistics (row counts, sizes)
- FTS5 virtual table information

Use this tool to understand the database structure before writing SQL queries.""",
            inputSchema={
                "type": "object",
                "properties": {
                    "table_name": {
                        "type": "string",
                        "description": "Optional. Get schema for a specific table only. If omitted, returns schema for all tables.",
                    },
                    "include_statistics": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include table statistics like row counts and sizes.",
                    },
                    "include_indexes": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include index information for tables.",
                    },
                    "include_relationships": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include foreign key relationships between tables.",
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "additionalProperties": False,
            },
        )
174
+
175
    def _get_table_info_tool(self) -> Tool:
        """Build the Tool definition for d365fo_get_table_info.

        Backed at runtime by execute_get_table_info / _get_detailed_table_info.
        """
        return Tool(
            name="d365fo_get_table_info",
            description="""Get detailed information about a specific database table including:
- Column definitions with types, nullability, and defaults
- Primary and foreign key constraints
- Indexes and their characteristics
- Table statistics (row count, size, last updated)
- Sample data (first few rows)
- Relationships to other tables

This tool is useful for exploring specific tables before writing queries.""",
            inputSchema={
                "type": "object",
                "properties": {
                    "table_name": {
                        "type": "string",
                        "description": "Name of the table to get information about (e.g., 'data_entities', 'public_entities', 'entity_properties').",
                    },
                    # Off by default: sample data may be large and is rarely needed.
                    "include_sample_data": {
                        "type": "boolean",
                        "default": False,
                        "description": "Include sample data from the table (first 5 rows).",
                    },
                    "include_relationships": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include information about relationships to other tables.",
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "required": ["table_name"],
            },
        )
213
+
214
    def _get_database_statistics_tool(self) -> Tool:
        """Build the Tool definition for d365fo_get_database_statistics.

        Backed at runtime by execute_get_database_statistics.
        """
        return Tool(
            name="d365fo_get_database_statistics",
            description="""Get comprehensive database statistics and analytics including:
- Overall database size and table counts
- Record counts by table
- Global version statistics
- Environment statistics
- Cache hit rates and performance metrics
- Storage utilization analysis
- Data distribution insights

Use this tool to understand the overall state and health of the metadata database.""",
            inputSchema={
                "type": "object",
                "properties": {
                    "include_table_stats": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include per-table statistics (row counts, sizes).",
                    },
                    "include_version_stats": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include global version and environment statistics.",
                    },
                    "include_performance_stats": {
                        "type": "boolean",
                        "default": True,
                        "description": "Include cache performance and query statistics.",
                    },
                    "profile": {
                        "type": "string",
                        "description": "Configuration profile to use (optional - uses default profile if not specified)",
                    },
                },
                "additionalProperties": False,
            },
        )
254
+
255
+ def _validate_query_safety(self, query: str) -> None:
256
+ """Validate that a query is safe to execute.
257
+
258
+ Args:
259
+ query: SQL query to validate
260
+
261
+ Raises:
262
+ DatabaseQuerySafetyError: If query is deemed unsafe
263
+ """
264
+ # Normalize query for analysis
265
+ normalized_query = query.strip().upper()
266
+
267
+ # Check if query starts with SELECT
268
+ if not normalized_query.startswith('SELECT'):
269
+ raise DatabaseQuerySafetyError("Only SELECT queries are allowed")
270
+
271
+ # Check for dangerous patterns
272
+ for pattern in self.dangerous_patterns:
273
+ if re.search(pattern, normalized_query, re.IGNORECASE | re.MULTILINE):
274
+ raise DatabaseQuerySafetyError(f"Query contains potentially dangerous pattern: {pattern}")
275
+
276
+ # Check for blocked operations
277
+ for operation in ['INSERT', 'UPDATE', 'DELETE', 'DROP', 'ALTER', 'CREATE', 'TRUNCATE']:
278
+ if operation in normalized_query:
279
+ raise DatabaseQuerySafetyError(f"Operation {operation} is not allowed")
280
+
281
+ # Check for access to blocked tables
282
+ for blocked_table in self.blocked_tables:
283
+ if blocked_table.upper() in normalized_query:
284
+ raise DatabaseQuerySafetyError(f"Access to table {blocked_table} is restricted")
285
+
286
+ async def _get_database_path(self, profile: str = "default") -> str:
287
+ """Get the path to the metadata database.
288
+
289
+ Args:
290
+ profile: Configuration profile to use
291
+
292
+ Returns:
293
+ Path to the database file
294
+ """
295
+ client = await self.client_manager.get_client(profile)
296
+ if hasattr(client, 'metadata_cache') and client.metadata_cache:
297
+ return str(client.metadata_cache.db_path)
298
+ else:
299
+ raise DatabaseQuerySafetyError("No metadata database available for this profile")
300
+
301
+ async def _execute_safe_query(self, query: str, db_path: str, limit: int = 100) -> Tuple[List[str], List[Tuple]]:
302
+ """Execute a safe SQL query and return results.
303
+
304
+ Args:
305
+ query: SQL query to execute
306
+ db_path: Path to database file
307
+ limit: Maximum number of rows to return
308
+
309
+ Returns:
310
+ Tuple of (column_names, rows)
311
+ """
312
+ # Add LIMIT clause if not present
313
+ if limit and 'LIMIT' not in query.upper():
314
+ query += f' LIMIT {limit}'
315
+
316
+ async with aiosqlite.connect(db_path) as db:
317
+ db.row_factory = aiosqlite.Row
318
+ cursor = await db.execute(query)
319
+ rows = await cursor.fetchall()
320
+
321
+ # Get column names
322
+ column_names = [description[0] for description in cursor.description] if cursor.description else []
323
+
324
+ # Convert rows to tuples for easier processing
325
+ row_tuples = [tuple(row) for row in rows]
326
+
327
+ return column_names, row_tuples
328
+
329
+ def _format_query_results(self, columns: List[str], rows: List[Tuple], format_type: str = "table") -> str:
330
+ """Format query results in the specified format.
331
+
332
+ Args:
333
+ columns: Column names
334
+ rows: Row data
335
+ format_type: Output format (table, json, csv)
336
+
337
+ Returns:
338
+ Formatted results string
339
+ """
340
+ if format_type == "json":
341
+ # Convert to list of dictionaries
342
+ result_dicts = []
343
+ for row in rows:
344
+ row_dict = {col: value for col, value in zip(columns, row)}
345
+ result_dicts.append(row_dict)
346
+ return json.dumps({"columns": columns, "data": result_dicts, "row_count": len(rows)}, indent=2)
347
+
348
+ elif format_type == "csv":
349
+ # CSV format
350
+ import csv
351
+ import io
352
+ output = io.StringIO()
353
+ writer = csv.writer(output)
354
+ writer.writerow(columns)
355
+ writer.writerows(rows)
356
+ return output.getvalue()
357
+
358
+ else: # table format
359
+ if not rows:
360
+ return "No results found."
361
+
362
+ # Calculate column widths
363
+ col_widths = []
364
+ for i, col in enumerate(columns):
365
+ max_width = len(str(col))
366
+ for row in rows:
367
+ if i < len(row):
368
+ max_width = max(max_width, len(str(row[i])))
369
+ col_widths.append(min(max_width, 50)) # Cap at 50 chars
370
+
371
+ # Create table
372
+ lines = []
373
+
374
+ # Header
375
+ header = " | ".join(str(col).ljust(width) for col, width in zip(columns, col_widths))
376
+ lines.append(header)
377
+ lines.append("-" * len(header))
378
+
379
+ # Rows
380
+ for row in rows:
381
+ row_str = " | ".join(
382
+ str(value).ljust(width)[:width] for value, width in zip(row, col_widths)
383
+ )
384
+ lines.append(row_str)
385
+
386
+ lines.append(f"\nTotal rows: {len(rows)}")
387
+ return "\n".join(lines)
388
+
389
+ async def execute_sql_query(self, arguments: dict) -> List[TextContent]:
390
+ """Execute SQL query tool.
391
+
392
+ Args:
393
+ arguments: Tool arguments
394
+
395
+ Returns:
396
+ List of TextContent responses
397
+ """
398
+ try:
399
+ start_time = time.time()
400
+
401
+ profile = arguments.get("profile", "default")
402
+ query = arguments["query"]
403
+ limit = arguments.get("limit", 100)
404
+ format_type = arguments.get("format", "table")
405
+
406
+ # Validate query safety
407
+ self._validate_query_safety(query)
408
+
409
+ # Get database path
410
+ db_path = await self._get_database_path(profile)
411
+
412
+ # Execute query
413
+ columns, rows = await self._execute_safe_query(query, db_path, limit)
414
+
415
+ # Format results
416
+ formatted_results = self._format_query_results(columns, rows, format_type)
417
+
418
+ execution_time = time.time() - start_time
419
+
420
+ # Add metadata
421
+ metadata = {
422
+ "query": query,
423
+ "execution_time_seconds": round(execution_time, 3),
424
+ "row_count": len(rows),
425
+ "column_count": len(columns),
426
+ "format": format_type,
427
+ "limited_results": limit < 1000 and len(rows) == limit,
428
+ }
429
+
430
+ if format_type == "table":
431
+ response = f"Query Results:\n{formatted_results}\n\nExecution Metadata:\n{json.dumps(metadata, indent=2)}"
432
+ else:
433
+ # For JSON/CSV, include metadata in structured format
434
+ if format_type == "json":
435
+ parsed_results = json.loads(formatted_results)
436
+ parsed_results["metadata"] = metadata
437
+ response = json.dumps(parsed_results, indent=2)
438
+ else:
439
+ response = formatted_results + f"\n\n# Metadata: {json.dumps(metadata)}"
440
+
441
+ return [TextContent(type="text", text=response)]
442
+
443
+ except Exception as e:
444
+ logger.error(f"SQL query execution failed: {e}")
445
+ error_response = {
446
+ "error": str(e),
447
+ "tool": "d365fo_execute_sql_query",
448
+ "arguments": arguments,
449
+ "error_type": type(e).__name__,
450
+ }
451
+ return [TextContent(type="text", text=json.dumps(error_response, indent=2))]
452
+
453
+ async def execute_get_database_schema(self, arguments: dict) -> List[TextContent]:
454
+ """Execute get database schema tool.
455
+
456
+ Args:
457
+ arguments: Tool arguments
458
+
459
+ Returns:
460
+ List of TextContent responses
461
+ """
462
+ try:
463
+ profile = arguments.get("profile", "default")
464
+ table_name = arguments.get("table_name")
465
+ include_statistics = arguments.get("include_statistics", True)
466
+ include_indexes = arguments.get("include_indexes", True)
467
+ include_relationships = arguments.get("include_relationships", True)
468
+
469
+ db_path = await self._get_database_path(profile)
470
+
471
+ schema_info = await self._get_schema_info(
472
+ db_path, table_name, include_statistics, include_indexes, include_relationships
473
+ )
474
+
475
+ return [TextContent(type="text", text=json.dumps(schema_info, indent=2))]
476
+
477
+ except Exception as e:
478
+ logger.error(f"Get database schema failed: {e}")
479
+ error_response = {
480
+ "error": str(e),
481
+ "tool": "d365fo_get_database_schema",
482
+ "arguments": arguments,
483
+ }
484
+ return [TextContent(type="text", text=json.dumps(error_response, indent=2))]
485
+
486
+ async def execute_get_table_info(self, arguments: dict) -> List[TextContent]:
487
+ """Execute get table info tool.
488
+
489
+ Args:
490
+ arguments: Tool arguments
491
+
492
+ Returns:
493
+ List of TextContent responses
494
+ """
495
+ try:
496
+ profile = arguments.get("profile", "default")
497
+ table_name = arguments["table_name"]
498
+ include_sample_data = arguments.get("include_sample_data", False)
499
+ include_relationships = arguments.get("include_relationships", True)
500
+
501
+ db_path = await self._get_database_path(profile)
502
+
503
+ table_info = await self._get_detailed_table_info(
504
+ db_path, table_name, include_sample_data, include_relationships
505
+ )
506
+
507
+ return [TextContent(type="text", text=json.dumps(table_info, indent=2))]
508
+
509
+ except Exception as e:
510
+ logger.error(f"Get table info failed: {e}")
511
+ error_response = {
512
+ "error": str(e),
513
+ "tool": "d365fo_get_table_info",
514
+ "arguments": arguments,
515
+ }
516
+ return [TextContent(type="text", text=json.dumps(error_response, indent=2))]
517
+
518
+ async def execute_get_database_statistics(self, arguments: dict) -> List[TextContent]:
519
+ """Execute get database statistics tool.
520
+
521
+ Args:
522
+ arguments: Tool arguments
523
+
524
+ Returns:
525
+ List of TextContent responses
526
+ """
527
+ try:
528
+ profile = arguments.get("profile", "default")
529
+ include_table_stats = arguments.get("include_table_stats", True)
530
+ include_version_stats = arguments.get("include_version_stats", True)
531
+ include_performance_stats = arguments.get("include_performance_stats", True)
532
+
533
+ # Get database statistics using existing method
534
+ client = await self.client_manager.get_client(profile)
535
+ if hasattr(client, 'metadata_cache') and hasattr(client.metadata_cache, 'database'):
536
+ stats = await client.metadata_cache.database.get_database_statistics()
537
+ else:
538
+ raise ValueError("Database statistics not available for this profile")
539
+
540
+ # Enhance with additional statistics if requested
541
+ if include_table_stats or include_version_stats:
542
+ db_path = await self._get_database_path(profile)
543
+ additional_stats = await self._get_enhanced_statistics(
544
+ db_path, include_table_stats, include_version_stats, include_performance_stats
545
+ )
546
+ stats.update(additional_stats)
547
+
548
+ return [TextContent(type="text", text=json.dumps(stats, indent=2))]
549
+
550
+ except Exception as e:
551
+ logger.error(f"Get database statistics failed: {e}")
552
+ error_response = {
553
+ "error": str(e),
554
+ "tool": "d365fo_get_database_statistics",
555
+ "arguments": arguments,
556
+ }
557
+ return [TextContent(type="text", text=json.dumps(error_response, indent=2))]
558
+
559
    async def _get_schema_info(
        self,
        db_path: str,
        table_name: Optional[str] = None,
        include_statistics: bool = True,
        include_indexes: bool = True,
        include_relationships: bool = True
    ) -> Dict[str, Any]:
        """Get comprehensive database schema information.

        Args:
            db_path: Path to the SQLite database file.
            table_name: If given, restrict output to this single table.
            include_statistics: Add per-table row counts.
            include_indexes: Add index listings (PRAGMA index_list).
            include_relationships: Add foreign keys (PRAGMA foreign_key_list).

        Returns:
            Dict with database_path, generated_at (epoch seconds), and a
            "tables" mapping of table name -> column/index/FK details.
        """
        async with aiosqlite.connect(db_path) as db:
            schema_info = {
                "database_path": db_path,
                "generated_at": time.time(),
                "tables": {}
            }

            # Enumerate tables from sqlite_master (single table or all).
            if table_name:
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"
                cursor = await db.execute(tables_query, (table_name,))
            else:
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                cursor = await db.execute(tables_query)

            table_names = [row[0] for row in await cursor.fetchall()]

            # Collect per-table details. The f-string interpolation below is
            # acceptable only because the names come from sqlite_master, not
            # from user input.
            for name in table_names:
                table_info = {"name": name}

                # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk)
                cursor = await db.execute(f"PRAGMA table_info({name})")
                columns = await cursor.fetchall()
                table_info["columns"] = [
                    {
                        "name": col[1],
                        "type": col[2],
                        "not_null": bool(col[3]),
                        "default_value": col[4],
                        "primary_key": bool(col[5])
                    }
                    for col in columns
                ]

                if include_statistics:
                    # Full COUNT(*) scan per table; fine for metadata-sized DBs.
                    cursor = await db.execute(f"SELECT COUNT(*) FROM {name}")
                    table_info["row_count"] = (await cursor.fetchone())[0]

                if include_indexes:
                    # PRAGMA index_list rows: (seq, name, unique, origin, partial)
                    cursor = await db.execute(f"PRAGMA index_list({name})")
                    indexes = await cursor.fetchall()
                    table_info["indexes"] = [
                        {
                            "name": idx[1],
                            "unique": bool(idx[2]),
                            "origin": idx[3]
                        }
                        for idx in indexes
                    ]

                if include_relationships:
                    # PRAGMA foreign_key_list rows:
                    # (id, seq, table, from, to, on_update, on_delete, match)
                    cursor = await db.execute(f"PRAGMA foreign_key_list({name})")
                    foreign_keys = await cursor.fetchall()
                    table_info["foreign_keys"] = [
                        {
                            "column": fk[3],
                            "references_table": fk[2],
                            "references_column": fk[4]
                        }
                        for fk in foreign_keys
                    ]

                schema_info["tables"][name] = table_info

            return schema_info
637
+
638
    async def _get_detailed_table_info(
        self,
        db_path: str,
        table_name: str,
        include_sample_data: bool = False,
        include_relationships: bool = True
    ) -> Dict[str, Any]:
        """Get detailed information about a specific table.

        Args:
            db_path: Path to the SQLite database file.
            table_name: Table to inspect; must exist in sqlite_master.
            include_sample_data: Also return the first 5 rows.
            include_relationships: Also return outgoing foreign keys and the
                tables that reference this one.

        Returns:
            Dict with columns, row_count, indexes and (optionally)
            foreign_keys, referenced_by and sample_data.

        Raises:
            ValueError: If the table does not exist.
        """
        async with aiosqlite.connect(db_path) as db:
            table_info = {
                "table_name": table_name,
                "generated_at": time.time()
            }

            # Verify the table exists before interpolating its name into
            # PRAGMA / SELECT statements below.
            cursor = await db.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
                (table_name,)
            )
            if not await cursor.fetchone():
                raise ValueError(f"Table '{table_name}' does not exist")

            # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk)
            cursor = await db.execute(f"PRAGMA table_info({table_name})")
            columns = await cursor.fetchall()
            table_info["columns"] = [
                {
                    "cid": col[0],
                    "name": col[1],
                    "type": col[2],
                    "not_null": bool(col[3]),
                    "default_value": col[4],
                    "primary_key": bool(col[5])
                }
                for col in columns
            ]

            # Row count via full COUNT(*) scan.
            cursor = await db.execute(f"SELECT COUNT(*) FROM {table_name}")
            table_info["row_count"] = (await cursor.fetchone())[0]

            # Indexes; `indexes` is fully fetched before the loop, so
            # reassigning `cursor` inside the loop is safe.
            cursor = await db.execute(f"PRAGMA index_list({table_name})")
            indexes = await cursor.fetchall()
            table_info["indexes"] = []
            for idx in indexes:
                index_info = {
                    "name": idx[1],
                    "unique": bool(idx[2]),
                    "origin": idx[3]
                }
                # PRAGMA index_info rows: (seqno, cid, name)
                cursor = await db.execute(f"PRAGMA index_info({idx[1]})")
                index_columns = await cursor.fetchall()
                index_info["columns"] = [col[2] for col in index_columns]
                table_info["indexes"].append(index_info)

            if include_relationships:
                # Outgoing foreign keys declared on this table.
                cursor = await db.execute(f"PRAGMA foreign_key_list({table_name})")
                foreign_keys = await cursor.fetchall()
                table_info["foreign_keys"] = [
                    {
                        "id": fk[0],
                        "seq": fk[1],
                        "table": fk[2],
                        "from": fk[3],
                        "to": fk[4],
                        "on_update": fk[5],
                        "on_delete": fk[6],
                        "match": fk[7]
                    }
                    for fk in foreign_keys
                ]

                # Incoming references: scan every table's foreign keys for
                # ones that point at this table (O(tables) PRAGMA calls).
                cursor = await db.execute(
                    """SELECT name FROM sqlite_master WHERE type='table'"""
                )
                all_tables = [row[0] for row in await cursor.fetchall()]

                referencing_tables = []
                for other_table in all_tables:
                    cursor = await db.execute(f"PRAGMA foreign_key_list({other_table})")
                    fks = await cursor.fetchall()
                    for fk in fks:
                        if fk[2] == table_name:  # references our table
                            referencing_tables.append({
                                "table": other_table,
                                "column": fk[3],
                                "references_column": fk[4]
                            })

                table_info["referenced_by"] = referencing_tables

            if include_sample_data and table_info["row_count"] > 0:
                # Small preview only: first 5 rows.
                cursor = await db.execute(f"SELECT * FROM {table_name} LIMIT 5")
                sample_rows = await cursor.fetchall()
                column_names = [desc[0] for desc in cursor.description]

                table_info["sample_data"] = {
                    "columns": column_names,
                    "rows": [list(row) for row in sample_rows]
                }

            return table_info
745
+
746
    async def _get_enhanced_statistics(
        self,
        db_path: str,
        include_table_stats: bool = True,
        include_version_stats: bool = True,
        include_performance_stats: bool = True
    ) -> Dict[str, Any]:
        """Get enhanced database statistics.

        Args:
            db_path: Path to the SQLite database file.
            include_table_stats: Add per-table row counts.
            include_version_stats: Add aggregates over global_versions /
                environment_versions.
            include_performance_stats: Add page/size figures from PRAGMAs.

        Returns:
            Dict with whichever of detailed_table_statistics,
            enhanced_version_statistics and performance_statistics were
            requested.
        """
        stats = {}

        async with aiosqlite.connect(db_path) as db:
            if include_table_stats:
                # Row count per table (full COUNT(*) scan each).
                cursor = await db.execute(
                    "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                )
                table_names = [row[0] for row in await cursor.fetchall()]

                table_stats = {}
                for table_name in table_names:
                    cursor = await db.execute(f"SELECT COUNT(*) FROM {table_name}")
                    row_count = (await cursor.fetchone())[0]
                    table_stats[table_name] = {"row_count": row_count}

                stats["detailed_table_statistics"] = table_stats

            if include_version_stats:
                # Aggregates over the version registry; LEFT JOIN keeps
                # versions that are not linked to any environment.
                cursor = await db.execute(
                    """SELECT
                        COUNT(DISTINCT gv.id) as unique_versions,
                        COUNT(DISTINCT ev.environment_id) as environments_with_versions,
                        AVG(gv.reference_count) as avg_reference_count,
                        MAX(gv.last_used_at) as most_recent_use
                    FROM global_versions gv
                    LEFT JOIN environment_versions ev ON gv.id = ev.global_version_id"""
                )
                version_stats = await cursor.fetchone()
                stats["enhanced_version_statistics"] = {
                    "unique_versions": version_stats[0],
                    "environments_with_versions": version_stats[1],
                    # AVG is NULL (None) on an empty table; coerce to 0.
                    "average_reference_count": round(version_stats[2] or 0, 2),
                    "most_recent_use": version_stats[3]
                }

            if include_performance_stats:
                # Size/utilization derived from SQLite storage PRAGMAs.
                cursor = await db.execute("PRAGMA page_count")
                page_count = (await cursor.fetchone())[0]

                cursor = await db.execute("PRAGMA page_size")
                page_size = (await cursor.fetchone())[0]

                cursor = await db.execute("PRAGMA freelist_count")
                freelist_count = (await cursor.fetchone())[0]

                stats["performance_statistics"] = {
                    "total_pages": page_count,
                    "page_size_bytes": page_size,
                    "database_size_bytes": page_count * page_size,
                    "free_pages": freelist_count,
                    "utilized_pages": page_count - freelist_count,
                    # Guard against division by zero on an empty database.
                    "space_utilization_percent": round(
                        ((page_count - freelist_count) / page_count * 100) if page_count > 0 else 0, 2
                    )
                }

        return stats