d365fo-client 0.2.4__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59):
  1. d365fo_client/__init__.py +7 -1
  2. d365fo_client/auth.py +9 -21
  3. d365fo_client/cli.py +25 -13
  4. d365fo_client/client.py +8 -4
  5. d365fo_client/config.py +52 -30
  6. d365fo_client/credential_sources.py +5 -0
  7. d365fo_client/main.py +1 -1
  8. d365fo_client/mcp/__init__.py +3 -1
  9. d365fo_client/mcp/auth_server/__init__.py +5 -0
  10. d365fo_client/mcp/auth_server/auth/__init__.py +30 -0
  11. d365fo_client/mcp/auth_server/auth/auth.py +372 -0
  12. d365fo_client/mcp/auth_server/auth/oauth_proxy.py +989 -0
  13. d365fo_client/mcp/auth_server/auth/providers/__init__.py +0 -0
  14. d365fo_client/mcp/auth_server/auth/providers/apikey.py +83 -0
  15. d365fo_client/mcp/auth_server/auth/providers/azure.py +393 -0
  16. d365fo_client/mcp/auth_server/auth/providers/bearer.py +25 -0
  17. d365fo_client/mcp/auth_server/auth/providers/jwt.py +547 -0
  18. d365fo_client/mcp/auth_server/auth/redirect_validation.py +65 -0
  19. d365fo_client/mcp/auth_server/dependencies.py +136 -0
  20. d365fo_client/mcp/client_manager.py +16 -67
  21. d365fo_client/mcp/fastmcp_main.py +407 -0
  22. d365fo_client/mcp/fastmcp_server.py +598 -0
  23. d365fo_client/mcp/fastmcp_utils.py +431 -0
  24. d365fo_client/mcp/main.py +40 -13
  25. d365fo_client/mcp/mixins/__init__.py +24 -0
  26. d365fo_client/mcp/mixins/base_tools_mixin.py +55 -0
  27. d365fo_client/mcp/mixins/connection_tools_mixin.py +50 -0
  28. d365fo_client/mcp/mixins/crud_tools_mixin.py +311 -0
  29. d365fo_client/mcp/mixins/database_tools_mixin.py +685 -0
  30. d365fo_client/mcp/mixins/label_tools_mixin.py +87 -0
  31. d365fo_client/mcp/mixins/metadata_tools_mixin.py +565 -0
  32. d365fo_client/mcp/mixins/performance_tools_mixin.py +109 -0
  33. d365fo_client/mcp/mixins/profile_tools_mixin.py +713 -0
  34. d365fo_client/mcp/mixins/sync_tools_mixin.py +321 -0
  35. d365fo_client/mcp/prompts/action_execution.py +1 -1
  36. d365fo_client/mcp/prompts/sequence_analysis.py +1 -1
  37. d365fo_client/mcp/tools/crud_tools.py +3 -3
  38. d365fo_client/mcp/tools/sync_tools.py +1 -1
  39. d365fo_client/mcp/utilities/__init__.py +1 -0
  40. d365fo_client/mcp/utilities/auth.py +34 -0
  41. d365fo_client/mcp/utilities/logging.py +58 -0
  42. d365fo_client/mcp/utilities/types.py +426 -0
  43. d365fo_client/metadata_v2/sync_manager_v2.py +2 -0
  44. d365fo_client/metadata_v2/sync_session_manager.py +7 -7
  45. d365fo_client/models.py +139 -139
  46. d365fo_client/output.py +2 -2
  47. d365fo_client/profile_manager.py +62 -27
  48. d365fo_client/profiles.py +118 -113
  49. d365fo_client/settings.py +367 -0
  50. d365fo_client/sync_models.py +85 -2
  51. d365fo_client/utils.py +2 -1
  52. {d365fo_client-0.2.4.dist-info → d365fo_client-0.3.1.dist-info}/METADATA +273 -18
  53. d365fo_client-0.3.1.dist-info/RECORD +85 -0
  54. d365fo_client-0.3.1.dist-info/entry_points.txt +4 -0
  55. d365fo_client-0.2.4.dist-info/RECORD +0 -56
  56. d365fo_client-0.2.4.dist-info/entry_points.txt +0 -3
  57. {d365fo_client-0.2.4.dist-info → d365fo_client-0.3.1.dist-info}/WHEEL +0 -0
  58. {d365fo_client-0.2.4.dist-info → d365fo_client-0.3.1.dist-info}/licenses/LICENSE +0 -0
  59. {d365fo_client-0.2.4.dist-info → d365fo_client-0.3.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,685 @@
1
+ """Database tools mixin for FastMCP server."""
2
+
3
+ import json
4
+ import logging
5
+ import re
6
+ import time
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ import aiosqlite
10
+ from .base_tools_mixin import BaseToolsMixin
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ class DatabaseQuerySafetyError(Exception):
16
+ """Raised when a database query is deemed unsafe or invalid."""
17
+ pass
18
+
19
+
20
+ class DatabaseToolsMixin(BaseToolsMixin):
21
+ """Database analysis and query tools for FastMCP server."""
22
+
23
+ def setup_database_tools(self):
24
+ """Initialize database tools configuration."""
25
+ # Query safety configuration
26
+ self.max_results = 1000
27
+ self.query_timeout_seconds = 30
28
+ self.allowed_operations = {'SELECT'}
29
+ self.blocked_tables = {'labels_cache'} # Tables with potentially sensitive data
30
+
31
+ # SQL injection protection patterns
32
+ self.dangerous_patterns = [
33
+ r';\s*(DROP|DELETE|UPDATE|INSERT|ALTER|CREATE|TRUNCATE)',
34
+ r'UNION\s+SELECT',
35
+ r'--\s*[^\r\n]*',
36
+ r'/\*.*?\*/',
37
+ r'exec\s*\(',
38
+ r'sp_\w+',
39
+ r'xp_\w+',
40
+ ]
41
+
42
+ def register_database_tools(self):
43
+ """Register all database tools with FastMCP."""
44
+
45
+ @self.mcp.tool()
46
+ async def d365fo_execute_sql_query(
47
+ query: str,
48
+ limit: int = 100,
49
+ format: str = "table",
50
+ profile: str = "default",
51
+ ) -> dict:
52
+ """Execute a SELECT query against the D365FO metadata database to get insights from cached metadata.
53
+
54
+ IMPORTANT SAFETY NOTES:
55
+ - Only SELECT queries are allowed (no INSERT, UPDATE, DELETE, DROP, etc.)
56
+ - Query results are limited to 1000 rows maximum
57
+ - Queries timeout after 30 seconds
58
+ - Some sensitive tables may be restricted
59
+
60
+ AVAILABLE TABLES AND THEIR PURPOSE:
61
+ - metadata_environments: D365FO environments and their details
62
+ - global_versions: Global version registry with hash and reference counts
63
+ - environment_versions: Links between environments and global versions
64
+ - data_entities: D365FO data entities metadata
65
+ - public_entities: Public entity schemas and configurations
66
+ - entity_properties: Detailed property information for entities
67
+ - entity_actions: Available OData actions for entities
68
+ - enumerations: System enumerations and their metadata
69
+ - enumeration_members: Individual enumeration values and labels
70
+ - metadata_search_v2: FTS5 search index for metadata
71
+
72
+ EXAMPLE QUERIES:
73
+ 1. Get most used entities by category:
74
+ SELECT entity_category, COUNT(*) as count FROM data_entities GROUP BY entity_category ORDER BY count DESC
75
+
76
+ 2. Find entities with most properties:
77
+ SELECT pe.name, COUNT(ep.id) as property_count FROM public_entities pe LEFT JOIN entity_properties ep ON pe.id = ep.entity_id GROUP BY pe.id ORDER BY property_count DESC LIMIT 10
78
+
79
+ 3. Analyze environment versions:
80
+ SELECT me.environment_name, gv.version_hash, ev.detected_at FROM metadata_environments me JOIN environment_versions ev ON me.id = ev.environment_id JOIN global_versions gv ON ev.global_version_id = gv.id
81
+
82
+ Use this tool to analyze metadata patterns, generate reports, and gain insights into D365FO structure.
83
+
84
+ Args:
85
+ query: SQL SELECT query to execute. Must be a SELECT statement only. Query will be validated for safety before execution.
86
+ limit: Maximum number of rows to return. Default is 100, maximum is 1000.
87
+ format: Output format for results. 'table' for human-readable format, 'json' for structured data, 'csv' for spreadsheet-compatible format.
88
+ profile: Configuration profile to use (optional - uses default profile if not specified)
89
+
90
+ Returns:
91
+ Dictionary with query results
92
+ """
93
+ try:
94
+ start_time = time.time()
95
+
96
+ # Validate query safety
97
+ self._validate_query_safety(query)
98
+
99
+ # Get database path
100
+ db_path = await self._get_database_path(profile)
101
+
102
+ # Execute query
103
+ columns, rows = await self._execute_safe_query(query, db_path, limit)
104
+
105
+ # Format results
106
+ formatted_results = self._format_query_results(columns, rows, format)
107
+
108
+ execution_time = time.time() - start_time
109
+
110
+ # Add metadata
111
+ metadata = {
112
+ "query": query,
113
+ "execution_time_seconds": round(execution_time, 3),
114
+ "row_count": len(rows),
115
+ "column_count": len(columns),
116
+ "format": format,
117
+ "limited_results": limit < 1000 and len(rows) == limit,
118
+ }
119
+
120
+ if format == "table":
121
+ response = {
122
+ "query_results": formatted_results,
123
+ "metadata": metadata
124
+ }
125
+ else:
126
+ # For JSON/CSV, include metadata in structured format
127
+ if format == "json":
128
+ parsed_results = json.loads(formatted_results)
129
+ response = {
130
+ "query_results": parsed_results,
131
+ "metadata": metadata
132
+ }
133
+ else:
134
+ response = {
135
+ "query_results": formatted_results,
136
+ "metadata": metadata
137
+ }
138
+
139
+ return response
140
+
141
+ except Exception as e:
142
+ logger.error(f"SQL query execution failed: {e}")
143
+ return self._create_error_response(e, "d365fo_execute_sql_query", {
144
+ "query": query,
145
+ "limit": limit,
146
+ "format": format,
147
+ "profile": profile
148
+ })
149
+
150
+ @self.mcp.tool()
151
+ async def d365fo_get_database_schema(
152
+ table_name: Optional[str] = None,
153
+ include_statistics: bool = True,
154
+ include_indexes: bool = True,
155
+ include_relationships: bool = True,
156
+ profile: str = "default",
157
+ ) -> dict:
158
+ """Get comprehensive schema information for the D365FO metadata database.
159
+
160
+ This tool provides detailed information about:
161
+ - All database tables and their structures
162
+ - Column definitions with types and constraints
163
+ - Indexes and their purposes
164
+ - Foreign key relationships
165
+ - Table statistics (row counts, sizes)
166
+ - FTS5 virtual table information
167
+
168
+ Use this tool to understand the database structure before writing SQL queries.
169
+
170
+ Args:
171
+ table_name: Optional. Get schema for a specific table only. If omitted, returns schema for all tables.
172
+ include_statistics: Include table statistics like row counts and sizes.
173
+ include_indexes: Include index information for tables.
174
+ include_relationships: Include foreign key relationships between tables.
175
+ profile: Configuration profile to use (optional - uses default profile if not specified)
176
+
177
+ Returns:
178
+ Dictionary with database schema
179
+ """
180
+ try:
181
+ db_path = await self._get_database_path(profile)
182
+
183
+ schema_info = await self._get_schema_info(
184
+ db_path, table_name, include_statistics, include_indexes, include_relationships
185
+ )
186
+
187
+ return schema_info
188
+
189
+ except Exception as e:
190
+ logger.error(f"Get database schema failed: {e}")
191
+ return self._create_error_response(e, "d365fo_get_database_schema", {
192
+ "table_name": table_name,
193
+ "include_statistics": include_statistics,
194
+ "include_indexes": include_indexes,
195
+ "include_relationships": include_relationships,
196
+ "profile": profile
197
+ })
198
+
199
+ @self.mcp.tool()
200
+ async def d365fo_get_table_info(
201
+ table_name: str,
202
+ include_sample_data: bool = False,
203
+ include_relationships: bool = True,
204
+ profile: str = "default",
205
+ ) -> dict:
206
+ """Get detailed information about a specific database table including:
207
+ - Column definitions with types, nullability, and defaults
208
+ - Primary and foreign key constraints
209
+ - Indexes and their characteristics
210
+ - Table statistics (row count, size, last updated)
211
+ - Sample data (first few rows)
212
+ - Relationships to other tables
213
+
214
+ This tool is useful for exploring specific tables before writing queries.
215
+
216
+ Args:
217
+ table_name: Name of the table to get information about (e.g., 'data_entities', 'public_entities', 'entity_properties').
218
+ include_sample_data: Include sample data from the table (first 5 rows).
219
+ include_relationships: Include information about relationships to other tables.
220
+ profile: Configuration profile to use (optional - uses default profile if not specified)
221
+
222
+ Returns:
223
+ Dictionary with table information
224
+ """
225
+ try:
226
+ db_path = await self._get_database_path(profile)
227
+
228
+ table_info = await self._get_detailed_table_info(
229
+ db_path, table_name, include_sample_data, include_relationships
230
+ )
231
+
232
+ return table_info
233
+
234
+ except Exception as e:
235
+ logger.error(f"Get table info failed: {e}")
236
+ return self._create_error_response(e, "d365fo_get_table_info", {
237
+ "table_name": table_name,
238
+ "include_sample_data": include_sample_data,
239
+ "include_relationships": include_relationships,
240
+ "profile": profile
241
+ })
242
+
243
+ @self.mcp.tool()
244
+ async def d365fo_get_database_statistics(
245
+ include_table_stats: bool = True,
246
+ include_version_stats: bool = True,
247
+ include_performance_stats: bool = True,
248
+ profile: str = "default",
249
+ ) -> dict:
250
+ """Get comprehensive database statistics and analytics including:
251
+ - Overall database size and table counts
252
+ - Record counts by table
253
+ - Global version statistics
254
+ - Environment statistics
255
+ - Cache hit rates and performance metrics
256
+ - Storage utilization analysis
257
+ - Data distribution insights
258
+
259
+ Use this tool to understand the overall state and health of the metadata database.
260
+
261
+ Args:
262
+ include_table_stats: Include per-table statistics (row counts, sizes).
263
+ include_version_stats: Include global version and environment statistics.
264
+ include_performance_stats: Include cache performance and query statistics.
265
+ profile: Configuration profile to use (optional - uses default profile if not specified)
266
+
267
+ Returns:
268
+ Dictionary with database statistics
269
+ """
270
+ try:
271
+ # Get database statistics using existing method
272
+ client = await self.client_manager.get_client(profile)
273
+ if hasattr(client, 'metadata_cache') and hasattr(client.metadata_cache, 'database'):
274
+ stats = await client.metadata_cache.database.get_database_statistics() # type: ignore
275
+ else:
276
+ raise ValueError("Database statistics not available for this profile")
277
+
278
+ # Enhance with additional statistics if requested
279
+ if include_table_stats or include_version_stats:
280
+ db_path = await self._get_database_path(profile)
281
+ additional_stats = await self._get_enhanced_statistics(
282
+ db_path, include_table_stats, include_version_stats, include_performance_stats
283
+ )
284
+ stats.update(additional_stats)
285
+
286
+ return stats
287
+
288
+ except Exception as e:
289
+ logger.error(f"Get database statistics failed: {e}")
290
+ return self._create_error_response(e, "d365fo_get_database_statistics", {
291
+ "include_table_stats": include_table_stats,
292
+ "include_version_stats": include_version_stats,
293
+ "include_performance_stats": include_performance_stats,
294
+ "profile": profile
295
+ })
296
+
297
+ def _validate_query_safety(self, query: str) -> None:
298
+ """Validate that a query is safe to execute.
299
+
300
+ Args:
301
+ query: SQL query to validate
302
+
303
+ Raises:
304
+ DatabaseQuerySafetyError: If query is deemed unsafe
305
+ """
306
+ # Normalize query for analysis
307
+ normalized_query = query.strip().upper()
308
+
309
+ # Check if query starts with SELECT
310
+ if not normalized_query.startswith('SELECT'):
311
+ raise DatabaseQuerySafetyError("Only SELECT queries are allowed")
312
+
313
+ # Check for dangerous patterns
314
+ for pattern in self.dangerous_patterns:
315
+ if re.search(pattern, normalized_query, re.IGNORECASE | re.MULTILINE):
316
+ raise DatabaseQuerySafetyError(f"Query contains potentially dangerous pattern: {pattern}")
317
+
318
+ # Check for blocked operations
319
+ for operation in ['INSERT', 'UPDATE', 'DELETE', 'DROP', 'ALTER', 'CREATE', 'TRUNCATE']:
320
+ if operation in normalized_query:
321
+ raise DatabaseQuerySafetyError(f"Operation {operation} is not allowed")
322
+
323
+ # Check for access to blocked tables
324
+ for blocked_table in self.blocked_tables:
325
+ if blocked_table.upper() in normalized_query:
326
+ raise DatabaseQuerySafetyError(f"Access to table {blocked_table} is restricted")
327
+
328
+ async def _get_database_path(self, profile: str = "default") -> str:
329
+ """Get the path to the metadata database.
330
+
331
+ Args:
332
+ profile: Configuration profile to use
333
+
334
+ Returns:
335
+ Path to the database file
336
+ """
337
+ client = await self.client_manager.get_client(profile)
338
+ if hasattr(client, 'metadata_cache') and client.metadata_cache:
339
+ return str(client.metadata_cache.db_path)
340
+ else:
341
+ raise DatabaseQuerySafetyError("No metadata database available for this profile")
342
+
343
+ async def _execute_safe_query(self, query: str, db_path: str, limit: int = 100) -> Tuple[List[str], List[Tuple]]:
344
+ """Execute a safe SQL query and return results.
345
+
346
+ Args:
347
+ query: SQL query to execute
348
+ db_path: Path to database file
349
+ limit: Maximum number of rows to return
350
+
351
+ Returns:
352
+ Tuple of (column_names, rows)
353
+ """
354
+ # Add LIMIT clause if not present
355
+ if limit and 'LIMIT' not in query.upper():
356
+ query += f' LIMIT {limit}'
357
+
358
+ async with aiosqlite.connect(db_path) as db:
359
+ db.row_factory = aiosqlite.Row
360
+ cursor = await db.execute(query)
361
+ rows = await cursor.fetchall()
362
+
363
+ # Get column names
364
+ column_names = [description[0] for description in cursor.description] if cursor.description else []
365
+
366
+ # Convert rows to tuples for easier processing
367
+ row_tuples = [tuple(row) for row in rows]
368
+
369
+ return column_names, row_tuples
370
+
371
+ def _format_query_results(self, columns: List[str], rows: List[Tuple], format_type: str = "table") -> str:
372
+ """Format query results in the specified format.
373
+
374
+ Args:
375
+ columns: Column names
376
+ rows: Row data
377
+ format_type: Output format (table, json, csv)
378
+
379
+ Returns:
380
+ Formatted results string
381
+ """
382
+ if format_type == "json":
383
+ # Convert to list of dictionaries
384
+ result_dicts = []
385
+ for row in rows:
386
+ row_dict = {col: value for col, value in zip(columns, row)}
387
+ result_dicts.append(row_dict)
388
+ return json.dumps({"columns": columns, "data": result_dicts, "row_count": len(rows)}, indent=2)
389
+
390
+ elif format_type == "csv":
391
+ # CSV format
392
+ import csv
393
+ import io
394
+ output = io.StringIO()
395
+ writer = csv.writer(output)
396
+ writer.writerow(columns)
397
+ writer.writerows(rows)
398
+ return output.getvalue()
399
+
400
+ else: # table format
401
+ if not rows:
402
+ return "No results found."
403
+
404
+ # Calculate column widths
405
+ col_widths = []
406
+ for i, col in enumerate(columns):
407
+ max_width = len(str(col))
408
+ for row in rows:
409
+ if i < len(row):
410
+ max_width = max(max_width, len(str(row[i])))
411
+ col_widths.append(min(max_width, 50)) # Cap at 50 chars
412
+
413
+ # Create table
414
+ lines = []
415
+
416
+ # Header
417
+ header = " | ".join(str(col).ljust(width) for col, width in zip(columns, col_widths))
418
+ lines.append(header)
419
+ lines.append("-" * len(header))
420
+
421
+ # Rows
422
+ for row in rows:
423
+ row_str = " | ".join(
424
+ str(value).ljust(width)[:width] for value, width in zip(row, col_widths)
425
+ )
426
+ lines.append(row_str)
427
+
428
+ lines.append(f"\nTotal rows: {len(rows)}")
429
+ return "\n".join(lines)
430
+
431
    async def _get_schema_info(
        self,
        db_path: str,
        table_name: Optional[str] = None,
        include_statistics: bool = True,
        include_indexes: bool = True,
        include_relationships: bool = True
    ) -> Dict[str, Any]:
        """Get comprehensive database schema information.

        Walks sqlite_master and the table_info/index_list/foreign_key_list
        PRAGMAs to build a per-table description.

        Args:
            db_path: Path to the SQLite database file.
            table_name: Restrict output to this table; None means all tables.
            include_statistics: Add a row_count per table.
            include_indexes: Add index metadata per table.
            include_relationships: Add foreign-key metadata per table.

        Returns:
            Dict with database_path, generated_at (epoch seconds), and a
            "tables" mapping of table name -> details.
        """
        async with aiosqlite.connect(db_path) as db:
            schema_info = {
                "database_path": db_path,
                "generated_at": time.time(),
                "tables": {}
            }

            # Get list of tables (parameterized when filtering by name)
            if table_name:
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' AND name=?"
                cursor = await db.execute(tables_query, (table_name,))
            else:
                tables_query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                cursor = await db.execute(tables_query)

            table_names = [row[0] for row in await cursor.fetchall()]

            # Get detailed info for each table
            # NOTE(review): names below come from sqlite_master, so the
            # f-string PRAGMA/COUNT statements only ever see existing table
            # names, not arbitrary caller input.
            for name in table_names:
                table_info = {"name": name}

                # Get column information
                # PRAGMA table_info columns: cid, name, type, notnull, dflt_value, pk
                cursor = await db.execute(f"PRAGMA table_info({name})")
                columns = await cursor.fetchall()
                table_info["columns"] = [
                    {
                        "name": col[1],
                        "type": col[2],
                        "not_null": bool(col[3]),
                        "default_value": col[4],
                        "primary_key": bool(col[5])
                    }
                    for col in columns
                ]

                if include_statistics:
                    # Get row count
                    cursor = await db.execute(f"SELECT COUNT(*) FROM {name}")
                    table_info["row_count"] = (await cursor.fetchone())[0]  # type: ignore

                if include_indexes:
                    # Get indexes (PRAGMA index_list: seq, name, unique, origin, partial)
                    cursor = await db.execute(f"PRAGMA index_list({name})")
                    indexes = await cursor.fetchall()
                    table_info["indexes"] = [
                        {
                            "name": idx[1],
                            "unique": bool(idx[2]),
                            "origin": idx[3]
                        }
                        for idx in indexes
                    ]

                if include_relationships:
                    # Get foreign keys
                    # PRAGMA foreign_key_list columns: id, seq, table, from, to, ...
                    cursor = await db.execute(f"PRAGMA foreign_key_list({name})")
                    foreign_keys = await cursor.fetchall()
                    table_info["foreign_keys"] = [
                        {
                            "column": fk[3],
                            "references_table": fk[2],
                            "references_column": fk[4]
                        }
                        for fk in foreign_keys
                    ]

                schema_info["tables"][name] = table_info

            return schema_info
509
+
510
    async def _get_detailed_table_info(
        self,
        db_path: str,
        table_name: str,
        include_sample_data: bool = False,
        include_relationships: bool = True
    ) -> Dict[str, Any]:
        """Get detailed information about a specific table.

        Args:
            db_path: Path to the SQLite database file.
            table_name: Table to describe; must exist in sqlite_master.
            include_sample_data: Also return the first 5 rows.
            include_relationships: Also return outgoing foreign keys and
                the tables that reference this one.

        Returns:
            Dict with columns, row_count, indexes, and (optionally)
            foreign_keys / referenced_by / sample_data.

        Raises:
            ValueError: If the table does not exist.
        """
        async with aiosqlite.connect(db_path) as db:
            table_info = {
                "table_name": table_name,
                "generated_at": time.time()
            }

            # Verify table exists (parameterized lookup). This also acts as a
            # gate before table_name is interpolated into the f-string
            # PRAGMA/SELECT statements below — a malformed name fails here
            # first. NOTE(review): confirm this gate is sufficient for all
            # callers that can reach this helper.
            cursor = await db.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
                (table_name,)
            )
            if not await cursor.fetchone():
                raise ValueError(f"Table '{table_name}' does not exist")

            # Get column information with detailed types
            # PRAGMA table_info columns: cid, name, type, notnull, dflt_value, pk
            cursor = await db.execute(f"PRAGMA table_info({table_name})")
            columns = await cursor.fetchall()
            table_info["columns"] = [
                {
                    "cid": col[0],
                    "name": col[1],
                    "type": col[2],
                    "not_null": bool(col[3]),
                    "default_value": col[4],
                    "primary_key": bool(col[5])
                }
                for col in columns
            ]

            # Get table statistics
            cursor = await db.execute(f"SELECT COUNT(*) FROM {table_name}")
            table_info["row_count"] = (await cursor.fetchone())[0]  # type: ignore

            # Get indexes; indexes are fully fetched before the loop, so
            # reusing `cursor` for PRAGMA index_info inside the loop is safe.
            cursor = await db.execute(f"PRAGMA index_list({table_name})")
            indexes = await cursor.fetchall()
            table_info["indexes"] = []
            for idx in indexes:
                index_info = {
                    "name": idx[1],
                    "unique": bool(idx[2]),
                    "origin": idx[3]
                }
                # Get index columns
                cursor = await db.execute(f"PRAGMA index_info({idx[1]})")
                index_columns = await cursor.fetchall()
                index_info["columns"] = [col[2] for col in index_columns]
                table_info["indexes"].append(index_info)

            if include_relationships:
                # Get foreign keys declared on this table
                cursor = await db.execute(f"PRAGMA foreign_key_list({table_name})")
                foreign_keys = await cursor.fetchall()
                table_info["foreign_keys"] = [
                    {
                        "id": fk[0],
                        "seq": fk[1],
                        "table": fk[2],
                        "from": fk[3],
                        "to": fk[4],
                        "on_update": fk[5],
                        "on_delete": fk[6],
                        "match": fk[7]
                    }
                    for fk in foreign_keys
                ]

                # Find tables that reference this table by scanning every
                # table's foreign-key list
                cursor = await db.execute(
                    """SELECT name FROM sqlite_master WHERE type='table'"""
                )
                all_tables = [row[0] for row in await cursor.fetchall()]

                referencing_tables = []
                for other_table in all_tables:
                    cursor = await db.execute(f"PRAGMA foreign_key_list({other_table})")
                    fks = await cursor.fetchall()
                    for fk in fks:
                        if fk[2] == table_name:  # references our table
                            referencing_tables.append({
                                "table": other_table,
                                "column": fk[3],
                                "references_column": fk[4]
                            })

                table_info["referenced_by"] = referencing_tables

            if include_sample_data and table_info["row_count"] > 0:
                # Get sample data (first 5 rows)
                cursor = await db.execute(f"SELECT * FROM {table_name} LIMIT 5")
                sample_rows = await cursor.fetchall()
                column_names = [desc[0] for desc in cursor.description]

                table_info["sample_data"] = {
                    "columns": column_names,
                    "rows": [list(row) for row in sample_rows]
                }

            return table_info
617
+
618
    async def _get_enhanced_statistics(
        self,
        db_path: str,
        include_table_stats: bool = True,
        include_version_stats: bool = True,
        include_performance_stats: bool = True
    ) -> Dict[str, Any]:
        """Get enhanced database statistics.

        Args:
            db_path: Path to the SQLite database file.
            include_table_stats: Add per-table row counts.
            include_version_stats: Add aggregates over global_versions /
                environment_versions.
            include_performance_stats: Add page-level size/utilization stats.

        Returns:
            Dict of the requested statistic groups (empty if all flags are
            False); merged into the base stats by the caller.
        """
        stats = {}

        async with aiosqlite.connect(db_path) as db:
            if include_table_stats:
                # Get detailed table statistics (names come from
                # sqlite_master, so the f-string COUNT is safe)
                cursor = await db.execute(
                    "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
                )
                table_names = [row[0] for row in await cursor.fetchall()]

                table_stats = {}
                for table_name in table_names:
                    cursor = await db.execute(f"SELECT COUNT(*) FROM {table_name}")
                    row_count = (await cursor.fetchone())[0]  # type: ignore
                    table_stats[table_name] = {"row_count": row_count}

                stats["detailed_table_statistics"] = table_stats

            if include_version_stats:
                # Enhanced version statistics (single aggregate row)
                cursor = await db.execute(
                    """SELECT
                        COUNT(DISTINCT gv.id) as unique_versions,
                        COUNT(DISTINCT ev.environment_id) as environments_with_versions,
                        AVG(gv.reference_count) as avg_reference_count,
                        MAX(gv.last_used_at) as most_recent_use
                    FROM global_versions gv
                    LEFT JOIN environment_versions ev ON gv.id = ev.global_version_id"""
                )
                version_stats = await cursor.fetchone()
                stats["enhanced_version_statistics"] = {
                    "unique_versions": version_stats[0],  # type: ignore
                    "environments_with_versions": version_stats[1],  # type: ignore
                    # AVG is NULL (None) when there are no versions; coalesce to 0
                    "average_reference_count": round(version_stats[2] or 0, 2),  # type: ignore
                    "most_recent_use": version_stats[3]  # type: ignore
                }

            if include_performance_stats:
                # Database performance statistics from SQLite PRAGMAs
                cursor = await db.execute("PRAGMA page_count")
                page_count = (await cursor.fetchone())[0]  # type: ignore

                cursor = await db.execute("PRAGMA page_size")
                page_size = (await cursor.fetchone())[0]  # type: ignore

                cursor = await db.execute("PRAGMA freelist_count")
                freelist_count = (await cursor.fetchone())[0]  # type: ignore

                stats["performance_statistics"] = {
                    "total_pages": page_count,
                    "page_size_bytes": page_size,
                    "database_size_bytes": page_count * page_size,
                    "free_pages": freelist_count,
                    "utilized_pages": page_count - freelist_count,
                    # guard the division against an empty database
                    "space_utilization_percent": round(
                        ((page_count - freelist_count) / page_count * 100) if page_count > 0 else 0, 2
                    )
                }

        return stats