sql-glider 0.1.2 → 0.1.4 (py3-none-any.whl)

@@ -0,0 +1,121 @@
+ """Catalog registry with plugin discovery via entry points.
+
+ This module handles discovering and instantiating catalog providers from
+ Python entry points, allowing third-party packages to register
+ custom catalogs.
+ """
+
+ import sys
+ from typing import Dict, List, Type
+
+ from sqlglider.catalog.base import Catalog, CatalogError
+
+ # Cache for discovered catalogs
+ _catalog_cache: Dict[str, Type[Catalog]] = {}
+ _discovery_done: bool = False
+
+
+ def _discover_catalogs() -> None:
+     """Discover catalogs from entry points.
+
+     Uses importlib.metadata to find all registered catalogs
+     in the 'sqlglider.catalogs' entry point group.
+     """
+     global _discovery_done, _catalog_cache
+
+     if _discovery_done:
+         return
+
+     if sys.version_info >= (3, 10):
+         from importlib.metadata import entry_points
+
+         eps = entry_points(group="sqlglider.catalogs")
+     else:
+         from importlib.metadata import entry_points
+
+         all_eps = entry_points()
+         eps = all_eps.get("sqlglider.catalogs", [])
+
+     for ep in eps:
+         try:
+             catalog_class = ep.load()
+             if isinstance(catalog_class, type) and issubclass(catalog_class, Catalog):
+                 _catalog_cache[ep.name] = catalog_class
+         except Exception:
+             # Skip catalogs that fail to load
+             # This allows graceful handling of missing optional dependencies
+             pass
+
+     _discovery_done = True
+
+
+ def get_catalog(name: str) -> Catalog:
+     """Get a catalog instance by name.
+
+     Args:
+         name: The name of the catalog (e.g., "databricks").
+
+     Returns:
+         An instance of the requested catalog.
+
+     Raises:
+         CatalogError: If the catalog is not found.
+
+     Example:
+         >>> catalog = get_catalog("databricks")
+         >>> ddl = catalog.get_ddl("my_catalog.my_schema.my_table")
+     """
+     _discover_catalogs()
+
+     if name not in _catalog_cache:
+         available = ", ".join(sorted(_catalog_cache.keys()))
+         raise CatalogError(
+             f"Unknown catalog '{name}'. Available catalogs: {available or 'none'}. "
+             f"You may need to install an optional dependency (e.g., pip install sql-glider[databricks])."
+         )
+
+     return _catalog_cache[name]()
+
+
+ def list_catalogs() -> List[str]:
+     """List all available catalog names.
+
+     Returns:
+         A sorted list of available catalog names.
+
+     Example:
+         >>> catalogs = list_catalogs()
+         >>> print(catalogs)
+         ['databricks']
+     """
+     _discover_catalogs()
+     return sorted(_catalog_cache.keys())
+
+
+ def register_catalog(name: str, catalog_class: Type[Catalog]) -> None:
+     """Register a catalog programmatically.
+
+     This is primarily useful for testing or for registering catalogs
+     that aren't installed via entry points.
+
+     Args:
+         name: The name to register the catalog under.
+         catalog_class: The catalog class to register.
+
+     Raises:
+         ValueError: If catalog_class is not a subclass of Catalog.
+     """
+     if not isinstance(catalog_class, type) or not issubclass(catalog_class, Catalog):
+         raise ValueError(f"{catalog_class} must be a subclass of Catalog")
+
+     _catalog_cache[name] = catalog_class
+
+
+ def clear_registry() -> None:
+     """Clear the catalog registry.
+
+     This is primarily useful for testing.
+     """
+     global _discovery_done, _catalog_cache
+     _catalog_cache.clear()
+     _discovery_done = False
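Installed packages expose providers by declaring an entry point in the `sqlglider.catalogs` group (for example under `[project.entry-points."sqlglider.catalogs"]` in `pyproject.toml`); `register_catalog` covers the in-process case. A minimal sketch of the programmatic path: `EchoCatalog` is hypothetical, and it assumes `Catalog`'s abstract surface is the `configure`/`get_ddl`/`get_ddl_batch` trio that `cli.py` below calls, with `register_catalog` importable from `sqlglider.catalog` like its siblings.

```python
# Hypothetical provider: EchoCatalog and this registration flow are an
# illustration, not sql-glider code. The Catalog methods implemented here
# (configure / get_ddl / get_ddl_batch) are assumed from how cli.py below
# invokes providers.
from typing import Dict, List

from sqlglider.catalog import get_catalog, list_catalogs, register_catalog
from sqlglider.catalog.base import Catalog


class EchoCatalog(Catalog):
    """Toy provider that fabricates DDL instead of querying a warehouse."""

    def configure(self, config: Dict[str, str]) -> None:
        self._config = config

    def get_ddl(self, table_name: str) -> str:
        return f"-- DDL for {table_name}"

    def get_ddl_batch(self, table_names: List[str]) -> Dict[str, str]:
        return {name: self.get_ddl(name) for name in table_names}


register_catalog("echo", EchoCatalog)
print(list_catalogs())  # includes 'echo'
print(get_catalog("echo").get_ddl("main.sales.orders"))
```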
sqlglider/cli.py CHANGED
@@ -332,8 +332,16 @@ def lineage(
          raise typer.Exit(1)


- @app.command()
- def tables(
+ # Tables command group
+ tables_app = typer.Typer(
+     name="tables",
+     help="Table-related analysis commands.",
+ )
+ app.add_typer(tables_app, name="tables")
+
+
+ @tables_app.command("overview")
+ def tables_overview(
      sql_file: Annotated[
          typer.FileText,
          typer.Argument(
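This rename is the visible half of a structural change: `tables` becomes a Typer sub-application, so the old `sqlglider tables <file>` spelling becomes `sqlglider tables overview <file>`, as the docstring updates in the next hunk reflect. A self-contained demo of the same pattern (standalone illustration, not sql-glider code):

```python
# Standalone demo of the typer sub-app pattern introduced above.
import typer

app = typer.Typer()
tables_app = typer.Typer(help="Table-related analysis commands.")
app.add_typer(tables_app, name="tables")


@tables_app.command("overview")
def overview() -> None:
    """Runs as: python demo.py tables overview"""
    typer.echo("tables overview")


if __name__ == "__main__":
    app()
```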
@@ -396,22 +404,22 @@ def tables(
      Examples:

          # List all tables in a SQL file
-         sqlglider tables query.sql
+         sqlglider tables overview query.sql

          # Export to JSON
-         sqlglider tables query.sql --output-format json
+         sqlglider tables overview query.sql --output-format json

          # Export to CSV file
-         sqlglider tables query.sql --output-format csv --output-file tables.csv
+         sqlglider tables overview query.sql --output-format csv --output-file tables.csv

          # Use different SQL dialect
-         sqlglider tables query.sql --dialect postgres
+         sqlglider tables overview query.sql --dialect postgres

          # Filter to queries referencing a specific table
-         sqlglider tables query.sql --table customers
+         sqlglider tables overview query.sql --table customers

          # Analyze templated SQL with Jinja2
-         sqlglider tables query.sql --templater jinja --var schema=analytics
+         sqlglider tables overview query.sql --templater jinja --var schema=analytics
      """
      # Load configuration from sqlglider.toml (if it exists)
      config = load_config()
@@ -517,6 +525,263 @@ def tables(
          raise typer.Exit(1)


+ @tables_app.command("pull")
+ def tables_pull(
+     sql_file: Annotated[
+         typer.FileText,
+         typer.Argument(
+             default_factory=lambda: sys.stdin,
+             show_default="stdin",
+             help="Path to SQL file to analyze (reads from stdin if not provided)",
+         ),
+     ],
+     catalog_type: Optional[str] = typer.Option(
+         None,
+         "--catalog-type",
+         "-c",
+         help="Catalog provider (e.g., 'databricks'). Required if not in config.",
+     ),
+     ddl_folder: Optional[Path] = typer.Option(
+         None,
+         "--ddl-folder",
+         "-o",
+         help="Output folder for DDL files. If not provided, outputs to stdout.",
+     ),
+     dialect: Optional[str] = typer.Option(
+         None,
+         "--dialect",
+         "-d",
+         help="SQL dialect (default: spark, or from config)",
+     ),
+     templater: Optional[str] = typer.Option(
+         None,
+         "--templater",
+         "-t",
+         help="Templater for SQL preprocessing (e.g., 'jinja', 'none')",
+     ),
+     var: Optional[List[str]] = typer.Option(
+         None,
+         "--var",
+         "-v",
+         help="Template variable in key=value format (repeatable)",
+     ),
+     vars_file: Optional[Path] = typer.Option(
+         None,
+         "--vars-file",
+         exists=True,
+         help="Path to variables file (JSON or YAML)",
+     ),
+     list_available: bool = typer.Option(
+         False,
+         "--list",
+         "-l",
+         help="List available catalog providers and exit",
+     ),
+ ) -> None:
+     """
+     Pull DDL definitions from a remote catalog for tables used in SQL.
+
+     Analyzes the SQL file to find referenced tables, then fetches their DDL
+     from the specified catalog provider (e.g., Databricks Unity Catalog).
+
+     CTEs are automatically excluded since they don't exist in remote catalogs.
+
+     Configuration can be set in sqlglider.toml in the current directory.
+     CLI arguments override configuration file values.
+
+     Examples:
+
+         # Pull DDL for tables in a SQL file (output to stdout)
+         sqlglider tables pull query.sql --catalog-type databricks
+
+         # Pull DDL to a folder (one file per table)
+         sqlglider tables pull query.sql -c databricks -o ./ddl/
+
+         # Use config file for catalog settings
+         sqlglider tables pull query.sql
+
+         # With templating
+         sqlglider tables pull query.sql -c databricks --templater jinja --var schema=prod
+
+         # List available catalog providers
+         sqlglider tables pull --list
+     """
+     from sqlglider.catalog import CatalogError, get_catalog, list_catalogs
+     from sqlglider.lineage.analyzer import ObjectType
+
+     # Handle --list option
+     if list_available:
+         available = list_catalogs()
+         if available:
+             console.print("[bold]Available catalog providers:[/bold]")
+             for name in available:
+                 console.print(f" - {name}")
+         else:
+             console.print(
+                 "[yellow]No catalog providers available.[/yellow]\n"
+                 "Install a provider with: pip install sql-glider[databricks]"
+             )
+         raise typer.Exit(0)
+
+     # Load configuration from sqlglider.toml (if it exists)
+     config = load_config()
+
+     # Apply priority resolution: CLI args > config > defaults
+     dialect = dialect or config.dialect or "spark"
+     templater = templater or config.templater  # None means no templating
+     catalog_type = catalog_type or config.catalog_type
+     ddl_folder_str = config.ddl_folder if ddl_folder is None else None
+     if ddl_folder is None and ddl_folder_str:
+         ddl_folder = Path(ddl_folder_str)
+
+     # Validate catalog_type is provided
+     if not catalog_type:
+         err_console.print(
+             "[red]Error:[/red] No catalog provider specified. "
+             "Use --catalog-type or set catalog_type in sqlglider.toml."
+         )
+         raise typer.Exit(1)
+
+     # Check if reading from stdin (cross-platform: name is "<stdin>" on all OS)
+     is_stdin = sql_file.name == "<stdin>"
+
+     try:
+         # Check if stdin is being used without input
+         if is_stdin and sys.stdin.isatty():
+             err_console.print(
+                 "[red]Error:[/red] No SQL file provided and stdin is interactive. "
+                 "Provide a SQL file path or pipe SQL via stdin."
+             )
+             raise typer.Exit(1)
+
+         # Read SQL from file or stdin
+         sql = sql_file.read()
+
+         # Determine source path for templating (None if stdin)
+         source_path = None if is_stdin else Path(sql_file.name)
+
+         # Apply templating if specified
+         sql = _apply_templating(
+             sql,
+             templater_name=templater,
+             cli_vars=var,
+             vars_file=vars_file,
+             config=config,
+             source_path=source_path,
+         )
+
+         # Create analyzer and extract tables
+         analyzer = LineageAnalyzer(sql, dialect=dialect)
+         table_results = analyzer.analyze_tables()
+
+         # Collect unique table names, excluding CTEs
+         table_names: set[str] = set()
+         for result in table_results:
+             for table_info in result.tables:
+                 if table_info.object_type != ObjectType.CTE:
+                     table_names.add(table_info.name)
+
+         if not table_names:
+             console.print("[yellow]No tables found in SQL (CTEs excluded).[/yellow]")
+             raise typer.Exit(0)
+
+         # Get catalog instance and configure it
+         catalog = get_catalog(catalog_type)
+
+         # Build catalog config from config file
+         catalog_config: dict[str, str] = {}
+         if (
+             config.catalog
+             and catalog_type == "databricks"
+             and config.catalog.databricks
+         ):
+             db_config = config.catalog.databricks
+             if db_config.warehouse_id:
+                 catalog_config["warehouse_id"] = db_config.warehouse_id
+             if db_config.profile:
+                 catalog_config["profile"] = db_config.profile
+             if db_config.host:
+                 catalog_config["host"] = db_config.host
+             if db_config.token:
+                 catalog_config["token"] = db_config.token
+
+         catalog.configure(catalog_config)
+
+         # Fetch DDL for all tables
+         console.print(
+             f"[dim]Fetching DDL for {len(table_names)} table(s) from {catalog_type}...[/dim]"
+         )
+         ddl_results = catalog.get_ddl_batch(list(table_names))
+
+         # Count successes and failures
+         successes = 0
+         failures = 0
+
+         # Output DDL
+         if ddl_folder:
+             # Create output folder if it doesn't exist
+             ddl_folder.mkdir(parents=True, exist_ok=True)
+
+             for table_name, ddl in ddl_results.items():
+                 if ddl.startswith("ERROR:"):
+                     err_console.print(f"[yellow]Warning:[/yellow] {table_name}: {ddl}")
+                     failures += 1
+                 else:
+                     # Write DDL to file named by table identifier
+                     file_name = f"{table_name}.sql"
+                     file_path = ddl_folder / file_name
+                     file_path.write_text(ddl, encoding="utf-8")
+                     successes += 1
+
+             console.print(
+                 f"[green]Success:[/green] Wrote {successes} DDL file(s) to {ddl_folder}"
+             )
+             if failures > 0:
+                 console.print(
+                     f"[yellow]Warning:[/yellow] {failures} table(s) failed to fetch"
+                 )
+         else:
+             # Output to stdout
+             for table_name, ddl in ddl_results.items():
+                 if ddl.startswith("ERROR:"):
+                     err_console.print(f"[yellow]Warning:[/yellow] {table_name}: {ddl}")
+                     failures += 1
+                 else:
+                     print(f"-- Table: {table_name}")
+                     print(ddl)
+                     print()
+                     successes += 1
+
+             if failures > 0:
+                 err_console.print(
+                     f"\n[yellow]Warning:[/yellow] {failures} table(s) failed to fetch"
+                 )
+
+     except FileNotFoundError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except ParseError as e:
+         err_console.print(f"[red]Error:[/red] Failed to parse SQL: {e}")
+         raise typer.Exit(1)
+
+     except TemplaterError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except CatalogError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except ValueError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except Exception as e:
+         err_console.print(f"[red]Error:[/red] Unexpected error: {e}")
+         raise typer.Exit(1)
+
+
  @app.command()
  def template(
      sql_file: Annotated[
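For callers that want this flow without the CLI, the same steps can be driven from Python. A hedged sketch using only the calls that appear in the hunk above; the `LineageAnalyzer` import path is assumed (its import is not part of this diff, though `ObjectType` comes from the same module), and the Databricks settings are placeholders:

```python
# Sketch of the `tables pull` flow driven programmatically; warehouse
# settings are placeholders, LineageAnalyzer's module path is assumed.
from sqlglider.catalog import get_catalog
from sqlglider.lineage.analyzer import LineageAnalyzer, ObjectType

sql = "SELECT o.id, c.name FROM orders o JOIN customers c ON o.cid = c.id"

analyzer = LineageAnalyzer(sql, dialect="spark")
table_names = {
    t.name
    for result in analyzer.analyze_tables()
    for t in result.tables
    if t.object_type != ObjectType.CTE  # CTEs never exist in a remote catalog
}

catalog = get_catalog("databricks")  # needs: pip install sql-glider[databricks]
catalog.configure({"warehouse_id": "<warehouse-id>", "profile": "DEFAULT"})

# Failures come back inline as "ERROR: ..." values rather than raising.
for name, ddl in catalog.get_ddl_batch(sorted(table_names)).items():
    print(f"-- Table: {name}\n{ddl}\n")
```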
@@ -821,21 +1086,21 @@ def graph_build(
      if manifest:
          builder.add_manifest(manifest, dialect=dialect)

-     # Process paths
+     # Process paths - collect all files first for progress tracking
      if paths:
+         all_files: list[Path] = []
          for path in paths:
              if path.is_dir():
-                 builder.add_directory(
-                     path,
-                     recursive=recursive,
-                     glob_pattern=glob_pattern,
-                     dialect=dialect,
+                 pattern = f"**/{glob_pattern}" if recursive else glob_pattern
+                 all_files.extend(
+                     f for f in sorted(path.glob(pattern)) if f.is_file()
                  )
              elif path.is_file():
-                 builder.add_file(path, dialect=dialect)
+                 all_files.append(path)
              else:
                  err_console.print(f"[red]Error:[/red] Path not found: {path}")
                  raise typer.Exit(1)
+         builder.add_files(all_files, dialect=dialect)

      # Build and save graph
      graph = builder.build()
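The `**/` prefix is what toggles recursion here: `Path.glob("*.sql")` matches only the directory itself, while `Path.glob("**/*.sql")` descends into subdirectories. A stdlib-only illustration of the same logic (the `queries/` directory and `*.sql` pattern are placeholders, not sql-glider defaults):

```python
# Stdlib-only illustration of the glob collection above.
from pathlib import Path

glob_pattern = "*.sql"
recursive = True

root = Path("queries")
pattern = f"**/{glob_pattern}" if recursive else glob_pattern
# sorted() keeps the build order deterministic across filesystems;
# is_file() drops directories that happen to match the pattern.
files = [f for f in sorted(root.glob(pattern)) if f.is_file()]
print(files)
```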
@@ -1133,5 +1398,192 @@ def _format_query_result_csv(result) -> None:
      )


+ @app.command()
+ def dissect(
+     sql_file: Annotated[
+         typer.FileText,
+         typer.Argument(
+             default_factory=lambda: sys.stdin,
+             show_default="stdin",
+             help="Path to SQL file to dissect (reads from stdin if not provided)",
+         ),
+     ],
+     dialect: Optional[str] = typer.Option(
+         None,
+         "--dialect",
+         "-d",
+         help="SQL dialect (default: spark, or from config)",
+     ),
+     output_format: Optional[str] = typer.Option(
+         None,
+         "--output-format",
+         "-f",
+         help="Output format: 'text', 'json', or 'csv' (default: text, or from config)",
+     ),
+     output_file: Optional[Path] = typer.Option(
+         None,
+         "--output-file",
+         "-o",
+         help="Write output to file instead of stdout",
+     ),
+     templater: Optional[str] = typer.Option(
+         None,
+         "--templater",
+         "-t",
+         help="Templater for SQL preprocessing (e.g., 'jinja', 'none')",
+     ),
+     var: Optional[List[str]] = typer.Option(
+         None,
+         "--var",
+         "-v",
+         help="Template variable in key=value format (repeatable)",
+     ),
+     vars_file: Optional[Path] = typer.Option(
+         None,
+         "--vars-file",
+         exists=True,
+         help="Path to variables file (JSON or YAML)",
+     ),
+ ) -> None:
+     """
+     Dissect SQL queries into constituent components.
+
+     Extracts CTEs, subqueries, main query, DML targets, source SELECTs,
+     UNION branches, and scalar subqueries for analysis and unit testing.
+
+     Configuration can be set in sqlglider.toml in the current directory.
+     CLI arguments override configuration file values.
+
+     Examples:
+
+         # Dissect a SQL file
+         sqlglider dissect query.sql
+
+         # Export to JSON format
+         sqlglider dissect query.sql --output-format json
+
+         # Export to CSV file
+         sqlglider dissect query.sql --output-format csv --output-file dissected.csv
+
+         # Use different SQL dialect
+         sqlglider dissect query.sql --dialect postgres
+
+         # Dissect templated SQL with Jinja2
+         sqlglider dissect query.sql --templater jinja --var schema=analytics
+     """
+     from sqlglider.dissection.analyzer import DissectionAnalyzer
+     from sqlglider.dissection.formatters import (
+         DissectionCsvFormatter,
+         DissectionJsonFormatter,
+         DissectionTextFormatter,
+     )
+     from sqlglider.dissection.formatters import (
+         OutputWriter as DissectionOutputWriter,
+     )
+
+     # Load configuration from sqlglider.toml (if it exists)
+     config = load_config()
+
+     # Apply priority resolution: CLI args > config > defaults
+     dialect = dialect or config.dialect or "spark"
+     output_format = output_format or config.output_format or "text"
+     templater = templater or config.templater  # None means no templating
+
+     # Validate output format
+     if output_format not in ["text", "json", "csv"]:
+         err_console.print(
+             f"[red]Error:[/red] Invalid output format '{output_format}'. "
+             "Use 'text', 'json', or 'csv'."
+         )
+         raise typer.Exit(1)
+
+     # Check if reading from stdin
+     is_stdin = sql_file.name == "<stdin>"
+
+     try:
+         # Check if stdin is being used without input
+         if is_stdin and sys.stdin.isatty():
+             err_console.print(
+                 "[red]Error:[/red] No SQL file provided and stdin is interactive. "
+                 "Provide a SQL file path or pipe SQL via stdin."
+             )
+             raise typer.Exit(1)
+
+         # Read SQL from file or stdin
+         sql = sql_file.read()
+
+         # Determine source path for templating (None if stdin)
+         source_path = None if is_stdin else Path(sql_file.name)
+
+         # Apply templating if specified
+         sql = _apply_templating(
+             sql,
+             templater_name=templater,
+             cli_vars=var,
+             vars_file=vars_file,
+             config=config,
+             source_path=source_path,
+         )
+
+         # Create analyzer
+         analyzer = DissectionAnalyzer(sql, dialect=dialect)
+
+         # Dissect queries
+         results = analyzer.dissect_queries()
+
+         # Format and output based on output format
+         if output_format == "text":
+             if output_file:
+                 # For file output, use a string-based console to capture output
+                 from io import StringIO
+
+                 from rich.console import Console as FileConsole
+
+                 string_buffer = StringIO()
+                 file_console = FileConsole(file=string_buffer, force_terminal=False)
+                 DissectionTextFormatter.format(results, file_console)
+                 output_file.write_text(string_buffer.getvalue(), encoding="utf-8")
+                 console.print(
+                     f"[green]Success:[/green] Dissection written to {output_file}"
+                 )
+             else:
+                 # Direct console output with Rich formatting
+                 DissectionTextFormatter.format(results, console)
+         elif output_format == "json":
+             formatted = DissectionJsonFormatter.format(results)
+             DissectionOutputWriter.write(formatted, output_file)
+             if output_file:
+                 console.print(
+                     f"[green]Success:[/green] Dissection written to {output_file}"
+                 )
+         else:  # csv
+             formatted = DissectionCsvFormatter.format(results)
+             DissectionOutputWriter.write(formatted, output_file)
+             if output_file:
+                 console.print(
+                     f"[green]Success:[/green] Dissection written to {output_file}"
+                 )
+
+     except FileNotFoundError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except ParseError as e:
+         err_console.print(f"[red]Error:[/red] Failed to parse SQL: {e}")
+         raise typer.Exit(1)
+
+     except TemplaterError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except ValueError as e:
+         err_console.print(f"[red]Error:[/red] {e}")
+         raise typer.Exit(1)
+
+     except Exception as e:
+         err_console.print(f"[red]Error:[/red] Unexpected error: {e}")
+         raise typer.Exit(1)
+
+
  if __name__ == "__main__":
      app()
@@ -0,0 +1,17 @@
+ """SQL query dissection module for decomposing queries into components."""
+
+ from sqlglider.dissection.analyzer import DissectionAnalyzer
+ from sqlglider.dissection.models import (
+     ComponentType,
+     QueryDissectionResult,
+     QueryMetadata,
+     SQLComponent,
+ )
+
+ __all__ = [
+     "ComponentType",
+     "DissectionAnalyzer",
+     "QueryDissectionResult",
+     "QueryMetadata",
+     "SQLComponent",
+ ]
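These exports are the module's programmatic surface. A minimal sketch mirroring how `cli.py` drives it; `DissectionAnalyzer(sql, dialect=...)` and `dissect_queries()` appear in the diff, while the fields of each `QueryDissectionResult` live in `sqlglider.dissection.models` and are not spelled out here:

```python
# Mirrors the cli.py usage above; the result objects are printed as-is
# because their exact attributes are not shown in this diff.
from sqlglider.dissection import DissectionAnalyzer

sql = """
WITH active AS (SELECT id FROM users WHERE active = 1)
SELECT o.id, o.total
FROM orders o
JOIN active a ON a.id = o.user_id
"""

analyzer = DissectionAnalyzer(sql, dialect="spark")
for result in analyzer.dissect_queries():
    print(result)  # QueryDissectionResult: CTEs, main query, subqueries, ...
```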