sql-glider 0.1.11__tar.gz → 0.1.13__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sql_glider-0.1.11 → sql_glider-0.1.13}/PKG-INFO +1 -1
- sql_glider-0.1.13/plans/2026-01-29-no-star-flag.md +47 -0
- sql_glider-0.1.13/plans/2026-01-29-resolve-schema.md +49 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/_version.py +2 -2
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/cli.py +117 -10
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/builder.py +211 -20
- sql_glider-0.1.13/src/sqlglider/graph/formatters.py +98 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/lineage/analyzer.py +171 -5
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/utils/config.py +4 -0
- sql_glider-0.1.13/src/sqlglider/utils/schema.py +62 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/test_builder.py +253 -0
- sql_glider-0.1.13/tests/sqlglider/graph/test_formatters.py +86 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/lineage/test_analyzer.py +182 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/test_cli.py +172 -0
- sql_glider-0.1.13/tests/sqlglider/utils/test_schema.py +55 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/.github/workflows/ci.yml +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/.github/workflows/publish.yml +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/.gitignore +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/.python-version +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/ARCHITECTURE.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/CLAUDE.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/LICENSE +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/README.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-05-column-level-lineage.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-05-reverse-lineage.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-06-config-file-support.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-06-graph-lineage.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-06-unify-single-multi-query.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-07-sample-data-model.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-07-sql-templating.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-08-tables-command.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-09-graph-query-paths.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-13-dissect-command.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2025-12-14-tables-pull-command.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2026-01-25-fix-union-lineage-chain.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2026-01-26-file-scoped-schema-context.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/plans/2026-01-28-sparksql-table-extraction.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/pyproject.toml +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/README.md +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/expire_dim_customer.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/load_fact_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/load_fact_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/merge_dim_customer.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/merge_dim_product.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/business/update_dim_customer_metrics.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/complex/conditional_merge.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/complex/cte_insert.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/complex/multi_table_transform.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/dim_customer.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/dim_product.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/fact_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/fact_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_addresses.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_customers.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_order_items.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/raw_products.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/stg_customers.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/stg_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/stg_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/ddl/stg_products.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/incremental/incr_fact_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/incremental/incr_fact_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/incremental/incr_pres_sales_summary.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/maintenance/delete_expired_customers.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/maintenance/update_product_status.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/presentation/load_pres_customer_360.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/presentation/load_pres_customer_cohort.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/presentation/load_pres_product_performance.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/presentation/load_pres_sales_summary.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/staging/load_stg_customers.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/staging/load_stg_orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/staging/load_stg_payments.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sample_data_model/staging/load_stg_products.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/sqlglider.toml.example +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/catalog/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/catalog/base.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/catalog/databricks.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/catalog/registry.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/dissection/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/dissection/analyzer.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/dissection/formatters.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/dissection/models.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/global_models.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/merge.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/models.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/query.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/serialization.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/lineage/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/lineage/formatters.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/templating/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/templating/base.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/templating/jinja.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/templating/registry.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/templating/variables.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/utils/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/utils/file_utils.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/analytics_pipeline.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/analytics_pipeline_union_merge.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/customers.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/orders.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/reports.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/multi_file_queries/view_based_merge.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_cte.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_cte_query.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_cte_view_star.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_generated_column_query.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_multi.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_multi_query.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_single_query.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_subquery.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_tables.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_view.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/original_queries/test_view_window_cte.sql +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/fixtures/sample_manifest.csv +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/catalog/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/catalog/test_base.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/catalog/test_databricks.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/catalog/test_registry.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/dissection/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/dissection/test_analyzer.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/dissection/test_formatters.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/dissection/test_models.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/test_merge.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/test_models.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/test_query.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/graph/test_serialization.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/lineage/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/lineage/test_formatters.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/templating/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/templating/test_base.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/templating/test_jinja.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/templating/test_registry.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/templating/test_variables.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/utils/__init__.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/utils/test_config.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/tests/sqlglider/utils/test_file_utils.py +0 -0
- {sql_glider-0.1.11 → sql_glider-0.1.13}/uv.lock +0 -0
{sql_glider-0.1.11 → sql_glider-0.1.13}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sql-glider
-Version: 0.1.11
+Version: 0.1.13
 Summary: SQL Utility Toolkit for better understanding, use, and governance of your queries in a native environment.
 Project-URL: Homepage, https://github.com/rycowhi/sql-glider/
 Project-URL: Repository, https://github.com/rycowhi/sql-glider/
```
sql_glider-0.1.13/plans/2026-01-29-no-star-flag.md (new file)

```diff
@@ -0,0 +1,47 @@
+# Add `--no-star` Flag
+
+**Status:** Completed
+
+## Overview
+Add `--no-star` flag to `lineage` and `graph build` commands. When set, analysis fails if `SELECT *` or `t.*` cannot be resolved to actual columns.
+
+## Changes
+
+### 1. `src/sqlglider/utils/config.py` — Add to ConfigSettings
+- [x] Add `no_star: Optional[bool] = None`
+
+### 2. `src/sqlglider/lineage/analyzer.py` — Add parameter + enforce
+- [x] Add `no_star: bool = False` to `__init__`, store as `self._no_star`
+- [x] Add `StarResolutionError` exception class (distinct from `ValueError` to avoid being swallowed by skipped-query handler)
+- [x] DML/DDL path: raise `StarResolutionError` before fallback for bare `*` and `t.*`
+- [x] DQL path: add star handling for both bare `*` and `t.*` with same error behavior
+- [x] Re-raise `StarResolutionError` in `analyze_queries` instead of treating as skipped query
+
+### 3. `src/sqlglider/graph/builder.py` — Pass through
+- [x] Add `no_star: bool = False` to `__init__`, store as `self.no_star`
+- [x] Pass to `LineageAnalyzer(sql_content, dialect=file_dialect, no_star=self.no_star)`
+
+### 4. `src/sqlglider/cli.py` — Add CLI options
+- [x] `lineage` command: Add `no_star: bool = typer.Option(False, "--no-star", ...)`
+- [x] Resolve: `no_star = no_star or config.no_star or False`
+- [x] Pass to `LineageAnalyzer(sql, dialect=dialect, no_star=no_star)`
+- [x] `graph_build` command: same option, passed to `GraphBuilder(..., no_star=no_star)`
+
+### 5. `tests/sqlglider/lineage/test_analyzer.py` — Tests
+- [x] Test bare `SELECT *` with `no_star=True` raises `StarResolutionError`
+- [x] Test `SELECT t.*` with `no_star=True` raises `StarResolutionError`
+- [x] Test resolvable star (via CTE) still works with `no_star=True`
+- [x] Test resolvable qualified star (via CTE) still works with `no_star=True`
+- [x] Test default (`no_star=False`) still falls back to `table.*`
+
+## Implementation Notes
+
+### Deviations from original plan
+- Used `StarResolutionError` instead of `ValueError` because `analyze_queries` catches `ValueError` to handle unsupported statement types (skipped queries). A plain `ValueError` would be silently swallowed.
+- Added star handling in the DQL (plain SELECT) code path in addition to the DML/DDL path. The original plan only addressed the DML/DDL path, but plain `SELECT *` queries go through a different branch in `get_output_columns`.
+- Resolvable star tests use CTEs instead of `CREATE TABLE` with explicit columns, since `_extract_schema_from_statement` only handles `CREATE ... AS SELECT`, not DDL with column definitions.
+
+## Verification
+- `uv run pytest` — 597 passed, 1 skipped, coverage 80.48%
+- `uv run basedpyright src/` — 0 errors
+- `uv run ruff check` — all checks passed
```
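To make the flag's effect concrete, here is a minimal sketch at the analyzer level. It uses only names introduced or confirmed by this release (`LineageAnalyzer`'s `no_star` parameter, `StarResolutionError`, and `AnalysisLevel.COLUMN`); the SQL strings and dialect are illustrative, not taken from the package.

```python
from sqlglider.global_models import AnalysisLevel
from sqlglider.lineage.analyzer import LineageAnalyzer, StarResolutionError

# raw_orders has no schema visible in this SQL, so the bare star cannot be
# expanded; with no_star=True this should raise instead of silently falling
# back to a raw_orders.* placeholder column.
unresolvable = "CREATE TABLE stg_orders AS SELECT * FROM raw_orders"
try:
    LineageAnalyzer(unresolvable, dialect="spark", no_star=True).analyze_queries(
        level=AnalysisLevel.COLUMN
    )
except StarResolutionError as exc:
    print(f"unresolved star: {exc}")

# The CTE's column list is known from the query itself, so the star is
# resolvable and no_star=True does not object.
resolvable = """
CREATE TABLE stg_orders AS
WITH src AS (SELECT order_id, amount FROM raw_orders)
SELECT * FROM src
"""
LineageAnalyzer(resolvable, dialect="spark", no_star=True).analyze_queries(
    level=AnalysisLevel.COLUMN
)
```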
sql_glider-0.1.13/plans/2026-01-29-resolve-schema.md (new file)

```diff
@@ -0,0 +1,49 @@
+# `--resolve-schema` Flag and Catalog Integration
+
+**Status:** Completed
+
+## Overview
+
+Add `--resolve-schema` flag to `graph build` that runs a two-pass process: first extracting schema from all files, then running lineage analysis with the full schema available. Optionally, `--catalog-type` fills schema gaps by pulling DDL from a remote catalog.
+
+## Design Decisions
+
+- **Types are not required** — SQLGlot only needs column names for star expansion; types are stored as `"UNKNOWN"`
+- **Two-pass approach** — Pass 1 extracts schema from all files (order-independent), Pass 2 runs lineage with full schema
+- **Opt-in via `--resolve-schema`** — default behavior unchanged
+- **Catalog fills gaps only** — file-derived schema always wins over catalog-sourced schema
+- **`--catalog-type` requires `--resolve-schema`** — validated at CLI level
+
+## Implementation
+
+- [x] Add `schema` param to `LineageAnalyzer.__init__()` — pre-populates `_file_schema`
+- [x] Add `extract_schema_only()` and `get_extracted_schema()` methods to `LineageAnalyzer`
+- [x] Create `src/sqlglider/utils/schema.py` with `parse_ddl_to_schema()` for DDL column extraction
+- [x] Add `resolve_schema`, `catalog_type`, `catalog_config` to `GraphBuilder`
+- [x] Implement `_extract_schemas()` for pass 1 and `_fill_schema_from_catalog()` for catalog gap-filling
+- [x] Two-pass flow in `add_files()` and `add_manifest()`
+- [x] Add `--resolve-schema` and `--catalog-type` CLI flags to `graph build`
+- [x] Add `resolve_schema` to `ConfigSettings`
+- [x] Tests: 25 new tests (schema parsing, analyzer schema param, cross-file resolution, catalog mocking)
+
+## Files Modified
+
+- `src/sqlglider/lineage/analyzer.py` — schema param, extraction methods
+- `src/sqlglider/graph/builder.py` — two-pass processing, catalog integration
+- `src/sqlglider/cli.py` — CLI flags
+- `src/sqlglider/utils/config.py` — config setting
+- `src/sqlglider/utils/schema.py` — **new** DDL parsing utility
+- `tests/sqlglider/utils/test_schema.py` — **new**
+- `tests/sqlglider/graph/test_builder.py` — resolve schema + catalog tests
+- `tests/sqlglider/lineage/test_analyzer.py` — schema param tests
+
+## Verification
+
+- 617 passed, 1 skipped
+- Coverage: 80.43%
+- basedpyright: 0 errors
+- ruff: all checks passed
+
+## Known Limitations
+
+- Cross-file CTAS chains with `SELECT *` (view B depends on view A via star) may not resolve if both are in separate files and the schema extraction pass processes B before A. This is rare in practice.
```
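A minimal sketch of the two-pass idea using the analyzer-level pieces listed above (`extract_schema_only()` plus the new `schema=` parameter). The two inline SQL files, the dialect, and the dict shape are illustrative assumptions consistent with the diff below:

```python
from sqlglider.global_models import AnalysisLevel
from sqlglider.lineage.analyzer import LineageAnalyzer

files = {
    "load_stg_orders.sql": (
        "CREATE TABLE stg_orders AS SELECT order_id, amount FROM raw_orders"
    ),
    "load_summary.sql": "CREATE TABLE order_summary AS SELECT * FROM stg_orders",
}

# Pass 1: accumulate schema from every file; column types are not needed
# for star expansion, so values are placeholders like "UNKNOWN".
schema: dict[str, dict[str, str]] = {}
for sql in files.values():
    schema.update(LineageAnalyzer(sql, dialect="spark").extract_schema_only())

# Pass 2: analyze with the full schema available, so the SELECT * in
# load_summary.sql resolves even though stg_orders is defined in a
# different file.
for sql in files.values():
    LineageAnalyzer(sql, dialect="spark", schema=schema).analyze_queries(
        level=AnalysisLevel.COLUMN
    )
```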
{sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/_version.py

```diff
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.1.11'
-__version_tuple__ = version_tuple = (0, 1, 11)
+__version__ = version = '0.1.13'
+__version_tuple__ = version_tuple = (0, 1, 13)
 
 __commit_id__ = commit_id = None
```
{sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/cli.py

```diff
@@ -12,7 +12,7 @@ from sqlglot.errors import ParseError
 from typing_extensions import Annotated
 
 from sqlglider.global_models import AnalysisLevel, NodeFormat
-from sqlglider.lineage.analyzer import LineageAnalyzer
+from sqlglider.lineage.analyzer import LineageAnalyzer, SchemaResolutionError
 from sqlglider.lineage.formatters import (
     CsvFormatter,
     JsonFormatter,
```
```diff
@@ -1001,6 +1001,35 @@ def graph_build(
         "--no-star",
         help="Fail if SELECT * cannot be resolved to actual columns",
     ),
+    resolve_schema: bool = typer.Option(
+        False,
+        "--resolve-schema",
+        help="Extract schema from all files before lineage analysis, "
+        "enabling cross-file star resolution",
+    ),
+    catalog_type: Optional[str] = typer.Option(
+        None,
+        "--catalog-type",
+        "-c",
+        help="Catalog provider for pulling DDL of tables not found in files "
+        "(requires --resolve-schema). E.g. 'databricks'",
+    ),
+    dump_schema: Optional[Path] = typer.Option(
+        None,
+        "--dump-schema",
+        help="Dump resolved schema to file (requires --resolve-schema)",
+    ),
+    dump_schema_format: Optional[str] = typer.Option(
+        None,
+        "--dump-schema-format",
+        help="Format for dumped schema: 'text' (default), 'json', or 'csv'",
+    ),
+    strict_schema: bool = typer.Option(
+        False,
+        "--strict-schema",
+        help="Fail if any column's table cannot be identified during schema extraction "
+        "(requires --resolve-schema)",
+    ),
 ) -> None:
     """
     Build a lineage graph from SQL files.
```
```diff
@@ -1036,6 +1065,37 @@ def graph_build(
     dialect = dialect or config.dialect or "spark"
     templater = templater or config.templater  # None means no templating
     no_star = no_star or config.no_star or False
+    resolve_schema = resolve_schema or config.resolve_schema or False
+    strict_schema = strict_schema or config.strict_schema or False
+
+    if strict_schema and not resolve_schema:
+        err_console.print("[red]Error:[/red] --strict-schema requires --resolve-schema")
+        raise typer.Exit(1)
+
+    if catalog_type and not resolve_schema:
+        err_console.print("[red]Error:[/red] --catalog-type requires --resolve-schema")
+        raise typer.Exit(1)
+
+    # Resolve dump_schema options from config
+    dump_schema = dump_schema or (
+        Path(config.dump_schema) if config.dump_schema else None
+    )
+    dump_schema_format = dump_schema_format or config.dump_schema_format or "text"
+
+    if dump_schema and not resolve_schema:
+        err_console.print("[red]Error:[/red] --dump-schema requires --resolve-schema")
+        raise typer.Exit(1)
+
+    if dump_schema_format not in ("text", "json", "csv"):
+        err_console.print(
+            f"[red]Error:[/red] Invalid --dump-schema-format '{dump_schema_format}'. "
+            "Use 'text', 'json', or 'csv'."
+        )
+        raise typer.Exit(1)
+
+    # Only inherit catalog_type from config when resolve_schema is active
+    if resolve_schema and not catalog_type:
+        catalog_type = config.catalog_type
 
     # Validate and convert node format to enum
     try:
```
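The new options follow the same CLI-over-config precedence already used for `no_star` above. A hypothetical helper (`resolve_flag` is not part of the package) makes the rule explicit:

```python
from typing import Optional

def resolve_flag(cli_value: bool, config_value: Optional[bool]) -> bool:
    # A flag passed on the command line wins; otherwise fall back to the
    # config file; otherwise the option is off. Because `or` is used, an
    # unset CLI flag (False) and an explicitly disabled one are identical,
    # which is why these options default to False rather than None.
    return cli_value or config_value or False

assert resolve_flag(True, None) is True    # --resolve-schema on the CLI
assert resolve_flag(False, True) is True   # enabled via sqlglider.toml
assert resolve_flag(False, None) is False  # default
```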
{sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/cli.py (continued)

```diff
@@ -1088,32 +1148,75 @@ def graph_build(
         sql_preprocessor = _preprocess
 
     try:
+        # Build catalog config from config file if available
+        catalog_config_dict = None
+        if catalog_type and config.catalog:
+            provider_config = getattr(config.catalog, catalog_type, None)
+            if provider_config:
+                catalog_config_dict = provider_config.model_dump(exclude_none=True)
+
         builder = GraphBuilder(
             node_format=node_format_enum,
             dialect=dialect,
             sql_preprocessor=sql_preprocessor,
             no_star=no_star,
+            resolve_schema=resolve_schema,
+            catalog_type=catalog_type,
+            catalog_config=catalog_config_dict,
+            strict_schema=strict_schema,
         )
 
-        # Process manifest if provided
-
-        builder.add_manifest(manifest, dialect=dialect)
-
-        # Process paths - collect all files first for progress tracking
+        # Collect file paths for schema extraction
+        path_files: list[Path] = []
         if paths:
-            all_files: list[Path] = []
             for path in paths:
                 if path.is_dir():
                     pattern = f"**/{glob_pattern}" if recursive else glob_pattern
-                    all_files.extend(
+                    path_files.extend(
                         f for f in sorted(path.glob(pattern)) if f.is_file()
                     )
                 elif path.is_file():
-                    all_files.append(path)
+                    path_files.append(path)
                 else:
                     err_console.print(f"[red]Error:[/red] Path not found: {path}")
                     raise typer.Exit(1)
-            builder.add_files(all_files, dialect=dialect)
+
+        manifest_files: list[Path] = []
+        if manifest:
+            from sqlglider.graph.models import Manifest
+
+            manifest_data = Manifest.from_csv(manifest)
+            base_dir = manifest.parent
+            for entry in manifest_data.entries:
+                file_path = Path(entry.file_path)
+                if not file_path.is_absolute():
+                    file_path = (base_dir / entry.file_path).resolve()
+                manifest_files.append(file_path)
+
+        # Extract schema upfront if requested, then dump before graph building
+        all_files = manifest_files + path_files
+        if resolve_schema and all_files:
+            builder.extract_schemas(all_files, dialect=dialect)
+
+        if dump_schema:
+            from sqlglider.graph.formatters import format_schema
+
+            schema_content = format_schema(
+                builder.resolved_schema, dump_schema_format
+            )
+            dump_schema.write_text(schema_content, encoding="utf-8")
+            console.print(
+                f"[green]Schema dumped to {dump_schema} "
+                f"({len(builder.resolved_schema)} table(s))[/green]"
+            )
+
+        # Process manifest if provided
+        if manifest:
+            builder.add_manifest(manifest, dialect=dialect)
+
+        # Process path-based files
+        if path_files:
+            builder.add_files(path_files, dialect=dialect)
 
         # Build and save graph
         graph = builder.build()
@@ -1124,6 +1227,10 @@ def graph_build(
             f"({graph.metadata.total_nodes} nodes, {graph.metadata.total_edges} edges)"
         )
 
+    except SchemaResolutionError as e:
+        err_console.print(f"[red]Error:[/red] {e}")
+        raise typer.Exit(1)
+
     except FileNotFoundError as e:
         err_console.print(f"[red]Error:[/red] {e}")
         raise typer.Exit(1)
```
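`format_schema` comes from the new `src/sqlglider/graph/formatters.py` (+98 lines, not shown in this diff). The call shape below matches its use in `graph_build` above; the example schema dict and whatever the three formats actually print are assumptions:

```python
from sqlglider.graph.formatters import format_schema

# Shape matches GraphBuilder.resolved_schema: table -> {column -> type},
# with "UNKNOWN" used when only the column name could be inferred.
resolved = {
    "stg_orders": {"order_id": "UNKNOWN", "amount": "UNKNOWN"},
    "raw_orders": {"order_id": "BIGINT", "amount": "DECIMAL(10,2)"},
}

for fmt in ("text", "json", "csv"):  # the three formats the CLI accepts
    print(format_schema(resolved, fmt))
```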
{sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/builder.py

```diff
@@ -16,8 +16,9 @@ from sqlglider.graph.models import (
     LineageGraph,
     Manifest,
 )
-from sqlglider.lineage.analyzer import LineageAnalyzer
+from sqlglider.lineage.analyzer import LineageAnalyzer, SchemaResolutionError
 from sqlglider.utils.file_utils import read_sql_file
+from sqlglider.utils.schema import parse_ddl_to_schema
 
 console = Console(stderr=True)
 
```
```diff
@@ -34,6 +35,10 @@ class GraphBuilder:
         dialect: str = "spark",
         sql_preprocessor: Optional[SqlPreprocessor] = None,
         no_star: bool = False,
+        resolve_schema: bool = False,
+        catalog_type: Optional[str] = None,
+        catalog_config: Optional[Dict[str, object]] = None,
+        strict_schema: bool = False,
     ):
         """
         Initialize the graph builder.
```
```diff
@@ -45,16 +50,31 @@ class GraphBuilder:
                 Takes (sql: str, file_path: Path) and returns processed SQL.
                 Useful for templating (e.g., Jinja2 rendering).
             no_star: If True, fail when SELECT * cannot be resolved to columns
+            resolve_schema: If True, run a schema extraction pass across all
+                files before lineage analysis so that schema from any file is
+                available when analyzing every other file.
+            catalog_type: Optional catalog provider name (e.g. "databricks").
+                When set together with resolve_schema, DDL is pulled from the
+                catalog for tables whose schema could not be inferred from files.
+            catalog_config: Optional provider-specific configuration dict
+                passed to the catalog's configure() method.
+            strict_schema: If True, fail during schema extraction when an
+                unqualified column cannot be attributed to a table.
         """
         self.node_format = node_format
         self.dialect = dialect
         self.sql_preprocessor = sql_preprocessor
         self.no_star = no_star
+        self.resolve_schema = resolve_schema
+        self.catalog_type = catalog_type
+        self.catalog_config = catalog_config
+        self.strict_schema = strict_schema
         self.graph: rx.PyDiGraph = rx.PyDiGraph()
         self._node_index_map: Dict[str, int] = {}  # identifier -> rustworkx node index
         self._source_files: Set[str] = set()
         self._edge_set: Set[tuple] = set()  # (source, target) for dedup
         self._skipped_files: List[tuple[str, str]] = []  # (file_path, reason)
+        self._resolved_schema: Dict[str, Dict[str, str]] = {}  # accumulated schema
 
     def add_file(
         self,
```
```diff
@@ -86,7 +106,10 @@ class GraphBuilder:
             sql_content = self.sql_preprocessor(sql_content, file_path)
 
         analyzer = LineageAnalyzer(
-            sql_content, dialect=file_dialect, no_star=self.no_star
+            sql_content,
+            dialect=file_dialect,
+            no_star=self.no_star,
+            schema=self._resolved_schema if self._resolved_schema else None,
         )
         results = analyzer.analyze_queries(level=AnalysisLevel.COLUMN)
 
```
```diff
@@ -209,23 +232,28 @@ class GraphBuilder:
             entry_dialect = entry.dialect or dialect or self.dialect
             files_with_dialects.append((file_path, entry_dialect))
 
-        (17 lines removed; their content is not rendered in this diff view)
+        if not files_with_dialects:
+            return self
+
+        # Two-pass schema resolution (skip if already resolved)
+        if self.resolve_schema and not self._resolved_schema:
+            file_paths_only = [fp for fp, _ in files_with_dialects]
+            self.extract_schemas(file_paths_only, dialect)
+
+        total = len(files_with_dialects)
+        description = "Pass 2: Analyzing lineage" if self.resolve_schema else "Parsing"
+        with Progress(
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            TaskProgressColumn(),
+            console=console,
+            transient=False,
+        ) as progress:
+            task = progress.add_task(description, total=total)
+            for i, (file_path, file_dialect) in enumerate(files_with_dialects, start=1):
+                console.print(f"Parsing file {i}/{total}: {file_path.name}")
+                self.add_file(file_path, file_dialect)
+                progress.advance(task)
 
         return self
 
```
```diff
@@ -249,8 +277,15 @@ class GraphBuilder:
         if not file_paths:
             return self
 
+        # Two-pass schema resolution (skip if already resolved)
+        if self.resolve_schema and not self._resolved_schema:
+            self.extract_schemas(file_paths, dialect)
+
         if show_progress:
             total = len(file_paths)
+            description = (
+                "Pass 2: Analyzing lineage" if self.resolve_schema else "Parsing"
+            )
             with Progress(
                 TextColumn("[progress.description]{task.description}"),
                 BarColumn(),
@@ -258,7 +293,7 @@ class GraphBuilder:
                 console=console,
                 transient=False,
             ) as progress:
-                task = progress.add_task(
+                task = progress.add_task(description, total=total)
                 for i, file_path in enumerate(file_paths, start=1):
                     console.print(f"Parsing file {i}/{total}: {file_path.name}")
                     self.add_file(file_path, dialect)
```
```diff
@@ -268,6 +303,157 @@ class GraphBuilder:
                 self.add_file(file_path, dialect)
         return self
 
+    def extract_schemas(
+        self,
+        file_paths: List[Path],
+        dialect: Optional[str] = None,
+    ) -> Dict[str, Dict[str, str]]:
+        """Run schema extraction pass and optionally fill from catalog.
+
+        Call this before add_files/add_manifest to resolve schema upfront.
+        The resolved schema is stored internally and also returned.
+
+        Args:
+            file_paths: SQL files to extract schema from
+            dialect: SQL dialect override
+
+        Returns:
+            Resolved schema dict
+        """
+        console.print("[blue]Pass 1: Extracting schema from files[/blue]")
+        self._resolved_schema = self._extract_schemas(file_paths, dialect)
+        if self.catalog_type:
+            self._resolved_schema = self._fill_schema_from_catalog(
+                self._resolved_schema, file_paths, dialect
+            )
+        console.print(
+            f"[blue]Schema resolved for {len(self._resolved_schema)} table(s)[/blue]"
+        )
+        return self._resolved_schema.copy()
+
+    def _extract_schemas(
+        self,
+        file_paths: List[Path],
+        dialect: Optional[str] = None,
+    ) -> Dict[str, Dict[str, str]]:
+        """Run schema extraction pass across all files.
+
+        Parses each file and extracts schema from CREATE TABLE/VIEW
+        statements without performing lineage analysis.
+
+        Args:
+            file_paths: SQL files to extract schema from
+            dialect: SQL dialect override
+
+        Returns:
+            Accumulated schema dict from all files
+        """
+        schema: Dict[str, Dict[str, str]] = {}
+        total = len(file_paths)
+        with Progress(
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            TaskProgressColumn(),
+            console=console,
+            transient=False,
+        ) as progress:
+            task = progress.add_task("Pass 1: Extracting schema", total=total)
+            for i, file_path in enumerate(file_paths, start=1):
+                console.print(f"Extracting schema {i}/{total}: {file_path.name}")
+                file_dialect = dialect or self.dialect
+                try:
+                    sql_content = read_sql_file(file_path)
+                    if self.sql_preprocessor:
+                        sql_content = self.sql_preprocessor(sql_content, file_path)
+                    analyzer = LineageAnalyzer(
+                        sql_content,
+                        dialect=file_dialect,
+                        schema=schema,
+                        strict_schema=self.strict_schema,
+                    )
+                    file_schema = analyzer.extract_schema_only()
+                    schema.update(file_schema)
+                except SchemaResolutionError:
+                    raise
+                except Exception:
+                    # Schema extraction failures are non-fatal; the file
+                    # will be reported during the lineage pass if it also fails.
+                    pass
+                progress.advance(task)
+        return schema
+
+    def _fill_schema_from_catalog(
+        self,
+        schema: Dict[str, Dict[str, str]],
+        file_paths: List[Path],
+        dialect: Optional[str] = None,
+    ) -> Dict[str, Dict[str, str]]:
+        """Pull DDL from catalog for tables not yet in schema.
+
+        Extracts all table names referenced across the files, identifies
+        those missing from the schema, and fetches their DDL from the
+        configured catalog provider.
+
+        Args:
+            schema: Schema dict already populated from file extraction
+            file_paths: SQL files to scan for table references
+            dialect: SQL dialect override
+
+        Returns:
+            Updated schema dict with catalog-sourced entries added
+        """
+        from sqlglider.catalog import get_catalog
+
+        catalog = get_catalog(self.catalog_type)  # type: ignore[arg-type]
+        if self.catalog_config:
+            catalog.configure(self.catalog_config)
+
+        # Collect all referenced table names across files
+        all_tables: Set[str] = set()
+        for file_path in file_paths:
+            file_dialect = dialect or self.dialect
+            try:
+                sql_content = read_sql_file(file_path)
+                if self.sql_preprocessor:
+                    sql_content = self.sql_preprocessor(sql_content, file_path)
+                analyzer = LineageAnalyzer(sql_content, dialect=file_dialect)
+                tables_results = analyzer.analyze_tables()
+                for result in tables_results:
+                    for table_info in result.tables:
+                        # Skip CTEs — they don't exist in catalogs
+                        from sqlglider.lineage.analyzer import ObjectType
+
+                        if table_info.object_type != ObjectType.CTE:
+                            all_tables.add(table_info.name)
+            except Exception:
+                pass
+
+        # Find tables missing from schema
+        missing = [t for t in all_tables if t not in schema]
+        if not missing:
+            return schema
+
+        console.print(
+            f"[blue]Pulling DDL from {self.catalog_type} "
+            f"for {len(missing)} table(s)...[/blue]"
+        )
+
+        ddl_results = catalog.get_ddl_batch(missing)
+        file_dialect = dialect or self.dialect
+        for table_name, ddl in ddl_results.items():
+            if ddl.startswith("ERROR:"):
+                console.print(
+                    f"[yellow]Warning:[/yellow] Could not pull DDL "
+                    f"for {table_name}: {ddl}"
+                )
+                continue
+            parsed_schema = parse_ddl_to_schema(ddl, dialect=file_dialect)
+            for name, cols in parsed_schema.items():
+                if name not in schema:
+                    schema[name] = cols
+
+        return schema
+
     def _ensure_node(
         self,
         identifier: str,
```
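The merge rule at the end of `_fill_schema_from_catalog` is what enforces the "file-derived schema always wins" decision from the plan. Isolated, with example table and type values, it is just:

```python
# Catalog entries only fill gaps; anything already extracted from files
# is never overwritten by catalog-sourced DDL.
file_schema = {"stg_orders": {"order_id": "UNKNOWN", "amount": "UNKNOWN"}}
catalog_schema = {
    "stg_orders": {"order_id": "BIGINT"},  # ignored: the file version wins
    "raw_orders": {"order_id": "BIGINT", "amount": "DECIMAL(10,2)"},
}
for name, cols in catalog_schema.items():
    if name not in file_schema:
        file_schema[name] = cols

assert file_schema["stg_orders"] == {"order_id": "UNKNOWN", "amount": "UNKNOWN"}
assert "raw_orders" in file_schema
```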
{sql_glider-0.1.11 → sql_glider-0.1.13}/src/sqlglider/graph/builder.py (continued)

```diff
@@ -348,6 +534,11 @@ class GraphBuilder:
         """Get mapping from node identifiers to rustworkx indices."""
         return self._node_index_map.copy()
 
+    @property
+    def resolved_schema(self) -> Dict[str, Dict[str, str]]:
+        """Get the resolved schema dictionary from schema extraction pass."""
+        return self._resolved_schema.copy()
+
     @property
     def skipped_files(self) -> List[tuple[str, str]]:
         """Get list of files that were skipped during graph building."""
```