sqlglot 26.29.0__tar.gz → 26.31.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlglot-26.29.0 → sqlglot-26.31.0}/CHANGELOG.md +69 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/PKG-INFO +2 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/README.md +1 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/_version.py +2 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/__init__.py +2 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/bigquery.py +7 -4
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/clickhouse.py +2 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/databricks.py +2 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/dialect.py +18 -4
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/duckdb.py +1 -0
- sqlglot-26.31.0/sqlglot/dialects/exasol.py +46 -0
- sqlglot-26.31.0/sqlglot/dialects/fabric.py +115 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/hive.py +1 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/oracle.py +15 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/presto.py +3 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/prql.py +5 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/redshift.py +11 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/snowflake.py +4 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/spark.py +17 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/sqlite.py +4 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/tsql.py +7 -5
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/expressions.py +11 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/generator.py +3 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/scope.py +13 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/parser.py +99 -77
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/transforms.py +15 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.egg-info/PKG-INFO +2 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.egg-info/SOURCES.txt +4 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_bigquery.py +34 -16
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_clickhouse.py +5 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_databricks.py +7 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_dialect.py +5 -3
- sqlglot-26.31.0/tests/dialects/test_exasol.py +70 -0
- sqlglot-26.31.0/tests/dialects/test_fabric.py +65 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_hive.py +4 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_mysql.py +8 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_oracle.py +16 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_pipe_syntax.py +29 -46
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_presto.py +38 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_snowflake.py +44 -2
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_spark.py +24 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_sqlite.py +4 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_trino.py +16 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_tsql.py +15 -9
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/identity.sql +3 -1
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/annotate_functions.sql +9 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns.sql +3 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_tables.sql +3 -3
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/pretty.sql +6 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_optimizer.py +4 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/.gitignore +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/.gitpod.yml +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/.pre-commit-config.yaml +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/CONTRIBUTING.md +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/LICENSE +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/MANIFEST.in +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/Makefile +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/pyproject.toml +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/setup.cfg +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/setup.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/__init__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/__main__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/_typing.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/athena.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/doris.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/drill.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/druid.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/dune.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/materialize.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/mysql.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/postgres.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/risingwave.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/spark2.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/starrocks.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/tableau.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/teradata.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/dialects/trino.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/diff.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/errors.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/executor/__init__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/executor/context.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/executor/env.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/executor/python.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/executor/table.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/helper.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/jsonpath.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/lineage.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/__init__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/annotate_types.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/canonicalize.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_ctes.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_joins.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_subqueries.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/isolate_table_selects.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/merge_subqueries.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/normalize.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/normalize_identifiers.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/optimize_joins.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/optimizer.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/pushdown_predicates.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/pushdown_projections.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify_columns.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify_tables.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/simplify.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/optimizer/unnest_subqueries.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/planner.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/py.typed +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/schema.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/serde.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/time.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/tokens.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot/trie.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.egg-info/dependency_links.txt +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.egg-info/requires.txt +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.egg-info/top_level.txt +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglot.png +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/Cargo.lock +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/Cargo.toml +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/benches/dialect_settings.json +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/benches/long.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/benches/token_type_settings.json +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/benches/tokenizer_dialect_settings.json +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/benches/tokenizer_settings.json +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/pyproject.toml +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/src/lib.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/src/settings.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/src/token.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/src/tokenizer.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/sqlglotrs/src/trie.rs +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/__init__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/__init__.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_athena.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_doris.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_drill.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_druid.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_duckdb.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_dune.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_materialize.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_postgres.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_prql.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_redshift.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_risingwave.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_starrocks.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_tableau.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/dialects/test_teradata.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/jsonpath/LICENSE +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/jsonpath/cts.json +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/annotate_types.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/canonicalize.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_ctes.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_joins.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_subqueries.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/isolate_table_selects.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/merge_subqueries.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/normalize.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/normalize_identifiers.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/optimize_joins.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/optimizer.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_cte_alias_columns.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_predicates.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_projections.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns__invalid.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns__with_invisible.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns_ddl.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/quote_identifiers.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/simplify.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/call_center.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_page.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_returns.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_sales.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer_address.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer_demographics.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/date_dim.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/household_demographics.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/income_band.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/inventory.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/item.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/promotion.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/reason.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/ship_mode.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store_returns.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store_sales.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/time_dim.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/tpc-ds.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/warehouse.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_page.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_returns.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_sales.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_site.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/customer.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/lineitem.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/nation.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/orders.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/part.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/partsupp.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/region.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/supplier.csv.gz +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/tpc-h.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/unnest_subqueries.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/fixtures/partial.sql +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/gen_fixtures.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/helpers.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_build.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_diff.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_docs.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_executor.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_expressions.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_generator.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_helper.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_jsonpath.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_lineage.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_parser.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_schema.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_serde.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_time.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_tokens.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_transforms.py +0 -0
- {sqlglot-26.29.0 → sqlglot-26.31.0}/tests/test_transpile.py +0 -0
@@ -1,6 +1,73 @@
|
|
1
1
|
Changelog
|
2
2
|
=========
|
3
3
|
|
4
|
+
## [v26.30.0] - 2025-06-21
|
5
|
+
### :boom: BREAKING CHANGES
|
6
|
+
- due to [`d3dc761`](https://github.com/tobymao/sqlglot/commit/d3dc761393146357a5d20c4d7992fd2a1ae5e6e2) - change comma to cross join when precedence is the same for all join types *(PR [#5240](https://github.com/tobymao/sqlglot/pull/5240) by [@georgesittas](https://github.com/georgesittas))*:
|
7
|
+
|
8
|
+
change comma to cross join when precedence is the same for all join types (#5240)
|
9
|
+
|
10
|
+
- due to [`e7c217e`](https://github.com/tobymao/sqlglot/commit/e7c217ef08e5811e7dad2b3d26dbaa9f02114e38) - transpile from/to dbms_random.value *(PR [#5242](https://github.com/tobymao/sqlglot/pull/5242) by [@georgesittas](https://github.com/georgesittas))*:
|
11
|
+
|
12
|
+
transpile from/to dbms_random.value (#5242)
|
13
|
+
|
14
|
+
- due to [`31814cd`](https://github.com/tobymao/sqlglot/commit/31814cddb0cf65caf29fbc45a31a9c865b7991c7) - cast constructed timestamp literal to zone-aware type if needed *(PR [#5253](https://github.com/tobymao/sqlglot/pull/5253) by [@georgesittas](https://github.com/georgesittas))*:
|
15
|
+
|
16
|
+
cast constructed timestamp literal to zone-aware type if needed (#5253)
|
17
|
+
|
18
|
+
|
19
|
+
### :sparkles: New Features
|
20
|
+
- [`e7c217e`](https://github.com/tobymao/sqlglot/commit/e7c217ef08e5811e7dad2b3d26dbaa9f02114e38) - **oracle**: transpile from/to dbms_random.value *(PR [#5242](https://github.com/tobymao/sqlglot/pull/5242) by [@georgesittas](https://github.com/georgesittas))*
|
21
|
+
- :arrow_lower_right: *addresses issue [#5241](https://github.com/tobymao/sqlglot/issues/5241) opened by [@Akshat-2512](https://github.com/Akshat-2512)*
|
22
|
+
- [`0d19544`](https://github.com/tobymao/sqlglot/commit/0d19544317c1056b17fb089d4be9b5bddfe6feb3) - add Microsoft Fabric dialect, a case sensitive version of TSQL *(PR [#5247](https://github.com/tobymao/sqlglot/pull/5247) by [@mattiasthalen](https://github.com/mattiasthalen))*
|
23
|
+
- [`249dbc9`](https://github.com/tobymao/sqlglot/commit/249dbc906adc6b20932dc8efe83f6f4d23ef8c1e) - **parser**: start with SELECT and nested pipe syntax *(PR [#5248](https://github.com/tobymao/sqlglot/pull/5248) by [@geooo109](https://github.com/geooo109))*
|
24
|
+
- [`f5b5b93`](https://github.com/tobymao/sqlglot/commit/f5b5b9338eb92b7aa2c9b4c92c6138c2c05e1c40) - **fabric**: implement type mappings for unsupported Fabric types *(PR [#5249](https://github.com/tobymao/sqlglot/pull/5249) by [@mattiasthalen](https://github.com/mattiasthalen))*
|
25
|
+
- [`78fcea1`](https://github.com/tobymao/sqlglot/commit/78fcea13b5eb1734a15a254875bc80ad8063b0b0) - **spark, databricks**: parse brackets as placeholder *(PR [#5256](https://github.com/tobymao/sqlglot/pull/5256) by [@geooo109](https://github.com/geooo109))*
|
26
|
+
- :arrow_lower_right: *addresses issue [#5251](https://github.com/tobymao/sqlglot/issues/5251) opened by [@aersam](https://github.com/aersam)*
|
27
|
+
- [`7d71387`](https://github.com/tobymao/sqlglot/commit/7d7138780db82e7a75949d29282b944e739ad99d) - **fabric**: Add precision cap to temporal data types *(PR [#5250](https://github.com/tobymao/sqlglot/pull/5250) by [@mattiasthalen](https://github.com/mattiasthalen))*
|
28
|
+
- [`e8cf793`](https://github.com/tobymao/sqlglot/commit/e8cf79305d398f25640ef3c07dd8b32997cb0167) - **duckdb**: Transpile Snowflake's TO_CHAR if format is in Snowflake.TIME_MAPPING *(PR [#5257](https://github.com/tobymao/sqlglot/pull/5257) by [@VaggelisD](https://github.com/VaggelisD))*
|
29
|
+
- :arrow_lower_right: *addresses issue [#5255](https://github.com/tobymao/sqlglot/issues/5255) opened by [@kyle-cheung](https://github.com/kyle-cheung)*
|
30
|
+
|
31
|
+
### :bug: Bug Fixes
|
32
|
+
- [`d3dc761`](https://github.com/tobymao/sqlglot/commit/d3dc761393146357a5d20c4d7992fd2a1ae5e6e2) - change comma to cross join when precedence is the same for all join types *(PR [#5240](https://github.com/tobymao/sqlglot/pull/5240) by [@georgesittas](https://github.com/georgesittas))*
|
33
|
+
- [`31814cd`](https://github.com/tobymao/sqlglot/commit/31814cddb0cf65caf29fbc45a31a9c865b7991c7) - **presto**: cast constructed timestamp literal to zone-aware type if needed *(PR [#5253](https://github.com/tobymao/sqlglot/pull/5253) by [@georgesittas](https://github.com/georgesittas))*
|
34
|
+
- :arrow_lower_right: *fixes issue [#5252](https://github.com/tobymao/sqlglot/issues/5252) opened by [@agni-sairent](https://github.com/agni-sairent)*
|
35
|
+
|
36
|
+
|
37
|
+
## [v26.29.0] - 2025-06-17
|
38
|
+
### :boom: BREAKING CHANGES
|
39
|
+
- due to [`4f42d95`](https://github.com/tobymao/sqlglot/commit/4f42d951363f8c43a4c414dc21d0505d9c8e48bf) - Normalize date parts in `exp.Extract` generation *(PR [#5229](https://github.com/tobymao/sqlglot/pull/5229) by [@VaggelisD](https://github.com/VaggelisD))*:
|
40
|
+
|
41
|
+
Normalize date parts in `exp.Extract` generation (#5229)
|
42
|
+
|
43
|
+
- due to [`e7e38fe`](https://github.com/tobymao/sqlglot/commit/e7e38fe0e09f9affbff4ffa7023d0161e3a1ee49) - resolve table "columns" in bigquery that produce structs *(PR [#5230](https://github.com/tobymao/sqlglot/pull/5230) by [@georgesittas](https://github.com/georgesittas))*:
|
44
|
+
|
45
|
+
resolve table "columns" in bigquery that produce structs (#5230)
|
46
|
+
|
47
|
+
|
48
|
+
### :sparkles: New Features
|
49
|
+
- [`97f5822`](https://github.com/tobymao/sqlglot/commit/97f58226fc8815b23787b7b8699ea71f58268560) - **parser**: AS pipe syntax *(PR [#5224](https://github.com/tobymao/sqlglot/pull/5224) by [@geooo109](https://github.com/geooo109))*
|
50
|
+
- [`a7e7fee`](https://github.com/tobymao/sqlglot/commit/a7e7feef02a77fe8606f3f482bad91230fa637f4) - **parser**: EXTEND pipe syntax *(PR [#5225](https://github.com/tobymao/sqlglot/pull/5225) by [@geooo109](https://github.com/geooo109))*
|
51
|
+
- [`c1cb9f8`](https://github.com/tobymao/sqlglot/commit/c1cb9f8f682080f7a06c387219d79c6d068b6dbe) - **snowflake**: add autoincrement order clause support *(PR [#5223](https://github.com/tobymao/sqlglot/pull/5223) by [@dmaresma](https://github.com/dmaresma))*
|
52
|
+
- [`91afe4c`](https://github.com/tobymao/sqlglot/commit/91afe4cfd7b3f427e4c0b298075e867b8a1bbe55) - **parser**: TABLESAMPLE pipe syntax *(PR [#5231](https://github.com/tobymao/sqlglot/pull/5231) by [@geooo109](https://github.com/geooo109))*
|
53
|
+
- [`62da84a`](https://github.com/tobymao/sqlglot/commit/62da84acce7f44802dca26a9357a16115e21fabf) - **snowflake**: improve transpilation of unnested object lookup *(PR [#5234](https://github.com/tobymao/sqlglot/pull/5234) by [@georgesittas](https://github.com/georgesittas))*
|
54
|
+
- [`2c60453`](https://github.com/tobymao/sqlglot/commit/2c604537ba83dee74e9ced7e216673ecc70fe487) - **parser**: DROP pipe syntax *(PR [#5226](https://github.com/tobymao/sqlglot/pull/5226) by [@geooo109](https://github.com/geooo109))*
|
55
|
+
- [`9885729`](https://github.com/tobymao/sqlglot/commit/988572954135c68dc021b992c815024ce3debaff) - **parser**: SET pipe syntax *(PR [#5236](https://github.com/tobymao/sqlglot/pull/5236) by [@geooo109](https://github.com/geooo109))*
|
56
|
+
|
57
|
+
### :bug: Bug Fixes
|
58
|
+
- [`df73a79`](https://github.com/tobymao/sqlglot/commit/df73a79a2ca3ba859b8aba5e3d0f6ed269874a63) - **tsql**: Retain limit clause in subquery expression. *(PR [#5227](https://github.com/tobymao/sqlglot/pull/5227) by [@MarcusRisanger](https://github.com/MarcusRisanger))*
|
59
|
+
- [`4f42d95`](https://github.com/tobymao/sqlglot/commit/4f42d951363f8c43a4c414dc21d0505d9c8e48bf) - **duckdb**: Normalize date parts in `exp.Extract` generation *(PR [#5229](https://github.com/tobymao/sqlglot/pull/5229) by [@VaggelisD](https://github.com/VaggelisD))*
|
60
|
+
- :arrow_lower_right: *fixes issue [#5228](https://github.com/tobymao/sqlglot/issues/5228) opened by [@greybeam-bot](https://github.com/greybeam-bot)*
|
61
|
+
- [`1b4c083`](https://github.com/tobymao/sqlglot/commit/1b4c083fff8d7c44bf1dbba28c1225fa1e28c4d2) - **athena**: include Hive string escapes in the tokenizer *(PR [#5233](https://github.com/tobymao/sqlglot/pull/5233) by [@georgesittas](https://github.com/georgesittas))*
|
62
|
+
- :arrow_lower_right: *fixes issue [#5232](https://github.com/tobymao/sqlglot/issues/5232) opened by [@ligfx](https://github.com/ligfx)*
|
63
|
+
- [`e7e38fe`](https://github.com/tobymao/sqlglot/commit/e7e38fe0e09f9affbff4ffa7023d0161e3a1ee49) - **optimizer**: resolve table "columns" in bigquery that produce structs *(PR [#5230](https://github.com/tobymao/sqlglot/pull/5230) by [@georgesittas](https://github.com/georgesittas))*
|
64
|
+
- :arrow_lower_right: *fixes issue [#5207](https://github.com/tobymao/sqlglot/issues/5207) opened by [@Bladieblah](https://github.com/Bladieblah)*
|
65
|
+
- [`781539d`](https://github.com/tobymao/sqlglot/commit/781539d5cbe58142ed6688f1522fc4ed31da0a56) - **duckdb**: Generate correct DETACH syntax if IF EXISTS is set *(PR [#5235](https://github.com/tobymao/sqlglot/pull/5235) by [@erindru](https://github.com/erindru))*
|
66
|
+
|
67
|
+
### :wrench: Chores
|
68
|
+
- [`7dfb578`](https://github.com/tobymao/sqlglot/commit/7dfb5780fb242c82744dc1538077776ac624081e) - Refactor DETACH generation *(PR [#5237](https://github.com/tobymao/sqlglot/pull/5237) by [@VaggelisD](https://github.com/VaggelisD))*
|
69
|
+
|
70
|
+
|
4
71
|
## [v26.28.1] - 2025-06-13
|
5
72
|
### :boom: BREAKING CHANGES
|
6
73
|
- due to [`44297f1`](https://github.com/tobymao/sqlglot/commit/44297f1c5c8c2cb16fe77c318312f417b4281708) - JOIN pipe syntax, Set Operators as CTEs *(PR [#5215](https://github.com/tobymao/sqlglot/pull/5215) by [@geooo109](https://github.com/geooo109))*:
|
@@ -4902,3 +4969,5 @@ Changelog
|
|
4902
4969
|
[v26.26.0]: https://github.com/tobymao/sqlglot/compare/v26.25.3...v26.26.0
|
4903
4970
|
[v26.27.0]: https://github.com/tobymao/sqlglot/compare/v26.26.0...v26.27.0
|
4904
4971
|
[v26.28.1]: https://github.com/tobymao/sqlglot/compare/v26.27.1...v26.28.1
|
4972
|
+
[v26.29.0]: https://github.com/tobymao/sqlglot/compare/v26.28.1...v26.29.0
|
4973
|
+
[v26.30.0]: https://github.com/tobymao/sqlglot/compare/v26.29.0...v26.30.0
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: sqlglot
|
3
|
-
Version: 26.29.0
|
3
|
+
Version: 26.31.0
|
4
4
|
Summary: An easily customizable SQL parser and transpiler
|
5
5
|
Author-email: Toby Mao <toby.mao@gmail.com>
|
6
6
|
License: MIT License
|
@@ -61,7 +61,7 @@ Dynamic: provides-extra
|
|
61
61
|
|
62
62
|

|
63
63
|
|
64
|
-
SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [27 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
|
64
|
+
SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [29 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
|
65
65
|
|
66
66
|
It is a very comprehensive generic SQL parser with a robust [test suite](https://github.com/tobymao/sqlglot/blob/main/tests/). It is also quite [performant](#benchmarks), while being written purely in Python.
|
67
67
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|

|
2
2
|
|
3
|
-
SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [27 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
|
3
|
+
SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [29 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
|
4
4
|
|
5
5
|
It is a very comprehensive generic SQL parser with a robust [test suite](https://github.com/tobymao/sqlglot/blob/main/tests/). It is also quite [performant](#benchmarks), while being written purely in Python.
|
6
6
|
|
@@ -74,6 +74,7 @@ DIALECTS = [
|
|
74
74
|
"Druid",
|
75
75
|
"DuckDB",
|
76
76
|
"Dune",
|
77
|
+
"Fabric",
|
77
78
|
"Hive",
|
78
79
|
"Materialize",
|
79
80
|
"MySQL",
|
@@ -92,6 +93,7 @@ DIALECTS = [
|
|
92
93
|
"Teradata",
|
93
94
|
"Trino",
|
94
95
|
"TSQL",
|
96
|
+
"Exasol",
|
95
97
|
]
|
96
98
|
|
97
99
|
MODULE_BY_DIALECT = {name: name.lower() for name in DIALECTS}
|
@@ -524,6 +524,7 @@ class BigQuery(Dialect):
|
|
524
524
|
PREFIXED_PIVOT_COLUMNS = True
|
525
525
|
LOG_DEFAULTS_TO_LN = True
|
526
526
|
SUPPORTS_IMPLICIT_UNNEST = True
|
527
|
+
JOINS_HAVE_EQUAL_PRECEDENCE = True
|
527
528
|
|
528
529
|
# BigQuery does not allow ASC/DESC to be used as an identifier
|
529
530
|
ID_VAR_TOKENS = parser.Parser.ID_VAR_TOKENS - {TokenType.ASC, TokenType.DESC}
|
@@ -542,7 +543,7 @@ class BigQuery(Dialect):
|
|
542
543
|
"DATE_ADD": build_date_delta_with_interval(exp.DateAdd),
|
543
544
|
"DATE_SUB": build_date_delta_with_interval(exp.DateSub),
|
544
545
|
"DATE_TRUNC": lambda args: exp.DateTrunc(
|
545
|
-
unit=
|
546
|
+
unit=seq_get(args, 1),
|
546
547
|
this=seq_get(args, 0),
|
547
548
|
zone=seq_get(args, 2),
|
548
549
|
),
|
@@ -962,9 +963,6 @@ class BigQuery(Dialect):
|
|
962
963
|
exp.DateSub: date_add_interval_sql("DATE", "SUB"),
|
963
964
|
exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
|
964
965
|
exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
|
965
|
-
exp.DateTrunc: lambda self, e: self.func(
|
966
|
-
"DATE_TRUNC", e.this, e.text("unit"), e.args.get("zone")
|
967
|
-
),
|
968
966
|
exp.FromTimeZone: lambda self, e: self.func(
|
969
967
|
"DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
|
970
968
|
),
|
@@ -1194,6 +1192,11 @@ class BigQuery(Dialect):
|
|
1194
1192
|
"within",
|
1195
1193
|
}
|
1196
1194
|
|
1195
|
+
def datetrunc_sql(self, expression: exp.DateTrunc) -> str:
|
1196
|
+
unit = expression.unit
|
1197
|
+
unit_sql = unit.name if unit.is_string else self.sql(unit)
|
1198
|
+
return self.func("DATE_TRUNC", expression.this, unit_sql, expression.args.get("zone"))
|
1199
|
+
|
1197
1200
|
def mod_sql(self, expression: exp.Mod) -> str:
|
1198
1201
|
this = expression.this
|
1199
1202
|
expr = expression.expression
|
@@ -297,6 +297,7 @@ class ClickHouse(Dialect):
|
|
297
297
|
MODIFIERS_ATTACHED_TO_SET_OP = False
|
298
298
|
INTERVAL_SPANS = False
|
299
299
|
OPTIONAL_ALIAS_TOKEN_CTE = False
|
300
|
+
JOINS_HAVE_EQUAL_PRECEDENCE = True
|
300
301
|
|
301
302
|
FUNCTIONS = {
|
302
303
|
**parser.Parser.FUNCTIONS,
|
@@ -691,6 +692,7 @@ class ClickHouse(Dialect):
|
|
691
692
|
parse_bracket: bool = False,
|
692
693
|
is_db_reference: bool = False,
|
693
694
|
parse_partition: bool = False,
|
695
|
+
consume_pipe: bool = False,
|
694
696
|
) -> t.Optional[exp.Expression]:
|
695
697
|
this = super()._parse_table(
|
696
698
|
schema=schema,
|
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
|
|
9
9
|
build_date_delta,
|
10
10
|
timestamptrunc_sql,
|
11
11
|
build_formatted_time,
|
12
|
+
groupconcat_sql,
|
12
13
|
)
|
13
14
|
from sqlglot.dialects.spark import Spark
|
14
15
|
from sqlglot.tokens import TokenType
|
@@ -87,6 +88,7 @@ class Databricks(Spark):
|
|
87
88
|
e.this,
|
88
89
|
),
|
89
90
|
exp.DatetimeTrunc: timestamptrunc_sql(),
|
91
|
+
exp.GroupConcat: groupconcat_sql,
|
90
92
|
exp.Select: transforms.preprocess(
|
91
93
|
[
|
92
94
|
transforms.eliminate_distinct_on,
|
@@ -77,6 +77,7 @@ class Dialects(str, Enum):
|
|
77
77
|
DRUID = "druid"
|
78
78
|
DUCKDB = "duckdb"
|
79
79
|
DUNE = "dune"
|
80
|
+
FABRIC = "fabric"
|
80
81
|
HIVE = "hive"
|
81
82
|
MATERIALIZE = "materialize"
|
82
83
|
MYSQL = "mysql"
|
@@ -95,6 +96,7 @@ class Dialects(str, Enum):
|
|
95
96
|
TERADATA = "teradata"
|
96
97
|
TRINO = "trino"
|
97
98
|
TSQL = "tsql"
|
99
|
+
EXASOL = "exasol"
|
98
100
|
|
99
101
|
|
100
102
|
class NormalizationStrategy(str, AutoName):
|
@@ -699,6 +701,9 @@ class Dialect(metaclass=_Dialect):
|
|
699
701
|
exp.TimeAdd,
|
700
702
|
exp.TimeSub,
|
701
703
|
},
|
704
|
+
exp.DataType.Type.TIMESTAMPTZ: {
|
705
|
+
exp.CurrentTimestampLTZ,
|
706
|
+
},
|
702
707
|
exp.DataType.Type.TIMESTAMP: {
|
703
708
|
exp.CurrentTimestamp,
|
704
709
|
exp.StrToTime,
|
@@ -1905,14 +1910,23 @@ def groupconcat_sql(
|
|
1905
1910
|
|
1906
1911
|
|
1907
1912
|
def build_timetostr_or_tochar(args: t.List, dialect: Dialect) -> exp.TimeToStr | exp.ToChar:
|
1908
|
-
|
1913
|
+
if len(args) == 2:
|
1914
|
+
this = args[0]
|
1915
|
+
if not this.type:
|
1916
|
+
from sqlglot.optimizer.annotate_types import annotate_types
|
1909
1917
|
|
1910
|
-
|
1911
|
-
from sqlglot.optimizer.annotate_types import annotate_types
|
1918
|
+
annotate_types(this, dialect=dialect)
|
1912
1919
|
|
1913
|
-
annotate_types(this, dialect=dialect)
|
1914
1920
|
if this.is_type(*exp.DataType.TEMPORAL_TYPES):
|
1915
1921
|
dialect_name = dialect.__class__.__name__.lower()
|
1916
1922
|
return build_formatted_time(exp.TimeToStr, dialect_name, default=True)(args)
|
1917
1923
|
|
1918
1924
|
return exp.ToChar.from_arg_list(args)
|
1925
|
+
|
1926
|
+
|
1927
|
+
def build_replace_with_optional_replacement(args: t.List) -> exp.Replace:
|
1928
|
+
return exp.Replace(
|
1929
|
+
this=seq_get(args, 0),
|
1930
|
+
expression=seq_get(args, 1),
|
1931
|
+
replacement=seq_get(args, 2) or exp.Literal.string(""),
|
1932
|
+
)
|
@@ -508,6 +508,7 @@ class DuckDB(Dialect):
|
|
508
508
|
parse_bracket: bool = False,
|
509
509
|
is_db_reference: bool = False,
|
510
510
|
parse_partition: bool = False,
|
511
|
+
consume_pipe: bool = False,
|
511
512
|
) -> t.Optional[exp.Expression]:
|
512
513
|
# DuckDB supports prefix aliases, e.g. FROM foo: bar
|
513
514
|
if self._next and self._next.token_type == TokenType.COLON:
|
@@ -0,0 +1,46 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
from sqlglot import exp, generator
|
3
|
+
from sqlglot.dialects.dialect import Dialect, rename_func
|
4
|
+
|
5
|
+
|
6
|
+
class Exasol(Dialect):
|
7
|
+
class Generator(generator.Generator):
|
8
|
+
# https://docs.exasol.com/db/latest/sql_references/data_types/datatypedetails.htm#StringDataType
|
9
|
+
STRING_TYPE_MAPPING = {
|
10
|
+
exp.DataType.Type.BLOB: "VARCHAR",
|
11
|
+
exp.DataType.Type.LONGBLOB: "VARCHAR",
|
12
|
+
exp.DataType.Type.LONGTEXT: "VARCHAR",
|
13
|
+
exp.DataType.Type.MEDIUMBLOB: "VARCHAR",
|
14
|
+
exp.DataType.Type.MEDIUMTEXT: "VARCHAR",
|
15
|
+
exp.DataType.Type.TINYBLOB: "VARCHAR",
|
16
|
+
exp.DataType.Type.TINYTEXT: "VARCHAR",
|
17
|
+
exp.DataType.Type.TEXT: "VARCHAR",
|
18
|
+
exp.DataType.Type.VARBINARY: "VARCHAR",
|
19
|
+
}
|
20
|
+
|
21
|
+
# https://docs.exasol.com/db/latest/sql_references/data_types/datatypealiases.htm
|
22
|
+
TYPE_MAPPING = {
|
23
|
+
**generator.Generator.TYPE_MAPPING,
|
24
|
+
**STRING_TYPE_MAPPING,
|
25
|
+
exp.DataType.Type.TINYINT: "SMALLINT",
|
26
|
+
exp.DataType.Type.MEDIUMINT: "INT",
|
27
|
+
exp.DataType.Type.DECIMAL32: "DECIMAL",
|
28
|
+
exp.DataType.Type.DECIMAL64: "DECIMAL",
|
29
|
+
exp.DataType.Type.DECIMAL128: "DECIMAL",
|
30
|
+
exp.DataType.Type.DECIMAL256: "DECIMAL",
|
31
|
+
exp.DataType.Type.DATETIME: "TIMESTAMP",
|
32
|
+
}
|
33
|
+
|
34
|
+
def datatype_sql(self, expression: exp.DataType) -> str:
|
35
|
+
# Exasol supports a fixed default precision of 3 for TIMESTAMP WITH LOCAL TIME ZONE
|
36
|
+
# and does not allow specifying a different custom precision
|
37
|
+
if expression.is_type(exp.DataType.Type.TIMESTAMPLTZ):
|
38
|
+
return "TIMESTAMP WITH LOCAL TIME ZONE"
|
39
|
+
|
40
|
+
return super().datatype_sql(expression)
|
41
|
+
|
42
|
+
TRANSFORMS = {
|
43
|
+
**generator.Generator.TRANSFORMS,
|
44
|
+
# https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/mod.htm
|
45
|
+
exp.Mod: rename_func("MOD"),
|
46
|
+
}
|
@@ -0,0 +1,115 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from sqlglot import exp
|
4
|
+
from sqlglot.dialects.dialect import NormalizationStrategy
|
5
|
+
from sqlglot.dialects.tsql import TSQL
|
6
|
+
from sqlglot.tokens import TokenType
|
7
|
+
|
8
|
+
|
9
|
+
class Fabric(TSQL):
|
10
|
+
"""
|
11
|
+
Microsoft Fabric Data Warehouse dialect that inherits from T-SQL.
|
12
|
+
|
13
|
+
Microsoft Fabric is a cloud-based analytics platform that provides a unified
|
14
|
+
data warehouse experience. While it shares much of T-SQL's syntax, it has
|
15
|
+
specific differences and limitations that this dialect addresses.
|
16
|
+
|
17
|
+
Key differences from T-SQL:
|
18
|
+
- Case-sensitive identifiers (unlike T-SQL which is case-insensitive)
|
19
|
+
- Limited data type support with mappings to supported alternatives
|
20
|
+
- Temporal types (DATETIME2, DATETIMEOFFSET, TIME) limited to 6 digits precision
|
21
|
+
- Certain legacy types (MONEY, SMALLMONEY, etc.) are not supported
|
22
|
+
- Unicode types (NCHAR, NVARCHAR) are mapped to non-unicode equivalents
|
23
|
+
|
24
|
+
References:
|
25
|
+
- Data Types: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
|
26
|
+
- T-SQL Surface Area: https://learn.microsoft.com/en-us/fabric/data-warehouse/tsql-surface-area
|
27
|
+
"""
|
28
|
+
|
29
|
+
# Fabric is case-sensitive unlike T-SQL which is case-insensitive
|
30
|
+
NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_SENSITIVE
|
31
|
+
|
32
|
+
class Tokenizer(TSQL.Tokenizer):
|
33
|
+
# Override T-SQL tokenizer to handle TIMESTAMP differently
|
34
|
+
# In T-SQL, TIMESTAMP is a synonym for ROWVERSION, but in Fabric we want it to be a datetime type
|
35
|
+
# Also add UTINYINT keyword mapping since T-SQL doesn't have it
|
36
|
+
KEYWORDS = {
|
37
|
+
**TSQL.Tokenizer.KEYWORDS,
|
38
|
+
"TIMESTAMP": TokenType.TIMESTAMP,
|
39
|
+
"UTINYINT": TokenType.UTINYINT,
|
40
|
+
}
|
41
|
+
|
42
|
+
class Generator(TSQL.Generator):
|
43
|
+
# Fabric-specific type mappings - override T-SQL types that aren't supported
|
44
|
+
# Reference: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
|
45
|
+
TYPE_MAPPING = {
|
46
|
+
**TSQL.Generator.TYPE_MAPPING,
|
47
|
+
exp.DataType.Type.DATETIME: "DATETIME2",
|
48
|
+
exp.DataType.Type.DECIMAL: "DECIMAL",
|
49
|
+
exp.DataType.Type.IMAGE: "VARBINARY",
|
50
|
+
exp.DataType.Type.INT: "INT",
|
51
|
+
exp.DataType.Type.JSON: "VARCHAR",
|
52
|
+
exp.DataType.Type.MONEY: "DECIMAL",
|
53
|
+
exp.DataType.Type.NCHAR: "CHAR",
|
54
|
+
exp.DataType.Type.NVARCHAR: "VARCHAR",
|
55
|
+
exp.DataType.Type.ROWVERSION: "ROWVERSION",
|
56
|
+
exp.DataType.Type.SMALLDATETIME: "DATETIME2",
|
57
|
+
exp.DataType.Type.SMALLMONEY: "DECIMAL",
|
58
|
+
exp.DataType.Type.TIMESTAMP: "DATETIME2",
|
59
|
+
exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
|
60
|
+
exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
|
61
|
+
exp.DataType.Type.TINYINT: "SMALLINT",
|
62
|
+
exp.DataType.Type.UTINYINT: "SMALLINT",
|
63
|
+
exp.DataType.Type.UUID: "VARBINARY(MAX)",
|
64
|
+
exp.DataType.Type.XML: "VARCHAR",
|
65
|
+
}
|
66
|
+
|
67
|
+
def datatype_sql(self, expression: exp.DataType) -> str:
|
68
|
+
# Check if this is a temporal type that needs precision handling. Fabric limits temporal
|
69
|
+
# types to max 6 digits precision. When no precision is specified, we default to 6 digits.
|
70
|
+
if (
|
71
|
+
expression.is_type(*exp.DataType.TEMPORAL_TYPES)
|
72
|
+
and expression.this != exp.DataType.Type.DATE
|
73
|
+
):
|
74
|
+
# Get the current precision (first expression if it exists)
|
75
|
+
precision_param = expression.find(exp.DataTypeParam)
|
76
|
+
target_precision = 6
|
77
|
+
|
78
|
+
if precision_param and precision_param.this.is_int:
|
79
|
+
# Cap precision at 6
|
80
|
+
current_precision = precision_param.this.to_py()
|
81
|
+
target_precision = min(current_precision, 6)
|
82
|
+
else:
|
83
|
+
# If precision exists but is not an integer, default to 6
|
84
|
+
target_precision = 6
|
85
|
+
|
86
|
+
# Create a new expression with the target precision
|
87
|
+
expression = exp.DataType(
|
88
|
+
this=expression.this,
|
89
|
+
expressions=[exp.DataTypeParam(this=exp.Literal.number(target_precision))],
|
90
|
+
)
|
91
|
+
|
92
|
+
return super().datatype_sql(expression)
|
93
|
+
|
94
|
+
def unixtotime_sql(self, expression: exp.UnixToTime) -> str:
|
95
|
+
scale = expression.args.get("scale")
|
96
|
+
timestamp = expression.this
|
97
|
+
|
98
|
+
if scale not in (None, exp.UnixToTime.SECONDS):
|
99
|
+
self.unsupported(f"UnixToTime scale {scale} is not supported by Fabric")
|
100
|
+
return ""
|
101
|
+
|
102
|
+
# Convert unix timestamp (seconds) to microseconds and round to avoid decimals
|
103
|
+
microseconds = timestamp * exp.Literal.number("1e6")
|
104
|
+
rounded = exp.func("round", microseconds, 0)
|
105
|
+
rounded_ms_as_bigint = exp.cast(rounded, exp.DataType.Type.BIGINT)
|
106
|
+
|
107
|
+
# Create the base datetime as '1970-01-01' cast to DATETIME2(6)
|
108
|
+
epoch_start = exp.cast("'1970-01-01'", "datetime2(6)", dialect="fabric")
|
109
|
+
|
110
|
+
dateadd = exp.DateAdd(
|
111
|
+
this=epoch_start,
|
112
|
+
expression=rounded_ms_as_bigint,
|
113
|
+
unit=exp.Literal.string("MICROSECONDS"),
|
114
|
+
)
|
115
|
+
return self.sql(dateadd)
|
@@ -128,6 +128,7 @@ class Oracle(Dialect):
|
|
128
128
|
"NEXT": lambda self: self._parse_next_value_for(),
|
129
129
|
"PRIOR": lambda self: self.expression(exp.Prior, this=self._parse_bitwise()),
|
130
130
|
"SYSDATE": lambda self: self.expression(exp.CurrentTimestamp, sysdate=True),
|
131
|
+
"DBMS_RANDOM": lambda self: self._parse_dbms_random(),
|
131
132
|
}
|
132
133
|
|
133
134
|
FUNCTION_PARSERS: t.Dict[str, t.Callable] = {
|
@@ -177,6 +178,19 @@ class Oracle(Dialect):
|
|
177
178
|
),
|
178
179
|
}
|
179
180
|
|
181
|
+
def _parse_dbms_random(self) -> t.Optional[exp.Expression]:
|
182
|
+
if self._match_text_seq(".", "VALUE"):
|
183
|
+
lower, upper = None, None
|
184
|
+
if self._match(TokenType.L_PAREN, advance=False):
|
185
|
+
lower_upper = self._parse_wrapped_csv(self._parse_bitwise)
|
186
|
+
if len(lower_upper) == 2:
|
187
|
+
lower, upper = lower_upper
|
188
|
+
|
189
|
+
return exp.Rand(lower=lower, upper=upper)
|
190
|
+
|
191
|
+
self._retreat(self._index - 1)
|
192
|
+
return None
|
193
|
+
|
180
194
|
def _parse_json_array(self, expr_type: t.Type[E], **kwargs) -> E:
|
181
195
|
return self.expression(
|
182
196
|
expr_type,
|
@@ -299,6 +313,7 @@ class Oracle(Dialect):
|
|
299
313
|
exp.LogicalOr: rename_func("MAX"),
|
300
314
|
exp.LogicalAnd: rename_func("MIN"),
|
301
315
|
exp.Mod: rename_func("MOD"),
|
316
|
+
exp.Rand: rename_func("DBMS_RANDOM.VALUE"),
|
302
317
|
exp.Select: transforms.preprocess(
|
303
318
|
[
|
304
319
|
transforms.eliminate_distinct_on,
|
@@ -8,6 +8,7 @@ from sqlglot.dialects.dialect import (
|
|
8
8
|
NormalizationStrategy,
|
9
9
|
binary_from_function,
|
10
10
|
bool_xor_sql,
|
11
|
+
build_replace_with_optional_replacement,
|
11
12
|
date_trunc_to_time,
|
12
13
|
datestrtodate_sql,
|
13
14
|
encode_decode_sql,
|
@@ -315,6 +316,7 @@ class Presto(Dialect):
|
|
315
316
|
|
316
317
|
class Parser(parser.Parser):
|
317
318
|
VALUES_FOLLOWED_BY_PAREN = False
|
319
|
+
ZONE_AWARE_TIMESTAMP_CONSTRUCTOR = True
|
318
320
|
|
319
321
|
FUNCTIONS = {
|
320
322
|
**parser.Parser.FUNCTIONS,
|
@@ -359,6 +361,7 @@ class Presto(Dialect):
|
|
359
361
|
expression=seq_get(args, 1),
|
360
362
|
replacement=seq_get(args, 2) or exp.Literal.string(""),
|
361
363
|
),
|
364
|
+
"REPLACE": build_replace_with_optional_replacement,
|
362
365
|
"ROW": exp.Struct.from_arg_list,
|
363
366
|
"SEQUENCE": exp.GenerateSeries.from_arg_list,
|
364
367
|
"SET_AGG": exp.ArrayUniqueAgg.from_arg_list,
|
@@ -189,11 +189,15 @@ class PRQL(Dialect):
|
|
189
189
|
parse_bracket: bool = False,
|
190
190
|
is_db_reference: bool = False,
|
191
191
|
parse_partition: bool = False,
|
192
|
+
consume_pipe: bool = False,
|
192
193
|
) -> t.Optional[exp.Expression]:
|
193
194
|
return self._parse_table_parts()
|
194
195
|
|
195
196
|
def _parse_from(
|
196
|
-
self,
|
197
|
+
self,
|
198
|
+
joins: bool = False,
|
199
|
+
skip_from_token: bool = False,
|
200
|
+
consume_pipe: bool = False,
|
197
201
|
) -> t.Optional[exp.From]:
|
198
202
|
if not skip_from_token and not self._match(TokenType.FROM):
|
199
203
|
return None
|
@@ -90,6 +90,7 @@ class Redshift(Postgres):
|
|
90
90
|
parse_bracket: bool = False,
|
91
91
|
is_db_reference: bool = False,
|
92
92
|
parse_partition: bool = False,
|
93
|
+
consume_pipe: bool = False,
|
93
94
|
) -> t.Optional[exp.Expression]:
|
94
95
|
# Redshift supports UNPIVOTing SUPER objects, e.g. `UNPIVOT foo.obj[0] AS val AT attr`
|
95
96
|
unpivot = self._match(TokenType.UNPIVOT)
|
@@ -212,8 +213,7 @@ class Redshift(Postgres):
|
|
212
213
|
exp.TableSample: no_tablesample_sql,
|
213
214
|
exp.TsOrDsAdd: date_delta_sql("DATEADD"),
|
214
215
|
exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
|
215
|
-
exp.UnixToTime: lambda self,
|
216
|
-
e: f"(TIMESTAMP 'epoch' + {self.sql(e.this)} * INTERVAL '1 SECOND')",
|
216
|
+
exp.UnixToTime: lambda self, e: self._unix_to_time_sql(e),
|
217
217
|
}
|
218
218
|
|
219
219
|
# Postgres maps exp.Pivot to no_pivot_sql, but Redshift support pivots
|
@@ -446,3 +446,12 @@ class Redshift(Postgres):
|
|
446
446
|
def explode_sql(self, expression: exp.Explode) -> str:
|
447
447
|
self.unsupported("Unsupported EXPLODE() function")
|
448
448
|
return ""
|
449
|
+
|
450
|
+
def _unix_to_time_sql(self, expression: exp.UnixToTime) -> str:
|
451
|
+
scale = expression.args.get("scale")
|
452
|
+
this = self.sql(expression.this)
|
453
|
+
|
454
|
+
if scale is not None and scale != exp.UnixToTime.SECONDS and scale.is_int:
|
455
|
+
this = f"({this} / POWER(10, {scale.to_py()}))"
|
456
|
+
|
457
|
+
return f"(TIMESTAMP 'epoch' + {this} * INTERVAL '1 SECOND')"
|
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
|
|
9
9
|
build_timetostr_or_tochar,
|
10
10
|
binary_from_function,
|
11
11
|
build_default_decimal_type,
|
12
|
+
build_replace_with_optional_replacement,
|
12
13
|
build_timestamp_from_parts,
|
13
14
|
date_delta_sql,
|
14
15
|
date_trunc_to_time,
|
@@ -484,6 +485,7 @@ class Snowflake(Dialect):
|
|
484
485
|
"REGEXP_REPLACE": _build_regexp_replace,
|
485
486
|
"REGEXP_SUBSTR": _build_regexp_extract(exp.RegexpExtract),
|
486
487
|
"REGEXP_SUBSTR_ALL": _build_regexp_extract(exp.RegexpExtractAll),
|
488
|
+
"REPLACE": build_replace_with_optional_replacement,
|
487
489
|
"RLIKE": exp.RegexpLike.from_arg_list,
|
488
490
|
"SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
|
489
491
|
"TABLE": lambda args: exp.TableFromRows(this=seq_get(args, 0)),
|
@@ -799,6 +801,7 @@ class Snowflake(Dialect):
|
|
799
801
|
parse_bracket: bool = False,
|
800
802
|
is_db_reference: bool = False,
|
801
803
|
parse_partition: bool = False,
|
804
|
+
consume_pipe: bool = False,
|
802
805
|
) -> t.Optional[exp.Expression]:
|
803
806
|
table = super()._parse_table(
|
804
807
|
schema=schema,
|
@@ -1415,7 +1418,7 @@ class Snowflake(Dialect):
|
|
1415
1418
|
|
1416
1419
|
def timetostr_sql(self, expression: exp.TimeToStr) -> str:
|
1417
1420
|
this = expression.this
|
1418
|
-
if
|
1421
|
+
if this.is_string:
|
1419
1422
|
this = exp.cast(this, exp.DataType.Type.TIMESTAMP)
|
1420
1423
|
|
1421
1424
|
return self.func("TO_CHAR", this, self.format_time(expression))
|
@@ -7,6 +7,7 @@ from sqlglot.dialects.dialect import rename_func, unit_to_var, timestampdiff_sql
|
|
7
7
|
from sqlglot.dialects.hive import _build_with_ignore_nulls
|
8
8
|
from sqlglot.dialects.spark2 import Spark2, temporary_storage_provider, _build_as_cast
|
9
9
|
from sqlglot.helper import ensure_list, seq_get
|
10
|
+
from sqlglot.tokens import TokenType
|
10
11
|
from sqlglot.transforms import (
|
11
12
|
ctas_with_tmp_tables_to_create_tmp_view,
|
12
13
|
remove_unique_constraints,
|
@@ -121,6 +122,16 @@ class Spark(Spark2):
|
|
121
122
|
),
|
122
123
|
}
|
123
124
|
|
125
|
+
PLACEHOLDER_PARSERS = {
|
126
|
+
**Spark2.Parser.PLACEHOLDER_PARSERS,
|
127
|
+
TokenType.L_BRACE: lambda self: self._parse_query_parameter(),
|
128
|
+
}
|
129
|
+
|
130
|
+
def _parse_query_parameter(self) -> t.Optional[exp.Expression]:
|
131
|
+
this = self._parse_id_var()
|
132
|
+
self._match(TokenType.R_BRACE)
|
133
|
+
return self.expression(exp.Placeholder, this=this, widget=True)
|
134
|
+
|
124
135
|
def _parse_generated_as_identity(
|
125
136
|
self,
|
126
137
|
) -> (
|
@@ -200,3 +211,9 @@ class Spark(Spark2):
|
|
200
211
|
return self.func("DATEDIFF", unit_to_var(expression), start, end)
|
201
212
|
|
202
213
|
return self.func("DATEDIFF", end, start)
|
214
|
+
|
215
|
+
def placeholder_sql(self, expression: exp.Placeholder) -> str:
|
216
|
+
if not expression.args.get("widget"):
|
217
|
+
return super().placeholder_sql(expression)
|
218
|
+
|
219
|
+
return f"{{{expression.name}}}"
|
@@ -102,6 +102,10 @@ class SQLite(Dialect):
|
|
102
102
|
COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.REPLACE}
|
103
103
|
|
104
104
|
class Parser(parser.Parser):
|
105
|
+
STRING_ALIASES = True
|
106
|
+
ALTER_RENAME_REQUIRES_COLUMN = False
|
107
|
+
JOINS_HAVE_EQUAL_PRECEDENCE = True
|
108
|
+
|
105
109
|
FUNCTIONS = {
|
106
110
|
**parser.Parser.FUNCTIONS,
|
107
111
|
"EDITDIST3": exp.Levenshtein.from_arg_list,
|
@@ -110,9 +114,6 @@ class SQLite(Dialect):
|
|
110
114
|
"TIME": lambda args: exp.Anonymous(this="TIME", expressions=args),
|
111
115
|
}
|
112
116
|
|
113
|
-
STRING_ALIASES = True
|
114
|
-
ALTER_RENAME_REQUIRES_COLUMN = False
|
115
|
-
|
116
117
|
def _parse_unique(self) -> exp.UniqueColumnConstraint:
|
117
118
|
# Do not consume more tokens if UNIQUE is used as a standalone constraint, e.g:
|
118
119
|
# CREATE TABLE foo (bar TEXT UNIQUE REFERENCES baz ...)
|