sqlglot: 27.5.1.tar.gz → 27.7.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlglot-27.5.1 → sqlglot-27.7.0}/CHANGELOG.md +31 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/PKG-INFO +1 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/_version.py +2 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/bigquery.py +43 -6
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/clickhouse.py +27 -8
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/dialect.py +10 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/doris.py +43 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/dremio.py +24 -3
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/duckdb.py +32 -3
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/exasol.py +58 -4
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/singlestore.py +50 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/snowflake.py +3 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/spark.py +1 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/teradata.py +58 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/expressions.py +63 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/generator.py +35 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/annotate_types.py +4 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/merge_subqueries.py +4 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/qualify_columns.py +40 -25
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/qualify_tables.py +0 -8
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/parser.py +35 -6
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.egg-info/PKG-INFO +1 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_bigquery.py +21 -23
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_clickhouse.py +8 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_dialect.py +128 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_doris.py +94 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_dremio.py +7 -6
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_duckdb.py +16 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_exasol.py +62 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_presto.py +0 -1
- sqlglot-27.7.0/tests/dialects/test_singlestore.py +107 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_snowflake.py +68 -3
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_teradata.py +35 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_tsql.py +7 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/annotate_functions.sql +94 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/merge_subqueries.sql +9 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/qualify_columns.sql +34 -2
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_optimizer.py +2 -1
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_parser.py +1 -0
- sqlglot-27.5.1/tests/dialects/test_singlestore.py +0 -51
- {sqlglot-27.5.1 → sqlglot-27.7.0}/.gitignore +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/.gitpod.yml +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/.pre-commit-config.yaml +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/CONTRIBUTING.md +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/LICENSE +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/MANIFEST.in +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/Makefile +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/README.md +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/pyproject.toml +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/setup.cfg +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/setup.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/__main__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/_typing.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/athena.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/databricks.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/drill.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/druid.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/dune.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/fabric.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/hive.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/materialize.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/mysql.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/oracle.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/postgres.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/presto.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/prql.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/redshift.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/risingwave.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/spark2.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/sqlite.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/starrocks.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/tableau.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/trino.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/dialects/tsql.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/diff.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/errors.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/executor/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/executor/context.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/executor/env.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/executor/python.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/executor/table.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/helper.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/jsonpath.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/lineage.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/canonicalize.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/eliminate_ctes.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/eliminate_joins.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/eliminate_subqueries.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/isolate_table_selects.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/normalize.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/normalize_identifiers.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/optimize_joins.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/optimizer.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/pushdown_predicates.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/pushdown_projections.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/qualify.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/scope.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/simplify.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/optimizer/unnest_subqueries.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/planner.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/py.typed +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/schema.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/serde.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/time.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/tokens.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/transforms.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot/trie.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.egg-info/SOURCES.txt +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.egg-info/dependency_links.txt +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.egg-info/requires.txt +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.egg-info/top_level.txt +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglot.png +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/Cargo.lock +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/Cargo.toml +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/benches/dialect_settings.json +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/benches/long.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/benches/token_type_settings.json +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/benches/tokenizer_dialect_settings.json +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/benches/tokenizer_settings.json +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/pyproject.toml +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/src/lib.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/src/settings.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/src/token.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/src/tokenizer.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/sqlglotrs/src/trie.rs +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/__init__.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_athena.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_databricks.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_drill.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_druid.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_dune.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_fabric.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_hive.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_materialize.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_mysql.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_oracle.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_pipe_syntax.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_postgres.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_prql.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_redshift.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_risingwave.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_spark.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_sqlite.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_starrocks.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_tableau.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/dialects/test_trino.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/identity.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/jsonpath/LICENSE +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/jsonpath/cts.json +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/annotate_types.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/canonicalize.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/eliminate_ctes.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/eliminate_joins.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/eliminate_subqueries.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/isolate_table_selects.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/normalize.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/normalize_identifiers.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/optimize_joins.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/optimizer.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/pushdown_cte_alias_columns.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/pushdown_predicates.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/pushdown_projections.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/qualify_columns__invalid.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/qualify_columns__with_invisible.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/qualify_columns_ddl.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/qualify_tables.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/quote_identifiers.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/simplify.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/call_center.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/catalog_page.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/catalog_returns.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/catalog_sales.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/customer.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/customer_address.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/customer_demographics.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/date_dim.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/household_demographics.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/income_band.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/inventory.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/item.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/promotion.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/reason.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/ship_mode.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/store.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/store_returns.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/store_sales.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/time_dim.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/tpc-ds.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/warehouse.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/web_page.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/web_returns.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/web_sales.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-ds/web_site.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/customer.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/lineitem.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/nation.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/orders.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/part.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/partsupp.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/region.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/supplier.csv.gz +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/tpc-h/tpc-h.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/optimizer/unnest_subqueries.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/partial.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/fixtures/pretty.sql +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/gen_fixtures.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/helpers.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_build.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_dialect_imports.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_diff.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_docs.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_executor.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_expressions.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_generator.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_helper.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_jsonpath.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_lineage.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_schema.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_serde.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_time.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_tokens.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_transforms.py +0 -0
- {sqlglot-27.5.1 → sqlglot-27.7.0}/tests/test_transpile.py +0 -0
CHANGELOG.md

@@ -1,6 +1,35 @@
 Changelog
 =========
 
+## [v27.6.0] - 2025-08-01
+### :boom: BREAKING CHANGES
+- due to [`6b691b3`](https://github.com/tobymao/sqlglot/commit/6b691b33c3528c0377bd8822a3df90de869c6cb1) - Parse and transpile GET(...) extract function *(PR [#5500](https://github.com/tobymao/sqlglot/pull/5500) by [@VaggelisD](https://github.com/VaggelisD))*:
+
+  Parse and transpile GET(...) extract function (#5500)
+
+- due to [`964a275`](https://github.com/tobymao/sqlglot/commit/964a275b42314380de3b301ada9f9756602729f7) - Make `UNION` column qualification recursive *(PR [#5508](https://github.com/tobymao/sqlglot/pull/5508) by [@VaggelisD](https://github.com/VaggelisD))*:
+
+  Make `UNION` column qualification recursive (#5508)
+
+
+### :sparkles: New Features
+- [`6b691b3`](https://github.com/tobymao/sqlglot/commit/6b691b33c3528c0377bd8822a3df90de869c6cb1) - **snowflake**: Parse and transpile GET(...) extract function *(PR [#5500](https://github.com/tobymao/sqlglot/pull/5500) by [@VaggelisD](https://github.com/VaggelisD))*
+  - :arrow_lower_right: *addresses issue [#5495](https://github.com/tobymao/sqlglot/issues/5495) opened by [@kyle-cheung](https://github.com/kyle-cheung)*
+- [`a2a2f0f`](https://github.com/tobymao/sqlglot/commit/a2a2f0fe910228651c5c39beebcc02172a0b7e94) - **exasol**: Add support for IF, NULLIFZERO, and ZEROIFNULL functions *(PR [#5502](https://github.com/tobymao/sqlglot/pull/5502) by [@nnamdi16](https://github.com/nnamdi16))*
+- [`2d8ce58`](https://github.com/tobymao/sqlglot/commit/2d8ce587c75f21b188ec4c201936eedac3b051e8) - **singlestore**: Added cast operator *(PR [#5504](https://github.com/tobymao/sqlglot/pull/5504) by [@AdalbertMemSQL](https://github.com/AdalbertMemSQL))*
+- [`6256348`](https://github.com/tobymao/sqlglot/commit/6256348a28b72ae9052d4244736846af209410b0) - **exasol**: add support for ADD_DAYS function in exasol dialect *(PR [#5507](https://github.com/tobymao/sqlglot/pull/5507) by [@nnamdi16](https://github.com/nnamdi16))*
+- [`2f40fc5`](https://github.com/tobymao/sqlglot/commit/2f40fc578a840c9276a4c3b91351fb8d95c837fc) - add more pseudocols to bq which are not expanded by star *(PR [#5509](https://github.com/tobymao/sqlglot/pull/5509) by [@z3z1ma](https://github.com/z3z1ma))*
+
+### :bug: Bug Fixes
+- [`3b52061`](https://github.com/tobymao/sqlglot/commit/3b520611c5a894ddea935d13aadd27c791a8a755) - **exasol**: fix TokenType.TEXT mapping in exasol dialect *(PR [#5506](https://github.com/tobymao/sqlglot/pull/5506) by [@nnamdi16](https://github.com/nnamdi16))*
+- [`964a275`](https://github.com/tobymao/sqlglot/commit/964a275b42314380de3b301ada9f9756602729f7) - Make `UNION` column qualification recursive *(PR [#5508](https://github.com/tobymao/sqlglot/pull/5508) by [@VaggelisD](https://github.com/VaggelisD))*
+
+
+## [v27.5.1] - 2025-07-30
+### :bug: Bug Fixes
+- [`caf71d6`](https://github.com/tobymao/sqlglot/commit/caf71d687c0048d2346fddaee58b519e4f2e7945) - `between` builder should not set `symmetric` by default *(commit by [@georgesittas](https://github.com/georgesittas))*
+
+
 ## [v27.5.0] - 2025-07-30
 ### :boom: BREAKING CHANGES
 - due to [`002286e`](https://github.com/tobymao/sqlglot/commit/002286ee05a608e303a2238a9a74ab963709b5da) - remove AM/PM entries from postgres, oracle `TIME_MAPPING` *(PR [#5491](https://github.com/tobymao/sqlglot/pull/5491) by [@georgesittas](https://github.com/georgesittas))*:

@@ -6373,3 +6402,5 @@ Changelog
 [v27.4.0]: https://github.com/tobymao/sqlglot/compare/v27.3.1...v27.4.0
 [v27.4.1]: https://github.com/tobymao/sqlglot/compare/v27.4.0...v27.4.1
 [v27.5.0]: https://github.com/tobymao/sqlglot/compare/v27.4.1...v27.5.0
+[v27.5.1]: https://github.com/tobymao/sqlglot/compare/v27.5.0...v27.5.1
+[v27.6.0]: https://github.com/tobymao/sqlglot/compare/v27.5.1...v27.6.0
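A minimal usage sketch (illustrative, not part of the diff) of the two headline v27.6.0 changes noted above: Snowflake's GET(...) now transpiles, and UNION column qualification is recursive. Table, column, and schema names below are made up, and the exact output depends on the target dialect.

```python
import sqlglot
from sqlglot.optimizer.qualify import qualify

# Snowflake's GET(...) extract function now parses into a proper AST node and
# can be transpiled; the rendered output varies by target dialect.
print(sqlglot.transpile("SELECT GET(col, 'a') FROM t", read="snowflake", write="duckdb")[0])

# UNION column qualification is applied recursively, so nested set operations
# also get their stars expanded and columns qualified.
sql = "SELECT * FROM a UNION SELECT * FROM b UNION SELECT * FROM c"
schema = {"a": {"x": "int"}, "b": {"x": "int"}, "c": {"x": "int"}}
print(qualify(sqlglot.parse_one(sql), schema=schema).sql())
```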
sqlglot/dialects/bigquery.py

@@ -434,7 +434,9 @@ class BigQuery(Dialect):
 
     # The _PARTITIONTIME and _PARTITIONDATE pseudo-columns are not returned by a SELECT * statement
     # https://cloud.google.com/bigquery/docs/querying-partitioned-tables#query_an_ingestion-time_partitioned_table
-    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE"}
+    # https://cloud.google.com/bigquery/docs/querying-wildcard-tables#scanning_a_range_of_tables_using_table_suffix
+    # https://cloud.google.com/bigquery/docs/query-cloud-storage-data#query_the_file_name_pseudo-column
+    PSEUDOCOLUMNS = {"_PARTITIONTIME", "_PARTITIONDATE", "_TABLE_SUFFIX", "_FILE_NAME"}
 
     # All set operations require either a DISTINCT or ALL specifier
     SET_OP_DISTINCT_BY_DEFAULT = dict.fromkeys((exp.Except, exp.Intersect, exp.Union), None)
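A small sketch (illustrative, not from the diff) of what the extended pseudo-column set means in practice; the wildcard table name is an assumption.

```python
import sqlglot

sql = "SELECT *, _TABLE_SUFFIX, _FILE_NAME FROM `my-project.logs.events_*`"
expression = sqlglot.parse_one(sql, read="bigquery")

# Pseudo-columns parse as ordinary column references and round-trip unchanged;
# being listed in BigQuery.PSEUDOCOLUMNS keeps them out of SELECT * expansion.
print(expression.sql(dialect="bigquery"))
```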
@@ -479,10 +481,20 @@ class BigQuery(Dialect):
         exp.BitwiseOrAgg: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
         exp.BitwiseXorAgg: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
         exp.BitwiseCountAgg: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
+        exp.ByteLength: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
+        exp.ByteString: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BINARY),
+        exp.CodePointsToString: lambda self, e: self._annotate_with_type(
+            e, exp.DataType.Type.VARCHAR
+        ),
         exp.Concat: _annotate_concat,
         exp.Corr: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
         exp.CovarPop: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
         exp.CovarSamp: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DOUBLE),
+        exp.DateFromUnixDate: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DATE),
+        exp.DateTrunc: lambda self, e: self._annotate_by_args(e, "this"),
+        exp.GenerateTimestampArray: lambda self, e: self._annotate_with_type(
+            e, exp.DataType.build("ARRAY<TIMESTAMP>", dialect="bigquery")
+        ),
         exp.JSONArray: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.JSON),
         exp.JSONExtractScalar: lambda self, e: self._annotate_with_type(
             e, exp.DataType.Type.VARCHAR

@@ -492,6 +504,9 @@ class BigQuery(Dialect):
         ),
         exp.JSONType: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.VARCHAR),
         exp.Lag: lambda self, e: self._annotate_by_args(e, "this", "default"),
+        exp.ParseTime: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.TIME),
+        exp.ParseDatetime: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.DATETIME),
+        exp.Reverse: lambda self, e: self._annotate_by_args(e, "this"),
         exp.SHA: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BINARY),
         exp.SHA2: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BINARY),
         exp.Sign: lambda self, e: self._annotate_by_args(e, "this"),

@@ -499,6 +514,10 @@ class BigQuery(Dialect):
         exp.TimestampFromParts: lambda self, e: self._annotate_with_type(
             e, exp.DataType.Type.DATETIME
         ),
+        exp.TimestampTrunc: lambda self, e: self._annotate_by_args(e, "this"),
+        exp.TimeFromParts: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.TIME),
+        exp.TsOrDsToTime: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.TIME),
+        exp.TimeTrunc: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.TIME),
         exp.Unicode: lambda self, e: self._annotate_with_type(e, exp.DataType.Type.BIGINT),
     }
 
@@ -619,7 +638,13 @@ class BigQuery(Dialect):
             "PARSE_DATE": lambda args: build_formatted_time(exp.StrToDate, "bigquery")(
                 [seq_get(args, 1), seq_get(args, 0)]
             ),
+            "PARSE_TIME": lambda args: build_formatted_time(exp.ParseTime, "bigquery")(
+                [seq_get(args, 1), seq_get(args, 0)]
+            ),
             "PARSE_TIMESTAMP": _build_parse_timestamp,
+            "PARSE_DATETIME": lambda args: build_formatted_time(exp.ParseDatetime, "bigquery")(
+                [seq_get(args, 1), seq_get(args, 0)]
+            ),
             "REGEXP_CONTAINS": exp.RegexpLike.from_arg_list,
             "REGEXP_EXTRACT": _build_regexp_extract(exp.RegexpExtract),
             "REGEXP_SUBSTR": _build_regexp_extract(exp.RegexpExtract),

@@ -650,6 +675,8 @@ class BigQuery(Dialect):
             "TO_JSON_STRING": exp.JSONFormat.from_arg_list,
             "FORMAT_DATETIME": _build_format_time(exp.TsOrDsToDatetime),
             "FORMAT_TIMESTAMP": _build_format_time(exp.TsOrDsToTimestamp),
+            "FORMAT_TIME": _build_format_time(exp.TsOrDsToTime),
+            "WEEK": lambda args: exp.WeekStart(this=exp.var(seq_get(args, 0))),
         }
 
         FUNCTION_PARSERS = {
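A minimal sketch (illustrative, not from the diff) showing the new BigQuery function mappings in use; the literals and column names are assumptions.

```python
import sqlglot

# PARSE_TIME, PARSE_DATETIME and FORMAT_TIME now map to dedicated expression
# types, so BigQuery SQL that uses them should round-trip instead of falling
# back to anonymous functions.
sql = "SELECT PARSE_TIME('%H:%M:%S', '12:34:56'), FORMAT_TIME('%H', t) FROM tbl"
print(sqlglot.transpile(sql, read="bigquery", write="bigquery")[0])

# Inspecting the AST shows the dedicated node produced for PARSE_TIME.
print(repr(sqlglot.parse_one("SELECT PARSE_TIME('%H', '09')", read="bigquery")))
```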
@@ -992,6 +1019,13 @@ class BigQuery(Dialect):
         EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
         SUPPORTS_UNIX_SECONDS = True
 
+        TS_OR_DS_TYPES = (
+            exp.TsOrDsToDatetime,
+            exp.TsOrDsToTimestamp,
+            exp.TsOrDsToTime,
+            exp.TsOrDsToDate,
+        )
+
         TRANSFORMS = {
             **generator.Generator.TRANSFORMS,
             exp.ApproxDistinct: rename_func("APPROX_COUNT_DISTINCT"),

@@ -1020,6 +1054,7 @@ class BigQuery(Dialect):
             exp.DateSub: date_add_interval_sql("DATE", "SUB"),
             exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
             exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
+            exp.DateFromUnixDate: rename_func("DATE_FROM_UNIX_DATE"),
             exp.FromTimeZone: lambda self, e: self.func(
                 "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
             ),

@@ -1057,6 +1092,10 @@ class BigQuery(Dialect):
             exp.RegexpLike: rename_func("REGEXP_CONTAINS"),
             exp.ReturnsProperty: _returnsproperty_sql,
             exp.Rollback: lambda *_: "ROLLBACK TRANSACTION",
+            exp.ParseTime: lambda self, e: self.func("PARSE_TIME", self.format_time(e), e.this),
+            exp.ParseDatetime: lambda self, e: self.func(
+                "PARSE_DATETIME", self.format_time(e), e.this
+            ),
             exp.Select: transforms.preprocess(
                 [
                     transforms.explode_projection_to_unnest(),
@@ -1295,14 +1334,12 @@ class BigQuery(Dialect):
                 func_name = "FORMAT_DATETIME"
             elif isinstance(this, exp.TsOrDsToTimestamp):
                 func_name = "FORMAT_TIMESTAMP"
+            elif isinstance(this, exp.TsOrDsToTime):
+                func_name = "FORMAT_TIME"
             else:
                 func_name = "FORMAT_DATE"
 
-            time_expr = (
-                this
-                if isinstance(this, (exp.TsOrDsToDatetime, exp.TsOrDsToTimestamp, exp.TsOrDsToDate))
-                else expression
-            )
+            time_expr = this if isinstance(this, self.TS_OR_DS_TYPES) else expression
             return self.func(
                 func_name, self.format_time(expression), time_expr.this, expression.args.get("zone")
             )
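A hedged sketch (not from the diff) of how the new BigQuery type annotators surface through the optimizer; the query text is an assumption.

```python
import sqlglot
from sqlglot.optimizer.annotate_types import annotate_types

expr = sqlglot.parse_one("SELECT PARSE_TIME('%H:%M:%S', '12:34:56') AS t", read="bigquery")
annotated = annotate_types(expr, dialect="bigquery")

# With exp.ParseTime registered in the BigQuery annotators, the projection
# should now be typed as TIME rather than UNKNOWN.
print(annotated.expressions[0].type)
```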
sqlglot/dialects/clickhouse.py

@@ -2,6 +2,7 @@ from __future__ import annotations
 import typing as t
 import datetime
 from sqlglot import exp, generator, parser, tokens
+from sqlglot._typing import E
 from sqlglot.dialects.dialect import (
     Dialect,
     NormalizationStrategy,
@@ -31,14 +32,19 @@ from sqlglot.generator import unsupported_args
 DATEΤΙΜΕ_DELTA = t.Union[exp.DateAdd, exp.DateDiff, exp.DateSub, exp.TimestampSub, exp.TimestampAdd]
 
 
-def
-
+def _build_datetime_format(
+    expr_type: t.Type[E],
+) -> t.Callable[[t.List], E]:
+    def _builder(args: t.List) -> E:
+        expr = build_formatted_time(expr_type, "clickhouse")(args)
 
-
-
-
+        timezone = seq_get(args, 2)
+        if timezone:
+            expr.set("zone", timezone)
 
-
+        return expr
+
+    return _builder
 
 
 def _unix_to_time_sql(self: ClickHouse.Generator, expression: exp.UnixToTime) -> str:
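A small sketch (illustrative, not from the diff) of the new builder's effect: the optional third (timezone) argument of ClickHouse's formatDateTime is kept on the parsed expression. Column and timezone values are assumptions.

```python
import sqlglot
from sqlglot import exp

expr = sqlglot.parse_one("SELECT formatDateTime(dt, '%Y-%m-%d', 'UTC') FROM t", read="clickhouse")

# The third argument is now stored in the "zone" arg of the TimeToStr node.
print(expr.find(exp.TimeToStr).args.get("zone"))

# Round-tripping back to ClickHouse should preserve all three arguments.
print(expr.sql(dialect="clickhouse"))
```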
@@ -310,16 +316,17 @@ class ClickHouse(Dialect):
             "DATEADD": build_date_delta(exp.DateAdd, default_unit=None),
             "DATE_DIFF": build_date_delta(exp.DateDiff, default_unit=None, supports_timezone=True),
             "DATEDIFF": build_date_delta(exp.DateDiff, default_unit=None, supports_timezone=True),
-            "DATE_FORMAT":
+            "DATE_FORMAT": _build_datetime_format(exp.TimeToStr),
             "DATE_SUB": build_date_delta(exp.DateSub, default_unit=None),
             "DATESUB": build_date_delta(exp.DateSub, default_unit=None),
-            "FORMATDATETIME":
+            "FORMATDATETIME": _build_datetime_format(exp.TimeToStr),
             "JSONEXTRACTSTRING": build_json_extract_path(
                 exp.JSONExtractScalar, zero_based_indexing=False
             ),
             "LENGTH": lambda args: exp.Length(this=seq_get(args, 0), binary=True),
             "MAP": parser.build_var_map,
             "MATCH": exp.RegexpLike.from_arg_list,
+            "PARSEDATETIME": _build_datetime_format(exp.ParseDatetime),
             "RANDCANONICAL": exp.Rand.from_arg_list,
             "STR_TO_DATE": _build_str_to_date,
             "TUPLE": exp.Struct.from_arg_list,
@@ -1141,6 +1148,7 @@ class ClickHouse(Dialect):
             exp.Levenshtein: unsupported_args("ins_cost", "del_cost", "sub_cost", "max_dist")(
                 rename_func("editDistance")
             ),
+            exp.ParseDatetime: rename_func("parseDateTime"),
         }
 
         PROPERTIES_LOCATION = {
@@ -1177,6 +1185,17 @@ class ClickHouse(Dialect):
             exp.DataType.Type.MULTIPOLYGON,
         }
 
+        def offset_sql(self, expression: exp.Offset) -> str:
+            offset = super().offset_sql(expression)
+
+            # OFFSET ... FETCH syntax requires a "ROW" or "ROWS" keyword
+            # https://clickhouse.com/docs/sql-reference/statements/select/offset
+            parent = expression.parent
+            if isinstance(parent, exp.Select) and isinstance(parent.args.get("limit"), exp.Fetch):
+                offset = f"{offset} ROWS"
+
+            return offset
+
         def strtodate_sql(self, expression: exp.StrToDate) -> str:
             strtodate_sql = self.function_fallback_sql(expression)
 
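A minimal sketch (illustrative, not from the diff) of the OFFSET fix: when a SELECT uses the OFFSET ... FETCH form, the ClickHouse generator now appends the ROWS keyword that ClickHouse requires.

```python
import sqlglot

sql = "SELECT * FROM t OFFSET 5 ROWS FETCH FIRST 10 ROWS ONLY"
# Generating for ClickHouse should keep "ROWS" after the OFFSET clause.
print(sqlglot.transpile(sql, write="clickhouse")[0])
```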
sqlglot/dialects/dialect.py

@@ -654,6 +654,8 @@ class Dialect(metaclass=_Dialect):
             exp.Length,
             exp.UnixDate,
             exp.UnixSeconds,
+            exp.UnixMicros,
+            exp.UnixMillis,
         },
         exp.DataType.Type.BINARY: {
             exp.FromBase64,

@@ -674,6 +676,7 @@ class Dialect(metaclass=_Dialect):
             exp.DateFromParts,
             exp.DateStrToDate,
             exp.DiToDate,
+            exp.LastDay,
             exp.StrToDate,
             exp.TimeStrToDate,
             exp.TsOrDsToDate,

@@ -718,6 +721,9 @@ class Dialect(metaclass=_Dialect):
         },
         exp.DataType.Type.INTERVAL: {
             exp.Interval,
+            exp.JustifyDays,
+            exp.JustifyHours,
+            exp.JustifyInterval,
             exp.MakeInterval,
         },
         exp.DataType.Type.JSON: {

@@ -1650,9 +1656,11 @@ def unit_to_str(expression: exp.Expression, default: str = "DAY") -> t.Optional[
 def unit_to_var(expression: exp.Expression, default: str = "DAY") -> t.Optional[exp.Expression]:
     unit = expression.args.get("unit")
 
-    if isinstance(unit, (exp.Var, exp.Placeholder)):
+    if isinstance(unit, (exp.Var, exp.Placeholder, exp.WeekStart)):
         return unit
-
+
+    value = unit.name if unit else default
+    return exp.Var(this=value) if value else None
 
 
 @t.overload
sqlglot/dialects/doris.py

@@ -1,15 +1,17 @@
 from __future__ import annotations
 
+import typing as t
+
 from sqlglot import exp
 from sqlglot.dialects.dialect import (
     approx_count_distinct_sql,
-    build_timestamp_trunc,
     property_sql,
     rename_func,
     time_format,
     unit_to_str,
 )
 from sqlglot.dialects.mysql import MySQL
+from sqlglot.helper import seq_get
 from sqlglot.tokens import TokenType
 
 
@@ -22,6 +24,22 @@ def _lag_lead_sql(self, expression: exp.Lag | exp.Lead) -> str:
     )
 
 
+# Accept both DATE_TRUNC(datetime, unit) and DATE_TRUNC(unit, datetime)
+def _build_date_trunc(args: t.List[exp.Expression]) -> exp.Expression:
+    a0, a1 = seq_get(args, 0), seq_get(args, 1)
+
+    def _is_unit_like(e: exp.Expression | None) -> bool:
+        if not (isinstance(e, exp.Literal) and e.is_string):
+            return False
+        text = e.this
+        return not any(ch.isdigit() for ch in text)
+
+    # Determine which argument is the unit
+    unit, this = (a0, a1) if _is_unit_like(a0) else (a1, a0)
+
+    return exp.TimestampTrunc(this=this, unit=unit)
+
+
 class Doris(MySQL):
     DATE_FORMAT = "'yyyy-MM-dd'"
     DATEINT_FORMAT = "'yyyyMMdd'"
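A short sketch (illustrative, not from the diff) of the DATE_TRUNC change: Doris accepts both argument orders, and the new builder detects which argument is the unit, so both spellings parse to the same expression.

```python
import sqlglot

for sql in (
    "SELECT DATE_TRUNC(dt, 'month') FROM t",
    "SELECT DATE_TRUNC('month', dt) FROM t",
):
    # Both forms should produce the same canonical Doris output.
    print(sqlglot.parse_one(sql, read="doris").sql(dialect="doris"))
```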
@@ -31,7 +49,7 @@ class Doris(MySQL):
         FUNCTIONS = {
             **MySQL.Parser.FUNCTIONS,
             "COLLECT_SET": exp.ArrayUniqueAgg.from_arg_list,
-            "DATE_TRUNC":
+            "DATE_TRUNC": _build_date_trunc,
             "MONTHS_ADD": exp.AddMonths.from_arg_list,
             "REGEXP": exp.RegexpLike.from_arg_list,
             "TO_DATE": exp.TsOrDsToDate.from_arg_list,

@@ -40,6 +58,9 @@ class Doris(MySQL):
         FUNCTION_PARSERS = MySQL.Parser.FUNCTION_PARSERS.copy()
         FUNCTION_PARSERS.pop("GROUP_CONCAT")
 
+        NO_PAREN_FUNCTIONS = MySQL.Parser.NO_PAREN_FUNCTIONS.copy()
+        NO_PAREN_FUNCTIONS.pop(TokenType.CURRENT_DATE)
+
         PROPERTY_PARSERS = {
             **MySQL.Parser.PROPERTY_PARSERS,
             "PROPERTIES": lambda self: self._parse_wrapped_properties(),
@@ -111,6 +132,7 @@ class Doris(MySQL):
         LAST_DAY_SUPPORTS_DATE_PART = False
         VARCHAR_REQUIRES_SIZE = False
         WITH_PROPERTIES_PREFIX = "PROPERTIES"
+        RENAME_TABLE_WITH_DB = False
 
         TYPE_MAPPING = {
             **MySQL.Generator.TYPE_MAPPING,

@@ -123,6 +145,7 @@ class Doris(MySQL):
             **MySQL.Generator.PROPERTIES_LOCATION,
             exp.UniqueKeyProperty: exp.Properties.Location.POST_SCHEMA,
             exp.PartitionByRangeProperty: exp.Properties.Location.POST_SCHEMA,
+            exp.PartitionedByProperty: exp.Properties.Location.POST_SCHEMA,
         }
 
         CAST_MAPPING = {}

@@ -137,6 +160,7 @@ class Doris(MySQL):
             exp.ArrayAgg: rename_func("COLLECT_LIST"),
             exp.ArrayToString: rename_func("ARRAY_JOIN"),
             exp.ArrayUniqueAgg: rename_func("COLLECT_SET"),
+            exp.CurrentDate: lambda self, _: self.func("CURRENT_DATE"),
             exp.CurrentTimestamp: lambda self, _: self.func("NOW"),
             exp.DateTrunc: lambda self, e: self.func("DATE_TRUNC", e.this, unit_to_str(e)),
             exp.GroupConcat: lambda self, e: self.func(
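A hedged sketch (not from the diff) of the CURRENT_DATE change: exp.CurrentDate is now rendered through self.func(...), so Doris output uses the parenthesized call form.

```python
import sqlglot

# Expected to render CURRENT_DATE as a function call, e.g. CURRENT_DATE().
print(sqlglot.transpile("SELECT CURRENT_DATE", read="mysql", write="doris")[0])
```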
@@ -683,3 +707,20 @@ class Doris(MySQL):
             # Handle both static and dynamic partition definitions
             create_sql = ", ".join(self.sql(e) for e in create_expressions)
             return f"PARTITION BY RANGE ({partition_expressions}) ({create_sql})"
+
+        def partitionedbyproperty_sql(self, expression: exp.PartitionedByProperty) -> str:
+            node = expression.this
+            if isinstance(node, exp.Schema):
+                parts = ", ".join(self.sql(e) for e in node.expressions)
+                return f"PARTITION BY ({parts})"
+            return f"PARTITION BY ({self.sql(node)})"
+
+        def table_sql(self, expression: exp.Table, sep: str = " AS ") -> str:
+            """Override table_sql to avoid AS keyword in UPDATE and DELETE statements."""
+            ancestor = expression.find_ancestor(exp.Update, exp.Delete, exp.Select)
+            if not isinstance(ancestor, exp.Select):
+                sep = " "
+            return super().table_sql(expression, sep=sep)
+
+        def alterrename_sql(self, expression: exp.AlterRename, include_to: bool = True) -> str:
+            return super().alterrename_sql(expression, include_to=False)
sqlglot/dialects/dremio.py

@@ -42,30 +42,51 @@ class Dremio(Dialect):
     TIME_MAPPING = {
         # year
         "YYYY": "%Y",
+        "yyyy": "%Y",
         "YY": "%y",
+        "yy": "%y",
         # month / day
         "MM": "%m",
+        "mm": "%m",
         "MON": "%b",
+        "mon": "%b",
         "MONTH": "%B",
+        "month": "%B",
         "DDD": "%j",
+        "ddd": "%j",
         "DD": "%d",
+        "dd": "%d",
         "DY": "%a",
+        "dy": "%a",
         "DAY": "%A",
+        "day": "%A",
         # hours / minutes / seconds
         "HH24": "%H",
+        "hh24": "%H",
         "HH12": "%I",
-
+        "hh12": "%I",
+        "HH": "%I",
+        "hh": "%I",  # 24- / 12-hour
         "MI": "%M",
+        "mi": "%M",
         "SS": "%S",
+        "ss": "%S",
         "FFF": "%f",
+        "fff": "%f",
         "AMPM": "%p",
+        "ampm": "%p",
         # ISO week / century etc.
         "WW": "%W",
+        "ww": "%W",
         "D": "%w",
+        "d": "%w",
         "CC": "%C",
+        "cc": "%C",
         # timezone
-        "TZD": "%Z",
-
+        "TZD": "%Z",
+        "tzd": "%Z",  # abbreviation (UTC, PST, ...)
+        "TZO": "%z",
+        "tzo": "%z",  # numeric offset (+0200)
     }
 
     class Parser(parser.Parser):
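A tiny sketch (illustrative) confirming the intent of the hunk above: lowercase Dremio format tokens map to the same strftime codes as their uppercase counterparts.

```python
from sqlglot.dialects.dremio import Dremio

print(Dremio.TIME_MAPPING["YYYY"], Dremio.TIME_MAPPING["yyyy"])  # both %Y
print(Dremio.TIME_MAPPING["hh24"], Dremio.TIME_MAPPING["mi"])    # %H and %M
```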
sqlglot/dialects/duckdb.py

@@ -396,6 +396,7 @@ class DuckDB(Dialect):
 
         FUNCTIONS = {
             **parser.Parser.FUNCTIONS,
+            "ANY_VALUE": lambda args: exp.IgnoreNulls(this=exp.AnyValue.from_arg_list(args)),
             "ARRAY_REVERSE_SORT": _build_sort_array_desc,
             "ARRAY_SORT": exp.SortArray.from_arg_list,
             "DATEDIFF": _build_date_diff,

@@ -920,6 +921,7 @@ class DuckDB(Dialect):
         PROPERTIES_LOCATION[exp.LikeProperty] = exp.Properties.Location.POST_SCHEMA
         PROPERTIES_LOCATION[exp.TemporaryProperty] = exp.Properties.Location.POST_CREATE
         PROPERTIES_LOCATION[exp.ReturnsProperty] = exp.Properties.Location.POST_ALIAS
+        PROPERTIES_LOCATION[exp.SequenceProperties] = exp.Properties.Location.POST_EXPRESSION
 
         IGNORE_RESPECT_NULLS_WINDOW_FUNCTIONS = (
             exp.FirstValue,
@@ -1136,9 +1138,10 @@ class DuckDB(Dialect):
 
             # If BQ's UNNEST is aliased, we transform it from a column alias to a table alias in DDB
             alias = expression.args.get("alias")
-            if alias:
+            if isinstance(alias, exp.TableAlias):
                 expression.set("alias", None)
-
+
+                if alias.columns:
+                    alias = exp.TableAlias(this=seq_get(alias.columns, 0))
 
             unnest_sql = super().unnest_sql(expression)
             select = exp.Select(expressions=[unnest_sql]).subquery(alias)

@@ -1152,7 +1155,9 @@ class DuckDB(Dialect):
                 # window functions that accept it e.g. FIRST_VALUE(... IGNORE NULLS) OVER (...)
                 return super().ignorenulls_sql(expression)
 
-
+            if not isinstance(expression.this, exp.AnyValue):
+                self.unsupported("IGNORE NULLS is not supported for non-window functions.")
+
             return self.sql(expression, "this")
 
         def respectnulls_sql(self, expression: exp.RespectNulls) -> str:
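A minimal sketch (illustrative, not from the diff) of the ANY_VALUE change: DuckDB's ANY_VALUE skips NULLs, so it is now parsed as IGNORE NULLS over AnyValue, and the DuckDB generator renders it back without an "unsupported" warning.

```python
import sqlglot
from sqlglot import exp

expr = sqlglot.parse_one("SELECT ANY_VALUE(x) FROM t", read="duckdb")
print(isinstance(expr.selects[0], exp.IgnoreNulls))  # expected: True
print(expr.sql(dialect="duckdb"))
```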
@@ -1247,3 +1252,27 @@ class DuckDB(Dialect):
                 return self.sql(exp.Subquery(this=exp.Select(expressions=[posexplode_sql])))
 
             return posexplode_sql
+
+        def addmonths_sql(self, expression: exp.AddMonths) -> str:
+            this = expression.this
+
+            if not this.type:
+                from sqlglot.optimizer.annotate_types import annotate_types
+
+                this = annotate_types(this, dialect=self.dialect)
+
+            if this.is_type(*exp.DataType.TEXT_TYPES):
+                this = exp.Cast(this=this, to=exp.DataType(this=exp.DataType.Type.TIMESTAMP))
+
+            func = self.func(
+                "DATE_ADD", this, exp.Interval(this=expression.expression, unit=exp.var("MONTH"))
+            )
+
+            # DuckDB's DATE_ADD function returns TIMESTAMP/DATETIME by default, even when the input is DATE
+            # To match for example Snowflake's ADD_MONTHS behavior (which preserves the input type)
+            # We need to cast the result back to the original type when the input is DATE or TIMESTAMPTZ
+            # Example: ADD_MONTHS('2023-01-31'::date, 1) should return DATE, not TIMESTAMP
+            if this.is_type(exp.DataType.Type.DATE, exp.DataType.Type.TIMESTAMPTZ):
+                return self.sql(exp.Cast(this=func, to=this.type))
+
+            return self.sql(func)
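A hedged sketch (not from the diff) of the new addmonths_sql behavior: Snowflake's ADD_MONTHS preserves the input type, so the DuckDB output adds a MONTH interval and casts back to DATE when the argument is a DATE. The input query is an assumption and the exact rendered string may vary.

```python
import sqlglot

sql = "SELECT ADD_MONTHS(CAST('2023-01-31' AS DATE), 1)"
print(sqlglot.transpile(sql, read="snowflake", write="duckdb")[0])
```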
sqlglot/dialects/exasol.py

@@ -3,7 +3,6 @@ from __future__ import annotations
 import typing as t
 
 from sqlglot import exp, generator, parser, tokens
-from sqlglot.dialects.clickhouse import timestamptrunc_sql
 from sqlglot.dialects.dialect import (
     Dialect,
     binary_from_function,

@@ -12,6 +11,8 @@ from sqlglot.dialects.dialect import (
     strposition_sql,
     timestrtotime_sql,
     unit_to_str,
+    timestamptrunc_sql,
+    build_date_delta,
 )
 from sqlglot.generator import unsupported_args
 from sqlglot.helper import seq_get
@@ -27,6 +28,16 @@ def _sha2_sql(self: Exasol.Generator, expression: exp.SHA2) -> str:
     return self.func(func_name, expression.this)
 
 
+def _date_diff_sql(self: Exasol.Generator, expression: exp.DateDiff | exp.TsOrDsDiff) -> str:
+    unit = expression.text("unit").upper() or "DAY"
+
+    if unit not in DATE_UNITS:
+        self.unsupported(f"'{unit}' is not supported in Exasol.")
+        return self.function_fallback_sql(expression)
+
+    return self.func(f"{unit}S_BETWEEN", expression.this, expression.expression)
+
+
 # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/trunc%5Bate%5D%20(datetime).htm
 # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/trunc%5Bate%5D%20(number).htm
 def _build_trunc(args: t.List[exp.Expression], dialect: DialectType) -> exp.Expression:
@@ -46,6 +57,21 @@ def _build_trunc(args: t.List[exp.Expression], dialect: DialectType) -> exp.Expression:
     return exp.Anonymous(this="TRUNC", expressions=args)
 
 
+# https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/zeroifnull.htm
+def _build_zeroifnull(args: t.List) -> exp.If:
+    cond = exp.Is(this=seq_get(args, 0), expression=exp.Null())
+    return exp.If(this=cond, true=exp.Literal.number(0), false=seq_get(args, 0))
+
+
+# https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/nullifzero.htm
+def _build_nullifzero(args: t.List) -> exp.If:
+    cond = exp.EQ(this=seq_get(args, 0), expression=exp.Literal.number(0))
+    return exp.If(this=cond, true=exp.Null(), false=seq_get(args, 0))
+
+
+DATE_UNITS = {"DAY", "WEEK", "MONTH", "YEAR", "HOUR", "MINUTE", "SECOND"}
+
+
 class Exasol(Dialect):
     TIME_MAPPING = {
         "yyyy": "%Y",
@@ -79,11 +105,22 @@ class Exasol(Dialect):
     KEYWORDS = {
         **tokens.Tokenizer.KEYWORDS,
         "USER": TokenType.CURRENT_USER,
+        # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/if.htm
+        "ENDIF": TokenType.END,
+        "LONG VARCHAR": TokenType.TEXT,
     }
 
     class Parser(parser.Parser):
         FUNCTIONS = {
             **parser.Parser.FUNCTIONS,
+            **{
+                f"ADD_{unit}S": build_date_delta(exp.DateAdd, default_unit=unit)
+                for unit in DATE_UNITS
+            },
+            **{
+                f"{unit}S_BETWEEN": build_date_delta(exp.DateDiff, default_unit=unit)
+                for unit in DATE_UNITS
+            },
             "BIT_AND": binary_from_function(exp.BitwiseAnd),
             "BIT_OR": binary_from_function(exp.BitwiseOr),
             "BIT_XOR": binary_from_function(exp.BitwiseXor),
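A small sketch (illustrative, not from the diff) of the new Exasol parser mappings: ADD_&lt;unit&gt;S and &lt;unit&gt;S_BETWEEN are registered for every supported unit and parse into the generic DateAdd / DateDiff expressions. Identifiers are assumptions.

```python
import sqlglot
from sqlglot import exp

expr = sqlglot.parse_one("SELECT ADD_DAYS(d, 3), DAYS_BETWEEN(a, b) FROM t", read="exasol")
print(expr.find(exp.DateAdd).args.get("unit"))
print(expr.find(exp.DateDiff).args.get("unit"))
```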
@@ -127,6 +164,8 @@ class Exasol(Dialect):
                 timestamp=seq_get(args, 0),
                 options=seq_get(args, 3),
             ),
+            "NULLIFZERO": _build_nullifzero,
+            "ZEROIFNULL": _build_zeroifnull,
         }
         CONSTRAINT_PARSERS = {
             **parser.Parser.CONSTRAINT_PARSERS,

@@ -146,7 +185,8 @@ class Exasol(Dialect):
             exp.DataType.Type.MEDIUMTEXT: "VARCHAR",
             exp.DataType.Type.TINYBLOB: "VARCHAR",
             exp.DataType.Type.TINYTEXT: "VARCHAR",
-
+            # https://docs.exasol.com/db/latest/sql_references/data_types/datatypealiases.htm
+            exp.DataType.Type.TEXT: "LONG VARCHAR",
             exp.DataType.Type.VARBINARY: "VARCHAR",
         }
@@ -187,8 +227,8 @@ class Exasol(Dialect):
             exp.BitwiseRightShift: rename_func("BIT_RSHIFT"),
             # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/bit_xor.htm
             exp.BitwiseXor: rename_func("BIT_XOR"),
-
-            exp.
+            exp.DateDiff: _date_diff_sql,
+            exp.TsOrDsDiff: _date_diff_sql,
             exp.DateTrunc: lambda self, e: self.func("TRUNC", e.this, unit_to_str(e)),
             exp.DatetimeTrunc: timestamptrunc_sql(),
             # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/edit_distance.htm#EDIT_DISTANCE
@@ -249,3 +289,17 @@ class Exasol(Dialect):
             options = expression.args.get("options")
 
             return self.func("CONVERT_TZ", datetime, from_tz, to_tz, options)
+
+        def if_sql(self, expression: exp.If) -> str:
+            this = self.sql(expression, "this")
+            true = self.sql(expression, "true")
+            false = self.sql(expression, "false")
+            return f"IF {this} THEN {true} ELSE {false} ENDIF"
+
+        def dateadd_sql(self, expression: exp.DateAdd) -> str:
+            unit = expression.text("unit").upper() or "DAY"
+            if unit not in DATE_UNITS:
+                self.unsupported(f"'{unit}' is not supported in Exasol.")
+                return self.function_fallback_sql(expression)
+
+            return self.func(f"ADD_{unit}S", expression.this, expression.expression)