sqlglot 26.30.0__tar.gz → 26.31.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (223)
  1. {sqlglot-26.30.0 → sqlglot-26.31.0}/CHANGELOG.md +34 -0
  2. {sqlglot-26.30.0 → sqlglot-26.31.0}/PKG-INFO +2 -2
  3. {sqlglot-26.30.0 → sqlglot-26.31.0}/README.md +1 -1
  4. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/_version.py +2 -2
  5. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/__init__.py +1 -0
  6. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/bigquery.py +6 -4
  7. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/databricks.py +2 -0
  8. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/dialect.py +15 -9
  9. sqlglot-26.31.0/sqlglot/dialects/exasol.py +46 -0
  10. sqlglot-26.31.0/sqlglot/dialects/fabric.py +115 -0
  11. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/presto.py +2 -0
  12. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/redshift.py +10 -2
  13. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/snowflake.py +3 -1
  14. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/tsql.py +7 -5
  15. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/expressions.py +9 -2
  16. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/generator.py +3 -3
  17. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/scope.py +13 -3
  18. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/parser.py +3 -2
  19. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/transforms.py +15 -1
  20. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.egg-info/PKG-INFO +2 -2
  21. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.egg-info/SOURCES.txt +2 -0
  22. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_bigquery.py +14 -0
  23. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_databricks.py +3 -0
  24. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_dialect.py +3 -1
  25. sqlglot-26.31.0/tests/dialects/test_exasol.py +70 -0
  26. sqlglot-26.31.0/tests/dialects/test_fabric.py +65 -0
  27. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_mysql.py +8 -0
  28. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_oracle.py +2 -2
  29. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_presto.py +31 -0
  30. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_snowflake.py +61 -40
  31. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_tsql.py +15 -9
  32. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/identity.sql +3 -1
  33. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/annotate_functions.sql +9 -0
  34. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/pretty.sql +6 -0
  35. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_optimizer.py +4 -0
  36. sqlglot-26.30.0/sqlglot/dialects/fabric.py +0 -88
  37. sqlglot-26.30.0/tests/dialects/test_fabric.py +0 -64
  38. {sqlglot-26.30.0 → sqlglot-26.31.0}/.gitignore +0 -0
  39. {sqlglot-26.30.0 → sqlglot-26.31.0}/.gitpod.yml +0 -0
  40. {sqlglot-26.30.0 → sqlglot-26.31.0}/.pre-commit-config.yaml +0 -0
  41. {sqlglot-26.30.0 → sqlglot-26.31.0}/CONTRIBUTING.md +0 -0
  42. {sqlglot-26.30.0 → sqlglot-26.31.0}/LICENSE +0 -0
  43. {sqlglot-26.30.0 → sqlglot-26.31.0}/MANIFEST.in +0 -0
  44. {sqlglot-26.30.0 → sqlglot-26.31.0}/Makefile +0 -0
  45. {sqlglot-26.30.0 → sqlglot-26.31.0}/pyproject.toml +0 -0
  46. {sqlglot-26.30.0 → sqlglot-26.31.0}/setup.cfg +0 -0
  47. {sqlglot-26.30.0 → sqlglot-26.31.0}/setup.py +0 -0
  48. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/__init__.py +0 -0
  49. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/__main__.py +0 -0
  50. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/_typing.py +0 -0
  51. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/athena.py +0 -0
  52. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/clickhouse.py +0 -0
  53. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/doris.py +0 -0
  54. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/drill.py +0 -0
  55. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/druid.py +0 -0
  56. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/duckdb.py +0 -0
  57. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/dune.py +0 -0
  58. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/hive.py +0 -0
  59. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/materialize.py +0 -0
  60. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/mysql.py +0 -0
  61. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/oracle.py +0 -0
  62. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/postgres.py +0 -0
  63. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/prql.py +0 -0
  64. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/risingwave.py +0 -0
  65. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/spark.py +0 -0
  66. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/spark2.py +0 -0
  67. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/sqlite.py +0 -0
  68. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/starrocks.py +0 -0
  69. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/tableau.py +0 -0
  70. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/teradata.py +0 -0
  71. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/dialects/trino.py +0 -0
  72. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/diff.py +0 -0
  73. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/errors.py +0 -0
  74. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/executor/__init__.py +0 -0
  75. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/executor/context.py +0 -0
  76. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/executor/env.py +0 -0
  77. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/executor/python.py +0 -0
  78. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/executor/table.py +0 -0
  79. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/helper.py +0 -0
  80. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/jsonpath.py +0 -0
  81. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/lineage.py +0 -0
  82. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/__init__.py +0 -0
  83. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/annotate_types.py +0 -0
  84. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/canonicalize.py +0 -0
  85. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_ctes.py +0 -0
  86. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_joins.py +0 -0
  87. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/eliminate_subqueries.py +0 -0
  88. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/isolate_table_selects.py +0 -0
  89. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/merge_subqueries.py +0 -0
  90. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/normalize.py +0 -0
  91. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/normalize_identifiers.py +0 -0
  92. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/optimize_joins.py +0 -0
  93. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/optimizer.py +0 -0
  94. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/pushdown_predicates.py +0 -0
  95. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/pushdown_projections.py +0 -0
  96. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify.py +0 -0
  97. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify_columns.py +0 -0
  98. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/qualify_tables.py +0 -0
  99. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/simplify.py +0 -0
  100. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/optimizer/unnest_subqueries.py +0 -0
  101. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/planner.py +0 -0
  102. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/py.typed +0 -0
  103. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/schema.py +0 -0
  104. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/serde.py +0 -0
  105. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/time.py +0 -0
  106. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/tokens.py +0 -0
  107. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot/trie.py +0 -0
  108. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.egg-info/dependency_links.txt +0 -0
  109. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.egg-info/requires.txt +0 -0
  110. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.egg-info/top_level.txt +0 -0
  111. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglot.png +0 -0
  112. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/Cargo.lock +0 -0
  113. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/Cargo.toml +0 -0
  114. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/benches/dialect_settings.json +0 -0
  115. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/benches/long.rs +0 -0
  116. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/benches/token_type_settings.json +0 -0
  117. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/benches/tokenizer_dialect_settings.json +0 -0
  118. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/benches/tokenizer_settings.json +0 -0
  119. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/pyproject.toml +0 -0
  120. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/src/lib.rs +0 -0
  121. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/src/settings.rs +0 -0
  122. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/src/token.rs +0 -0
  123. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/src/tokenizer.rs +0 -0
  124. {sqlglot-26.30.0 → sqlglot-26.31.0}/sqlglotrs/src/trie.rs +0 -0
  125. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/__init__.py +0 -0
  126. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/__init__.py +0 -0
  127. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_athena.py +0 -0
  128. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_clickhouse.py +0 -0
  129. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_doris.py +0 -0
  130. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_drill.py +0 -0
  131. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_druid.py +0 -0
  132. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_duckdb.py +0 -0
  133. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_dune.py +0 -0
  134. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_hive.py +0 -0
  135. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_materialize.py +0 -0
  136. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_pipe_syntax.py +0 -0
  137. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_postgres.py +0 -0
  138. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_prql.py +0 -0
  139. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_redshift.py +0 -0
  140. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_risingwave.py +0 -0
  141. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_spark.py +0 -0
  142. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_sqlite.py +0 -0
  143. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_starrocks.py +0 -0
  144. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_tableau.py +0 -0
  145. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_teradata.py +0 -0
  146. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/dialects/test_trino.py +0 -0
  147. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/jsonpath/LICENSE +0 -0
  148. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/jsonpath/cts.json +0 -0
  149. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/annotate_types.sql +0 -0
  150. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/canonicalize.sql +0 -0
  151. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_ctes.sql +0 -0
  152. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_joins.sql +0 -0
  153. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/eliminate_subqueries.sql +0 -0
  154. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/isolate_table_selects.sql +0 -0
  155. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/merge_subqueries.sql +0 -0
  156. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/normalize.sql +0 -0
  157. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/normalize_identifiers.sql +0 -0
  158. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/optimize_joins.sql +0 -0
  159. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/optimizer.sql +0 -0
  160. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_cte_alias_columns.sql +0 -0
  161. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_predicates.sql +0 -0
  162. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/pushdown_projections.sql +0 -0
  163. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns.sql +0 -0
  164. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns__invalid.sql +0 -0
  165. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns__with_invisible.sql +0 -0
  166. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_columns_ddl.sql +0 -0
  167. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/qualify_tables.sql +0 -0
  168. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/quote_identifiers.sql +0 -0
  169. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/simplify.sql +0 -0
  170. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/call_center.csv.gz +0 -0
  171. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_page.csv.gz +0 -0
  172. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_returns.csv.gz +0 -0
  173. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/catalog_sales.csv.gz +0 -0
  174. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer.csv.gz +0 -0
  175. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer_address.csv.gz +0 -0
  176. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/customer_demographics.csv.gz +0 -0
  177. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/date_dim.csv.gz +0 -0
  178. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/household_demographics.csv.gz +0 -0
  179. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/income_band.csv.gz +0 -0
  180. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/inventory.csv.gz +0 -0
  181. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/item.csv.gz +0 -0
  182. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/promotion.csv.gz +0 -0
  183. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/reason.csv.gz +0 -0
  184. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/ship_mode.csv.gz +0 -0
  185. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store.csv.gz +0 -0
  186. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store_returns.csv.gz +0 -0
  187. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/store_sales.csv.gz +0 -0
  188. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/time_dim.csv.gz +0 -0
  189. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/tpc-ds.sql +0 -0
  190. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/warehouse.csv.gz +0 -0
  191. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_page.csv.gz +0 -0
  192. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_returns.csv.gz +0 -0
  193. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_sales.csv.gz +0 -0
  194. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-ds/web_site.csv.gz +0 -0
  195. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/customer.csv.gz +0 -0
  196. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/lineitem.csv.gz +0 -0
  197. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/nation.csv.gz +0 -0
  198. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/orders.csv.gz +0 -0
  199. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/part.csv.gz +0 -0
  200. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/partsupp.csv.gz +0 -0
  201. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/region.csv.gz +0 -0
  202. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/supplier.csv.gz +0 -0
  203. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/tpc-h/tpc-h.sql +0 -0
  204. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/optimizer/unnest_subqueries.sql +0 -0
  205. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/fixtures/partial.sql +0 -0
  206. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/gen_fixtures.py +0 -0
  207. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/helpers.py +0 -0
  208. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_build.py +0 -0
  209. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_diff.py +0 -0
  210. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_docs.py +0 -0
  211. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_executor.py +0 -0
  212. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_expressions.py +0 -0
  213. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_generator.py +0 -0
  214. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_helper.py +0 -0
  215. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_jsonpath.py +0 -0
  216. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_lineage.py +0 -0
  217. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_parser.py +0 -0
  218. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_schema.py +0 -0
  219. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_serde.py +0 -0
  220. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_time.py +0 -0
  221. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_tokens.py +0 -0
  222. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_transforms.py +0 -0
  223. {sqlglot-26.30.0 → sqlglot-26.31.0}/tests/test_transpile.py +0 -0
@@ -1,6 +1,39 @@
  Changelog
  =========

+ ## [v26.30.0] - 2025-06-21
+ ### :boom: BREAKING CHANGES
+ - due to [`d3dc761`](https://github.com/tobymao/sqlglot/commit/d3dc761393146357a5d20c4d7992fd2a1ae5e6e2) - change comma to cross join when precedence is the same for all join types *(PR [#5240](https://github.com/tobymao/sqlglot/pull/5240) by [@georgesittas](https://github.com/georgesittas))*:
+
+ change comma to cross join when precedence is the same for all join types (#5240)
+
+ - due to [`e7c217e`](https://github.com/tobymao/sqlglot/commit/e7c217ef08e5811e7dad2b3d26dbaa9f02114e38) - transpile from/to dbms_random.value *(PR [#5242](https://github.com/tobymao/sqlglot/pull/5242) by [@georgesittas](https://github.com/georgesittas))*:
+
+ transpile from/to dbms_random.value (#5242)
+
+ - due to [`31814cd`](https://github.com/tobymao/sqlglot/commit/31814cddb0cf65caf29fbc45a31a9c865b7991c7) - cast constructed timestamp literal to zone-aware type if needed *(PR [#5253](https://github.com/tobymao/sqlglot/pull/5253) by [@georgesittas](https://github.com/georgesittas))*:
+
+ cast constructed timestamp literal to zone-aware type if needed (#5253)
+
+
+ ### :sparkles: New Features
+ - [`e7c217e`](https://github.com/tobymao/sqlglot/commit/e7c217ef08e5811e7dad2b3d26dbaa9f02114e38) - **oracle**: transpile from/to dbms_random.value *(PR [#5242](https://github.com/tobymao/sqlglot/pull/5242) by [@georgesittas](https://github.com/georgesittas))*
+ - :arrow_lower_right: *addresses issue [#5241](https://github.com/tobymao/sqlglot/issues/5241) opened by [@Akshat-2512](https://github.com/Akshat-2512)*
+ - [`0d19544`](https://github.com/tobymao/sqlglot/commit/0d19544317c1056b17fb089d4be9b5bddfe6feb3) - add Microsoft Fabric dialect, a case sensitive version of TSQL *(PR [#5247](https://github.com/tobymao/sqlglot/pull/5247) by [@mattiasthalen](https://github.com/mattiasthalen))*
+ - [`249dbc9`](https://github.com/tobymao/sqlglot/commit/249dbc906adc6b20932dc8efe83f6f4d23ef8c1e) - **parser**: start with SELECT and nested pipe syntax *(PR [#5248](https://github.com/tobymao/sqlglot/pull/5248) by [@geooo109](https://github.com/geooo109))*
+ - [`f5b5b93`](https://github.com/tobymao/sqlglot/commit/f5b5b9338eb92b7aa2c9b4c92c6138c2c05e1c40) - **fabric**: implement type mappings for unsupported Fabric types *(PR [#5249](https://github.com/tobymao/sqlglot/pull/5249) by [@mattiasthalen](https://github.com/mattiasthalen))*
+ - [`78fcea1`](https://github.com/tobymao/sqlglot/commit/78fcea13b5eb1734a15a254875bc80ad8063b0b0) - **spark, databricks**: parse brackets as placeholder *(PR [#5256](https://github.com/tobymao/sqlglot/pull/5256) by [@geooo109](https://github.com/geooo109))*
+ - :arrow_lower_right: *addresses issue [#5251](https://github.com/tobymao/sqlglot/issues/5251) opened by [@aersam](https://github.com/aersam)*
+ - [`7d71387`](https://github.com/tobymao/sqlglot/commit/7d7138780db82e7a75949d29282b944e739ad99d) - **fabric**: Add precision cap to temporal data types *(PR [#5250](https://github.com/tobymao/sqlglot/pull/5250) by [@mattiasthalen](https://github.com/mattiasthalen))*
+ - [`e8cf793`](https://github.com/tobymao/sqlglot/commit/e8cf79305d398f25640ef3c07dd8b32997cb0167) - **duckdb**: Transpile Snowflake's TO_CHAR if format is in Snowflake.TIME_MAPPING *(PR [#5257](https://github.com/tobymao/sqlglot/pull/5257) by [@VaggelisD](https://github.com/VaggelisD))*
+ - :arrow_lower_right: *addresses issue [#5255](https://github.com/tobymao/sqlglot/issues/5255) opened by [@kyle-cheung](https://github.com/kyle-cheung)*
+
+ ### :bug: Bug Fixes
+ - [`d3dc761`](https://github.com/tobymao/sqlglot/commit/d3dc761393146357a5d20c4d7992fd2a1ae5e6e2) - change comma to cross join when precedence is the same for all join types *(PR [#5240](https://github.com/tobymao/sqlglot/pull/5240) by [@georgesittas](https://github.com/georgesittas))*
+ - [`31814cd`](https://github.com/tobymao/sqlglot/commit/31814cddb0cf65caf29fbc45a31a9c865b7991c7) - **presto**: cast constructed timestamp literal to zone-aware type if needed *(PR [#5253](https://github.com/tobymao/sqlglot/pull/5253) by [@georgesittas](https://github.com/georgesittas))*
+ - :arrow_lower_right: *fixes issue [#5252](https://github.com/tobymao/sqlglot/issues/5252) opened by [@agni-sairent](https://github.com/agni-sairent)*
+
+
  ## [v26.29.0] - 2025-06-17
  ### :boom: BREAKING CHANGES
  - due to [`4f42d95`](https://github.com/tobymao/sqlglot/commit/4f42d951363f8c43a4c414dc21d0505d9c8e48bf) - Normalize date parts in `exp.Extract` generation *(PR [#5229](https://github.com/tobymao/sqlglot/pull/5229) by [@VaggelisD](https://github.com/VaggelisD))*:
@@ -4937,3 +4970,4 @@ Changelog
  [v26.27.0]: https://github.com/tobymao/sqlglot/compare/v26.26.0...v26.27.0
  [v26.28.1]: https://github.com/tobymao/sqlglot/compare/v26.27.1...v26.28.1
  [v26.29.0]: https://github.com/tobymao/sqlglot/compare/v26.28.1...v26.29.0
+ [v26.30.0]: https://github.com/tobymao/sqlglot/compare/v26.29.0...v26.30.0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 26.30.0
+ Version: 26.31.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License: MIT License
@@ -61,7 +61,7 @@ Dynamic: provides-extra

  ![SQLGlot logo](sqlglot.png)

- SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [27 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
+ SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [29 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.

  It is a very comprehensive generic SQL parser with a robust [test suite](https://github.com/tobymao/sqlglot/blob/main/tests/). It is also quite [performant](#benchmarks), while being written purely in Python.

@@ -1,6 +1,6 @@
  ![SQLGlot logo](sqlglot.png)

- SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [27 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
+ SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [29 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.

  It is a very comprehensive generic SQL parser with a robust [test suite](https://github.com/tobymao/sqlglot/blob/main/tests/). It is also quite [performant](#benchmarks), while being written purely in Python.

@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '26.30.0'
- __version_tuple__ = version_tuple = (26, 30, 0)
+ __version__ = version = '26.31.0'
+ __version_tuple__ = version_tuple = (26, 31, 0)
@@ -93,6 +93,7 @@ DIALECTS = [
  "Teradata",
  "Trino",
  "TSQL",
+ "Exasol",
  ]

  MODULE_BY_DIALECT = {name: name.lower() for name in DIALECTS}
@@ -543,7 +543,7 @@ class BigQuery(Dialect):
  "DATE_ADD": build_date_delta_with_interval(exp.DateAdd),
  "DATE_SUB": build_date_delta_with_interval(exp.DateSub),
  "DATE_TRUNC": lambda args: exp.DateTrunc(
- unit=exp.Literal.string(str(seq_get(args, 1))),
+ unit=seq_get(args, 1),
  this=seq_get(args, 0),
  zone=seq_get(args, 2),
  ),
@@ -963,9 +963,6 @@
  exp.DateSub: date_add_interval_sql("DATE", "SUB"),
  exp.DatetimeAdd: date_add_interval_sql("DATETIME", "ADD"),
  exp.DatetimeSub: date_add_interval_sql("DATETIME", "SUB"),
- exp.DateTrunc: lambda self, e: self.func(
- "DATE_TRUNC", e.this, e.text("unit"), e.args.get("zone")
- ),
  exp.FromTimeZone: lambda self, e: self.func(
  "DATETIME", self.func("TIMESTAMP", e.this, e.args.get("zone")), "'UTC'"
  ),
@@ -1195,6 +1192,11 @@ class BigQuery(Dialect):
  "within",
  }

+ def datetrunc_sql(self, expression: exp.DateTrunc) -> str:
+ unit = expression.unit
+ unit_sql = unit.name if unit.is_string else self.sql(unit)
+ return self.func("DATE_TRUNC", expression.this, unit_sql, expression.args.get("zone"))
+
  def mod_sql(self, expression: exp.Mod) -> str:
  this = expression.this
  expr = expression.expression
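Editorial note (not part of the diff): the BigQuery `DATE_TRUNC` unit is now kept as the parsed expression instead of being stringified at parse time, and the new `datetrunc_sql` renders it back directly. A minimal round-trip sketch, assuming sqlglot 26.31.0 is installed; the expected output is indicative:

```python
import sqlglot

# A non-literal unit such as WEEK(MONDAY) should survive the BigQuery round trip,
# since the unit is no longer flattened into a string literal when parsing.
sql = "SELECT DATE_TRUNC(created_at, WEEK(MONDAY))"
print(sqlglot.transpile(sql, read="bigquery", write="bigquery")[0])
# Expected: SELECT DATE_TRUNC(created_at, WEEK(MONDAY))
```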
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
  build_date_delta,
  timestamptrunc_sql,
  build_formatted_time,
+ groupconcat_sql,
  )
  from sqlglot.dialects.spark import Spark
  from sqlglot.tokens import TokenType
@@ -87,6 +88,7 @@ class Databricks(Spark):
  e.this,
  ),
  exp.DatetimeTrunc: timestamptrunc_sql(),
+ exp.GroupConcat: groupconcat_sql,
  exp.Select: transforms.preprocess(
  [
  transforms.eliminate_distinct_on,
@@ -96,6 +96,7 @@ class Dialects(str, Enum):
  TERADATA = "teradata"
  TRINO = "trino"
  TSQL = "tsql"
+ EXASOL = "exasol"


  class NormalizationStrategy(str, AutoName):
@@ -700,6 +701,9 @@ class Dialect(metaclass=_Dialect):
  exp.TimeAdd,
  exp.TimeSub,
  },
+ exp.DataType.Type.TIMESTAMPTZ: {
+ exp.CurrentTimestampLTZ,
+ },
  exp.DataType.Type.TIMESTAMP: {
  exp.CurrentTimestamp,
  exp.StrToTime,
@@ -1906,21 +1910,23 @@ def groupconcat_sql(


  def build_timetostr_or_tochar(args: t.List, dialect: Dialect) -> exp.TimeToStr | exp.ToChar:
- this = seq_get(args, 0)
- format = seq_get(args, 1)
-
- if this:
+ if len(args) == 2:
+ this = args[0]
  if not this.type:
  from sqlglot.optimizer.annotate_types import annotate_types

  annotate_types(this, dialect=dialect)

- from sqlglot.dialects import Snowflake
-
- if this.is_type(*exp.DataType.TEMPORAL_TYPES) or (
- isinstance(format, exp.Literal) and format.name in Snowflake.TIME_MAPPING
- ):
+ if this.is_type(*exp.DataType.TEMPORAL_TYPES):
  dialect_name = dialect.__class__.__name__.lower()
  return build_formatted_time(exp.TimeToStr, dialect_name, default=True)(args)

  return exp.ToChar.from_arg_list(args)
+
+
+ def build_replace_with_optional_replacement(args: t.List) -> exp.Replace:
+ return exp.Replace(
+ this=seq_get(args, 0),
+ expression=seq_get(args, 1),
+ replacement=seq_get(args, 2) or exp.Literal.string(""),
+ )
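Editorial note (not part of the diff): `build_replace_with_optional_replacement` lets dialects that accept a two-argument `REPLACE` (Snowflake, Presto) parse it into the new `exp.Replace` node, defaulting the omitted replacement to an empty string. A hedged sketch of how that could be exercised; the printed output is indicative:

```python
import sqlglot
from sqlglot import exp

# Two-argument REPLACE parses into exp.Replace; the missing third argument is
# filled with an empty-string literal by build_replace_with_optional_replacement.
node = sqlglot.parse_one("REPLACE(col, 'abc')", read="snowflake")
assert isinstance(node, exp.Replace)
assert node.args["replacement"].this == ""  # implicit '' replacement
print(node.sql(dialect="duckdb"))  # likely REPLACE(col, 'abc', '')
```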
@@ -0,0 +1,46 @@
+ from __future__ import annotations
+ from sqlglot import exp, generator
+ from sqlglot.dialects.dialect import Dialect, rename_func
+
+
+ class Exasol(Dialect):
+     class Generator(generator.Generator):
+         # https://docs.exasol.com/db/latest/sql_references/data_types/datatypedetails.htm#StringDataType
+         STRING_TYPE_MAPPING = {
+             exp.DataType.Type.BLOB: "VARCHAR",
+             exp.DataType.Type.LONGBLOB: "VARCHAR",
+             exp.DataType.Type.LONGTEXT: "VARCHAR",
+             exp.DataType.Type.MEDIUMBLOB: "VARCHAR",
+             exp.DataType.Type.MEDIUMTEXT: "VARCHAR",
+             exp.DataType.Type.TINYBLOB: "VARCHAR",
+             exp.DataType.Type.TINYTEXT: "VARCHAR",
+             exp.DataType.Type.TEXT: "VARCHAR",
+             exp.DataType.Type.VARBINARY: "VARCHAR",
+         }
+
+         # https://docs.exasol.com/db/latest/sql_references/data_types/datatypealiases.htm
+         TYPE_MAPPING = {
+             **generator.Generator.TYPE_MAPPING,
+             **STRING_TYPE_MAPPING,
+             exp.DataType.Type.TINYINT: "SMALLINT",
+             exp.DataType.Type.MEDIUMINT: "INT",
+             exp.DataType.Type.DECIMAL32: "DECIMAL",
+             exp.DataType.Type.DECIMAL64: "DECIMAL",
+             exp.DataType.Type.DECIMAL128: "DECIMAL",
+             exp.DataType.Type.DECIMAL256: "DECIMAL",
+             exp.DataType.Type.DATETIME: "TIMESTAMP",
+         }
+
+         def datatype_sql(self, expression: exp.DataType) -> str:
+             # Exasol supports a fixed default precision of 3 for TIMESTAMP WITH LOCAL TIME ZONE
+             # and does not allow specifying a different custom precision
+             if expression.is_type(exp.DataType.Type.TIMESTAMPLTZ):
+                 return "TIMESTAMP WITH LOCAL TIME ZONE"
+
+             return super().datatype_sql(expression)
+
+         TRANSFORMS = {
+             **generator.Generator.TRANSFORMS,
+             # https://docs.exasol.com/db/latest/sql_references/functions/alphabeticallistfunctions/mod.htm
+             exp.Mod: rename_func("MOD"),
+         }
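Editorial note (not part of the diff): the new dialect is registered under the name `exasol`, so it can be used as a transpilation target directly. A small sketch; the output comment reflects the TYPE_MAPPING above and is indicative:

```python
import sqlglot

# Legacy string/blob types are rewritten to VARCHAR when generating Exasol SQL.
print(sqlglot.transpile("CAST(x AS TEXT)", read="mysql", write="exasol")[0])
# Expected: CAST(x AS VARCHAR)
```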
@@ -0,0 +1,115 @@
+ from __future__ import annotations
+
+ from sqlglot import exp
+ from sqlglot.dialects.dialect import NormalizationStrategy
+ from sqlglot.dialects.tsql import TSQL
+ from sqlglot.tokens import TokenType
+
+
+ class Fabric(TSQL):
+     """
+     Microsoft Fabric Data Warehouse dialect that inherits from T-SQL.
+
+     Microsoft Fabric is a cloud-based analytics platform that provides a unified
+     data warehouse experience. While it shares much of T-SQL's syntax, it has
+     specific differences and limitations that this dialect addresses.
+
+     Key differences from T-SQL:
+     - Case-sensitive identifiers (unlike T-SQL which is case-insensitive)
+     - Limited data type support with mappings to supported alternatives
+     - Temporal types (DATETIME2, DATETIMEOFFSET, TIME) limited to 6 digits precision
+     - Certain legacy types (MONEY, SMALLMONEY, etc.) are not supported
+     - Unicode types (NCHAR, NVARCHAR) are mapped to non-unicode equivalents
+
+     References:
+     - Data Types: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
+     - T-SQL Surface Area: https://learn.microsoft.com/en-us/fabric/data-warehouse/tsql-surface-area
+     """
+
+     # Fabric is case-sensitive unlike T-SQL which is case-insensitive
+     NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_SENSITIVE
+
+     class Tokenizer(TSQL.Tokenizer):
+         # Override T-SQL tokenizer to handle TIMESTAMP differently
+         # In T-SQL, TIMESTAMP is a synonym for ROWVERSION, but in Fabric we want it to be a datetime type
+         # Also add UTINYINT keyword mapping since T-SQL doesn't have it
+         KEYWORDS = {
+             **TSQL.Tokenizer.KEYWORDS,
+             "TIMESTAMP": TokenType.TIMESTAMP,
+             "UTINYINT": TokenType.UTINYINT,
+         }
+
+     class Generator(TSQL.Generator):
+         # Fabric-specific type mappings - override T-SQL types that aren't supported
+         # Reference: https://learn.microsoft.com/en-us/fabric/data-warehouse/data-types
+         TYPE_MAPPING = {
+             **TSQL.Generator.TYPE_MAPPING,
+             exp.DataType.Type.DATETIME: "DATETIME2",
+             exp.DataType.Type.DECIMAL: "DECIMAL",
+             exp.DataType.Type.IMAGE: "VARBINARY",
+             exp.DataType.Type.INT: "INT",
+             exp.DataType.Type.JSON: "VARCHAR",
+             exp.DataType.Type.MONEY: "DECIMAL",
+             exp.DataType.Type.NCHAR: "CHAR",
+             exp.DataType.Type.NVARCHAR: "VARCHAR",
+             exp.DataType.Type.ROWVERSION: "ROWVERSION",
+             exp.DataType.Type.SMALLDATETIME: "DATETIME2",
+             exp.DataType.Type.SMALLMONEY: "DECIMAL",
+             exp.DataType.Type.TIMESTAMP: "DATETIME2",
+             exp.DataType.Type.TIMESTAMPNTZ: "DATETIME2",
+             exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
+             exp.DataType.Type.TINYINT: "SMALLINT",
+             exp.DataType.Type.UTINYINT: "SMALLINT",
+             exp.DataType.Type.UUID: "VARBINARY(MAX)",
+             exp.DataType.Type.XML: "VARCHAR",
+         }
+
+         def datatype_sql(self, expression: exp.DataType) -> str:
+             # Check if this is a temporal type that needs precision handling. Fabric limits temporal
+             # types to max 6 digits precision. When no precision is specified, we default to 6 digits.
+             if (
+                 expression.is_type(*exp.DataType.TEMPORAL_TYPES)
+                 and expression.this != exp.DataType.Type.DATE
+             ):
+                 # Get the current precision (first expression if it exists)
+                 precision_param = expression.find(exp.DataTypeParam)
+                 target_precision = 6
+
+                 if precision_param and precision_param.this.is_int:
+                     # Cap precision at 6
+                     current_precision = precision_param.this.to_py()
+                     target_precision = min(current_precision, 6)
+                 else:
+                     # If precision exists but is not an integer, default to 6
+                     target_precision = 6
+
+                 # Create a new expression with the target precision
+                 expression = exp.DataType(
+                     this=expression.this,
+                     expressions=[exp.DataTypeParam(this=exp.Literal.number(target_precision))],
+                 )
+
+             return super().datatype_sql(expression)
+
+         def unixtotime_sql(self, expression: exp.UnixToTime) -> str:
+             scale = expression.args.get("scale")
+             timestamp = expression.this
+
+             if scale not in (None, exp.UnixToTime.SECONDS):
+                 self.unsupported(f"UnixToTime scale {scale} is not supported by Fabric")
+                 return ""
+
+             # Convert unix timestamp (seconds) to microseconds and round to avoid decimals
+             microseconds = timestamp * exp.Literal.number("1e6")
+             rounded = exp.func("round", microseconds, 0)
+             rounded_ms_as_bigint = exp.cast(rounded, exp.DataType.Type.BIGINT)
+
+             # Create the base datetime as '1970-01-01' cast to DATETIME2(6)
+             epoch_start = exp.cast("'1970-01-01'", "datetime2(6)", dialect="fabric")
+
+             dateadd = exp.DateAdd(
+                 this=epoch_start,
+                 expression=rounded_ms_as_bigint,
+                 unit=exp.Literal.string("MICROSECONDS"),
+             )
+             return self.sql(dateadd)
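Editorial note (not part of the diff): the Fabric generator caps temporal precision at 6 digits, so an over-precise T-SQL type should be narrowed when targeting Fabric. An indicative sketch:

```python
import sqlglot

# DATETIME2(7) is valid T-SQL but exceeds Fabric's limit; datatype_sql caps it at 6.
print(sqlglot.transpile("CAST(x AS DATETIME2(7))", read="tsql", write="fabric")[0])
# Expected: CAST(x AS DATETIME2(6))
```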
@@ -8,6 +8,7 @@ from sqlglot.dialects.dialect import (
  NormalizationStrategy,
  binary_from_function,
  bool_xor_sql,
+ build_replace_with_optional_replacement,
  date_trunc_to_time,
  datestrtodate_sql,
  encode_decode_sql,
@@ -360,6 +361,7 @@ class Presto(Dialect):
  expression=seq_get(args, 1),
  replacement=seq_get(args, 2) or exp.Literal.string(""),
  ),
+ "REPLACE": build_replace_with_optional_replacement,
  "ROW": exp.Struct.from_arg_list,
  "SEQUENCE": exp.GenerateSeries.from_arg_list,
  "SET_AGG": exp.ArrayUniqueAgg.from_arg_list,
@@ -213,8 +213,7 @@ class Redshift(Postgres):
  exp.TableSample: no_tablesample_sql,
  exp.TsOrDsAdd: date_delta_sql("DATEADD"),
  exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
- exp.UnixToTime: lambda self,
- e: f"(TIMESTAMP 'epoch' + {self.sql(e.this)} * INTERVAL '1 SECOND')",
+ exp.UnixToTime: lambda self, e: self._unix_to_time_sql(e),
  }

  # Postgres maps exp.Pivot to no_pivot_sql, but Redshift support pivots
@@ -447,3 +446,12 @@ class Redshift(Postgres):
  def explode_sql(self, expression: exp.Explode) -> str:
  self.unsupported("Unsupported EXPLODE() function")
  return ""
+
+ def _unix_to_time_sql(self, expression: exp.UnixToTime) -> str:
+ scale = expression.args.get("scale")
+ this = self.sql(expression.this)
+
+ if scale is not None and scale != exp.UnixToTime.SECONDS and scale.is_int:
+ this = f"({this} / POWER(10, {scale.to_py()}))"
+
+ return f"(TIMESTAMP 'epoch' + {this} * INTERVAL '1 SECOND')"
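Editorial note (not part of the diff): `_unix_to_time_sql` now divides non-second epochs by the appropriate power of ten before adding them to `TIMESTAMP 'epoch'`. A sketch built directly on the expression API; the expected string is indicative:

```python
from sqlglot import exp

# A millisecond-scale epoch column (scale=3) is divided by POWER(10, 3).
node = exp.UnixToTime(this=exp.column("ts"), scale=exp.Literal.number(3))
print(node.sql(dialect="redshift"))
# Expected: (TIMESTAMP 'epoch' + (ts / POWER(10, 3)) * INTERVAL '1 SECOND')
```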
@@ -9,6 +9,7 @@ from sqlglot.dialects.dialect import (
  build_timetostr_or_tochar,
  binary_from_function,
  build_default_decimal_type,
+ build_replace_with_optional_replacement,
  build_timestamp_from_parts,
  date_delta_sql,
  date_trunc_to_time,
@@ -484,6 +485,7 @@ class Snowflake(Dialect):
  "REGEXP_REPLACE": _build_regexp_replace,
  "REGEXP_SUBSTR": _build_regexp_extract(exp.RegexpExtract),
  "REGEXP_SUBSTR_ALL": _build_regexp_extract(exp.RegexpExtractAll),
+ "REPLACE": build_replace_with_optional_replacement,
  "RLIKE": exp.RegexpLike.from_arg_list,
  "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
  "TABLE": lambda args: exp.TableFromRows(this=seq_get(args, 0)),
@@ -1416,7 +1418,7 @@ class Snowflake(Dialect):

  def timetostr_sql(self, expression: exp.TimeToStr) -> str:
  this = expression.this
- if not isinstance(this, exp.TsOrDsToTimestamp):
+ if this.is_string:
  this = exp.cast(this, exp.DataType.Type.TIMESTAMP)

  return self.func("TO_CHAR", this, self.format_time(expression))
@@ -612,6 +612,7 @@ class TSQL(Dialect):
  "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
  "SUSER_NAME": exp.CurrentUser.from_arg_list,
  "SUSER_SNAME": exp.CurrentUser.from_arg_list,
+ "SYSDATETIMEOFFSET": exp.CurrentTimestampLTZ.from_arg_list,
  "SYSTEM_USER": exp.CurrentUser.from_arg_list,
  "TIMEFROMPARTS": _build_timefromparts,
  "DATETRUNC": _build_datetrunc,
@@ -1020,6 +1021,7 @@ class TSQL(Dialect):
  exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
  exp.CurrentDate: rename_func("GETDATE"),
  exp.CurrentTimestamp: rename_func("GETDATE"),
+ exp.CurrentTimestampLTZ: rename_func("SYSDATETIMEOFFSET"),
  exp.DateStrToDate: datestrtodate_sql,
  exp.Extract: rename_func("DATEPART"),
  exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
@@ -1249,15 +1251,15 @@ class TSQL(Dialect):
  sql_with_ctes = self.prepend_ctes(expression, sql)
  sql_literal = self.sql(exp.Literal.string(sql_with_ctes))
  if kind == "SCHEMA":
- return f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql_literal})"""
+ return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = {identifier}) EXEC({sql_literal})"""
  elif kind == "TABLE":
  assert table
  where = exp.and_(
- exp.column("table_name").eq(table.name),
- exp.column("table_schema").eq(table.db) if table.db else None,
- exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
+ exp.column("TABLE_NAME").eq(table.name),
+ exp.column("TABLE_SCHEMA").eq(table.db) if table.db else None,
+ exp.column("TABLE_CATALOG").eq(table.catalog) if table.catalog else None,
  )
- return f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql_literal})"""
+ return f"""IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {where}) EXEC({sql_literal})"""
  elif kind == "INDEX":
  index = self.sql(exp.Literal.string(expression.this.text("this")))
  return f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql_literal})"""
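Editorial note (not part of the diff): `SYSDATETIMEOFFSET()` now maps to the new `exp.CurrentTimestampLTZ` node and is generated back verbatim for T-SQL (and, by inheritance, Fabric):

```python
import sqlglot

# Round trip through the T-SQL dialect; the new parser and generator entries keep
# SYSDATETIMEOFFSET() instead of dropping it or mapping it to GETDATE().
print(sqlglot.transpile("SELECT SYSDATETIMEOFFSET()", read="tsql", write="tsql")[0])
# Expected: SELECT SYSDATETIMEOFFSET()
```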
@@ -5806,6 +5806,10 @@ class CurrentTimestamp(Func):
  arg_types = {"this": False, "sysdate": False}


+ class CurrentTimestampLTZ(Func):
+ arg_types = {}
+
+
  class CurrentSchema(Func):
  arg_types = {"this": False}

@@ -5846,8 +5850,6 @@ class DateTrunc(Func):
  unit_name = TimeUnit.UNABBREVIATED_UNIT_NAME[unit_name]

  args["unit"] = Literal.string(unit_name)
- elif isinstance(unit, Week):
- unit.set("this", Literal.string(unit.this.name.upper()))

  super().__init__(**args)

@@ -6669,6 +6671,11 @@ class Repeat(Func):
  arg_types = {"this": True, "times": True}


+ # Some dialects like Snowflake support two argument replace
+ class Replace(Func):
+ arg_types = {"this": True, "expression": True, "replacement": False}
+
+
  # https://learn.microsoft.com/en-us/sql/t-sql/functions/round-transact-sql?view=sql-server-ver16
  # tsql third argument function == trunctaion if not 0
  class Round(Func):
@@ -3480,7 +3480,7 @@ class Generator(metaclass=_Generator):

  actions_list.append(action_sql)

- actions_sql = self.format_args(*actions_list)
+ actions_sql = self.format_args(*actions_list).lstrip("\n")

  exists = " IF EXISTS" if expression.args.get("exists") else ""
  on_cluster = self.sql(expression, "cluster")
@@ -3491,7 +3491,7 @@
  kind = self.sql(expression, "kind")
  not_valid = " NOT VALID" if expression.args.get("not_valid") else ""

- return f"ALTER {kind}{exists}{only} {self.sql(expression, 'this')}{on_cluster} {actions_sql}{not_valid}{options}"
+ return f"ALTER {kind}{exists}{only} {self.sql(expression, 'this')}{on_cluster}{self.sep()}{actions_sql}{not_valid}{options}"

  def add_column_sql(self, expression: exp.Expression) -> str:
  sql = self.sql(expression)
@@ -3510,7 +3510,7 @@
  return f"DROP{exists}{expressions}"

  def addconstraint_sql(self, expression: exp.AddConstraint) -> str:
- return f"ADD {self.expressions(expression)}"
+ return f"ADD {self.expressions(expression, indent=False)}"

  def addpartition_sql(self, expression: exp.AddPartition) -> str:
  exists = "IF NOT EXISTS " if expression.args.get("exists") else ""
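Editorial note (not part of the diff): with `self.sep()` in `alter_sql` and `indent=False` in `addconstraint_sql`, pretty-printed ALTER statements now place their actions after a line break instead of inlining them. A quick way to see the effect (exact layout not asserted here; see the updated tests/fixtures/pretty.sql for the canonical output):

```python
import sqlglot

# Pretty mode formats the ALTER action on its own line under the new generator code.
sql = "ALTER TABLE t ADD CONSTRAINT pk PRIMARY KEY (id)"
print(sqlglot.transpile(sql, pretty=True)[0])
```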
@@ -358,7 +358,7 @@ class Scope:
  for expression in itertools.chain(self.derived_tables, self.udtfs):
  self._references.append(
  (
- expression.alias,
+ _get_source_alias(expression),
  expression if expression.args.get("pivots") else expression.unnest(),
  )
  )
@@ -785,7 +785,7 @@ def _traverse_tables(scope):
  # This shouldn't be a problem once qualify_columns runs, as it adds aliases on everything.
  # Until then, this means that only a single, unaliased derived table is allowed (rather,
  # the latest one wins.
- sources[expression.alias] = child_scope
+ sources[_get_source_alias(expression)] = child_scope

  # append the final child_scope yielded
  if child_scope:
@@ -825,7 +825,7 @@ def _traverse_udtfs(scope):
  ):
  yield child_scope
  top = child_scope
- sources[expression.alias] = child_scope
+ sources[_get_source_alias(expression)] = child_scope

  scope.subquery_scopes.append(top)

@@ -915,3 +915,13 @@ def find_in_scope(expression, expression_types, bfs=True):
  the criteria was found.
  """
  return next(find_all_in_scope(expression, expression_types, bfs=bfs), None)
+
+
+ def _get_source_alias(expression):
+ alias_arg = expression.args.get("alias")
+ alias_name = expression.alias
+
+ if not alias_name and isinstance(alias_arg, exp.TableAlias) and len(alias_arg.columns) == 1:
+ alias_name = alias_arg.columns[0].name
+
+ return alias_name
@@ -7362,8 +7362,9 @@ class Parser(metaclass=_Parser):

  return None

- if not self.dialect.ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN or self._match_text_seq(
- "COLUMNS"
+ if not self._match_set(self.ADD_CONSTRAINT_TOKENS, advance=False) and (
+ not self.dialect.ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN
+ or self._match_text_seq("COLUMNS")
  ):
  schema = self._parse_schema()

@@ -352,13 +352,20 @@ def unnest_to_explode(
  has_multi_expr = len(exprs) > 1
  this, *expressions = _unnest_zip_exprs(unnest, exprs, has_multi_expr)

+ columns = alias.columns if alias else []
+ offset = unnest.args.get("offset")
+ if offset:
+ columns.insert(
+ 0, offset if isinstance(offset, exp.Identifier) else exp.to_identifier("pos")
+ )
+
  unnest.replace(
  exp.Table(
  this=_udtf_type(unnest, has_multi_expr)(
  this=this,
  expressions=expressions,
  ),
- alias=exp.TableAlias(this=alias.this, columns=alias.columns) if alias else None,
+ alias=exp.TableAlias(this=alias.this, columns=columns) if alias else None,
  )
  )

@@ -393,6 +400,13 @@ def unnest_to_explode(
  "CROSS JOIN UNNEST to LATERAL VIEW EXPLODE transformation requires explicit column aliases"
  )

+ offset = unnest.args.get("offset")
+ if offset:
+ alias_cols.insert(
+ 0,
+ offset if isinstance(offset, exp.Identifier) else exp.to_identifier("pos"),
+ )
+
  for e, column in zip(exprs, alias_cols):
  expression.append(
  "laterals",
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 26.30.0
+ Version: 26.31.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License: MIT License
@@ -61,7 +61,7 @@ Dynamic: provides-extra

  ![SQLGlot logo](sqlglot.png)

- SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [27 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.
+ SQLGlot is a no-dependency SQL parser, transpiler, optimizer, and engine. It can be used to format SQL or translate between [29 different dialects](https://github.com/tobymao/sqlglot/blob/main/sqlglot/dialects/__init__.py) like [DuckDB](https://duckdb.org/), [Presto](https://prestodb.io/) / [Trino](https://trino.io/), [Spark](https://spark.apache.org/) / [Databricks](https://www.databricks.com/), [Snowflake](https://www.snowflake.com/en/), and [BigQuery](https://cloud.google.com/bigquery/). It aims to read a wide variety of SQL inputs and output syntactically and semantically correct SQL in the targeted dialects.

  It is a very comprehensive generic SQL parser with a robust [test suite](https://github.com/tobymao/sqlglot/blob/main/tests/). It is also quite [performant](#benchmarks), while being written purely in Python.

@@ -47,6 +47,7 @@ sqlglot/dialects/drill.py
  sqlglot/dialects/druid.py
  sqlglot/dialects/duckdb.py
  sqlglot/dialects/dune.py
+ sqlglot/dialects/exasol.py
  sqlglot/dialects/fabric.py
  sqlglot/dialects/hive.py
  sqlglot/dialects/materialize.py
@@ -135,6 +136,7 @@ tests/dialects/test_drill.py
  tests/dialects/test_druid.py
  tests/dialects/test_duckdb.py
  tests/dialects/test_dune.py
+ tests/dialects/test_exasol.py
  tests/dialects/test_fabric.py
  tests/dialects/test_hive.py
  tests/dialects/test_materialize.py