sqlglot 27.16.2.tar.gz → 27.17.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (226)
  1. {sqlglot-27.16.2 → sqlglot-27.17.0}/CHANGELOG.md +12 -0
  2. {sqlglot-27.16.2 → sqlglot-27.17.0}/PKG-INFO +1 -1
  3. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/_version.py +3 -3
  4. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/clickhouse.py +2 -3
  5. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/duckdb.py +6 -0
  6. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/postgres.py +13 -7
  7. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/snowflake.py +31 -0
  8. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/expressions.py +45 -0
  9. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/generator.py +31 -7
  10. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/parser.py +47 -5
  11. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.egg-info/PKG-INFO +1 -1
  12. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_bigquery.py +4 -0
  13. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_clickhouse.py +1 -0
  14. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_postgres.py +30 -0
  15. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_presto.py +7 -0
  16. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_snowflake.py +41 -4
  17. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_trino.py +8 -0
  18. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_tsql.py +1 -0
  19. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/identity.sql +2 -0
  20. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/annotate_functions.sql +116 -0
  21. {sqlglot-27.16.2 → sqlglot-27.17.0}/.gitignore +0 -0
  22. {sqlglot-27.16.2 → sqlglot-27.17.0}/.gitpod.yml +0 -0
  23. {sqlglot-27.16.2 → sqlglot-27.17.0}/.pre-commit-config.yaml +0 -0
  24. {sqlglot-27.16.2 → sqlglot-27.17.0}/CONTRIBUTING.md +0 -0
  25. {sqlglot-27.16.2 → sqlglot-27.17.0}/LICENSE +0 -0
  26. {sqlglot-27.16.2 → sqlglot-27.17.0}/MANIFEST.in +0 -0
  27. {sqlglot-27.16.2 → sqlglot-27.17.0}/Makefile +0 -0
  28. {sqlglot-27.16.2 → sqlglot-27.17.0}/README.md +0 -0
  29. {sqlglot-27.16.2 → sqlglot-27.17.0}/pyproject.toml +0 -0
  30. {sqlglot-27.16.2 → sqlglot-27.17.0}/setup.cfg +0 -0
  31. {sqlglot-27.16.2 → sqlglot-27.17.0}/setup.py +0 -0
  32. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/__init__.py +0 -0
  33. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/__main__.py +0 -0
  34. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/_typing.py +0 -0
  35. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/__init__.py +0 -0
  36. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/athena.py +0 -0
  37. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/bigquery.py +0 -0
  38. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/databricks.py +0 -0
  39. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/dialect.py +0 -0
  40. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/doris.py +0 -0
  41. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/dremio.py +0 -0
  42. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/drill.py +0 -0
  43. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/druid.py +0 -0
  44. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/dune.py +0 -0
  45. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/exasol.py +0 -0
  46. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/fabric.py +0 -0
  47. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/hive.py +0 -0
  48. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/materialize.py +0 -0
  49. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/mysql.py +0 -0
  50. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/oracle.py +0 -0
  51. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/presto.py +0 -0
  52. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/prql.py +0 -0
  53. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/redshift.py +0 -0
  54. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/risingwave.py +0 -0
  55. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/singlestore.py +0 -0
  56. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/spark.py +0 -0
  57. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/spark2.py +0 -0
  58. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/sqlite.py +0 -0
  59. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/starrocks.py +0 -0
  60. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/tableau.py +0 -0
  61. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/teradata.py +0 -0
  62. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/trino.py +0 -0
  63. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/dialects/tsql.py +0 -0
  64. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/diff.py +0 -0
  65. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/errors.py +0 -0
  66. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/executor/__init__.py +0 -0
  67. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/executor/context.py +0 -0
  68. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/executor/env.py +0 -0
  69. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/executor/python.py +0 -0
  70. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/executor/table.py +0 -0
  71. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/helper.py +0 -0
  72. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/jsonpath.py +0 -0
  73. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/lineage.py +0 -0
  74. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/__init__.py +0 -0
  75. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/annotate_types.py +0 -0
  76. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/canonicalize.py +0 -0
  77. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/eliminate_ctes.py +0 -0
  78. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/eliminate_joins.py +0 -0
  79. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/eliminate_subqueries.py +0 -0
  80. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/isolate_table_selects.py +0 -0
  81. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/merge_subqueries.py +0 -0
  82. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/normalize.py +0 -0
  83. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/normalize_identifiers.py +0 -0
  84. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/optimize_joins.py +0 -0
  85. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/optimizer.py +0 -0
  86. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/pushdown_predicates.py +0 -0
  87. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/pushdown_projections.py +0 -0
  88. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/qualify.py +0 -0
  89. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/qualify_columns.py +0 -0
  90. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/qualify_tables.py +0 -0
  91. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/scope.py +0 -0
  92. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/simplify.py +0 -0
  93. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/optimizer/unnest_subqueries.py +0 -0
  94. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/planner.py +0 -0
  95. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/py.typed +0 -0
  96. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/schema.py +0 -0
  97. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/serde.py +0 -0
  98. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/time.py +0 -0
  99. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/tokens.py +0 -0
  100. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/transforms.py +0 -0
  101. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot/trie.py +0 -0
  102. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.egg-info/SOURCES.txt +0 -0
  103. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.egg-info/dependency_links.txt +0 -0
  104. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.egg-info/requires.txt +0 -0
  105. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.egg-info/top_level.txt +0 -0
  106. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglot.png +0 -0
  107. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/Cargo.lock +0 -0
  108. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/Cargo.toml +0 -0
  109. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/benches/dialect_settings.json +0 -0
  110. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/benches/long.rs +0 -0
  111. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/benches/token_type_settings.json +0 -0
  112. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/benches/tokenizer_dialect_settings.json +0 -0
  113. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/benches/tokenizer_settings.json +0 -0
  114. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/pyproject.toml +0 -0
  115. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/src/lib.rs +0 -0
  116. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/src/settings.rs +0 -0
  117. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/src/token.rs +0 -0
  118. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/src/tokenizer.rs +0 -0
  119. {sqlglot-27.16.2 → sqlglot-27.17.0}/sqlglotrs/src/trie.rs +0 -0
  120. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/__init__.py +0 -0
  121. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/__init__.py +0 -0
  122. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_athena.py +0 -0
  123. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_databricks.py +0 -0
  124. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_dialect.py +0 -0
  125. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_doris.py +0 -0
  126. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_dremio.py +0 -0
  127. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_drill.py +0 -0
  128. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_druid.py +0 -0
  129. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_duckdb.py +0 -0
  130. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_dune.py +0 -0
  131. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_exasol.py +0 -0
  132. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_fabric.py +0 -0
  133. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_hive.py +0 -0
  134. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_materialize.py +0 -0
  135. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_mysql.py +0 -0
  136. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_oracle.py +0 -0
  137. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_pipe_syntax.py +0 -0
  138. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_prql.py +0 -0
  139. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_redshift.py +0 -0
  140. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_risingwave.py +0 -0
  141. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_singlestore.py +0 -0
  142. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_spark.py +0 -0
  143. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_sqlite.py +0 -0
  144. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_starrocks.py +0 -0
  145. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_tableau.py +0 -0
  146. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/dialects/test_teradata.py +0 -0
  147. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/jsonpath/LICENSE +0 -0
  148. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/jsonpath/cts.json +0 -0
  149. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/annotate_types.sql +0 -0
  150. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/canonicalize.sql +0 -0
  151. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/eliminate_ctes.sql +0 -0
  152. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/eliminate_joins.sql +0 -0
  153. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/eliminate_subqueries.sql +0 -0
  154. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/isolate_table_selects.sql +0 -0
  155. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/merge_subqueries.sql +0 -0
  156. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/normalize.sql +0 -0
  157. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/normalize_identifiers.sql +0 -0
  158. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/optimize_joins.sql +0 -0
  159. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/optimizer.sql +0 -0
  160. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/pushdown_cte_alias_columns.sql +0 -0
  161. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/pushdown_predicates.sql +0 -0
  162. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/pushdown_projections.sql +0 -0
  163. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/qualify_columns.sql +0 -0
  164. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/qualify_columns__invalid.sql +0 -0
  165. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/qualify_columns__with_invisible.sql +0 -0
  166. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/qualify_columns_ddl.sql +0 -0
  167. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/qualify_tables.sql +0 -0
  168. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/quote_identifiers.sql +0 -0
  169. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/simplify.sql +0 -0
  170. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/call_center.csv.gz +0 -0
  171. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/catalog_page.csv.gz +0 -0
  172. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/catalog_returns.csv.gz +0 -0
  173. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/catalog_sales.csv.gz +0 -0
  174. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/customer.csv.gz +0 -0
  175. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/customer_address.csv.gz +0 -0
  176. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/customer_demographics.csv.gz +0 -0
  177. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/date_dim.csv.gz +0 -0
  178. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/household_demographics.csv.gz +0 -0
  179. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/income_band.csv.gz +0 -0
  180. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/inventory.csv.gz +0 -0
  181. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/item.csv.gz +0 -0
  182. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/promotion.csv.gz +0 -0
  183. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/reason.csv.gz +0 -0
  184. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/ship_mode.csv.gz +0 -0
  185. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/store.csv.gz +0 -0
  186. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/store_returns.csv.gz +0 -0
  187. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/store_sales.csv.gz +0 -0
  188. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/time_dim.csv.gz +0 -0
  189. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/tpc-ds.sql +0 -0
  190. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/warehouse.csv.gz +0 -0
  191. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/web_page.csv.gz +0 -0
  192. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/web_returns.csv.gz +0 -0
  193. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/web_sales.csv.gz +0 -0
  194. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-ds/web_site.csv.gz +0 -0
  195. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/customer.csv.gz +0 -0
  196. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/lineitem.csv.gz +0 -0
  197. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/nation.csv.gz +0 -0
  198. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/orders.csv.gz +0 -0
  199. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/part.csv.gz +0 -0
  200. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/partsupp.csv.gz +0 -0
  201. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/region.csv.gz +0 -0
  202. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/supplier.csv.gz +0 -0
  203. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/tpc-h/tpc-h.sql +0 -0
  204. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/optimizer/unnest_subqueries.sql +0 -0
  205. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/partial.sql +0 -0
  206. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/fixtures/pretty.sql +0 -0
  207. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/gen_fixtures.py +0 -0
  208. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/helpers.py +0 -0
  209. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_build.py +0 -0
  210. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_dialect_imports.py +0 -0
  211. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_diff.py +0 -0
  212. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_docs.py +0 -0
  213. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_executor.py +0 -0
  214. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_expressions.py +0 -0
  215. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_generator.py +0 -0
  216. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_helper.py +0 -0
  217. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_jsonpath.py +0 -0
  218. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_lineage.py +0 -0
  219. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_optimizer.py +0 -0
  220. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_parser.py +0 -0
  221. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_schema.py +0 -0
  222. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_serde.py +0 -0
  223. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_time.py +0 -0
  224. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_tokens.py +0 -0
  225. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_transforms.py +0 -0
  226. {sqlglot-27.16.2 → sqlglot-27.17.0}/tests/test_transpile.py +0 -0
CHANGELOG.md
@@ -1,6 +1,16 @@
  Changelog
  =========

+ ## [v27.16.3] - 2025-09-18
+ ### :bug: Bug Fixes
+ - [`d127051`](https://github.com/tobymao/sqlglot/commit/d1270517c3e124ca59caf29e4506eb3848f7452e) - precedence issue with column operator parsing *(PR [#5914](https://github.com/tobymao/sqlglot/pull/5914) by [@georgesittas](https://github.com/georgesittas))*
+
+
+ ## [v27.16.2] - 2025-09-18
+ ### :wrench: Chores
+ - [`837890c`](https://github.com/tobymao/sqlglot/commit/837890c7e8bcc3695541bbe32fd8088eee70fea3) - handle badly formed binary expressions gracefully in type inference *(commit by [@georgesittas](https://github.com/georgesittas))*
+
+
  ## [v27.16.1] - 2025-09-18
  ### :bug: Bug Fixes
  - [`0e256b3`](https://github.com/tobymao/sqlglot/commit/0e256b3f864bc2d026817bd08e89ee89f44ad256) - edge case with parsing `interval` as identifier *(commit by [@georgesittas](https://github.com/georgesittas))*
@@ -7381,3 +7391,5 @@ Changelog
  [v27.15.3]: https://github.com/tobymao/sqlglot/compare/v27.15.2...v27.15.3
  [v27.16.0]: https://github.com/tobymao/sqlglot/compare/v27.15.3...v27.16.0
  [v27.16.1]: https://github.com/tobymao/sqlglot/compare/v27.16.0...v27.16.1
+ [v27.16.2]: https://github.com/tobymao/sqlglot/compare/v27.16.1...v27.16.2
+ [v27.16.3]: https://github.com/tobymao/sqlglot/compare/v27.16.2...v27.16.3
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 27.16.2
+ Version: 27.17.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License-Expression: MIT
sqlglot/_version.py
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '27.16.2'
- __version_tuple__ = version_tuple = (27, 16, 2)
+ __version__ = version = '27.17.0'
+ __version_tuple__ = version_tuple = (27, 17, 0)

- __commit_id__ = commit_id = 'g837890c7e'
+ __commit_id__ = commit_id = 'g0dc0015a3'
sqlglot/dialects/clickhouse.py
@@ -332,7 +332,6 @@ class ClickHouse(Dialect):
  "PARSEDATETIME": _build_datetime_format(exp.ParseDatetime),
  "RANDCANONICAL": exp.Rand.from_arg_list,
  "STR_TO_DATE": _build_str_to_date,
- "TUPLE": exp.Struct.from_arg_list,
  "TIMESTAMP_SUB": build_date_delta(exp.TimestampSub, default_unit=None),
  "TIMESTAMPSUB": build_date_delta(exp.TimestampSub, default_unit=None),
  "TIMESTAMP_ADD": build_date_delta(exp.TimestampAdd, default_unit=None),
@@ -505,14 +504,13 @@ class ClickHouse(Dialect):
  }
  )(AGG_FUNCTIONS, AGG_FUNCTIONS_SUFFIXES)

- FUNCTIONS_WITH_ALIASED_ARGS = {*parser.Parser.FUNCTIONS_WITH_ALIASED_ARGS, "TUPLE"}
-
  FUNCTION_PARSERS = {
  **parser.Parser.FUNCTION_PARSERS,
  "ARRAYJOIN": lambda self: self.expression(exp.Explode, this=self._parse_expression()),
  "QUANTILE": lambda self: self._parse_quantile(),
  "MEDIAN": lambda self: self._parse_quantile(),
  "COLUMNS": lambda self: self._parse_columns(),
+ "TUPLE": lambda self: exp.Struct.from_arg_list(self._parse_function_args(alias=True)),
  }

  FUNCTION_PARSERS.pop("MATCH")
@@ -1126,6 +1124,7 @@ class ClickHouse(Dialect):
  exp.RegexpLike: lambda self, e: self.func("match", e.this, e.expression),
  exp.Rand: rename_func("randCanonical"),
  exp.StartsWith: rename_func("startsWith"),
+ exp.Struct: rename_func("tuple"),
  exp.EndsWith: rename_func("endsWith"),
  exp.EuclideanDistance: rename_func("L2Distance"),
  exp.StrPosition: lambda self, e: strposition_sql(
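In this release, ClickHouse's `TUPLE` moves from the plain `FUNCTIONS` registry to a `FUNCTION_PARSERS` entry that parses (possibly aliased) arguments into `exp.Struct`, and the generator maps `exp.Struct` back to `tuple`. A minimal round-trip sketch, mirroring the `tuple(1 = 1, 'foo' = 'foo')` identity test further down (assumes sqlglot 27.17.0 is installed):

```python
import sqlglot

# tuple(...) now parses into an exp.Struct node and is rendered back as tuple(...).
ast = sqlglot.parse_one("tuple(1 = 1, 'foo' = 'foo')", read="clickhouse")
print(type(ast).__name__)             # Struct
print(ast.sql(dialect="clickhouse"))  # tuple(1 = 1, 'foo' = 'foo')
```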
sqlglot/dialects/duckdb.py
@@ -1296,3 +1296,9 @@ class DuckDB(Dialect):
  return self.sql(exp.Cast(this=func, to=this.type))

  return self.sql(func)
+
+ def format_sql(self, expression: exp.Format) -> str:
+ if expression.name.lower() == "%s" and len(expression.expressions) == 1:
+ return self.func("FORMAT", "'{}'", expression.expressions[0])
+
+ return self.function_fallback_sql(expression)
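The new `DuckDB.Generator.format_sql` special-cases a single-argument, `%s`-only `FORMAT` call (the shape produced when reading Trino/Presto) and emits DuckDB's `fmt`-style placeholder instead; anything else falls through to the generic function rendering. A short sketch based on the Trino test further down (assumes sqlglot 27.17.0):

```python
import sqlglot

# Trino's printf-style FORMAT('%s', x) becomes DuckDB's FORMAT('{}', x).
print(sqlglot.transpile("SELECT FORMAT('%s', 123)", read="trino", write="duckdb")[0])
# SELECT FORMAT('{}', 123)
```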
sqlglot/dialects/postgres.py
@@ -375,6 +375,8 @@ class Postgres(Dialect):
  VAR_SINGLE_TOKENS = {"$"}

  class Parser(parser.Parser):
+ SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT = True
+
  PROPERTY_PARSERS = {
  **parser.Parser.PROPERTY_PARSERS,
  "SET": lambda self: self.expression(exp.SetConfigProperty, this=self._parse_set()),
@@ -426,7 +428,7 @@ class Postgres(Dialect):
  "DATE_PART": lambda self: self._parse_date_part(),
  "JSON_AGG": lambda self: self.expression(
  exp.JSONArrayAgg,
- this=self._parse_bitwise(),
+ this=self._parse_lambda(),
  order=self._parse_order(),
  ),
  "JSONB_EXISTS": lambda self: self._parse_jsonb_exists(),
@@ -459,12 +461,16 @@ class Postgres(Dialect):

  COLUMN_OPERATORS = {
  **parser.Parser.COLUMN_OPERATORS,
- TokenType.ARROW: lambda self, this, path: build_json_extract_path(
- exp.JSONExtract, arrow_req_json_type=self.JSON_ARROWS_REQUIRE_JSON_TYPE
- )([this, path]),
- TokenType.DARROW: lambda self, this, path: build_json_extract_path(
- exp.JSONExtractScalar, arrow_req_json_type=self.JSON_ARROWS_REQUIRE_JSON_TYPE
- )([this, path]),
+ TokenType.ARROW: lambda self, this, path: self.validate_expression(
+ build_json_extract_path(
+ exp.JSONExtract, arrow_req_json_type=self.JSON_ARROWS_REQUIRE_JSON_TYPE
+ )([this, path])
+ ),
+ TokenType.DARROW: lambda self, this, path: self.validate_expression(
+ build_json_extract_path(
+ exp.JSONExtractScalar, arrow_req_json_type=self.JSON_ARROWS_REQUIRE_JSON_TYPE
+ )([this, path])
+ ),
  }

  def _parse_query_parameter(self) -> t.Optional[exp.Expression]:
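Three Postgres-facing changes are visible here: day-time interval literals may omit the `DAY TO MINUTE`/`DAY TO SECOND` span unit, `JSON_AGG` arguments are parsed with `_parse_lambda` so `DISTINCT` is accepted, and the `->`/`->>` column operators now run through `validate_expression`. A quick sketch of the first two, taken from the new Postgres tests below (assumes sqlglot 27.17.0):

```python
import sqlglot

# Day-time interval literal with the span unit omitted round-trips unchanged.
print(sqlglot.transpile("INTERVAL '1 01:00'", read="postgres", write="postgres")[0])
# INTERVAL '1 01:00'

# JSON_AGG now accepts DISTINCT because its argument is parsed as a full expression.
print(sqlglot.transpile("SELECT JSON_AGG(DISTINCT name) FROM users", read="postgres", write="postgres")[0])
# SELECT JSON_AGG(DISTINCT name) FROM users
```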
sqlglot/dialects/snowflake.py
@@ -324,6 +324,15 @@ def _build_regexp_extract(expr_type: t.Type[E]) -> t.Callable[[t.List], E]:
  return _builder


+ def _build_like(expr_type: t.Type[E]) -> t.Callable[[t.List], E | exp.Escape]:
+ def _builder(args: t.List) -> E | exp.Escape:
+ like_expr = expr_type(this=args[0], expression=args[1])
+ escape = seq_get(args, 2)
+ return exp.Escape(this=like_expr, expression=escape) if escape else like_expr
+
+ return _builder
+
+
  def _regexpextract_sql(self, expression: exp.RegexpExtract | exp.RegexpExtractAll) -> str:
  # Other dialects don't support all of the following parameters, so we need to
  # generate default values as necessary to ensure the transpilation is correct
@@ -522,14 +531,24 @@ class Snowflake(Dialect):
  **Dialect.TYPE_TO_EXPRESSIONS,
  exp.DataType.Type.INT: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.INT],
+ exp.Ascii,
  exp.Length,
+ exp.BitLength,
+ exp.Levenshtein,
  },
  exp.DataType.Type.VARCHAR: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.VARCHAR],
+ exp.Base64DecodeString,
+ exp.Base64Encode,
  exp.MD5,
  exp.AIAgg,
  exp.AIClassify,
  exp.AISummarizeAgg,
+ exp.Chr,
+ exp.Collate,
+ exp.HexDecodeString,
+ exp.HexEncode,
+ exp.Initcap,
  exp.RegexpExtract,
  exp.RegexpReplace,
  exp.Repeat,
@@ -541,9 +560,12 @@ class Snowflake(Dialect):
  },
  exp.DataType.Type.BINARY: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.BINARY],
+ exp.Base64DecodeBinary,
+ exp.Compress,
  exp.MD5Digest,
  exp.SHA1Digest,
  exp.SHA2Digest,
+ exp.Unhex,
  },
  exp.DataType.Type.BIGINT: {
  *Dialect.TYPE_TO_EXPRESSIONS[exp.DataType.Type.BIGINT],
@@ -678,6 +700,7 @@ class Snowflake(Dialect):
  "DATE_TRUNC": _date_trunc_to_time,
  "DATEADD": _build_date_time_add(exp.DateAdd),
  "DATEDIFF": _build_datediff,
+ "DAYOFWEEKISO": exp.DayOfWeekIso.from_arg_list,
  "DIV0": _build_if_from_div0,
  "EDITDISTANCE": lambda args: exp.Levenshtein(
  this=seq_get(args, 0), expression=seq_get(args, 1), max_dist=seq_get(args, 2)
@@ -746,6 +769,8 @@ class Snowflake(Dialect):
  "TO_JSON": exp.JSONFormat.from_arg_list,
  "VECTOR_L2_DISTANCE": exp.EuclideanDistance.from_arg_list,
  "ZEROIFNULL": _build_if_from_zeroifnull,
+ "LIKE": _build_like(exp.Like),
+ "ILIKE": _build_like(exp.ILike),
  }
  FUNCTIONS.pop("PREDICT")

@@ -1816,3 +1841,9 @@ class Snowflake(Dialect):

  def modelattribute_sql(self, expression: exp.ModelAttribute) -> str:
  return f"{self.sql(expression, 'this')}!{self.sql(expression, 'expression')}"
+
+ def format_sql(self, expression: exp.Format) -> str:
+ if expression.name.lower() == "%s" and len(expression.expressions) == 1:
+ return self.func("TO_CHAR", expression.expressions[0])
+
+ return self.function_fallback_sql(expression)
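On the Snowflake side, `_build_like` parses the function-call forms `LIKE(subject, pattern[, escape])` and `ILIKE(...)` into the usual infix expressions (adding an `ESCAPE` clause when a third argument is present), and the new `format_sql` renders a single-argument `FORMAT('%s', x)` as `TO_CHAR(x)`. A sketch grounded in the Snowflake and Trino tests below (assumes sqlglot 27.17.0):

```python
import sqlglot

# Function-style LIKE with an escape argument becomes infix LIKE ... ESCAPE ...
print(sqlglot.transpile("SELECT LIKE(col, 'pattern', '!')", read="snowflake", write="snowflake")[0])
# SELECT col LIKE 'pattern' ESCAPE '!'

# Trino's FORMAT('%s', x) is rendered with TO_CHAR when targeting Snowflake.
print(sqlglot.transpile("SELECT FORMAT('%s', 123)", read="trino", write="snowflake")[0])
# SELECT TO_CHAR(123)
```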
sqlglot/expressions.py
@@ -6414,6 +6414,19 @@ class ToBase64(Func):
  pass


+ # https://docs.snowflake.com/en/sql-reference/functions/base64_decode_binary
+ class Base64DecodeBinary(Func):
+ arg_types = {"this": True, "alphabet": False}
+
+
+ class Base64DecodeString(Func):
+ arg_types = {"this": True, "alphabet": False}
+
+
+ class Base64Encode(Func):
+ arg_types = {"this": True, "max_line_length": False, "alphabet": False}
+
+
  # https://trino.io/docs/current/functions/datetime.html#from_iso8601_timestamp
  class FromISO8601Timestamp(Func):
  _sql_names = ["FROM_ISO8601_TIMESTAMP"]
@@ -6465,6 +6478,21 @@ class Hex(Func):
  pass


+ # https://docs.snowflake.com/en/sql-reference/functions/hex_decode_string
+ class HexDecodeString(Func):
+ pass
+
+
+ class HexEncode(Func):
+ arg_types = {"this": True, "case": False}
+
+
+ # T-SQL: https://learn.microsoft.com/en-us/sql/t-sql/functions/compress-transact-sql?view=sql-server-ver17
+ # Snowflake: https://docs.snowflake.com/en/sql-reference/functions/compress
+ class Compress(Func):
+ arg_types = {"this": True, "method": False}
+
+
  class LowerHex(Hex):
  pass

@@ -6871,6 +6899,10 @@ class Length(Func):
  _sql_names = ["LENGTH", "LEN", "CHAR_LENGTH", "CHARACTER_LENGTH"]


+ class BitLength(Func):
+ pass
+
+
  class Levenshtein(Func):
  arg_types = {
  "this": True,
@@ -8557,6 +8589,19 @@ def parse_identifier(name: str | Identifier, dialect: DialectType = None) -> Ide

  INTERVAL_STRING_RE = re.compile(r"\s*(-?[0-9]+(?:\.[0-9]+)?)\s*([a-zA-Z]+)\s*")

+ # Matches day-time interval strings that contain
+ # - A number of days (possibly negative or with decimals)
+ # - At least one space
+ # - Portions of a time-like signature, potentially negative
+ # - Standard format [-]h+:m+:s+[.f+]
+ # - Just minutes/seconds/frac seconds [-]m+:s+.f+
+ # - Just hours, minutes, maybe colon [-]h+:m+[:]
+ # - Just hours, maybe colon [-]h+[:]
+ # - Just colon :
+ INTERVAL_DAY_TIME_RE = re.compile(
+ r"\s*-?\s*\d+(?:\.\d+)?\s+(?:-?(?:\d+:)?\d+:\d+(?:\.\d+)?|-?(?:\d+:){1,2}|:)\s*"
+ )
+

  def to_interval(interval: str | Literal) -> Interval:
  """Builds an interval expression from a string like '1 day' or '5 months'."""
sqlglot/generator.py
@@ -727,6 +727,7 @@ class Generator(metaclass=_Generator):
  "dialect",
  "unsupported_messages",
  "_escaped_quote_end",
+ "_escaped_byte_quote_end",
  "_escaped_identifier_end",
  "_next_name",
  "_identifier_start",
@@ -773,6 +774,11 @@ class Generator(metaclass=_Generator):
  self._escaped_quote_end: str = (
  self.dialect.tokenizer_class.STRING_ESCAPES[0] + self.dialect.QUOTE_END
  )
+ self._escaped_byte_quote_end: str = (
+ self.dialect.tokenizer_class.STRING_ESCAPES[0] + self.dialect.BYTE_END
+ if self.dialect.BYTE_END
+ else ""
+ )
  self._escaped_identifier_end = self.dialect.IDENTIFIER_END * 2

  self._next_name = name_sequence("_t")
@@ -1376,7 +1382,13 @@ class Generator(metaclass=_Generator):
  def bytestring_sql(self, expression: exp.ByteString) -> str:
  this = self.sql(expression, "this")
  if self.dialect.BYTE_START:
- return f"{self.dialect.BYTE_START}{this}{self.dialect.BYTE_END}"
+ escaped_byte_string = self.escape_str(
+ this,
+ escape_backslash=False,
+ delimiter=self.dialect.BYTE_END,
+ escaped_delimiter=self._escaped_byte_quote_end,
+ )
+ return f"{self.dialect.BYTE_START}{escaped_byte_string}{self.dialect.BYTE_END}"
  return this

  def unicodestring_sql(self, expression: exp.UnicodeString) -> str:
@@ -2475,16 +2487,23 @@ class Generator(metaclass=_Generator):
  text = f"{self.dialect.QUOTE_START}{self.escape_str(text)}{self.dialect.QUOTE_END}"
  return text

- def escape_str(self, text: str, escape_backslash: bool = True) -> str:
+ def escape_str(
+ self,
+ text: str,
+ escape_backslash: bool = True,
+ delimiter: t.Optional[str] = None,
+ escaped_delimiter: t.Optional[str] = None,
+ ) -> str:
  if self.dialect.ESCAPED_SEQUENCES:
  to_escaped = self.dialect.ESCAPED_SEQUENCES
  text = "".join(
  to_escaped.get(ch, ch) if escape_backslash or ch != "\\" else ch for ch in text
  )

- return self._replace_line_breaks(text).replace(
- self.dialect.QUOTE_END, self._escaped_quote_end
- )
+ delimiter = delimiter or self.dialect.QUOTE_END
+ escaped_delimiter = escaped_delimiter or self._escaped_quote_end
+
+ return self._replace_line_breaks(text).replace(delimiter, escaped_delimiter)

  def loaddata_sql(self, expression: exp.LoadData) -> str:
  local = " LOCAL" if expression.args.get("local") else ""
@@ -3256,14 +3275,19 @@ class Generator(metaclass=_Generator):
  return f"(SELECT {self.sql(unnest)})"

  def interval_sql(self, expression: exp.Interval) -> str:
- unit = self.sql(expression, "unit")
+ unit_expression = expression.args.get("unit")
+ unit = self.sql(unit_expression) if unit_expression else ""
  if not self.INTERVAL_ALLOWS_PLURAL_FORM:
  unit = self.TIME_PART_SINGULARS.get(unit, unit)
  unit = f" {unit}" if unit else ""

  if self.SINGLE_STRING_INTERVAL:
  this = expression.this.name if expression.this else ""
- return f"INTERVAL '{this}{unit}'" if this else f"INTERVAL{unit}"
+ if this:
+ if unit_expression and isinstance(unit_expression, exp.IntervalSpan):
+ return f"INTERVAL '{this}'{unit}"
+ return f"INTERVAL '{this}{unit}'"
+ return f"INTERVAL{unit}"

  this = self.sql(expression, "this")
  if this:
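The generator now escapes the closing delimiter inside byte-string literals: `bytestring_sql` routes the literal through the generalized `escape_str`, using the dialect's `BYTE_END` as the delimiter to escape. A sketch mirroring the new BigQuery test further down, where embedded quotes are escaped instead of terminating the literal (assumes sqlglot 27.17.0):

```python
import sqlglot

# Double-quoted bytes literal containing single quotes; BigQuery re-emits it
# single-quoted, escaping the embedded quotes (per the BigQuery test below).
sql = '''SELECT b"\\x0a$'x'00"'''
print(sqlglot.transpile(sql, read="bigquery", write="bigquery")[0])
# SELECT b'\x0a$\'x\'00'
```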
sqlglot/parser.py
@@ -1561,6 +1561,10 @@ class Parser(metaclass=_Parser):
  # Adding an ON TRUE, makes transpilation semantically correct for other dialects
  ADD_JOIN_ON_TRUE = False

+ # Whether INTERVAL spans with literal format '\d+ hh:[mm:[ss[.ff]]]'
+ # can omit the span unit `DAY TO MINUTE` or `DAY TO SECOND`
+ SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT = False
+
  __slots__ = (
  "error_level",
  "error_message_context",
@@ -4613,7 +4617,7 @@
  def _parse_grouping_set(self) -> t.Optional[exp.Expression]:
  if self._match(TokenType.L_PAREN):
- grouping_set = self._parse_csv(self._parse_column)
+ grouping_set = self._parse_csv(self._parse_bitwise)
  self._match_r_paren()
  return self.expression(exp.Tuple, expressions=grouping_set)

@@ -5105,9 +5109,37 @@
  self._retreat(index)
  return None

- unit = self._parse_function() or (
- not self._match(TokenType.ALIAS, advance=False)
- and self._parse_var(any_token=True, upper=True)
+ # handle day-time format interval span with omitted units:
+ # INTERVAL '<number days> hh[:][mm[:ss[.ff]]]' <maybe `unit TO unit`>
+ interval_span_units_omitted = None
+ if (
+ this
+ and this.is_string
+ and self.SUPPORTS_OMITTED_INTERVAL_SPAN_UNIT
+ and exp.INTERVAL_DAY_TIME_RE.match(this.name)
+ ):
+ index = self._index
+
+ # Var "TO" Var
+ first_unit = self._parse_var(any_token=True, upper=True)
+ second_unit = None
+ if first_unit and self._match_text_seq("TO"):
+ second_unit = self._parse_var(any_token=True, upper=True)
+
+ interval_span_units_omitted = not (first_unit and second_unit)
+
+ self._retreat(index)
+
+ unit = (
+ None
+ if interval_span_units_omitted
+ else (
+ self._parse_function()
+ or (
+ not self._match(TokenType.ALIAS, advance=False)
+ and self._parse_var(any_token=True, upper=True)
+ )
+ )
  )

  # Most dialects support, e.g., the form INTERVAL '5' day, thus we try to parse
@@ -5124,6 +5156,7 @@
  if len(parts) == 1:
  this = exp.Literal.string(parts[0][0])
  unit = self.expression(exp.Var, this=parts[0][1].upper())
+
  if self.INTERVAL_SPANS and self._match_text_seq("TO"):
  unit = self.expression(
  exp.IntervalSpan, this=unit, expression=self._parse_var(any_token=True, upper=True)
@@ -5490,6 +5523,11 @@

  type_token = unsigned_type_token or type_token
+
+ # NULLABLE without parentheses can be a column (Presto/Trino)
+ if type_token == TokenType.NULLABLE and not expressions:
+ self._retreat(index)
+ return None
+
  this = exp.DataType(
  this=exp.DataType.Type[type_token.value],
  expressions=expressions,
@@ -5691,7 +5729,7 @@
  if not field:
  self.raise_error("Expected type")
  elif op and self._curr:
- field = self._parse_column_reference() or self._parse_bracket()
+ field = self._parse_column_reference() or self._parse_bitwise()
  if isinstance(field, exp.Column) and self._match(TokenType.DOT, advance=False):
  field = self._parse_column_ops(field)
  else:
@@ -5760,6 +5798,10 @@
  this.add_comments(comments)

  self._match_r_paren(expression=this)
+
+ if isinstance(this, exp.Paren) and isinstance(this.this, exp.AggFunc):
+ return self._parse_window(this)
+
  return this

  def _parse_primary(self) -> t.Optional[exp.Expression]:
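Among the parser changes, `_parse_types` now backs out when it sees a bare `NULLABLE` with no parenthesized inner type, so Presto/Trino queries that use `NULLABLE` as a column name parse again (see the test_presto hunk below). A minimal check (assumes sqlglot 27.17.0):

```python
import sqlglot

# NULLABLE without parentheses is treated as a column reference, not a data type.
print(sqlglot.transpile("SELECT NULLABLE FROM system.jdbc.types", read="presto", write="presto")[0])
# SELECT NULLABLE FROM system.jdbc.types
```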
sqlglot.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sqlglot
- Version: 27.16.2
+ Version: 27.17.0
  Summary: An easily customizable SQL parser and transpiler
  Author-email: Toby Mao <toby.mao@gmail.com>
  License-Expression: MIT
tests/dialects/test_bigquery.py
@@ -191,6 +191,10 @@ class TestBigQuery(Validator):
  self.validate_identity(
  "CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
  )
+ self.validate_identity(
+ '''SELECT b"\\x0a$'x'00"''',
+ """SELECT b'\\x0a$\\'x\\'00'""",
+ )
  self.validate_identity(
  "--c\nARRAY_AGG(v IGNORE NULLS)",
  "ARRAY_AGG(v IGNORE NULLS) /* c */",
tests/dialects/test_clickhouse.py
@@ -647,6 +647,7 @@ class TestClickhouse(Validator):

  self.validate_identity("cosineDistance(x, y)")
  self.validate_identity("L2Distance(x, y)")
+ self.validate_identity("tuple(1 = 1, 'foo' = 'foo')")

  def test_clickhouse_values(self):
  ast = self.parse_one("SELECT * FROM VALUES (1, 2, 3)")
tests/dialects/test_postgres.py
@@ -161,6 +161,10 @@ class TestPostgres(Validator):
  "pg_catalog.PG_TABLE_IS_VISIBLE(c.oid) "
  "ORDER BY 2, 3"
  )
+ self.validate_identity(
+ "x::JSON -> 'duration' ->> -1",
+ "JSON_EXTRACT_PATH_TEXT(CAST(x AS JSON) -> 'duration', -1)",
+ ).assert_is(exp.JSONExtractScalar).this.assert_is(exp.JSONExtract)
  self.validate_identity(
  "SELECT SUBSTRING('Thomas' FOR 3 FROM 2)",
  "SELECT SUBSTRING('Thomas' FROM 2 FOR 3)",
@@ -964,9 +968,13 @@ FROM json_data, field_ids""",
  """SELECT CAST('["a", {"b":1}]' AS JSONB) #- '{1,b}'""",
  )

+ self.validate_identity("SELECT JSON_AGG(DISTINCT name) FROM users")
  self.validate_identity(
  "SELECT JSON_AGG(c1 ORDER BY c1) FROM (VALUES ('c'), ('b'), ('a')) AS t(c1)"
  )
+ self.validate_identity(
+ "SELECT JSON_AGG(DISTINCT c1 ORDER BY c1) FROM (VALUES ('c'), ('b'), ('a')) AS t(c1)"
+ )

  def test_ddl(self):
  # Checks that user-defined types are parsed into DataType instead of Identifier
@@ -1633,3 +1641,25 @@ CROSS JOIN JSON_ARRAY_ELEMENTS(CAST(JSON_EXTRACT_PATH(tbox, 'boxes') AS JSON)) A
  self.validate_identity(
  f"BEGIN {keyword} {level}, {level}", f"BEGIN {level}, {level}"
  ).assert_is(exp.Transaction)
+
+ def test_interval_span(self):
+ for time_str in ["1 01:", "1 01:00", "1.5 01:", "-0.25 01:"]:
+ with self.subTest(f"Postgres INTERVAL span, omitted DAY TO MINUTE unit: {time_str}"):
+ self.validate_identity(f"INTERVAL '{time_str}'")
+
+ for time_str in [
+ "1 01:01:",
+ "1 01:01:",
+ "1 01:01:01",
+ "1 01:01:01.01",
+ "1.5 01:01:",
+ "-0.25 01:01:",
+ ]:
+ with self.subTest(f"Postgres INTERVAL span, omitted DAY TO SECOND unit: {time_str}"):
+ self.validate_identity(f"INTERVAL '{time_str}'")
+
+ # Ensure AND is not consumed as a unit following an omitted-span interval
+ with self.subTest("Postgres INTERVAL span, omitted unit with following AND"):
+ day_time_str = "a > INTERVAL '1 00:00' AND TRUE"
+ self.validate_identity(day_time_str, "a > INTERVAL '1 00:00' AND TRUE")
+ self.assertIsInstance(self.parse_one(day_time_str), exp.And)
tests/dialects/test_presto.py
@@ -1117,6 +1117,13 @@ class TestPresto(Validator):
  self.validate_identity(
  "SELECT id, FIRST_VALUE(is_deleted) OVER (PARTITION BY id) AS first_is_deleted, NTH_VALUE(is_deleted, 2) OVER (PARTITION BY id) AS nth_is_deleted, LAST_VALUE(is_deleted) OVER (PARTITION BY id) AS last_is_deleted FROM my_table"
  )
+ self.validate_all(
+ "SELECT NULLABLE FROM system.jdbc.types",
+ read={
+ "presto": "SELECT NULLABLE FROM system.jdbc.types",
+ "trino": "SELECT NULLABLE FROM system.jdbc.types",
+ },
+ )

  def test_encode_decode(self):
  self.validate_identity("FROM_UTF8(x, y)")
tests/dialects/test_snowflake.py
@@ -29,6 +29,14 @@ class TestSnowflake(Validator):
  expr.selects[0].assert_is(exp.AggFunc)
  self.assertEqual(expr.sql(dialect="snowflake"), "SELECT APPROX_TOP_K(C4, 3, 5) FROM t")

+ self.validate_identity("SELECT BIT_LENGTH('abc')")
+ self.validate_identity("SELECT BIT_LENGTH(x'A1B2')")
+ self.validate_identity("SELECT HEX_DECODE_STRING('48656C6C6F')")
+ self.validate_identity("SELECT HEX_ENCODE('Hello World')")
+ self.validate_identity("SELECT HEX_ENCODE('Hello World', 1)")
+ self.validate_identity("SELECT HEX_ENCODE('Hello World', 0)")
+ self.validate_identity("SELECT CHR(8364)")
+ self.validate_identity("SELECT COMPRESS('Hello World', 'ZLIB')")
  self.validate_identity("SELECT {*} FROM my_table")
  self.validate_identity("SELECT {my_table.*} FROM my_table")
  self.validate_identity("SELECT {* ILIKE 'col1%'} FROM my_table")
@@ -1184,11 +1192,12 @@ class TestSnowflake(Validator):
  self.validate_all(
  "DAYOFWEEKISO(foo)",
  read={
+ "snowflake": "DAYOFWEEKISO(foo)",
  "presto": "DAY_OF_WEEK(foo)",
  "trino": "DAY_OF_WEEK(foo)",
  },
  write={
- "snowflake": "DAYOFWEEKISO(foo)",
+ "duckdb": "ISODOW(foo)",
  },
  )

@@ -1198,9 +1207,6 @@ class TestSnowflake(Validator):
  "presto": "DOW(foo)",
  "trino": "DOW(foo)",
  },
- write={
- "snowflake": "DAYOFWEEKISO(foo)",
- },
  )

  self.validate_all(
@@ -1321,6 +1327,37 @@ class TestSnowflake(Validator):
  },
  )

+ self.validate_identity("SELECT LIKE(col, 'pattern')", "SELECT col LIKE 'pattern'")
+ self.validate_identity("SELECT ILIKE(col, 'pattern')", "SELECT col ILIKE 'pattern'")
+ self.validate_identity(
+ "SELECT LIKE(col, 'pattern', '\\\\')", "SELECT col LIKE 'pattern' ESCAPE '\\\\'"
+ )
+ self.validate_identity(
+ "SELECT ILIKE(col, 'pattern', '\\\\')", "SELECT col ILIKE 'pattern' ESCAPE '\\\\'"
+ )
+ self.validate_identity(
+ "SELECT LIKE(col, 'pattern', '!')", "SELECT col LIKE 'pattern' ESCAPE '!'"
+ )
+ self.validate_identity(
+ "SELECT ILIKE(col, 'pattern', '!')", "SELECT col ILIKE 'pattern' ESCAPE '!'"
+ )
+
+ self.validate_identity("SELECT BASE64_DECODE_BINARY('SGVsbG8=')")
+ self.validate_identity(
+ "SELECT BASE64_DECODE_BINARY('SGVsbG8=', 'ABCDEFGHwxyz0123456789+/')"
+ )
+
+ self.validate_identity("SELECT BASE64_DECODE_STRING('SGVsbG8gV29ybGQ=')")
+ self.validate_identity(
+ "SELECT BASE64_DECODE_STRING('SGVsbG8gV29ybGQ=', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/')"
+ )
+
+ self.validate_identity("SELECT BASE64_ENCODE('Hello World')")
+ self.validate_identity("SELECT BASE64_ENCODE('Hello World', 76)")
+ self.validate_identity(
+ "SELECT BASE64_ENCODE('Hello World', 76, 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/')"
+ )
+
  def test_null_treatment(self):
  self.validate_all(
  r"SELECT FIRST_VALUE(TABLE1.COLUMN1) OVER (PARTITION BY RANDOM_COLUMN1, RANDOM_COLUMN2 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS MY_ALIAS FROM TABLE1",
tests/dialects/test_trino.py
@@ -35,6 +35,14 @@ class TestTrino(Validator):
  "trino": "SELECT CAST('2012-10-31 01:00:00 +02:00' AS TIMESTAMP WITH TIME ZONE)",
  },
  )
+ self.validate_all(
+ "SELECT FORMAT('%s', 123)",
+ write={
+ "duckdb": "SELECT FORMAT('{}', 123)",
+ "snowflake": "SELECT TO_CHAR(123)",
+ "trino": "SELECT FORMAT('%s', 123)",
+ },
+ )

  def test_listagg(self):
  self.validate_identity(
tests/dialects/test_tsql.py
@@ -20,6 +20,7 @@ class TestTSQL(Validator):
  self.validate_identity("SELECT * FROM a..b")

  self.validate_identity("SELECT SYSDATETIMEOFFSET()")
+ self.validate_identity("SELECT COMPRESS('Hello World')")
  self.validate_identity("GO").assert_is(exp.Command)
  self.validate_identity("SELECT go").selects[0].assert_is(exp.Column)
  self.validate_identity("CREATE view a.b.c", "CREATE VIEW b.c")