sqlframe 1.13.0__tar.gz → 1.14.0__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (215)
  1. {sqlframe-1.13.0 → sqlframe-1.14.0}/PKG-INFO +1 -1
  2. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/bigquery.md +1 -0
  3. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/duckdb.md +2 -0
  4. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/postgres.md +2 -0
  5. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/snowflake.md +2 -0
  6. {sqlframe-1.13.0 → sqlframe-1.14.0}/setup.py +1 -1
  7. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/_version.py +2 -2
  8. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/function_alternatives.py +104 -0
  9. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/functions.py +27 -10
  10. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/util.py +4 -0
  11. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.py +3 -0
  12. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/functions.py +4 -0
  13. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/functions.py +3 -0
  14. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/session.py +8 -0
  15. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/functions.py +2 -0
  16. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/functions.py +1 -0
  17. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/PKG-INFO +1 -1
  18. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/requires.txt +1 -1
  19. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_catalog.py +2 -0
  20. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_int_functions.py +54 -8
  21. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_functions.py +3 -3
  22. {sqlframe-1.13.0 → sqlframe-1.14.0}/.github/CODEOWNERS +0 -0
  23. {sqlframe-1.13.0 → sqlframe-1.14.0}/.github/workflows/main.workflow.yaml +0 -0
  24. {sqlframe-1.13.0 → sqlframe-1.14.0}/.github/workflows/publish.workflow.yaml +0 -0
  25. {sqlframe-1.13.0 → sqlframe-1.14.0}/.gitignore +0 -0
  26. {sqlframe-1.13.0 → sqlframe-1.14.0}/.pre-commit-config.yaml +0 -0
  27. {sqlframe-1.13.0 → sqlframe-1.14.0}/.readthedocs.yaml +0 -0
  28. {sqlframe-1.13.0 → sqlframe-1.14.0}/LICENSE +0 -0
  29. {sqlframe-1.13.0 → sqlframe-1.14.0}/Makefile +0 -0
  30. {sqlframe-1.13.0 → sqlframe-1.14.0}/README.md +0 -0
  31. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/add_chatgpt_support.md +0 -0
  32. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  33. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  34. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  35. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  36. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  37. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  38. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  39. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  40. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/but_wait_theres_more.gif +0 -0
  41. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/cake.gif +0 -0
  42. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  43. {sqlframe-1.13.0 → sqlframe-1.14.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  44. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/configuration.md +0 -0
  45. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/bigquery.md +0 -0
  46. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/duckdb.md +0 -0
  47. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/images/SF.png +0 -0
  48. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/images/favicon.png +0 -0
  49. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/images/favicon_old.png +0 -0
  50. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  51. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/images/sqlframe_logo.png +0 -0
  52. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/docs/postgres.md +0 -0
  53. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/images/SF.png +0 -0
  54. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/images/favicon.png +0 -0
  55. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/images/favicon_old.png +0 -0
  56. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/images/sqlframe_diagram.png +0 -0
  57. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/images/sqlframe_logo.png +0 -0
  58. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/index.md +0 -0
  59. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/requirements.txt +0 -0
  60. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/spark.md +0 -0
  61. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/standalone.md +0 -0
  62. {sqlframe-1.13.0 → sqlframe-1.14.0}/docs/stylesheets/extra.css +0 -0
  63. {sqlframe-1.13.0 → sqlframe-1.14.0}/mkdocs.yml +0 -0
  64. {sqlframe-1.13.0 → sqlframe-1.14.0}/pytest.ini +0 -0
  65. {sqlframe-1.13.0 → sqlframe-1.14.0}/renovate.json +0 -0
  66. {sqlframe-1.13.0 → sqlframe-1.14.0}/setup.cfg +0 -0
  67. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/LICENSE +0 -0
  68. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/__init__.py +0 -0
  69. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/__init__.py +0 -0
  70. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/_typing.py +0 -0
  71. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/catalog.py +0 -0
  72. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/column.py +0 -0
  73. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/dataframe.py +0 -0
  74. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/decorators.py +0 -0
  75. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/exceptions.py +0 -0
  76. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/group.py +0 -0
  77. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/mixins/__init__.py +0 -0
  78. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  79. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  80. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  81. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/normalize.py +0 -0
  82. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/operations.py +0 -0
  83. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/readerwriter.py +0 -0
  84. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/session.py +0 -0
  85. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/transforms.py +0 -0
  86. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/types.py +0 -0
  87. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/window.py +0 -0
  88. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/__init__.py +0 -0
  89. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/catalog.py +0 -0
  90. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/column.py +0 -0
  91. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/dataframe.py +0 -0
  92. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.pyi +0 -0
  93. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/group.py +0 -0
  94. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/readwriter.py +0 -0
  95. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/session.py +0 -0
  96. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/types.py +0 -0
  97. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/window.py +0 -0
  98. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/__init__.py +0 -0
  99. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/catalog.py +0 -0
  100. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/column.py +0 -0
  101. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/dataframe.py +0 -0
  102. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/functions.pyi +0 -0
  103. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/group.py +0 -0
  104. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/readwriter.py +0 -0
  105. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/session.py +0 -0
  106. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/types.py +0 -0
  107. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/window.py +0 -0
  108. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/__init__.py +0 -0
  109. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/catalog.py +0 -0
  110. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/column.py +0 -0
  111. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/dataframe.py +0 -0
  112. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/functions.pyi +0 -0
  113. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/group.py +0 -0
  114. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/readwriter.py +0 -0
  115. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/types.py +0 -0
  116. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/window.py +0 -0
  117. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/__init__.py +0 -0
  118. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/catalog.py +0 -0
  119. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/column.py +0 -0
  120. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/dataframe.py +0 -0
  121. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/functions.py +0 -0
  122. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/group.py +0 -0
  123. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/readwriter.py +0 -0
  124. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/session.py +0 -0
  125. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/types.py +0 -0
  126. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/redshift/window.py +0 -0
  127. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/__init__.py +0 -0
  128. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/catalog.py +0 -0
  129. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/column.py +0 -0
  130. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/dataframe.py +0 -0
  131. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/functions.pyi +0 -0
  132. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/group.py +0 -0
  133. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/readwriter.py +0 -0
  134. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/session.py +0 -0
  135. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/types.py +0 -0
  136. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/window.py +0 -0
  137. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/__init__.py +0 -0
  138. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/catalog.py +0 -0
  139. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/column.py +0 -0
  140. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/dataframe.py +0 -0
  141. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/functions.pyi +0 -0
  142. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/group.py +0 -0
  143. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/readwriter.py +0 -0
  144. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/session.py +0 -0
  145. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/types.py +0 -0
  146. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/window.py +0 -0
  147. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/__init__.py +0 -0
  148. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/catalog.py +0 -0
  149. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/column.py +0 -0
  150. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/dataframe.py +0 -0
  151. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/functions.py +0 -0
  152. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/group.py +0 -0
  153. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/readwriter.py +0 -0
  154. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/session.py +0 -0
  155. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/types.py +0 -0
  156. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/standalone/window.py +0 -0
  157. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/testing/__init__.py +0 -0
  158. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/testing/utils.py +0 -0
  159. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  160. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  161. {sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/top_level.txt +0 -0
  162. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/__init__.py +0 -0
  163. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/common_fixtures.py +0 -0
  164. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/conftest.py +0 -0
  165. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/fixtures/employee.csv +0 -0
  166. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/fixtures/employee.json +0 -0
  167. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/fixtures/employee.parquet +0 -0
  168. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/fixtures/employee_extra_line.csv +0 -0
  169. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/__init__.py +0 -0
  170. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/__init__.py +0 -0
  171. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  172. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  173. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  174. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  175. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/duck/__init__.py +0 -0
  176. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  177. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  178. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  179. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  180. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/__init__.py +0 -0
  181. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  182. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  183. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/__init__.py +0 -0
  184. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  185. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  186. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  187. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  188. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  189. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  190. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/spark/__init__.py +0 -0
  191. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  192. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  193. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_column.py +0 -0
  194. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  195. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_reader.py +0 -0
  196. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_session.py +0 -0
  197. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_writer.py +0 -0
  198. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_int_testing.py +0 -0
  199. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/fixtures.py +0 -0
  200. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/test_int_dataframe.py +0 -0
  201. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  202. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/test_int_grouped_data.py +0 -0
  203. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/test_int_session.py +0 -0
  204. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/types.py +0 -0
  205. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/__init__.py +0 -0
  206. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/__init__.py +0 -0
  207. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/fixtures.py +0 -0
  208. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_column.py +0 -0
  209. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_dataframe.py +0 -0
  210. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  211. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_session.py +0 -0
  212. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  213. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_types.py +0 -0
  214. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_window.py +0 -0
  215. {sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/test_util.py +0 -0
{sqlframe-1.13.0 → sqlframe-1.14.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.13.0
+Version: 1.14.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
{sqlframe-1.13.0 → sqlframe-1.14.0}/docs/bigquery.md
@@ -446,6 +446,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
   * Shorthand expressions not supported. Ex: Use `month` instead of `mon`
+* [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
 * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
{sqlframe-1.13.0 → sqlframe-1.14.0}/docs/duckdb.md
@@ -279,6 +279,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
 * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
 * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
+* [day](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.day.html)
 * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
 * [dayofweek](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofweek.html)
 * [dayofyear](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofyear.html)
@@ -411,6 +412,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
 * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
+* [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
 * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
{sqlframe-1.13.0 → sqlframe-1.14.0}/docs/postgres.md
@@ -403,6 +403,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
 * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
   * Negative index returns null and cannot lookup elements in maps
+* [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
+* [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
{sqlframe-1.13.0 → sqlframe-1.14.0}/docs/snowflake.md
@@ -444,6 +444,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+* [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
+* [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
{sqlframe-1.13.0 → sqlframe-1.14.0}/setup.py
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.11.0",
-        "sqlglot>=24.0.0,<25.4",
+        "sqlglot>=24.0.0,<25.5",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.13.0'
-__version_tuple__ = version_tuple = (1, 13, 0)
+__version__ = version = '1.14.0'
+__version_tuple__ = version_tuple = (1, 14, 0)
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/function_alternatives.py
@@ -561,6 +561,14 @@ def to_date_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) ->
     return to_date(to_timestamp(col, format))
 
 
+def to_date_time_format(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.functions import to_date
+
+    lit = get_func_from_session("lit")
+    format = lit(format or spark_default_time_format())
+    return to_date(col, format=format)
+
+
 def last_day_with_cast(col: ColumnOrName) -> Column:
     from sqlframe.base.functions import last_day
 
@@ -1519,3 +1527,99 @@ def to_unix_timestamp_include_default_format(
     else:
         format = format_time_from_spark(format)
     return to_unix_timestamp(timestamp, format)
+
+
+def array_append_list_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
+    lit = get_func_from_session("lit")
+    value = value if isinstance(value, Column) else lit(value)
+    return Column.invoke_anonymous_function(col, "LIST_APPEND", value)
+
+
+def array_append_using_array_cat(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
+    lit = get_func_from_session("lit")
+    array = get_func_from_session("array")
+    value = value if isinstance(value, Column) else lit(value)
+    return Column.invoke_anonymous_function(col, "ARRAY_CONCAT", array(value))
+
+
+def day_with_try_to_timestamp(col: ColumnOrName) -> Column:
+    from sqlframe.base.functions import day
+
+    try_to_timestamp = get_func_from_session("try_to_timestamp")
+    to_date = get_func_from_session("to_date")
+    when = get_func_from_session("when")
+    _is_string = get_func_from_session("_is_string")
+    coalesce = get_func_from_session("coalesce")
+    return day(
+        when(
+            _is_string(col),
+            coalesce(try_to_timestamp(col), to_date(col)),
+        ).otherwise(col)
+    )
+
+
+def try_to_timestamp_strptime(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(col, "TRY_STRPTIME", format_time_from_spark(format))  # type: ignore
+
+
+def try_to_timestamp_safe(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(
+        format_time_from_spark(format),  # type: ignore
+        "SAFE.PARSE_TIMESTAMP",
+        col,  # type: ignore
+    )
+
+
+def try_to_timestamp_pgtemp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(
+        col,
+        "pg_temp.TRY_TO_TIMESTAMP",
+        format_time_from_spark(format),  # type: ignore
+    )
+
+
+def typeof_pg_typeof(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "pg_typeof").cast("regtype").cast("text")
+
+
+def typeof_from_variant(col: ColumnOrName) -> Column:
+    col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+    return Column.invoke_anonymous_function(col, "TYPEOF")
+
+
+def _is_string_using_typeof_varchar(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("VARCHAR"))
+
+
+def _is_string_using_typeof_char_varying(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(
+        (typeof(col) == lit("text"))
+        | (typeof(col) == lit("character varying"))
+        | (typeof(col) == lit("unknown"))
+        | (typeof(col) == lit("text"))
+    )
+
+
+def _is_string_using_typeof_string(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("STRING"))
+
+
+def _is_string_using_typeof_string_lcase(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("string"))
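
Each alternative above follows the file's existing pattern: an engine module re-exports the variant under the canonical PySpark name (see the per-engine import hunks below). A minimal sketch of the DuckDB path, assuming an in-memory DuckDBSession as in the project docs; the sample data is illustrative:

    from sqlframe.duckdb import DuckDBSession
    from sqlframe.duckdb import functions as F  # try_to_timestamp resolves to try_to_timestamp_strptime

    session = DuckDBSession()
    df = session.createDataFrame([("1997-02-28 10:30:00",), ("not a timestamp",)], ["t"])

    # DuckDB's TRY_STRPTIME returns NULL on unparseable input instead of raising,
    # matching Spark's try_to_timestamp semantics, so the second row yields NULL.
    df.select(F.try_to_timestamp(df.t).alias("dt")).show()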
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/functions.py
@@ -13,7 +13,12 @@ from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
 from sqlframe.base.decorators import func_metadata as meta
-from sqlframe.base.util import format_time_from_spark, spark_default_time_format
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_date_format,
+    spark_default_time_format,
+)
 
 if t.TYPE_CHECKING:
     from pyspark.sql.session import SparkContext
@@ -877,7 +882,7 @@ def months_between(
 
 @meta()
 def to_date(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-    format = lit(format or spark_default_time_format())
+    format = lit(format or spark_default_date_format())
     if format is not None:
         return Column.invoke_expression_over_column(
             col, expression.TsOrDsToDate, format=format_time_from_spark(format)
@@ -1295,7 +1300,7 @@ def array_agg(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.ArrayAgg)
 
 
-@meta(unsupported_engines="*")
+@meta()
 def array_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
     value = value if isinstance(value, Column) else lit(value)
     return Column.invoke_anonymous_function(col, "ARRAY_APPEND", value)
@@ -1743,7 +1748,7 @@ def map_zip_with(
     return Column.invoke_anonymous_function(col1, "MAP_ZIP_WITH", col2, Column(f_expression))
 
 
-@meta(unsupported_engines=["postgres", "snowflake"])
+@meta()
 def typeof(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "TYPEOF")
@@ -2162,7 +2167,7 @@ def datepart(field: ColumnOrName, source: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(field, "datepart", source)
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["bigquery", "postgres", "snowflake"])
 def day(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.Day)
@@ -5277,7 +5282,7 @@ def try_element_at(col: ColumnOrName, extraction: ColumnOrName) -> Column:
     )
 
 
-@meta(unsupported_engines="*")
+@meta()
 def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
     """
     Parses the `col` with the `format` to a timestamp. The function always
@@ -5302,10 +5307,8 @@ def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None)
     >>> df.select(try_to_timestamp(df.t, lit('yyyy-MM-dd HH:mm:ss')).alias('dt')).collect()
     [Row(dt=datetime.datetime(1997, 2, 28, 10, 30))]
     """
-    if format is not None:
-        return Column.invoke_anonymous_function(col, "try_to_timestamp", format)
-    else:
-        return Column.invoke_anonymous_function(col, "try_to_timestamp")
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(col, "try_to_timestamp", format_time_from_spark(format))  # type: ignore
 
 
 @meta()
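
With the default format now always injected, every call renders an explicit format string. A small sketch against the standalone functions, with expected renderings taken from the updated unit tests at the end of this diff:

    from sqlframe.standalone import functions as SF

    # Both calls now carry an explicit format argument in the generated SQL:
    SF.try_to_timestamp("cola")          # TRY_TO_TIMESTAMP(cola, 'yyyy-MM-dd HH:mm:ss')
    SF.try_to_timestamp("cola", "blah")  # TRY_TO_TIMESTAMP(cola, 'blah')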
@@ -5797,6 +5800,20 @@ def years(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "years")
 
 
+# SQLFrame specific
+@meta()
+def _is_string(col: ColumnOrName) -> Column:
+    col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+    return Column.invoke_anonymous_function(col, "IS_VARCHAR")
+
+
+@meta()
+def _is_date(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    upper = get_func_from_session("upper")
+    return lit(upper(typeof(col)) == lit("DATE"))
+
+
 @meta()
 def _lambda_quoted(value: str) -> t.Optional[bool]:
     return False if value == "_" else None
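
The underscore prefix marks these as SQLFrame-internal helpers rather than public PySpark API; the base `_is_string` assumes Snowflake-style VARIANT semantics (TO_VARIANT/IS_VARCHAR), and the other engines swap in the typeof-based alternatives defined earlier in this diff. A sketch of the intended behavior, mirroring the new integration tests below and using DuckDB for illustration:

    from sqlframe.duckdb import DuckDBSession
    from sqlframe.duckdb import functions as F  # _is_string resolves to _is_string_using_typeof_varchar

    session = DuckDBSession()
    row = session.range(1).select(
        F._is_string(F.lit("value")),  # DuckDB typeof(...) is 'VARCHAR', so True
        F._is_string(F.lit(1)),        # not a string, so False
    ).collect()[0]
    assert (row[0], row[1]) == (True, False)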
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/base/util.py
@@ -365,3 +365,7 @@ def format_time_from_spark(value: ColumnOrLiteral) -> Column:
 
 def spark_default_time_format() -> str:
     return Dialect["spark"].TIME_FORMAT.strip("'")
+
+
+def spark_default_date_format() -> str:
+    return Dialect["spark"].DATE_FORMAT.strip("'")
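
The new helper mirrors spark_default_time_format just above it but reads sqlglot's Spark DATE_FORMAT constant, which is what lets to_date default to a date-only pattern. A quick sketch of the expected values; the time format is confirmed by the updated unit tests below, while the date format is sqlglot's usual Spark default and is an assumption here:

    from sqlframe.base.util import spark_default_date_format, spark_default_time_format

    # Both helpers strip the quotes sqlglot stores around its format constants.
    print(spark_default_time_format())  # yyyy-MM-dd HH:mm:ss
    print(spark_default_date_format())  # yyyy-MM-dd (assumed sqlglot default)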
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.py
@@ -72,6 +72,9 @@ from sqlframe.base.function_alternatives import ( # noqa
     array_union_using_array_concat as array_union,
     sequence_from_generate_array as sequence,
     position_as_strpos as position,
+    try_to_timestamp_safe as try_to_timestamp,
+    _is_string_using_typeof_string as _is_string,
+    array_append_using_array_cat as array_append,
 )
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/duckdb/functions.py
@@ -46,4 +46,8 @@ from sqlframe.base.function_alternatives import ( # noqa
     array_max_from_sort as array_max,
     sequence_from_generate_series as sequence,
     try_element_at_zero_based as try_element_at,
+    day_with_try_to_timestamp as day,
+    try_to_timestamp_strptime as try_to_timestamp,
+    _is_string_using_typeof_varchar as _is_string,
+    array_append_list_append as array_append,
 )
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/functions.py
@@ -64,4 +64,7 @@ from sqlframe.base.function_alternatives import ( # noqa
     right_cast_len as right,
     position_cast_start as position,
     try_element_at_zero_based as try_element_at,
+    try_to_timestamp_pgtemp as try_to_timestamp,
+    typeof_pg_typeof as typeof,
+    _is_string_using_typeof_char_varying as _is_string,
 )
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/postgres/session.py
@@ -38,6 +38,14 @@ class PostgresSession(
         if not hasattr(self, "_conn"):
             super().__init__(conn)
             self._execute("CREATE EXTENSION IF NOT EXISTS fuzzystrmatch")
+            self._execute("""CREATE OR REPLACE FUNCTION pg_temp.try_to_timestamp(input_text TEXT, format TEXT)
+            RETURNS TIMESTAMP AS $$
+            BEGIN
+                RETURN TO_TIMESTAMP(input_text, format);
+            EXCEPTION WHEN OTHERS THEN
+                RETURN NULL;
+            END;
+            $$ LANGUAGE plpgsql;""")
 
     def _fetch_rows(
         self, sql: t.Union[str, exp.Expression], *, quote_identifiers: bool = True
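
Postgres has no built-in try_to_timestamp, and TO_TIMESTAMP raises on malformed input, so the session now installs a plpgsql wrapper that catches the error and returns NULL. Defining it under pg_temp keeps it session-local and avoids requiring persistent DDL privileges. A usage sketch, assuming a psycopg2 connection as on the Postgres docs page; the connection parameters are illustrative:

    import psycopg2
    from sqlframe.postgres import PostgresSession
    from sqlframe.postgres import functions as F  # try_to_timestamp resolves to pg_temp.TRY_TO_TIMESTAMP

    conn = psycopg2.connect(dbname="tests", user="postgres")  # illustrative credentials
    session = PostgresSession(conn)

    df = session.createDataFrame([("1997-02-28 10:30:00",), ("garbage",)], ["t"])
    # The wrapper swallows the parse error on the second row and yields NULL.
    df.select(F.try_to_timestamp(df.t).alias("dt")).show()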
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/snowflake/functions.py
@@ -63,4 +63,6 @@ from sqlframe.base.function_alternatives import ( # noqa
     map_concat_using_map_cat as map_concat,
     sequence_from_array_generate_range as sequence,
     to_number_using_to_double as to_number,
+    typeof_from_variant as typeof,
+    to_date_time_format as to_date,
 )
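
Two Snowflake quirks drive these aliases: TYPEOF accepts only VARIANT arguments, hence the TO_VARIANT wrap in typeof_from_variant, and to_date keeps the full time format so timestamp-shaped strings still parse. A hedged sketch of the expected rendering:

    from sqlframe.snowflake import functions as F

    # typeof resolves to typeof_from_variant, expected to render roughly as:
    #   TYPEOF(TO_VARIANT(col))
    col = F.typeof("col")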
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe/spark/functions.py
@@ -17,4 +17,5 @@ from sqlframe.base.function_alternatives import ( # noqa
     percentile_without_disc as percentile,
     add_months_by_multiplication as add_months,
     arrays_overlap_renamed as arrays_overlap,
+    _is_string_using_typeof_string_lcase as _is_string,
 )
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.13.0
+Version: 1.14.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
{sqlframe-1.13.0 → sqlframe-1.14.0}/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
 prettytable<3.11.0
-sqlglot<25.4,>=24.0.0
+sqlglot<25.5,>=24.0.0
 typing_extensions<5,>=4.8
 
 [bigquery]
{sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_catalog.py
@@ -36,7 +36,9 @@ def test_list_databases(postgres_session: PostgresSession):
         Database(name="db1", catalog="tests", description=None, locationUri=""),
         Database(name="information_schema", catalog="tests", description=None, locationUri=""),
         Database(name="pg_catalog", catalog="tests", description=None, locationUri=""),
+        Database(name="pg_temp_3", catalog="tests", description=None, locationUri=""),
         Database(name="pg_toast", catalog="tests", description=None, locationUri=""),
+        Database(name="pg_toast_temp_3", catalog="tests", description=None, locationUri=""),
         Database(name="public", catalog="tests", description=None, locationUri=""),
     ]
 
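The two new pg_temp_3 / pg_toast_temp_3 entries are a side effect of the pg_temp.try_to_timestamp function created in PostgresSession above: creating a temporary object materializes the session's temporary schema and its toast companion, which list_databases then reports. The _3 suffix reflects the backend's temp-schema slot and may vary between connections.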
{sqlframe-1.13.0 → sqlframe-1.14.0}/tests/integration/engines/test_int_functions.py
@@ -204,6 +204,24 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
             pytest.skip("BigQuery doesn't support binary")
         if expected == "timestamp":
             expected = "datetime"
+    if isinstance(session, PostgresSession):
+        if expected.startswith("map"):
+            pytest.skip("Postgres doesn't support map types")
+        elif expected.startswith("struct"):
+            pytest.skip("Postgres doesn't support struct types")
+        elif expected == "binary":
+            pytest.skip("Postgres doesn't support binary")
+    if isinstance(session, SnowflakeSession):
+        if expected == "bigint":
+            expected = "int"
+        elif expected == "string":
+            expected = "varchar"
+        elif expected.startswith("map") or expected.startswith("struct"):
+            expected = "object"
+        elif expected.startswith("array"):
+            pytest.skip("Snowflake doesn't handle arrays properly in values clause")
+        elif expected == "timestamp":
+            expected = "timestampntz"
     result = df.select(typeof("col").alias("test")).first()[0]
     assert exp.DataType.build(result, dialect=dialect) == exp.DataType.build(
         expected, dialect=dialect
@@ -2049,9 +2067,10 @@ def test_array_agg(get_session_and_func):
     ]
 
 
-def test_array_append(get_session_and_func):
+def test_array_append(get_session_and_func, get_func):
     session, array_append = get_session_and_func("array_append")
-    df = session.createDataFrame([Row(c1=["b", "a", "c"], c2="c")])
+    lit = get_func("lit", session)
+    df = session.range(1).select(lit(["b", "a", "c"]).alias("c1"), lit("c").alias("c2"))
     assert df.select(array_append(df.c1, df.c2)).collect() == [
         Row(value=["b", "a", "c", "c"]),
     ]
@@ -4849,12 +4868,16 @@ def test_try_to_timestamp(get_session_and_func, get_func):
     session, try_to_timestamp = get_session_and_func("try_to_timestamp")
     lit = get_func("lit", session)
     df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])
-    assert df.select(try_to_timestamp(df.t).alias("dt")).first()[0] == datetime.datetime(
-        1997, 2, 28, 10, 30
-    )
-    assert df.select(try_to_timestamp(df.t, lit("yyyy-MM-dd HH:mm:ss")).alias("dt")).first()[
-        0
-    ] == datetime.datetime(1997, 2, 28, 10, 30)
+    result = df.select(try_to_timestamp(df.t).alias("dt")).first()[0]
+    if isinstance(session, BigQuerySession):
+        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
+    else:
+        assert result == datetime.datetime(1997, 2, 28, 10, 30)
+    result = df.select(try_to_timestamp(df.t, lit("yyyy-MM-dd HH:mm:ss")).alias("dt")).first()[0]
+    if isinstance(session, BigQuerySession):
+        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
+    else:
+        assert result == datetime.datetime(1997, 2, 28, 10, 30)
 
 
 def test_ucase(get_session_and_func, get_func):
@@ -5010,3 +5033,26 @@ def test_xpath_string(get_session_and_func, get_func):
     lit = get_func("lit", session)
     df = session.createDataFrame([("<a><b>b</b><c>cc</c></a>",)], ["x"])
     assert df.select(xpath_string(df.x, lit("a/c")).alias("r")).first()[0] == "cc"
+
+
+def test_is_string(get_session_and_func, get_func):
+    session, _is_string = get_session_and_func("_is_string")
+    lit = get_func("lit", session)
+    assert session.range(1).select(_is_string(lit("value")), _is_string(lit(1))).collect() == [
+        Row(v1=True, v2=False)
+    ]
+
+
+def test_is_date(get_session_and_func, get_func):
+    session, _is_date = get_session_and_func("_is_date")
+    to_date = get_func("to_date", session)
+    lit = get_func("lit", session)
+    assert session.range(1).select(
+        _is_date(to_date(lit("2021-01-01"), "yyyy-MM-dd")), _is_date(lit("2021-01-01"))
+    ).collect() == [Row(v1=True, v2=False)]
+
+
+# def test_
+
+# typeof = get_func("typeof", session)
+# assert session.range(1).select(typeof(to_date(lit("2021-01-01"), 'yyyy-MM-dd'))).collect() == [Row(value=True)]
{sqlframe-1.13.0 → sqlframe-1.14.0}/tests/unit/standalone/test_functions.py
@@ -4636,9 +4636,9 @@ def test_try_element_at(expression, expected):
 @pytest.mark.parametrize(
     "expression, expected",
     [
-        (SF.try_to_timestamp("cola"), "TRY_TO_TIMESTAMP(cola)"),
-        (SF.try_to_timestamp(SF.col("cola")), "TRY_TO_TIMESTAMP(cola)"),
-        (SF.try_to_timestamp("cola", "colb"), "TRY_TO_TIMESTAMP(cola, colb)"),
+        (SF.try_to_timestamp("cola"), "TRY_TO_TIMESTAMP(cola, 'yyyy-MM-dd HH:mm:ss')"),
+        (SF.try_to_timestamp(SF.col("cola")), "TRY_TO_TIMESTAMP(cola, 'yyyy-MM-dd HH:mm:ss')"),
+        (SF.try_to_timestamp("cola", "blah"), "TRY_TO_TIMESTAMP(cola, 'blah')"),
     ],
 )
 def test_try_to_timestamp(expression, expected):