sqlframe 1.12.0.tar.gz → 1.14.0.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (215)
  1. {sqlframe-1.12.0 → sqlframe-1.14.0}/PKG-INFO +1 -1
  2. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/bigquery.md +1 -4
  3. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/duckdb.md +3 -6
  4. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/postgres.md +2 -3
  5. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/snowflake.md +2 -4
  6. {sqlframe-1.12.0 → sqlframe-1.14.0}/setup.py +2 -2
  7. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/_version.py +2 -2
  8. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/function_alternatives.py +115 -10
  9. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/functions.py +49 -23
  10. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/session.py +4 -1
  11. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/util.py +28 -1
  12. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.py +13 -17
  13. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.pyi +0 -1
  14. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/session.py +0 -1
  15. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/functions.py +4 -1
  16. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/session.py +0 -2
  17. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/functions.py +3 -0
  18. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/session.py +8 -2
  19. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/functions.py +2 -0
  20. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/functions.pyi +0 -1
  21. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/session.py +0 -2
  22. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/functions.py +1 -0
  23. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe.egg-info/PKG-INFO +1 -1
  24. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe.egg-info/requires.txt +2 -2
  25. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_catalog.py +2 -0
  26. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_int_functions.py +75 -68
  27. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_functions.py +5 -6
  28. {sqlframe-1.12.0 → sqlframe-1.14.0}/.github/CODEOWNERS +0 -0
  29. {sqlframe-1.12.0 → sqlframe-1.14.0}/.github/workflows/main.workflow.yaml +0 -0
  30. {sqlframe-1.12.0 → sqlframe-1.14.0}/.github/workflows/publish.workflow.yaml +0 -0
  31. {sqlframe-1.12.0 → sqlframe-1.14.0}/.gitignore +0 -0
  32. {sqlframe-1.12.0 → sqlframe-1.14.0}/.pre-commit-config.yaml +0 -0
  33. {sqlframe-1.12.0 → sqlframe-1.14.0}/.readthedocs.yaml +0 -0
  34. {sqlframe-1.12.0 → sqlframe-1.14.0}/LICENSE +0 -0
  35. {sqlframe-1.12.0 → sqlframe-1.14.0}/Makefile +0 -0
  36. {sqlframe-1.12.0 → sqlframe-1.14.0}/README.md +0 -0
  37. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/add_chatgpt_support.md +0 -0
  38. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  39. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  40. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  41. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  42. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  43. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  44. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  45. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  46. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/but_wait_theres_more.gif +0 -0
  47. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/cake.gif +0 -0
  48. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  49. {sqlframe-1.12.0 → sqlframe-1.14.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  50. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/configuration.md +0 -0
  51. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/bigquery.md +0 -0
  52. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/duckdb.md +0 -0
  53. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/images/SF.png +0 -0
  54. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/images/favicon.png +0 -0
  55. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/images/favicon_old.png +0 -0
  56. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  57. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/images/sqlframe_logo.png +0 -0
  58. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/docs/postgres.md +0 -0
  59. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/images/SF.png +0 -0
  60. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/images/favicon.png +0 -0
  61. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/images/favicon_old.png +0 -0
  62. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/images/sqlframe_diagram.png +0 -0
  63. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/images/sqlframe_logo.png +0 -0
  64. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/index.md +0 -0
  65. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/requirements.txt +0 -0
  66. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/spark.md +0 -0
  67. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/standalone.md +0 -0
  68. {sqlframe-1.12.0 → sqlframe-1.14.0}/docs/stylesheets/extra.css +0 -0
  69. {sqlframe-1.12.0 → sqlframe-1.14.0}/mkdocs.yml +0 -0
  70. {sqlframe-1.12.0 → sqlframe-1.14.0}/pytest.ini +0 -0
  71. {sqlframe-1.12.0 → sqlframe-1.14.0}/renovate.json +0 -0
  72. {sqlframe-1.12.0 → sqlframe-1.14.0}/setup.cfg +0 -0
  73. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/LICENSE +0 -0
  74. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/__init__.py +0 -0
  75. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/__init__.py +0 -0
  76. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/_typing.py +0 -0
  77. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/catalog.py +0 -0
  78. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/column.py +0 -0
  79. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/dataframe.py +0 -0
  80. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/decorators.py +0 -0
  81. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/exceptions.py +0 -0
  82. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/group.py +0 -0
  83. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/mixins/__init__.py +0 -0
  84. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  85. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  86. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  87. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/normalize.py +0 -0
  88. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/operations.py +0 -0
  89. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/readerwriter.py +0 -0
  90. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/transforms.py +0 -0
  91. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/types.py +0 -0
  92. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/window.py +0 -0
  93. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/__init__.py +0 -0
  94. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/catalog.py +0 -0
  95. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/column.py +0 -0
  96. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/dataframe.py +0 -0
  97. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/group.py +0 -0
  98. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/readwriter.py +0 -0
  99. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/types.py +0 -0
  100. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/window.py +0 -0
  101. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/__init__.py +0 -0
  102. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/catalog.py +0 -0
  103. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/column.py +0 -0
  104. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/dataframe.py +0 -0
  105. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/functions.pyi +0 -0
  106. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/group.py +0 -0
  107. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/readwriter.py +0 -0
  108. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/types.py +0 -0
  109. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/duckdb/window.py +0 -0
  110. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/__init__.py +0 -0
  111. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/catalog.py +0 -0
  112. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/column.py +0 -0
  113. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/dataframe.py +0 -0
  114. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/functions.pyi +0 -0
  115. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/group.py +0 -0
  116. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/readwriter.py +0 -0
  117. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/types.py +0 -0
  118. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/postgres/window.py +0 -0
  119. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/__init__.py +0 -0
  120. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/catalog.py +0 -0
  121. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/column.py +0 -0
  122. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/dataframe.py +0 -0
  123. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/functions.py +0 -0
  124. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/group.py +0 -0
  125. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/readwriter.py +0 -0
  126. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/session.py +0 -0
  127. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/types.py +0 -0
  128. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/redshift/window.py +0 -0
  129. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/__init__.py +0 -0
  130. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/catalog.py +0 -0
  131. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/column.py +0 -0
  132. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/dataframe.py +0 -0
  133. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/group.py +0 -0
  134. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/readwriter.py +0 -0
  135. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/types.py +0 -0
  136. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/snowflake/window.py +0 -0
  137. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/__init__.py +0 -0
  138. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/catalog.py +0 -0
  139. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/column.py +0 -0
  140. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/dataframe.py +0 -0
  141. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/functions.pyi +0 -0
  142. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/group.py +0 -0
  143. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/readwriter.py +0 -0
  144. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/session.py +0 -0
  145. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/types.py +0 -0
  146. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/spark/window.py +0 -0
  147. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/__init__.py +0 -0
  148. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/catalog.py +0 -0
  149. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/column.py +0 -0
  150. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/dataframe.py +0 -0
  151. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/functions.py +0 -0
  152. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/group.py +0 -0
  153. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/readwriter.py +0 -0
  154. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/session.py +0 -0
  155. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/types.py +0 -0
  156. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/standalone/window.py +0 -0
  157. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/testing/__init__.py +0 -0
  158. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/testing/utils.py +0 -0
  159. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  160. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  161. {sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe.egg-info/top_level.txt +0 -0
  162. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/__init__.py +0 -0
  163. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/common_fixtures.py +0 -0
  164. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/conftest.py +0 -0
  165. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/fixtures/employee.csv +0 -0
  166. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/fixtures/employee.json +0 -0
  167. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/fixtures/employee.parquet +0 -0
  168. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/fixtures/employee_extra_line.csv +0 -0
  169. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/__init__.py +0 -0
  170. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/__init__.py +0 -0
  171. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  172. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  173. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  174. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  175. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/duck/__init__.py +0 -0
  176. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  177. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  178. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  179. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  180. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/__init__.py +0 -0
  181. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  182. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  183. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/__init__.py +0 -0
  184. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  185. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  186. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  187. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  188. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  189. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  190. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/spark/__init__.py +0 -0
  191. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  192. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  193. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_column.py +0 -0
  194. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  195. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_reader.py +0 -0
  196. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_session.py +0 -0
  197. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_engine_writer.py +0 -0
  198. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/engines/test_int_testing.py +0 -0
  199. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/fixtures.py +0 -0
  200. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/test_int_dataframe.py +0 -0
  201. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  202. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/test_int_grouped_data.py +0 -0
  203. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/integration/test_int_session.py +0 -0
  204. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/types.py +0 -0
  205. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/__init__.py +0 -0
  206. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/__init__.py +0 -0
  207. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/fixtures.py +0 -0
  208. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_column.py +0 -0
  209. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_dataframe.py +0 -0
  210. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  211. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_session.py +0 -0
  212. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  213. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_types.py +0 -0
  214. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/standalone/test_window.py +0 -0
  215. {sqlframe-1.12.0 → sqlframe-1.14.0}/tests/unit/test_util.py +0 -0
{sqlframe-1.12.0 → sqlframe-1.14.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 1.12.0
+ Version: 1.14.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-1.12.0 → sqlframe-1.14.0}/docs/bigquery.md
@@ -307,7 +307,6 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
  * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-     * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
  * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
  * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
@@ -442,19 +441,17 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
  * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-     * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-     * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
      * Shorthand expressions not supported. Ex: Use `month` instead of `mon`
+ * [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
  * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
  * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-     * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
  * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
  * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
  * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
{sqlframe-1.12.0 → sqlframe-1.14.0}/docs/duckdb.md
@@ -276,10 +276,10 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [dateadd](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dateadd.html)
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
  * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
- * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-     * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
+ * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
  * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
+ * [day](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.day.html)
  * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
  * [dayofweek](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofweek.html)
  * [dayofyear](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofyear.html)
@@ -405,22 +405,19 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
  * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-     * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-     * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
  * [to_unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_unix_timestamp.html)
-     * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
      * The values must match the format string (null will not be returned if they do not)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
  * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
+ * [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
  * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
  * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-     * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
  * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
  * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
  * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
{sqlframe-1.12.0 → sqlframe-1.14.0}/docs/postgres.md
@@ -284,7 +284,6 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
  * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-     * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
  * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
      * Rounded whole number is returned
@@ -397,15 +396,15 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
  * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-     * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
  * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-     * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
  * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
      * Negative index returns null and cannot lookup elements in maps
+ * [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
+ * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
  * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
{sqlframe-1.12.0 → sqlframe-1.14.0}/docs/snowflake.md
@@ -307,7 +307,6 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
  * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
  * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
  * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
@@ -440,18 +439,17 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
  * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
  * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+ * [try_to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_timestamp.html)
+ * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
  * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
  * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
  * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
  * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
{sqlframe-1.12.0 → sqlframe-1.14.0}/setup.py
@@ -20,7 +20,7 @@ setup(
      python_requires=">=3.8",
      install_requires=[
          "prettytable<3.11.0",
-         "sqlglot>=24.0.0,<25.4",
+         "sqlglot>=24.0.0,<25.5",
          "typing_extensions>=4.8,<5",
      ],
      extras_require={
@@ -42,7 +42,7 @@ setup(
          "pytest-xdist>=3.6,<3.7",
          "pre-commit>=3.5;python_version=='3.8'",
          "pre-commit>=3.7,<3.8;python_version>='3.9'",
-         "ruff>=0.4.4,<0.5",
+         "ruff>=0.4.4,<0.6",
          "types-psycopg2>=2.9,<3",
      ],
      "docs": [
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.12.0'
- __version_tuple__ = version_tuple = (1, 12, 0)
+ __version__ = version = '1.14.0'
+ __version_tuple__ = version_tuple = (1, 14, 0)
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/function_alternatives.py
@@ -6,11 +6,16 @@ import re
  import typing as t

  from sqlglot import exp as expression
+ from sqlglot.dialects.dialect import build_formatted_time
  from sqlglot.helper import ensure_list
  from sqlglot.helper import flatten as _flatten

  from sqlframe.base.column import Column
- from sqlframe.base.util import get_func_from_session
+ from sqlframe.base.util import (
+     format_time_from_spark,
+     get_func_from_session,
+     spark_default_time_format,
+ )

  if t.TYPE_CHECKING:
      from sqlframe.base._typing import ColumnOrLiteral, ColumnOrName
@@ -556,6 +561,14 @@ def to_date_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) ->
      return to_date(to_timestamp(col, format))


+ def to_date_time_format(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     from sqlframe.base.functions import to_date
+
+     lit = get_func_from_session("lit")
+     format = lit(format or spark_default_time_format())
+     return to_date(col, format=format)
+
+
  def last_day_with_cast(col: ColumnOrName) -> Column:
      from sqlframe.base.functions import last_day

@@ -715,14 +728,10 @@ def months_between_cast_as_date_cast_roundoff(


  def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-     from sqlframe.base.session import _BaseSession
-
-     session: _BaseSession = _BaseSession()
      lit = get_func_from_session("lit")
      col_func = get_func_from_session("col")

-     if format is None:
-         format = session.DEFAULT_TIME_FORMAT
+     format = lit(format or spark_default_time_format())
      return Column.invoke_expression_over_column(
          Column(
              expression.Anonymous(
@@ -731,7 +740,7 @@ def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = No
              )
          ),
          expression.TimeToStr,
-         format=lit(format),
+         format=format_time_from_spark(format),  # type: ignore
      )


@@ -1511,10 +1520,106 @@ def to_unix_timestamp_include_default_format(
      format: t.Optional[ColumnOrName] = None,
  ) -> Column:
      from sqlframe.base.functions import to_unix_timestamp
+     from sqlframe.base.session import _BaseSession
+
+     if not format:
+         format = _BaseSession().output_dialect.TIME_FORMAT
+     else:
+         format = format_time_from_spark(format)
+     return to_unix_timestamp(timestamp, format)
+
+
+ def array_append_list_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
+     lit = get_func_from_session("lit")
+     value = value if isinstance(value, Column) else lit(value)
+     return Column.invoke_anonymous_function(col, "LIST_APPEND", value)

+
+ def array_append_using_array_cat(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
      lit = get_func_from_session("lit")
+     array = get_func_from_session("array")
+     value = value if isinstance(value, Column) else lit(value)
+     return Column.invoke_anonymous_function(col, "ARRAY_CONCAT", array(value))

-     if not format:
-         format = lit("%Y-%m-%d %H:%M:%S")

-     return to_unix_timestamp(timestamp, format)
+ def day_with_try_to_timestamp(col: ColumnOrName) -> Column:
+     from sqlframe.base.functions import day
+
+     try_to_timestamp = get_func_from_session("try_to_timestamp")
+     to_date = get_func_from_session("to_date")
+     when = get_func_from_session("when")
+     _is_string = get_func_from_session("_is_string")
+     coalesce = get_func_from_session("coalesce")
+     return day(
+         when(
+             _is_string(col),
+             coalesce(try_to_timestamp(col), to_date(col)),
+         ).otherwise(col)
+     )
+
+
+ def try_to_timestamp_strptime(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+     lit = get_func_from_session("lit")
+
+     format = lit(format or spark_default_time_format())
+     return Column.invoke_anonymous_function(col, "TRY_STRPTIME", format_time_from_spark(format))  # type: ignore
+
+
+ def try_to_timestamp_safe(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+     lit = get_func_from_session("lit")
+
+     format = lit(format or spark_default_time_format())
+     return Column.invoke_anonymous_function(
+         format_time_from_spark(format),  # type: ignore
+         "SAFE.PARSE_TIMESTAMP",
+         col,  # type: ignore
+     )
+
+
+ def try_to_timestamp_pgtemp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+     lit = get_func_from_session("lit")
+
+     format = lit(format or spark_default_time_format())
+     return Column.invoke_anonymous_function(
+         col,
+         "pg_temp.TRY_TO_TIMESTAMP",
+         format_time_from_spark(format),  # type: ignore
+     )
+
+
+ def typeof_pg_typeof(col: ColumnOrName) -> Column:
+     return Column.invoke_anonymous_function(col, "pg_typeof").cast("regtype").cast("text")
+
+
+ def typeof_from_variant(col: ColumnOrName) -> Column:
+     col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+     return Column.invoke_anonymous_function(col, "TYPEOF")
+
+
+ def _is_string_using_typeof_varchar(col: ColumnOrName) -> Column:
+     typeof = get_func_from_session("typeof")
+     lit = get_func_from_session("lit")
+     return lit(typeof(col) == lit("VARCHAR"))
+
+
+ def _is_string_using_typeof_char_varying(col: ColumnOrName) -> Column:
+     typeof = get_func_from_session("typeof")
+     lit = get_func_from_session("lit")
+     return lit(
+         (typeof(col) == lit("text"))
+         | (typeof(col) == lit("character varying"))
+         | (typeof(col) == lit("unknown"))
+         | (typeof(col) == lit("text"))
+     )
+
+
+ def _is_string_using_typeof_string(col: ColumnOrName) -> Column:
+     typeof = get_func_from_session("typeof")
+     lit = get_func_from_session("lit")
+     return lit(typeof(col) == lit("STRING"))
+
+
+ def _is_string_using_typeof_string_lcase(col: ColumnOrName) -> Column:
+     typeof = get_func_from_session("typeof")
+     lit = get_func_from_session("lit")
+     return lit(typeof(col) == lit("string"))
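Note: the `try_to_timestamp_*`, `typeof_*`, and `_is_string_*` variants above follow SQLFrame's usual alternatives pattern, in which each engine module re-exports the variant matching its dialect under the generic PySpark name. A minimal sketch of that wiring, copied from the sqlframe/bigquery/functions.py hunk further down in this diff:

```python
# How an engine adopts the alternatives above. This exact trio appears
# in the BigQuery hunk later in this diff; other engines pick their own
# variants (e.g. TRY_STRPTIME on DuckDB, pg_temp.TRY_TO_TIMESTAMP on
# Postgres).
from sqlframe.base.function_alternatives import (  # noqa
    try_to_timestamp_safe as try_to_timestamp,
    _is_string_using_typeof_string as _is_string,
    array_append_using_array_cat as array_append,
)
```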
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/functions.py
@@ -6,12 +6,19 @@ import decimal
  import logging
  import typing as t

+ from sqlglot import Dialect
  from sqlglot import exp as expression
  from sqlglot.helper import ensure_list
  from sqlglot.helper import flatten as _flatten

  from sqlframe.base.column import Column
  from sqlframe.base.decorators import func_metadata as meta
+ from sqlframe.base.util import (
+     format_time_from_spark,
+     get_func_from_session,
+     spark_default_date_format,
+     spark_default_time_format,
+ )

  if t.TYPE_CHECKING:
      from pyspark.sql.session import SparkContext
@@ -695,7 +702,7 @@ def date_format(col: ColumnOrName, format: str) -> Column:
      return Column.invoke_expression_over_column(
          Column(expression.TimeStrToTime(this=Column.ensure_col(col).expression)),
          expression.TimeToStr,
-         format=lit(format),
+         format=format_time_from_spark(format),
      )


@@ -875,17 +882,21 @@ def months_between(

  @meta()
  def to_date(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     format = lit(format or spark_default_date_format())
      if format is not None:
          return Column.invoke_expression_over_column(
-             col, expression.TsOrDsToDate, format=lit(format)
+             col, expression.TsOrDsToDate, format=format_time_from_spark(format)
          )
      return Column.invoke_expression_over_column(col, expression.TsOrDsToDate)


  @meta()
  def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     format = lit(format or spark_default_time_format())
      if format is not None:
-         return Column.invoke_expression_over_column(col, expression.StrToTime, format=lit(format))
+         return Column.invoke_expression_over_column(
+             col, expression.StrToTime, format=format_time_from_spark(format)
+         )

      return Column.ensure_col(col).cast("timestamp")

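Note: the net effect of the `to_date`/`to_timestamp` changes is that callers keep writing Spark-style format strings, and `format_time_from_spark` rewrites them into the output dialect's specifiers at SQL-generation time. A rough sketch against a DuckDB-backed session (assuming `DuckDBSession`'s documented default in-memory connection and the DataFrame `.sql()` method):

```python
# Sketch: the Spark-style format below should surface in the generated
# SQL as DuckDB strptime specifiers (%Y-%m-%d %H:%M:%S), not verbatim.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()  # assumed default in-memory DuckDB connection
df = session.createDataFrame([("2024-01-31 09:30:00",)], ["raw"])
print(df.select(F.to_timestamp(df.raw, "yyyy-MM-dd HH:mm:ss").alias("ts")).sql())
```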
@@ -916,23 +927,23 @@ def last_day(col: ColumnOrName) -> Column:

  @meta()
  def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-     from sqlframe.base.session import _BaseSession
-
-     if format is None:
-         format = _BaseSession().DEFAULT_TIME_FORMAT
-     return Column.invoke_expression_over_column(col, expression.UnixToStr, format=lit(format))
+     format = lit(format or spark_default_time_format())
+     return Column.invoke_expression_over_column(
+         col,
+         expression.UnixToStr,
+         format=format_time_from_spark(format),  # type: ignore
+     )


  @meta()
  def unix_timestamp(
      timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
  ) -> Column:
-     from sqlframe.base.session import _BaseSession
-
-     if format is None:
-         format = _BaseSession().DEFAULT_TIME_FORMAT
+     format = lit(format or spark_default_time_format())
      return Column.invoke_expression_over_column(
-         timestamp, expression.StrToUnix, format=lit(format)
+         timestamp,
+         expression.StrToUnix,
+         format=format_time_from_spark(format),  # type: ignore
      ).cast("bigint")


@@ -1289,7 +1300,7 @@ def array_agg(col: ColumnOrName) -> Column:
      return Column.invoke_expression_over_column(col, expression.ArrayAgg)


- @meta(unsupported_engines="*")
+ @meta()
  def array_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
      value = value if isinstance(value, Column) else lit(value)
      return Column.invoke_anonymous_function(col, "ARRAY_APPEND", value)
@@ -1737,7 +1748,7 @@ def map_zip_with(
      return Column.invoke_anonymous_function(col1, "MAP_ZIP_WITH", col2, Column(f_expression))


- @meta(unsupported_engines=["postgres", "snowflake"])
+ @meta()
  def typeof(col: ColumnOrName) -> Column:
      return Column.invoke_anonymous_function(col, "TYPEOF")

@@ -2156,7 +2167,7 @@ def datepart(field: ColumnOrName, source: ColumnOrName) -> Column:
      return Column.invoke_anonymous_function(field, "datepart", source)


- @meta(unsupported_engines="*")
+ @meta(unsupported_engines=["bigquery", "postgres", "snowflake"])
  def day(col: ColumnOrName) -> Column:
      return Column.invoke_expression_over_column(col, expression.Day)

@@ -5106,8 +5117,11 @@ def to_unix_timestamp(
      [Row(r=None)]
      >>> spark.conf.unset("spark.sql.session.timeZone")
      """
+     format = lit(spark_default_time_format()) if format is None else format
      if format is not None:
-         return Column.invoke_expression_over_column(timestamp, expression.StrToUnix, format=format)
+         return Column.invoke_expression_over_column(
+             timestamp, expression.StrToUnix, format=format_time_from_spark(format)
+         )
      else:
          return Column.invoke_expression_over_column(timestamp, expression.StrToUnix)

@@ -5268,7 +5282,7 @@ def try_element_at(col: ColumnOrName, extraction: ColumnOrName) -> Column:
      )


- @meta(unsupported_engines="*")
+ @meta()
  def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
      """
      Parses the `col` with the `format` to a timestamp. The function always
@@ -5293,10 +5307,8 @@ def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None)
      >>> df.select(try_to_timestamp(df.t, lit('yyyy-MM-dd HH:mm:ss')).alias('dt')).collect()
      [Row(dt=datetime.datetime(1997, 2, 28, 10, 30))]
      """
-     if format is not None:
-         return Column.invoke_anonymous_function(col, "try_to_timestamp", format)
-     else:
-         return Column.invoke_anonymous_function(col, "try_to_timestamp")
+     format = lit(format or spark_default_time_format())
+     return Column.invoke_anonymous_function(col, "try_to_timestamp", format_time_from_spark(format))  # type: ignore


  @meta()
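Note: with `try_to_timestamp` no longer marked unsupported, the per-engine alternatives earlier in this diff (TRY_STRPTIME, SAFE.PARSE_TIMESTAMP, pg_temp.TRY_TO_TIMESTAMP) back it per dialect. A quick behavioral sketch, again assuming a default DuckDB-backed session:

```python
# try_* contract: malformed input yields NULL/None instead of raising.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()  # assumed default in-memory connection
df = session.createDataFrame(
    [("1997-02-28 10:30:00",), ("not a timestamp",)], ["t"]
)
rows = df.select(F.try_to_timestamp(df.t).alias("dt")).collect()
# expected: rows[0].dt == datetime(1997, 2, 28, 10, 30); rows[1].dt is None
```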
@@ -5324,7 +5336,7 @@ def ucase(str: ColumnOrName) -> Column:
      return Column.invoke_expression_over_column(str, expression.Upper)


- @meta()
+ @meta(unsupported_engines=["bigquery", "snowflake"])
  def unix_date(col: ColumnOrName) -> Column:
      """Returns the number of days since 1970-01-01.

@@ -5788,6 +5800,20 @@ def years(col: ColumnOrName) -> Column:
      return Column.invoke_anonymous_function(col, "years")


+ # SQLFrame specific
+ @meta()
+ def _is_string(col: ColumnOrName) -> Column:
+     col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+     return Column.invoke_anonymous_function(col, "IS_VARCHAR")
+
+
+ @meta()
+ def _is_date(col: ColumnOrName) -> Column:
+     typeof = get_func_from_session("typeof")
+     upper = get_func_from_session("upper")
+     return lit(upper(typeof(col)) == lit("DATE"))
+
+
  @meta()
  def _lambda_quoted(value: str) -> t.Optional[bool]:
      return False if value == "_" else None
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/session.py
@@ -72,7 +72,6 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
      _df: t.Type[DF]

      SANITIZE_COLUMN_NAMES = False
-     DEFAULT_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"

      def __init__(
          self,
@@ -114,6 +113,10 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
      def _cur(self) -> DBAPICursorWithPandas:
          return self._conn.cursor()

+     @property
+     def default_time_format(self) -> str:
+         return self.output_dialect.TIME_FORMAT.strip("'")
+
      def _sanitize_column_name(self, name: str) -> str:
          if self.SANITIZE_COLUMN_NAMES:
              return name.replace("(", "_").replace(")", "_")
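Note: the property replaces the removed `DEFAULT_TIME_FORMAT` constant; instead of a hard-coded Spark-style string, the default now comes from the session's output dialect. The `strip("'")` is needed because sqlglot stores dialect `TIME_FORMAT` values as quoted SQL literals, e.g. (illustrative values):

```python
# Why the strip("'"): sqlglot dialect TIME_FORMAT values carry their
# own SQL quotes.
from sqlglot import Dialect

print(Dialect["spark"].TIME_FORMAT)             # expected: 'yyyy-MM-dd HH:mm:ss' (quoted)
print(Dialect["spark"].TIME_FORMAT.strip("'"))  # expected: yyyy-MM-dd HH:mm:ss
```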
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/base/util.py
@@ -13,7 +13,12 @@ if t.TYPE_CHECKING:
      from pyspark.sql.dataframe import SparkSession as PySparkSession

      from sqlframe.base import types
-     from sqlframe.base._typing import OptionalPrimitiveType, SchemaInput
+     from sqlframe.base._typing import (
+         ColumnOrLiteral,
+         OptionalPrimitiveType,
+         SchemaInput,
+     )
+     from sqlframe.base.column import Column
      from sqlframe.base.session import _BaseSession
      from sqlframe.base.types import StructType

@@ -342,3 +347,25 @@ def sqlglot_to_spark(sqlglot_dtype: exp.DataType) -> types.DataType:
          ]
      )
      raise NotImplementedError(f"Unsupported data type: {sqlglot_dtype}")
+
+
+ def format_time_from_spark(value: ColumnOrLiteral) -> Column:
+     from sqlframe.base.column import Column
+     from sqlframe.base.session import _BaseSession
+
+     lit = get_func_from_session("lit")
+     value = lit(value) if not isinstance(value, Column) else value
+     formatted_time = Dialect["spark"].format_time(value.expression)
+     return Column(
+         _BaseSession()
+         .output_dialect.generator()
+         .format_time(exp.StrToTime(this=exp.Null(), format=formatted_time))
+     )
+
+
+ def spark_default_time_format() -> str:
+     return Dialect["spark"].TIME_FORMAT.strip("'")
+
+
+ def spark_default_date_format() -> str:
+     return Dialect["spark"].DATE_FORMAT.strip("'")
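Note: `format_time_from_spark` is the pivot for every format-string change in this release; it parses the format with the Spark dialect's time mapping, then re-renders it with the output dialect's generator. A standalone approximation using sqlglot alone (no session; DuckDB is an arbitrary output dialect here):

```python
# Round trip sketched by format_time_from_spark, minus the session:
# Spark format string in, output-dialect format string out.
from sqlglot import Dialect, exp

spark_fmt = exp.Literal.string("yyyy-MM-dd HH:mm:ss")
normalized = Dialect["spark"].format_time(spark_fmt)  # Spark tokens -> strftime tokens
rendered = Dialect.get_or_raise("duckdb").generator().format_time(
    exp.StrToTime(this=exp.Null(), format=normalized)
)
print(rendered)  # expected: '%Y-%m-%d %H:%M:%S' (DuckDB strptime style)
```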
{sqlframe-1.12.0 → sqlframe-1.14.0}/sqlframe/bigquery/functions.py
@@ -7,7 +7,11 @@ import typing as t
  from sqlglot import exp as sqlglot_expression

  import sqlframe.base.functions
- from sqlframe.base.util import get_func_from_session
+ from sqlframe.base.util import (
+     format_time_from_spark,
+     get_func_from_session,
+     spark_default_time_format,
+ )
  from sqlframe.bigquery.column import Column

  if t.TYPE_CHECKING:
@@ -68,6 +72,9 @@ from sqlframe.base.function_alternatives import ( # noqa
      array_union_using_array_concat as array_union,
      sequence_from_generate_array as sequence,
      position_as_strpos as position,
+     try_to_timestamp_safe as try_to_timestamp,
+     _is_string_using_typeof_string as _is_string,
+     array_append_using_array_cat as array_append,
  )


@@ -148,23 +155,15 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:

      session: _BaseSession = _BaseSession()
      lit = get_func_from_session("lit")
-     to_timestamp = get_func_from_session("to_timestamp")

      expressions = [Column.ensure_col(col).expression]
-     if format is not None:
-         expressions.append(lit(format).expression)
      return Column(
          sqlglot_expression.Anonymous(
              this="FORMAT_TIMESTAMP",
              expressions=[
-                 lit(session.DEFAULT_TIME_FORMAT).expression,
-                 to_timestamp(
-                     Column(
-                         sqlglot_expression.Anonymous(
-                             this="TIMESTAMP_SECONDS", expressions=expressions
-                         )
-                     ),
-                     format,
+                 lit(session.default_time_format).expression,
+                 Column(
+                     sqlglot_expression.Anonymous(this="TIMESTAMP_SECONDS", expressions=expressions)
                  ).expression,
              ],
          )
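Note: for reference, the expression tree the rewritten BigQuery `from_unixtime` assembles renders roughly as below (a standalone sqlglot reconstruction, with the default time format spelled out literally and "ts" as a stand-in column name):

```python
# Reconstruction of the SQL shape from the hunk above.
from sqlglot import exp

tree = exp.Anonymous(
    this="FORMAT_TIMESTAMP",
    expressions=[
        exp.Literal.string("%Y-%m-%d %H:%M:%S"),
        exp.Anonymous(this="TIMESTAMP_SECONDS", expressions=[exp.column("ts")]),
    ],
)
print(tree.sql(dialect="bigquery"))
# FORMAT_TIMESTAMP('%Y-%m-%d %H:%M:%S', TIMESTAMP_SECONDS(ts))
```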
@@ -174,12 +173,9 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
  def unix_timestamp(
      timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
  ) -> Column:
-     from sqlframe.base.session import _BaseSession
-
      lit = get_func_from_session("lit")

-     if format is None:
-         format = _BaseSession().DEFAULT_TIME_FORMAT
+     format = lit(format or spark_default_time_format())
      return Column(
          sqlglot_expression.Anonymous(
              this="UNIX_SECONDS",
@@ -187,7 +183,7 @@ def unix_timestamp(
              sqlglot_expression.Anonymous(
                  this="PARSE_TIMESTAMP",
                  expressions=[
-                     lit(format).expression,
+                     format_time_from_spark(format).expression,
                      Column.ensure_col(timestamp).expression,
                      lit("UTC").expression,
                  ],