sqlframe 1.12.0.tar.gz → 1.13.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (215)
  1. {sqlframe-1.12.0 → sqlframe-1.13.0}/PKG-INFO +1 -1
  2. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/bigquery.md +0 -4
  3. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/duckdb.md +1 -6
  4. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/postgres.md +0 -3
  5. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/snowflake.md +0 -4
  6. {sqlframe-1.12.0 → sqlframe-1.13.0}/setup.py +1 -1
  7. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/_version.py +2 -2
  8. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/function_alternatives.py +12 -11
  9. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/functions.py +24 -15
  10. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/session.py +4 -1
  11. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/util.py +24 -1
  12. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/functions.py +10 -17
  13. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/functions.pyi +0 -1
  14. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/session.py +0 -1
  15. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/functions.py +0 -1
  16. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/session.py +0 -2
  17. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/session.py +0 -2
  18. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/functions.pyi +0 -1
  19. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/session.py +0 -2
  20. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe.egg-info/PKG-INFO +1 -1
  21. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe.egg-info/requires.txt +1 -1
  22. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_int_functions.py +21 -60
  23. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_functions.py +2 -3
  24. {sqlframe-1.12.0 → sqlframe-1.13.0}/.github/CODEOWNERS +0 -0
  25. {sqlframe-1.12.0 → sqlframe-1.13.0}/.github/workflows/main.workflow.yaml +0 -0
  26. {sqlframe-1.12.0 → sqlframe-1.13.0}/.github/workflows/publish.workflow.yaml +0 -0
  27. {sqlframe-1.12.0 → sqlframe-1.13.0}/.gitignore +0 -0
  28. {sqlframe-1.12.0 → sqlframe-1.13.0}/.pre-commit-config.yaml +0 -0
  29. {sqlframe-1.12.0 → sqlframe-1.13.0}/.readthedocs.yaml +0 -0
  30. {sqlframe-1.12.0 → sqlframe-1.13.0}/LICENSE +0 -0
  31. {sqlframe-1.12.0 → sqlframe-1.13.0}/Makefile +0 -0
  32. {sqlframe-1.12.0 → sqlframe-1.13.0}/README.md +0 -0
  33. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/add_chatgpt_support.md +0 -0
  34. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  35. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  36. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  37. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  38. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  39. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  40. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  41. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  42. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/but_wait_theres_more.gif +0 -0
  43. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/cake.gif +0 -0
  44. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  45. {sqlframe-1.12.0 → sqlframe-1.13.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  46. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/configuration.md +0 -0
  47. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/bigquery.md +0 -0
  48. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/duckdb.md +0 -0
  49. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/images/SF.png +0 -0
  50. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/images/favicon.png +0 -0
  51. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/images/favicon_old.png +0 -0
  52. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  53. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/images/sqlframe_logo.png +0 -0
  54. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/docs/postgres.md +0 -0
  55. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/images/SF.png +0 -0
  56. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/images/favicon.png +0 -0
  57. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/images/favicon_old.png +0 -0
  58. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/images/sqlframe_diagram.png +0 -0
  59. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/images/sqlframe_logo.png +0 -0
  60. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/index.md +0 -0
  61. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/requirements.txt +0 -0
  62. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/spark.md +0 -0
  63. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/standalone.md +0 -0
  64. {sqlframe-1.12.0 → sqlframe-1.13.0}/docs/stylesheets/extra.css +0 -0
  65. {sqlframe-1.12.0 → sqlframe-1.13.0}/mkdocs.yml +0 -0
  66. {sqlframe-1.12.0 → sqlframe-1.13.0}/pytest.ini +0 -0
  67. {sqlframe-1.12.0 → sqlframe-1.13.0}/renovate.json +0 -0
  68. {sqlframe-1.12.0 → sqlframe-1.13.0}/setup.cfg +0 -0
  69. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/LICENSE +0 -0
  70. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/__init__.py +0 -0
  71. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/__init__.py +0 -0
  72. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/_typing.py +0 -0
  73. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/catalog.py +0 -0
  74. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/column.py +0 -0
  75. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/dataframe.py +0 -0
  76. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/decorators.py +0 -0
  77. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/exceptions.py +0 -0
  78. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/group.py +0 -0
  79. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/mixins/__init__.py +0 -0
  80. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  81. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  82. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  83. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/normalize.py +0 -0
  84. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/operations.py +0 -0
  85. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/readerwriter.py +0 -0
  86. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/transforms.py +0 -0
  87. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/types.py +0 -0
  88. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/base/window.py +0 -0
  89. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/__init__.py +0 -0
  90. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/catalog.py +0 -0
  91. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/column.py +0 -0
  92. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/dataframe.py +0 -0
  93. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/group.py +0 -0
  94. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/readwriter.py +0 -0
  95. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/types.py +0 -0
  96. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/bigquery/window.py +0 -0
  97. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/__init__.py +0 -0
  98. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/catalog.py +0 -0
  99. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/column.py +0 -0
  100. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/dataframe.py +0 -0
  101. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/functions.pyi +0 -0
  102. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/group.py +0 -0
  103. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/readwriter.py +0 -0
  104. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/types.py +0 -0
  105. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/duckdb/window.py +0 -0
  106. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/__init__.py +0 -0
  107. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/catalog.py +0 -0
  108. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/column.py +0 -0
  109. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/dataframe.py +0 -0
  110. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/functions.py +0 -0
  111. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/functions.pyi +0 -0
  112. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/group.py +0 -0
  113. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/readwriter.py +0 -0
  114. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/types.py +0 -0
  115. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/postgres/window.py +0 -0
  116. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/__init__.py +0 -0
  117. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/catalog.py +0 -0
  118. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/column.py +0 -0
  119. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/dataframe.py +0 -0
  120. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/functions.py +0 -0
  121. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/group.py +0 -0
  122. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/readwriter.py +0 -0
  123. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/session.py +0 -0
  124. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/types.py +0 -0
  125. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/redshift/window.py +0 -0
  126. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/__init__.py +0 -0
  127. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/catalog.py +0 -0
  128. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/column.py +0 -0
  129. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/dataframe.py +0 -0
  130. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/functions.py +0 -0
  131. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/group.py +0 -0
  132. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/readwriter.py +0 -0
  133. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/types.py +0 -0
  134. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/snowflake/window.py +0 -0
  135. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/__init__.py +0 -0
  136. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/catalog.py +0 -0
  137. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/column.py +0 -0
  138. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/dataframe.py +0 -0
  139. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/functions.py +0 -0
  140. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/functions.pyi +0 -0
  141. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/group.py +0 -0
  142. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/readwriter.py +0 -0
  143. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/session.py +0 -0
  144. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/types.py +0 -0
  145. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/spark/window.py +0 -0
  146. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/__init__.py +0 -0
  147. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/catalog.py +0 -0
  148. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/column.py +0 -0
  149. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/dataframe.py +0 -0
  150. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/functions.py +0 -0
  151. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/group.py +0 -0
  152. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/readwriter.py +0 -0
  153. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/session.py +0 -0
  154. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/types.py +0 -0
  155. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/standalone/window.py +0 -0
  156. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/testing/__init__.py +0 -0
  157. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe/testing/utils.py +0 -0
  158. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  159. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  160. {sqlframe-1.12.0 → sqlframe-1.13.0}/sqlframe.egg-info/top_level.txt +0 -0
  161. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/__init__.py +0 -0
  162. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/common_fixtures.py +0 -0
  163. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/conftest.py +0 -0
  164. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/fixtures/employee.csv +0 -0
  165. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/fixtures/employee.json +0 -0
  166. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/fixtures/employee.parquet +0 -0
  167. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/fixtures/employee_extra_line.csv +0 -0
  168. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/__init__.py +0 -0
  169. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/__init__.py +0 -0
  170. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  171. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  172. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  173. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  174. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/duck/__init__.py +0 -0
  175. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  176. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  177. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  178. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  179. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/postgres/__init__.py +0 -0
  180. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  181. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  182. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  183. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/redshift/__init__.py +0 -0
  184. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  185. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  186. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  187. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  188. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  189. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  190. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/spark/__init__.py +0 -0
  191. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  192. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  193. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_engine_column.py +0 -0
  194. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  195. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_engine_reader.py +0 -0
  196. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_engine_session.py +0 -0
  197. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_engine_writer.py +0 -0
  198. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/engines/test_int_testing.py +0 -0
  199. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/fixtures.py +0 -0
  200. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/test_int_dataframe.py +0 -0
  201. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  202. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/test_int_grouped_data.py +0 -0
  203. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/integration/test_int_session.py +0 -0
  204. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/types.py +0 -0
  205. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/__init__.py +0 -0
  206. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/__init__.py +0 -0
  207. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/fixtures.py +0 -0
  208. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_column.py +0 -0
  209. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_dataframe.py +0 -0
  210. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  211. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_session.py +0 -0
  212. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  213. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_types.py +0 -0
  214. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/standalone/test_window.py +0 -0
  215. {sqlframe-1.12.0 → sqlframe-1.13.0}/tests/unit/test_util.py +0 -0
--- sqlframe-1.12.0/PKG-INFO
+++ sqlframe-1.13.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.12.0
+Version: 1.13.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
--- sqlframe-1.12.0/docs/bigquery.md
+++ sqlframe-1.13.0/docs/bigquery.md
@@ -307,7 +307,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
 * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
 * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-  * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
 * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
 * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
 * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
@@ -442,9 +441,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-  * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-  * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
@@ -454,7 +451,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
 * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-  * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
 * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
 * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
 * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
--- sqlframe-1.12.0/docs/duckdb.md
+++ sqlframe-1.13.0/docs/duckdb.md
@@ -276,8 +276,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [dateadd](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dateadd.html)
 * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
 * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
-* [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
+* [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
 * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
 * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
 * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
@@ -405,11 +404,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
 * [to_unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_unix_timestamp.html)
-  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers
   * The values must match the format string (null will not be returned if they do not)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
@@ -420,7 +416,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
 * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
 * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
 * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
 * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
--- sqlframe-1.12.0/docs/postgres.md
+++ sqlframe-1.13.0/docs/postgres.md
@@ -284,7 +284,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
 * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
 * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
 * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
 * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
   * Rounded whole number is returned
@@ -397,10 +396,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
 * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
--- sqlframe-1.12.0/docs/snowflake.md
+++ sqlframe-1.13.0/docs/snowflake.md
@@ -307,7 +307,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
 * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
 * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
-  * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
 * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
 * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
 * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
@@ -440,10 +439,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
-  * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
 * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
-  * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
@@ -451,7 +448,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
 * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
-  * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
 * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
 * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
 * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
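
The documentation hunks above all drop the same caveat: format strings no longer need to be written in each engine's native syntax, because 1.13.0 accepts Spark-style format strings everywhere and transpiles them per engine. A rough sketch of the underlying conversion using sqlglot directly (the library sqlframe builds on); the exact output varies with the installed sqlglot version:

    import sqlglot

    # One Spark-style format string, rendered into each engine's native
    # date-format specifiers via sqlglot's dialect time mappings.
    for engine in ("bigquery", "duckdb", "postgres", "snowflake"):
        sql = sqlglot.transpile(
            "SELECT DATE_FORMAT(ts, 'yyyy-MM-dd HH:mm:ss')",
            read="spark",
            write=engine,
        )[0]
        print(engine, "->", sql)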
--- sqlframe-1.12.0/setup.py
+++ sqlframe-1.13.0/setup.py
@@ -42,7 +42,7 @@ setup(
             "pytest-xdist>=3.6,<3.7",
             "pre-commit>=3.5;python_version=='3.8'",
             "pre-commit>=3.7,<3.8;python_version>='3.9'",
-            "ruff>=0.4.4,<0.5",
+            "ruff>=0.4.4,<0.6",
             "types-psycopg2>=2.9,<3",
         ],
         "docs": [
--- sqlframe-1.12.0/sqlframe/_version.py
+++ sqlframe-1.13.0/sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.12.0'
-__version_tuple__ = version_tuple = (1, 12, 0)
+__version__ = version = '1.13.0'
+__version_tuple__ = version_tuple = (1, 13, 0)
--- sqlframe-1.12.0/sqlframe/base/function_alternatives.py
+++ sqlframe-1.13.0/sqlframe/base/function_alternatives.py
@@ -6,11 +6,16 @@ import re
 import typing as t
 
 from sqlglot import exp as expression
+from sqlglot.dialects.dialect import build_formatted_time
 from sqlglot.helper import ensure_list
 from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
-from sqlframe.base.util import get_func_from_session
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_time_format,
+)
 
 if t.TYPE_CHECKING:
     from sqlframe.base._typing import ColumnOrLiteral, ColumnOrName
@@ -715,14 +720,10 @@ def months_between_cast_as_date_cast_roundoff(
 
 
 def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-    from sqlframe.base.session import _BaseSession
-
-    session: _BaseSession = _BaseSession()
     lit = get_func_from_session("lit")
     col_func = get_func_from_session("col")
 
-    if format is None:
-        format = session.DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column.invoke_expression_over_column(
         Column(
             expression.Anonymous(
@@ -731,7 +732,7 @@ def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = No
             )
         ),
         expression.TimeToStr,
-        format=lit(format),
+        format=format_time_from_spark(format),  # type: ignore
     )
 
 
@@ -1511,10 +1512,10 @@ def to_unix_timestamp_include_default_format(
     format: t.Optional[ColumnOrName] = None,
 ) -> Column:
     from sqlframe.base.functions import to_unix_timestamp
-
-    lit = get_func_from_session("lit")
+    from sqlframe.base.session import _BaseSession
 
     if not format:
-        format = lit("%Y-%m-%d %H:%M:%S")
-
+        format = _BaseSession().output_dialect.TIME_FORMAT
+    else:
+        format = format_time_from_spark(format)
     return to_unix_timestamp(timestamp, format)
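
The function_alternatives.py hunks above show the pattern repeated throughout this release: per-engine default formats are dropped in favor of Spark's default, converted on the way out. A minimal sketch of the new default (spark_default_time_format is added in the sqlframe/base/util.py hunk later in this diff):

    from sqlframe.base.util import spark_default_time_format

    # Spark's default time format, with the SQL quotes stripped; this value
    # replaces the per-session DEFAULT_TIME_FORMAT constants removed below.
    print(spark_default_time_format())  # yyyy-MM-dd HH:mm:ss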
--- sqlframe-1.12.0/sqlframe/base/functions.py
+++ sqlframe-1.13.0/sqlframe/base/functions.py
@@ -6,12 +6,14 @@ import decimal
 import logging
 import typing as t
 
+from sqlglot import Dialect
 from sqlglot import exp as expression
 from sqlglot.helper import ensure_list
 from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
 from sqlframe.base.decorators import func_metadata as meta
+from sqlframe.base.util import format_time_from_spark, spark_default_time_format
 
 if t.TYPE_CHECKING:
     from pyspark.sql.session import SparkContext
@@ -695,7 +697,7 @@ def date_format(col: ColumnOrName, format: str) -> Column:
     return Column.invoke_expression_over_column(
         Column(expression.TimeStrToTime(this=Column.ensure_col(col).expression)),
         expression.TimeToStr,
-        format=lit(format),
+        format=format_time_from_spark(format),
     )
 
 
@@ -875,17 +877,21 @@ def months_between(
 
 @meta()
 def to_date(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    format = lit(format or spark_default_time_format())
     if format is not None:
         return Column.invoke_expression_over_column(
-            col, expression.TsOrDsToDate, format=lit(format)
+            col, expression.TsOrDsToDate, format=format_time_from_spark(format)
         )
     return Column.invoke_expression_over_column(col, expression.TsOrDsToDate)
 
 
 @meta()
 def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    format = lit(format or spark_default_time_format())
     if format is not None:
-        return Column.invoke_expression_over_column(col, expression.StrToTime, format=lit(format))
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=format_time_from_spark(format)
+        )
 
     return Column.ensure_col(col).cast("timestamp")
 
@@ -916,23 +922,23 @@ def last_day(col: ColumnOrName) -> Column:
 
 @meta()
 def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-    from sqlframe.base.session import _BaseSession
-
-    if format is None:
-        format = _BaseSession().DEFAULT_TIME_FORMAT
-    return Column.invoke_expression_over_column(col, expression.UnixToStr, format=lit(format))
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_expression_over_column(
+        col,
+        expression.UnixToStr,
+        format=format_time_from_spark(format),  # type: ignore
+    )
 
 
 @meta()
 def unix_timestamp(
     timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
 ) -> Column:
-    from sqlframe.base.session import _BaseSession
-
-    if format is None:
-        format = _BaseSession().DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column.invoke_expression_over_column(
-        timestamp, expression.StrToUnix, format=lit(format)
+        timestamp,
+        expression.StrToUnix,
+        format=format_time_from_spark(format),  # type: ignore
     ).cast("bigint")
 
 
@@ -5106,8 +5112,11 @@ def to_unix_timestamp(
     [Row(r=None)]
     >>> spark.conf.unset("spark.sql.session.timeZone")
     """
+    format = lit(spark_default_time_format()) if format is None else format
     if format is not None:
-        return Column.invoke_expression_over_column(timestamp, expression.StrToUnix, format=format)
+        return Column.invoke_expression_over_column(
+            timestamp, expression.StrToUnix, format=format_time_from_spark(format)
+        )
     else:
         return Column.invoke_expression_over_column(timestamp, expression.StrToUnix)
 
@@ -5324,7 +5333,7 @@ def ucase(str: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(str, expression.Upper)
 
 
-@meta()
+@meta(unsupported_engines=["bigquery", "snowflake"])
 def unix_date(col: ColumnOrName) -> Column:
     """Returns the number of days since 1970-01-01.
 
--- sqlframe-1.12.0/sqlframe/base/session.py
+++ sqlframe-1.13.0/sqlframe/base/session.py
@@ -72,7 +72,6 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
     _df: t.Type[DF]
 
     SANITIZE_COLUMN_NAMES = False
-    DEFAULT_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"
 
     def __init__(
         self,
@@ -114,6 +113,10 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
     def _cur(self) -> DBAPICursorWithPandas:
         return self._conn.cursor()
 
+    @property
+    def default_time_format(self) -> str:
+        return self.output_dialect.TIME_FORMAT.strip("'")
+
     def _sanitize_column_name(self, name: str) -> str:
         if self.SANITIZE_COLUMN_NAMES:
             return name.replace("(", "_").replace(")", "_")
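
The removed DEFAULT_TIME_FORMAT constant gives way to a property computed from the session's output dialect. In sqlglot, each dialect stores TIME_FORMAT as a quoted SQL literal, hence the strip("'"); a small sketch of what the property returns:

    from sqlglot import Dialect

    # TIME_FORMAT is a quoted SQL string on each sqlglot dialect.
    print(Dialect["duckdb"].TIME_FORMAT)             # '%Y-%m-%d %H:%M:%S'
    # The new default_time_format property strips the quotes.
    print(Dialect["duckdb"].TIME_FORMAT.strip("'"))  # %Y-%m-%d %H:%M:%S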
--- sqlframe-1.12.0/sqlframe/base/util.py
+++ sqlframe-1.13.0/sqlframe/base/util.py
@@ -13,7 +13,12 @@ if t.TYPE_CHECKING:
     from pyspark.sql.dataframe import SparkSession as PySparkSession
 
     from sqlframe.base import types
-    from sqlframe.base._typing import OptionalPrimitiveType, SchemaInput
+    from sqlframe.base._typing import (
+        ColumnOrLiteral,
+        OptionalPrimitiveType,
+        SchemaInput,
+    )
+    from sqlframe.base.column import Column
     from sqlframe.base.session import _BaseSession
     from sqlframe.base.types import StructType
 
@@ -342,3 +347,21 @@ def sqlglot_to_spark(sqlglot_dtype: exp.DataType) -> types.DataType:
         ]
     )
     raise NotImplementedError(f"Unsupported data type: {sqlglot_dtype}")
+
+
+def format_time_from_spark(value: ColumnOrLiteral) -> Column:
+    from sqlframe.base.column import Column
+    from sqlframe.base.session import _BaseSession
+
+    lit = get_func_from_session("lit")
+    value = lit(value) if not isinstance(value, Column) else value
+    formatted_time = Dialect["spark"].format_time(value.expression)
+    return Column(
+        _BaseSession()
+        .output_dialect.generator()
+        .format_time(exp.StrToTime(this=exp.Null(), format=formatted_time))
+    )
+
+
+def spark_default_time_format() -> str:
+    return Dialect["spark"].TIME_FORMAT.strip("'")
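
format_time_from_spark above is the core of the release: it parses a Spark-style format string into sqlglot's internal format, then re-renders it for the session's output dialect. A standalone re-trace of those two steps, assuming a recent sqlglot and a DuckDB output dialect in place of the session lookup:

    from sqlglot import Dialect, exp

    # Step 1: Spark format string -> sqlglot's internal strftime-like format.
    internal = Dialect["spark"].format_time(exp.Literal.string("yyyy-MM-dd HH:mm:ss"))
    # Step 2: internal format -> the output dialect's native format string.
    rendered = (
        Dialect.get_or_raise("duckdb")
        .generator()
        .format_time(exp.StrToTime(this=exp.Null(), format=internal))
    )
    print(rendered)  # something like: '%Y-%m-%d %H:%M:%S'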
--- sqlframe-1.12.0/sqlframe/bigquery/functions.py
+++ sqlframe-1.13.0/sqlframe/bigquery/functions.py
@@ -7,7 +7,11 @@ import typing as t
 from sqlglot import exp as sqlglot_expression
 
 import sqlframe.base.functions
-from sqlframe.base.util import get_func_from_session
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_time_format,
+)
 from sqlframe.bigquery.column import Column
 
 if t.TYPE_CHECKING:
@@ -148,23 +152,15 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
 
     session: _BaseSession = _BaseSession()
     lit = get_func_from_session("lit")
-    to_timestamp = get_func_from_session("to_timestamp")
 
     expressions = [Column.ensure_col(col).expression]
-    if format is not None:
-        expressions.append(lit(format).expression)
     return Column(
         sqlglot_expression.Anonymous(
             this="FORMAT_TIMESTAMP",
             expressions=[
-                lit(session.DEFAULT_TIME_FORMAT).expression,
-                to_timestamp(
-                    Column(
-                        sqlglot_expression.Anonymous(
-                            this="TIMESTAMP_SECONDS", expressions=expressions
-                        )
-                    ),
-                    format,
+                lit(session.default_time_format).expression,
+                Column(
+                    sqlglot_expression.Anonymous(this="TIMESTAMP_SECONDS", expressions=expressions)
                 ).expression,
             ],
         )
@@ -174,12 +170,9 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
 def unix_timestamp(
     timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
 ) -> Column:
-    from sqlframe.base.session import _BaseSession
-
     lit = get_func_from_session("lit")
 
-    if format is None:
-        format = _BaseSession().DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column(
         sqlglot_expression.Anonymous(
             this="UNIX_SECONDS",
@@ -187,7 +180,7 @@ def unix_timestamp(
                 sqlglot_expression.Anonymous(
                     this="PARSE_TIMESTAMP",
                     expressions=[
-                        lit(format).expression,
+                        format_time_from_spark(format).expression,
                         Column.ensure_col(timestamp).expression,
                         lit("UTC").expression,
                     ],
--- sqlframe-1.12.0/sqlframe/bigquery/functions.pyi
+++ sqlframe-1.13.0/sqlframe/bigquery/functions.pyi
@@ -267,7 +267,6 @@ from sqlframe.base.functions import trunc as trunc
 from sqlframe.base.functions import ucase as ucase
 from sqlframe.base.functions import unbase64 as unbase64
 from sqlframe.base.functions import unhex as unhex
-from sqlframe.base.functions import unix_date as unix_date
 from sqlframe.base.functions import upper as upper
 from sqlframe.base.functions import user as user
 from sqlframe.base.functions import var_pop as var_pop
--- sqlframe-1.12.0/sqlframe/bigquery/session.py
+++ sqlframe-1.13.0/sqlframe/bigquery/session.py
@@ -32,7 +32,6 @@ class BigQuerySession(
     _writer = BigQueryDataFrameWriter
     _df = BigQueryDataFrame
 
-    DEFAULT_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
     QUALIFY_INFO_SCHEMA_WITH_DATABASE = True
     SANITIZE_COLUMN_NAMES = True
 
--- sqlframe-1.12.0/sqlframe/duckdb/functions.py
+++ sqlframe-1.13.0/sqlframe/duckdb/functions.py
@@ -46,5 +46,4 @@ from sqlframe.base.function_alternatives import ( # noqa
     array_max_from_sort as array_max,
     sequence_from_generate_series as sequence,
     try_element_at_zero_based as try_element_at,
-    to_unix_timestamp_include_default_format as to_unix_timestamp,
 )
--- sqlframe-1.12.0/sqlframe/duckdb/session.py
+++ sqlframe-1.13.0/sqlframe/duckdb/session.py
@@ -33,8 +33,6 @@ class DuckDBSession(
     _writer = DuckDBDataFrameWriter
     _df = DuckDBDataFrame
 
-    DEFAULT_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
-
     def __init__(self, conn: t.Optional[DuckDBPyConnection] = None, *args, **kwargs):
         import duckdb
         from duckdb.typing import VARCHAR
--- sqlframe-1.12.0/sqlframe/postgres/session.py
+++ sqlframe-1.13.0/sqlframe/postgres/session.py
@@ -34,8 +34,6 @@ class PostgresSession(
     _writer = PostgresDataFrameWriter
     _df = PostgresDataFrame
 
-    DEFAULT_TIME_FORMAT = "yyyy-MM-dd HH:MI:SS"
-
     def __init__(self, conn: t.Optional[psycopg2_connection] = None):
         if not hasattr(self, "_conn"):
             super().__init__(conn)
--- sqlframe-1.12.0/sqlframe/snowflake/functions.pyi
+++ sqlframe-1.13.0/sqlframe/snowflake/functions.pyi
@@ -207,7 +207,6 @@ from sqlframe.base.functions import (
     trim as trim,
     trunc as trunc,
     ucase as ucase,
-    unix_date as unix_date,
     upper as upper,
     user as user,
     var_pop as var_pop,
--- sqlframe-1.12.0/sqlframe/snowflake/session.py
+++ sqlframe-1.13.0/sqlframe/snowflake/session.py
@@ -57,8 +57,6 @@ class SnowflakeSession(
     _writer = SnowflakeDataFrameWriter
     _df = SnowflakeDataFrame
 
-    DEFAULT_TIME_FORMAT = "YYYY-MM-DD HH:MI:SS"
-
     def __init__(self, conn: t.Optional[SnowflakeConnection] = None):
         import snowflake
 
--- sqlframe-1.12.0/sqlframe.egg-info/PKG-INFO
+++ sqlframe-1.13.0/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.12.0
+Version: 1.13.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
--- sqlframe-1.12.0/sqlframe.egg-info/requires.txt
+++ sqlframe-1.13.0/sqlframe.egg-info/requires.txt
@@ -18,7 +18,7 @@ pyspark<3.6,>=2
 pytest-postgresql<7,>=6
 pytest-xdist<3.7,>=3.6
 pytest<8.3,>=8.2.0
-ruff<0.5,>=0.4.4
+ruff<0.6,>=0.4.4
 types-psycopg2<3,>=2.9
 
 [dev:python_version == "3.8"]