sqlframe 3.9.3__tar.gz → 3.10.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (375)
  1. {sqlframe-3.9.3 → sqlframe-3.10.0}/PKG-INFO +1 -1
  2. {sqlframe-3.9.3 → sqlframe-3.10.0}/setup.py +4 -3
  3. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/_version.py +2 -2
  4. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/function_alternatives.py +33 -0
  5. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/functions.py +1 -1
  6. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/session.py +2 -0
  7. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/functions.py +1 -0
  8. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.py +1 -0
  9. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.pyi +1 -0
  10. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/functions.py +1 -0
  11. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/PKG-INFO +1 -1
  12. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/requires.txt +4 -3
  13. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/common_fixtures.py +2 -1
  14. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/conftest.py +1 -1
  15. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +10 -10
  16. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_int_functions.py +46 -155
  17. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/databricks/test_activate.py +1 -3
  18. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_functions.py +5 -5
  19. {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/CODEOWNERS +0 -0
  20. {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/workflows/main.workflow.yaml +0 -0
  21. {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/workflows/publish.workflow.yaml +0 -0
  22. {sqlframe-3.9.3 → sqlframe-3.10.0}/.gitignore +0 -0
  23. {sqlframe-3.9.3 → sqlframe-3.10.0}/.pre-commit-config.yaml +0 -0
  24. {sqlframe-3.9.3 → sqlframe-3.10.0}/.readthedocs.yaml +0 -0
  25. {sqlframe-3.9.3 → sqlframe-3.10.0}/LICENSE +0 -0
  26. {sqlframe-3.9.3 → sqlframe-3.10.0}/Makefile +0 -0
  27. {sqlframe-3.9.3 → sqlframe-3.10.0}/README.md +0 -0
  28. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/add_chatgpt_support.md +0 -0
  29. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  30. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  31. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  32. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  33. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  34. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  35. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  36. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  37. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/but_wait_theres_more.gif +0 -0
  38. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/cake.gif +0 -0
  39. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  40. {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  41. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/bigquery.md +0 -0
  42. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/configuration.md +0 -0
  43. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/databricks.md +0 -0
  44. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/bigquery.md +0 -0
  45. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/duckdb.md +0 -0
  46. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/SF.png +0 -0
  47. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/favicon.png +0 -0
  48. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/favicon_old.png +0 -0
  49. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  50. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/sqlframe_logo.png +0 -0
  51. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/postgres.md +0 -0
  52. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/duckdb.md +0 -0
  53. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/SF.png +0 -0
  54. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/favicon.png +0 -0
  55. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/favicon_old.png +0 -0
  56. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/sqlframe_diagram.png +0 -0
  57. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/sqlframe_logo.png +0 -0
  58. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/index.md +0 -0
  59. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/postgres.md +0 -0
  60. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/redshift.md +0 -0
  61. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/requirements.txt +0 -0
  62. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/snowflake.md +0 -0
  63. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/spark.md +0 -0
  64. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/standalone.md +0 -0
  65. {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/stylesheets/extra.css +0 -0
  66. {sqlframe-3.9.3 → sqlframe-3.10.0}/mkdocs.yml +0 -0
  67. {sqlframe-3.9.3 → sqlframe-3.10.0}/pytest.ini +0 -0
  68. {sqlframe-3.9.3 → sqlframe-3.10.0}/renovate.json +0 -0
  69. {sqlframe-3.9.3 → sqlframe-3.10.0}/setup.cfg +0 -0
  70. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/LICENSE +0 -0
  71. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/__init__.py +0 -0
  72. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/__init__.py +0 -0
  73. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/_typing.py +0 -0
  74. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/catalog.py +0 -0
  75. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/column.py +0 -0
  76. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/dataframe.py +0 -0
  77. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/decorators.py +0 -0
  78. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/exceptions.py +0 -0
  79. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/group.py +0 -0
  80. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/__init__.py +0 -0
  81. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  82. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  83. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  84. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/normalize.py +0 -0
  85. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/operations.py +0 -0
  86. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/readerwriter.py +0 -0
  87. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/transforms.py +0 -0
  88. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/types.py +0 -0
  89. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/udf.py +0 -0
  90. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/util.py +0 -0
  91. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/window.py +0 -0
  92. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/__init__.py +0 -0
  93. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/catalog.py +0 -0
  94. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/column.py +0 -0
  95. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/dataframe.py +0 -0
  96. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/functions.pyi +0 -0
  97. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/group.py +0 -0
  98. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/readwriter.py +0 -0
  99. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/session.py +0 -0
  100. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/types.py +0 -0
  101. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/udf.py +0 -0
  102. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/window.py +0 -0
  103. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/__init__.py +0 -0
  104. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/catalog.py +0 -0
  105. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/column.py +0 -0
  106. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/dataframe.py +0 -0
  107. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/functions.py +0 -0
  108. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/functions.pyi +0 -0
  109. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/group.py +0 -0
  110. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/readwriter.py +0 -0
  111. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/session.py +0 -0
  112. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/types.py +0 -0
  113. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/udf.py +0 -0
  114. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/window.py +0 -0
  115. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/__init__.py +0 -0
  116. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/catalog.py +0 -0
  117. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/column.py +0 -0
  118. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/dataframe.py +0 -0
  119. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/group.py +0 -0
  120. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/readwriter.py +0 -0
  121. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/session.py +0 -0
  122. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/types.py +0 -0
  123. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/udf.py +0 -0
  124. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/window.py +0 -0
  125. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/__init__.py +0 -0
  126. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/catalog.py +0 -0
  127. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/column.py +0 -0
  128. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/dataframe.py +0 -0
  129. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/functions.pyi +0 -0
  130. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/group.py +0 -0
  131. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/readwriter.py +0 -0
  132. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/session.py +0 -0
  133. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/types.py +0 -0
  134. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/udf.py +0 -0
  135. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/window.py +0 -0
  136. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/__init__.py +0 -0
  137. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/catalog.py +0 -0
  138. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/column.py +0 -0
  139. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/dataframe.py +0 -0
  140. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/functions.py +0 -0
  141. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/group.py +0 -0
  142. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/readwriter.py +0 -0
  143. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/session.py +0 -0
  144. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/types.py +0 -0
  145. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/udf.py +0 -0
  146. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/window.py +0 -0
  147. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/__init__.py +0 -0
  148. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/catalog.py +0 -0
  149. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/column.py +0 -0
  150. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/dataframe.py +0 -0
  151. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/functions.py +0 -0
  152. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/functions.pyi +0 -0
  153. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/group.py +0 -0
  154. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/readwriter.py +0 -0
  155. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/session.py +0 -0
  156. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/types.py +0 -0
  157. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/udf.py +0 -0
  158. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/window.py +0 -0
  159. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/__init__.py +0 -0
  160. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/catalog.py +0 -0
  161. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/column.py +0 -0
  162. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/dataframe.py +0 -0
  163. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/functions.py +0 -0
  164. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/functions.pyi +0 -0
  165. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/group.py +0 -0
  166. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/readwriter.py +0 -0
  167. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/session.py +0 -0
  168. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/types.py +0 -0
  169. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/udf.py +0 -0
  170. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/window.py +0 -0
  171. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/__init__.py +0 -0
  172. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/catalog.py +0 -0
  173. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/column.py +0 -0
  174. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/dataframe.py +0 -0
  175. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/functions.py +0 -0
  176. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/group.py +0 -0
  177. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/readwriter.py +0 -0
  178. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/session.py +0 -0
  179. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/types.py +0 -0
  180. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/udf.py +0 -0
  181. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/window.py +0 -0
  182. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/testing/__init__.py +0 -0
  183. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/testing/utils.py +0 -0
  184. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  185. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  186. {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/top_level.txt +0 -0
  187. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/__init__.py +0 -0
  188. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.csv +0 -0
  189. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.json +0 -0
  190. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.parquet +0 -0
  191. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  192. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  193. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  194. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  195. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  196. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  197. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  198. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  199. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  200. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  201. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  202. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  203. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  204. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  205. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_extra_line.csv +0 -0
  206. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  207. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  208. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  209. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  210. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  211. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  212. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  213. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  214. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  215. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  216. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  217. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  218. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  219. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  220. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  221. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  222. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  223. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  224. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  225. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  226. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  227. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  228. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  229. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  230. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  231. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  232. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  233. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  234. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  235. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  236. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  237. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  238. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  239. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  240. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  241. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  242. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  243. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  244. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  245. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  246. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  247. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  248. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  249. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  250. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  251. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  252. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  253. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  254. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  255. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  256. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  257. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  258. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  259. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  260. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  261. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  262. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  263. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  264. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  265. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  266. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  267. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  268. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  269. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  270. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  271. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  272. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  273. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  274. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  275. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  276. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  277. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  278. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  279. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  280. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  281. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  282. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  283. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  284. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  285. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  286. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  287. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  288. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  289. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  290. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  291. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  292. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  293. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  294. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  295. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  296. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  297. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  298. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  299. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  300. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  301. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  302. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  303. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  304. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  305. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/__init__.py +0 -0
  306. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/__init__.py +0 -0
  307. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  308. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  309. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  310. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  311. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/__init__.py +0 -0
  312. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  313. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
  314. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  315. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/__init__.py +0 -0
  316. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  317. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  318. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  319. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  320. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  321. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  322. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  323. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/__init__.py +0 -0
  324. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  325. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  326. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  327. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  328. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/__init__.py +0 -0
  329. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  330. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  331. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  332. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  333. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  334. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/__init__.py +0 -0
  335. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  336. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  337. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_column.py +0 -0
  338. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  339. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_reader.py +0 -0
  340. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_session.py +0 -0
  341. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_writer.py +0 -0
  342. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_int_testing.py +0 -0
  343. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/fixtures.py +0 -0
  344. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_dataframe.py +0 -0
  345. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  346. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_grouped_data.py +0 -0
  347. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_session.py +0 -0
  348. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/types.py +0 -0
  349. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/__init__.py +0 -0
  350. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/bigquery/__init__.py +0 -0
  351. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/bigquery/test_activate.py +0 -0
  352. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/conftest.py +0 -0
  353. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/databricks/__init__.py +0 -0
  354. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/duck/__init__.py +0 -0
  355. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/duck/test_activate.py +0 -0
  356. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/postgres/__init__.py +0 -0
  357. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/postgres/test_activate.py +0 -0
  358. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/redshift/__init__.py +0 -0
  359. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/redshift/test_activate.py +0 -0
  360. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/snowflake/__init__.py +0 -0
  361. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/snowflake/test_activate.py +0 -0
  362. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/spark/__init__.py +0 -0
  363. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/spark/test_activate.py +0 -0
  364. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/__init__.py +0 -0
  365. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/fixtures.py +0 -0
  366. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_activate.py +0 -0
  367. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_column.py +0 -0
  368. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe.py +0 -0
  369. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  370. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_session.py +0 -0
  371. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  372. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_types.py +0 -0
  373. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_window.py +0 -0
  374. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/test_activate.py +0 -0
  375. {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/test_util.py +0 -0

{sqlframe-3.9.3 → sqlframe-3.10.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.9.3
+ Version: 3.10.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman

{sqlframe-3.9.3 → sqlframe-3.10.0}/setup.py
@@ -20,7 +20,7 @@ setup(
      python_requires=">=3.8",
      install_requires=[
          "prettytable<3.12.1",
-         "sqlglot>=24.0.0,<25.33",
+         "sqlglot>=24.0.0,<26.1",
          "typing_extensions>=4.8,<5",
      ],
      extras_require={
@@ -32,13 +32,14 @@ setup(
              "duckdb>=0.9,<1.2",
              "findspark>=2,<3",
              "mypy>=1.10.0,<1.14",
-             "openai>=1.30,<1.56",
+             "openai>=1.30,<1.58",
              "pandas>=2,<3",
              "pandas-stubs>=2,<3",
              "psycopg>=3.1,<4",
              "pyarrow>=10,<19",
              "pyspark>=2,<3.6",
              "pytest>=8.2.0,<8.4",
+             "pytest-forked",
              "pytest-postgresql>=6,<7",
              "pytest-xdist>=3.6,<3.7",
              "pre-commit>=3.5;python_version=='3.8'",
@@ -58,7 +59,7 @@ setup(
              "pandas>=2,<3",
          ],
          "openai": [
-             "openai>=1.30,<1.56",
+             "openai>=1.30,<1.58",
          ],
          "pandas": [
              "pandas>=2,<3",

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '3.9.3'
- __version_tuple__ = version_tuple = (3, 9, 3)
+ __version__ = version = '3.10.0'
+ __version_tuple__ = version_tuple = (3, 10, 0)

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/function_alternatives.py
@@ -64,6 +64,39 @@ def first_always_ignore_nulls(col: ColumnOrName, ignorenulls: t.Optional[bool] =
      return first(col)


+ def to_timestamp_with_time_zone(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     from sqlframe.base.session import _BaseSession
+
+     if format is not None:
+         return Column.invoke_expression_over_column(
+             col, expression.StrToTime, format=_BaseSession().format_time(format)
+         )
+
+     return Column.ensure_col(col).cast("timestamp with time zone", dialect="postgres")
+
+
+ def to_timestamp_tz(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     from sqlframe.base.session import _BaseSession
+
+     if format is not None:
+         return Column.invoke_expression_over_column(
+             col, expression.StrToTime, format=_BaseSession().format_time(format)
+         )
+
+     return Column.ensure_col(col).cast("timestamptz", dialect="duckdb")
+
+
+ def to_timestamp_just_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+     from sqlframe.base.session import _BaseSession
+
+     if format is not None:
+         return Column.invoke_expression_over_column(
+             col, expression.StrToTime, format=_BaseSession().format_time(format)
+         )
+
+     return Column.ensure_col(col).cast("datetime", dialect="bigquery")
+
+
  def bitwise_not_from_bitnot(col: ColumnOrName) -> Column:
      return Column.invoke_anonymous_function(col, "BITNOT")


{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/functions.py
@@ -900,7 +900,7 @@ def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
              col, expression.StrToTime, format=_BaseSession().format_time(format)
          )

-     return Column.ensure_col(col).cast("timestamp")
+     return Column.ensure_col(col).cast("timestampltz")


  @meta()

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/session.py
@@ -570,6 +570,8 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN, UDF_REGISTRATION
              return cls._to_row(list(value.keys()), list(value.values()))
          elif isinstance(value, (list, set, tuple)) and value:
              return [cls._to_value(x) for x in value]
+         elif isinstance(value, datetime.datetime):
+             return value.replace(tzinfo=None)
          return value

      @classmethod
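
The new branch in _BaseSession._to_value drops timezone info from datetime values returned by the drivers, so fetched rows compare equal to naive expectations; this is what lets the test changes later in this diff remove their per-engine tzinfo handling. A tiny standard-library sketch of the same normalization (illustrative, not SQLFrame internals):

import datetime

# Mirror of the new elif branch: strip tzinfo so driver results compare
# equal to naive datetime expectations.
aware = datetime.datetime(2022, 1, 1, 1, 1, 1, tzinfo=datetime.timezone.utc)
assert aware.replace(tzinfo=None) == datetime.datetime(2022, 1, 1, 1, 1, 1)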

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/functions.py
@@ -73,6 +73,7 @@ from sqlframe.base.function_alternatives import ( # noqa
      _is_string_using_typeof_string as _is_string,
      array_append_using_array_cat as array_append,
      endswith_with_underscore as endswith,
+     to_timestamp_just_timestamp as to_timestamp,
  )


{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.py
@@ -53,4 +53,5 @@ from sqlframe.base.function_alternatives import ( # noqa
      endswith_with_underscore as endswith,
      last_day_with_cast as last_day,
      regexp_replace_global_option as regexp_replace,
+     to_timestamp_tz as to_timestamp,
  )

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.pyi
@@ -29,6 +29,7 @@ from sqlframe.base.function_alternatives import ( # noqa
      try_element_at_zero_based as try_element_at,
      to_unix_timestamp_include_default_format as to_unix_timestamp,
      regexp_replace_global_option as regexp_replace,
+     to_timestamp_tz as to_timestamp,
  )
  from sqlframe.base.functions import (
      abs as abs,

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/functions.py
@@ -70,4 +70,5 @@ from sqlframe.base.function_alternatives import ( # noqa
      endswith_using_like as endswith,
      last_day_with_cast as last_day,
      regexp_replace_global_option as regexp_replace,
+     to_timestamp_with_time_zone as to_timestamp,
  )
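
Taken together, these changes give each engine a dedicated to_timestamp: the base implementation now casts to TIMESTAMPLTZ, BigQuery casts to DATETIME, DuckDB to TIMESTAMPTZ, and Postgres to TIMESTAMP WITH TIME ZONE. A minimal user-level sketch, assuming SQLFrame's documented DuckDB session API; the SQL mentioned in the comment is illustrative rather than taken from this diff:

from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()  # in-memory DuckDB connection
df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])

# With to_timestamp_tz aliased as to_timestamp, the generated DuckDB SQL should
# cast the parsed value to TIMESTAMPTZ (roughly CAST(... AS TIMESTAMPTZ)).
print(df.select(F.to_timestamp(df.t).alias("dt")).sql())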

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.9.3
+ Version: 3.10.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman

{sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
  prettytable<3.12.1
- sqlglot<25.33,>=24.0.0
+ sqlglot<26.1,>=24.0.0
  typing_extensions<5,>=4.8

  [bigquery]
@@ -13,12 +13,13 @@ databricks-sql-connector<4,>=3.6
  duckdb<1.2,>=0.9
  findspark<3,>=2
  mypy<1.14,>=1.10.0
- openai<1.56,>=1.30
+ openai<1.58,>=1.30
  pandas-stubs<3,>=2
  pandas<3,>=2
  psycopg<4,>=3.1
  pyarrow<19,>=10
  pyspark<3.6,>=2
+ pytest-forked
  pytest-postgresql<7,>=6
  pytest-xdist<3.7,>=3.6
  pytest<8.4,>=8.2.0
@@ -43,7 +44,7 @@ duckdb<1.2,>=0.9
  pandas<3,>=2

  [openai]
- openai<1.56,>=1.30
+ openai<1.58,>=1.30

  [pandas]
  pandas<3,>=2

{sqlframe-3.9.3 → sqlframe-3.10.0}/tests/common_fixtures.py
@@ -92,7 +92,7 @@ def pyspark_session(tmp_path_factory, gen_tpcds: t.List[Path]) -> PySparkSession
          .config("spark.sql.warehouse.dir", data_dir)
          .config("spark.driver.extraJavaOptions", f"-Dderby.system.home={derby_dir}")
          .config("spark.sql.shuffle.partitions", 1)
-         .config("spark.sql.session.timeZone", "America/Los_Angeles")
+         .config("spark.sql.session.timeZone", "UTC")
          .master("local[1]")
          .appName("Unit-tests")
          .getOrCreate()
@@ -225,6 +225,7 @@ def snowflake_connection() -> SnowflakeConnection:
  @pytest.fixture
  def snowflake_session(snowflake_connection: SnowflakeConnection) -> SnowflakeSession:
      session = SnowflakeSession(snowflake_connection)
+     session._execute("ALTER SESSION SET TIMEZONE = 'UTC'")
      session._execute("CREATE SCHEMA IF NOT EXISTS db1")
      session._execute("CREATE TABLE IF NOT EXISTS db1.table1 (id INTEGER, name VARCHAR(100))")
      session._execute(

{sqlframe-3.9.3 → sqlframe-3.10.0}/tests/conftest.py
@@ -16,7 +16,7 @@ def pytest_collection_modifyitems(items, *args, **kwargs):
  def set_tz():
      import os

-     os.environ["TZ"] = "US/Pacific"
+     os.environ["TZ"] = "UTC"
      time.tzset()
      yield
      del os.environ["TZ"]
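
Both test fixtures now pin the suite to UTC (the Spark session config, the Snowflake session, and the process-level TZ variable), which is what allows the engine-specific timestamp branches removed further down to collapse into single expected values. The process-level piece is the standard POSIX pattern, sketched here on its own:

import os
import time

# Same pattern as the conftest fixture above: pin the process timezone to UTC (POSIX only).
os.environ["TZ"] = "UTC"
time.tzset()
assert time.strftime("%Z") in ("UTC", "GMT")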

{sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py
@@ -54,8 +54,8 @@ def test_print_schema_basic(snowflake_employee: SnowflakeDataFrame, capsys):
          == """
  root
   |-- employee_id: decimal(38, 0) (nullable = true)
-  |-- fname: string (nullable = true)
-  |-- lname: string (nullable = true)
+  |-- fname: varchar(16777216) (nullable = true)
+  |-- lname: varchar(16777216) (nullable = true)
   |-- age: decimal(38, 0) (nullable = true)
   |-- store_id: decimal(38, 0) (nullable = true)""".strip()
      )
@@ -70,9 +70,9 @@ def test_print_schema_nested(snowflake_datatypes: SnowflakeDataFrame, capsys):
  root
   |-- bigint_col: decimal(38, 0) (nullable = true)
   |-- double_col: float (nullable = true)
-  |-- string_col: string (nullable = true)
-  |-- map_string_bigint__col: map<string, decimal(38, 0)> (nullable = true)
-  |    |-- key: string (nullable = true)
+  |-- string_col: varchar(16777216) (nullable = true)
+  |-- map_string_bigint__col: map<varchar(16777216), decimal(38, 0)> (nullable = true)
+  |    |-- key: varchar(16777216) (nullable = true)
   |    |-- value: decimal(38, 0) (nullable = true)
   |-- array_struct_a_bigint_b_bigint__: array<object<a decimal(38, 0), b decimal(38, 0)>> (nullable = true)
   |    |-- element: object<a decimal(38, 0), b decimal(38, 0)> (nullable = true)
@@ -83,7 +83,7 @@ root
   |-- struct_a_bigint__col: object<a decimal(38, 0)> (nullable = true)
   |    |-- a: decimal(38, 0) (nullable = true)
   |-- date_col: date (nullable = true)
-  |-- timestamp_col: timestamp_ntz (nullable = true)
+  |-- timestamp_col: timestamp (nullable = true)
   |-- timestamptz_col: timestamp (nullable = true)
   |-- boolean_col: boolean (nullable = true)""".strip()
      )
@@ -96,9 +96,9 @@ def test_schema(snowflake_employee: SnowflakeDataFrame):
      assert struct_fields[0].name == "employee_id"
      assert struct_fields[0].dataType == types.DecimalType(38, 0)
      assert struct_fields[1].name == "fname"
-     assert struct_fields[1].dataType == types.StringType()
+     assert struct_fields[1].dataType == types.VarcharType(16777216)
      assert struct_fields[2].name == "lname"
-     assert struct_fields[2].dataType == types.StringType()
+     assert struct_fields[2].dataType == types.VarcharType(16777216)
      assert struct_fields[3].name == "age"
      assert struct_fields[3].dataType == types.DecimalType(38, 0)
      assert struct_fields[4].name == "store_id"
@@ -114,10 +114,10 @@ def test_schema_nested(snowflake_datatypes: SnowflakeDataFrame):
      assert struct_fields[1].name == "double_col"
      assert struct_fields[1].dataType == types.FloatType()
      assert struct_fields[2].name == "string_col"
-     assert struct_fields[2].dataType == types.StringType()
+     assert struct_fields[2].dataType == types.VarcharType(16777216)
      assert struct_fields[3].name == "map_string_bigint__col"
      assert struct_fields[3].dataType == types.MapType(
-         types.StringType(),
+         types.VarcharType(16777216),
          types.DecimalType(38, 0),
      )
      assert struct_fields[4].name == "array_struct_a_bigint_b_bigint__"
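
The Snowflake tests now expect Snowflake's maximum-length VARCHAR(16777216) rather than a generic string type. A short sketch of the comparison those assertions rely on, assuming the PySpark-compatible types exposed by sqlframe.base.types (the test module imports an equivalent types module):

from sqlframe.base import types

# Snowflake reports its default VARCHAR length explicitly, so the schema type is
# a parameterized VarcharType rather than StringType.
assert types.VarcharType(16777216) == types.VarcharType(16777216)
assert types.VarcharType(16777216) != types.StringType()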

{sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_int_functions.py
@@ -116,7 +116,7 @@ def get_types() -> t.Callable:
          (datetime.datetime(2022, 1, 1, 1, 1, 1), datetime.datetime(2022, 1, 1, 1, 1, 1)),
          (
              datetime.datetime(2022, 1, 1, 1, 1, 1, tzinfo=datetime.timezone.utc),
-             datetime.datetime(2022, 1, 1, 1, 1, 1, tzinfo=datetime.timezone.utc),
+             datetime.datetime(2022, 1, 1, 1, 1, 1),
          ),
          ({"cola": 1}, {"cola": 1}),
          (Row(**{"cola": 1, "colb": "test"}), Row(**{"cola": 1, "colb": "test"})),
@@ -134,20 +134,12 @@ def test_lit(get_session_and_func, arg, expected):
              pytest.skip("PySpark doesn't literal dict types")
          if isinstance(arg, Row):
              pytest.skip("PySpark doesn't support literal row types")
-         if isinstance(arg, datetime.datetime) and arg.tzinfo is not None:
-             pytest.skip("PySpark doesn't preserve timezone information in datetime literals")
      if isinstance(session, BigQuerySession):
          if isinstance(arg, dict):
              pytest.skip("BigQuery doesn't support map types")
-     if isinstance(session, SparkSession):
-         if isinstance(arg, datetime.datetime) and arg.tzinfo is not None:
-             pytest.skip("Spark doesn't preserve timezone information in datetime literals")
      if isinstance(session, SnowflakeSession):
          if isinstance(arg, Row):
              pytest.skip("Snowflake doesn't support literal row types")
-     if isinstance(session, DatabricksSession):
-         if isinstance(arg, datetime.datetime) and arg.tzinfo is None:
-             expected = expected.replace(tzinfo=datetime.timezone.utc)
      if isinstance(session, DuckDBSession):
          if isinstance(arg, dict):
              expected = Row(**expected)
@@ -181,7 +173,7 @@ def test_col(get_session_and_func, input, output):
          ([1, 2, 3], "array<bigint>"),
          (Row(a=1), "struct<a:bigint>"),
          (datetime.date(2022, 1, 1), "date"),
-         (datetime.datetime(2022, 1, 1, 0, 0, 0), "timestamp"),
+         (datetime.datetime(2022, 1, 1, 0, 0, 0), "timestamptz"),
          (datetime.datetime(2022, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc), "timestamptz"),
          (True, "boolean"),
          (bytes("test", "utf-8"), "binary"),
@@ -211,8 +203,6 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
              expected = expected.split("<")[0]
          if expected == "binary":
              pytest.skip("BigQuery doesn't support binary")
-         if expected == "timestamp":
-             expected = "datetime"
      if isinstance(session, PostgresSession):
          if expected.startswith("map"):
              pytest.skip("Postgres doesn't support map types")
@@ -229,8 +219,6 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
              expected = "object"
          elif expected.startswith("array"):
              pytest.skip("Snowflake doesn't handle arrays properly in values clause")
-         elif expected == "timestamp":
-             expected = "timestampntz"
      result = df.select(typeof("col").alias("test")).first()[0]
      assert exp.DataType.build(result, dialect=dialect) == exp.DataType.build(
          expected, dialect=dialect
@@ -1250,23 +1238,17 @@ def test_current_date(get_session_and_func):
      session, current_date = get_session_and_func("current_date")
      df = session.range(1)
      # The current date can depend on how the connection is configured so we check for dates around today
-     assert df.select(current_date()).first()[0] in (
-         datetime.date.today() - datetime.timedelta(days=1),
-         datetime.date.today(),
-         datetime.date.today() + datetime.timedelta(days=1),
-     )
+     assert df.select(current_date()).first()[0] == datetime.date.today()


  def test_current_timestamp(get_session_and_func):
      session, current_timestamp = get_session_and_func("current_timestamp")
      df = session.range(1)
-     # The current date can depend on how the connection is configured so we check for dates around today
+     now = datetime.datetime.now(pytz.timezone("UTC")).replace(tzinfo=None)
      result = df.select(current_timestamp()).first()[0]
      assert isinstance(result, datetime.datetime)
-     assert result.date() in (
-         datetime.date.today() - datetime.timedelta(days=1),
-         datetime.date.today(),
-         datetime.date.today() + datetime.timedelta(days=1),
+     assert result >= now - datetime.timedelta(minutes=1) and result <= now + datetime.timedelta(
+         minutes=1
      )


@@ -1441,32 +1423,9 @@ def test_to_timestamp(get_session_and_func):
      session, to_timestamp = get_session_and_func("to_timestamp")
      df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])
      result = df.select(to_timestamp(df.t).alias("dt")).first()[0]
-     if isinstance(session, DatabricksSession):
-         assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-     else:
-         assert result == datetime.datetime(1997, 2, 28, 10, 30)
+     assert result == datetime.datetime(1997, 2, 28, 10, 30)
      result = df.select(to_timestamp(df.t, "yyyy-MM-dd HH:mm:ss").alias("dt")).first()[0]
-     if isinstance(session, (BigQuerySession, DuckDBSession)):
-         assert result == datetime.datetime(
-             1997,
-             2,
-             28,
-             10,
-             30,
-             tzinfo=datetime.timezone.utc if isinstance(session, BigQuerySession) else None,
-         )
-     elif isinstance(session, (PostgresSession, DatabricksSession)):
-         assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-     elif isinstance(session, SnowflakeSession):
-         assert result == datetime.datetime(
-             1997,
-             2,
-             28,
-             10,
-             30,
-         )
-     else:
-         assert result == datetime.datetime(1997, 2, 28, 10, 30)
+     assert result == datetime.datetime(1997, 2, 28, 10, 30)


  def test_trunc(get_session_and_func):
@@ -1482,18 +1441,14 @@
  def test_date_trunc(get_session_and_func):
      session, date_trunc = get_session_and_func("date_trunc")
      df = session.createDataFrame([("1997-02-28 05:02:11",)], ["t"])
-     assert df.select(date_trunc("year", df.t).alias("year")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(
+     assert df.select(date_trunc("year", df.t).alias("year")).first()[0] == datetime.datetime(
          1997,
          1,
          1,
          0,
          0,
      )
-     assert df.select(date_trunc("month", df.t).alias("month")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(
+     assert df.select(date_trunc("month", df.t).alias("month")).first()[0] == datetime.datetime(
          1997,
          2,
          1,
@@ -1517,13 +1472,7 @@ def test_last_day(get_session_and_func):
  def test_from_unixtime(get_session_and_func):
      session, from_unixtime = get_session_and_func("from_unixtime")
      df = session.createDataFrame([(1428476400,)], ["unix_time"])
-     if isinstance(
-         session,
-         (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-     ):
-         expected = "2015-04-08 07:00:00"
-     else:
-         expected = "2015-04-08 00:00:00"
+     expected = "2015-04-08 07:00:00"
      assert df.select(from_unixtime("unix_time").alias("ts")).first()[0] == expected


@@ -1531,47 +1480,35 @@ def test_unix_timestamp(get_session_and_func):
      session, unix_timestamp = get_session_and_func("unix_timestamp")
      df = session.createDataFrame([("2015-04-08",)], ["dt"])
      result = df.select(unix_timestamp("dt", "yyyy-MM-dd").alias("unix_time")).first()[0]
-     if isinstance(
-         session,
-         (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-     ):
-         assert result == 1428451200
-     else:
-         assert result == 1428476400
+     assert result == 1428451200


  def test_from_utc_timestamp(get_session_and_func):
      session, from_utc_timestamp = get_session_and_func("from_utc_timestamp")
      df = session.createDataFrame([("1997-02-28 10:30:00", "JST")], ["ts", "tz"])
-     assert df.select(from_utc_timestamp(df.ts, "PST").alias("local_time")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(1997, 2, 28, 2, 30)
-     assert df.select(from_utc_timestamp(df.ts, df.tz).alias("local_time")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(1997, 2, 28, 19, 30)
+     assert df.select(from_utc_timestamp(df.ts, "PST").alias("local_time")).first()[
+         0
+     ] == datetime.datetime(1997, 2, 28, 2, 30)
+     assert df.select(from_utc_timestamp(df.ts, df.tz).alias("local_time")).first()[
+         0
+     ] == datetime.datetime(1997, 2, 28, 19, 30)


  def test_to_utc_timestamp(get_session_and_func):
      session, to_utc_timestamp = get_session_and_func("to_utc_timestamp")
      df = session.createDataFrame([("1997-02-28 10:30:00", "JST")], ["ts", "tz"])
-     assert df.select(to_utc_timestamp(df.ts, "PST").alias("utc_time")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(1997, 2, 28, 18, 30)
-     assert df.select(to_utc_timestamp(df.ts, df.tz).alias("utc_time")).first()[0].replace(
-         tzinfo=None
-     ) == datetime.datetime(1997, 2, 28, 1, 30)
+     assert df.select(to_utc_timestamp(df.ts, "PST").alias("utc_time")).first()[
+         0
+     ] == datetime.datetime(1997, 2, 28, 18, 30)
+     assert df.select(to_utc_timestamp(df.ts, df.tz).alias("utc_time")).first()[
+         0
+     ] == datetime.datetime(1997, 2, 28, 1, 30)


  def test_timestamp_seconds(get_session_and_func):
      session, timestamp_seconds = get_session_and_func("timestamp_seconds")
      df = session.createDataFrame([(1230219000,)], ["unix_time"])
-     if isinstance(
-         session,
-         (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-     ):
-         expected = datetime.datetime(2008, 12, 25, 15, 30, 00)
-     else:
-         expected = datetime.datetime(2008, 12, 25, 7, 30)
+     expected = datetime.datetime(2008, 12, 25, 15, 30, 00)
      assert (
          df.select(timestamp_seconds(df.unix_time).alias("ts")).first()[0].replace(tzinfo=None)
          == expected
@@ -3571,16 +3508,7 @@ def test_convert_timezone(get_session_and_func, get_func):
      session, convert_timezone = get_session_and_func("convert_timezone")
      lit = get_func("lit", session)
      df = session.createDataFrame([("2015-04-08",)], ["dt"])
-     if isinstance(session, DuckDBSession):
-         expected = pytz.timezone("US/Pacific").localize(datetime.datetime(2015, 4, 7, 9, 0))
-     elif isinstance(session, PostgresSession):
-         expected = datetime.datetime(2015, 4, 7, 16, 0, tzinfo=datetime.timezone.utc)
-     elif isinstance(session, SnowflakeSession):
-         expected = datetime.datetime(2015, 4, 8, 15, 0, tzinfo=pytz.FixedOffset(480))
-     elif isinstance(session, DatabricksSession):
-         expected = datetime.datetime(2015, 4, 8, 8, 0)
-     else:
-         expected = datetime.datetime(2015, 4, 8, 15, 0)
+     expected = datetime.datetime(2015, 4, 8, 8, 0)
      assert df.select(convert_timezone(None, lit("Asia/Hong_Kong"), "dt").alias("ts")).collect() == [
          Row(ts=expected)
      ]
@@ -3638,7 +3566,7 @@ def test_current_schema(get_session_and_func, get_func):

  def test_current_timezone(get_session_and_func, get_func):
      session, current_timezone = get_session_and_func("current_timezone")
-     assert session.range(1).select(current_timezone()).first()[0] == "America/Los_Angeles"
+     assert session.range(1).select(current_timezone()).first()[0] == "UTC"


  def test_date_from_unix_date(get_session_and_func, get_func):
@@ -4097,24 +4025,12 @@ def test_make_timestamp(get_session_and_func, get_func):
          [[2014, 12, 28, 6, 30, 45.887, "CET"]],
          ["year", "month", "day", "hour", "min", "sec", "timezone"],
      )
-     if isinstance(session, DatabricksSession):
-         assert df.select(
-             make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone).alias(
-                 "r"
-             )
-         ).first()[0].replace(tzinfo=None) == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
-         assert df.select(
-             make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
-         ).first()[0].replace(tzinfo=None) == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
-     else:
-         assert df.select(
-             make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone).alias(
-                 "r"
-             )
-         ).first()[0] == datetime.datetime(2014, 12, 27, 21, 30, 45, 887000)
-         assert df.select(
-             make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
-         ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
+     assert df.select(
+         make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone).alias("r")
+     ).first()[0] == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
+     assert df.select(
+         make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
+     ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)


  def test_make_timestamp_ltz(get_session_and_func, get_func):
@@ -4125,7 +4041,7 @@ def test_make_timestamp_ltz(get_session_and_func, get_func):
      )
      assert df.select(
          make_timestamp_ltz(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone)
-     ).first()[0] == datetime.datetime(2014, 12, 27, 21, 30, 45, 887000)
+     ).first()[0] == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
      assert df.select(
          make_timestamp_ltz(df.year, df.month, df.day, df.hour, df.min, df.sec)
      ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
@@ -4826,27 +4742,17 @@ def test_substr(get_session_and_func, get_func):
  def test_timestamp_micros(get_session_and_func, get_func):
      session, timestamp_micros = get_session_and_func("timestamp_micros")
      time_df = session.createDataFrame([(1230219000,)], ["unix_time"])
-     if isinstance(session, DatabricksSession):
-         assert time_df.select(timestamp_micros(time_df.unix_time).alias("ts")).first()[0].replace(
-             tzinfo=None
-         ) == datetime.datetime(1970, 1, 1, 0, 20, 30, 219000)
-     else:
-         assert time_df.select(timestamp_micros(time_df.unix_time).alias("ts")).first()[
-             0
-         ] == datetime.datetime(1969, 12, 31, 16, 20, 30, 219000)
+     assert time_df.select(timestamp_micros(time_df.unix_time).alias("ts")).first()[
+         0
+     ] == datetime.datetime(1970, 1, 1, 0, 20, 30, 219000)


  def test_timestamp_millis(get_session_and_func, get_func):
      session, timestamp_millis = get_session_and_func("timestamp_millis")
      time_df = session.createDataFrame([(1230219000,)], ["unix_time"])
-     if isinstance(session, DatabricksSession):
-         assert time_df.select(timestamp_millis(time_df.unix_time).alias("ts")).first()[0].replace(
-             tzinfo=None
-         ) == datetime.datetime(1970, 1, 15, 5, 43, 39)
-     else:
-         assert time_df.select(timestamp_millis(time_df.unix_time).alias("ts")).first()[
-             0
-         ] == datetime.datetime(1970, 1, 14, 21, 43, 39)
+     assert time_df.select(timestamp_millis(time_df.unix_time).alias("ts")).first()[
+         0
+     ] == datetime.datetime(1970, 1, 15, 5, 43, 39)


  def test_to_char(get_session_and_func, get_func):
@@ -4901,7 +4807,7 @@ def test_to_unix_timestamp(get_session_and_func, get_func):
      if isinstance(session, (DuckDBSession, DatabricksSession)):
          assert result == 1460073600.0
      else:
-         assert result == 1460098800
+         assert result == 1460073600
      # DuckDB requires the value to match the format which the default format is "yyyy-MM-dd HH:mm:ss".
      # https://spark.apache.org/docs/latest/api/sql/#to_unix_timestamp
      if isinstance(session, DuckDBSession):
@@ -4992,15 +4898,9 @@ def test_try_to_timestamp(get_session_and_func, get_func):
      lit = get_func("lit", session)
      df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])
      result = df.select(try_to_timestamp(df.t).alias("dt")).first()[0]
-     if isinstance(session, (BigQuerySession, DatabricksSession)):
-         assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-     else:
-         assert result == datetime.datetime(1997, 2, 28, 10, 30)
+     assert result == datetime.datetime(1997, 2, 28, 10, 30)
      result = df.select(try_to_timestamp(df.t, lit("yyyy-MM-dd HH:mm:ss")).alias("dt")).first()[0]
-     if isinstance(session, (BigQuerySession, DatabricksSession)):
-         assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-     else:
-         assert result == datetime.datetime(1997, 2, 28, 10, 30)
+     assert result == datetime.datetime(1997, 2, 28, 10, 30)


  def test_ucase(get_session_and_func, get_func):
@@ -5020,30 +4920,21 @@ def test_unix_micros(get_session_and_func, get_func):
      session, unix_micros = get_session_and_func("unix_micros")
      to_timestamp = get_func("to_timestamp", session)
      df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-     if isinstance(session, DatabricksSession):
-         assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000000
-     else:
-         assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400000000
+     assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000000


  def test_unix_millis(get_session_and_func, get_func):
      session, unix_millis = get_session_and_func("unix_millis")
      to_timestamp = get_func("to_timestamp", session)
      df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-     if isinstance(session, DatabricksSession):
-         assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000
-     else:
-         assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400000
+     assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000


  def test_unix_seconds(get_session_and_func, get_func):
      session, unix_seconds = get_session_and_func("unix_seconds")
      to_timestamp = get_func("to_timestamp", session)
      df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-     if isinstance(session, DatabricksSession):
-         assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200
-     else:
-         assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400
+     assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200


  def test_url_decode(get_session_and_func, get_func):
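
With every engine session pinned to UTC, the integration tests above replace their per-dialect branches with single expected values, and test_current_timestamp keeps only a one-minute tolerance window around the current time. The window pattern in isolation (pytz is assumed available, as it already is in this test module); result stands in for df.select(current_timestamp()).first()[0]:

import datetime

import pytz

now = datetime.datetime.now(pytz.timezone("UTC")).replace(tzinfo=None)
result = datetime.datetime.now(pytz.timezone("UTC")).replace(tzinfo=None)  # stand-in value
assert now - datetime.timedelta(minutes=1) <= result <= now + datetime.timedelta(minutes=1)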