sqlframe 3.9.2__tar.gz → 3.10.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (375)
  1. {sqlframe-3.9.2 → sqlframe-3.10.0}/Makefile +3 -0
  2. {sqlframe-3.9.2 → sqlframe-3.10.0}/PKG-INFO +1 -1
  3. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/databricks.md +0 -2
  4. {sqlframe-3.9.2 → sqlframe-3.10.0}/setup.py +4 -3
  5. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/_version.py +2 -2
  6. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/function_alternatives.py +38 -0
  7. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/functions.py +11 -11
  8. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/session.py +2 -0
  9. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/functions.py +1 -0
  10. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/catalog.py +12 -2
  11. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/dataframe.py +3 -2
  12. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/functions.py +1 -0
  13. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/session.py +14 -1
  14. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/functions.py +1 -0
  15. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/functions.pyi +1 -0
  16. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/functions.py +1 -0
  17. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe.egg-info/PKG-INFO +1 -1
  18. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe.egg-info/SOURCES.txt +6 -0
  19. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe.egg-info/requires.txt +4 -3
  20. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/common_fixtures.py +29 -1
  21. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/conftest.py +1 -1
  22. sqlframe-3.10.0/tests/integration/engines/databricks/test_databricks_catalog.py +338 -0
  23. sqlframe-3.10.0/tests/integration/engines/databricks/test_databricks_dataframe.py +169 -0
  24. sqlframe-3.10.0/tests/integration/engines/databricks/test_databricks_session.py +47 -0
  25. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +10 -10
  26. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_engine_session.py +4 -1
  27. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_int_functions.py +136 -107
  28. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/fixtures.py +70 -0
  29. sqlframe-3.10.0/tests/unit/databricks/test_activate.py +50 -0
  30. sqlframe-3.10.0/tests/unit/spark/__init__.py +0 -0
  31. sqlframe-3.10.0/tests/unit/standalone/__init__.py +0 -0
  32. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_functions.py +5 -5
  33. {sqlframe-3.9.2 → sqlframe-3.10.0}/.github/CODEOWNERS +0 -0
  34. {sqlframe-3.9.2 → sqlframe-3.10.0}/.github/workflows/main.workflow.yaml +0 -0
  35. {sqlframe-3.9.2 → sqlframe-3.10.0}/.github/workflows/publish.workflow.yaml +0 -0
  36. {sqlframe-3.9.2 → sqlframe-3.10.0}/.gitignore +0 -0
  37. {sqlframe-3.9.2 → sqlframe-3.10.0}/.pre-commit-config.yaml +0 -0
  38. {sqlframe-3.9.2 → sqlframe-3.10.0}/.readthedocs.yaml +0 -0
  39. {sqlframe-3.9.2 → sqlframe-3.10.0}/LICENSE +0 -0
  40. {sqlframe-3.9.2 → sqlframe-3.10.0}/README.md +0 -0
  41. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/add_chatgpt_support.md +0 -0
  42. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  43. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  44. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  45. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  46. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  47. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  48. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  49. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  50. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/but_wait_theres_more.gif +0 -0
  51. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/cake.gif +0 -0
  52. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  53. {sqlframe-3.9.2 → sqlframe-3.10.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  54. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/bigquery.md +0 -0
  55. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/configuration.md +0 -0
  56. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/bigquery.md +0 -0
  57. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/duckdb.md +0 -0
  58. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/images/SF.png +0 -0
  59. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/images/favicon.png +0 -0
  60. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/images/favicon_old.png +0 -0
  61. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  62. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/images/sqlframe_logo.png +0 -0
  63. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/docs/postgres.md +0 -0
  64. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/duckdb.md +0 -0
  65. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/images/SF.png +0 -0
  66. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/images/favicon.png +0 -0
  67. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/images/favicon_old.png +0 -0
  68. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/images/sqlframe_diagram.png +0 -0
  69. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/images/sqlframe_logo.png +0 -0
  70. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/index.md +0 -0
  71. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/postgres.md +0 -0
  72. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/redshift.md +0 -0
  73. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/requirements.txt +0 -0
  74. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/snowflake.md +0 -0
  75. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/spark.md +0 -0
  76. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/standalone.md +0 -0
  77. {sqlframe-3.9.2 → sqlframe-3.10.0}/docs/stylesheets/extra.css +0 -0
  78. {sqlframe-3.9.2 → sqlframe-3.10.0}/mkdocs.yml +0 -0
  79. {sqlframe-3.9.2 → sqlframe-3.10.0}/pytest.ini +0 -0
  80. {sqlframe-3.9.2 → sqlframe-3.10.0}/renovate.json +0 -0
  81. {sqlframe-3.9.2 → sqlframe-3.10.0}/setup.cfg +0 -0
  82. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/LICENSE +0 -0
  83. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/__init__.py +0 -0
  84. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/__init__.py +0 -0
  85. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/_typing.py +0 -0
  86. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/catalog.py +0 -0
  87. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/column.py +0 -0
  88. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/dataframe.py +0 -0
  89. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/decorators.py +0 -0
  90. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/exceptions.py +0 -0
  91. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/group.py +0 -0
  92. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/mixins/__init__.py +0 -0
  93. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  94. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  95. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  96. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/normalize.py +0 -0
  97. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/operations.py +0 -0
  98. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/readerwriter.py +0 -0
  99. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/transforms.py +0 -0
  100. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/types.py +0 -0
  101. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/udf.py +0 -0
  102. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/util.py +0 -0
  103. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/base/window.py +0 -0
  104. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/__init__.py +0 -0
  105. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/catalog.py +0 -0
  106. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/column.py +0 -0
  107. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/dataframe.py +0 -0
  108. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/functions.pyi +0 -0
  109. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/group.py +0 -0
  110. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/readwriter.py +0 -0
  111. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/session.py +0 -0
  112. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/types.py +0 -0
  113. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/udf.py +0 -0
  114. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/bigquery/window.py +0 -0
  115. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/__init__.py +0 -0
  116. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/column.py +0 -0
  117. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/functions.pyi +0 -0
  118. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/group.py +0 -0
  119. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/readwriter.py +0 -0
  120. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/types.py +0 -0
  121. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/udf.py +0 -0
  122. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/databricks/window.py +0 -0
  123. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/__init__.py +0 -0
  124. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/catalog.py +0 -0
  125. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/column.py +0 -0
  126. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/dataframe.py +0 -0
  127. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/group.py +0 -0
  128. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/readwriter.py +0 -0
  129. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/session.py +0 -0
  130. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/types.py +0 -0
  131. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/udf.py +0 -0
  132. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/duckdb/window.py +0 -0
  133. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/__init__.py +0 -0
  134. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/catalog.py +0 -0
  135. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/column.py +0 -0
  136. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/dataframe.py +0 -0
  137. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/functions.pyi +0 -0
  138. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/group.py +0 -0
  139. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/readwriter.py +0 -0
  140. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/session.py +0 -0
  141. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/types.py +0 -0
  142. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/udf.py +0 -0
  143. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/postgres/window.py +0 -0
  144. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/__init__.py +0 -0
  145. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/catalog.py +0 -0
  146. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/column.py +0 -0
  147. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/dataframe.py +0 -0
  148. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/functions.py +0 -0
  149. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/group.py +0 -0
  150. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/readwriter.py +0 -0
  151. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/session.py +0 -0
  152. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/types.py +0 -0
  153. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/udf.py +0 -0
  154. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/redshift/window.py +0 -0
  155. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/__init__.py +0 -0
  156. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/catalog.py +0 -0
  157. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/column.py +0 -0
  158. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/dataframe.py +0 -0
  159. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/functions.py +0 -0
  160. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/functions.pyi +0 -0
  161. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/group.py +0 -0
  162. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/readwriter.py +0 -0
  163. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/session.py +0 -0
  164. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/types.py +0 -0
  165. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/udf.py +0 -0
  166. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/snowflake/window.py +0 -0
  167. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/__init__.py +0 -0
  168. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/catalog.py +0 -0
  169. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/column.py +0 -0
  170. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/dataframe.py +0 -0
  171. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/functions.py +0 -0
  172. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/functions.pyi +0 -0
  173. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/group.py +0 -0
  174. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/readwriter.py +0 -0
  175. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/session.py +0 -0
  176. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/types.py +0 -0
  177. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/udf.py +0 -0
  178. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/spark/window.py +0 -0
  179. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/__init__.py +0 -0
  180. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/catalog.py +0 -0
  181. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/column.py +0 -0
  182. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/dataframe.py +0 -0
  183. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/functions.py +0 -0
  184. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/group.py +0 -0
  185. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/readwriter.py +0 -0
  186. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/session.py +0 -0
  187. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/types.py +0 -0
  188. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/udf.py +0 -0
  189. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/standalone/window.py +0 -0
  190. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/testing/__init__.py +0 -0
  191. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe/testing/utils.py +0 -0
  192. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  193. {sqlframe-3.9.2 → sqlframe-3.10.0}/sqlframe.egg-info/top_level.txt +0 -0
  194. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/__init__.py +0 -0
  195. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee.csv +0 -0
  196. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee.json +0 -0
  197. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee.parquet +0 -0
  198. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  199. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  200. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  201. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  202. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  203. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  204. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  205. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  206. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  207. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  208. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  209. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  210. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  211. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  212. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/employee_extra_line.csv +0 -0
  213. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  214. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  215. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  216. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  217. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  218. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  219. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  220. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  221. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  222. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  223. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  224. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  225. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  226. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  227. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  228. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  229. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  230. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  231. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  232. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  233. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  234. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  235. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  236. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  237. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  238. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  239. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  240. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  241. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  242. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  243. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  244. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  245. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  246. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  247. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  248. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  249. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  250. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  251. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  252. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  253. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  254. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  255. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  256. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  257. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  258. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  259. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  260. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  261. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  262. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  263. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  264. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  265. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  266. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  267. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  268. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  269. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  270. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  271. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  272. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  273. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  274. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  275. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  276. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  277. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  278. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  279. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  280. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  281. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  282. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  283. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  284. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  285. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  286. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  287. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  288. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  289. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  290. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  291. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  292. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  293. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  294. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  295. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  296. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  297. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  298. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  299. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  300. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  301. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  302. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  303. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  304. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  305. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  306. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  307. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  308. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  309. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  310. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  311. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  312. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/__init__.py +0 -0
  313. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/__init__.py +0 -0
  314. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  315. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  316. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  317. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  318. {sqlframe-3.9.2/tests/integration/engines/duck → sqlframe-3.10.0/tests/integration/engines/databricks}/__init__.py +0 -0
  319. {sqlframe-3.9.2/tests/integration/engines/postgres → sqlframe-3.10.0/tests/integration/engines/duck}/__init__.py +0 -0
  320. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  321. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  322. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  323. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  324. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  325. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  326. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  327. {sqlframe-3.9.2/tests/integration/engines/redshift → sqlframe-3.10.0/tests/integration/engines/postgres}/__init__.py +0 -0
  328. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  329. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  330. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  331. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  332. {sqlframe-3.9.2/tests/integration/engines/snowflake → sqlframe-3.10.0/tests/integration/engines/redshift}/__init__.py +0 -0
  333. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  334. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  335. {sqlframe-3.9.2/tests/integration/engines/spark → sqlframe-3.10.0/tests/integration/engines/snowflake}/__init__.py +0 -0
  336. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  337. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  338. {sqlframe-3.9.2/tests/unit → sqlframe-3.10.0/tests/integration/engines/spark}/__init__.py +0 -0
  339. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  340. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  341. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_engine_column.py +0 -0
  342. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  343. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_engine_reader.py +0 -0
  344. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_engine_writer.py +0 -0
  345. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/engines/test_int_testing.py +0 -0
  346. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/test_int_dataframe.py +0 -0
  347. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  348. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/test_int_grouped_data.py +0 -0
  349. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/integration/test_int_session.py +0 -0
  350. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/types.py +0 -0
  351. {sqlframe-3.9.2/tests/unit/bigquery → sqlframe-3.10.0/tests/unit}/__init__.py +0 -0
  352. {sqlframe-3.9.2/tests/unit/duck → sqlframe-3.10.0/tests/unit/bigquery}/__init__.py +0 -0
  353. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/bigquery/test_activate.py +0 -0
  354. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/conftest.py +0 -0
  355. {sqlframe-3.9.2/tests/unit/postgres → sqlframe-3.10.0/tests/unit/databricks}/__init__.py +0 -0
  356. {sqlframe-3.9.2/tests/unit/redshift → sqlframe-3.10.0/tests/unit/duck}/__init__.py +0 -0
  357. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/duck/test_activate.py +0 -0
  358. {sqlframe-3.9.2/tests/unit/snowflake → sqlframe-3.10.0/tests/unit/postgres}/__init__.py +0 -0
  359. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/postgres/test_activate.py +0 -0
  360. {sqlframe-3.9.2/tests/unit/spark → sqlframe-3.10.0/tests/unit/redshift}/__init__.py +0 -0
  361. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/redshift/test_activate.py +0 -0
  362. {sqlframe-3.9.2/tests/unit/standalone → sqlframe-3.10.0/tests/unit/snowflake}/__init__.py +0 -0
  363. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/snowflake/test_activate.py +0 -0
  364. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/spark/test_activate.py +0 -0
  365. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/fixtures.py +0 -0
  366. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_activate.py +0 -0
  367. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_column.py +0 -0
  368. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe.py +0 -0
  369. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  370. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_session.py +0 -0
  371. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  372. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_types.py +0 -0
  373. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/standalone/test_window.py +0 -0
  374. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/test_activate.py +0 -0
  375. {sqlframe-3.9.2 → sqlframe-3.10.0}/tests/unit/test_util.py +0 -0
Makefile
@@ -22,6 +22,9 @@ duckdb-test:
 snowflake-test:
 	pytest -n auto -m "snowflake"

+databricks-test:
+	pytest -n auto -m "databricks"
+
 style:
 	pre-commit run --all-files

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 3.9.2
+Version: 3.10.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
docs/databricks.md
@@ -1,5 +1,3 @@
-from test import auth_type
-
 # Databricks (In Development)

 ## Installation
setup.py
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.12.1",
-        "sqlglot>=24.0.0,<25.33",
+        "sqlglot>=24.0.0,<26.1",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
@@ -32,13 +32,14 @@ setup(
             "duckdb>=0.9,<1.2",
             "findspark>=2,<3",
             "mypy>=1.10.0,<1.14",
-            "openai>=1.30,<1.56",
+            "openai>=1.30,<1.58",
             "pandas>=2,<3",
             "pandas-stubs>=2,<3",
             "psycopg>=3.1,<4",
             "pyarrow>=10,<19",
             "pyspark>=2,<3.6",
             "pytest>=8.2.0,<8.4",
+            "pytest-forked",
             "pytest-postgresql>=6,<7",
             "pytest-xdist>=3.6,<3.7",
             "pre-commit>=3.5;python_version=='3.8'",
@@ -58,7 +59,7 @@ setup(
             "pandas>=2,<3",
         ],
         "openai": [
-            "openai>=1.30,<1.56",
+            "openai>=1.30,<1.58",
         ],
         "pandas": [
             "pandas>=2,<3",
sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '3.9.2'
-__version_tuple__ = version_tuple = (3, 9, 2)
+__version__ = version = '3.10.0'
+__version_tuple__ = version_tuple = (3, 10, 0)
sqlframe/base/function_alternatives.py
@@ -64,6 +64,39 @@ def first_always_ignore_nulls(col: ColumnOrName, ignorenulls: t.Optional[bool] =
     return first(col)


+def to_timestamp_with_time_zone(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("timestamp with time zone", dialect="postgres")
+
+
+def to_timestamp_tz(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("timestamptz", dialect="duckdb")
+
+
+def to_timestamp_just_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("datetime", dialect="bigquery")
+
+
 def bitwise_not_from_bitnot(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "BITNOT")

@@ -1220,6 +1253,11 @@ def get_json_object_cast_object(col: ColumnOrName, path: str) -> Column:
     return get_json_object(col_func(col).cast("variant"), path)


+def get_json_object_using_function(col: ColumnOrName, path: str) -> Column:
+    lit = get_func_from_session("lit")
+    return Column.invoke_anonymous_function(col, "GET_JSON_OBJECT", lit(path))
+
+
 def create_map_with_cast(*cols: t.Union[ColumnOrName, t.Iterable[ColumnOrName]]) -> Column:
     from sqlframe.base.functions import create_map

sqlframe/base/functions.py
@@ -900,7 +900,7 @@ def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
             col, expression.StrToTime, format=_BaseSession().format_time(format)
         )

-    return Column.ensure_col(col).cast("timestamp")
+    return Column.ensure_col(col).cast("timestampltz")


 @meta()
@@ -2173,7 +2173,7 @@ def current_database() -> Column:
 current_schema = current_database


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def current_timezone() -> Column:
     return Column.invoke_anonymous_function(None, "current_timezone")

@@ -2261,7 +2261,7 @@ def get(col: ColumnOrName, index: t.Union[ColumnOrName, int]) -> Column:
     return Column.invoke_anonymous_function(col, "get", index)


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def get_active_spark_context() -> SparkContext:
     """Raise RuntimeError if SparkContext is not initialized,
     otherwise, returns the active SparkContext."""
@@ -2778,7 +2778,7 @@ def isnotnull(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "isnotnull")


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def java_method(*cols: ColumnOrName) -> Column:
     """
     Calls a method with reflection.
@@ -3050,7 +3050,7 @@ def ln(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.Ln)


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def localtimestamp() -> Column:
     """
     Returns the current timestamp without time zone at the start of query evaluation
@@ -3080,7 +3080,7 @@ def localtimestamp() -> Column:
     return Column.invoke_anonymous_function(None, "localtimestamp")


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_dt_interval(
     days: t.Optional[ColumnOrName] = None,
     hours: t.Optional[ColumnOrName] = None,
@@ -3227,7 +3227,7 @@ def make_timestamp(
     )


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_timestamp_ltz(
     years: ColumnOrName,
     months: ColumnOrName,
@@ -3354,7 +3354,7 @@ def make_timestamp_ntz(
     )


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_ym_interval(
     years: t.Optional[ColumnOrName] = None,
     months: t.Optional[ColumnOrName] = None,
@@ -3922,7 +3922,7 @@ def printf(format: ColumnOrName, *cols: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(format, "printf", *cols)


-@meta(unsupported_engines=["*", "spark"])
+@meta(unsupported_engines=["*", "spark", "databricks"])
 def product(col: ColumnOrName) -> Column:
     """
     Aggregate function: returns the product of the values in a group.
@@ -3961,7 +3961,7 @@ def product(col: ColumnOrName) -> Column:
 reduce = aggregate


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def reflect(*cols: ColumnOrName) -> Column:
     """
     Calls a method with reflection.
@@ -5046,7 +5046,7 @@ def to_str(value: t.Any) -> t.Optional[str]:
     return str(value)


-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def to_timestamp_ltz(
     timestamp: ColumnOrName,
     format: t.Optional[ColumnOrName] = None,
sqlframe/base/session.py
@@ -570,6 +570,8 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN, UDF_REGISTRATION
             return cls._to_row(list(value.keys()), list(value.values()))
         elif isinstance(value, (list, set, tuple)) and value:
             return [cls._to_value(x) for x in value]
+        elif isinstance(value, datetime.datetime):
+            return value.replace(tzinfo=None)
         return value

     @classmethod
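The new `_to_value` branch above appears intended to normalize datetime results to naive values, in line with the UTC timezone settings added to the test fixtures later in this diff. A standalone sketch of the same standard-library behavior (illustrative only, not taken from the package):

    import datetime

    aware = datetime.datetime(2024, 1, 1, 12, 0, tzinfo=datetime.timezone.utc)
    naive = aware.replace(tzinfo=None)  # same wall-clock fields, tzinfo dropped
    assert naive == datetime.datetime(2024, 1, 1, 12, 0)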
sqlframe/bigquery/functions.py
@@ -73,6 +73,7 @@ from sqlframe.base.function_alternatives import ( # noqa
     _is_string_using_typeof_string as _is_string,
     array_append_using_array_cat as array_append,
     endswith_with_underscore as endswith,
+    to_timestamp_just_timestamp as to_timestamp,
 )


sqlframe/databricks/catalog.py
@@ -26,7 +26,6 @@ if t.TYPE_CHECKING:


 class DatabricksCatalog(
-    SetCurrentCatalogFromUseMixin["DatabricksSession", "DatabricksDataFrame"],
     GetCurrentCatalogFromFunctionMixin["DatabricksSession", "DatabricksDataFrame"],
     GetCurrentDatabaseFromFunctionMixin["DatabricksSession", "DatabricksDataFrame"],
     ListDatabasesFromInfoSchemaMixin["DatabricksSession", "DatabricksDataFrame"],
@@ -38,6 +37,15 @@ class DatabricksCatalog(
     CURRENT_CATALOG_EXPRESSION: exp.Expression = exp.func("current_catalog")
     UPPERCASE_INFO_SCHEMA = True

+    def setCurrentCatalog(self, catalogName: str) -> None:
+        self.session._collect(
+            exp.Use(
+                kind=exp.Var(this=exp.to_identifier("CATALOG")),
+                this=exp.parse_identifier(catalogName, dialect=self.session.input_dialect),
+            ),
+            quote_identifiers=False,
+        )
+
     def listFunctions(
         self, dbName: t.Optional[str] = None, pattern: t.Optional[str] = None
     ) -> t.List[Function]:
@@ -106,7 +114,9 @@ class DatabricksCatalog(
         )
         functions = [
             Function(
-                name=normalize_string(x["function"], from_dialect="execution", to_dialect="output"),
+                name=normalize_string(
+                    x["function"].split(".")[-1], from_dialect="execution", to_dialect="output"
+                ),
                 catalog=normalize_string(
                     schema.catalog, from_dialect="execution", to_dialect="output"
                 ),
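DatabricksCatalog now implements setCurrentCatalog directly, instead of inheriting the removed SetCurrentCatalogFromUseMixin, so the emitted statement is a USE CATALOG built with sqlglot. A hypothetical standalone rendering of that same expression, using "main" as a placeholder catalog name (illustrative only; the exact output string is approximate):

    from sqlglot import exp

    use = exp.Use(
        kind=exp.Var(this=exp.to_identifier("CATALOG")),
        this=exp.parse_identifier("main", dialect="databricks"),
    )
    print(use.sql(dialect="databricks"))  # expected to print roughly: USE CATALOG main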
sqlframe/databricks/dataframe.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import logging
-import sys
 import typing as t

 from sqlframe.base.catalog import Column as CatalogColumn
@@ -52,7 +51,9 @@ class DatabricksDataFrame(
             columns.append(
                 CatalogColumn(
                     name=normalize_string(
-                        row.col_name, from_dialect="execution", to_dialect="output"
+                        row.col_name,
+                        from_dialect="execution",
+                        to_dialect="output",
                     ),
                     dataType=normalize_string(
                         row.data_type,
sqlframe/databricks/functions.py
@@ -19,4 +19,5 @@ from sqlframe.base.function_alternatives import ( # noqa
     arrays_overlap_renamed as arrays_overlap,
     _is_string_using_typeof_string_lcase as _is_string,
     try_element_at_zero_based as try_element_at,
+    get_json_object_using_function as get_json_object,
 )
sqlframe/databricks/session.py
@@ -44,7 +44,20 @@ class DatabricksSession(
         from databricks import sql

         if not hasattr(self, "_conn"):
-            super().__init__(conn or sql.connect(server_hostname, http_path, access_token))
+            super().__init__(
+                conn or sql.connect(server_hostname, http_path, access_token, disable_pandas=True)
+            )
+
+    @classmethod
+    def _try_get_map(cls, value: t.Any) -> t.Optional[t.Dict[str, t.Any]]:
+        if (
+            value
+            and isinstance(value, list)
+            and all(isinstance(item, tuple) for item in value)
+            and all(len(item) == 2 for item in value)
+        ):
+            return dict(value)
+        return None

     class Builder(_BaseSession.Builder):
         DEFAULT_EXECUTION_DIALECT = "databricks"
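The session now passes disable_pandas=True to the databricks-sql-connector, and the new _try_get_map helper converts MAP values, which appear to come back as lists of key/value tuples in that mode (an assumption based on this change), into plain dicts. The conversion itself is ordinary Python, as this standalone sketch shows:

    # Standalone illustration of the shape _try_get_map accepts and what it returns.
    value = [("a", 1), ("b", 2)]
    if value and isinstance(value, list) and all(isinstance(i, tuple) and len(i) == 2 for i in value):
        as_map = dict(value)
    else:
        as_map = None
    assert as_map == {"a": 1, "b": 2}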
sqlframe/duckdb/functions.py
@@ -53,4 +53,5 @@ from sqlframe.base.function_alternatives import ( # noqa
     endswith_with_underscore as endswith,
     last_day_with_cast as last_day,
     regexp_replace_global_option as regexp_replace,
+    to_timestamp_tz as to_timestamp,
 )
sqlframe/duckdb/functions.pyi
@@ -29,6 +29,7 @@ from sqlframe.base.function_alternatives import ( # noqa
     try_element_at_zero_based as try_element_at,
     to_unix_timestamp_include_default_format as to_unix_timestamp,
     regexp_replace_global_option as regexp_replace,
+    to_timestamp_tz as to_timestamp,
 )
 from sqlframe.base.functions import (
     abs as abs,
sqlframe/postgres/functions.py
@@ -70,4 +70,5 @@ from sqlframe.base.function_alternatives import ( # noqa
     endswith_using_like as endswith,
     last_day_with_cast as last_day,
     regexp_replace_global_option as regexp_replace,
+    to_timestamp_with_time_zone as to_timestamp,
 )
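Together with the BigQuery change earlier, these hunks alias the new to_timestamp alternatives from function_alternatives.py, so a bare to_timestamp(col) becomes a cast to an engine-appropriate type: TIMESTAMP WITH TIME ZONE on Postgres, TIMESTAMPTZ on DuckDB, DATETIME on BigQuery. A small sketch building equivalent casts with sqlglot directly, for illustration only (the rendered strings are approximate):

    from sqlglot import exp

    col = exp.column("created_at")
    for dialect, dtype in [
        ("postgres", "timestamp with time zone"),
        ("duckdb", "timestamptz"),
        ("bigquery", "datetime"),
    ]:
        print(dialect, exp.cast(col, dtype).sql(dialect=dialect))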
sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 3.9.2
+Version: 3.10.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
sqlframe.egg-info/SOURCES.txt
@@ -316,6 +316,10 @@ tests/integration/engines/bigquery/__init__.py
 tests/integration/engines/bigquery/test_bigquery_catalog.py
 tests/integration/engines/bigquery/test_bigquery_dataframe.py
 tests/integration/engines/bigquery/test_bigquery_session.py
+tests/integration/engines/databricks/__init__.py
+tests/integration/engines/databricks/test_databricks_catalog.py
+tests/integration/engines/databricks/test_databricks_dataframe.py
+tests/integration/engines/databricks/test_databricks_session.py
 tests/integration/engines/duck/__init__.py
 tests/integration/engines/duck/test_duckdb_activate.py
 tests/integration/engines/duck/test_duckdb_catalog.py
@@ -345,6 +349,8 @@ tests/unit/test_activate.py
 tests/unit/test_util.py
 tests/unit/bigquery/__init__.py
 tests/unit/bigquery/test_activate.py
+tests/unit/databricks/__init__.py
+tests/unit/databricks/test_activate.py
 tests/unit/duck/__init__.py
 tests/unit/duck/test_activate.py
 tests/unit/postgres/__init__.py
sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
 prettytable<3.12.1
-sqlglot<25.33,>=24.0.0
+sqlglot<26.1,>=24.0.0
 typing_extensions<5,>=4.8

 [bigquery]
@@ -13,12 +13,13 @@ databricks-sql-connector<4,>=3.6
 duckdb<1.2,>=0.9
 findspark<3,>=2
 mypy<1.14,>=1.10.0
-openai<1.56,>=1.30
+openai<1.58,>=1.30
 pandas-stubs<3,>=2
 pandas<3,>=2
 psycopg<4,>=3.1
 pyarrow<19,>=10
 pyspark<3.6,>=2
+pytest-forked
 pytest-postgresql<7,>=6
 pytest-xdist<3.7,>=3.6
 pytest<8.4,>=8.2.0
@@ -43,7 +44,7 @@ duckdb<1.2,>=0.9
 pandas<3,>=2

 [openai]
-openai<1.56,>=1.30
+openai<1.58,>=1.30

 [pandas]
 pandas<3,>=2
tests/common_fixtures.py
@@ -12,6 +12,7 @@ from pytest_postgresql.janitor import DatabaseJanitor

 from sqlframe.base.session import _BaseSession
 from sqlframe.bigquery.session import BigQuerySession
+from sqlframe.databricks.session import DatabricksSession
 from sqlframe.duckdb.session import DuckDBSession
 from sqlframe.postgres.session import PostgresSession
 from sqlframe.redshift.session import RedshiftSession
@@ -22,6 +23,7 @@ from sqlframe.standalone.dataframe import StandaloneDataFrame
 from sqlframe.standalone.session import StandaloneSession

 if t.TYPE_CHECKING:
+    from databricks.sql import Connection as DatabricksConnection
     from google.cloud.bigquery.dbapi.connection import (
         Connection as BigQueryConnection,
     )
@@ -90,7 +92,7 @@ def pyspark_session(tmp_path_factory, gen_tpcds: t.List[Path]) -> PySparkSession
         .config("spark.sql.warehouse.dir", data_dir)
         .config("spark.driver.extraJavaOptions", f"-Dderby.system.home={derby_dir}")
         .config("spark.sql.shuffle.partitions", 1)
-        .config("spark.sql.session.timeZone", "America/Los_Angeles")
+        .config("spark.sql.session.timeZone", "UTC")
         .master("local[1]")
         .appName("Unit-tests")
         .getOrCreate()
@@ -223,6 +225,7 @@ def snowflake_connection() -> SnowflakeConnection:
 @pytest.fixture
 def snowflake_session(snowflake_connection: SnowflakeConnection) -> SnowflakeSession:
     session = SnowflakeSession(snowflake_connection)
+    session._execute("ALTER SESSION SET TIMEZONE = 'UTC'")
     session._execute("CREATE SCHEMA IF NOT EXISTS db1")
     session._execute("CREATE TABLE IF NOT EXISTS db1.table1 (id INTEGER, name VARCHAR(100))")
     session._execute(
@@ -231,6 +234,31 @@ def snowflake_session(snowflake_connection: SnowflakeConnection) -> SnowflakeSes
     return session


+@pytest.fixture(scope="session")
+def databricks_connection() -> DatabricksConnection:
+    from databricks.sql import connect
+
+    conn = connect(
+        server_hostname=os.environ["SQLFRAME_DATABRICKS_SERVER_HOSTNAME"],
+        http_path=os.environ["SQLFRAME_DATABRICKS_HTTP_PATH"],
+        access_token=os.environ["SQLFRAME_DATABRICKS_ACCESS_TOKEN"],
+        auth_type="access_token",
+        catalog=os.environ["SQLFRAME_DATABRICKS_CATALOG"],
+        schema=os.environ["SQLFRAME_DATABRICKS_SCHEMA"],
+        _disable_pandas=True,
+    )
+    return conn
+
+
+@pytest.fixture
+def databricks_session(databricks_connection: DatabricksConnection) -> DatabricksSession:
+    session = DatabricksSession(databricks_connection)
+    session._execute("CREATE SCHEMA IF NOT EXISTS db1")
+    session._execute("CREATE TABLE IF NOT EXISTS db1.table1 (id INTEGER, name VARCHAR(100))")
+    session._execute("CREATE OR REPLACE FUNCTION db1.add(x INT, y INT) RETURNS INT RETURN x + y")
+    return session
+
+
 @pytest.fixture(scope="module")
 def _employee_data() -> EmployeeData:
     return [
tests/conftest.py
@@ -16,7 +16,7 @@ def pytest_collection_modifyitems(items, *args, **kwargs):
 def set_tz():
     import os

-    os.environ["TZ"] = "US/Pacific"
+    os.environ["TZ"] = "UTC"
     time.tzset()
     yield
     del os.environ["TZ"]