sqlframe 3.8.2__tar.gz → 3.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (369) hide show
  1. {sqlframe-3.8.2 → sqlframe-3.9.0}/Makefile +2 -1
  2. {sqlframe-3.8.2 → sqlframe-3.9.0}/PKG-INFO +11 -1
  3. {sqlframe-3.8.2 → sqlframe-3.9.0}/README.md +9 -0
  4. sqlframe-3.9.0/docs/databricks.md +157 -0
  5. sqlframe-3.9.0/docs/redshift.md +162 -0
  6. {sqlframe-3.8.2 → sqlframe-3.9.0}/mkdocs.yml +2 -0
  7. {sqlframe-3.8.2 → sqlframe-3.9.0}/setup.py +7 -4
  8. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/__init__.py +1 -0
  9. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/_version.py +2 -2
  10. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/dataframe.py +2 -2
  11. sqlframe-3.9.0/sqlframe/databricks/__init__.py +32 -0
  12. sqlframe-3.9.0/sqlframe/databricks/catalog.py +302 -0
  13. sqlframe-3.9.0/sqlframe/databricks/dataframe.py +69 -0
  14. sqlframe-3.9.0/sqlframe/databricks/functions.py +22 -0
  15. sqlframe-3.9.0/sqlframe/databricks/group.py +14 -0
  16. sqlframe-3.9.0/sqlframe/databricks/readwriter.py +96 -0
  17. sqlframe-3.9.0/sqlframe/databricks/session.py +59 -0
  18. sqlframe-3.9.0/sqlframe/databricks/udf.py +11 -0
  19. sqlframe-3.9.0/sqlframe/spark/functions.pyi +416 -0
  20. sqlframe-3.9.0/sqlframe/standalone/column.py +1 -0
  21. sqlframe-3.9.0/sqlframe/standalone/types.py +1 -0
  22. sqlframe-3.9.0/sqlframe/standalone/window.py +1 -0
  23. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe.egg-info/PKG-INFO +11 -1
  24. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe.egg-info/SOURCES.txt +14 -0
  25. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe.egg-info/requires.txt +7 -4
  26. {sqlframe-3.8.2 → sqlframe-3.9.0}/.github/CODEOWNERS +0 -0
  27. {sqlframe-3.8.2 → sqlframe-3.9.0}/.github/workflows/main.workflow.yaml +0 -0
  28. {sqlframe-3.8.2 → sqlframe-3.9.0}/.github/workflows/publish.workflow.yaml +0 -0
  29. {sqlframe-3.8.2 → sqlframe-3.9.0}/.gitignore +0 -0
  30. {sqlframe-3.8.2 → sqlframe-3.9.0}/.pre-commit-config.yaml +0 -0
  31. {sqlframe-3.8.2 → sqlframe-3.9.0}/.readthedocs.yaml +0 -0
  32. {sqlframe-3.8.2 → sqlframe-3.9.0}/LICENSE +0 -0
  33. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/add_chatgpt_support.md +0 -0
  34. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  35. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  36. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  37. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  38. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  39. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  40. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  41. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  42. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/but_wait_theres_more.gif +0 -0
  43. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/cake.gif +0 -0
  44. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  45. {sqlframe-3.8.2 → sqlframe-3.9.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  46. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/bigquery.md +0 -0
  47. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/configuration.md +0 -0
  48. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/bigquery.md +0 -0
  49. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/duckdb.md +0 -0
  50. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/images/SF.png +0 -0
  51. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/images/favicon.png +0 -0
  52. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/images/favicon_old.png +0 -0
  53. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  54. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/images/sqlframe_logo.png +0 -0
  55. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/docs/postgres.md +0 -0
  56. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/duckdb.md +0 -0
  57. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/images/SF.png +0 -0
  58. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/images/favicon.png +0 -0
  59. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/images/favicon_old.png +0 -0
  60. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/images/sqlframe_diagram.png +0 -0
  61. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/images/sqlframe_logo.png +0 -0
  62. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/index.md +0 -0
  63. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/postgres.md +0 -0
  64. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/requirements.txt +0 -0
  65. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/snowflake.md +0 -0
  66. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/spark.md +0 -0
  67. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/standalone.md +0 -0
  68. {sqlframe-3.8.2 → sqlframe-3.9.0}/docs/stylesheets/extra.css +0 -0
  69. {sqlframe-3.8.2 → sqlframe-3.9.0}/pytest.ini +0 -0
  70. {sqlframe-3.8.2 → sqlframe-3.9.0}/renovate.json +0 -0
  71. {sqlframe-3.8.2 → sqlframe-3.9.0}/setup.cfg +0 -0
  72. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/LICENSE +0 -0
  73. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/__init__.py +0 -0
  74. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/_typing.py +0 -0
  75. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/catalog.py +0 -0
  76. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/column.py +0 -0
  77. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/decorators.py +0 -0
  78. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/exceptions.py +0 -0
  79. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/function_alternatives.py +0 -0
  80. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/functions.py +0 -0
  81. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/group.py +0 -0
  82. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/mixins/__init__.py +0 -0
  83. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  84. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  85. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  86. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/normalize.py +0 -0
  87. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/operations.py +0 -0
  88. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/readerwriter.py +0 -0
  89. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/session.py +0 -0
  90. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/transforms.py +0 -0
  91. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/types.py +0 -0
  92. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/udf.py +0 -0
  93. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/util.py +0 -0
  94. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/base/window.py +0 -0
  95. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/__init__.py +0 -0
  96. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/catalog.py +0 -0
  97. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/column.py +0 -0
  98. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/dataframe.py +0 -0
  99. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/functions.py +0 -0
  100. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/functions.pyi +0 -0
  101. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/group.py +0 -0
  102. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/readwriter.py +0 -0
  103. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/session.py +0 -0
  104. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/types.py +0 -0
  105. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/udf.py +0 -0
  106. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/bigquery/window.py +0 -0
  107. {sqlframe-3.8.2/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/column.py +0 -0
  108. {sqlframe-3.8.2/sqlframe/spark → sqlframe-3.9.0/sqlframe/databricks}/functions.pyi +0 -0
  109. {sqlframe-3.8.2/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/types.py +0 -0
  110. {sqlframe-3.8.2/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/window.py +0 -0
  111. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/__init__.py +0 -0
  112. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/catalog.py +0 -0
  113. {sqlframe-3.8.2/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/column.py +0 -0
  114. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/dataframe.py +0 -0
  115. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/functions.py +0 -0
  116. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/functions.pyi +0 -0
  117. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/group.py +0 -0
  118. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/readwriter.py +0 -0
  119. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/session.py +0 -0
  120. {sqlframe-3.8.2/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/types.py +0 -0
  121. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/duckdb/udf.py +0 -0
  122. {sqlframe-3.8.2/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/window.py +0 -0
  123. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/__init__.py +0 -0
  124. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/catalog.py +0 -0
  125. {sqlframe-3.8.2/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/column.py +0 -0
  126. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/dataframe.py +0 -0
  127. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/functions.py +0 -0
  128. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/functions.pyi +0 -0
  129. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/group.py +0 -0
  130. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/readwriter.py +0 -0
  131. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/session.py +0 -0
  132. {sqlframe-3.8.2/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/types.py +0 -0
  133. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/postgres/udf.py +0 -0
  134. {sqlframe-3.8.2/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/window.py +0 -0
  135. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/__init__.py +0 -0
  136. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/catalog.py +0 -0
  137. {sqlframe-3.8.2/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/column.py +0 -0
  138. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/dataframe.py +0 -0
  139. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/functions.py +0 -0
  140. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/group.py +0 -0
  141. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/readwriter.py +0 -0
  142. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/session.py +0 -0
  143. {sqlframe-3.8.2/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/types.py +0 -0
  144. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/redshift/udf.py +0 -0
  145. {sqlframe-3.8.2/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/window.py +0 -0
  146. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/__init__.py +0 -0
  147. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/catalog.py +0 -0
  148. {sqlframe-3.8.2/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/column.py +0 -0
  149. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/dataframe.py +0 -0
  150. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/functions.py +0 -0
  151. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/functions.pyi +0 -0
  152. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/group.py +0 -0
  153. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/readwriter.py +0 -0
  154. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/session.py +0 -0
  155. {sqlframe-3.8.2/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/types.py +0 -0
  156. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/snowflake/udf.py +0 -0
  157. {sqlframe-3.8.2/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/window.py +0 -0
  158. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/__init__.py +0 -0
  159. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/catalog.py +0 -0
  160. {sqlframe-3.8.2/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/column.py +0 -0
  161. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/dataframe.py +0 -0
  162. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/functions.py +0 -0
  163. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/group.py +0 -0
  164. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/readwriter.py +0 -0
  165. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/session.py +0 -0
  166. {sqlframe-3.8.2/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/types.py +0 -0
  167. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/spark/udf.py +0 -0
  168. {sqlframe-3.8.2/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/window.py +0 -0
  169. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/__init__.py +0 -0
  170. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/catalog.py +0 -0
  171. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/dataframe.py +0 -0
  172. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/functions.py +0 -0
  173. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/group.py +0 -0
  174. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/readwriter.py +0 -0
  175. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/session.py +0 -0
  176. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/standalone/udf.py +0 -0
  177. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/testing/__init__.py +0 -0
  178. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe/testing/utils.py +0 -0
  179. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  180. {sqlframe-3.8.2 → sqlframe-3.9.0}/sqlframe.egg-info/top_level.txt +0 -0
  181. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/__init__.py +0 -0
  182. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/common_fixtures.py +0 -0
  183. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/conftest.py +0 -0
  184. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee.csv +0 -0
  185. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee.json +0 -0
  186. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee.parquet +0 -0
  187. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  188. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  189. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  190. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  191. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  192. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  193. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  194. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  195. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  196. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  197. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  198. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  199. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  200. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  201. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/employee_extra_line.csv +0 -0
  202. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  203. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  204. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  205. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  206. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  207. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  208. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  209. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  210. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  211. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  212. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  213. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  214. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  215. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  216. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  217. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  218. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  219. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  220. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  221. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  222. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  223. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  224. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  225. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  226. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  227. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  228. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  229. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  230. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  231. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  232. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  233. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  234. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  235. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  236. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  237. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  238. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  239. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  240. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  241. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  242. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  243. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  244. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  245. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  246. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  247. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  248. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  249. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  250. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  251. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  252. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  253. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  254. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  255. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  256. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  257. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  258. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  259. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  260. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  261. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  262. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  263. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  264. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  265. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  266. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  267. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  268. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  269. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  270. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  271. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  272. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  273. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  274. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  275. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  276. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  277. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  278. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  279. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  280. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  281. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  282. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  283. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  284. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  285. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  286. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  287. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  288. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  289. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  290. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  291. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  292. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  293. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  294. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  295. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  296. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  297. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  298. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  299. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  300. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  301. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/__init__.py +0 -0
  302. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/__init__.py +0 -0
  303. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  304. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  305. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  306. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  307. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/__init__.py +0 -0
  308. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  309. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  310. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  311. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  312. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  313. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  314. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  315. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/postgres/__init__.py +0 -0
  316. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  317. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  318. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  319. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  320. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/redshift/__init__.py +0 -0
  321. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  322. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  323. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  324. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  325. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  326. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  327. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/spark/__init__.py +0 -0
  328. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  329. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  330. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_engine_column.py +0 -0
  331. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  332. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_engine_reader.py +0 -0
  333. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_engine_session.py +0 -0
  334. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_engine_writer.py +0 -0
  335. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_int_functions.py +0 -0
  336. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/engines/test_int_testing.py +0 -0
  337. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/fixtures.py +0 -0
  338. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/test_int_dataframe.py +0 -0
  339. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  340. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/test_int_grouped_data.py +0 -0
  341. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/integration/test_int_session.py +0 -0
  342. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/types.py +0 -0
  343. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/__init__.py +0 -0
  344. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/bigquery/__init__.py +0 -0
  345. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/bigquery/test_activate.py +0 -0
  346. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/conftest.py +0 -0
  347. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/duck/__init__.py +0 -0
  348. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/duck/test_activate.py +0 -0
  349. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/postgres/__init__.py +0 -0
  350. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/postgres/test_activate.py +0 -0
  351. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/redshift/__init__.py +0 -0
  352. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/redshift/test_activate.py +0 -0
  353. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/snowflake/__init__.py +0 -0
  354. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/snowflake/test_activate.py +0 -0
  355. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/spark/__init__.py +0 -0
  356. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/spark/test_activate.py +0 -0
  357. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/__init__.py +0 -0
  358. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/fixtures.py +0 -0
  359. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_activate.py +0 -0
  360. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_column.py +0 -0
  361. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_dataframe.py +0 -0
  362. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  363. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_functions.py +0 -0
  364. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_session.py +0 -0
  365. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  366. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_types.py +0 -0
  367. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/standalone/test_window.py +0 -0
  368. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/test_activate.py +0 -0
  369. {sqlframe-3.8.2 → sqlframe-3.9.0}/tests/unit/test_util.py +0 -0
@@ -1,5 +1,5 @@
1
1
  install-dev:
2
- pip install -e ".[bigquery,dev,docs,duckdb,pandas,postgres,redshift,snowflake,spark]"
2
+ pip install -e ".[bigquery,dev,docs,duckdb,pandas,postgres,redshift,snowflake,databricks,spark]"
3
3
 
4
4
  install-pre-commit:
5
5
  pre-commit install
@@ -33,6 +33,7 @@ stubs:
33
33
  stubgen sqlframe/duckdb/functions.py --output ./ --inspect-mode
34
34
  stubgen sqlframe/postgres/functions.py --output ./ --inspect-mode
35
35
  stubgen sqlframe/snowflake/functions.py --output ./ --inspect-mode
36
+ stubgen sqlframe/databricks/functions.py --output ./ --inspect-mode
36
37
  stubgen sqlframe/spark/functions.py --output ./ --inspect-mode
37
38
 
38
39
  package:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 3.8.2
3
+ Version: 3.9.0
4
4
  Summary: Turning PySpark Into a Universal DataFrame API
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
17
17
  Requires-Python: >=3.8
18
18
  Description-Content-Type: text/markdown
19
19
  Provides-Extra: bigquery
20
+ Provides-Extra: databricks
20
21
  Provides-Extra: dev
21
22
  Provides-Extra: docs
22
23
  Provides-Extra: duckdb
@@ -42,6 +43,11 @@ SQLFrame currently supports the following engines (many more in development):
42
43
  * [Snowflake](https://sqlframe.readthedocs.io/en/stable/snowflake)
43
44
  * [Spark](https://sqlframe.readthedocs.io/en/stable/spark)
44
45
 
46
+ There are also two engines in development. These engines lack test coverage and robust documentation, but are available for early testing:
47
+
48
+ * [Redshift](https://sqlframe.readthedocs.io/en/stable/redshift)
49
+ * [Databricks](https://sqlframe.readthedocs.io/en/stable/databricks)
50
+
45
51
  SQLFrame also has a "Standalone" session that can be used to generate SQL without any connection to a database engine.
46
52
 
47
53
  * [Standalone](https://sqlframe.readthedocs.io/en/stable/standalone)
@@ -66,6 +72,10 @@ pip install "sqlframe[postgres]"
66
72
  pip install "sqlframe[snowflake]"
67
73
  # Spark
68
74
  pip install "sqlframe[spark]"
75
+ # Redshift (in development)
76
+ pip install "sqlframe[redshift]"
77
+ # Databricks (in development)
78
+ pip install "sqlframe[databricks]"
69
79
  # Standalone
70
80
  pip install sqlframe
71
81
  ```
@@ -12,6 +12,11 @@ SQLFrame currently supports the following engines (many more in development):
12
12
  * [Snowflake](https://sqlframe.readthedocs.io/en/stable/snowflake)
13
13
  * [Spark](https://sqlframe.readthedocs.io/en/stable/spark)
14
14
 
15
+ There are also two engines in development. These engines lack test coverage and robust documentation, but are available for early testing:
16
+
17
+ * [Redshift](https://sqlframe.readthedocs.io/en/stable/redshift)
18
+ * [Databricks](https://sqlframe.readthedocs.io/en/stable/databricks)
19
+
15
20
  SQLFrame also has a "Standalone" session that can be used to generate SQL without any connection to a database engine.
16
21
 
17
22
  * [Standalone](https://sqlframe.readthedocs.io/en/stable/standalone)
@@ -36,6 +41,10 @@ pip install "sqlframe[postgres]"
36
41
  pip install "sqlframe[snowflake]"
37
42
  # Spark
38
43
  pip install "sqlframe[spark]"
44
+ # Redshift (in development)
45
+ pip install "sqlframe[redshift]"
46
+ # Databricks (in development)
47
+ pip install "sqlframe[databricks]"
39
48
  # Standalone
40
49
  pip install sqlframe
41
50
  ```
@@ -0,0 +1,157 @@
1
+
2
+
3
+ # Databricks (In Development)
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install "sqlframe[databricks]"
9
+ ```
10
+
11
+ ## Enabling SQLFrame
12
+
13
+ SQLFrame can be used in two ways:
14
+
15
+ * Directly importing the `sqlframe.databricks` package
16
+ * Using the [activate](./configuration.md#activating-sqlframe) function to allow for continuing to use `pyspark.sql` but have it use SQLFrame behind the scenes.
17
+
18
+ ### Import
19
+
20
+ If converting a PySpark pipeline, all `pyspark.sql` should be replaced with `sqlframe.databricks`.
21
+ In addition, many classes will have a `Databricks` prefix.
22
+ For example, `DatabricksDataFrame` instead of `DataFrame`.
23
+
24
+
25
+ ```python
26
+ # PySpark import
27
+ # from pyspark.sql import SparkSession
28
+ # from pyspark.sql import functions as F
29
+ # from pyspark.sql.dataframe import DataFrame
30
+ # SQLFrame import
31
+ from sqlframe.databricks import DatabricksSession
32
+ from sqlframe.databricks import functions as F
33
+ from sqlframe.databricks import DatabricksDataFrame
34
+ ```
35
+
36
+ ### Activate
37
+
38
+ If you would like to continue using `pyspark.sql` but have it use SQLFrame behind the scenes, you can use the [activate](./configuration.md#activating-sqlframe) function.
39
+
40
+ ```python
41
+ import os
42
+
43
+ from databricks.sql import connect
44
+ from sqlframe import activate
45
+ conn = connect(
46
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
47
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
48
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
49
+ auth_type="access_token",
50
+ catalog="catalog",
51
+ schema="schema",
52
+ )
53
+ activate("databricks", conn=conn)
54
+
55
+ from pyspark.sql import SparkSession
56
+ ```
57
+
58
+ `SparkSession` will now be a SQLFrame `DatabricksSession` object and everything will be run on Databricks directly.
59
+
60
+ See [activate configuration](./configuration.md#activating-sqlframe) for information on how to pass in a connection and config options.
61
+
62
+ ## Creating a Session
63
+
64
+ SQLFrame uses [Databricks SQL Connector for Python](https://github.com/databricks/databricks-sql-python) to connect to Databricks.
65
+ A DatabricksSession, which implements the PySpark Session API, is created by passing in a `databricks.sql.client.Connection` object.
66
+
67
+ === "Import"
68
+
69
+ ```python
70
+ import os
71
+
72
+ from databricks.sql import connect
73
+ from sqlframe.databricks import DatabricksSession
74
+
75
+ conn = connect(
76
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
77
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
78
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
79
+ auth_type="access_token",
80
+ catalog="catalog",
81
+ schema="schema",
82
+ )
83
+ session = DatabricksSession(conn=conn)
84
+ ```
85
+
86
+ === "Activate"
87
+
88
+ ```python
89
+ import os
90
+
91
+ from databricks.sql import connect
92
+ from sqlframe import activate
93
+
94
+ conn = connect(
95
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
96
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
97
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
98
+ auth_type="access_token",
99
+ catalog="catalog",
100
+ schema="schema",
101
+ )
102
+ activate("databricks", conn=conn)
103
+
104
+ from pyspark.sql import SparkSession
105
+ session = SparkSession.builder.getOrCreate()
106
+ ```
107
+
108
+ ## Example Usage
109
+
110
+ ```python
111
+ import os
112
+
113
+ from databricks.sql import connect
114
+ from sqlframe import activate
115
+
116
+ conn = connect(
117
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
118
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
119
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
120
+ auth_type="access_token",
121
+ catalog="catalog",
122
+ schema="schema",
123
+ )
124
+ activate("databricks", conn=conn)
125
+
126
+ from pyspark.sql import SparkSession
127
+ from pyspark.sql import functions as F
128
+
129
+ session = SparkSession.builder.getOrCreate()
130
+ table_path = "samples.nyctaxi.trips"
131
+ # Get columns in the table
132
+ print(session.catalog.listColumns(table_path))
133
+ # Get the number of rides per hour
134
+ (
135
+ session.table(table_path)
136
+ .where(F.col("tpep_pickup_datetime").between("2016-01-01", "2016-01-16"))
137
+ .withColumn("dropoff_hour", F.hour(F.col("tpep_dropoff_datetime")))
138
+ .groupBy("dropoff_hour").count()
139
+ .select(
140
+ F.format_string('%02d:00', F.col("dropoff_hour")).alias("dropoff Hour"),
141
+ F.col("count").alias("number of rides")
142
+ ).orderBy("dropoff Hour")
143
+ .limit(5)
144
+ .show()
145
+ )
146
+ """
147
+ +----------------+-------------------+
148
+ | `dropoff hour` | `number of rides` |
149
+ +----------------+-------------------+
150
+ | 00:00 | 205 |
151
+ | 01:00 | 159 |
152
+ | 02:00 | 117 |
153
+ | 03:00 | 88 |
154
+ | 04:00 | 73 |
155
+ +----------------+-------------------+
156
+ """
157
+ ```
@@ -0,0 +1,162 @@
1
+ # Redshift (In Development)
2
+
3
+ ## Installation
4
+
5
+ ```bash
6
+ pip install "sqlframe[redshift]"
7
+ ```
8
+
9
+ ## Enabling SQLFrame
10
+
11
+ SQLFrame can be used in two ways:
12
+
13
+ * Directly importing the `sqlframe.redshift` package
14
+ * Using the [activate](./configuration.md#activating-sqlframe) function to allow for continuing to use `pyspark.sql` but have it use SQLFrame behind the scenes.
15
+
16
+ ### Import
17
+
18
+ If converting a PySpark pipeline, all `pyspark.sql` should be replaced with `sqlframe.redshift`.
19
+ In addition, many classes will have a `Redshift` prefix.
20
+ For example, `RedshiftDataFrame` instead of `DataFrame`.
21
+
22
+
23
+ ```python
24
+ # PySpark import
25
+ # from pyspark.sql import SparkSession
26
+ # from pyspark.sql import functions as F
27
+ # from pyspark.sql.dataframe import DataFrame
28
+ # SQLFrame import
29
+ from sqlframe.redshift import RedshiftSession
30
+ from sqlframe.redshift import functions as F
31
+ from sqlframe.redshift import RedshiftDataFrame
32
+ ```
33
+
34
+ ### Activate
35
+
36
+ If you would like to continue using `pyspark.sql` but have it use SQLFrame behind the scenes, you can use the [activate](./configuration.md#activating-sqlframe) function.
37
+
38
+ ```python
39
+ import os
40
+
41
+ from redshift_connector import connect
42
+ from sqlframe import activate
43
+ conn = connect(
44
+ user="user",
45
+ password=os.environ["PASSWORD"], # Replace this with how you get your password
46
+ database="database",
47
+ host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
48
+ port=5439,
49
+ )
50
+ activate("redshift", conn=conn)
51
+
52
+ from pyspark.sql import SparkSession
53
+ ```
54
+
55
+ `SparkSession` will now be a SQLFrame `RedshiftSession` object and everything will be run on Redshift directly.
56
+
57
+ See [activate configuration](./configuration.md#activating-sqlframe) for information on how to pass in a connection and config options.
58
+
59
+ ## Creating a Session
60
+
61
+ SQLFrame uses [Redshift DBAPI Python Connector](https://github.com/aws/amazon-redshift-python-driver) to connect to Redshift.
62
+ A RedshiftSession, which implements the PySpark Session API, is created by passing in a `redshift_connector.Connection` object.
63
+
64
+ === "Import"
65
+
66
+ ```python
67
+ import os
68
+
69
+ from redshift_connector import connect
70
+ from sqlframe.redshift import RedshiftSession
71
+
72
+ conn = connect(
73
+ user="user",
74
+ password=os.environ["PASSWORD"], # Replace this with how you get your password
75
+ database="database",
76
+ host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
77
+ port=5439,
78
+ )
79
+ session = RedshiftSession(conn=conn)
80
+ ```
81
+
82
+ === "Activate"
83
+
84
+ ```python
85
+ import os
86
+
87
+ from redshift_connector import connect
88
+ from sqlframe import activate
89
+
90
+ conn = connect(
91
+ user="user",
92
+ password=os.environ["PASSWORD"], # Replace this with how you get your password
93
+ database="database",
94
+ host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
95
+ port=5439,
96
+ )
97
+ activate("redshift", conn=conn)
98
+
99
+ from pyspark.sql import SparkSession
100
+ session = SparkSession.builder.getOrCreate()
101
+ ```
102
+
103
+ ## Example Usage
104
+
105
+ ```python
106
+ import os
107
+
108
+ from redshift_connector import connect
109
+ from sqlframe import activate
110
+
111
+ conn = connect(
112
+ user="user",
113
+ password=os.environ["PASSWORD"], # Replace this with how you get your password
114
+ database="database",
115
+ host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
116
+ port=5439,
117
+ )
118
+ activate("redshift", conn=conn)
119
+
120
+ from pyspark.sql import SparkSession
121
+ from pyspark.sql import functions as F
122
+
123
+ session = SparkSession.builder.getOrCreate()
124
+ table_path = '"catalog.db.table"'
125
+ # Get columns in the table
126
+ print(session.catalog.listColumns(table_path))
127
+ # Get the top 5 years with the greatest year-over-year % change in new families with a single child
128
+ (
129
+ session.table(table_path)
130
+ .where(F.col("ever_born") == 1)
131
+ .groupBy("year")
132
+ .agg(F.count("*").alias("num_single_child_families"))
133
+ .withColumn(
134
+ "last_year_num_single_child_families",
135
+ F.lag(F.col("num_single_child_families"), 1).over(Window.orderBy("year"))
136
+ )
137
+ .withColumn(
138
+ "percent_change",
139
+ (F.col("num_single_child_families") - F.col("last_year_num_single_child_families"))
140
+ / F.col("last_year_num_single_child_families")
141
+ )
142
+ .orderBy(F.abs(F.col("percent_change")).desc())
143
+ .select(
144
+ F.col("year").alias("year"),
145
+ F.format_number("num_single_child_families", 0).alias("new families single child"),
146
+ F.format_number(F.col("percent_change") * 100, 2).alias("percent change"),
147
+ )
148
+ .limit(5)
149
+ .show()
150
+ )
151
+ """
152
+ +------+---------------------------+----------------+
153
+ | year | new families single child | percent change |
154
+ +------+---------------------------+----------------+
155
+ | 1989 | 1,650,246 | 25.02 |
156
+ | 1974 | 783,448 | 14.49 |
157
+ | 1977 | 1,057,379 | 11.38 |
158
+ | 1985 | 1,308,476 | 11.15 |
159
+ | 1975 | 868,985 | 10.92 |
160
+ +------+---------------------------+----------------+
161
+ """
162
+ ```
@@ -9,6 +9,8 @@ nav:
9
9
  - "Spark": spark.md
10
10
  - "Standalone": standalone.md
11
11
  - "Configuration": configuration.md
12
+ - "Redshift (In-Development)": redshift.md
13
+ - "Databricks (In-Development)": databricks.md
12
14
  theme:
13
15
  name: material
14
16
  logo: images/SF.png
@@ -20,7 +20,7 @@ setup(
20
20
  python_requires=">=3.8",
21
21
  install_requires=[
22
22
  "prettytable<3.12.1",
23
- "sqlglot>=24.0.0,<25.32",
23
+ "sqlglot>=24.0.0,<25.33",
24
24
  "typing_extensions>=4.8,<5",
25
25
  ],
26
26
  extras_require={
@@ -32,7 +32,7 @@ setup(
32
32
  "duckdb>=0.9,<1.2",
33
33
  "findspark>=2,<3",
34
34
  "mypy>=1.10.0,<1.14",
35
- "openai>=1.30,<1.55",
35
+ "openai>=1.30,<1.56",
36
36
  "pandas>=2,<3",
37
37
  "pandas-stubs>=2,<3",
38
38
  "psycopg>=3.1,<4",
@@ -43,7 +43,7 @@ setup(
43
43
  "pytest-xdist>=3.6,<3.7",
44
44
  "pre-commit>=3.5;python_version=='3.8'",
45
45
  "pre-commit>=3.7,<4.1;python_version>='3.9'",
46
- "ruff>=0.4.4,<0.8",
46
+ "ruff>=0.4.4,<0.9",
47
47
  "types-psycopg2>=2.9,<3",
48
48
  ],
49
49
  "docs": [
@@ -58,7 +58,7 @@ setup(
58
58
  "pandas>=2,<3",
59
59
  ],
60
60
  "openai": [
61
- "openai>=1.30,<1.55",
61
+ "openai>=1.30,<1.56",
62
62
  ],
63
63
  "pandas": [
64
64
  "pandas>=2,<3",
@@ -75,6 +75,9 @@ setup(
75
75
  "spark": [
76
76
  "pyspark>=2,<3.6",
77
77
  ],
78
+ "databricks": [
79
+ "databricks-sql-connector>=3.6,<4",
80
+ ],
78
81
  },
79
82
  classifiers=[
80
83
  "Development Status :: 5 - Production/Stable",
@@ -15,6 +15,7 @@ ENGINE_TO_PREFIX = {
15
15
  "redshift": "Redshift",
16
16
  "snowflake": "Snowflake",
17
17
  "spark": "Spark",
18
+ "databricks": "Databricks",
18
19
  "standalone": "Standalone",
19
20
  }
20
21
 
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '3.8.2'
16
- __version_tuple__ = version_tuple = (3, 8, 2)
15
+ __version__ = version = '3.9.0'
16
+ __version_tuple__ = version_tuple = (3, 9, 0)
@@ -629,10 +629,10 @@ class _BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
629
629
  # We will drop the "view" if it exists before running the cache table
630
630
  output_expressions.append(exp.Drop(this=cache_table, exists=True, kind="VIEW"))
631
631
  elif expression_type == exp.Create:
632
- expression = df.output_expression_container.copy()
632
+ expression = df.output_expression_container.copy() # type: ignore
633
633
  expression.set("expression", select_expression)
634
634
  elif expression_type == exp.Insert:
635
- expression = df.output_expression_container.copy()
635
+ expression = df.output_expression_container.copy() # type: ignore
636
636
  select_without_ctes = select_expression.copy()
637
637
  select_without_ctes.set("with", None)
638
638
  expression.set("expression", select_without_ctes)
@@ -0,0 +1,32 @@
1
+ from sqlframe.databricks.catalog import DatabricksCatalog
2
+ from sqlframe.databricks.column import Column
3
+ from sqlframe.databricks.dataframe import (
4
+ DatabricksDataFrame,
5
+ DatabricksDataFrameNaFunctions,
6
+ DatabricksDataFrameStatFunctions,
7
+ )
8
+ from sqlframe.databricks.group import DatabricksGroupedData
9
+ from sqlframe.databricks.readwriter import (
10
+ DatabricksDataFrameReader,
11
+ DatabricksDataFrameWriter,
12
+ )
13
+ from sqlframe.databricks.session import DatabricksSession
14
+ from sqlframe.databricks.types import Row
15
+ from sqlframe.databricks.udf import DatabricksUDFRegistration
16
+ from sqlframe.databricks.window import Window, WindowSpec
17
+
18
+ __all__ = [
19
+ "Column",
20
+ "Row",
21
+ "DatabricksCatalog",
22
+ "DatabricksDataFrame",
23
+ "DatabricksDataFrameNaFunctions",
24
+ "DatabricksGroupedData",
25
+ "DatabricksDataFrameReader",
26
+ "DatabricksDataFrameWriter",
27
+ "DatabricksSession",
28
+ "DatabricksDataFrameStatFunctions",
29
+ "DatabricksUDFRegistration",
30
+ "Window",
31
+ "WindowSpec",
32
+ ]