sqlframe 3.35.1__tar.gz → 3.36.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (390)
  1. {sqlframe-3.35.1 → sqlframe-3.36.0}/PKG-INFO +1 -1
  2. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/databricks.md +23 -0
  3. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/snowflake.md +1 -0
  4. {sqlframe-3.35.1 → sqlframe-3.36.0}/mkdocs.yml +2 -2
  5. {sqlframe-3.35.1 → sqlframe-3.36.0}/setup.py +2 -2
  6. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/_version.py +2 -2
  7. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/function_alternatives.py +0 -4
  8. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/functions.py +14 -17
  9. sqlframe-3.36.0/sqlframe/base/group.py +227 -0
  10. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/session.py +51 -2
  11. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/PKG-INFO +1 -1
  12. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/requires.txt +2 -2
  13. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_int_functions.py +7 -3
  14. sqlframe-3.36.0/tests/integration/test_int_grouped_data.py +372 -0
  15. sqlframe-3.35.1/sqlframe/base/group.py +0 -108
  16. sqlframe-3.35.1/tests/integration/test_int_grouped_data.py +0 -165
  17. {sqlframe-3.35.1 → sqlframe-3.36.0}/.github/CODEOWNERS +0 -0
  18. {sqlframe-3.35.1 → sqlframe-3.36.0}/.github/workflows/main.workflow.yaml +0 -0
  19. {sqlframe-3.35.1 → sqlframe-3.36.0}/.github/workflows/publish.workflow.yaml +0 -0
  20. {sqlframe-3.35.1 → sqlframe-3.36.0}/.gitignore +0 -0
  21. {sqlframe-3.35.1 → sqlframe-3.36.0}/.pre-commit-config.yaml +0 -0
  22. {sqlframe-3.35.1 → sqlframe-3.36.0}/.readthedocs.yaml +0 -0
  23. {sqlframe-3.35.1 → sqlframe-3.36.0}/LICENSE +0 -0
  24. {sqlframe-3.35.1 → sqlframe-3.36.0}/Makefile +0 -0
  25. {sqlframe-3.35.1 → sqlframe-3.36.0}/README.md +0 -0
  26. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/add_chatgpt_support.md +0 -0
  27. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  28. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  29. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  30. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  31. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  32. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  33. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  34. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  35. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/but_wait_theres_more.gif +0 -0
  36. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/cake.gif +0 -0
  37. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  38. {sqlframe-3.35.1 → sqlframe-3.36.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  39. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/bigquery.md +0 -0
  40. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/configuration.md +0 -0
  41. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/bigquery.md +0 -0
  42. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/duckdb.md +0 -0
  43. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/images/SF.png +0 -0
  44. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/images/favicon.png +0 -0
  45. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/images/sqlframe_logo.png +0 -0
  46. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/docs/postgres.md +0 -0
  47. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/duckdb.md +0 -0
  48. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/images/SF.png +0 -0
  49. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/images/favicon.png +0 -0
  50. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/images/sqlframe_logo.png +0 -0
  51. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/index.md +0 -0
  52. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/postgres.md +0 -0
  53. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/redshift.md +0 -0
  54. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/requirements.txt +0 -0
  55. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/spark.md +0 -0
  56. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/standalone.md +0 -0
  57. {sqlframe-3.35.1 → sqlframe-3.36.0}/docs/stylesheets/extra.css +0 -0
  58. {sqlframe-3.35.1 → sqlframe-3.36.0}/pytest.ini +0 -0
  59. {sqlframe-3.35.1 → sqlframe-3.36.0}/renovate.json +0 -0
  60. {sqlframe-3.35.1 → sqlframe-3.36.0}/setup.cfg +0 -0
  61. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/LICENSE +0 -0
  62. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/__init__.py +0 -0
  63. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/__init__.py +0 -0
  64. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/_typing.py +0 -0
  65. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/catalog.py +0 -0
  66. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/column.py +0 -0
  67. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/dataframe.py +0 -0
  68. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/decorators.py +0 -0
  69. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/exceptions.py +0 -0
  70. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/mixins/__init__.py +0 -0
  71. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  72. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  73. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  74. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/mixins/table_mixins.py +0 -0
  75. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/normalize.py +0 -0
  76. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/operations.py +0 -0
  77. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/readerwriter.py +0 -0
  78. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/session.py +0 -0
  79. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/table.py +0 -0
  80. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/transforms.py +0 -0
  81. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/types.py +0 -0
  82. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/udf.py +0 -0
  83. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/util.py +0 -0
  84. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/window.py +0 -0
  85. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/__init__.py +0 -0
  86. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/catalog.py +0 -0
  87. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/column.py +0 -0
  88. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/dataframe.py +0 -0
  89. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/functions.py +0 -0
  90. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/functions.pyi +0 -0
  91. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/group.py +0 -0
  92. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/readwriter.py +0 -0
  93. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/session.py +0 -0
  94. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/table.py +0 -0
  95. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/types.py +0 -0
  96. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/udf.py +0 -0
  97. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/bigquery/window.py +0 -0
  98. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/__init__.py +0 -0
  99. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/catalog.py +0 -0
  100. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/column.py +0 -0
  101. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/dataframe.py +0 -0
  102. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/functions.py +0 -0
  103. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/functions.pyi +0 -0
  104. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/group.py +0 -0
  105. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/readwriter.py +0 -0
  106. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/table.py +0 -0
  107. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/types.py +0 -0
  108. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/udf.py +0 -0
  109. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/window.py +0 -0
  110. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/__init__.py +0 -0
  111. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/catalog.py +0 -0
  112. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/column.py +0 -0
  113. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/dataframe.py +0 -0
  114. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/functions.py +0 -0
  115. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/functions.pyi +0 -0
  116. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/group.py +0 -0
  117. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/readwriter.py +0 -0
  118. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/session.py +0 -0
  119. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/table.py +0 -0
  120. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/types.py +0 -0
  121. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/udf.py +0 -0
  122. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/duckdb/window.py +0 -0
  123. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/__init__.py +0 -0
  124. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/catalog.py +0 -0
  125. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/column.py +0 -0
  126. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/dataframe.py +0 -0
  127. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/functions.py +0 -0
  128. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/functions.pyi +0 -0
  129. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/group.py +0 -0
  130. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/readwriter.py +0 -0
  131. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/session.py +0 -0
  132. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/table.py +0 -0
  133. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/types.py +0 -0
  134. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/udf.py +0 -0
  135. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/postgres/window.py +0 -0
  136. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/py.typed +0 -0
  137. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/__init__.py +0 -0
  138. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/catalog.py +0 -0
  139. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/column.py +0 -0
  140. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/dataframe.py +0 -0
  141. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/functions.py +0 -0
  142. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/group.py +0 -0
  143. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/readwriter.py +0 -0
  144. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/session.py +0 -0
  145. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/table.py +0 -0
  146. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/types.py +0 -0
  147. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/udf.py +0 -0
  148. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/redshift/window.py +0 -0
  149. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/__init__.py +0 -0
  150. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/catalog.py +0 -0
  151. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/column.py +0 -0
  152. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/dataframe.py +0 -0
  153. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/functions.py +0 -0
  154. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/functions.pyi +0 -0
  155. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/group.py +0 -0
  156. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/readwriter.py +0 -0
  157. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/session.py +0 -0
  158. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/table.py +0 -0
  159. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/types.py +0 -0
  160. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/udf.py +0 -0
  161. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/snowflake/window.py +0 -0
  162. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/__init__.py +0 -0
  163. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/catalog.py +0 -0
  164. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/column.py +0 -0
  165. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/dataframe.py +0 -0
  166. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/functions.py +0 -0
  167. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/functions.pyi +0 -0
  168. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/group.py +0 -0
  169. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/readwriter.py +0 -0
  170. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/session.py +0 -0
  171. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/table.py +0 -0
  172. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/types.py +0 -0
  173. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/udf.py +0 -0
  174. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/spark/window.py +0 -0
  175. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/__init__.py +0 -0
  176. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/catalog.py +0 -0
  177. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/column.py +0 -0
  178. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/dataframe.py +0 -0
  179. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/functions.py +0 -0
  180. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/group.py +0 -0
  181. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/readwriter.py +0 -0
  182. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/session.py +0 -0
  183. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/table.py +0 -0
  184. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/types.py +0 -0
  185. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/udf.py +0 -0
  186. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/standalone/window.py +0 -0
  187. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/testing/__init__.py +0 -0
  188. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/testing/utils.py +0 -0
  189. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  190. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  191. {sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/top_level.txt +0 -0
  192. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/__init__.py +0 -0
  193. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/common_fixtures.py +0 -0
  194. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/conftest.py +0 -0
  195. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee.csv +0 -0
  196. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee.json +0 -0
  197. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee.parquet +0 -0
  198. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  199. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  200. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  201. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  202. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  203. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  204. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  205. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  206. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  207. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  208. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  209. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  210. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  211. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  212. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/employee_extra_line.csv +0 -0
  213. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/issue_219.csv +0 -0
  214. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  215. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  216. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  217. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  218. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  219. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  220. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  221. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  222. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  223. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  224. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  225. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  226. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  227. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  228. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  229. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  230. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  231. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  232. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  233. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  234. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  235. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  236. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  237. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  238. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  239. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  240. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  241. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  242. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  243. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  244. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  245. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  246. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  247. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  248. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  249. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  250. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  251. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  252. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  253. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  254. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  255. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  256. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  257. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  258. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  259. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  260. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  261. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  262. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  263. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  264. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  265. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  266. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  267. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  268. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  269. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  270. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  271. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  272. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  273. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  274. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  275. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  276. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  277. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  278. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  279. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  280. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  281. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  282. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  283. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  284. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  285. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  286. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  287. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  288. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  289. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  290. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  291. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  292. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  293. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  294. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  295. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  296. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  297. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  298. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  299. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  300. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  301. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  302. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  303. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  304. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  305. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  306. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  307. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  308. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  309. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  310. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  311. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  312. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  313. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/__init__.py +0 -0
  314. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/__init__.py +0 -0
  315. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  316. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  317. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  318. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  319. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/databricks/__init__.py +0 -0
  320. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  321. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
  322. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  323. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/__init__.py +0 -0
  324. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  325. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  326. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  327. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  328. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  329. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  330. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  331. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/postgres/__init__.py +0 -0
  332. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  333. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  334. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  335. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  336. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/redshift/__init__.py +0 -0
  337. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  338. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  339. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  340. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  341. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  342. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  343. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/spark/__init__.py +0 -0
  344. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  345. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  346. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_column.py +0 -0
  347. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  348. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_reader.py +0 -0
  349. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_session.py +0 -0
  350. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_table.py +0 -0
  351. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_engine_writer.py +0 -0
  352. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_int_testing.py +0 -0
  353. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/fixtures.py +0 -0
  354. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/test_int_dataframe.py +0 -0
  355. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  356. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/test_int_session.py +0 -0
  357. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/types.py +0 -0
  358. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/__init__.py +0 -0
  359. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/bigquery/__init__.py +0 -0
  360. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/bigquery/test_activate.py +0 -0
  361. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/conftest.py +0 -0
  362. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/databricks/__init__.py +0 -0
  363. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/databricks/test_activate.py +0 -0
  364. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/duck/__init__.py +0 -0
  365. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/duck/test_activate.py +0 -0
  366. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/duck/test_reader_options.py +0 -0
  367. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/postgres/__init__.py +0 -0
  368. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/postgres/test_activate.py +0 -0
  369. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/redshift/__init__.py +0 -0
  370. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/redshift/test_activate.py +0 -0
  371. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/snowflake/__init__.py +0 -0
  372. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/snowflake/test_activate.py +0 -0
  373. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/spark/__init__.py +0 -0
  374. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/spark/test_activate.py +0 -0
  375. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/spark/test_reader_options.py +0 -0
  376. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/__init__.py +0 -0
  377. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/fixtures.py +0 -0
  378. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_activate.py +0 -0
  379. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_column.py +0 -0
  380. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_dataframe.py +0 -0
  381. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  382. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_functions.py +0 -0
  383. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_session.py +0 -0
  384. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  385. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_types.py +0 -0
  386. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/standalone/test_window.py +0 -0
  387. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/test_activate.py +0 -0
  388. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/test_base_reader_options.py +0 -0
  389. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/test_catalog.py +0 -0
  390. {sqlframe-3.35.1 → sqlframe-3.36.0}/tests/unit/test_util.py +0 -0
{sqlframe-3.35.1 → sqlframe-3.36.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.35.1
+ Version: 3.36.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-3.35.1 → sqlframe-3.36.0}/docs/databricks.md
@@ -103,6 +103,29 @@ A DatabricksSession, which implements the PySpark Session API, is created by pas
  session = SparkSession.builder.getOrCreate()
  ```

+ ### Creating Session with Idle Connections
+
+ The Databricks SQL Connector for Python will automatically close connections that have been idle for a while.
+ This will cause errors when using SQLFrame since it will retry to use a closed connection.
+ To avoid this, you can have SQLFrame create the connection for you and it will automatically reconnect when needed.
+ Note that this will not work with the `activate` function since it requires a `databricks.sql.client.Connection` object.
+
+ ```python
+ import os
+
+ from sqlframe.databricks import DatabricksSession
+
+ session = DatabricksSession(
+     server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+     http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+     access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+     auth_type="access_token",
+     catalog="catalog",
+     schema="schema",
+ )
+ ```
+
+
  ## Example Usage

  ```python
{sqlframe-3.35.1 → sqlframe-3.36.0}/docs/snowflake.md
@@ -544,6 +544,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [mean](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.mean.html)
  * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.min.html)
  * [pivot](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.pivot.html)
+     * Doesn't support multiple aggregate functions on a single pivot
  * [sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.sum.html)

  ### DataFrameReader Class
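In practice, the pivot caveat above means each pivot call should carry a single aggregate. Below is a rough, hypothetical sketch of the distinction (column names and data are invented, and a local DuckDB session is used for convenience rather than Snowflake):

```python
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame(
    [("dotNET", 2012, 10000), ("Java", 2012, 20000), ("dotNET", 2013, 48000)],
    ["course", "year", "earnings"],
)

# Supported: one aggregate function per pivot
df.groupBy("year").pivot("course", ["dotNET", "Java"]).sum("earnings").show()

# Not supported, per the note above: multiple aggregates on a single pivot
# df.groupBy("year").pivot("course").agg(F.sum("earnings"), F.avg("earnings"))
```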
{sqlframe-3.35.1 → sqlframe-3.36.0}/mkdocs.yml
@@ -3,14 +3,14 @@ repo_url: https://github.com/eakmanrq/sqlframe
  repo_name: eakmanrq/sqlframe
  nav:
  - "Overview": index.md
+ - "Configuration": configuration.md
  - "BigQuery": bigquery.md
+ - "Databricks": databricks.md
  - "DuckDB": duckdb.md
  - "Postgres": postgres.md
  - "Spark": spark.md
  - "Standalone": standalone.md
- - "Configuration": configuration.md
  - "Redshift (In-Development)": redshift.md
- - "Databricks (In-Development)": databricks.md
  theme:
  name: material
  logo: images/SF.png
{sqlframe-3.35.1 → sqlframe-3.36.0}/setup.py
@@ -20,7 +20,7 @@ setup(
  python_requires=">=3.9",
  install_requires=[
  "prettytable<4",
- "sqlglot>=24.0.0,<26.26",
+ "sqlglot>=24.0.0,<26.32",
  "typing_extensions",
  ],
  extras_require={
@@ -43,7 +43,7 @@ setup(
  "pytest-postgresql>=6,<8",
  "pytest-xdist>=3.6,<3.8",
  "pre-commit>=3.7,<5",
- "ruff>=0.4.4,<0.12",
+ "ruff>=0.4.4,<0.13",
  "types-psycopg2>=2.9,<3",
  ],
  "docs": [
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/_version.py
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '3.35.1'
- __version_tuple__ = version_tuple = (3, 35, 1)
+ __version__ = version = '3.36.0'
+ __version_tuple__ = version_tuple = (3, 36, 0)
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/function_alternatives.py
@@ -1300,10 +1300,6 @@ def day_with_try_to_timestamp(col: ColumnOrName) -> Column:
      )


- def endswith_with_underscore(str: ColumnOrName, suffix: ColumnOrName) -> Column:
-     return Column.invoke_anonymous_function(str, "ENDS_WITH", suffix)
-
-
  def endswith_using_like(str: ColumnOrName, suffix: ColumnOrName) -> Column:
      concat = get_func_from_session("concat")
      lit = get_func_from_session("lit")
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/base/functions.py
@@ -2288,14 +2288,14 @@ def array_distinct(col: ColumnOrName) -> Column:

  @meta(unsupported_engines=["bigquery", "postgres"])
  def array_intersect(col1: ColumnOrName, col2: ColumnOrName) -> Column:
-     from sqlframe.base.function_alternatives import array_intersect_using_intersection
-
-     session = _get_session()
-
-     if session._is_snowflake:
-         return array_intersect_using_intersection(col1, col2)
-
-     return Column.invoke_anonymous_function(col1, "ARRAY_INTERSECT", Column.ensure_col(col2))
+     return Column(
+         expression.ArrayIntersect(
+             expressions=[
+                 Column.ensure_col(col1).column_expression,
+                 Column.ensure_col(col2).column_expression,
+             ]
+         )
+     )


  @meta(unsupported_engines=["postgres"])
@@ -3226,18 +3226,16 @@ def elt(*inputs: ColumnOrName) -> Column:
  def endswith(str: ColumnOrName, suffix: ColumnOrName) -> Column:
      from sqlframe.base.function_alternatives import (
          endswith_using_like,
-         endswith_with_underscore,
      )

      session = _get_session()

-     if session._is_bigquery or session._is_duckdb:
-         return endswith_with_underscore(str, suffix)
-
      if session._is_postgres:
          return endswith_using_like(str, suffix)

-     return Column.invoke_anonymous_function(str, "endswith", suffix)
+     return Column.invoke_expression_over_column(
+         str, expression.EndsWith, expression=Column.ensure_col(suffix).column_expression
+     )


  @meta(unsupported_engines="*")
@@ -5655,10 +5653,9 @@ def replace(
  ):
      replace = expression.Literal.string("")  # type: ignore

-     if replace is not None:
-         return Column.invoke_anonymous_function(src, "replace", search, replace)
-     else:
-         return Column.invoke_anonymous_function(src, "replace", search)
+     return Column.invoke_expression_over_column(
+         src, expression.Replace, expression=search, replacement=replace
+     )


  @meta()
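The functions.py changes above swap anonymous `ARRAY_INTERSECT`/`endswith`/`replace` calls for SQLGlot expression nodes, so each dialect's generator chooses the final syntax instead of relying on per-engine alternatives like the removed `endswith_with_underscore`. A hedged sketch (not part of the package) of how such nodes render; the exact SQL depends on the installed SQLGlot version and dialect:

```python
from sqlglot import exp

# Nodes comparable to what endswith() and array_intersect() now build internally.
ends_with = exp.EndsWith(this=exp.column("name"), expression=exp.Literal.string("son"))
intersect = exp.ArrayIntersect(expressions=[exp.column("a"), exp.column("b")])

for dialect in ("duckdb", "snowflake", "spark"):
    # Each dialect may render a different function name (e.g. ENDS_WITH vs. ENDSWITH).
    print(dialect, "|", ends_with.sql(dialect=dialect), "|", intersect.sql(dialect=dialect))
```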
sqlframe-3.36.0/sqlframe/base/group.py
@@ -0,0 +1,227 @@
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
+
+ from __future__ import annotations
+
+ import sys
+ import typing as t
+
+ from sqlframe.base.operations import Operation, group_operation, operation
+
+ if sys.version_info >= (3, 11):
+     from typing import Self
+ else:
+     from typing_extensions import Self
+
+ if t.TYPE_CHECKING:
+     from sqlframe.base.column import Column
+     from sqlframe.base.session import DF
+ else:
+     DF = t.TypeVar("DF")
+
+
+ # https://spark.apache.org/docs/latest/sql-ref-syntax-qry-select-groupby.html
+ # https://stackoverflow.com/questions/37975227/what-is-the-difference-between-cube-rollup-and-groupby-operators
+ class _BaseGroupedData(t.Generic[DF]):
+     last_op: Operation
+
+     def __init__(
+         self,
+         df: DF,
+         group_by_cols: t.Union[t.List[Column], t.List[t.List[Column]]],
+         last_op: Operation,
+     ):
+         self._df = df.copy()
+         self.session = df.session
+         self.last_op = last_op
+         self.group_by_cols = group_by_cols
+         self.pivot_col: t.Optional[str] = None
+         self.pivot_values: t.Optional[t.List[t.Any]] = None
+
+     def _get_function_applied_columns(
+         self, func_name: str, cols: t.Tuple[str, ...]
+     ) -> t.List[Column]:
+         from sqlframe.base import functions as F
+
+         func_name = func_name.lower()
+         return [
+             getattr(F, func_name)(name).alias(
+                 self.session._sanitize_column_name(f"{func_name}({name})")
+             )
+             for name in cols
+         ]
+
+     @group_operation(Operation.SELECT)
+     def agg(self, *exprs: t.Union[Column, t.Dict[str, str]]) -> DF:
+         from sqlframe.base.column import Column
+
+         columns = (
+             [
+                 self._get_function_applied_columns(agg_func, (column_name,))[0]
+                 for column_name, agg_func in exprs[0].items()
+             ]
+             if isinstance(exprs[0], dict)
+             else exprs
+         )
+         cols = self._df._ensure_and_normalize_cols(columns)
+
+         # Handle pivot transformation
+         if self.pivot_col is not None and self.pivot_values is not None:
+             from sqlglot import exp
+
+             from sqlframe.base import functions as F
+
+             # Build the pivot expression
+             # First, we need to convert the DataFrame to include the pivot logic
+             df = self._df.copy()
+
+             # Create the base query with group by columns, pivot column, and aggregation columns
+             select_cols = []
+             # Add group by columns
+             for col in self.group_by_cols:
+                 select_cols.append(col.expression)  # type: ignore
+             # Add pivot column
+             select_cols.append(Column.ensure_col(self.pivot_col).expression)
+             # Add the value columns that will be aggregated
+             for agg_col in cols:
+                 # Extract the column being aggregated from the aggregation function
+                 # For example, from SUM(earnings), we want to extract 'earnings'
+                 if (
+                     isinstance(agg_col.column_expression, exp.AggFunc)
+                     and agg_col.column_expression.this
+                 ):
+                     if agg_col.column_expression.this not in select_cols:
+                         select_cols.append(agg_col.column_expression.this)
+
+             # Create the base query
+             base_query = df.expression.select(*select_cols, append=False)
+
+             # Build pivot expression
+             pivot_expressions = []
+             for agg_col in cols:
+                 if isinstance(agg_col.column_expression, exp.AggFunc):
+                     # Clone the aggregation function
+                     # Snowflake doesn't support alias in the pivot, so we need to use the column_expression
+                     agg_func = (
+                         agg_col.column_expression.copy()
+                         if self.session._is_snowflake
+                         else agg_col.expression.copy()
+                     )
+                     pivot_expressions.append(agg_func)
+
+             # Create the IN clause with pivot values
+             in_values = []
+             for v in self.pivot_values:
+                 if isinstance(v, str):
+                     in_values.append(exp.Literal.string(v))
+                 else:
+                     in_values.append(exp.Literal.number(v))
+
+             # Build the pivot node with the fields parameter
+             pivot = exp.Pivot(
+                 expressions=pivot_expressions,
+                 fields=[
+                     exp.In(
+                         this=Column.ensure_col(self.pivot_col).column_expression,
+                         expressions=in_values,
+                     )
+                 ],
+             )
+
+             # Create a subquery with the pivot attached
+             subquery = base_query.subquery()
+             subquery.set("pivots", [pivot])
+
+             # Create the final select from the pivoted subquery
+             expression = exp.select("*").from_(subquery)
+
+             return self._df.copy(expression=expression)
+
+         # Original non-pivot logic
+         if not self.group_by_cols or not isinstance(self.group_by_cols[0], (list, tuple, set)):
+             expression = self._df.expression.group_by(
+                 # User column_expression for group by to avoid alias in group by
+                 *[x.column_expression for x in self.group_by_cols]  # type: ignore
+             ).select(*[x.expression for x in self.group_by_cols + cols], append=False)  # type: ignore
+             group_by_cols = self.group_by_cols
+         else:
+             from sqlglot import exp
+
+             expression = self._df.expression
+             all_grouping_sets = []
+             group_by_cols = []
+             for grouping_set in self.group_by_cols:
+                 all_grouping_sets.append(
+                     exp.Tuple(expressions=[x.column_expression for x in grouping_set])  # type: ignore
+                 )
+                 group_by_cols.extend(grouping_set)  # type: ignore
+             group_by_cols = list(dict.fromkeys(group_by_cols))
+             group_by = exp.Group(grouping_sets=[exp.GroupingSets(expressions=all_grouping_sets)])
+             expression.set("group", group_by)
+             for col in cols:
+                 # Spark supports having an empty grouping_id which means all of the columns but other dialects
+                 # like duckdb don't support this so we expand the grouping_id to include all of the columns
+                 if col.column_expression.this == "GROUPING_ID":
+                     col.column_expression.set("expressions", [x.expression for x in group_by_cols])  # type: ignore
+             expression = expression.select(*[x.expression for x in group_by_cols + cols], append=False)  # type: ignore
+         return self._df.copy(expression=expression)
+
+     def count(self) -> DF:
+         from sqlframe.base import functions as F
+
+         return self.agg(F.count("*").alias("count"))
+
+     def mean(self, *cols: str) -> DF:
+         return self.avg(*cols)
+
+     def avg(self, *cols: str) -> DF:
+         return self.agg(*self._get_function_applied_columns("avg", cols))
+
+     def max(self, *cols: str) -> DF:
+         return self.agg(*self._get_function_applied_columns("max", cols))
+
+     def min(self, *cols: str) -> DF:
+         return self.agg(*self._get_function_applied_columns("min", cols))
+
+     def sum(self, *cols: str) -> DF:
+         return self.agg(*self._get_function_applied_columns("sum", cols))
+
+     def pivot(self, pivot_col: str, values: t.Optional[t.List[t.Any]] = None) -> Self:
+         """
+         Pivots a column of the current DataFrame and perform the specified aggregation.
+
+         There are two versions of the pivot function: one that requires the caller
+         to specify the list of distinct values to pivot on, and one that does not.
+         The latter is more concise but less efficient, because Spark needs to first
+         compute the list of distinct values internally.
+
+         Parameters
+         ----------
+         pivot_col : str
+             Name of the column to pivot.
+         values : list, optional
+             List of values that will be translated to columns in the output DataFrame.
+
+         Returns
+         -------
+         GroupedData
+             Returns self to allow chaining with aggregation methods.
+         """
+         if self.session._is_postgres:
+             raise NotImplementedError(
+                 "Pivot operation is not supported in Postgres. Please create an issue if you would like a workaround implemented."
+             )
+
+         self.pivot_col = pivot_col
+
+         if values is None:
+             # Eagerly compute distinct values
+             from sqlframe.base.column import Column
+
+             distinct_df = self._df.select(pivot_col).distinct()
+             distinct_rows = distinct_df.collect()
+             # Sort to make the results deterministic
+             self.pivot_values = sorted([row[0] for row in distinct_rows])
+         else:
+             self.pivot_values = values
+
+         return self
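The new `pivot` method above stores the pivot column and values on the grouped data, and `agg` then wraps the base query in a SQLGlot `exp.Pivot` node; when no values are passed it first runs `distinct()`/`collect()` to discover and sort them. A brief, hypothetical usage sketch (invented data, local DuckDB session):

```python
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame(
    [("Java", 2012, 20000), ("dotNET", 2012, 5000), ("Java", 2013, 30000)],
    ["course", "year", "earnings"],
)

# Explicit pivot values: no extra query is issued to discover them.
pivoted = df.groupBy("year").pivot("course", ["Java", "dotNET"]).sum("earnings")
print(pivoted.sql())  # inspect the generated PIVOT ... IN (...) query

# Omitting values triggers an eager distinct() + collect() on the pivot column,
# and the sorted results become the output columns.
df.groupBy("year").pivot("course").agg(F.sum("earnings")).show()
```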
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe/databricks/session.py
@@ -1,7 +1,7 @@
  from __future__ import annotations

+ import logging
  import typing as t
- import warnings

  from sqlframe.base.session import _BaseSession
  from sqlframe.databricks.catalog import DatabricksCatalog
@@ -19,6 +19,9 @@ else:
      DatabricksConnection = t.Any


+ logger = logging.getLogger(__name__)
+
+
  class DatabricksSession(
      _BaseSession[  # type: ignore
          DatabricksCatalog,
@@ -43,14 +46,60 @@ class DatabricksSession(
          server_hostname: t.Optional[str] = None,
          http_path: t.Optional[str] = None,
          access_token: t.Optional[str] = None,
+         **kwargs: t.Any,
      ):
          from databricks import sql

+         self._conn_kwargs = (
+             {}
+             if conn
+             else {
+                 "server_hostname": server_hostname,
+                 "http_path": http_path,
+                 "access_token": access_token,
+                 "disable_pandas": True,
+                 **kwargs,
+             }
+         )
+
          if not hasattr(self, "_conn"):
              super().__init__(
-                 conn or sql.connect(server_hostname, http_path, access_token, disable_pandas=True)
+                 conn or sql.connect(**self._conn_kwargs),
              )

+     def _execute(self, sql: str) -> None:
+         from databricks.sql import connect
+         from databricks.sql.exc import DatabaseError, RequestError
+
+         try:
+             super()._execute(sql)
+         except (DatabaseError, RequestError) as e:
+             logger.warning("Failed to execute query")
+             if not self._is_session_expired_error(e):
+                 logger.error("Error is not related to session expiration, re-raising")
+                 raise e
+             if self._conn_kwargs:
+                 logger.info("Attempting to reconnect with provided connection parameters")
+                 self._connection = connect(**self._conn_kwargs)
+                 # Clear the cached cursor
+                 if hasattr(self, "_cur"):
+                     delattr(self, "_cur")
+                 super()._execute(sql)
+             else:
+                 logger.error("No connection parameters provided so could not reconnect")
+                 raise
+
+     def _is_session_expired_error(self, error: Exception) -> bool:
+         error_str = str(error).lower()
+         session_keywords = [
+             "invalid sessionhandle",
+             "session is closed",
+             "session expired",
+             "session not found",
+             "sessionhandle",
+         ]
+         return any(keyword in error_str for keyword in session_keywords)
+
      @classmethod
      def _try_get_map(cls, value: t.Any) -> t.Optional[t.Dict[str, t.Any]]:
          if (
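With the `_execute` retry above, a `DatabricksSession` built from connection parameters (rather than a pre-built `databricks.sql` connection) keeps `_conn_kwargs` populated and can transparently reconnect when the warehouse expires an idle session; passing an existing `conn` object leaves `_conn_kwargs` empty, so such errors are simply re-raised. A hedged sketch of the intended usage (hostname, path, and token are placeholders):

```python
import os

from sqlframe.databricks import DatabricksSession

# Built from parameters, so SQLFrame owns the connection and can rebuild it.
session = DatabricksSession(
    server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
    http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
    access_token=os.environ["ACCESS_TOKEN"],
)

session.sql("SELECT 1").collect()
# ... long idle period; Databricks may close the underlying session ...
session.sql("SELECT 2").collect()  # retried on a fresh connection if the old one expired
```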
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.35.1
+ Version: 3.36.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-3.35.1 → sqlframe-3.36.0}/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
  prettytable<4
- sqlglot<26.26,>=24.0.0
+ sqlglot<26.32,>=24.0.0
  typing_extensions

  [bigquery]
@@ -24,7 +24,7 @@ pytest-forked
  pytest-postgresql<8,>=6
  pytest-xdist<3.8,>=3.6
  pytest<8.5,>=8.2.0
- ruff<0.12,>=0.4.4
+ ruff<0.13,>=0.4.4
  types-psycopg2<3,>=2.9

  [docs]
{sqlframe-3.35.1 → sqlframe-3.36.0}/tests/integration/engines/test_int_functions.py
@@ -3409,6 +3409,10 @@ def test_bitmap_or_agg(get_session_and_func, get_func):

  def test_any_value(get_session_and_func):
      session, any_value = get_session_and_func("any_value")
+     if isinstance(session, PostgresSession):
+         pytest.skip(
+             "any_value is supported in SQLGlot for Postgres but by default assumes Postgres 16+. Tests run against 15. Therefore skipping but should remove this if SQLFrame adds the ability to define Postgres version."
+         )
      df = session.createDataFrame(
          [("c", None), ("a", 2), ("a", 3), ("b", 8), ("b", 2)], ["c1", "c2"]
      )
@@ -3419,9 +3423,9 @@ def test_any_value(get_session_and_func):
          assert non_ignore_nulls == [Row(value="c", value2=2)]
          assert ignore_nulls == [Row(value="c", value2=2)]
      # SQLGlot converts any_value to max
-     elif isinstance(session, PostgresSession):
-         assert non_ignore_nulls == [Row(value="c", value2=8)]
-         assert ignore_nulls == [Row(value="c", value2=8)]
+     # elif isinstance(session, PostgresSession):
+     #     assert non_ignore_nulls == [Row(value="c", value2=8)]
+     #     assert ignore_nulls == [Row(value="c", value2=8)]
      # Always includes nulls
      elif isinstance(session, SnowflakeSession):
          assert non_ignore_nulls == [Row(value="c", value2=None)]