sqlframe 3.13.4__tar.gz → 3.14.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (388) hide show
  1. {sqlframe-3.13.4 → sqlframe-3.14.0}/PKG-INFO +1 -1
  2. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/bigquery.md +212 -0
  3. sqlframe-3.14.0/docs/databricks.md +365 -0
  4. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/duckdb.md +87 -0
  5. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/postgres.md +205 -0
  6. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/redshift.md +95 -0
  7. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/snowflake.md +153 -0
  8. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/_version.py +2 -2
  9. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/dataframe.py +68 -51
  10. sqlframe-3.14.0/sqlframe/base/mixins/table_mixins.py +335 -0
  11. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/readerwriter.py +5 -4
  12. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/session.py +8 -2
  13. sqlframe-3.14.0/sqlframe/base/table.py +238 -0
  14. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/catalog.py +1 -0
  15. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/readwriter.py +2 -1
  16. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/session.py +3 -0
  17. sqlframe-3.14.0/sqlframe/bigquery/table.py +24 -0
  18. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/readwriter.py +2 -1
  19. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/session.py +3 -0
  20. sqlframe-3.14.0/sqlframe/databricks/table.py +24 -0
  21. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/readwriter.py +4 -1
  22. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/session.py +3 -0
  23. sqlframe-3.14.0/sqlframe/duckdb/table.py +16 -0
  24. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/readwriter.py +2 -1
  25. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/session.py +3 -0
  26. sqlframe-3.14.0/sqlframe/postgres/table.py +24 -0
  27. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/readwriter.py +2 -1
  28. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/session.py +3 -0
  29. sqlframe-3.14.0/sqlframe/redshift/table.py +15 -0
  30. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/readwriter.py +2 -1
  31. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/session.py +3 -0
  32. sqlframe-3.14.0/sqlframe/snowflake/table.py +23 -0
  33. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/readwriter.py +2 -1
  34. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/session.py +3 -0
  35. sqlframe-3.14.0/sqlframe/spark/table.py +6 -0
  36. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/readwriter.py +4 -1
  37. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/session.py +3 -0
  38. sqlframe-3.14.0/sqlframe/standalone/table.py +6 -0
  39. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe.egg-info/PKG-INFO +1 -1
  40. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe.egg-info/SOURCES.txt +11 -0
  41. sqlframe-3.14.0/tests/integration/engines/test_engine_table.py +413 -0
  42. sqlframe-3.13.4/docs/databricks.md +0 -155
  43. {sqlframe-3.13.4 → sqlframe-3.14.0}/.github/CODEOWNERS +0 -0
  44. {sqlframe-3.13.4 → sqlframe-3.14.0}/.github/workflows/main.workflow.yaml +0 -0
  45. {sqlframe-3.13.4 → sqlframe-3.14.0}/.github/workflows/publish.workflow.yaml +0 -0
  46. {sqlframe-3.13.4 → sqlframe-3.14.0}/.gitignore +0 -0
  47. {sqlframe-3.13.4 → sqlframe-3.14.0}/.pre-commit-config.yaml +0 -0
  48. {sqlframe-3.13.4 → sqlframe-3.14.0}/.readthedocs.yaml +0 -0
  49. {sqlframe-3.13.4 → sqlframe-3.14.0}/LICENSE +0 -0
  50. {sqlframe-3.13.4 → sqlframe-3.14.0}/Makefile +0 -0
  51. {sqlframe-3.13.4 → sqlframe-3.14.0}/README.md +0 -0
  52. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/add_chatgpt_support.md +0 -0
  53. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  54. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  55. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  56. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  57. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  58. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  59. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  60. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  61. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/but_wait_theres_more.gif +0 -0
  62. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/cake.gif +0 -0
  63. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  64. {sqlframe-3.13.4 → sqlframe-3.14.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  65. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/configuration.md +0 -0
  66. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/bigquery.md +0 -0
  67. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/duckdb.md +0 -0
  68. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/images/SF.png +0 -0
  69. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/images/favicon.png +0 -0
  70. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/images/favicon_old.png +0 -0
  71. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  72. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/images/sqlframe_logo.png +0 -0
  73. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/docs/postgres.md +0 -0
  74. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/images/SF.png +0 -0
  75. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/images/favicon.png +0 -0
  76. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/images/favicon_old.png +0 -0
  77. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/images/sqlframe_diagram.png +0 -0
  78. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/images/sqlframe_logo.png +0 -0
  79. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/index.md +0 -0
  80. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/requirements.txt +0 -0
  81. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/spark.md +0 -0
  82. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/standalone.md +0 -0
  83. {sqlframe-3.13.4 → sqlframe-3.14.0}/docs/stylesheets/extra.css +0 -0
  84. {sqlframe-3.13.4 → sqlframe-3.14.0}/mkdocs.yml +0 -0
  85. {sqlframe-3.13.4 → sqlframe-3.14.0}/pytest.ini +0 -0
  86. {sqlframe-3.13.4 → sqlframe-3.14.0}/renovate.json +0 -0
  87. {sqlframe-3.13.4 → sqlframe-3.14.0}/setup.cfg +0 -0
  88. {sqlframe-3.13.4 → sqlframe-3.14.0}/setup.py +0 -0
  89. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/LICENSE +0 -0
  90. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/__init__.py +0 -0
  91. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/__init__.py +0 -0
  92. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/_typing.py +0 -0
  93. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/catalog.py +0 -0
  94. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/column.py +0 -0
  95. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/decorators.py +0 -0
  96. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/exceptions.py +0 -0
  97. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/function_alternatives.py +0 -0
  98. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/functions.py +0 -0
  99. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/group.py +0 -0
  100. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/mixins/__init__.py +0 -0
  101. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  102. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  103. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  104. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/normalize.py +0 -0
  105. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/operations.py +0 -0
  106. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/transforms.py +0 -0
  107. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/types.py +0 -0
  108. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/udf.py +0 -0
  109. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/util.py +0 -0
  110. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/base/window.py +0 -0
  111. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/__init__.py +0 -0
  112. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/column.py +0 -0
  113. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/dataframe.py +0 -0
  114. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/functions.py +0 -0
  115. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/functions.pyi +0 -0
  116. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/group.py +0 -0
  117. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/types.py +0 -0
  118. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/udf.py +0 -0
  119. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/bigquery/window.py +0 -0
  120. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/__init__.py +0 -0
  121. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/catalog.py +0 -0
  122. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/column.py +0 -0
  123. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/dataframe.py +0 -0
  124. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/functions.py +0 -0
  125. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/functions.pyi +0 -0
  126. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/group.py +0 -0
  127. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/types.py +0 -0
  128. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/udf.py +0 -0
  129. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/databricks/window.py +0 -0
  130. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/__init__.py +0 -0
  131. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/catalog.py +0 -0
  132. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/column.py +0 -0
  133. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/dataframe.py +0 -0
  134. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/functions.py +0 -0
  135. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/functions.pyi +0 -0
  136. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/group.py +0 -0
  137. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/types.py +0 -0
  138. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/udf.py +0 -0
  139. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/duckdb/window.py +0 -0
  140. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/__init__.py +0 -0
  141. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/catalog.py +0 -0
  142. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/column.py +0 -0
  143. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/dataframe.py +0 -0
  144. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/functions.py +0 -0
  145. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/functions.pyi +0 -0
  146. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/group.py +0 -0
  147. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/types.py +0 -0
  148. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/udf.py +0 -0
  149. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/postgres/window.py +0 -0
  150. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/__init__.py +0 -0
  151. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/catalog.py +0 -0
  152. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/column.py +0 -0
  153. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/dataframe.py +0 -0
  154. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/functions.py +0 -0
  155. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/group.py +0 -0
  156. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/types.py +0 -0
  157. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/udf.py +0 -0
  158. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/redshift/window.py +0 -0
  159. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/__init__.py +0 -0
  160. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/catalog.py +0 -0
  161. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/column.py +0 -0
  162. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/dataframe.py +0 -0
  163. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/functions.py +0 -0
  164. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/functions.pyi +0 -0
  165. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/group.py +0 -0
  166. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/types.py +0 -0
  167. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/udf.py +0 -0
  168. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/snowflake/window.py +0 -0
  169. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/__init__.py +0 -0
  170. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/catalog.py +0 -0
  171. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/column.py +0 -0
  172. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/dataframe.py +0 -0
  173. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/functions.py +0 -0
  174. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/functions.pyi +0 -0
  175. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/group.py +0 -0
  176. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/types.py +0 -0
  177. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/udf.py +0 -0
  178. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/spark/window.py +0 -0
  179. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/__init__.py +0 -0
  180. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/catalog.py +0 -0
  181. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/column.py +0 -0
  182. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/dataframe.py +0 -0
  183. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/functions.py +0 -0
  184. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/group.py +0 -0
  185. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/types.py +0 -0
  186. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/udf.py +0 -0
  187. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/standalone/window.py +0 -0
  188. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/testing/__init__.py +0 -0
  189. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe/testing/utils.py +0 -0
  190. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  191. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe.egg-info/requires.txt +0 -0
  192. {sqlframe-3.13.4 → sqlframe-3.14.0}/sqlframe.egg-info/top_level.txt +0 -0
  193. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/__init__.py +0 -0
  194. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/common_fixtures.py +0 -0
  195. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/conftest.py +0 -0
  196. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee.csv +0 -0
  197. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee.json +0 -0
  198. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee.parquet +0 -0
  199. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  200. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  201. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  202. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  203. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  204. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  205. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  206. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  207. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  208. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  209. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  210. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  211. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  212. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  213. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/employee_extra_line.csv +0 -0
  214. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/issue_219.csv +0 -0
  215. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  216. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  217. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  218. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  219. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  220. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  221. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  222. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  223. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  224. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  225. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  226. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  227. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  228. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  229. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  230. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  231. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  232. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  233. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  234. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  235. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  236. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  237. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  238. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  239. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  240. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  241. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  242. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  243. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  244. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  245. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  246. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  247. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  248. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  249. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  250. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  251. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  252. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  253. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  254. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  255. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  256. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  257. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  258. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  259. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  260. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  261. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  262. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  263. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  264. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  265. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  266. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  267. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  268. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  269. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  270. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  271. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  272. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  273. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  274. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  275. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  276. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  277. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  278. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  279. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  280. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  281. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  282. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  283. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  284. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  285. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  286. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  287. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  288. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  289. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  290. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  291. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  292. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  293. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  294. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  295. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  296. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  297. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  298. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  299. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  300. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  301. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  302. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  303. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  304. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  305. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  306. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  307. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  308. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  309. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  310. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  311. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  312. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  313. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  314. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/__init__.py +0 -0
  315. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/__init__.py +0 -0
  316. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  317. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  318. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  319. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  320. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/databricks/__init__.py +0 -0
  321. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  322. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
  323. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  324. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/__init__.py +0 -0
  325. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  326. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  327. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  328. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  329. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  330. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  331. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  332. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/postgres/__init__.py +0 -0
  333. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  334. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  335. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  336. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  337. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/redshift/__init__.py +0 -0
  338. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  339. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  340. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  341. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  342. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  343. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  344. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/spark/__init__.py +0 -0
  345. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  346. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  347. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_engine_column.py +0 -0
  348. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  349. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_engine_reader.py +0 -0
  350. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_engine_session.py +0 -0
  351. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_engine_writer.py +0 -0
  352. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_int_functions.py +0 -0
  353. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/engines/test_int_testing.py +0 -0
  354. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/fixtures.py +0 -0
  355. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/test_int_dataframe.py +0 -0
  356. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  357. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/test_int_grouped_data.py +0 -0
  358. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/integration/test_int_session.py +0 -0
  359. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/types.py +0 -0
  360. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/__init__.py +0 -0
  361. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/bigquery/__init__.py +0 -0
  362. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/bigquery/test_activate.py +0 -0
  363. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/conftest.py +0 -0
  364. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/databricks/__init__.py +0 -0
  365. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/databricks/test_activate.py +0 -0
  366. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/duck/__init__.py +0 -0
  367. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/duck/test_activate.py +0 -0
  368. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/postgres/__init__.py +0 -0
  369. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/postgres/test_activate.py +0 -0
  370. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/redshift/__init__.py +0 -0
  371. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/redshift/test_activate.py +0 -0
  372. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/snowflake/__init__.py +0 -0
  373. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/snowflake/test_activate.py +0 -0
  374. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/spark/__init__.py +0 -0
  375. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/spark/test_activate.py +0 -0
  376. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/__init__.py +0 -0
  377. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/fixtures.py +0 -0
  378. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_activate.py +0 -0
  379. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_column.py +0 -0
  380. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_dataframe.py +0 -0
  381. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  382. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_functions.py +0 -0
  383. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_session.py +0 -0
  384. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  385. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_types.py +0 -0
  386. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/standalone/test_window.py +0 -0
  387. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/test_activate.py +0 -0
  388. {sqlframe-3.13.4 → sqlframe-3.14.0}/tests/unit/test_util.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 3.13.4
3
+ Version: 3.14.0
4
4
  Summary: Turning PySpark Into a Universal DataFrame API
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -599,3 +599,215 @@ See something that you would like to see supported? [Open an issue](https://gith
599
599
  * [rowsBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.rowsBetween.html)
600
600
  * sql
601
601
  * SQLFrame Specific: Get the SQL representation of the WindowSpec
602
+
603
+
604
+ ## Extra Functionality not Present in PySpark
605
+
606
+ SQLFrame supports the following extra functionality not in PySpark
607
+
608
+ ### Table Class
609
+
610
+ SQLFrame provides a `Table` class that supports extra DML operations like `update`, `delete` and `merge`. This class is returned when using the `table` function from the `DataFrameReader` class.
611
+
612
+ ```python
613
+ import google.auth
614
+ from google.api_core import client_info
615
+ from google.oauth2 import service_account
616
+ from google.cloud.bigquery.dbapi import connect
617
+ from sqlframe.bigquery import BigQuerySession
618
+ from sqlframe.base.table import WhenMatched, WhenNotMatched, WhenNotMatchedBySource
619
+
620
+ creds = service_account.Credentials.from_service_account_file("path/to/credentials.json")
621
+
622
+ client = google.cloud.bigquery.Client(
623
+ project="my-project",
624
+ credentials=creds,
625
+ location="us-central1",
626
+ client_info=client_info.ClientInfo(user_agent="sqlframe"),
627
+ )
628
+
629
+ conn = connect(client=client)
630
+ session = BigQuerySession(conn=conn, default_dataset="sqlframe.db1")
631
+
632
+ df_employee = session.createDataFrame(
633
+ [
634
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 37, "store_id": 1},
635
+ {"id": 2, "fname": "John", "lname": "Locke", "age": 65, "store_id": 2},
636
+ {"id": 3, "fname": "Kate", "lname": "Austen", "age": 37, "store_id": 3},
637
+ {"id": 4, "fname": "Claire", "lname": "Littleton", "age": 27, "store_id": 1},
638
+ {"id": 5, "fname": "Hugo", "lname": "Reyes", "age": 29, "store_id": 3},
639
+ ]
640
+ )
641
+
642
+ df_employee.write.mode("overwrite").saveAsTable("employee")
643
+
644
+ table_employee = session.table("employee") # This object is of Type BigQueryTable
645
+ ```
646
+
647
+ #### Update Statement
648
+ The `update` method of the `Table` class is equivalent to the `UPDATE table_name` statement used in standard `sql`.
649
+
650
+ ```python
651
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
652
+ update_expr = table_employee.update(
653
+ set_={"age": table_employee["age"] + 1},
654
+ where=table_employee["id"] == 1,
655
+ )
656
+
657
+ # Executes the update statement
658
+ update_expr.execute()
659
+
660
+ # Show the result
661
+ table_employee.show()
662
+ ```
663
+
664
+ Output:
665
+ ```
666
+ +----+--------+-----------+-----+----------+
667
+ | id | fname | lname | age | store_id |
668
+ +----+--------+-----------+-----+----------+
669
+ | 1 | Jack | Shephard | 38 | 1 |
670
+ | 2 | John | Locke | 65 | 2 |
671
+ | 3 | Kate | Austen | 37 | 3 |
672
+ | 4 | Claire | Littleton | 27 | 1 |
673
+ | 5 | Hugo | Reyes | 29 | 3 |
674
+ +----+--------+-----------+-----+----------+
675
+ ```
676
+ #### Delete Statement
677
+ The `delete` method of the `Table` class is equivalent to the `DELETE FROM table_name` statement used in standard `sql`.
678
+
679
+ ```python
680
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
681
+ delete_expr = table_employee.delete(
682
+ where=table_employee["id"] == 1,
683
+ )
684
+
685
+ # Executes the delete statement
686
+ delete_expr.execute()
687
+
688
+ # Show the result
689
+ table_employee.show()
690
+ ```
691
+
692
+ Output:
693
+ ```
694
+ +----+--------+-----------+-----+----------+
695
+ | id | fname | lname | age | store_id |
696
+ +----+--------+-----------+-----+----------+
697
+ | 2 | John | Locke | 65 | 2 |
698
+ | 3 | Kate | Austen | 37 | 3 |
699
+ | 4 | Claire | Littleton | 27 | 1 |
700
+ | 5 | Hugo | Reyes | 29 | 3 |
701
+ +----+--------+-----------+-----+----------+
702
+ ```
703
+ #### Merge Statement
704
+
705
+ The `merge` method of the `Table` class is equivalent to the `MERGE INTO table_name` statement used in some `sql` engines.
706
+
707
+ ```python
708
+ df_new_employee = session.createDataFrame(
709
+ [
710
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 38, "store_id": 1, "delete": False},
711
+ {"id": 2, "fname": "Cate", "lname": "Austen", "age": 39, "store_id": 5, "delete": False},
712
+ {"id": 5, "fname": "Ugo", "lname": "Reyes", "age": 29, "store_id": 3, "delete": True},
713
+ {"id": 6, "fname": "Sun-Hwa", "lname": "Kwon", "age": 27, "store_id": 5, "delete": False},
714
+ ]
715
+ )
716
+
717
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
718
+ merge_expr = table_employee.merge(
719
+ df_new_employee,
720
+ condition=table_employee["id"] == df_new_employee["id"],
721
+ clauses=[
722
+ WhenMatched(condition=table_employee["fname"] == df_new_employee["fname"]).update(
723
+ set_={
724
+ "age": df_new_employee["age"],
725
+ }
726
+ ),
727
+ WhenMatched(condition=df_new_employee["delete"]).delete(),
728
+ WhenNotMatched().insert(
729
+ values={
730
+ "id": df_new_employee["id"],
731
+ "fname": df_new_employee["fname"],
732
+ "lname": df_new_employee["lname"],
733
+ "age": df_new_employee["age"],
734
+ "store_id": df_new_employee["store_id"],
735
+ }
736
+ ),
737
+ ],
738
+ )
739
+
740
+ # Executes the merge statement
741
+ merge_expr.execute()
742
+
743
+ # Show the result
744
+ table_employee.show()
745
+ ```
746
+
747
+ Output:
748
+ ```
749
+ +----+---------+-----------+-----+----------+
750
+ | id | fname | lname | age | store_id |
751
+ +----+---------+-----------+-----+----------+
752
+ | 1 | Jack | Shephard | 38 | 1 |
753
+ | 2 | John | Locke | 65 | 2 |
754
+ | 3 | Kate | Austen | 37 | 3 |
755
+ | 4 | Claire | Littleton | 27 | 1 |
756
+ | 6 | Sun-Hwa | Kwon | 27 | 5 |
757
+ +----+---------+-----------+-----+----------+
758
+ ```
759
+
760
+
761
+ Some engines like `BigQuery` support an extra clause inside the `merge` statement which is `WHEN NOT MATCHED BY SOURCE THEN DELETE`.
762
+
763
+ ```python
764
+ df_new_employee = session.createDataFrame(
765
+ [
766
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 38, "store_id": 1},
767
+ {"id": 2, "fname": "Cate", "lname": "Austen", "age": 39, "store_id": 5},
768
+ {"id": 5, "fname": "Hugo", "lname": "Reyes", "age": 29, "store_id": 3},
769
+ {"id": 6, "fname": "Sun-Hwa", "lname": "Kwon", "age": 27, "store_id": 5},
770
+ ]
771
+ )
772
+
773
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
774
+ merge_expr = table_employee.merge(
775
+ df_new_employee,
776
+ condition=table_employee["id"] == df_new_employee["id"],
777
+ clauses=[
778
+ WhenMatched(condition=table_employee["fname"] == df_new_employee["fname"]).update(
779
+ set_={
780
+ "age": df_new_employee["age"],
781
+ }
782
+ ),
783
+ WhenNotMatched().insert(
784
+ values={
785
+ "id": df_new_employee["id"],
786
+ "fname": df_new_employee["fname"],
787
+ "lname": df_new_employee["lname"],
788
+ "age": df_new_employee["age"],
789
+ "store_id": df_new_employee["store_id"],
790
+ }
791
+ ),
792
+ WhenNotMatchedBySource().delete(),
793
+ ],
794
+ )
795
+
796
+ # Executes the merge statement
797
+ merge_expr.execute()
798
+
799
+ # Show the result
800
+ table_employee.show()
801
+ ```
802
+
803
+ Output:
804
+ ```
805
+ +----+---------+-----------+-----+----------+
806
+ | id | fname | lname | age | store_id |
807
+ +----+---------+-----------+-----+----------+
808
+ | 1 | Jack | Shephard | 38 | 1 |
809
+ | 2 | John | Locke | 65 | 2 |
810
+ | 5 | Hugo | Reyes | 29 | 3 |
811
+ | 6 | Sun-Hwa | Kwon | 27 | 5 |
812
+ +----+---------+-----------+-----+----------+
813
+ ```
@@ -0,0 +1,365 @@
1
+ from sqlframe.base.table import WhenMatched, WhenNotMatchedBySource
2
+
3
+ # Databricks (In Development)
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install "sqlframe[databricks]"
9
+ ```
10
+
11
+ ## Enabling SQLFrame
12
+
13
+ SQLFrame can be used in two ways:
14
+
15
+ * Directly importing the `sqlframe.databricks` package
16
+ * Using the [activate](./configuration.md#activating-sqlframe) function to allow for continuing to use `pyspark.sql` but have it use SQLFrame behind the scenes.
17
+
18
+ ### Import
19
+
20
+ If converting a PySpark pipeline, all `pyspark.sql` should be replaced with `sqlframe.databricks`.
21
+ In addition, many classes will have a `Databricks` prefix.
22
+ For example, `DatabricksDataFrame` instead of `DataFrame`.
23
+
24
+
25
+ ```python
26
+ # PySpark import
27
+ # from pyspark.sql import SparkSession
28
+ # from pyspark.sql import functions as F
29
+ # from pyspark.sql.dataframe import DataFrame
30
+ # SQLFrame import
31
+ from sqlframe.databricks import DatabricksSession
32
+ from sqlframe.databricks import functions as F
33
+ from sqlframe.databricks import DatabricksDataFrame
34
+ ```
35
+
36
+ ### Activate
37
+
38
+ If you would like to continue using `pyspark.sql` but have it use SQLFrame behind the scenes, you can use the [activate](./configuration.md#activating-sqlframe) function.
39
+
40
+ ```python
41
+ import os
42
+
43
+ from databricks.sql import connect
44
+ from sqlframe import activate
45
+ conn = connect(
46
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
47
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
48
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
49
+ auth_type="access_token",
50
+ catalog="catalog",
51
+ schema="schema",
52
+ )
53
+ activate("databricks", conn=conn)
54
+
55
+ from pyspark.sql import SparkSession
56
+ ```
57
+
58
+ `SparkSession` will now be a SQLFrame `DatabricksSession` object and everything will be run on Databricks directly.
59
+
60
+ See [activate configuration](./configuration.md#activating-sqlframe) for information on how to pass in a connection and config options.
61
+
62
+ ## Creating a Session
63
+
64
+ SQLFrame uses [Databricks SQL Connector for Python](https://github.com/databricks/databricks-sql-python) to connect to Databricks.
65
+ A DatabricksSession, which implements the PySpark Session API, is created by passing in a `databricks.sql.client.Connection` object.
66
+
67
+ === "Import"
68
+
69
+ ```python
70
+ import os
71
+
72
+ from databricks.sql import connect
73
+ from sqlframe.databricks import DatabricksSession
74
+
75
+ conn = connect(
76
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
77
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
78
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
79
+ auth_type="access_token",
80
+ catalog="catalog",
81
+ schema="schema",
82
+ )
83
+ session = DatabricksSession(conn=conn)
84
+ ```
85
+
86
+ === "Activate"
87
+
88
+ ```python
89
+ import os
90
+
91
+ from databricks.sql import connect
92
+ from sqlframe import activate
93
+
94
+ conn = connect(
95
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
96
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
97
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
98
+ auth_type="access_token",
99
+ catalog="catalog",
100
+ schema="schema",
101
+ )
102
+ activate("databricks", conn=conn)
103
+
104
+ from pyspark.sql import SparkSession
105
+ session = SparkSession.builder.getOrCreate()
106
+ ```
107
+
108
+ ## Example Usage
109
+
110
+ ```python
111
+ import os
112
+
113
+ from databricks.sql import connect
114
+ from sqlframe import activate
115
+
116
+ conn = connect(
117
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
118
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
119
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
120
+ auth_type="access_token",
121
+ catalog="catalog",
122
+ schema="schema",
123
+ )
124
+ activate("databricks", conn=conn)
125
+
126
+ from pyspark.sql import SparkSession
127
+ from pyspark.sql import functions as F
128
+
129
+ session = SparkSession.builder.getOrCreate()
130
+ table_path = "samples.nyctaxi.trips"
131
+ # Get columns in the table
132
+ print(session.catalog.listColumns(table_path))
133
+ # Get the number of rides per hour
134
+ (
135
+ session.table(table_path)
136
+ .where(F.col("tpep_pickup_datetime").between("2016-01-01", "2016-01-16"))
137
+ .withColumn("dropoff_hour", F.hour(F.col("tpep_dropoff_datetime")))
138
+ .groupBy("dropoff_hour").count()
139
+ .select(
140
+ F.format_string('%02d:00', F.col("dropoff_hour")).alias("dropoff Hour"),
141
+ F.col("count").alias("number of rides")
142
+ ).orderBy("dropoff Hour")
143
+ .limit(5)
144
+ .show()
145
+ )
146
+ """
147
+ +----------------+-------------------+
148
+ | `dropoff hour` | `number of rides` |
149
+ +----------------+-------------------+
150
+ | 00:00 | 205 |
151
+ | 01:00 | 159 |
152
+ | 02:00 | 117 |
153
+ | 03:00 | 88 |
154
+ | 04:00 | 73 |
155
+ +----------------+-------------------+
156
+ """
157
+ ```
158
+
159
+ ## Extra Functionality not Present in PySpark
160
+
161
+ SQLFrame supports the following extra functionality not in PySpark
162
+
163
+ ### Table Class
164
+
165
+ SQLFrame provides a `Table` class that supports extra DML operations like `update`, `delete` and `merge`. This class is returned when using the `table` function from the `DataFrameReader` class.
166
+
167
+ ```python
168
+ import os
169
+
170
+ from databricks.sql import connect
171
+ from sqlframe.databricks import DatabricksSession
172
+ from sqlframe.base.table import WhenMatched, WhenNotMatched, WhenNotMatchedBySource
173
+
174
+ conn = connect(
175
+ server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
176
+ http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
177
+ access_token=os.environ["ACCESS_TOKEN"], # Replace this with how you get your databricks access token
178
+ auth_type="access_token",
179
+ catalog="catalog",
180
+ schema="schema",
181
+ )
182
+ session = DatabricksSession(conn=conn)
183
+
184
+ df_employee = session.createDataFrame(
185
+ [
186
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 37, "store_id": 1},
187
+ {"id": 2, "fname": "John", "lname": "Locke", "age": 65, "store_id": 2},
188
+ {"id": 3, "fname": "Kate", "lname": "Austen", "age": 37, "store_id": 3},
189
+ {"id": 4, "fname": "Claire", "lname": "Littleton", "age": 27, "store_id": 1},
190
+ {"id": 5, "fname": "Hugo", "lname": "Reyes", "age": 29, "store_id": 3},
191
+ ]
192
+ )
193
+
194
+ df_employee.write.mode("overwrite").saveAsTable("employee")
195
+
196
+ table_employee = session.table("employee") # This object is of Type DatabricksTable
197
+ ```
198
+
199
+ #### Update Statement
200
+ The `update` method of the `Table` class is equivalent to the `UPDATE table_name` statement used in standard `sql`.
201
+
202
+ ```python
203
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
204
+ update_expr = table_employee.update(
205
+ set_={"age": table_employee["age"] + 1},
206
+ where=table_employee["id"] == 1,
207
+ )
208
+
209
+ # Executes the update statement
210
+ update_expr.execute()
211
+
212
+ # Show the result
213
+ table_employee.show()
214
+ ```
215
+
216
+ Output:
217
+ ```
218
+ +----+--------+-----------+-----+----------+
219
+ | id | fname | lname | age | store_id |
220
+ +----+--------+-----------+-----+----------+
221
+ | 1 | Jack | Shephard | 38 | 1 |
222
+ | 2 | John | Locke | 65 | 2 |
223
+ | 3 | Kate | Austen | 37 | 3 |
224
+ | 4 | Claire | Littleton | 27 | 1 |
225
+ | 5 | Hugo | Reyes | 29 | 3 |
226
+ +----+--------+-----------+-----+----------+
227
+ ```
228
+ #### Delete Statement
229
+ The `delete` method of the `Table` class is equivalent to the `DELETE FROM table_name` statement used in standard `sql`.
230
+
231
+ ```python
232
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
233
+ delete_expr = table_employee.delete(
234
+ where=table_employee["id"] == 1,
235
+ )
236
+
237
+ # Executes the delete statement
238
+ delete_expr.execute()
239
+
240
+ # Show the result
241
+ table_employee.show()
242
+ ```
243
+
244
+ Output:
245
+ ```
246
+ +----+--------+-----------+-----+----------+
247
+ | id | fname | lname | age | store_id |
248
+ +----+--------+-----------+-----+----------+
249
+ | 2 | John | Locke | 65 | 2 |
250
+ | 3 | Kate | Austen | 37 | 3 |
251
+ | 4 | Claire | Littleton | 27 | 1 |
252
+ | 5 | Hugo | Reyes | 29 | 3 |
253
+ +----+--------+-----------+-----+----------+
254
+ ```
255
+ #### Merge Statement
256
+
257
+ The `merge` method of the `Table` class is equivalent to the `MERGE INTO table_name` statement used in some `sql` engines.
258
+
259
+ ```python
260
+ df_new_employee = session.createDataFrame(
261
+ [
262
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 38, "store_id": 1, "delete": False},
263
+ {"id": 2, "fname": "Cate", "lname": "Austen", "age": 39, "store_id": 5, "delete": False},
264
+ {"id": 5, "fname": "Ugo", "lname": "Reyes", "age": 29, "store_id": 3, "delete": True},
265
+ {"id": 6, "fname": "Sun-Hwa", "lname": "Kwon", "age": 27, "store_id": 5, "delete": False},
266
+ ]
267
+ )
268
+
269
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
270
+ merge_expr = table_employee.merge(
271
+ df_new_employee,
272
+ condition=table_employee["id"] == df_new_employee["id"],
273
+ clauses=[
274
+ WhenMatched(condition=table_employee["fname"] == df_new_employee["fname"]).update(
275
+ set_={
276
+ "age": df_new_employee["age"],
277
+ }
278
+ ),
279
+ WhenMatched(condition=df_new_employee["delete"]).delete(),
280
+ WhenNotMatched().insert(
281
+ values={
282
+ "id": df_new_employee["id"],
283
+ "fname": df_new_employee["fname"],
284
+ "lname": df_new_employee["lname"],
285
+ "age": df_new_employee["age"],
286
+ "store_id": df_new_employee["store_id"],
287
+ }
288
+ ),
289
+ ],
290
+ )
291
+
292
+ # Executes the merge statement
293
+ merge_expr.execute()
294
+
295
+ # Show the result
296
+ table_employee.show()
297
+ ```
298
+
299
+ Output:
300
+ ```
301
+ +----+---------+-----------+-----+----------+
302
+ | id | fname | lname | age | store_id |
303
+ +----+---------+-----------+-----+----------+
304
+ | 1 | Jack | Shephard | 38 | 1 |
305
+ | 2 | John | Locke | 65 | 2 |
306
+ | 3 | Kate | Austen | 37 | 3 |
307
+ | 4 | Claire | Littleton | 27 | 1 |
308
+ | 6 | Sun-Hwa | Kwon | 27 | 5 |
309
+ +----+---------+-----------+-----+----------+
310
+ ```
311
+
312
+
313
+ Some engines like `Databricks` support an extra clause inside the `merge` statement which is `WHEN NOT MATCHED BY SOURCE THEN DELETE`.
314
+
315
+ ```python
316
+ df_new_employee = session.createDataFrame(
317
+ [
318
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 38, "store_id": 1},
319
+ {"id": 2, "fname": "Cate", "lname": "Austen", "age": 39, "store_id": 5},
320
+ {"id": 5, "fname": "Hugo", "lname": "Reyes", "age": 29, "store_id": 3},
321
+ {"id": 6, "fname": "Sun-Hwa", "lname": "Kwon", "age": 27, "store_id": 5},
322
+ ]
323
+ )
324
+
325
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
326
+ merge_expr = table_employee.merge(
327
+ df_new_employee,
328
+ condition=table_employee["id"] == df_new_employee["id"],
329
+ clauses=[
330
+ WhenMatched(condition=table_employee["fname"] == df_new_employee["fname"]).update(
331
+ set_={
332
+ "age": df_new_employee["age"],
333
+ }
334
+ ),
335
+ WhenNotMatched().insert(
336
+ values={
337
+ "id": df_new_employee["id"],
338
+ "fname": df_new_employee["fname"],
339
+ "lname": df_new_employee["lname"],
340
+ "age": df_new_employee["age"],
341
+ "store_id": df_new_employee["store_id"],
342
+ }
343
+ ),
344
+ WhenNotMatchedBySource().delete(),
345
+ ],
346
+ )
347
+
348
+ # Executes the merge statement
349
+ merge_expr.execute()
350
+
351
+ # Show the result
352
+ table_employee.show()
353
+ ```
354
+
355
+ Output:
356
+ ```
357
+ +----+---------+-----------+-----+----------+
358
+ | id | fname | lname | age | store_id |
359
+ +----+---------+-----------+-----+----------+
360
+ | 1 | Jack | Shephard | 38 | 1 |
361
+ | 2 | John | Locke | 65 | 2 |
362
+ | 5 | Hugo | Reyes | 29 | 3 |
363
+ | 6 | Sun-Hwa | Kwon | 27 | 5 |
364
+ +----+---------+-----------+-----+----------+
365
+ ```
@@ -564,3 +564,90 @@ See something that you would like to see supported? [Open an issue](https://gith
564
564
  * [rowsBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.rowsBetween.html)
565
565
  * sql
566
566
  * SQLFrame Specific: Get the SQL representation of the WindowSpec
567
+
568
+ ## Extra Functionality not Present in PySpark
569
+
570
+ SQLFrame supports the following extra functionality not in PySpark
571
+
572
+ ### Table Class
573
+
574
+ SQLFrame provides a `Table` class that supports extra DML operations like `update` and `delete`. This class is returned when using the `table` function from the `DataFrameReader` class.
575
+
576
+ ```python
577
+ import duckdb
578
+ from sqlframe.duckdb import DuckDBSession
579
+
580
+ conn = duckdb.connect(database=":memory:")
581
+ session = DuckDBSession(conn=conn)
582
+
583
+ df_employee = session.createDataFrame(
584
+ [
585
+ {"id": 1, "fname": "Jack", "lname": "Shephard", "age": 37, "store_id": 1},
586
+ {"id": 2, "fname": "John", "lname": "Locke", "age": 65, "store_id": 2},
587
+ {"id": 3, "fname": "Kate", "lname": "Austen", "age": 37, "store_id": 3},
588
+ {"id": 4, "fname": "Claire", "lname": "Littleton", "age": 27, "store_id": 1},
589
+ {"id": 5, "fname": "Hugo", "lname": "Reyes", "age": 29, "store_id": 3},
590
+ ]
591
+ )
592
+
593
+ df_employee.write.mode("overwrite").saveAsTable("employee")
594
+
595
+ table_employee = session.table("employee") # This object is of Type DuckDBTable
596
+ ```
597
+
598
+ #### Update Statement
599
+ The `update` method of the `Table` class is equivalent to the `UPDATE table_name` statement used in standard `sql`.
600
+
601
+ ```python
602
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
603
+ update_expr = table_employee.update(
604
+ set_={"age": table_employee["age"] + 1},
605
+ where=table_employee["id"] == 1,
606
+ )
607
+
608
+ # Executes the update statement
609
+ update_expr.execute()
610
+
611
+ # Show the result
612
+ table_employee.show()
613
+ ```
614
+
615
+ Output:
616
+ ```
617
+ +----+--------+-----------+-----+----------+
618
+ | id | fname | lname | age | store_id |
619
+ +----+--------+-----------+-----+----------+
620
+ | 1 | Jack | Shephard | 38 | 1 |
621
+ | 2 | John | Locke | 65 | 2 |
622
+ | 3 | Kate | Austen | 37 | 3 |
623
+ | 4 | Claire | Littleton | 27 | 1 |
624
+ | 5 | Hugo | Reyes | 29 | 3 |
625
+ +----+--------+-----------+-----+----------+
626
+ ```
627
+ #### Delete Statement
628
+ The `delete` method of the `Table` class is equivalent to the `DELETE FROM table_name` statement used in standard `sql`.
629
+
630
+ ```python
631
+ # Generates a `LazyExpression` object which can be executed using the `execute` method
632
+ delete_expr = table_employee.delete(
633
+ where=table_employee["id"] == 1,
634
+ )
635
+
636
+ # Executes the delete statement
637
+ delete_expr.execute()
638
+
639
+ # Show the result
640
+ table_employee.show()
641
+ ```
642
+
643
+ Output:
644
+ ```
645
+ +----+--------+-----------+-----+----------+
646
+ | id | fname | lname | age | store_id |
647
+ +----+--------+-----------+-----+----------+
648
+ | 2 | John | Locke | 65 | 2 |
649
+ | 3 | Kate | Austen | 37 | 3 |
650
+ | 4 | Claire | Littleton | 27 | 1 |
651
+ | 5 | Hugo | Reyes | 29 | 3 |
652
+ +----+--------+-----------+-----+----------+
653
+ ```