sqlframe 3.21.0__tar.gz → 3.22.0__tar.gz

Files changed (388)
  1. {sqlframe-3.21.0 → sqlframe-3.22.0}/PKG-INFO +1 -1
  2. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/duckdb.md +1 -1
  3. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/snowflake.md +1 -1
  4. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/_version.py +2 -2
  5. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/dataframe.py +25 -16
  6. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/function_alternatives.py +0 -4
  7. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/functions.py +24 -5
  8. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/readwriter_mixins.py +4 -0
  9. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/readerwriter.py +40 -0
  10. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/util.py +20 -5
  11. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/readwriter.py +1 -0
  12. sqlframe-3.22.0/sqlframe/spark/readwriter.py +163 -0
  13. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/PKG-INFO +1 -1
  14. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +8 -8
  15. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +8 -8
  16. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +2 -2
  17. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_dataframe.py +36 -1
  18. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_reader.py +24 -7
  19. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_writer.py +31 -16
  20. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_int_functions.py +2 -6
  21. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_dataframe.py +1 -1
  22. sqlframe-3.21.0/sqlframe/spark/readwriter.py +0 -30
  23. {sqlframe-3.21.0 → sqlframe-3.22.0}/.github/CODEOWNERS +0 -0
  24. {sqlframe-3.21.0 → sqlframe-3.22.0}/.github/workflows/main.workflow.yaml +0 -0
  25. {sqlframe-3.21.0 → sqlframe-3.22.0}/.github/workflows/publish.workflow.yaml +0 -0
  26. {sqlframe-3.21.0 → sqlframe-3.22.0}/.gitignore +0 -0
  27. {sqlframe-3.21.0 → sqlframe-3.22.0}/.pre-commit-config.yaml +0 -0
  28. {sqlframe-3.21.0 → sqlframe-3.22.0}/.readthedocs.yaml +0 -0
  29. {sqlframe-3.21.0 → sqlframe-3.22.0}/LICENSE +0 -0
  30. {sqlframe-3.21.0 → sqlframe-3.22.0}/Makefile +0 -0
  31. {sqlframe-3.21.0 → sqlframe-3.22.0}/README.md +0 -0
  32. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/add_chatgpt_support.md +0 -0
  33. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  34. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  35. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  36. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  37. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  38. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  39. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  40. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  41. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/but_wait_theres_more.gif +0 -0
  42. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/cake.gif +0 -0
  43. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  44. {sqlframe-3.21.0 → sqlframe-3.22.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  45. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/bigquery.md +0 -0
  46. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/configuration.md +0 -0
  47. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/databricks.md +0 -0
  48. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/bigquery.md +0 -0
  49. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/duckdb.md +0 -0
  50. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/images/SF.png +0 -0
  51. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/images/favicon.png +0 -0
  52. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/images/favicon_old.png +0 -0
  53. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  54. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/images/sqlframe_logo.png +0 -0
  55. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/docs/postgres.md +0 -0
  56. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/images/SF.png +0 -0
  57. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/images/favicon.png +0 -0
  58. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/images/favicon_old.png +0 -0
  59. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/images/sqlframe_diagram.png +0 -0
  60. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/images/sqlframe_logo.png +0 -0
  61. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/index.md +0 -0
  62. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/postgres.md +0 -0
  63. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/redshift.md +0 -0
  64. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/requirements.txt +0 -0
  65. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/spark.md +0 -0
  66. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/standalone.md +0 -0
  67. {sqlframe-3.21.0 → sqlframe-3.22.0}/docs/stylesheets/extra.css +0 -0
  68. {sqlframe-3.21.0 → sqlframe-3.22.0}/mkdocs.yml +0 -0
  69. {sqlframe-3.21.0 → sqlframe-3.22.0}/pytest.ini +0 -0
  70. {sqlframe-3.21.0 → sqlframe-3.22.0}/renovate.json +0 -0
  71. {sqlframe-3.21.0 → sqlframe-3.22.0}/setup.cfg +0 -0
  72. {sqlframe-3.21.0 → sqlframe-3.22.0}/setup.py +0 -0
  73. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/LICENSE +0 -0
  74. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/__init__.py +0 -0
  75. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/__init__.py +0 -0
  76. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/_typing.py +0 -0
  77. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/catalog.py +0 -0
  78. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/column.py +0 -0
  79. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/decorators.py +0 -0
  80. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/exceptions.py +0 -0
  81. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/group.py +0 -0
  82. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/__init__.py +0 -0
  83. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  84. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  85. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/table_mixins.py +0 -0
  86. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/normalize.py +0 -0
  87. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/operations.py +0 -0
  88. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/session.py +0 -0
  89. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/table.py +0 -0
  90. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/transforms.py +0 -0
  91. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/types.py +0 -0
  92. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/udf.py +0 -0
  93. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/window.py +0 -0
  94. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/__init__.py +0 -0
  95. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/catalog.py +0 -0
  96. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/column.py +0 -0
  97. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/dataframe.py +0 -0
  98. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/functions.py +0 -0
  99. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/functions.pyi +0 -0
  100. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/group.py +0 -0
  101. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/readwriter.py +0 -0
  102. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/session.py +0 -0
  103. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/table.py +0 -0
  104. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/types.py +0 -0
  105. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/udf.py +0 -0
  106. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/bigquery/window.py +0 -0
  107. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/__init__.py +0 -0
  108. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/catalog.py +0 -0
  109. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/column.py +0 -0
  110. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/dataframe.py +0 -0
  111. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/functions.py +0 -0
  112. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/functions.pyi +0 -0
  113. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/group.py +0 -0
  114. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/readwriter.py +0 -0
  115. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/session.py +0 -0
  116. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/table.py +0 -0
  117. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/types.py +0 -0
  118. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/udf.py +0 -0
  119. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/databricks/window.py +0 -0
  120. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/__init__.py +0 -0
  121. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/catalog.py +0 -0
  122. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/column.py +0 -0
  123. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/dataframe.py +0 -0
  124. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/functions.py +0 -0
  125. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/functions.pyi +0 -0
  126. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/group.py +0 -0
  127. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/session.py +0 -0
  128. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/table.py +0 -0
  129. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/types.py +0 -0
  130. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/udf.py +0 -0
  131. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/window.py +0 -0
  132. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/__init__.py +0 -0
  133. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/catalog.py +0 -0
  134. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/column.py +0 -0
  135. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/dataframe.py +0 -0
  136. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/functions.py +0 -0
  137. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/functions.pyi +0 -0
  138. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/group.py +0 -0
  139. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/readwriter.py +0 -0
  140. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/session.py +0 -0
  141. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/table.py +0 -0
  142. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/types.py +0 -0
  143. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/udf.py +0 -0
  144. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/postgres/window.py +0 -0
  145. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/__init__.py +0 -0
  146. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/catalog.py +0 -0
  147. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/column.py +0 -0
  148. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/dataframe.py +0 -0
  149. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/functions.py +0 -0
  150. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/group.py +0 -0
  151. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/readwriter.py +0 -0
  152. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/session.py +0 -0
  153. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/table.py +0 -0
  154. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/types.py +0 -0
  155. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/udf.py +0 -0
  156. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/redshift/window.py +0 -0
  157. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/__init__.py +0 -0
  158. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/catalog.py +0 -0
  159. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/column.py +0 -0
  160. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/dataframe.py +0 -0
  161. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/functions.py +0 -0
  162. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/functions.pyi +0 -0
  163. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/group.py +0 -0
  164. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/readwriter.py +0 -0
  165. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/session.py +0 -0
  166. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/table.py +0 -0
  167. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/types.py +0 -0
  168. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/udf.py +0 -0
  169. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/snowflake/window.py +0 -0
  170. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/__init__.py +0 -0
  171. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/catalog.py +0 -0
  172. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/column.py +0 -0
  173. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/dataframe.py +0 -0
  174. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/functions.py +0 -0
  175. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/functions.pyi +0 -0
  176. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/group.py +0 -0
  177. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/session.py +0 -0
  178. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/table.py +0 -0
  179. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/types.py +0 -0
  180. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/udf.py +0 -0
  181. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/spark/window.py +0 -0
  182. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/__init__.py +0 -0
  183. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/catalog.py +0 -0
  184. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/column.py +0 -0
  185. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/dataframe.py +0 -0
  186. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/functions.py +0 -0
  187. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/group.py +0 -0
  188. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/readwriter.py +0 -0
  189. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/session.py +0 -0
  190. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/table.py +0 -0
  191. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/types.py +0 -0
  192. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/udf.py +0 -0
  193. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/standalone/window.py +0 -0
  194. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/testing/__init__.py +0 -0
  195. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/testing/utils.py +0 -0
  196. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  197. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  198. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/requires.txt +0 -0
  199. {sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/top_level.txt +0 -0
  200. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/__init__.py +0 -0
  201. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/common_fixtures.py +0 -0
  202. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/conftest.py +0 -0
  203. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee.csv +0 -0
  204. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee.json +0 -0
  205. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee.parquet +0 -0
  206. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  207. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  208. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  209. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  210. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  211. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  212. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  213. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  214. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  215. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  216. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  217. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  218. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  219. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  220. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/employee_extra_line.csv +0 -0
  221. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/issue_219.csv +0 -0
  222. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  223. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  224. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  225. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  226. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  227. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  228. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  229. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  230. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  231. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  232. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  233. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  234. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  235. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  236. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  237. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  238. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  239. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  240. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  241. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  242. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  243. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  244. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  245. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  246. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  247. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  248. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  249. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  250. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  251. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  252. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  253. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  254. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  255. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  256. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  257. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  258. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  259. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  260. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  261. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  262. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  263. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  264. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  265. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  266. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  267. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  268. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  269. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  270. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  271. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  272. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  273. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  274. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  275. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  276. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  277. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  278. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  279. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  280. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  281. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  282. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  283. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  284. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  285. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  286. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  287. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  288. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  289. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  290. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  291. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  292. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  293. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  294. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  295. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  296. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  297. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  298. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  299. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  300. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  301. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  302. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  303. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  304. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  305. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  306. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  307. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  308. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  309. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  310. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  311. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  312. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  313. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  314. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  315. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  316. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  317. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  318. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  319. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  320. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  321. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/__init__.py +0 -0
  322. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/__init__.py +0 -0
  323. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  324. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  325. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  326. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  327. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/databricks/__init__.py +0 -0
  328. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  329. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  330. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/__init__.py +0 -0
  331. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  332. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  333. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  334. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  335. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  336. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
  337. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/__init__.py +0 -0
  338. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  339. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  340. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  341. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/redshift/__init__.py +0 -0
  342. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  343. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  344. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  345. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  346. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  347. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  348. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/spark/__init__.py +0 -0
  349. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  350. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  351. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_column.py +0 -0
  352. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_session.py +0 -0
  353. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_engine_table.py +0 -0
  354. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/test_int_testing.py +0 -0
  355. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/fixtures.py +0 -0
  356. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/test_int_dataframe.py +0 -0
  357. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  358. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/test_int_grouped_data.py +0 -0
  359. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/test_int_session.py +0 -0
  360. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/types.py +0 -0
  361. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/__init__.py +0 -0
  362. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/bigquery/__init__.py +0 -0
  363. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/bigquery/test_activate.py +0 -0
  364. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/conftest.py +0 -0
  365. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/databricks/__init__.py +0 -0
  366. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/databricks/test_activate.py +0 -0
  367. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/duck/__init__.py +0 -0
  368. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/duck/test_activate.py +0 -0
  369. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/postgres/__init__.py +0 -0
  370. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/postgres/test_activate.py +0 -0
  371. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/redshift/__init__.py +0 -0
  372. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/redshift/test_activate.py +0 -0
  373. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/snowflake/__init__.py +0 -0
  374. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/snowflake/test_activate.py +0 -0
  375. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/spark/__init__.py +0 -0
  376. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/spark/test_activate.py +0 -0
  377. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/__init__.py +0 -0
  378. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/fixtures.py +0 -0
  379. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_activate.py +0 -0
  380. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_column.py +0 -0
  381. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  382. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_functions.py +0 -0
  383. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_session.py +0 -0
  384. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  385. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_types.py +0 -0
  386. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/standalone/test_window.py +0 -0
  387. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/test_activate.py +0 -0
  388. {sqlframe-3.21.0 → sqlframe-3.22.0}/tests/unit/test_util.py +0 -0

{sqlframe-3.21.0 → sqlframe-3.22.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 3.21.0
+Version: 3.22.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman

{sqlframe-3.21.0 → sqlframe-3.22.0}/docs/duckdb.md

@@ -406,6 +406,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min.html)
 * [min_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min_by.html)
 * [minute](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.minute.html)
+* [mode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.mode.html)
 * [month](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.month.html)
 * [months_between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.months_between.html)
   * Rounded whole number is returned

@@ -448,7 +449,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sin.html)
 * [size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.size.html)
 * [skewness](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.skewness.html)
-  * Returned value is different but is still calculating the same thing. Need to investigate difference in calculation
 * [slice](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.slice.html)
 * [sort_array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sort_array.html)
 * [soundex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.soundex.html)

{sqlframe-3.21.0 → sqlframe-3.22.0}/docs/snowflake.md

@@ -439,6 +439,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min.html)
 * [min_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min_by.html)
 * [minute](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.minute.html)
+* [mode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.mode.html)
 * [module](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.module.html)
 * [month](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.month.html)
 * [months_between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.months_between.html)

@@ -487,7 +488,6 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sinh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sinh.html)
 * [size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.size.html)
 * [skewness](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.skewness.html)
-  * Skewness is calculated differently
 * [slice](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.slice.html)
 * [sort_array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sort_array.html)
 * [soundex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.soundex.html)

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/_version.py

@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '3.21.0'
-__version_tuple__ = version_tuple = (3, 21, 0)
+__version__ = version = '3.22.0'
+__version_tuple__ = version_tuple = (3, 22, 0)

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/dataframe.py

@@ -296,6 +296,12 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
 
     @property
     def columns(self) -> t.List[str]:
+        expression_display_names = self.expression.copy()
+        self._set_display_names(expression_display_names)
+        return expression_display_names.named_selects
+
+    @property
+    def _columns(self) -> t.List[str]:
         return self.expression.named_selects
 
     @property
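
The split above separates the user-facing column names from the internal ones: the public `columns` property now applies the case-preserving display-name mapping, while internal call sites (see the `unionByName` and `toDF` hunks below) switch to the raw `_columns`. A minimal sketch of the intended difference, assuming a DuckDB session; the printed output is an assumption, not taken from the package's tests:

```python
from sqlframe.duckdb import DuckDBSession

session = DuckDBSession()  # in-memory DuckDB
df = session.createDataFrame([(1,)], schema=["EmployeeID"])

# Public API: names as the user wrote them, with display names applied.
print(df.columns)   # expected: ['EmployeeID']
# Internal API: names as they appear in the generated SQL.
print(df._columns)
```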

@@ -336,7 +342,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
         return types.StructType(
             [
                 types.StructField(
-                    c.name,
+                    self.display_name_mapping.get(c.name, c.name),
                     sqlglot_to_spark(
                         exp.DataType.build(c.dataType, dialect=self.session.output_dialect)
                     ),

@@ -611,6 +617,18 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
         }
         self.display_name_mapping.update(zipped)
 
+    def _set_display_names(self, select_expression: exp.Select) -> None:
+        for index, column in enumerate(select_expression.expressions):
+            column_name = quote_preserving_alias_or_name(column)
+            if column_name in self.display_name_mapping:
+                display_name_identifier = exp.to_identifier(
+                    self.display_name_mapping[column_name], quoted=True
+                )
+                display_name_identifier._meta = {"case_sensitive": True, **(column._meta or {})}
+                select_expression.expressions[index] = exp.alias_(
+                    column.unalias(), display_name_identifier, quoted=True
+                )
+
     def _get_expressions(
         self,
         optimize: bool = True,

@@ -631,16 +649,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
             select_expression = select_expression.transform(
                 replace_id_value, replacement_mapping
             ).assert_is(exp.Select)
-            for index, column in enumerate(select_expression.expressions):
-                column_name = quote_preserving_alias_or_name(column)
-                if column_name in self.display_name_mapping:
-                    display_name_identifier = exp.to_identifier(
-                        self.display_name_mapping[column_name], quoted=True
-                    )
-                    display_name_identifier._meta = {"case_sensitive": True, **(column._meta or {})}
-                    select_expression.expressions[index] = exp.alias_(
-                        column.unalias(), display_name_identifier, quoted=True
-                    )
+            self._set_display_names(select_expression)
             if optimize:
                 select_expression = t.cast(
                     exp.Select,

@@ -1158,8 +1167,8 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
 
     @operation(Operation.FROM)
     def unionByName(self, other: Self, allowMissingColumns: bool = False) -> Self:
-        l_columns = self.columns
-        r_columns = other.columns
+        l_columns = self._columns
+        r_columns = other._columns
         if not allowMissingColumns:
             l_expressions = l_columns
             r_expressions = l_columns

@@ -1619,9 +1628,9 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
         | 16|  Bob|
         +---+-----+
         """
-        if len(cols) != len(self.columns):
+        if len(cols) != len(self._columns):
             raise ValueError(
-                f"Number of column names does not match number of columns: {len(cols)} != {len(self.columns)}"
+                f"Number of column names does not match number of columns: {len(cols)} != {len(self._columns)}"
             )
         expression = self.expression.copy()
         expression = expression.select(

@@ -1889,7 +1898,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
         print("root")
         for column in self._typed_columns:
             print_schema(
-                column.name,
+                self.display_name_mapping.get(column.name, column.name),
                 exp.DataType.build(column.dataType, dialect=self.session.output_dialect),
                 column.nullable,
                 0,

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/function_alternatives.py

@@ -193,10 +193,6 @@ def factorial_ensure_int(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col_func(col).cast("integer"), "FACTORIAL")
 
 
-def skewness_from_skew(col: ColumnOrName) -> Column:
-    return Column.invoke_anonymous_function(col, "SKEW")
-
-
 def isnan_using_equal(col: ColumnOrName) -> Column:
     lit = get_func_from_session("lit")
     return Column(

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/functions.py

@@ -486,14 +486,32 @@ def var_pop(col: ColumnOrName) -> Column:
 
 @meta(unsupported_engines=["bigquery", "postgres"])
 def skewness(col: ColumnOrName) -> Column:
-    from sqlframe.base.function_alternatives import skewness_from_skew
-
     session = _get_session()
 
+    func_name = "SKEWNESS"
+
     if session._is_snowflake:
-        return skewness_from_skew(col)
+        func_name = "SKEW"
 
-    return Column.invoke_anonymous_function(col, "SKEWNESS")
+    if session._is_duckdb or session._is_snowflake:
+        when_func = get_func_from_session("when")
+        count_func = get_func_from_session("count")
+        count_star = count_func("*")
+        lit_func = get_func_from_session("lit")
+        sqrt_func = get_func_from_session("sqrt")
+        col = Column.ensure_col(col)
+        return (
+            when_func(count_star == lit_func(0), lit_func(None))
+            .when(count_star == lit_func(1), lit_func(float("nan")))
+            .when(count_star == lit_func(2), lit_func(0.0))
+            .otherwise(
+                Column.invoke_anonymous_function(col, func_name)
+                * (count_star - lit_func(2))
+                / (sqrt_func(count_star * (count_star - lit_func(1))))
+            )
+        )
+
+    return Column.invoke_anonymous_function(col, func_name)
 
 
 @meta(unsupported_engines=["bigquery", "postgres"])
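
The rewritten `skewness` above rescales the engine result for DuckDB and Snowflake: those engines return the bias-corrected sample skewness, while Spark's `skewness` reports the population statistic g1 = m3 / m2^(3/2), so multiplying by (n - 2) / sqrt(n(n - 1)) converts one into the other (the NULL/NaN/0.0 branches cover n = 0, 1, 2, mirroring Spark's edge behavior). A quick pure-Python check of that identity, as a sketch rather than package code:

```python
import math

def population_skewness(xs):
    # Spark-style skewness: g1 = m3 / m2 ** 1.5
    n = len(xs)
    mean = sum(xs) / n
    m2 = sum((x - mean) ** 2 for x in xs) / n
    m3 = sum((x - mean) ** 3 for x in xs) / n
    return m3 / m2**1.5

def sample_skewness(xs):
    # Bias-corrected sample skewness, as DuckDB/Snowflake compute it
    n = len(xs)
    return population_skewness(xs) * math.sqrt(n * (n - 1)) / (n - 2)

xs = [1.0, 2.0, 2.0, 3.0, 10.0]
n = len(xs)
assert math.isclose(
    sample_skewness(xs) * (n - 2) / math.sqrt(n * (n - 1)),
    population_skewness(xs),
)
```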

@@ -4486,7 +4504,7 @@ def median(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.Median)
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["bigquery", "postgres"])
 def mode(col: ColumnOrName) -> Column:
     """
     Returns the most frequent value in a group.

@@ -4522,6 +4540,7 @@ def mode(col: ColumnOrName) -> Column:
     |dotNET|      2012|
     +------+----------+
     """
+
     return Column.invoke_anonymous_function(col, "mode")
 
 

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/mixins/readwriter_mixins.py

@@ -82,6 +82,10 @@ class PandasLoaderMixin(_BaseDataFrameReader, t.Generic[SESSION, DF]):
         elif format == "parquet":
             df = pd.read_parquet(path, **kwargs)  # type: ignore
         elif format == "csv":
+            kwargs.pop("inferSchema", None)
+            if "header" in kwargs:
+                if isinstance(kwargs["header"], bool) and kwargs["header"]:
+                    kwargs["header"] = "infer"
             df = pd.read_csv(path, **kwargs)  # type: ignore
         else:
             raise UnsupportedOperationError(f"Unsupported format: {format}")
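
The new kwargs handling is needed because `pandas.read_csv` has no `inferSchema` option and rejects a boolean `header` (it expects `'infer'`, an int, or a list of ints), while PySpark-style callers habitually pass `header=True` and `inferSchema=True`. A small illustration of the translation:

```python
import io
import pandas as pd

csv_data = "id,name\n1,Jack\n2,Jill\n"
# pd.read_csv(..., header=True) raises a TypeError, so the Spark-style flag
# must first be mapped to pandas' "infer" sentinel, as the mixin now does.
df = pd.read_csv(io.StringIO(csv_data), header="infer")
assert list(df.columns) == ["id", "name"]
```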

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/readerwriter.py

@@ -393,10 +393,12 @@ class _BaseDataFrameWriter(t.Generic[SESSION, DF]):
         df: DF,
         mode: t.Optional[str] = None,
         by_name: bool = False,
+        state_format_to_write: t.Optional[str] = None,
     ):
         self._df = df
         self._mode = mode
         self._by_name = by_name
+        self._state_format_to_write = state_format_to_write
 
     @property
     def _session(self) -> SESSION:

@@ -484,6 +486,44 @@ class _BaseDataFrameWriter(t.Generic[SESSION, DF]):
     def _write(self, path: str, mode: t.Optional[str], format: str, **options) -> None:
         raise NotImplementedError
 
+    def format(self, source: str) -> "Self":
+        """Specifies the input data source format.
+
+        .. versionadded:: 1.4.0
+
+        .. versionchanged:: 3.4.0
+            Supports Spark Connect.
+
+        Parameters
+        ----------
+        source : str
+            string, name of the data source, e.g. 'json', 'parquet'.
+
+        Examples
+        --------
+        >>> spark.read.format('json')
+        <...readwriter.DataFrameReader object ...>
+
+        Write a DataFrame into a JSON file and read it back.
+
+        >>> import tempfile
+        >>> with tempfile.TemporaryDirectory() as d:
+        ...     # Write a DataFrame into a JSON file
+        ...     spark.createDataFrame(
+        ...         [{"age": 100, "name": "Hyukjin Kwon"}]
+        ...     ).write.mode("overwrite").format("json").save(d)
+        ...
+        ...     # Read the JSON file as a DataFrame.
+        ...     spark.read.format('json').load(d).show()
+        +---+------------+
+        |age|        name|
+        +---+------------+
+        |100|Hyukjin Kwon|
+        +---+------------+
+        """
+        self._state_format_to_write = source
+        return self
+
     def json(
         self,
         path: str,

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/base/util.py

@@ -1,6 +1,8 @@
 from __future__ import annotations
 
 import importlib
+import random
+import string
 import typing as t
 import unicodedata
 

@@ -97,12 +99,8 @@ def get_column_mapping_from_schema_input(
     else:
         value = {x.strip(): None for x in schema}
     return {
-        exp.to_column(k).sql(dialect=dialect): exp.DataType.build(v, dialect=dialect)
-        if v is not None
-        else v
-        for k, v in value.items()
+        k: exp.DataType.build(v, dialect=dialect) if v is not None else v for k, v in value.items()
     }
-    # return {x.strip(): None for x in schema}  # type: ignore
 
 
 def get_tables_from_expression_with_join(expression: exp.Select) -> t.List[exp.Table]:

@@ -431,3 +429,20 @@ def normalize_string(
     for pos in star_positions:
         normalized_value = normalized_value[:pos] + "*" + normalized_value[pos:]
     return normalized_value
+
+
+def generate_random_identifier(size=6, chars=string.ascii_uppercase + string.digits):
+    return "_" + "".join(random.choice(chars) for _ in range(size))
+
+
+def split_filepath(filepath: str) -> tuple[str, str]:
+    if filepath.startswith("dbfs:") or filepath.startswith("/dbfs"):
+        prefix = "dbfs:"
+        return prefix, filepath[len(prefix) :]
+    if filepath.startswith("file://"):
+        prefix = "file://"
+        return "", filepath[len(prefix) :]
+    split_ = str(filepath).split("://", 1)
+    if len(split_) == 2:  # noqa: PLR2004
+        return split_[0] + "://", split_[1]
+    return "", split_[0]

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe/duckdb/readwriter.py

@@ -92,6 +92,7 @@ class DuckDBDataFrameReader(
         if format == "delta":
             from_clause = f"delta_scan('{path}')"
         elif format:
+            options.pop("inferSchema", None)
             paths = ",".join([f"'{path}'" for path in ensure_list(path)])
             from_clause = f"read_{format}([{paths}], {to_csv(options)})"
         else:

sqlframe-3.22.0/sqlframe/spark/readwriter.py

@@ -0,0 +1,163 @@
+# This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
+
+from __future__ import annotations
+
+import typing as t
+
+from sqlglot import exp
+from sqlglot.helper import ensure_list
+
+from sqlframe.base.readerwriter import (
+    _BaseDataFrameReader,
+    _BaseDataFrameWriter,
+    _infer_format,
+)
+from sqlframe.base.util import ensure_column_mapping, generate_random_identifier, to_csv
+
+if t.TYPE_CHECKING:
+    from sqlframe.base._typing import OptionalPrimitiveType, PathOrPaths
+    from sqlframe.base.types import StructType
+    from sqlframe.spark.dataframe import SparkDataFrame
+    from sqlframe.spark.session import SparkSession
+    from sqlframe.spark.table import SparkTable
+
+
+class SparkDataFrameReader(
+    _BaseDataFrameReader["SparkSession", "SparkDataFrame", "SparkTable"],
+):
+    def load(
+        self,
+        path: t.Optional[PathOrPaths] = None,
+        format: t.Optional[str] = None,
+        schema: t.Optional[t.Union[StructType, str]] = None,
+        **options: OptionalPrimitiveType,
+    ) -> SparkDataFrame:
+        """Loads data from a data source and returns it as a :class:`DataFrame`.
+
+        .. versionadded:: 1.4.0
+
+        .. versionchanged:: 3.4.0
+            Supports Spark Connect.
+
+        Parameters
+        ----------
+        path : str or list, t.Optional
+            t.Optional string or a list of string for file-system backed data sources.
+        format : str, t.Optional
+            t.Optional string for format of the data source. Default to 'parquet'.
+        schema : :class:`pyspark.sql.types.StructType` or str, t.Optional
+            t.Optional :class:`pyspark.sql.types.StructType` for the input schema
+            or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
+        **options : dict
+            all other string options
+
+        Examples
+        --------
+        Load a CSV file with format, schema and options specified.
+
+        >>> import tempfile
+        >>> with tempfile.TemporaryDirectory() as d:
+        ...     # Write a DataFrame into a CSV file with a header
+        ...     df = spark.createDataFrame([{"age": 100, "name": "Hyukjin Kwon"}])
+        ...     df.write.option("header", True).mode("overwrite").format("csv").save(d)
+        ...
+        ...     # Read the CSV file as a DataFrame with 'nullValue' option set to 'Hyukjin Kwon',
+        ...     # and 'header' option set to `True`.
+        ...     df = spark.read.load(
+        ...         d, schema=df.schema, format="csv", nullValue="Hyukjin Kwon", header=True)
+        ...     df.printSchema()
+        ...     df.show()
+        root
+         |-- age: long (nullable = true)
+         |-- name: string (nullable = true)
+        +---+----+
+        |age|name|
+        +---+----+
+        |100|NULL|
+        +---+----+
+        """
+        assert path is not None, "path is required"
+        assert isinstance(path, str), "path must be a string"
+        format = format or self.state_format_to_read or _infer_format(path)
+        if schema:
+            column_mapping = ensure_column_mapping(schema)
+            select_column_mapping = column_mapping.copy()
+            select_columns = [x.expression for x in self._to_casted_columns(select_column_mapping)]
+
+            if hasattr(schema, "simpleString"):
+                schema = schema.simpleString()
+        else:
+            select_columns = [exp.Star()]
+
+        if format == "delta":
+            from_clause = f"delta.`{path}`"
+        elif format:
+            paths = ",".join([f"{path}" for path in ensure_list(path)])
+            tmp_view_key = options.get("_tmp_view_key_", f"{generate_random_identifier()}_vw")
+            options["_tmp_view_key_"] = tmp_view_key
+
+            format_options: dict[str, OptionalPrimitiveType] = {
+                k: v for k, v in options.items() if v is not None
+            }
+            format_options.pop("_tmp_view_key_")
+            format_options["path"] = paths
+            if schema:
+                format_options["schema"] = f"{schema}"
+                format_options.pop("inferSchema", None)
+            format_options = {key: f"'{val}'" for key, val in format_options.items()}
+            format_options_str = to_csv(format_options, " ")
+
+            tmp_view = f"CREATE OR REPLACE TEMPORARY VIEW {tmp_view_key} USING {format}" + (
+                f" OPTIONS ({format_options_str})" if format_options_str else ""
+            )
+            self.session.spark_session.sql(tmp_view).collect()
+
+            from_clause = f"{tmp_view_key}"
+        else:
+            from_clause = f"'{path}'"
+
+        df = self.session.sql(
+            exp.select(*select_columns).from_(from_clause, dialect=self.session.input_dialect),
+            qualify=False,
+        )
+        if select_columns == [exp.Star()] and df.schema:
+            return self.load(path=path, format=format, schema=df.schema, **options)
+        self.session._last_loaded_file = path  # type: ignore
+        return df
+
+
+class SparkDataFrameWriter(
+    _BaseDataFrameWriter["SparkSession", "SparkDataFrame"],
+):
+    def save(
+        self,
+        path: str,
+        mode: t.Optional[str] = None,
+        format: t.Optional[str] = None,
+        partitionBy: t.Optional[t.Union[str, t.List[str]]] = None,
+        **options,
+    ):
+        format = str(format or self._state_format_to_write)
+        self._write(path, mode, format, partitionBy=partitionBy, **options)
+
+    def _write(self, path: str, mode: t.Optional[str], format: str, **options):
+        spark_df = None
+        expressions = self._df._get_expressions()
+        for i, expression in enumerate(expressions):
+            if i < len(expressions) - 1:
+                self._df.session._collect(expressions)
+            else:
+                sql = self._df.session._to_sql(expression)
+                spark_df = self._session.spark_session.sql(sql)
+        if spark_df is not None:
+            options = {k: v for k, v in options.items() if v is not None}
+            mode = str(mode or self._mode or "default")
+            spark_writer = spark_df.write.format(format).mode(mode)
+            partition_columns = options.pop("partitionBy", None)
+            compression = options.pop("compression", None)
+            if partition_columns:
+                partition_columns = options.pop("partitionBy")
+                spark_writer = spark_writer.partitionBy(*partition_columns)
+            if compression:
+                spark_writer = spark_writer.option("compression", compression)
+            spark_writer.save(path=path, **options)
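
Taken together, the new reader registers a `CREATE OR REPLACE TEMPORARY VIEW ... USING <format>` over the files and selects from it, while the writer hands the generated SQL to the underlying PySpark `DataFrameWriter`. A hypothetical round trip, assuming a default local Spark-backed session can be constructed:

```python
from sqlframe.spark import SparkSession

session = SparkSession()  # assumes a local PySpark session is available
df = session.createDataFrame([(1, "Jack"), (2, "Jill")], schema=["id", "name"])
df.write.format("parquet").mode("overwrite").save("/tmp/employees")
loaded = session.read.load("/tmp/employees", format="parquet")
loaded.show()
```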

{sqlframe-3.21.0 → sqlframe-3.22.0}/sqlframe.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 3.21.0
+Version: 3.22.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman

{sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/databricks/test_databricks_dataframe.py

@@ -71,16 +71,16 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `map<string,bigint>_col`: map<string, bigint> (nullable = true)
+ |-- map<string,bigint>_col: map<string, bigint> (nullable = true)
  |    |-- key: string (nullable = true)
  |    |-- value: bigint (nullable = true)
- |-- `array<struct<a:bigint,b:bigint>>`: array<struct<a: bigint, b: bigint>> (nullable = true)
+ |-- array<struct<a:bigint,b:bigint>>: array<struct<a: bigint, b: bigint>> (nullable = true)
  |    |-- element: struct<a: bigint, b: bigint> (nullable = true)
  |    |    |-- a: bigint (nullable = true)
  |    |    |-- b: bigint (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  |    |-- element: bigint (nullable = true)
- |-- `struct<a:bigint>_col`: struct<a: bigint> (nullable = true)
+ |-- struct<a:bigint>_col: struct<a: bigint> (nullable = true)
  |    |-- a: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)

@@ -126,12 +126,12 @@ def test_schema_nested(databricks_datatypes: DatabricksDataFrame):
     assert struct_fields[1].dataType == types.DoubleType()
     assert struct_fields[2].name == "string_col"
     assert struct_fields[2].dataType == types.StringType()
-    assert struct_fields[3].name == "`map<string,bigint>_col`"
+    assert struct_fields[3].name == "map<string,bigint>_col"
     assert struct_fields[3].dataType == types.MapType(
         types.StringType(),
         types.LongType(),
     )
-    assert struct_fields[4].name == "`array<struct<a:bigint,b:bigint>>`"
+    assert struct_fields[4].name == "array<struct<a:bigint,b:bigint>>"
     assert struct_fields[4].dataType == types.ArrayType(
         types.StructType(
             [

@@ -146,11 +146,11 @@ def test_schema_nested(databricks_datatypes: DatabricksDataFrame):
             ]
         ),
     )
-    assert struct_fields[5].name == "`array<bigint>_col`"
+    assert struct_fields[5].name == "array<bigint>_col"
     assert struct_fields[5].dataType == types.ArrayType(
         types.LongType(),
     )
-    assert struct_fields[6].name == "`struct<a:bigint>_col`"
+    assert struct_fields[6].name == "struct<a:bigint>_col"
    assert struct_fields[6].dataType == types.StructType(
         [
             types.StructField(

{sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/duck/test_duckdb_dataframe.py

@@ -67,16 +67,16 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `map<string,bigint>_col`: map<string, bigint> (nullable = true)
+ |-- map<string,bigint>_col: map<string, bigint> (nullable = true)
  |    |-- key: string (nullable = true)
  |    |-- value: bigint (nullable = true)
- |-- `array<struct<a:bigint,b:bigint>>`: array<struct<a: bigint, b: bigint>> (nullable = true)
+ |-- array<struct<a:bigint,b:bigint>>: array<struct<a: bigint, b: bigint>> (nullable = true)
  |    |-- element: struct<a: bigint, b: bigint> (nullable = true)
  |    |    |-- a: bigint (nullable = true)
  |    |    |-- b: bigint (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  |    |-- element: bigint (nullable = true)
- |-- `struct<a:bigint>_col`: struct<a: bigint> (nullable = true)
+ |-- struct<a:bigint>_col: struct<a: bigint> (nullable = true)
  |    |-- a: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)

@@ -122,12 +122,12 @@ def test_schema_nested(duckdb_datatypes: DuckDBDataFrame):
     assert struct_fields[1].dataType == types.DoubleType()
     assert struct_fields[2].name == "string_col"
     assert struct_fields[2].dataType == types.StringType()
-    assert struct_fields[3].name == "`map<string,bigint>_col`"
+    assert struct_fields[3].name == "map<string,bigint>_col"
     assert struct_fields[3].dataType == types.MapType(
         types.StringType(),
         types.LongType(),
     )
-    assert struct_fields[4].name == "`array<struct<a:bigint,b:bigint>>`"
+    assert struct_fields[4].name == "array<struct<a:bigint,b:bigint>>"
     assert struct_fields[4].dataType == types.ArrayType(
         types.StructType(
             [

@@ -142,11 +142,11 @@ def test_schema_nested(duckdb_datatypes: DuckDBDataFrame):
             ]
         ),
     )
-    assert struct_fields[5].name == "`array<bigint>_col`"
+    assert struct_fields[5].name == "array<bigint>_col"
     assert struct_fields[5].dataType == types.ArrayType(
         types.LongType(),
     )
-    assert struct_fields[6].name == "`struct<a:bigint>_col`"
+    assert struct_fields[6].name == "struct<a:bigint>_col"
     assert struct_fields[6].dataType == types.StructType(
         [
             types.StructField(

{sqlframe-3.21.0 → sqlframe-3.22.0}/tests/integration/engines/postgres/test_postgres_dataframe.py

@@ -62,7 +62,7 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  |    |-- element: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)

@@ -108,7 +108,7 @@ def test_schema_nested(postgres_datatypes: PostgresDataFrame):
     assert struct_fields[1].dataType == types.DoubleType()
     assert struct_fields[2].name == "string_col"
     assert struct_fields[2].dataType == types.StringType()
-    assert struct_fields[3].name == "`array<bigint>_col`"
+    assert struct_fields[3].name == "array<bigint>_col"
     assert struct_fields[3].dataType == types.ArrayType(
         types.LongType(),
     )