sqlframe 3.16.0__tar.gz → 3.17.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (387) hide show
  1. {sqlframe-3.16.0 → sqlframe-3.17.1}/Makefile +2 -11
  2. {sqlframe-3.16.0 → sqlframe-3.17.1}/PKG-INFO +1 -1
  3. {sqlframe-3.16.0 → sqlframe-3.17.1}/setup.py +2 -2
  4. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/_version.py +2 -2
  5. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/column.py +1 -0
  6. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/dataframe.py +71 -24
  7. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/functions.py +12 -4
  8. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/session.py +27 -10
  9. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/session.py +1 -1
  10. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/session.py +1 -1
  11. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/session.py +1 -1
  12. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/session.py +1 -1
  13. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/session.py +1 -1
  14. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/session.py +13 -12
  15. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/session.py +1 -1
  16. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe.egg-info/PKG-INFO +1 -1
  17. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe.egg-info/requires.txt +2 -2
  18. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_dataframe.py +36 -14
  19. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_int_functions.py +1 -1
  20. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/test_int_dataframe.py +47 -1
  21. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_dataframe.py +2 -2
  22. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_dataframe_writer.py +1 -1
  23. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_functions.py +2 -2
  24. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_session_case_sensitivity.py +2 -2
  25. {sqlframe-3.16.0 → sqlframe-3.17.1}/.github/CODEOWNERS +0 -0
  26. {sqlframe-3.16.0 → sqlframe-3.17.1}/.github/workflows/main.workflow.yaml +0 -0
  27. {sqlframe-3.16.0 → sqlframe-3.17.1}/.github/workflows/publish.workflow.yaml +0 -0
  28. {sqlframe-3.16.0 → sqlframe-3.17.1}/.gitignore +0 -0
  29. {sqlframe-3.16.0 → sqlframe-3.17.1}/.pre-commit-config.yaml +0 -0
  30. {sqlframe-3.16.0 → sqlframe-3.17.1}/.readthedocs.yaml +0 -0
  31. {sqlframe-3.16.0 → sqlframe-3.17.1}/LICENSE +0 -0
  32. {sqlframe-3.16.0 → sqlframe-3.17.1}/README.md +0 -0
  33. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/add_chatgpt_support.md +0 -0
  34. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  35. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  36. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  37. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  38. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  39. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  40. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  41. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  42. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/but_wait_theres_more.gif +0 -0
  43. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/cake.gif +0 -0
  44. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/images/you_get_pyspark_api.gif +0 -0
  45. {sqlframe-3.16.0 → sqlframe-3.17.1}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  46. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/bigquery.md +0 -0
  47. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/configuration.md +0 -0
  48. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/databricks.md +0 -0
  49. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/bigquery.md +0 -0
  50. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/duckdb.md +0 -0
  51. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/images/SF.png +0 -0
  52. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/images/favicon.png +0 -0
  53. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/images/favicon_old.png +0 -0
  54. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/images/sqlframe_diagram.png +0 -0
  55. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/images/sqlframe_logo.png +0 -0
  56. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/docs/postgres.md +0 -0
  57. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/duckdb.md +0 -0
  58. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/images/SF.png +0 -0
  59. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/images/favicon.png +0 -0
  60. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/images/favicon_old.png +0 -0
  61. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/images/sqlframe_diagram.png +0 -0
  62. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/images/sqlframe_logo.png +0 -0
  63. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/index.md +0 -0
  64. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/postgres.md +0 -0
  65. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/redshift.md +0 -0
  66. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/requirements.txt +0 -0
  67. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/snowflake.md +0 -0
  68. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/spark.md +0 -0
  69. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/standalone.md +0 -0
  70. {sqlframe-3.16.0 → sqlframe-3.17.1}/docs/stylesheets/extra.css +0 -0
  71. {sqlframe-3.16.0 → sqlframe-3.17.1}/mkdocs.yml +0 -0
  72. {sqlframe-3.16.0 → sqlframe-3.17.1}/pytest.ini +0 -0
  73. {sqlframe-3.16.0 → sqlframe-3.17.1}/renovate.json +0 -0
  74. {sqlframe-3.16.0 → sqlframe-3.17.1}/setup.cfg +0 -0
  75. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/LICENSE +0 -0
  76. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/__init__.py +0 -0
  77. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/__init__.py +0 -0
  78. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/_typing.py +0 -0
  79. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/catalog.py +0 -0
  80. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/decorators.py +0 -0
  81. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/exceptions.py +0 -0
  82. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/function_alternatives.py +0 -0
  83. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/group.py +0 -0
  84. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/mixins/__init__.py +0 -0
  85. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  86. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  87. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  88. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/mixins/table_mixins.py +0 -0
  89. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/normalize.py +0 -0
  90. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/operations.py +0 -0
  91. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/readerwriter.py +0 -0
  92. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/table.py +0 -0
  93. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/transforms.py +0 -0
  94. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/types.py +0 -0
  95. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/udf.py +0 -0
  96. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/util.py +0 -0
  97. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/base/window.py +0 -0
  98. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/__init__.py +0 -0
  99. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/catalog.py +0 -0
  100. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/column.py +0 -0
  101. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/dataframe.py +0 -0
  102. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/functions.py +0 -0
  103. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/functions.pyi +0 -0
  104. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/group.py +0 -0
  105. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/readwriter.py +0 -0
  106. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/table.py +0 -0
  107. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/types.py +0 -0
  108. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/udf.py +0 -0
  109. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/bigquery/window.py +0 -0
  110. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/__init__.py +0 -0
  111. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/catalog.py +0 -0
  112. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/column.py +0 -0
  113. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/dataframe.py +0 -0
  114. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/functions.py +0 -0
  115. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/functions.pyi +0 -0
  116. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/group.py +0 -0
  117. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/readwriter.py +0 -0
  118. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/table.py +0 -0
  119. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/types.py +0 -0
  120. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/udf.py +0 -0
  121. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/databricks/window.py +0 -0
  122. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/__init__.py +0 -0
  123. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/catalog.py +0 -0
  124. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/column.py +0 -0
  125. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/dataframe.py +0 -0
  126. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/functions.py +0 -0
  127. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/functions.pyi +0 -0
  128. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/group.py +0 -0
  129. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/readwriter.py +0 -0
  130. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/table.py +0 -0
  131. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/types.py +0 -0
  132. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/udf.py +0 -0
  133. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/duckdb/window.py +0 -0
  134. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/__init__.py +0 -0
  135. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/catalog.py +0 -0
  136. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/column.py +0 -0
  137. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/dataframe.py +0 -0
  138. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/functions.py +0 -0
  139. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/functions.pyi +0 -0
  140. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/group.py +0 -0
  141. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/readwriter.py +0 -0
  142. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/table.py +0 -0
  143. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/types.py +0 -0
  144. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/udf.py +0 -0
  145. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/postgres/window.py +0 -0
  146. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/__init__.py +0 -0
  147. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/catalog.py +0 -0
  148. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/column.py +0 -0
  149. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/dataframe.py +0 -0
  150. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/functions.py +0 -0
  151. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/group.py +0 -0
  152. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/readwriter.py +0 -0
  153. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/session.py +0 -0
  154. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/table.py +0 -0
  155. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/types.py +0 -0
  156. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/udf.py +0 -0
  157. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/redshift/window.py +0 -0
  158. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/__init__.py +0 -0
  159. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/catalog.py +0 -0
  160. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/column.py +0 -0
  161. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/dataframe.py +0 -0
  162. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/functions.py +0 -0
  163. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/functions.pyi +0 -0
  164. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/group.py +0 -0
  165. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/readwriter.py +0 -0
  166. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/table.py +0 -0
  167. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/types.py +0 -0
  168. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/udf.py +0 -0
  169. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/snowflake/window.py +0 -0
  170. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/__init__.py +0 -0
  171. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/catalog.py +0 -0
  172. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/column.py +0 -0
  173. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/dataframe.py +0 -0
  174. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/functions.py +0 -0
  175. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/functions.pyi +0 -0
  176. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/group.py +0 -0
  177. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/readwriter.py +0 -0
  178. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/table.py +0 -0
  179. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/types.py +0 -0
  180. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/udf.py +0 -0
  181. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/spark/window.py +0 -0
  182. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/__init__.py +0 -0
  183. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/catalog.py +0 -0
  184. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/column.py +0 -0
  185. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/dataframe.py +0 -0
  186. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/functions.py +0 -0
  187. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/group.py +0 -0
  188. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/readwriter.py +0 -0
  189. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/table.py +0 -0
  190. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/types.py +0 -0
  191. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/udf.py +0 -0
  192. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/standalone/window.py +0 -0
  193. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/testing/__init__.py +0 -0
  194. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe/testing/utils.py +0 -0
  195. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe.egg-info/SOURCES.txt +0 -0
  196. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe.egg-info/dependency_links.txt +0 -0
  197. {sqlframe-3.16.0 → sqlframe-3.17.1}/sqlframe.egg-info/top_level.txt +0 -0
  198. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/__init__.py +0 -0
  199. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/common_fixtures.py +0 -0
  200. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/conftest.py +0 -0
  201. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee.csv +0 -0
  202. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee.json +0 -0
  203. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee.parquet +0 -0
  204. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  205. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  206. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  207. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  208. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  209. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  210. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  211. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  212. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  213. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  214. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  215. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  216. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  217. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  218. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/employee_extra_line.csv +0 -0
  219. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/issue_219.csv +0 -0
  220. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  221. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  222. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  223. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  224. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  225. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  226. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  227. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  228. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  229. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  230. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  231. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  232. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  233. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  234. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  235. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  236. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  237. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  238. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  239. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  240. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  241. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  242. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  243. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  244. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  245. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  246. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  247. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  248. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  249. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  250. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  251. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  252. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  253. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  254. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  255. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  256. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  257. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  258. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  259. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  260. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  261. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  262. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  263. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  264. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  265. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  266. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  267. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  268. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  269. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  270. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  271. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  272. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  273. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  274. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  275. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  276. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  277. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  278. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  279. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  280. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  281. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  282. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  283. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  284. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  285. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  286. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  287. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  288. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  289. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  290. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  291. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  292. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  293. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  294. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  295. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  296. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  297. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  298. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  299. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  300. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  301. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  302. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  303. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  304. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  305. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  306. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  307. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  308. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  309. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  310. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  311. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  312. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  313. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  314. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  315. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  316. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  317. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  318. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  319. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/__init__.py +0 -0
  320. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/__init__.py +0 -0
  321. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/bigquery/__init__.py +0 -0
  322. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  323. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  324. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  325. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/databricks/__init__.py +0 -0
  326. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  327. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
  328. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  329. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/__init__.py +0 -0
  330. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  331. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  332. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  333. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  334. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  335. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  336. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/duck/test_tpcds.py +0 -0
  337. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/postgres/__init__.py +0 -0
  338. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  339. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  340. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  341. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  342. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/redshift/__init__.py +0 -0
  343. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  344. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  345. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/snowflake/__init__.py +0 -0
  346. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  347. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  348. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  349. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/spark/__init__.py +0 -0
  350. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  351. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  352. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_column.py +0 -0
  353. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_reader.py +0 -0
  354. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_session.py +0 -0
  355. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_table.py +0 -0
  356. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_engine_writer.py +0 -0
  357. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/engines/test_int_testing.py +0 -0
  358. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/fixtures.py +0 -0
  359. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/test_int_dataframe_stats.py +0 -0
  360. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/test_int_grouped_data.py +0 -0
  361. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/integration/test_int_session.py +0 -0
  362. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/types.py +0 -0
  363. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/__init__.py +0 -0
  364. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/bigquery/__init__.py +0 -0
  365. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/bigquery/test_activate.py +0 -0
  366. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/conftest.py +0 -0
  367. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/databricks/__init__.py +0 -0
  368. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/databricks/test_activate.py +0 -0
  369. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/duck/__init__.py +0 -0
  370. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/duck/test_activate.py +0 -0
  371. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/postgres/__init__.py +0 -0
  372. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/postgres/test_activate.py +0 -0
  373. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/redshift/__init__.py +0 -0
  374. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/redshift/test_activate.py +0 -0
  375. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/snowflake/__init__.py +0 -0
  376. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/snowflake/test_activate.py +0 -0
  377. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/spark/__init__.py +0 -0
  378. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/spark/test_activate.py +0 -0
  379. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/__init__.py +0 -0
  380. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/fixtures.py +0 -0
  381. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_activate.py +0 -0
  382. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_column.py +0 -0
  383. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_session.py +0 -0
  384. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_types.py +0 -0
  385. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/standalone/test_window.py +0 -0
  386. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/test_activate.py +0 -0
  387. {sqlframe-3.16.0 → sqlframe-3.17.1}/tests/unit/test_util.py +0 -0
@@ -13,17 +13,8 @@ fast-test:
13
13
  local-test:
14
14
  pytest -n auto -m "fast or local"
15
15
 
16
- bigquery-test:
17
- pytest -n auto -m "bigquery"
18
-
19
- duckdb-test:
20
- pytest -n auto -m "duckdb"
21
-
22
- snowflake-test:
23
- pytest -n auto -m "snowflake"
24
-
25
- databricks-test:
26
- pytest -n auto -m "databricks"
16
+ %-test:
17
+ pytest -n auto -m "${*}"
27
18
 
28
19
  style:
29
20
  pre-commit run --all-files
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 3.16.0
3
+ Version: 3.17.1
4
4
  Summary: Turning PySpark Into a Universal DataFrame API
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -20,7 +20,7 @@ setup(
20
20
  python_requires=">=3.9",
21
21
  install_requires=[
22
22
  "prettytable<4",
23
- "sqlglot>=24.0.0,<26.4",
23
+ "sqlglot>=24.0.0,<26.5",
24
24
  "typing_extensions",
25
25
  ],
26
26
  extras_require={
@@ -31,7 +31,7 @@ setup(
31
31
  "dev": [
32
32
  "duckdb>=0.9,<1.2",
33
33
  "findspark>=2,<3",
34
- "mypy>=1.10.0,<1.15",
34
+ "mypy>=1.10.0,<1.16",
35
35
  "openai>=1.30,<2",
36
36
  "pandas>=2,<3",
37
37
  "pandas-stubs>=2,<3",
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '3.16.0'
16
- __version_tuple__ = version_tuple = (3, 16, 0)
15
+ __version__ = version = '3.17.1'
16
+ __version_tuple__ = version_tuple = (3, 17, 1)
@@ -291,6 +291,7 @@ class Column:
291
291
  this=self.column_expression,
292
292
  alias=alias.this if isinstance(alias, exp.Column) else alias,
293
293
  )
294
+ new_expression._meta = {"display_name": name, **(new_expression._meta or {})}
294
295
  return Column(new_expression)
295
296
 
296
297
  def asc(self) -> Column:
@@ -233,6 +233,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
233
233
  last_op: Operation = Operation.INIT,
234
234
  pending_hints: t.Optional[t.List[exp.Expression]] = None,
235
235
  output_expression_container: t.Optional[OutputExpressionContainer] = None,
236
+ display_name_mapping: t.Optional[t.Dict[str, str]] = None,
236
237
  **kwargs,
237
238
  ):
238
239
  self.session = session
@@ -246,6 +247,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
246
247
  self.pending_hints = pending_hints or []
247
248
  self.output_expression_container = output_expression_container or exp.Select()
248
249
  self.temp_views: t.List[exp.Select] = []
250
+ self.display_name_mapping = display_name_mapping or {}
249
251
 
250
252
  def __getattr__(self, column_name: str) -> Column:
251
253
  return self[column_name]
@@ -385,13 +387,14 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
385
387
  return Column.ensure_cols(ensure_list(cols)) # type: ignore
386
388
 
387
389
  def _ensure_and_normalize_cols(
388
- self, cols, expression: t.Optional[exp.Select] = None
390
+ self, cols, expression: t.Optional[exp.Select] = None, skip_star_expansion: bool = False
389
391
  ) -> t.List[Column]:
390
392
  from sqlframe.base.normalize import normalize
391
393
 
392
394
  cols = self._ensure_list_of_columns(cols)
393
395
  normalize(self.session, expression or self.expression, cols)
394
- cols = list(flatten([self._expand_star(col) for col in cols]))
396
+ if not skip_star_expansion:
397
+ cols = list(flatten([self._expand_star(col) for col in cols]))
395
398
  self._resolve_ambiguous_columns(cols)
396
399
  return cols
397
400
 
@@ -592,6 +595,23 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
592
595
  )
593
596
  return [col]
594
597
 
598
+ def _update_display_name_mapping(
599
+ self, normalized_columns: t.List[Column], user_input: t.Iterable[ColumnOrName]
600
+ ) -> None:
601
+ from sqlframe.base.column import Column
602
+
603
+ normalized_aliases = [x.alias_or_name for x in normalized_columns]
604
+ user_display_names = [
605
+ x.expression.meta.get("display_name") if isinstance(x, Column) else x
606
+ for x in user_input
607
+ ]
608
+ zipped = {
609
+ k: v
610
+ for k, v in dict(zip(normalized_aliases, user_display_names)).items()
611
+ if v is not None
612
+ }
613
+ self.display_name_mapping.update(zipped)
614
+
595
615
  def _get_expressions(
596
616
  self,
597
617
  optimize: bool = True,
@@ -611,6 +631,16 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
611
631
  select_expression = select_expression.transform(
612
632
  replace_id_value, replacement_mapping
613
633
  ).assert_is(exp.Select)
634
+ for index, column in enumerate(select_expression.expressions):
635
+ column_name = quote_preserving_alias_or_name(column)
636
+ if column_name in self.display_name_mapping:
637
+ display_name_identifier = exp.to_identifier(
638
+ self.display_name_mapping[column_name], quoted=True
639
+ )
640
+ display_name_identifier._meta = {"case_sensitive": True, **(column._meta or {})}
641
+ select_expression.expressions[index] = exp.alias_(
642
+ column.unalias(), display_name_identifier, quoted=True
643
+ )
614
644
  if optimize:
615
645
  select_expression = t.cast(
616
646
  exp.Select,
@@ -792,8 +822,9 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
792
822
  if cte:
793
823
  resolved_column_position[ambiguous_col] += 1
794
824
  else:
795
- cte = ctes_with_column[resolved_column_position[ambiguous_col]]
796
- ambiguous_col.set("table", exp.to_identifier(cte.alias_or_name))
825
+ cte = seq_get(ctes_with_column, resolved_column_position[ambiguous_col])
826
+ if cte:
827
+ ambiguous_col.set("table", exp.to_identifier(cte.alias_or_name))
797
828
 
798
829
  @operation(Operation.SELECT)
799
830
  def select(self, *cols, **kwargs) -> Self:
@@ -803,6 +834,17 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
803
834
  if isinstance(cols[0], list):
804
835
  cols = cols[0] # type: ignore
805
836
  columns = self._ensure_and_normalize_cols(cols)
837
+ if "skip_update_display_name_mapping" not in kwargs:
838
+ unexpanded_columns = self._ensure_and_normalize_cols(cols, skip_star_expansion=True)
839
+ user_cols = list(cols)
840
+ star_columns = []
841
+ for index, user_col in enumerate(cols):
842
+ if "*" in (user_col if isinstance(user_col, str) else user_col.alias_or_name):
843
+ star_columns.append(index)
844
+ for index in star_columns:
845
+ unexpanded_columns.pop(index)
846
+ user_cols.pop(index)
847
+ self._update_display_name_mapping(unexpanded_columns, user_cols)
806
848
  kwargs["append"] = kwargs.get("append", False)
807
849
  # If an expression is `CAST(x AS DATETYPE)` then we want to alias so that `x` is the result column name
808
850
  columns = [
@@ -852,6 +894,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
852
894
  @operation(Operation.SELECT)
853
895
  def agg(self, *exprs, **kwargs) -> Self:
854
896
  cols = self._ensure_and_normalize_cols(exprs)
897
+ self._update_display_name_mapping(cols, exprs)
855
898
  return self.groupBy().agg(*cols)
856
899
 
857
900
  @operation(Operation.FROM)
@@ -1051,7 +1094,9 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
1051
1094
  new_df = self.copy(expression=join_expression)
1052
1095
  new_df.pending_join_hints.extend(self.pending_join_hints)
1053
1096
  new_df.pending_hints.extend(other_df.pending_hints)
1054
- new_df = new_df.select.__wrapped__(new_df, *select_column_names) # type: ignore
1097
+ new_df = new_df.select.__wrapped__( # type: ignore
1098
+ new_df, *select_column_names, skip_update_display_name_mapping=True
1099
+ )
1055
1100
  return new_df
1056
1101
 
1057
1102
  @operation(Operation.ORDER_BY)
@@ -1441,20 +1486,18 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
1441
1486
  def withColumnRenamed(self, existing: str, new: str) -> Self:
1442
1487
  expression = self.expression.copy()
1443
1488
  existing = self.session._normalize_string(existing)
1444
- new = self.session._normalize_string(new)
1445
- existing_columns = [
1446
- expression
1447
- for expression in expression.expressions
1448
- if expression.alias_or_name == existing
1449
- ]
1450
- if not existing_columns:
1489
+ columns = self._get_outer_select_columns(expression)
1490
+ results = []
1491
+ found_match = False
1492
+ for column in columns:
1493
+ if column.alias_or_name == existing:
1494
+ column = column.alias(new)
1495
+ self._update_display_name_mapping([column], [new])
1496
+ found_match = True
1497
+ results.append(column)
1498
+ if not found_match:
1451
1499
  raise ValueError("Tried to rename a column that doesn't exist")
1452
- for existing_column in existing_columns:
1453
- if isinstance(existing_column, exp.Column):
1454
- existing_column.replace(exp.alias_(existing_column, new))
1455
- else:
1456
- existing_column.set("alias", exp.to_identifier(new))
1457
- return self.copy(expression=expression)
1500
+ return self.select.__wrapped__(self, *results, skip_update_display_name_mapping=True) # type: ignore
1458
1501
 
1459
1502
  @operation(Operation.SELECT)
1460
1503
  def withColumns(self, *colsMap: t.Dict[str, Column]) -> Self:
@@ -1495,23 +1538,27 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
1495
1538
  if len(colsMap) != 1:
1496
1539
  raise ValueError("Only a single map is supported")
1497
1540
  col_map = {
1498
- self._ensure_and_normalize_col(k).alias_or_name: self._ensure_and_normalize_col(v)
1541
+ self._ensure_and_normalize_col(k): (self._ensure_and_normalize_col(v), k)
1499
1542
  for k, v in colsMap[0].items()
1500
1543
  }
1501
1544
  existing_cols = self._get_outer_select_columns(self.expression)
1502
1545
  existing_col_names = [x.alias_or_name for x in existing_cols]
1503
1546
  select_columns = existing_cols
1504
- for column_name, col_value in col_map.items():
1547
+ for col, (col_value, display_name) in col_map.items():
1548
+ column_name = col.alias_or_name
1505
1549
  existing_col_index = (
1506
1550
  existing_col_names.index(column_name) if column_name in existing_col_names else None
1507
1551
  )
1508
1552
  if existing_col_index is not None:
1509
1553
  select_columns[existing_col_index] = col_value.alias( # type: ignore
1510
- column_name
1511
- ).expression
1554
+ display_name
1555
+ )
1512
1556
  else:
1513
- select_columns.append(col_value.alias(column_name))
1514
- return self.select.__wrapped__(self, *select_columns) # type: ignore
1557
+ select_columns.append(col_value.alias(display_name))
1558
+ self._update_display_name_mapping(
1559
+ [col for col in col_map], [name for _, name in col_map.values()]
1560
+ )
1561
+ return self.select.__wrapped__(self, *select_columns, skip_update_display_name_mapping=True) # type: ignore
1515
1562
 
1516
1563
  @operation(Operation.SELECT)
1517
1564
  def drop(self, *cols: t.Union[str, Column]) -> Self:
@@ -39,11 +39,19 @@ def col(column_name: t.Union[ColumnOrName, t.Any]) -> Column:
39
39
 
40
40
  dialect = _BaseSession().input_dialect
41
41
  if isinstance(column_name, str):
42
- return Column(
43
- expression.to_column(column_name, dialect=dialect).transform(
44
- dialect.normalize_identifier
45
- )
42
+ col_expression = expression.to_column(column_name, dialect=dialect).transform(
43
+ dialect.normalize_identifier
46
44
  )
45
+ case_sensitive_expression = expression.to_column(column_name, dialect=dialect)
46
+ if not isinstance(
47
+ case_sensitive_expression, (expression.Star, expression.Literal, expression.Null)
48
+ ):
49
+ col_expression._meta = {
50
+ "display_name": case_sensitive_expression.this.this,
51
+ **(col_expression._meta or {}),
52
+ }
53
+
54
+ return Column(col_expression)
47
55
  return Column(column_name)
48
56
 
49
57
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
+ import contextlib
5
6
  import datetime
6
7
  import logging
7
8
  import sys
@@ -213,13 +214,16 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
213
214
 
214
215
  def createDataFrame(
215
216
  self,
216
- data: t.Sequence[
217
- t.Union[
218
- t.Dict[str, ColumnLiterals],
219
- t.List[ColumnLiterals],
220
- t.Tuple[ColumnLiterals, ...],
221
- ColumnLiterals,
222
- ]
217
+ data: t.Union[
218
+ t.Sequence[
219
+ t.Union[
220
+ t.Dict[str, ColumnLiterals],
221
+ t.List[ColumnLiterals],
222
+ t.Tuple[ColumnLiterals, ...],
223
+ ColumnLiterals,
224
+ ],
225
+ ],
226
+ pd.DataFrame,
223
227
  ],
224
228
  schema: t.Optional[SchemaInput] = None,
225
229
  samplingRatio: t.Optional[float] = None,
@@ -240,11 +244,18 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
240
244
  ):
241
245
  raise NotImplementedError("Only schema of either list or string of list supported")
242
246
 
247
+ with contextlib.suppress(ImportError):
248
+ from pandas import DataFrame as pd_DataFrame
249
+
250
+ if isinstance(data, pd_DataFrame):
251
+ data = data.to_dict("records") # type: ignore
252
+
243
253
  column_mapping: t.Mapping[str, t.Optional[exp.DataType]]
244
254
  if schema is not None:
245
255
  column_mapping = get_column_mapping_from_schema_input(
246
256
  schema, dialect=self.input_dialect
247
257
  )
258
+
248
259
  elif data:
249
260
  if isinstance(data[0], Row):
250
261
  column_mapping = {col_name.strip(): None for col_name in data[0].__fields__}
@@ -386,7 +397,8 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
386
397
  dialect = Dialect.get_or_raise(dialect or self.input_dialect)
387
398
  expression = (
388
399
  sqlglot.parse_one(
389
- normalize_string(sqlQuery, from_dialect=dialect, is_query=True), read=dialect
400
+ normalize_string(sqlQuery, from_dialect=dialect, is_query=True),
401
+ read=dialect,
390
402
  )
391
403
  if isinstance(sqlQuery, str)
392
404
  else sqlQuery
@@ -507,9 +519,14 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
507
519
  result = self._cur.fetchall()
508
520
  if not self._cur.description:
509
521
  return []
522
+ case_sensitive_cols = []
523
+ for col in self._cur.description:
524
+ col_id = exp.parse_identifier(col[0], dialect=self.execution_dialect)
525
+ col_id._meta = {"case_sensitive": True, **(col_id._meta or {})}
526
+ case_sensitive_cols.append(col_id)
510
527
  columns = [
511
- normalize_string(x[0], from_dialect="execution", to_dialect="output", is_column=True)
512
- for x in self._cur.description
528
+ normalize_string(x, from_dialect="execution", to_dialect="output")
529
+ for x in case_sensitive_cols
513
530
  ]
514
531
  return [self._to_row(columns, row) for row in result]
515
532
 
@@ -27,7 +27,7 @@ class BigQuerySession(
27
27
  BigQueryDataFrameWriter,
28
28
  BigQueryDataFrame,
29
29
  BigQueryTable,
30
- BigQueryConnection,
30
+ BigQueryConnection, # type: ignore
31
31
  BigQueryUDFRegistration,
32
32
  ],
33
33
  ):
@@ -26,7 +26,7 @@ class DatabricksSession(
26
26
  DatabricksDataFrameWriter,
27
27
  DatabricksDataFrame,
28
28
  DatabricksTable,
29
- DatabricksConnection,
29
+ DatabricksConnection, # type: ignore
30
30
  DatabricksUDFRegistration,
31
31
  ],
32
32
  ):
@@ -28,7 +28,7 @@ class DuckDBSession(
28
28
  DuckDBDataFrameWriter,
29
29
  DuckDBDataFrame,
30
30
  DuckDBTable,
31
- DuckDBPyConnection,
31
+ DuckDBPyConnection, # type: ignore
32
32
  DuckDBUDFRegistration,
33
33
  ]
34
34
  ):
@@ -29,7 +29,7 @@ class PostgresSession(
29
29
  PostgresDataFrameWriter,
30
30
  PostgresDataFrame,
31
31
  PostgresTable,
32
- psycopg2_connection,
32
+ psycopg2_connection, # type: ignore
33
33
  PostgresUDFRegistration,
34
34
  ],
35
35
  ):
@@ -53,7 +53,7 @@ class SnowflakeSession(
53
53
  SnowflakeDataFrameWriter,
54
54
  SnowflakeDataFrame,
55
55
  SnowflakeTable,
56
- SnowflakeConnection,
56
+ SnowflakeConnection, # type: ignore
57
57
  SnowflakeUDFRegistration,
58
58
  ],
59
59
  ):
@@ -34,7 +34,7 @@ class SparkSession(
34
34
  SparkDataFrameWriter,
35
35
  SparkDataFrame,
36
36
  SparkTable,
37
- PySparkSession,
37
+ PySparkSession, # type: ignore
38
38
  SparkUDFRegistration,
39
39
  ],
40
40
  ):
@@ -79,17 +79,18 @@ class SparkSession(
79
79
  if skip_rows:
80
80
  return []
81
81
  assert self._last_df is not None
82
- return [
83
- Row(
84
- **{
85
- normalize_string(
86
- k, from_dialect="execution", to_dialect="output", is_column=True
87
- ): v
88
- for k, v in row.asDict().items()
89
- }
90
- )
91
- for row in self._last_df.collect()
92
- ]
82
+ results = []
83
+ for row in self._last_df.collect():
84
+ rows_normalized = {}
85
+ for k, v in row.asDict().items():
86
+ col_id = exp.parse_identifier(k, dialect=self.execution_dialect)
87
+ col_id._meta = {"case_sensitive": True, **(col_id._meta or {})}
88
+ col_name = normalize_string(
89
+ col_id, from_dialect="execution", to_dialect="output", is_column=True
90
+ )
91
+ rows_normalized[col_name] = v
92
+ results.append(Row(**rows_normalized))
93
+ return results
93
94
 
94
95
  def _execute(self, sql: str) -> None:
95
96
  self._last_df = self.spark_session.sql(sql)
@@ -20,7 +20,7 @@ class StandaloneSession(
20
20
  StandaloneDataFrameWriter,
21
21
  StandaloneDataFrame,
22
22
  StandaloneTable,
23
- object,
23
+ object, # type: ignore
24
24
  StandaloneUDFRegistration,
25
25
  ]
26
26
  ): # type: ignore
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 3.16.0
3
+ Version: 3.17.1
4
4
  Summary: Turning PySpark Into a Universal DataFrame API
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -1,5 +1,5 @@
1
1
  prettytable<4
2
- sqlglot<26.4,>=24.0.0
2
+ sqlglot<26.5,>=24.0.0
3
3
  typing_extensions
4
4
 
5
5
  [bigquery]
@@ -12,7 +12,7 @@ databricks-sql-connector<5,>=3.6
12
12
  [dev]
13
13
  duckdb<1.2,>=0.9
14
14
  findspark<3,>=2
15
- mypy<1.15,>=1.10.0
15
+ mypy<1.16,>=1.10.0
16
16
  openai<2,>=1.30
17
17
  pandas-stubs<3,>=2
18
18
  pandas<3,>=2
@@ -3,6 +3,8 @@ from __future__ import annotations
3
3
  import typing as t
4
4
 
5
5
  from sqlframe.base.types import Row
6
+ from sqlframe.snowflake import SnowflakeSession
7
+ from sqlframe.spark import SparkSession
6
8
 
7
9
  if t.TYPE_CHECKING:
8
10
  from sqlframe.base.dataframe import BaseDataFrame
@@ -31,20 +33,30 @@ def test_show(
31
33
  ):
32
34
  employee = get_engine_df("employee")
33
35
  lit = get_func("lit", employee.session)
34
- employee = employee.select("*", lit(1).alias("one"))
36
+ col = get_func("col", employee.session)
37
+ employee = (
38
+ employee.select("EmPloyee_Id", "fname", "lnamE", "AGE", "stoRe_iD", lit(1).alias("One"))
39
+ .withColumnRenamed("sToRe_id", "SToRE_Id")
40
+ .withColumns(
41
+ {
42
+ "lNamE": col("lname"),
43
+ "tWo": lit(2),
44
+ }
45
+ )
46
+ )
35
47
  employee.show()
36
48
  captured = capsys.readouterr()
37
49
  assert (
38
50
  captured.out
39
- == """+-------------+--------+-----------+-----+----------+-----+
40
- | employee_id | fname | lname | age | store_id | one |
41
- +-------------+--------+-----------+-----+----------+-----+
42
- | 1 | Jack | Shephard | 37 | 1 | 1 |
43
- | 2 | John | Locke | 65 | 1 | 1 |
44
- | 3 | Kate | Austen | 37 | 2 | 1 |
45
- | 4 | Claire | Littleton | 27 | 2 | 1 |
46
- | 5 | Hugo | Reyes | 29 | 100 | 1 |
47
- +-------------+--------+-----------+-----+----------+-----+\n"""
51
+ == """+-------------+--------+-----------+-----+----------+-----+-----+
52
+ | EmPloyee_Id | fname | lNamE | AGE | SToRE_Id | One | tWo |
53
+ +-------------+--------+-----------+-----+----------+-----+-----+
54
+ | 1 | Jack | Shephard | 37 | 1 | 1 | 2 |
55
+ | 2 | John | Locke | 65 | 1 | 1 | 2 |
56
+ | 3 | Kate | Austen | 37 | 2 | 1 | 2 |
57
+ | 4 | Claire | Littleton | 27 | 2 | 1 | 2 |
58
+ | 5 | Hugo | Reyes | 29 | 100 | 1 | 2 |
59
+ +-------------+--------+-----------+-----+----------+-----+-----+\n"""
48
60
  )
49
61
  assert "Truncate is ignored so full results will be displayed" not in caplog.text
50
62
  employee.show(truncate=True)
@@ -58,11 +70,21 @@ def test_show_limit(
58
70
  employee = get_engine_df("employee")
59
71
  employee.show(1)
60
72
  captured = capsys.readouterr()
61
- assert (
62
- captured.out
63
- == """+-------------+-------+----------+-----+----------+
73
+ if isinstance(employee.session, SnowflakeSession):
74
+ assert (
75
+ captured.out
76
+ == """+-------------+-------+----------+-----+----------+
77
+ | EMPLOYEE_ID | FNAME | LNAME | AGE | STORE_ID |
78
+ +-------------+-------+----------+-----+----------+
79
+ | 1 | Jack | Shephard | 37 | 1 |
80
+ +-------------+-------+----------+-----+----------+\n"""
81
+ )
82
+ else:
83
+ assert (
84
+ captured.out
85
+ == """+-------------+-------+----------+-----+----------+
64
86
  | employee_id | fname | lname | age | store_id |
65
87
  +-------------+-------+----------+-----+----------+
66
88
  | 1 | Jack | Shephard | 37 | 1 |
67
89
  +-------------+-------+----------+-----+----------+\n"""
68
- )
90
+ )
@@ -241,7 +241,7 @@ def test_alias(get_session_and_func):
241
241
  DatabricksSession,
242
242
  ),
243
243
  ):
244
- assert space_result == "`a space in new name`"
244
+ assert space_result == "`A Space In New Name`"
245
245
  else:
246
246
  assert space_result == "A Space In New Name"
247
247
 
@@ -2,6 +2,7 @@ from __future__ import annotations
2
2
 
3
3
  import typing as t
4
4
 
5
+ import pandas as pd
5
6
  import pytest
6
7
  from _pytest.fixtures import FixtureRequest
7
8
  from pyspark.sql import DataFrame as PySparkDataFrame
@@ -9,7 +10,7 @@ from pyspark.sql import functions as F
9
10
 
10
11
  from sqlframe.standalone import functions as SF
11
12
  from sqlframe.standalone.dataframe import StandaloneDataFrame
12
- from tests.integration.fixtures import StandaloneSession
13
+ from tests.integration.fixtures import StandaloneSession, is_snowflake
13
14
 
14
15
  if t.TYPE_CHECKING:
15
16
  from sqlframe.base.dataframe import BaseDataFrame
@@ -29,6 +30,20 @@ def test_empty_df(
29
30
  compare_frames(df_empty, dfs_empty, no_empty=False)
30
31
 
31
32
 
33
+ def test_dataframe_from_pandas(
34
+ pyspark_employee: PySparkDataFrame,
35
+ get_df: t.Callable[[str], BaseDataFrame],
36
+ compare_frames: t.Callable,
37
+ ):
38
+ employee = get_df("employee")
39
+ compare_frames(
40
+ pyspark_employee,
41
+ employee.session.createDataFrame(
42
+ pyspark_employee.toPandas(), schema=pyspark_employee.schema.simpleString()
43
+ ),
44
+ )
45
+
46
+
32
47
  def test_simple_select(
33
48
  pyspark_employee: PySparkDataFrame,
34
49
  get_df: t.Callable[[str], BaseDataFrame],
@@ -1780,6 +1795,7 @@ def test_with_columns_reference_another(
1780
1795
  compare_frames: t.Callable,
1781
1796
  is_bigquery: t.Callable,
1782
1797
  is_postgres: t.Callable,
1798
+ is_snowflake: t.Callable,
1783
1799
  ):
1784
1800
  # Could consider two options:
1785
1801
  # 1. Use SQLGlot optimizer to properly change the references to be expanded to avoid the issue (a rule already does this)
@@ -1792,6 +1808,14 @@ def test_with_columns_reference_another(
1792
1808
  pytest.skip(
1793
1809
  "Postgres doesn't support having selects with columns that reference each other."
1794
1810
  )
1811
+ if is_snowflake():
1812
+ # Snowflake does allow columns that reference each other but the issue is that if you do this in the final
1813
+ # select the columns are replaced with their alias version to show their display name (the case-sensitive
1814
+ # name provided by the user) and then, since the column is now aliased and case-sensitive, SF thinks
1815
+ # the column doesn't exist since the column of the same case does not exist since it was aliased.
1816
+ pytest.skip(
1817
+ "Bugged behavior introduced display names means that snowflake can no longer reference itself."
1818
+ )
1795
1819
  employee = get_df("employee")
1796
1820
  df = pyspark_employee.withColumns(
1797
1821
  {
@@ -2405,3 +2429,25 @@ def test_filtering_join_key(
2405
2429
  ).filter(SF.col("store_id") > 1)
2406
2430
 
2407
2431
  compare_frames(df, dfs, compare_schema=False, sort=True)
2432
+
2433
+
2434
+ # https://github.com/eakmanrq/sqlframe/issues/281
2435
+ def test_create_column_after_join(
2436
+ pyspark_employee: PySparkDataFrame,
2437
+ pyspark_store: PySparkDataFrame,
2438
+ get_df: t.Callable[[str], BaseDataFrame],
2439
+ compare_frames: t.Callable,
2440
+ ):
2441
+ df = pyspark_employee.join(
2442
+ pyspark_store,
2443
+ on="store_id",
2444
+ ).withColumn("new_col", F.lit(1))
2445
+
2446
+ employee = get_df("employee")
2447
+ store = get_df("store")
2448
+ dfs = employee.join(
2449
+ store,
2450
+ on="store_id",
2451
+ ).withColumn("new_col", SF.lit(1))
2452
+
2453
+ compare_frames(df, dfs, compare_schema=False, sort=True)