sqlframe 3.21.1.tar.gz → 3.22.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (388)
  1. {sqlframe-3.21.1 → sqlframe-3.22.1}/PKG-INFO +1 -1
  2. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/duckdb.md +1 -0
  3. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/snowflake.md +1 -0
  4. {sqlframe-3.21.1 → sqlframe-3.22.1}/setup.py +1 -1
  5. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/_version.py +9 -4
  6. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/catalog.py +6 -3
  7. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/dataframe.py +2 -2
  8. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/functions.py +2 -1
  9. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/readwriter_mixins.py +4 -0
  10. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/readerwriter.py +40 -0
  11. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/util.py +19 -9
  12. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/window.py +19 -30
  13. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/readwriter.py +1 -0
  14. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/session.py +1 -3
  15. sqlframe-3.22.1/sqlframe/spark/readwriter.py +163 -0
  16. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/PKG-INFO +1 -1
  17. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/requires.txt +1 -1
  18. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/databricks/test_databricks_dataframe.py +8 -8
  19. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_dataframe.py +8 -8
  20. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/test_postgres_dataframe.py +2 -2
  21. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_dataframe.py +5 -1
  22. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_reader.py +24 -7
  23. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_writer.py +31 -16
  24. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_int_functions.py +1 -1
  25. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/test_int_dataframe.py +86 -0
  26. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_column.py +4 -2
  27. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_window.py +18 -6
  28. sqlframe-3.21.1/sqlframe/spark/readwriter.py +0 -30
  29. {sqlframe-3.21.1 → sqlframe-3.22.1}/.github/CODEOWNERS +0 -0
  30. {sqlframe-3.21.1 → sqlframe-3.22.1}/.github/workflows/main.workflow.yaml +0 -0
  31. {sqlframe-3.21.1 → sqlframe-3.22.1}/.github/workflows/publish.workflow.yaml +0 -0
  32. {sqlframe-3.21.1 → sqlframe-3.22.1}/.gitignore +0 -0
  33. {sqlframe-3.21.1 → sqlframe-3.22.1}/.pre-commit-config.yaml +0 -0
  34. {sqlframe-3.21.1 → sqlframe-3.22.1}/.readthedocs.yaml +0 -0
  35. {sqlframe-3.21.1 → sqlframe-3.22.1}/LICENSE +0 -0
  36. {sqlframe-3.21.1 → sqlframe-3.22.1}/Makefile +0 -0
  37. {sqlframe-3.21.1 → sqlframe-3.22.1}/README.md +0 -0
  38. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/add_chatgpt_support.md +0 -0
  39. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  40. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  41. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  42. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  43. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  44. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  45. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  46. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  47. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/but_wait_theres_more.gif +0 -0
  48. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/cake.gif +0 -0
  49. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/images/you_get_pyspark_api.gif +0 -0
  50. {sqlframe-3.21.1 → sqlframe-3.22.1}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  51. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/bigquery.md +0 -0
  52. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/configuration.md +0 -0
  53. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/databricks.md +0 -0
  54. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/bigquery.md +0 -0
  55. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/duckdb.md +0 -0
  56. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/images/SF.png +0 -0
  57. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/images/favicon.png +0 -0
  58. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/images/favicon_old.png +0 -0
  59. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/images/sqlframe_diagram.png +0 -0
  60. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/images/sqlframe_logo.png +0 -0
  61. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/docs/postgres.md +0 -0
  62. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/images/SF.png +0 -0
  63. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/images/favicon.png +0 -0
  64. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/images/favicon_old.png +0 -0
  65. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/images/sqlframe_diagram.png +0 -0
  66. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/images/sqlframe_logo.png +0 -0
  67. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/index.md +0 -0
  68. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/postgres.md +0 -0
  69. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/redshift.md +0 -0
  70. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/requirements.txt +0 -0
  71. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/spark.md +0 -0
  72. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/standalone.md +0 -0
  73. {sqlframe-3.21.1 → sqlframe-3.22.1}/docs/stylesheets/extra.css +0 -0
  74. {sqlframe-3.21.1 → sqlframe-3.22.1}/mkdocs.yml +0 -0
  75. {sqlframe-3.21.1 → sqlframe-3.22.1}/pytest.ini +0 -0
  76. {sqlframe-3.21.1 → sqlframe-3.22.1}/renovate.json +0 -0
  77. {sqlframe-3.21.1 → sqlframe-3.22.1}/setup.cfg +0 -0
  78. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/LICENSE +0 -0
  79. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/__init__.py +0 -0
  80. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/__init__.py +0 -0
  81. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/_typing.py +0 -0
  82. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/column.py +0 -0
  83. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/decorators.py +0 -0
  84. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/exceptions.py +0 -0
  85. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/function_alternatives.py +0 -0
  86. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/group.py +0 -0
  87. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/__init__.py +0 -0
  88. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  89. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  90. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/table_mixins.py +0 -0
  91. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/normalize.py +0 -0
  92. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/operations.py +0 -0
  93. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/session.py +0 -0
  94. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/table.py +0 -0
  95. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/transforms.py +0 -0
  96. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/types.py +0 -0
  97. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/udf.py +0 -0
  98. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/__init__.py +0 -0
  99. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/catalog.py +0 -0
  100. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/column.py +0 -0
  101. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/dataframe.py +0 -0
  102. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/functions.py +0 -0
  103. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/functions.pyi +0 -0
  104. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/group.py +0 -0
  105. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/readwriter.py +0 -0
  106. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/session.py +0 -0
  107. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/table.py +0 -0
  108. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/types.py +0 -0
  109. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/udf.py +0 -0
  110. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/bigquery/window.py +0 -0
  111. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/__init__.py +0 -0
  112. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/catalog.py +0 -0
  113. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/column.py +0 -0
  114. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/dataframe.py +0 -0
  115. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/functions.py +0 -0
  116. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/functions.pyi +0 -0
  117. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/group.py +0 -0
  118. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/readwriter.py +0 -0
  119. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/session.py +0 -0
  120. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/table.py +0 -0
  121. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/types.py +0 -0
  122. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/udf.py +0 -0
  123. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/databricks/window.py +0 -0
  124. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/__init__.py +0 -0
  125. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/catalog.py +0 -0
  126. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/column.py +0 -0
  127. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/dataframe.py +0 -0
  128. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/functions.py +0 -0
  129. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/functions.pyi +0 -0
  130. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/group.py +0 -0
  131. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/table.py +0 -0
  132. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/types.py +0 -0
  133. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/udf.py +0 -0
  134. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/window.py +0 -0
  135. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/__init__.py +0 -0
  136. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/catalog.py +0 -0
  137. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/column.py +0 -0
  138. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/dataframe.py +0 -0
  139. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/functions.py +0 -0
  140. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/functions.pyi +0 -0
  141. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/group.py +0 -0
  142. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/readwriter.py +0 -0
  143. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/session.py +0 -0
  144. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/table.py +0 -0
  145. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/types.py +0 -0
  146. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/udf.py +0 -0
  147. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/postgres/window.py +0 -0
  148. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/__init__.py +0 -0
  149. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/catalog.py +0 -0
  150. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/column.py +0 -0
  151. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/dataframe.py +0 -0
  152. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/functions.py +0 -0
  153. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/group.py +0 -0
  154. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/readwriter.py +0 -0
  155. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/session.py +0 -0
  156. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/table.py +0 -0
  157. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/types.py +0 -0
  158. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/udf.py +0 -0
  159. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/redshift/window.py +0 -0
  160. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/__init__.py +0 -0
  161. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/catalog.py +0 -0
  162. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/column.py +0 -0
  163. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/dataframe.py +0 -0
  164. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/functions.py +0 -0
  165. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/functions.pyi +0 -0
  166. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/group.py +0 -0
  167. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/readwriter.py +0 -0
  168. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/session.py +0 -0
  169. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/table.py +0 -0
  170. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/types.py +0 -0
  171. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/udf.py +0 -0
  172. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/snowflake/window.py +0 -0
  173. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/__init__.py +0 -0
  174. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/catalog.py +0 -0
  175. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/column.py +0 -0
  176. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/dataframe.py +0 -0
  177. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/functions.py +0 -0
  178. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/functions.pyi +0 -0
  179. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/group.py +0 -0
  180. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/session.py +0 -0
  181. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/table.py +0 -0
  182. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/types.py +0 -0
  183. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/udf.py +0 -0
  184. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/spark/window.py +0 -0
  185. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/__init__.py +0 -0
  186. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/catalog.py +0 -0
  187. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/column.py +0 -0
  188. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/dataframe.py +0 -0
  189. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/functions.py +0 -0
  190. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/group.py +0 -0
  191. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/readwriter.py +0 -0
  192. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/session.py +0 -0
  193. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/table.py +0 -0
  194. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/types.py +0 -0
  195. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/udf.py +0 -0
  196. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/standalone/window.py +0 -0
  197. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/testing/__init__.py +0 -0
  198. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/testing/utils.py +0 -0
  199. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/SOURCES.txt +0 -0
  200. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/dependency_links.txt +0 -0
  201. {sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/top_level.txt +0 -0
  202. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/__init__.py +0 -0
  203. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/common_fixtures.py +0 -0
  204. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/conftest.py +0 -0
  205. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee.csv +0 -0
  206. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee.json +0 -0
  207. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee.parquet +0 -0
  208. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
  209. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
  210. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
  211. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
  212. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
  213. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
  214. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
  215. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
  216. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
  217. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
  218. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
  219. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
  220. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
  221. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
  222. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/employee_extra_line.csv +0 -0
  223. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/issue_219.csv +0 -0
  224. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds1.sql +0 -0
  225. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds10.sql +0 -0
  226. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds11.sql +0 -0
  227. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds12.sql +0 -0
  228. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds13.sql +0 -0
  229. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds14.sql +0 -0
  230. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds15.sql +0 -0
  231. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds16.sql +0 -0
  232. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds17.sql +0 -0
  233. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds18.sql +0 -0
  234. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds19.sql +0 -0
  235. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds2.sql +0 -0
  236. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds20.sql +0 -0
  237. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds21.sql +0 -0
  238. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds22.sql +0 -0
  239. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds23.sql +0 -0
  240. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds24.sql +0 -0
  241. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds25.sql +0 -0
  242. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds26.sql +0 -0
  243. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds27.sql +0 -0
  244. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds28.sql +0 -0
  245. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds29.sql +0 -0
  246. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds3.sql +0 -0
  247. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds30.sql +0 -0
  248. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds31.sql +0 -0
  249. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds32.sql +0 -0
  250. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds33.sql +0 -0
  251. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds34.sql +0 -0
  252. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds35.sql +0 -0
  253. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds36.sql +0 -0
  254. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds37.sql +0 -0
  255. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds38.sql +0 -0
  256. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds39.sql +0 -0
  257. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds4.sql +0 -0
  258. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds40.sql +0 -0
  259. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds41.sql +0 -0
  260. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds42.sql +0 -0
  261. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds43.sql +0 -0
  262. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds44.sql +0 -0
  263. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds45.sql +0 -0
  264. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds46.sql +0 -0
  265. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds47.sql +0 -0
  266. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds48.sql +0 -0
  267. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds49.sql +0 -0
  268. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds5.sql +0 -0
  269. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds50.sql +0 -0
  270. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds51.sql +0 -0
  271. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds52.sql +0 -0
  272. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds53.sql +0 -0
  273. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds54.sql +0 -0
  274. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds55.sql +0 -0
  275. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds56.sql +0 -0
  276. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds57.sql +0 -0
  277. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds58.sql +0 -0
  278. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds59.sql +0 -0
  279. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds6.sql +0 -0
  280. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds60.sql +0 -0
  281. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds61.sql +0 -0
  282. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds62.sql +0 -0
  283. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds63.sql +0 -0
  284. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds64.sql +0 -0
  285. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds65.sql +0 -0
  286. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds66.sql +0 -0
  287. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds67.sql +0 -0
  288. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds68.sql +0 -0
  289. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds69.sql +0 -0
  290. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds7.sql +0 -0
  291. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds70.sql +0 -0
  292. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds71.sql +0 -0
  293. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds72.sql +0 -0
  294. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds73.sql +0 -0
  295. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds74.sql +0 -0
  296. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds75.sql +0 -0
  297. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds76.sql +0 -0
  298. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds77.sql +0 -0
  299. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds78.sql +0 -0
  300. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds79.sql +0 -0
  301. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds8.sql +0 -0
  302. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds80.sql +0 -0
  303. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds81.sql +0 -0
  304. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds82.sql +0 -0
  305. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds83.sql +0 -0
  306. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds84.sql +0 -0
  307. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds85.sql +0 -0
  308. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds86.sql +0 -0
  309. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds87.sql +0 -0
  310. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds88.sql +0 -0
  311. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds89.sql +0 -0
  312. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds9.sql +0 -0
  313. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds90.sql +0 -0
  314. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds91.sql +0 -0
  315. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds92.sql +0 -0
  316. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds93.sql +0 -0
  317. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds94.sql +0 -0
  318. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds95.sql +0 -0
  319. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds96.sql +0 -0
  320. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds97.sql +0 -0
  321. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds98.sql +0 -0
  322. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/fixtures/tpcds/tpcds99.sql +0 -0
  323. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/__init__.py +0 -0
  324. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/__init__.py +0 -0
  325. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/bigquery/__init__.py +0 -0
  326. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  327. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
  328. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  329. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/databricks/__init__.py +0 -0
  330. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
  331. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
  332. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/__init__.py +0 -0
  333. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
  334. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  335. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  336. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  337. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
  338. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_tpcds.py +0 -0
  339. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/__init__.py +0 -0
  340. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
  341. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  342. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  343. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/redshift/__init__.py +0 -0
  344. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  345. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  346. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/snowflake/__init__.py +0 -0
  347. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  348. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
  349. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  350. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/spark/__init__.py +0 -0
  351. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  352. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
  353. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_column.py +0 -0
  354. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_session.py +0 -0
  355. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_table.py +0 -0
  356. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_int_testing.py +0 -0
  357. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/fixtures.py +0 -0
  358. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/test_int_dataframe_stats.py +0 -0
  359. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/test_int_grouped_data.py +0 -0
  360. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/test_int_session.py +0 -0
  361. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/types.py +0 -0
  362. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/__init__.py +0 -0
  363. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/bigquery/__init__.py +0 -0
  364. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/bigquery/test_activate.py +0 -0
  365. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/conftest.py +0 -0
  366. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/databricks/__init__.py +0 -0
  367. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/databricks/test_activate.py +0 -0
  368. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/duck/__init__.py +0 -0
  369. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/duck/test_activate.py +0 -0
  370. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/postgres/__init__.py +0 -0
  371. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/postgres/test_activate.py +0 -0
  372. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/redshift/__init__.py +0 -0
  373. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/redshift/test_activate.py +0 -0
  374. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/snowflake/__init__.py +0 -0
  375. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/snowflake/test_activate.py +0 -0
  376. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/spark/__init__.py +0 -0
  377. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/spark/test_activate.py +0 -0
  378. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/__init__.py +0 -0
  379. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/fixtures.py +0 -0
  380. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_activate.py +0 -0
  381. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_dataframe.py +0 -0
  382. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  383. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_functions.py +0 -0
  384. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_session.py +0 -0
  385. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  386. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/standalone/test_types.py +0 -0
  387. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/test_activate.py +0 -0
  388. {sqlframe-3.21.1 → sqlframe-3.22.1}/tests/unit/test_util.py +0 -0
{sqlframe-3.21.1 → sqlframe-3.22.1}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.21.1
+ Version: 3.22.1
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-3.21.1 → sqlframe-3.22.1}/docs/duckdb.md
@@ -406,6 +406,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min.html)
  * [min_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min_by.html)
  * [minute](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.minute.html)
+ * [mode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.mode.html)
  * [month](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.month.html)
  * [months_between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.months_between.html)
  * Rounded whole number is returned
{sqlframe-3.21.1 → sqlframe-3.22.1}/docs/snowflake.md
@@ -439,6 +439,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min.html)
  * [min_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min_by.html)
  * [minute](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.minute.html)
+ * [mode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.mode.html)
  * [module](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.module.html)
  * [month](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.month.html)
  * [months_between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.months_between.html)
{sqlframe-3.21.1 → sqlframe-3.22.1}/setup.py
@@ -40,7 +40,7 @@ setup(
  "pyspark>=2,<3.6",
  "pytest>=8.2.0,<8.4",
  "pytest-forked",
- "pytest-postgresql>=6,<7",
+ "pytest-postgresql>=6,<8",
  "pytest-xdist>=3.6,<3.7",
  "pre-commit>=3.7,<5",
  "ruff>=0.4.4,<0.10",
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/_version.py
@@ -1,8 +1,13 @@
- # file generated by setuptools_scm
+ # file generated by setuptools-scm
  # don't change, don't track in version control
+
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
  TYPE_CHECKING = False
  if TYPE_CHECKING:
- from typing import Tuple, Union
+ from typing import Tuple
+ from typing import Union
+
  VERSION_TUPLE = Tuple[Union[int, str], ...]
  else:
  VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '3.21.1'
- __version_tuple__ = version_tuple = (3, 21, 1)
+ __version__ = version = '3.22.1'
+ __version_tuple__ = version_tuple = (3, 22, 1)
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/catalog.py
@@ -70,13 +70,16 @@ class _BaseCatalog(t.Generic[SESSION, DF]):
  }

  def add_table(
- self, table: exp.Table | str, column_mapping: t.Optional[ColumnMapping] = None
+ self,
+ table: exp.Table | str,
+ column_mapping: t.Optional[ColumnMapping] = None,
+ **kwargs: t.Any,
  ) -> None:
  # TODO: Making this an update or add
  table = self.ensure_table(table)
  if self._schema.find(table):
  return
- if not column_mapping:
+ if column_mapping is None:
  try:
  column_mapping = {
  normalize_string(
@@ -100,7 +103,7 @@ class _BaseCatalog(t.Generic[SESSION, DF]):
  if column.this.quoted:
  self._quoted_columns[table].append(column.this.name)

- self._schema.add_table(table, column_mapping, dialect=self.session.input_dialect)
+ self._schema.add_table(table, column_mapping, dialect=self.session.input_dialect, **kwargs)

  def getDatabase(self, dbName: str) -> Database:
  """Get the database with the specified name.
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/dataframe.py
@@ -342,7 +342,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
  return types.StructType(
  [
  types.StructField(
- c.name,
+ self.display_name_mapping.get(c.name, c.name),
  sqlglot_to_spark(
  exp.DataType.build(c.dataType, dialect=self.session.output_dialect)
  ),
@@ -1898,7 +1898,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
  print("root")
  for column in self._typed_columns:
  print_schema(
- column.name,
+ self.display_name_mapping.get(column.name, column.name),
  exp.DataType.build(column.dataType, dialect=self.session.output_dialect),
  column.nullable,
  0,
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/functions.py
@@ -4504,7 +4504,7 @@ def median(col: ColumnOrName) -> Column:
  return Column.invoke_expression_over_column(col, expression.Median)


- @meta(unsupported_engines="*")
+ @meta(unsupported_engines=["bigquery", "postgres"])
  def mode(col: ColumnOrName) -> Column:
  """
  Returns the most frequent value in a group.
@@ -4540,6 +4540,7 @@ def mode(col: ColumnOrName) -> Column:
  |dotNET| 2012|
  +------+----------+
  """
+
  return Column.invoke_anonymous_function(col, "mode")
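With `unsupported_engines` narrowed from the catch-all `"*"` to just BigQuery and Postgres, `mode` now compiles on the remaining engines as an anonymous `MODE(...)` call. A sketch of the call pattern against DuckDB, reusing the docstring's example data (names and values are illustrative only):

    from sqlframe.duckdb import DuckDBSession
    from sqlframe.duckdb import functions as F

    session = DuckDBSession()
    df = session.createDataFrame(
        [("Java", 2012), ("dotNET", 2012), ("dotNET", 2013), ("Java", 2013)],
        ["course", "year"],
    )
    # mode() is rendered as MODE(year) in the generated SQL.
    df.groupBy("course").agg(F.mode("year").alias("mode_year")).show()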
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/mixins/readwriter_mixins.py
@@ -82,6 +82,10 @@ class PandasLoaderMixin(_BaseDataFrameReader, t.Generic[SESSION, DF]):
  elif format == "parquet":
  df = pd.read_parquet(path, **kwargs) # type: ignore
  elif format == "csv":
+ kwargs.pop("inferSchema", None)
+ if "header" in kwargs:
+ if isinstance(kwargs["header"], bool) and kwargs["header"]:
+ kwargs["header"] = "infer"
  df = pd.read_csv(path, **kwargs) # type: ignore
  else:
  raise UnsupportedOperationError(f"Unsupported format: {format}")
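The new CSV branch drops Spark's `inferSchema` option (pandas infers dtypes by default) and translates a boolean `header=True` into the `header="infer"` value that `pandas.read_csv` expects. The same translation in isolation, as a sketch with a hypothetical helper name and the `employee.csv` fixture path:

    import pandas as pd

    def csv_kwargs_for_pandas(kwargs: dict) -> dict:
        # Spark-only option with no pandas equivalent; pandas infers dtypes anyway.
        kwargs.pop("inferSchema", None)
        # Spark's header=True becomes pandas' header="infer".
        if isinstance(kwargs.get("header"), bool) and kwargs["header"]:
            kwargs["header"] = "infer"
        return kwargs

    df = pd.read_csv("employee.csv", **csv_kwargs_for_pandas({"header": True, "inferSchema": True}))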
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/readerwriter.py
@@ -393,10 +393,12 @@ class _BaseDataFrameWriter(t.Generic[SESSION, DF]):
  df: DF,
  mode: t.Optional[str] = None,
  by_name: bool = False,
+ state_format_to_write: t.Optional[str] = None,
  ):
  self._df = df
  self._mode = mode
  self._by_name = by_name
+ self._state_format_to_write = state_format_to_write

  @property
  def _session(self) -> SESSION:
@@ -484,6 +486,44 @@ class _BaseDataFrameWriter(t.Generic[SESSION, DF]):
  def _write(self, path: str, mode: t.Optional[str], format: str, **options) -> None:
  raise NotImplementedError

+ def format(self, source: str) -> "Self":
+ """Specifies the input data source format.
+
+ .. versionadded:: 1.4.0
+
+ .. versionchanged:: 3.4.0
+ Supports Spark Connect.
+
+ Parameters
+ ----------
+ source : str
+ string, name of the data source, e.g. 'json', 'parquet'.
+
+ Examples
+ --------
+ >>> spark.read.format('json')
+ <...readwriter.DataFrameReader object ...>
+
+ Write a DataFrame into a JSON file and read it back.
+
+ >>> import tempfile
+ >>> with tempfile.TemporaryDirectory() as d:
+ ... # Write a DataFrame into a JSON file
+ ... spark.createDataFrame(
+ ... [{"age": 100, "name": "Hyukjin Kwon"}]
+ ... ).write.mode("overwrite").format("json").save(d)
+ ...
+ ... # Read the JSON file as a DataFrame.
+ ... spark.read.format('json').load(d).show()
+ +---+------------+
+ |age| name|
+ +---+------------+
+ |100|Hyukjin Kwon|
+ +---+------------+
+ """
+ self._state_format_to_write = source
+ return self
+
  def json(
  self,
  path: str,
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/util.py
@@ -1,6 +1,8 @@
  from __future__ import annotations

  import importlib
+ import random
+ import string
  import typing as t
  import unicodedata

@@ -277,15 +279,6 @@ def verify_openai_installed():
  )


- def verify_numpy_installed():
- try:
- import numpy # noqa
- except ImportError:
- raise ImportError(
- """Numpy is required for this functionality. `pip install "sqlframe[pandas]"` (also include your engine if needed) to install pandas/numpy."""
- )
-
-
  def quote_preserving_alias_or_name(col: t.Union[exp.Column, exp.Alias]) -> str:
  from sqlframe.base.session import _BaseSession

@@ -427,3 +420,20 @@ def normalize_string(
  for pos in star_positions:
  normalized_value = normalized_value[:pos] + "*" + normalized_value[pos:]
  return normalized_value
+
+
+ def generate_random_identifier(size=6, chars=string.ascii_uppercase + string.digits):
+ return "_" + "".join(random.choice(chars) for _ in range(size))
+
+
+ def split_filepath(filepath: str) -> tuple[str, str]:
+ if filepath.startswith("dbfs:") or filepath.startswith("/dbfs"):
+ prefix = "dbfs:"
+ return prefix, filepath[len(prefix) :]
+ if filepath.startswith("file://"):
+ prefix = "file://"
+ return "", filepath[len(prefix) :]
+ split_ = str(filepath).split("://", 1)
+ if len(split_) == 2: # noqa: PLR2004
+ return split_[0] + "://", split_[1]
+ return "", split_[0]
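`split_filepath` peels a scheme prefix off a path so engine code can reattach or drop it as needed; `dbfs:`/`/dbfs` and `file://` are special-cased (note that `len("/dbfs") == len("dbfs:")`, so both DBFS spellings strip cleanly). Expected results under those rules, as a sketch with illustrative paths:

    from sqlframe.base.util import split_filepath

    assert split_filepath("dbfs:/mnt/data/x.parquet") == ("dbfs:", "/mnt/data/x.parquet")
    assert split_filepath("file:///tmp/x.csv") == ("", "/tmp/x.csv")
    assert split_filepath("s3://bucket/key.json") == ("s3://", "bucket/key.json")
    assert split_filepath("relative/path.csv") == ("", "relative/path.csv")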
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/base/window.py
@@ -82,37 +82,26 @@ class WindowSpec:
  def _calc_start_end(
  self, start: int, end: int
  ) -> t.Dict[str, t.Optional[t.Union[str, exp.Expression]]]:
- kwargs: t.Dict[str, t.Optional[t.Union[str, exp.Expression]]] = {
- "start_side": None,
- "end_side": None,
+ def get_value_and_side(x: int) -> t.Tuple[t.Union[str, exp.Expression], t.Optional[str]]:
+ if x == Window.currentRow:
+ return "CURRENT ROW", None
+ if x < 0:
+ side = "PRECEDING"
+ value = "UNBOUNDED" if x <= Window.unboundedPreceding else F.lit(abs(x)).expression
+ return value, side
+ else:
+ side = "FOLLOWING"
+ value = "UNBOUNDED" if x >= Window.unboundedFollowing else F.lit(x).expression
+ return value, side
+
+ start, start_side = get_value_and_side(start) # type: ignore
+ end, end_side = get_value_and_side(end) # type: ignore
+ return {
+ "start": start, # type: ignore
+ "start_side": start_side,
+ "end": end, # type: ignore
+ "end_side": end_side,
  }
- if start == Window.currentRow:
- kwargs["start"] = "CURRENT ROW"
- else:
- kwargs = {
- **kwargs,
- **{
- "start_side": "PRECEDING",
- "start": (
- "UNBOUNDED"
- if start <= Window.unboundedPreceding
- else F.lit(start).expression
- ),
- },
- }
- if end == Window.currentRow:
- kwargs["end"] = "CURRENT ROW"
- else:
- kwargs = {
- **kwargs,
- **{
- "end_side": "FOLLOWING",
- "end": (
- "UNBOUNDED" if end >= Window.unboundedFollowing else F.lit(end).expression
- ),
- },
- }
- return kwargs

  def rowsBetween(self, start: int, end: int) -> WindowSpec:
  window_spec = self.copy()
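The rewrite folds the duplicated start/end branches into a single `get_value_and_side` helper: `Window.currentRow` maps to `CURRENT ROW` with no side, negative offsets to `<n> PRECEDING` (or `UNBOUNDED PRECEDING` at `Window.unboundedPreceding`), and non-negative ones to `<n> FOLLOWING` (or `UNBOUNDED FOLLOWING`). A sketch of the boundaries a caller supplies, using the base `Window` the diff itself references (column name is hypothetical):

    from sqlframe.base.window import Window

    # -3 -> "3 PRECEDING"; Window.currentRow -> "CURRENT ROW" (no side keyword)
    trailing = Window.orderBy("ts").rowsBetween(-3, Window.currentRow)

    # Window.unboundedPreceding -> "UNBOUNDED PRECEDING"
    running = Window.orderBy("ts").rowsBetween(Window.unboundedPreceding, Window.currentRow)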
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/readwriter.py
@@ -92,6 +92,7 @@ class DuckDBDataFrameReader(
  if format == "delta":
  from_clause = f"delta_scan('{path}')"
  elif format:
+ options.pop("inferSchema", None)
  paths = ",".join([f"'{path}'" for path in ensure_list(path)])
  from_clause = f"read_{format}([{paths}], {to_csv(options)})"
  else:
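One practical effect: Spark-style reads that pass `inferSchema` no longer leak it into DuckDB's `read_csv(...)` options, where it is not a valid parameter. A usage sketch (the file name mirrors the `employee.csv` test fixture and is illustrative):

    from sqlframe.duckdb import DuckDBSession

    session = DuckDBSession()
    # "header" passes through into read_csv's options; "inferSchema" is
    # silently dropped since DuckDB infers column types on its own.
    df = session.read.load("employee.csv", format="csv", header=True, inferSchema=True)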
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe/duckdb/session.py
@@ -4,7 +4,7 @@ import typing as t
  from functools import cached_property

  from sqlframe.base.session import _BaseSession
- from sqlframe.base.util import soundex, verify_numpy_installed
+ from sqlframe.base.util import soundex
  from sqlframe.duckdb.catalog import DuckDBCatalog
  from sqlframe.duckdb.dataframe import DuckDBDataFrame
  from sqlframe.duckdb.readwriter import (
@@ -46,8 +46,6 @@ class DuckDBSession(
  if not hasattr(self, "_conn"):
  conn = conn or duckdb.connect()
  try:
- # Creating a function requires numpy to be installed so if they don't have it, we'll just skip it
- verify_numpy_installed()
  conn.create_function("SOUNDEX", lambda x: soundex(x), return_type=VARCHAR)
  except ImportError:
  pass
sqlframe-3.22.1/sqlframe/spark/readwriter.py (new file)
@@ -0,0 +1,163 @@
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
+
+ from __future__ import annotations
+
+ import typing as t
+
+ from sqlglot import exp
+ from sqlglot.helper import ensure_list
+
+ from sqlframe.base.readerwriter import (
+ _BaseDataFrameReader,
+ _BaseDataFrameWriter,
+ _infer_format,
+ )
+ from sqlframe.base.util import ensure_column_mapping, generate_random_identifier, to_csv
+
+ if t.TYPE_CHECKING:
+ from sqlframe.base._typing import OptionalPrimitiveType, PathOrPaths
+ from sqlframe.base.types import StructType
+ from sqlframe.spark.dataframe import SparkDataFrame
+ from sqlframe.spark.session import SparkSession
+ from sqlframe.spark.table import SparkTable
+
+
+ class SparkDataFrameReader(
+ _BaseDataFrameReader["SparkSession", "SparkDataFrame", "SparkTable"],
+ ):
+ def load(
+ self,
+ path: t.Optional[PathOrPaths] = None,
+ format: t.Optional[str] = None,
+ schema: t.Optional[t.Union[StructType, str]] = None,
+ **options: OptionalPrimitiveType,
+ ) -> SparkDataFrame:
+ """Loads data from a data source and returns it as a :class:`DataFrame`.
+
+ .. versionadded:: 1.4.0
+
+ .. versionchanged:: 3.4.0
+ Supports Spark Connect.
+
+ Parameters
+ ----------
+ path : str or list, t.Optional
+ t.Optional string or a list of string for file-system backed data sources.
+ format : str, t.Optional
+ t.Optional string for format of the data source. Default to 'parquet'.
+ schema : :class:`pyspark.sql.types.StructType` or str, t.Optional
+ t.Optional :class:`pyspark.sql.types.StructType` for the input schema
+ or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
+ **options : dict
+ all other string options
+
+ Examples
+ --------
+ Load a CSV file with format, schema and options specified.
+
+ >>> import tempfile
+ >>> with tempfile.TemporaryDirectory() as d:
+ ... # Write a DataFrame into a CSV file with a header
+ ... df = spark.createDataFrame([{"age": 100, "name": "Hyukjin Kwon"}])
+ ... df.write.option("header", True).mode("overwrite").format("csv").save(d)
+ ...
+ ... # Read the CSV file as a DataFrame with 'nullValue' option set to 'Hyukjin Kwon',
+ ... # and 'header' option set to `True`.
+ ... df = spark.read.load(
+ ... d, schema=df.schema, format="csv", nullValue="Hyukjin Kwon", header=True)
+ ... df.printSchema()
+ ... df.show()
+ root
+ |-- age: long (nullable = true)
+ |-- name: string (nullable = true)
+ +---+----+
+ |age|name|
+ +---+----+
+ |100|NULL|
+ +---+----+
+ """
+ assert path is not None, "path is required"
+ assert isinstance(path, str), "path must be a string"
+ format = format or self.state_format_to_read or _infer_format(path)
+ if schema:
+ column_mapping = ensure_column_mapping(schema)
+ select_column_mapping = column_mapping.copy()
+ select_columns = [x.expression for x in self._to_casted_columns(select_column_mapping)]
+
+ if hasattr(schema, "simpleString"):
+ schema = schema.simpleString()
+ else:
+ select_columns = [exp.Star()]
+
+ if format == "delta":
+ from_clause = f"delta.`{path}`"
+ elif format:
+ paths = ",".join([f"{path}" for path in ensure_list(path)])
+ tmp_view_key = options.get("_tmp_view_key_", f"{generate_random_identifier()}_vw")
+ options["_tmp_view_key_"] = tmp_view_key
+
+ format_options: dict[str, OptionalPrimitiveType] = {
+ k: v for k, v in options.items() if v is not None
+ }
+ format_options.pop("_tmp_view_key_")
+ format_options["path"] = paths
+ if schema:
+ format_options["schema"] = f"{schema}"
+ format_options.pop("inferSchema", None)
+ format_options = {key: f"'{val}'" for key, val in format_options.items()}
+ format_options_str = to_csv(format_options, " ")
+
+ tmp_view = f"CREATE OR REPLACE TEMPORARY VIEW {tmp_view_key} USING {format}" + (
+ f" OPTIONS ({format_options_str})" if format_options_str else ""
+ )
+ self.session.spark_session.sql(tmp_view).collect()
+
+ from_clause = f"{tmp_view_key}"
+ else:
+ from_clause = f"'{path}'"
+
+ df = self.session.sql(
+ exp.select(*select_columns).from_(from_clause, dialect=self.session.input_dialect),
+ qualify=False,
+ )
+ if select_columns == [exp.Star()] and df.schema:
+ return self.load(path=path, format=format, schema=df.schema, **options)
+ self.session._last_loaded_file = path # type: ignore
+ return df
+
+
+ class SparkDataFrameWriter(
+ _BaseDataFrameWriter["SparkSession", "SparkDataFrame"],
+ ):
+ def save(
+ self,
+ path: str,
+ mode: t.Optional[str] = None,
+ format: t.Optional[str] = None,
+ partitionBy: t.Optional[t.Union[str, t.List[str]]] = None,
+ **options,
+ ):
+ format = str(format or self._state_format_to_write)
+ self._write(path, mode, format, partitionBy=partitionBy, **options)
+
+ def _write(self, path: str, mode: t.Optional[str], format: str, **options):
+ spark_df = None
+ expressions = self._df._get_expressions()
+ for i, expression in enumerate(expressions):
+ if i < len(expressions) - 1:
+ self._df.session._collect(expressions)
+ else:
+ sql = self._df.session._to_sql(expression)
+ spark_df = self._session.spark_session.sql(sql)
+ if spark_df is not None:
+ options = {k: v for k, v in options.items() if v is not None}
+ mode = str(mode or self._mode or "default")
+ spark_writer = spark_df.write.format(format).mode(mode)
+ partition_columns = options.pop("partitionBy", None)
+ compression = options.pop("compression", None)
+ if partition_columns:
+ partition_columns = options.pop("partitionBy")
+ spark_writer = spark_writer.partitionBy(*partition_columns)
+ if compression:
+ spark_writer = spark_writer.option("compression", compression)
+ spark_writer.save(path=path, **options)
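Taken together: `load` materializes a `CREATE OR REPLACE TEMPORARY VIEW ... USING <format> [OPTIONS (...)]` statement and selects from that view (re-invoking itself once with the inferred schema when none was given), while `save` executes the accumulated expressions and hands the final SQL to a genuine PySpark `DataFrameWriter`. A usage sketch with hypothetical paths:

    from sqlframe.spark import SparkSession

    session = SparkSession()
    # Reader: a temp view is created over the CSV files via USING csv OPTIONS (...).
    df = session.read.load("/data/employee", format="csv", header=True)
    # Writer: format() records the target format; save() delegates to pyspark's writer.
    df.write.format("parquet").mode("overwrite").save("/data/employee_pq")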
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 3.21.1
+ Version: 3.22.1
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-3.21.1 → sqlframe-3.22.1}/sqlframe.egg-info/requires.txt
@@ -21,7 +21,7 @@ psycopg<4,>=3.1
  pyarrow<20,>=10
  pyspark<3.6,>=2
  pytest-forked
- pytest-postgresql<7,>=6
+ pytest-postgresql<8,>=6
  pytest-xdist<3.7,>=3.6
  pytest<8.4,>=8.2.0
  ruff<0.10,>=0.4.4
{sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/databricks/test_databricks_dataframe.py
@@ -71,16 +71,16 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `map<string,bigint>_col`: map<string, bigint> (nullable = true)
+ |-- map<string,bigint>_col: map<string, bigint> (nullable = true)
  | |-- key: string (nullable = true)
  | |-- value: bigint (nullable = true)
- |-- `array<struct<a:bigint,b:bigint>>`: array<struct<a: bigint, b: bigint>> (nullable = true)
+ |-- array<struct<a:bigint,b:bigint>>: array<struct<a: bigint, b: bigint>> (nullable = true)
  | |-- element: struct<a: bigint, b: bigint> (nullable = true)
  | | |-- a: bigint (nullable = true)
  | | |-- b: bigint (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  | |-- element: bigint (nullable = true)
- |-- `struct<a:bigint>_col`: struct<a: bigint> (nullable = true)
+ |-- struct<a:bigint>_col: struct<a: bigint> (nullable = true)
  | |-- a: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)
@@ -126,12 +126,12 @@ def test_schema_nested(databricks_datatypes: DatabricksDataFrame):
  assert struct_fields[1].dataType == types.DoubleType()
  assert struct_fields[2].name == "string_col"
  assert struct_fields[2].dataType == types.StringType()
- assert struct_fields[3].name == "`map<string,bigint>_col`"
+ assert struct_fields[3].name == "map<string,bigint>_col"
  assert struct_fields[3].dataType == types.MapType(
  types.StringType(),
  types.LongType(),
  )
- assert struct_fields[4].name == "`array<struct<a:bigint,b:bigint>>`"
+ assert struct_fields[4].name == "array<struct<a:bigint,b:bigint>>"
  assert struct_fields[4].dataType == types.ArrayType(
  types.StructType(
  [
@@ -146,11 +146,11 @@ def test_schema_nested(databricks_datatypes: DatabricksDataFrame):
  ]
  ),
  )
- assert struct_fields[5].name == "`array<bigint>_col`"
+ assert struct_fields[5].name == "array<bigint>_col"
  assert struct_fields[5].dataType == types.ArrayType(
  types.LongType(),
  )
- assert struct_fields[6].name == "`struct<a:bigint>_col`"
+ assert struct_fields[6].name == "struct<a:bigint>_col"
  assert struct_fields[6].dataType == types.StructType(
  [
  types.StructField(
{sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/duck/test_duckdb_dataframe.py
@@ -67,16 +67,16 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `map<string,bigint>_col`: map<string, bigint> (nullable = true)
+ |-- map<string,bigint>_col: map<string, bigint> (nullable = true)
  | |-- key: string (nullable = true)
  | |-- value: bigint (nullable = true)
- |-- `array<struct<a:bigint,b:bigint>>`: array<struct<a: bigint, b: bigint>> (nullable = true)
+ |-- array<struct<a:bigint,b:bigint>>: array<struct<a: bigint, b: bigint>> (nullable = true)
  | |-- element: struct<a: bigint, b: bigint> (nullable = true)
  | | |-- a: bigint (nullable = true)
  | | |-- b: bigint (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  | |-- element: bigint (nullable = true)
- |-- `struct<a:bigint>_col`: struct<a: bigint> (nullable = true)
+ |-- struct<a:bigint>_col: struct<a: bigint> (nullable = true)
  | |-- a: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)
@@ -122,12 +122,12 @@ def test_schema_nested(duckdb_datatypes: DuckDBDataFrame):
  assert struct_fields[1].dataType == types.DoubleType()
  assert struct_fields[2].name == "string_col"
  assert struct_fields[2].dataType == types.StringType()
- assert struct_fields[3].name == "`map<string,bigint>_col`"
+ assert struct_fields[3].name == "map<string,bigint>_col"
  assert struct_fields[3].dataType == types.MapType(
  types.StringType(),
  types.LongType(),
  )
- assert struct_fields[4].name == "`array<struct<a:bigint,b:bigint>>`"
+ assert struct_fields[4].name == "array<struct<a:bigint,b:bigint>>"
  assert struct_fields[4].dataType == types.ArrayType(
  types.StructType(
  [
@@ -142,11 +142,11 @@ def test_schema_nested(duckdb_datatypes: DuckDBDataFrame):
  ]
  ),
  )
- assert struct_fields[5].name == "`array<bigint>_col`"
+ assert struct_fields[5].name == "array<bigint>_col"
  assert struct_fields[5].dataType == types.ArrayType(
  types.LongType(),
  )
- assert struct_fields[6].name == "`struct<a:bigint>_col`"
+ assert struct_fields[6].name == "struct<a:bigint>_col"
  assert struct_fields[6].dataType == types.StructType(
  [
  types.StructField(
{sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/postgres/test_postgres_dataframe.py
@@ -62,7 +62,7 @@ root
  |-- bigint_col: bigint (nullable = true)
  |-- double_col: double (nullable = true)
  |-- string_col: string (nullable = true)
- |-- `array<bigint>_col`: array<bigint> (nullable = true)
+ |-- array<bigint>_col: array<bigint> (nullable = true)
  | |-- element: bigint (nullable = true)
  |-- date_col: date (nullable = true)
  |-- timestamp_col: timestamp (nullable = true)
@@ -108,7 +108,7 @@ def test_schema_nested(postgres_datatypes: PostgresDataFrame):
  assert struct_fields[1].dataType == types.DoubleType()
  assert struct_fields[2].name == "string_col"
  assert struct_fields[2].dataType == types.StringType()
- assert struct_fields[3].name == "`array<bigint>_col`"
+ assert struct_fields[3].name == "array<bigint>_col"
  assert struct_fields[3].dataType == types.ArrayType(
  types.LongType(),
  )
{sqlframe-3.21.1 → sqlframe-3.22.1}/tests/integration/engines/test_engine_dataframe.py
@@ -3,7 +3,7 @@ from __future__ import annotations
  import typing as t

  from sqlframe.base.session import _BaseSession
- from sqlframe.base.types import Row
+ from sqlframe.base.types import DoubleType, LongType, Row, StructField, StructType
  from sqlframe.snowflake import SnowflakeSession
  from sqlframe.spark import SparkSession

@@ -185,3 +185,7 @@ def test_show_from_create_with_space_with_schema(get_session: t.Callable[[], _Ba
  Row(**{"an tan": 3, "b": 4, "z": 8.0}),
  Row(**{"an tan": 2, "b": 6, "z": 9.0}),
  ]
+ assert df.schema.fields[0].name == "an tan"
+ df.printSchema()
+ captured = capsys.readouterr()
+ assert "|-- an tan:" in captured.out.strip()