sqlframe 1.8.0__tar.gz → 1.9.0__tar.gz

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two published versions.
Files changed (208)
  1. {sqlframe-1.8.0 → sqlframe-1.9.0}/PKG-INFO +1 -1
  2. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/spark.md +3 -0
  3. {sqlframe-1.8.0 → sqlframe-1.9.0}/setup.py +4 -4
  4. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/_version.py +2 -2
  5. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/column.py +1 -1
  6. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/functions.py +42 -7
  7. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/functions.pyi +3 -0
  8. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe.egg-info/PKG-INFO +1 -1
  9. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe.egg-info/requires.txt +4 -4
  10. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/conftest.py +1 -1
  11. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_session.py +1 -1
  12. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/test_int_functions.py +151 -2
  13. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_column.py +5 -1
  14. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_functions.py +69 -4
  15. {sqlframe-1.8.0 → sqlframe-1.9.0}/.github/CODEOWNERS +0 -0
  16. {sqlframe-1.8.0 → sqlframe-1.9.0}/.github/workflows/main.workflow.yaml +0 -0
  17. {sqlframe-1.8.0 → sqlframe-1.9.0}/.github/workflows/publish.workflow.yaml +0 -0
  18. {sqlframe-1.8.0 → sqlframe-1.9.0}/.gitignore +0 -0
  19. {sqlframe-1.8.0 → sqlframe-1.9.0}/.pre-commit-config.yaml +0 -0
  20. {sqlframe-1.8.0 → sqlframe-1.9.0}/.readthedocs.yaml +0 -0
  21. {sqlframe-1.8.0 → sqlframe-1.9.0}/LICENSE +0 -0
  22. {sqlframe-1.8.0 → sqlframe-1.9.0}/Makefile +0 -0
  23. {sqlframe-1.8.0 → sqlframe-1.9.0}/README.md +0 -0
  24. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/add_chatgpt_support.md +0 -0
  25. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  26. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  27. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  28. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  29. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  30. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  31. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  32. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  33. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/but_wait_theres_more.gif +0 -0
  34. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/cake.gif +0 -0
  35. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  36. {sqlframe-1.8.0 → sqlframe-1.9.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  37. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/bigquery.md +0 -0
  38. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/configuration.md +0 -0
  39. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/bigquery.md +0 -0
  40. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/duckdb.md +0 -0
  41. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/images/SF.png +0 -0
  42. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/images/favicon.png +0 -0
  43. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/images/favicon_old.png +0 -0
  44. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  45. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/images/sqlframe_logo.png +0 -0
  46. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/docs/postgres.md +0 -0
  47. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/duckdb.md +0 -0
  48. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/images/SF.png +0 -0
  49. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/images/favicon.png +0 -0
  50. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/images/favicon_old.png +0 -0
  51. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/images/sqlframe_diagram.png +0 -0
  52. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/images/sqlframe_logo.png +0 -0
  53. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/index.md +0 -0
  54. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/postgres.md +0 -0
  55. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/requirements.txt +0 -0
  56. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/snowflake.md +0 -0
  57. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/standalone.md +0 -0
  58. {sqlframe-1.8.0 → sqlframe-1.9.0}/docs/stylesheets/extra.css +0 -0
  59. {sqlframe-1.8.0 → sqlframe-1.9.0}/mkdocs.yml +0 -0
  60. {sqlframe-1.8.0 → sqlframe-1.9.0}/pytest.ini +0 -0
  61. {sqlframe-1.8.0 → sqlframe-1.9.0}/renovate.json +0 -0
  62. {sqlframe-1.8.0 → sqlframe-1.9.0}/setup.cfg +0 -0
  63. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/LICENSE +0 -0
  64. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/__init__.py +0 -0
  65. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/__init__.py +0 -0
  66. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/_typing.py +0 -0
  67. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/catalog.py +0 -0
  68. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/dataframe.py +0 -0
  69. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/decorators.py +0 -0
  70. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/exceptions.py +0 -0
  71. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/function_alternatives.py +0 -0
  72. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/group.py +0 -0
  73. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/mixins/__init__.py +0 -0
  74. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  75. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  76. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  77. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/normalize.py +0 -0
  78. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/operations.py +0 -0
  79. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/readerwriter.py +0 -0
  80. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/session.py +0 -0
  81. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/transforms.py +0 -0
  82. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/types.py +0 -0
  83. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/util.py +0 -0
  84. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/base/window.py +0 -0
  85. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/__init__.py +0 -0
  86. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/catalog.py +0 -0
  87. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/column.py +0 -0
  88. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/dataframe.py +0 -0
  89. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/functions.py +0 -0
  90. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/functions.pyi +0 -0
  91. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/group.py +0 -0
  92. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/readwriter.py +0 -0
  93. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/session.py +0 -0
  94. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/types.py +0 -0
  95. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/bigquery/window.py +0 -0
  96. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/__init__.py +0 -0
  97. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/catalog.py +0 -0
  98. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/column.py +0 -0
  99. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/dataframe.py +0 -0
  100. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/functions.py +0 -0
  101. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/functions.pyi +0 -0
  102. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/group.py +0 -0
  103. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/readwriter.py +0 -0
  104. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/session.py +0 -0
  105. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/types.py +0 -0
  106. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/duckdb/window.py +0 -0
  107. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/__init__.py +0 -0
  108. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/catalog.py +0 -0
  109. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/column.py +0 -0
  110. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/dataframe.py +0 -0
  111. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/functions.py +0 -0
  112. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/functions.pyi +0 -0
  113. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/group.py +0 -0
  114. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/readwriter.py +0 -0
  115. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/session.py +0 -0
  116. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/types.py +0 -0
  117. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/postgres/window.py +0 -0
  118. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/__init__.py +0 -0
  119. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/catalog.py +0 -0
  120. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/column.py +0 -0
  121. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/dataframe.py +0 -0
  122. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/functions.py +0 -0
  123. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/group.py +0 -0
  124. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/readwriter.py +0 -0
  125. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/session.py +0 -0
  126. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/types.py +0 -0
  127. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/redshift/window.py +0 -0
  128. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/__init__.py +0 -0
  129. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/catalog.py +0 -0
  130. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/column.py +0 -0
  131. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/dataframe.py +0 -0
  132. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/functions.py +0 -0
  133. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/functions.pyi +0 -0
  134. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/group.py +0 -0
  135. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/readwriter.py +0 -0
  136. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/session.py +0 -0
  137. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/types.py +0 -0
  138. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/snowflake/window.py +0 -0
  139. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/__init__.py +0 -0
  140. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/catalog.py +0 -0
  141. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/column.py +0 -0
  142. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/dataframe.py +0 -0
  143. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/functions.py +0 -0
  144. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/group.py +0 -0
  145. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/readwriter.py +0 -0
  146. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/session.py +0 -0
  147. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/types.py +0 -0
  148. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/spark/window.py +0 -0
  149. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/__init__.py +0 -0
  150. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/catalog.py +0 -0
  151. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/column.py +0 -0
  152. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/dataframe.py +0 -0
  153. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/functions.py +0 -0
  154. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/group.py +0 -0
  155. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/readwriter.py +0 -0
  156. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/session.py +0 -0
  157. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/types.py +0 -0
  158. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe/standalone/window.py +0 -0
  159. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  160. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  161. {sqlframe-1.8.0 → sqlframe-1.9.0}/sqlframe.egg-info/top_level.txt +0 -0
  162. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/__init__.py +0 -0
  163. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/common_fixtures.py +0 -0
  164. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/fixtures/employee.csv +0 -0
  165. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/fixtures/employee.json +0 -0
  166. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/fixtures/employee.parquet +0 -0
  167. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/fixtures/employee_extra_line.csv +0 -0
  168. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/__init__.py +0 -0
  169. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/__init__.py +0 -0
  170. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  171. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  172. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  173. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/duck/__init__.py +0 -0
  174. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  175. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  176. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  177. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/postgres/__init__.py +0 -0
  178. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  179. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  180. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  181. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/redshift/__init__.py +0 -0
  182. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  183. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  184. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  185. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  186. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  187. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/spark/__init__.py +0 -0
  188. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  189. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  190. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/test_engine_reader.py +0 -0
  191. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/test_engine_session.py +0 -0
  192. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/engines/test_engine_writer.py +0 -0
  193. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/fixtures.py +0 -0
  194. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/test_int_dataframe.py +0 -0
  195. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  196. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/test_int_grouped_data.py +0 -0
  197. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/integration/test_int_session.py +0 -0
  198. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/types.py +0 -0
  199. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/__init__.py +0 -0
  200. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/__init__.py +0 -0
  201. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/fixtures.py +0 -0
  202. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_dataframe.py +0 -0
  203. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  204. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_session.py +0 -0
  205. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  206. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_types.py +0 -0
  207. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/standalone/test_window.py +0 -0
  208. {sqlframe-1.8.0 → sqlframe-1.9.0}/tests/unit/test_util.py +0 -0
--- sqlframe-1.8.0/PKG-INFO
+++ sqlframe-1.9.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.8.0
+Version: 1.9.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
--- sqlframe-1.8.0/docs/spark.md
+++ sqlframe-1.9.0/docs/spark.md
@@ -183,6 +183,8 @@ df.show(5)
 * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
 * [acosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acosh.html)
 * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
+* [aes_encrypt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aes_encrypt.html)
+* [aes_decrypt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aes_decrypt.html)
 * [aggregate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aggregate.html)
 * [approxCountDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approxCountDistinct.html)
 * [approx_count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approx_count_distinct.html)
@@ -398,6 +400,7 @@ df.show(5)
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
+* [to_binary](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_binary.html)
 * [to_csv](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_csv.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_json](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_json.html)
--- sqlframe-1.8.0/setup.py
+++ sqlframe-1.9.0/setup.py
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.11.0",
-        "sqlglot>=24.0.0,<25.1",
+        "sqlglot>=24.0.0,<25.4",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
@@ -31,7 +31,7 @@ setup(
         "dev": [
             "duckdb>=0.9,<1.1",
             "mypy>=1.10.0,<1.11",
-            "openai>=1.30,<1.34",
+            "openai>=1.30,<1.36",
             "pandas>=2,<3",
             "pandas-stubs>=2,<3",
             "psycopg>=3.1,<4",
@@ -57,7 +57,7 @@ setup(
             "pandas>=2,<3",
         ],
         "openai": [
-            "openai>=1.30,<1.34",
+            "openai>=1.30,<1.36",
         ],
         "pandas": [
             "pandas>=2,<3",
@@ -69,7 +69,7 @@ setup(
             "redshift_connector>=2.1.1,<2.2.0",
         ],
         "snowflake": [
-            "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.11",
+            "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.12",
         ],
         "spark": [
             "pyspark>=2,<3.6",
--- sqlframe-1.8.0/sqlframe/_version.py
+++ sqlframe-1.9.0/sqlframe/_version.py
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.8.0'
-__version_tuple__ = version_tuple = (1, 8, 0)
+__version__ = version = '1.9.0'
+__version_tuple__ = version_tuple = (1, 9, 0)
--- sqlframe-1.8.0/sqlframe/base/column.py
+++ sqlframe-1.9.0/sqlframe/base/column.py
@@ -229,7 +229,7 @@ class Column:
         return Column(op)
 
     def unary_op(self, klass: t.Callable, **kwargs) -> Column:
-        return Column(klass(this=self.column_expression, **kwargs))
+        return Column(klass(this=exp.Paren(this=self.column_expression), **kwargs))
 
     @property
     def is_alias(self):
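
This change wraps the operand of every unary operator in exp.Paren, so the generated SQL now parenthesizes whatever the operator applies to. A small sketch of the effect, mirroring the test_column.py changes further down (assuming the standalone functions module imported as F, as those tests do):

from sqlframe.standalone import functions as F

# A bare column now gains parentheses around the operand.
assert (~F.col("cola")).sql() == "NOT (cola)"

# For compound expressions this is the real fix: without the Paren
# wrapper, NOT would bind only to the first operand of the OR.
assert (~(F.col("cola") | F.col("colb"))).sql() == "NOT (cola OR colb)"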
--- sqlframe-1.8.0/sqlframe/base/functions.py
+++ sqlframe-1.9.0/sqlframe/base/functions.py
@@ -1692,14 +1692,9 @@ def make_interval(
     mins: t.Optional[ColumnOrName] = None,
     secs: t.Optional[ColumnOrName] = None,
 ) -> Column:
-    values = [years, months, weeks, days, hours, mins, secs]
-    for value in reversed(values.copy()):
-        if value is not None:
-            break
-        values = values[:-1]
-    else:
+    columns = _ensure_column_of_optionals([years, months, weeks, days, hours, mins, secs])
+    if not columns:
         raise ValueError("At least one value must be provided")
-    columns = [Column.ensure_col(x) if x is not None else lit(None) for x in values]
     return Column.invoke_anonymous_function(columns[0], "MAKE_INTERVAL", *columns[1:])
 
 
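
make_interval keeps its behavior — trailing unset units are trimmed, interior gaps become NULL — but now delegates that logic to the _ensure_column_of_optionals helper added at the bottom of this file. A rough sketch of the resulting SQL (inferred from the helper's semantics, not asserted anywhere in this diff):

from sqlframe.standalone import functions as SF

# Trailing None arguments are dropped before MAKE_INTERVAL is emitted.
SF.make_interval(SF.lit(1), SF.lit(2)).sql()  # MAKE_INTERVAL(1, 2)

# A skipped middle unit is back-filled with a NULL literal.
SF.make_interval(years=SF.lit(1), days=SF.lit(3)).sql()  # MAKE_INTERVAL(1, NULL, NULL, 3)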
@@ -1747,6 +1742,38 @@ def try_to_number(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
     return Column.invoke_anonymous_function(col, "TRY_TO_NUMBER")
 
 
+@meta(unsupported_engines="*")
+def aes_decrypt(
+    input: ColumnOrName,
+    key: ColumnOrName,
+    mode: t.Optional[ColumnOrName] = None,
+    padding: t.Optional[ColumnOrName] = None,
+    aad: t.Optional[ColumnOrName] = None,
+) -> Column:
+    columns = _ensure_column_of_optionals([key, mode, padding, aad])
+    return Column.invoke_anonymous_function(input, "AES_DECRYPT", *columns)
+
+
+@meta(unsupported_engines="*")
+def aes_encrypt(
+    input: ColumnOrName,
+    key: ColumnOrName,
+    mode: t.Optional[ColumnOrName] = None,
+    padding: t.Optional[ColumnOrName] = None,
+    iv: t.Optional[ColumnOrName] = None,
+    aad: t.Optional[ColumnOrName] = None,
+) -> Column:
+    columns = _ensure_column_of_optionals([key, mode, padding, iv, aad])
+    return Column.invoke_anonymous_function(input, "AES_ENCRYPT", *columns)
+
+
+@meta(unsupported_engines="*")
+def to_binary(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TO_BINARY", format)
+    return Column.invoke_anonymous_function(col, "TO_BINARY")
+
+
 @meta()
 def _lambda_quoted(value: str) -> t.Optional[bool]:
     return False if value == "_" else None
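
All three additions compile to anonymous SQL calls (AES_DECRYPT, AES_ENCRYPT, TO_BINARY), and @meta(unsupported_engines="*") marks them as Spark-only for now. A usage sketch condensed from the integration tests added later in this diff, assuming a Spark-backed session created through the builder API:

from sqlframe.spark.session import SparkSession
from sqlframe.spark import functions as F

session = SparkSession.builder.getOrCreate()

# With an explicit format the input is decoded as text...
df = session.createDataFrame([("abc",)], ["e"])
df.select(F.to_binary(df.e, F.lit("utf-8")).alias("r")).first()[0]  # bytearray(b"abc")

# ...while the default format treats the input as hex.
df = session.createDataFrame([("414243",)], ["e"])
df.select(F.to_binary(df.e).alias("r")).first()[0]  # bytearray(b"ABC")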
@@ -1762,3 +1789,11 @@ def _get_lambda_from_func(lambda_expression: t.Callable):
         this=lambda_expression(*[Column(x) for x in variables]).expression,
         expressions=variables,
     )
+
+
+def _ensure_column_of_optionals(optionals: t.List[t.Optional[ColumnOrName]]) -> t.List[Column]:
+    for value in reversed(optionals.copy()):
+        if value is not None:
+            break
+        optionals = optionals[:-1]
+    return [Column.ensure_col(x) if x is not None else lit(None) for x in optionals]
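
The helper trims trailing Nones, then converts what remains to columns, substituting lit(None) for interior gaps — which is where the NULL placeholders in the generated SQL come from. Two cases taken from the unit tests added at the end of this diff (using the standalone functions module as SF):

# Trailing optionals simply disappear from the call...
SF.aes_decrypt("cola", SF.col("colb")).sql()
# AES_DECRYPT(cola, colb)

# ...while a keyword argument past a skipped one forces a NULL filler.
SF.aes_decrypt(SF.col("cola"), SF.col("colb"), padding="colc").sql()
# AES_DECRYPT(cola, colb, NULL, colc)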
--- sqlframe-1.8.0/sqlframe/spark/functions.pyi
+++ sqlframe-1.9.0/sqlframe/spark/functions.pyi
@@ -7,6 +7,8 @@ from sqlframe.base.functions import (
     abs as abs,
     acos as acos,
     acosh as acosh,
+    aes_encrypt as aes_encrypt,
+    aes_decrypt as aes_decrypt,
     aggregate as aggregate,
     approxCountDistinct as approxCountDistinct,
     approx_count_distinct as approx_count_distinct,
@@ -218,6 +220,7 @@ from sqlframe.base.functions import (
     timestamp_seconds as timestamp_seconds,
     toDegrees as toDegrees,
     toRadians as toRadians,
+    to_binary as to_binary,
     to_csv as to_csv,
     to_date as to_date,
     to_json as to_json,
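
With these stub entries the new functions resolve from the Spark namespace like any other re-export; a minimal sketch:

from sqlframe.spark import functions as F

# Resolves to the implementation added in sqlframe/base/functions.py above;
# the .pyi entry is what makes it visible to mypy and other type checkers.
expr = F.aes_encrypt(F.col("input"), F.col("key"))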
--- sqlframe-1.8.0/sqlframe.egg-info/PKG-INFO
+++ sqlframe-1.9.0/sqlframe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.8.0
+Version: 1.9.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
--- sqlframe-1.8.0/sqlframe.egg-info/requires.txt
+++ sqlframe-1.9.0/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
 prettytable<3.11.0
-sqlglot<25.1,>=24.0.0
+sqlglot<25.4,>=24.0.0
 typing_extensions<5,>=4.8
 
 [bigquery]
@@ -9,7 +9,7 @@ google-cloud-bigquery[pandas]<4,>=3
 [dev]
 duckdb<1.1,>=0.9
 mypy<1.11,>=1.10.0
-openai<1.34,>=1.30
+openai<1.36,>=1.30
 pandas-stubs<3,>=2
 pandas<3,>=2
 psycopg<4,>=3.1
@@ -39,7 +39,7 @@ duckdb<1.1,>=0.9
 pandas<3,>=2
 
 [openai]
-openai<1.34,>=1.30
+openai<1.36,>=1.30
 
 [pandas]
 pandas<3,>=2
@@ -51,7 +51,7 @@ psycopg2<3,>=2.8
 redshift_connector<2.2.0,>=2.1.1
 
 [snowflake]
-snowflake-connector-python[secure-local-storage]<3.11,>=3.10.0
+snowflake-connector-python[secure-local-storage]<3.12,>=3.10.0
 
 [spark]
 pyspark<3.6,>=2
--- sqlframe-1.8.0/tests/conftest.py
+++ sqlframe-1.9.0/tests/conftest.py
@@ -7,7 +7,7 @@ import pytest
 
 def pytest_collection_modifyitems(items, *args, **kwargs):
     for item in items:
-        if not list(item.iter_markers()):
+        if not [x for x in item.iter_markers() if x.name != "parametrize"]:
             item.add_marker("fast")
 
 
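
Previously any marker — including the implicit one that @pytest.mark.parametrize attaches — stopped a test from being auto-tagged fast. The new comprehension ignores the parametrize marker, so the parametrized unit tests added in this release are still collected as fast. A sketch of the effect:

import pytest

@pytest.mark.parametrize("x", [1, 2])
def test_example(x):
    # Carries only the implicit "parametrize" marker, so the hook
    # above now adds the "fast" marker to it.
    assert x > 0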
--- sqlframe-1.8.0/tests/integration/engines/duck/test_duckdb_session.py
+++ sqlframe-1.9.0/tests/integration/engines/duck/test_duckdb_session.py
@@ -10,4 +10,4 @@ def test_session_from_config():
     conn.execute("CREATE TABLE test_table (cola INT, colb STRING)")
     session = DuckDBSession.builder.config("sqlframe.conn", conn).getOrCreate()
     columns = session.catalog.get_columns("test_table")
-    assert columns == {'"cola"': exp.DataType.build("INT"), '"colb"': exp.DataType.build("VARCHAR")}
+    assert columns == {'"cola"': exp.DataType.build("INT"), '"colb"': exp.DataType.build("TEXT")}
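
The VARCHAR → TEXT change likely tracks the sqlglot bump in setup.py: recent sqlglot treats VARCHAR as an alias of DuckDB's canonical TEXT string type, so the catalog now reports TEXT. A hedged illustration (the exact behavior depends on the installed sqlglot version):

from sqlglot import exp

# Under the newer sqlglot pin, building the type through the DuckDB
# dialect is expected to normalize VARCHAR to Type.TEXT.
exp.DataType.build("VARCHAR", dialect="duckdb")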
--- sqlframe-1.8.0/tests/integration/engines/test_int_functions.py
+++ sqlframe-1.9.0/tests/integration/engines/test_int_functions.py
@@ -194,7 +194,7 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
     if isinstance(session, DuckDBSession):
         if expected == "binary":
             pytest.skip("DuckDB doesn't support binary")
-        expected = expected.replace("string", "varchar")
+        expected = expected.replace("string", "varchar").replace("struct<a:", "struct<a ")
     if isinstance(session, BigQuerySession):
         if expected.startswith("map"):
             pytest.skip("BigQuery doesn't support map types")
@@ -202,8 +202,12 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
         expected = expected.split("<")[0]
         if expected == "binary":
             pytest.skip("BigQuery doesn't support binary")
+        if expected == "timestamp":
+            expected = "datetime"
     result = df.select(typeof("col").alias("test")).first()[0]
-    assert exp.DataType.build(result, dialect=dialect) == exp.DataType.build(expected)
+    assert exp.DataType.build(result, dialect=dialect) == exp.DataType.build(
+        expected, dialect=dialect
+    )
 
 
 def test_alias(get_session_and_func):
@@ -3071,3 +3075,148 @@ def test_try_to_number(get_session_and_func, get_func):
     else:
         expected = Decimal("78.12")
     assert actual == expected
+
+
+def test_to_binary(get_session_and_func, get_func):
+    session, to_binary = get_session_and_func("to_binary")
+    lit = get_func("lit", session)
+    df = session.createDataFrame([("abc",)], ["e"])
+    assert df.select(to_binary(df.e, lit("utf-8")).alias("r")).first()[0] == bytearray(b"abc")
+    df = session.createDataFrame([("414243",)], ["e"])
+    assert df.select(to_binary(df.e).alias("r")).first()[0] == bytearray(b"ABC")
+
+
+def test_aes_decrypt(get_session_and_func, get_func):
+    session, aes_decrypt = get_session_and_func("aes_decrypt")
+    unhex = get_func("unhex", session)
+    unbase64 = get_func("unbase64", session)
+    df = session.createDataFrame(
+        [
+            (
+                "AAAAAAAAAAAAAAAAQiYi+sTLm7KD9UcZ2nlRdYDe/PX4",
+                "abcdefghijklmnop12345678ABCDEFGH",
+                "GCM",
+                "DEFAULT",
+                "This is an AAD mixed into the input",
+            )
+        ],
+        ["input", "key", "mode", "padding", "aad"],
+    )
+    assert df.select(
+        aes_decrypt(unbase64(df.input), df.key, df.mode, df.padding, df.aad).alias("r")
+    ).first()[0] == bytearray(b"Spark")
+    df = session.createDataFrame(
+        [
+            (
+                "AAAAAAAAAAAAAAAAAAAAAPSd4mWyMZ5mhvjiAPQJnfg=",
+                "abcdefghijklmnop12345678ABCDEFGH",
+                "CBC",
+                "DEFAULT",
+            )
+        ],
+        ["input", "key", "mode", "padding"],
+    )
+    assert df.select(
+        aes_decrypt(unbase64(df.input), df.key, df.mode, df.padding).alias("r")
+    ).first()[0] == bytearray(b"Spark")
+    assert df.select(aes_decrypt(unbase64(df.input), df.key, df.mode).alias("r")).first()[
+        0
+    ] == bytearray(b"Spark")
+    df = session.createDataFrame(
+        [
+            (
+                "83F16B2AA704794132802D248E6BFD4E380078182D1544813898AC97E709B28A94",
+                "0000111122223333",
+            )
+        ],
+        ["input", "key"],
+    )
+    assert df.select(aes_decrypt(unhex(df.input), df.key).alias("r")).first()[0] == bytearray(
+        b"Spark"
+    )
+
+
+def test_aes_encrypt(get_session_and_func, get_func):
+    session, aes_encrypt = get_session_and_func("aes_encrypt")
+    to_binary = get_func("to_binary", session)
+    base64 = get_func("base64", session)
+    unbase64 = get_func("unbase64", session)
+    lit = get_func("lit", session)
+    aes_decrypt = get_func("aes_decrypt", session)
+    df = session.createDataFrame(
+        [
+            (
+                "Spark",
+                "abcdefghijklmnop12345678ABCDEFGH",
+                "GCM",
+                "DEFAULT",
+                "000000000000000000000000",
+                "This is an AAD mixed into the input",
+            )
+        ],
+        ["input", "key", "mode", "padding", "iv", "aad"],
+    )
+    assert (
+        df.select(
+            base64(
+                aes_encrypt(
+                    df.input, df.key, df.mode, df.padding, to_binary(df.iv, lit("hex")), df.aad
+                )
+            ).alias("r")
+        ).first()[0]
+        == "AAAAAAAAAAAAAAAAQiYi+sTLm7KD9UcZ2nlRdYDe/PX4"
+    )
+    assert (
+        df.select(
+            base64(
+                aes_encrypt(df.input, df.key, df.mode, df.padding, to_binary(df.iv, lit("hex")))
+            ).alias("r")
+        ).first()[0]
+        == "AAAAAAAAAAAAAAAAQiYi+sRNYDAOTjdSEcYBFsAWPL1f"
+    )
+    df = session.createDataFrame(
+        [
+            (
+                "Spark SQL",
+                "1234567890abcdef",
+                "ECB",
+                "PKCS",
+            )
+        ],
+        ["input", "key", "mode", "padding"],
+    )
+    assert df.select(
+        aes_decrypt(
+            aes_encrypt(df.input, df.key, df.mode, df.padding), df.key, df.mode, df.padding
+        ).alias("r")
+    ).first()[0] == bytearray(b"Spark SQL")
+    df = session.createDataFrame(
+        [
+            (
+                "Spark SQL",
+                "0000111122223333",
+                "ECB",
+            )
+        ],
+        ["input", "key", "mode"],
+    )
+    assert df.select(
+        aes_decrypt(aes_encrypt(df.input, df.key, df.mode), df.key, df.mode).alias("r")
+    ).first()[0] == bytearray(b"Spark SQL")
+    df = session.createDataFrame(
+        [
+            (
+                "Spark SQL",
+                "abcdefghijklmnop",
+            )
+        ],
+        ["input", "key"],
+    )
+    assert (
+        df.select(
+            aes_decrypt(unbase64(base64(aes_encrypt(df.input, df.key))), df.key)
+            .cast("STRING")
+            .alias("r")
+        ).first()[0]
+        == "Spark SQL"
+    )
--- sqlframe-1.8.0/tests/unit/standalone/test_column.py
+++ sqlframe-1.9.0/tests/unit/standalone/test_column.py
@@ -85,7 +85,11 @@ def test_rpow():
 
 
 def test_invert():
-    assert (~F.col("cola")).sql() == "NOT cola"
+    assert (~F.col("cola")).sql() == "NOT (cola)"
+
+
+def test_invert_conjuction():
+    assert (~(F.col("cola") | F.col("colb"))).sql() == "NOT (cola OR colb)"
 
 
 def test_paren():
--- sqlframe-1.8.0/tests/unit/standalone/test_functions.py
+++ sqlframe-1.9.0/tests/unit/standalone/test_functions.py
@@ -2348,14 +2348,14 @@ def test_to_json(expression, expected):
     "expression, expected",
     [
         (
-            SF.schema_of_json("cola", dict(timestampFormat="dd/MM/yyyy")),
+            SF.schema_of_json(SF.col("cola"), dict(timestampFormat="dd/MM/yyyy")),
             "SCHEMA_OF_JSON(cola, MAP('timestampFormat', 'dd/MM/yyyy'))",
         ),
         (
             SF.schema_of_json(SF.col("cola"), dict(timestampFormat="dd/MM/yyyy")),
             "SCHEMA_OF_JSON(cola, MAP('timestampFormat', 'dd/MM/yyyy'))",
         ),
-        (SF.schema_of_json("cola"), "SCHEMA_OF_JSON(cola)"),
+        (SF.schema_of_json(SF.col("cola")), "SCHEMA_OF_JSON(cola)"),
     ],
 )
 def test_schema_of_json(expression, expected):
@@ -2366,14 +2366,14 @@ def test_schema_of_json(expression, expected):
     "expression, expected",
     [
         (
-            SF.schema_of_csv("cola", dict(timestampFormat="dd/MM/yyyy")),
+            SF.schema_of_csv(SF.col("cola"), dict(timestampFormat="dd/MM/yyyy")),
             "SCHEMA_OF_CSV(cola, MAP('timestampFormat', 'dd/MM/yyyy'))",
         ),
         (
             SF.schema_of_csv(SF.col("cola"), dict(timestampFormat="dd/MM/yyyy")),
             "SCHEMA_OF_CSV(cola, MAP('timestampFormat', 'dd/MM/yyyy'))",
         ),
-        (SF.schema_of_csv("cola"), "SCHEMA_OF_CSV(cola)"),
+        (SF.schema_of_csv(SF.col("cola")), "SCHEMA_OF_CSV(cola)"),
     ],
 )
 def test_schema_of_csv(expression, expected):
@@ -2903,3 +2903,68 @@ def test_try_to_binary(expression, expected):
 )
 def test_try_to_number(expression, expected):
     assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (
+            SF.aes_decrypt("cola", "colb", "colc", "cold", "cole"),
+            "AES_DECRYPT(cola, colb, colc, cold, cole)",
+        ),
+        (
+            SF.aes_decrypt(
+                SF.col("cola"), SF.col("colb"), SF.col("colc"), SF.col("cold"), SF.col("cole")
+            ),
+            "AES_DECRYPT(cola, colb, colc, cold, cole)",
+        ),
+        (SF.aes_decrypt("cola", SF.col("colb")), "AES_DECRYPT(cola, colb)"),
+        (
+            SF.aes_decrypt(SF.col("cola"), SF.col("colb"), padding="colc"),
+            "AES_DECRYPT(cola, colb, NULL, colc)",
+        ),
+    ],
+)
+def test_aes_decrypt(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (
+            SF.aes_encrypt("cola", "colb", "colc", "cold", "cole", "colf"),
+            "AES_ENCRYPT(cola, colb, colc, cold, cole, colf)",
+        ),
+        (
+            SF.aes_encrypt(
+                SF.col("cola"),
+                SF.col("colb"),
+                SF.col("colc"),
+                SF.col("cold"),
+                SF.col("cole"),
+                SF.col("colf"),
+            ),
+            "AES_ENCRYPT(cola, colb, colc, cold, cole, colf)",
+        ),
+        (SF.aes_encrypt("cola", SF.col("colb")), "AES_ENCRYPT(cola, colb)"),
+        (
+            SF.aes_encrypt(SF.col("cola"), SF.col("colb"), padding="colc"),
+            "AES_ENCRYPT(cola, colb, NULL, colc)",
+        ),
+    ],
+)
+def test_aes_encrypt(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.to_binary("cola"), "TO_BINARY(cola)"),
+        (SF.to_binary(SF.col("cola")), "TO_BINARY(cola)"),
+        (SF.to_binary("cola", SF.lit("UTF-8")), "TO_BINARY(cola, 'UTF-8')"),
+    ],
+)
+def test_to_binary(expression, expected):
+    assert expression.sql() == expected