sqlframe 1.9.0.tar.gz → 1.11.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (223)
  1. {sqlframe-1.9.0 → sqlframe-1.11.0}/PKG-INFO +1 -1
  2. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/bigquery.md +40 -0
  3. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/duckdb.md +43 -1
  4. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/postgres.md +49 -0
  5. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/snowflake.md +49 -0
  6. sqlframe-1.11.0/docs/spark.md +264 -0
  7. sqlframe-1.11.0/docs/standalone.md +223 -0
  8. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/_version.py +2 -2
  9. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/dataframe.py +54 -1
  10. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/exceptions.py +12 -0
  11. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/function_alternatives.py +96 -0
  12. sqlframe-1.11.0/sqlframe/base/functions.py +5811 -0
  13. sqlframe-1.11.0/sqlframe/base/mixins/dataframe_mixins.py +54 -0
  14. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/session.py +2 -2
  15. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/types.py +3 -3
  16. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/util.py +56 -0
  17. sqlframe-1.11.0/sqlframe/bigquery/dataframe.py +74 -0
  18. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/functions.py +4 -0
  19. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/functions.pyi +37 -1
  20. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/dataframe.py +6 -15
  21. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/functions.py +3 -0
  22. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/functions.pyi +29 -0
  23. sqlframe-1.11.0/sqlframe/postgres/catalog.py +227 -0
  24. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/dataframe.py +6 -10
  25. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/functions.py +6 -0
  26. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/functions.pyi +28 -0
  27. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/dataframe.py +3 -14
  28. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/dataframe.py +23 -13
  29. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/functions.py +3 -0
  30. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/functions.pyi +27 -0
  31. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/dataframe.py +25 -15
  32. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/functions.pyi +161 -1
  33. sqlframe-1.11.0/sqlframe/testing/__init__.py +3 -0
  34. sqlframe-1.11.0/sqlframe/testing/utils.py +320 -0
  35. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe.egg-info/PKG-INFO +1 -1
  36. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe.egg-info/SOURCES.txt +6 -0
  37. sqlframe-1.11.0/tests/integration/engines/bigquery/test_bigquery_dataframe.py +159 -0
  38. sqlframe-1.11.0/tests/integration/engines/duck/test_duckdb_dataframe.py +165 -0
  39. sqlframe-1.11.0/tests/integration/engines/postgres/test_postgres_dataframe.py +122 -0
  40. sqlframe-1.11.0/tests/integration/engines/snowflake/test_snowflake_dataframe.py +158 -0
  41. sqlframe-1.11.0/tests/integration/engines/spark/test_spark_dataframe.py +165 -0
  42. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/test_int_functions.py +1831 -2
  43. sqlframe-1.11.0/tests/integration/engines/test_int_testing.py +79 -0
  44. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_functions.py +1917 -5
  45. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_session.py +23 -0
  46. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_types.py +9 -9
  47. sqlframe-1.11.0/tests/unit/test_util.py +73 -0
  48. sqlframe-1.9.0/docs/spark.md +0 -517
  49. sqlframe-1.9.0/docs/standalone.md +0 -470
  50. sqlframe-1.9.0/sqlframe/base/functions.py +0 -1799
  51. sqlframe-1.9.0/sqlframe/base/mixins/dataframe_mixins.py +0 -63
  52. sqlframe-1.9.0/sqlframe/bigquery/dataframe.py +0 -54
  53. sqlframe-1.9.0/sqlframe/postgres/catalog.py +0 -107
  54. sqlframe-1.9.0/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -79
  55. sqlframe-1.9.0/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -64
  56. sqlframe-1.9.0/tests/unit/test_util.py +0 -26
  57. {sqlframe-1.9.0 → sqlframe-1.11.0}/.github/CODEOWNERS +0 -0
  58. {sqlframe-1.9.0 → sqlframe-1.11.0}/.github/workflows/main.workflow.yaml +0 -0
  59. {sqlframe-1.9.0 → sqlframe-1.11.0}/.github/workflows/publish.workflow.yaml +0 -0
  60. {sqlframe-1.9.0 → sqlframe-1.11.0}/.gitignore +0 -0
  61. {sqlframe-1.9.0 → sqlframe-1.11.0}/.pre-commit-config.yaml +0 -0
  62. {sqlframe-1.9.0 → sqlframe-1.11.0}/.readthedocs.yaml +0 -0
  63. {sqlframe-1.9.0 → sqlframe-1.11.0}/LICENSE +0 -0
  64. {sqlframe-1.9.0 → sqlframe-1.11.0}/Makefile +0 -0
  65. {sqlframe-1.9.0 → sqlframe-1.11.0}/README.md +0 -0
  66. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/add_chatgpt_support.md +0 -0
  67. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
  68. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
  69. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
  70. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
  71. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
  72. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
  73. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
  74. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
  75. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/but_wait_theres_more.gif +0 -0
  76. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/cake.gif +0 -0
  77. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  78. {sqlframe-1.9.0 → sqlframe-1.11.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  79. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/configuration.md +0 -0
  80. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/bigquery.md +0 -0
  81. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/duckdb.md +0 -0
  82. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/images/SF.png +0 -0
  83. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/images/favicon.png +0 -0
  84. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/images/favicon_old.png +0 -0
  85. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  86. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/images/sqlframe_logo.png +0 -0
  87. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/docs/postgres.md +0 -0
  88. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/images/SF.png +0 -0
  89. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/images/favicon.png +0 -0
  90. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/images/favicon_old.png +0 -0
  91. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/images/sqlframe_diagram.png +0 -0
  92. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/images/sqlframe_logo.png +0 -0
  93. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/index.md +0 -0
  94. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/requirements.txt +0 -0
  95. {sqlframe-1.9.0 → sqlframe-1.11.0}/docs/stylesheets/extra.css +0 -0
  96. {sqlframe-1.9.0 → sqlframe-1.11.0}/mkdocs.yml +0 -0
  97. {sqlframe-1.9.0 → sqlframe-1.11.0}/pytest.ini +0 -0
  98. {sqlframe-1.9.0 → sqlframe-1.11.0}/renovate.json +0 -0
  99. {sqlframe-1.9.0 → sqlframe-1.11.0}/setup.cfg +0 -0
  100. {sqlframe-1.9.0 → sqlframe-1.11.0}/setup.py +0 -0
  101. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/LICENSE +0 -0
  102. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/__init__.py +0 -0
  103. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/__init__.py +0 -0
  104. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/_typing.py +0 -0
  105. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/catalog.py +0 -0
  106. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/column.py +0 -0
  107. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/decorators.py +0 -0
  108. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/group.py +0 -0
  109. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/mixins/__init__.py +0 -0
  110. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  111. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  112. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/normalize.py +0 -0
  113. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/operations.py +0 -0
  114. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/readerwriter.py +0 -0
  115. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/transforms.py +0 -0
  116. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/base/window.py +0 -0
  117. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/__init__.py +0 -0
  118. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/catalog.py +0 -0
  119. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/column.py +0 -0
  120. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/group.py +0 -0
  121. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/readwriter.py +0 -0
  122. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/session.py +0 -0
  123. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/types.py +0 -0
  124. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/bigquery/window.py +0 -0
  125. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/__init__.py +0 -0
  126. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/catalog.py +0 -0
  127. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/column.py +0 -0
  128. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/group.py +0 -0
  129. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/readwriter.py +0 -0
  130. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/session.py +0 -0
  131. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/types.py +0 -0
  132. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/duckdb/window.py +0 -0
  133. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/__init__.py +0 -0
  134. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/column.py +0 -0
  135. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/group.py +0 -0
  136. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/readwriter.py +0 -0
  137. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/session.py +0 -0
  138. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/types.py +0 -0
  139. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/postgres/window.py +0 -0
  140. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/__init__.py +0 -0
  141. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/catalog.py +0 -0
  142. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/column.py +0 -0
  143. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/functions.py +0 -0
  144. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/group.py +0 -0
  145. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/readwriter.py +0 -0
  146. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/session.py +0 -0
  147. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/types.py +0 -0
  148. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/redshift/window.py +0 -0
  149. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/__init__.py +0 -0
  150. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/catalog.py +0 -0
  151. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/column.py +0 -0
  152. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/group.py +0 -0
  153. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/readwriter.py +0 -0
  154. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/session.py +0 -0
  155. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/types.py +0 -0
  156. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/snowflake/window.py +0 -0
  157. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/__init__.py +0 -0
  158. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/catalog.py +0 -0
  159. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/column.py +0 -0
  160. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/functions.py +0 -0
  161. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/group.py +0 -0
  162. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/readwriter.py +0 -0
  163. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/session.py +0 -0
  164. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/types.py +0 -0
  165. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/spark/window.py +0 -0
  166. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/__init__.py +0 -0
  167. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/catalog.py +0 -0
  168. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/column.py +0 -0
  169. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/dataframe.py +0 -0
  170. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/functions.py +0 -0
  171. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/group.py +0 -0
  172. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/readwriter.py +0 -0
  173. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/session.py +0 -0
  174. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/types.py +0 -0
  175. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe/standalone/window.py +0 -0
  176. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  177. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe.egg-info/requires.txt +0 -0
  178. {sqlframe-1.9.0 → sqlframe-1.11.0}/sqlframe.egg-info/top_level.txt +0 -0
  179. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/__init__.py +0 -0
  180. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/common_fixtures.py +0 -0
  181. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/conftest.py +0 -0
  182. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/fixtures/employee.csv +0 -0
  183. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/fixtures/employee.json +0 -0
  184. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/fixtures/employee.parquet +0 -0
  185. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/fixtures/employee_extra_line.csv +0 -0
  186. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/__init__.py +0 -0
  187. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/__init__.py +0 -0
  188. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  189. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  190. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  191. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/duck/__init__.py +0 -0
  192. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  193. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  194. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  195. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/postgres/__init__.py +0 -0
  196. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  197. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  198. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/redshift/__init__.py +0 -0
  199. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  200. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  201. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  202. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  203. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  204. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/spark/__init__.py +0 -0
  205. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  206. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  207. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/test_engine_reader.py +0 -0
  208. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/test_engine_session.py +0 -0
  209. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/engines/test_engine_writer.py +0 -0
  210. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/fixtures.py +0 -0
  211. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/test_int_dataframe.py +0 -0
  212. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  213. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/test_int_grouped_data.py +0 -0
  214. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/integration/test_int_session.py +0 -0
  215. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/types.py +0 -0
  216. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/__init__.py +0 -0
  217. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/__init__.py +0 -0
  218. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/fixtures.py +0 -0
  219. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_column.py +0 -0
  220. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_dataframe.py +0 -0
  221. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  222. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  223. {sqlframe-1.9.0 → sqlframe-1.11.0}/tests/unit/standalone/test_window.py +0 -0
{sqlframe-1.9.0 → sqlframe-1.11.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: sqlframe
- Version: 1.9.0
+ Version: 1.11.0
  Summary: Turning PySpark Into a Universal DataFrame API
  Home-page: https://github.com/eakmanrq/sqlframe
  Author: Ryan Eakman
{sqlframe-1.9.0 → sqlframe-1.11.0}/docs/bigquery.md
@@ -61,6 +61,22 @@ from sqlframe.bigquery import functions as F
  from sqlframe.bigquery import BigQueryDataFrame
  ```

+ ## Using BigQuery Unique Functions
+
+ BigQuery may have a function that isn't represented within the PySpark API.
+ If that is the case, you can call it directly using PySpark [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html) function.
+
+ ```python
+ from sqlframe.bigquery import BigQuerySession
+ from sqlframe.bigquery import functions as F
+
+ session = BigQuerySession(default_dataset="sqlframe.db1")
+ (
+ session.table("bigquery-public-data.samples.natality")
+ .select(F.call_function("FARM_FINGERPRINT", F.col("source")).alias("source_hash"))
+ .show()
+ )
+ ```

  ## Example Usage

@@ -201,8 +217,10 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [na](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.na.html)
  * [orderBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.orderBy.html)
  * [persist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.persist.html)
+ * [printSchema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.printSchema.html)
  * [replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.replace.html)
  * [select](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.select.html)
+ * [schema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.schema.html)
  * [show](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.show.html)
  * Vertical Argument is not Supported
  * [sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.sort.html)
@@ -225,6 +243,8 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
  * [acosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acosh.html)
  * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
+ * [any_value](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.any_value.html)
+ * Always ignores nulls
  * [approxCountDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approxCountDistinct.html)
  * [approx_count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approx_count_distinct.html)
  * [array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array.html)
@@ -254,10 +274,14 @@ See something that you would like to see supported? [Open an issue](https://gith
  * Symbols are not supported
  * [bitwiseNOT](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwiseNOT.html)
  * [bitwise_not](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwise_not.html)
+ * [bool_and](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_and.html)
+ * [bool_or](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_or.html)
  * [bround](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bround.html)
+ * [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html)
  * [cbrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cbrt.html)
  * [ceil](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceil.html)
  * [ceiling](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceiling.html)
+ * [char](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.char.html)
  * [coalesce](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.coalesce.html)
  * [col](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.col.html)
  * [collect_list](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.collect_list.html)
@@ -270,14 +294,18 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [cosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cosh.html)
  * [cot](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cot.html)
  * [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count.html)
+ * [count_if](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_if.html)
  * [covar_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_pop.html)
  * [covar_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_samp.html)
  * [csc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.csc.html)
  * [cume_dist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cume_dist.html)
  * [current_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_date.html)
  * [current_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_timestamp.html)
+ * [current_user](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_user.html)
  * [date_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_add.html)
+ * [dateadd](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dateadd.html)
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
+ * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
  * [The format string should be in BigQuery syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
@@ -300,6 +328,8 @@ See something that you would like to see supported? [Open an issue](https://gith
  * Doesn't support exploding maps
  * [expm1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expm1.html)
  * [expr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expr.html)
+ * [extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.extract.html)
+ * Some fields may start from 0 instead of 1 (like `week`). [Extract](https://cloud.google.com/bigquery/docs/reference/standard-sql/timestamp_functions#extract)
  * [factorial](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.factorial.html)
  * [floor](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.floor.html)
  * [format_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.format_number.html)
@@ -320,10 +350,13 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [isnull](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.isnull.html)
  * [lag](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lag.html)
  * [last_day](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.last_day.html)
+ * [lcase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lcase.html)
  * [lead](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lead.html)
  * [least](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.least.html)
+ * [left](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.left.html)
  * [length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.length.html)
  * [lit](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lit.html)
+ * [ln](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ln.html)
  * [log](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log.html)
  * [log10](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log10.html)
  * [log1p](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log1p.html)
@@ -353,6 +386,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * Default order of columns are `col`, `pos` instead of `pos`, `col`
  * [posexplode_outer](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.posexplode_outer.html)
  * Default order of columns are `col`, `pos` instead of `pos`, `col`
+ * [position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.position.html)
  * [pow](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.pow.html)
  * [quarter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.quarter.html)
  * [radians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.radians.html)
@@ -360,11 +394,14 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rank.html)
  * [regexp_extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_extract.html)
  * Single capture group is supported
+ * [regexp_like](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_like.html)
  * [regexp_replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_replace.html)
  * [repeat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.repeat.html)
  * [reverse](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.reverse.html)
  * Only works on strings (does not work on arrays)
+ * [right](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.right.html)
  * [rint](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rint.html)
+ * [rlike](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rlike.html)
  * [round](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.round.html)
  * [row_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.row_number.html)
  * [rpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rpad.html)
@@ -372,11 +409,13 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [sec](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sec.html)
  * [second](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.second.html)
  * [sequence](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sequence.html)
+ * [sha](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sha.html)
  * [sha1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sha1.html)
  * [shiftLeft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftLeft.html)
  * [shiftRight](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftRight.html)
  * [shiftleft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftleft.html)
  * [shiftright](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftright.html)
+ * [sign](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sign.html)
  * [signum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.signum.html)
  * [sin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sin.html)
  * [sinh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sinh.html)
@@ -411,6 +450,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
  * Shorthand expressions not supported. Ex: Use `month` instead of `mon`
  * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
+ * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
{sqlframe-1.9.0 → sqlframe-1.11.0}/docs/duckdb.md
@@ -48,6 +48,23 @@ from sqlframe.duckdb import functions as F
  from sqlframe.duckdb import DuckDBDataFrame
  ```

+ ## Using DuckDB Unique Functions
+
+ DuckDB may have a function that isn't represented within the PySpark API.
+ If that is the case, you can call it directly using PySpark [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html) function.
+
+ ```python
+ from sqlframe.duckdb import DuckDBSession
+ from sqlframe.duckdb import functions as F
+
+ session = DuckDBSession()
+ (
+ session.table("example.table")
+ .select(F.call_function("CURRENT_SETTING", F.lit("access_mode")).alias("access_mode_value"))
+ .show()
+ )
+ ```
+
  ## Example Usage

  ```python
@@ -175,6 +192,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [persist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.persist.html)
  * [printSchema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.printSchema.html)
  * [replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.replace.html)
+ * [schema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.schema.html)
  * [select](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.select.html)
  * [show](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.show.html)
  * Vertical Argument is not Supported
@@ -197,6 +215,8 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [abs](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.abs.html)
  * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
  * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
+ * [any_value](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.any_value.html)
+ * Always ignores nulls
  * [approxCountDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approxCountDistinct.html)
  * [approx_count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approx_count_distinct.html)
  * [array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array.html)
@@ -224,9 +244,13 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [bit_length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bit_length.html)
  * [bitwiseNOT](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwiseNOT.html)
  * [bitwise_not](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwise_not.html)
+ * [bool_and](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_and.html)
+ * [bool_or](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_or.html)
+ * [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html)
  * [cbrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cbrt.html)
  * [ceil](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceil.html)
  * [ceiling](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceiling.html)
+ * [char](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.char.html)
  * [coalesce](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.coalesce.html)
  * [col](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.col.html)
  * [collect_list](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.collect_list.html)
@@ -240,14 +264,18 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count.html)
  * [countDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.countDistinct.html)
  * [count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_distinct.html)
+ * [count_if](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_if.html)
  * [covar_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_pop.html)
  * [covar_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_samp.html)
  * [create_map](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.create_map.html)
  * [cume_dist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cume_dist.html)
  * [current_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_date.html)
  * [current_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_timestamp.html)
+ * [current_user](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_user.html)
  * [date_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_add.html)
+ * [dateadd](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dateadd.html)
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
+ * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
@@ -269,6 +297,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [explode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.explode.html)
  * [expm1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expm1.html)
  * [expr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expr.html)
+ * [extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.extract.html)
  * [factorial](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.factorial.html)
  * [first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.first.html)
  * [flatten](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.flatten.html)
@@ -291,11 +320,14 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [kurtosis](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.kurtosis.html)
  * [lag](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lag.html)
  * [last](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.last.html)
+ * [lcase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lcase.html)
  * [lead](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lead.html)
  * [least](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.least.html)
+ * [left](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.left.html)
  * [length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.length.html)
  * [levenshtein](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.levenshtein.html)
  * [lit](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lit.html)
+ * [ln](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ln.html)
  * [locate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.locate.html)
  * [log](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log.html)
  * [log10](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log10.html)
@@ -323,18 +355,22 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [percent_rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percent_rank.html)
  * [percentile](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percentile.html)
  * [percentile_approx](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percentile_approx.html)
+ * [position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.position.html)
  * [pow](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.pow.html)
  * [quarter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.quarter.html)
  * [radians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.radians.html)
  * [rand](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rand.html)
  * [rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rank.html)
  * [regexp_extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_extract.html)
+ * [regexp_like](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_like.html)
  * [regexp_replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_replace.html)
  * Replaces first match only
  * [repeat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.repeat.html)
  * [reverse](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.reverse.html)
  * Only works on strings (does not work on arrays)
+ * [right](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.right.html)
  * [rint](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rint.html)
+ * [rlike](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rlike.html)
  * [round](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.round.html)
  * [row_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.row_number.html)
  * [rpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rpad.html)
@@ -345,6 +381,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [shiftRight](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftRight.html)
  * [shiftleft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftleft.html)
  * [shiftright](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftright.html)
+ * [sign](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sign.html)
  * [signum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.signum.html)
  * [sin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sin.html)
  * [size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.size.html)
@@ -370,11 +407,16 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
  * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
- * * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
+ * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
+ * [to_unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_unix_timestamp.html)
+ * [The format string should be in DuckDB syntax](https://duckdb.org/docs/sql/functions/dateformat.html#format-specifiers)
+ * The values must match the format string (null will not be returned if they do not)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+ * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
  * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
+ * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
@@ -43,6 +43,31 @@ from sqlframe.postgres import functions as F
43
43
  from sqlframe.postgres import PostgresDataFrame
44
44
  ```
45
45
 
46
+ ## Using Postgres Unique Functions
47
+
48
+ Postgres may have a function that isn't represented within the PySpark API.
49
+ If that is the case, you can call it directly using PySpark [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html) function.
50
+
51
+ ```python
52
+ from psycopg2 import connect
53
+ from sqlframe.postgres import PostgresSession
54
+ from sqlframe.postgres import functions as F
55
+
56
+ conn = connect(
57
+ dbname="postgres",
58
+ user="postgres",
59
+ password="password",
60
+ host="localhost",
61
+ port="5432",
62
+ )
63
+ session = PostgresSession(conn=conn)
64
+ (
65
+ session.table("example.table")
66
+ .select(F.call_function("PG_DATABASE_SIZE", F.lit("some_database")).alias("database_size"))
67
+ .show()
68
+ )
69
+ ```
70
+
46
71
  ## Example Usage
47
72
 
48
73
  ```python
@@ -178,6 +203,7 @@ See something that you would like to see supported? [Open an issue](https://gith
178
203
  * [persist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.persist.html)
179
204
  * [printSchema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.printSchema.html)
180
205
  * [replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.replace.html)
206
+ * [schema](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.schema.html)
181
207
  * [select](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.select.html)
182
208
  * [show](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.show.html)
183
209
  * Vertical Argument is not Supported
@@ -201,6 +227,8 @@ See something that you would like to see supported? [Open an issue](https://gith
201
227
  * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
202
228
  * [acosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acosh.html)
203
229
  * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
230
+ * [any_value](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.any_value.html)
231
+ * Returns the max value and ignore nulls is not supported
204
232
  * [array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array.html)
205
233
  * [array_contains](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_contains.html)
206
234
  * [array_join](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_join.html)
@@ -223,9 +251,13 @@ See something that you would like to see supported? [Open an issue](https://gith
223
251
  * [bit_length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bit_length.html)
224
252
  * [bitwiseNOT](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwiseNOT.html)
225
253
  * [bitwise_not](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwise_not.html)
254
+ * [bool_and](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_and.html)
255
+ * [bool_or](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bool_or.html)
256
+ * [call_function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.call_function.html)
226
257
  * [cbrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cbrt.html)
227
258
  * [ceil](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceil.html)
228
259
  * [ceiling](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceiling.html)
260
+ * [char](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.char.html)
229
261
  * [coalesce](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.coalesce.html)
230
262
  * [col](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.col.html)
231
263
  * [collect_list](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.collect_list.html)
@@ -240,13 +272,17 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count.html)
  * [countDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.countDistinct.html)
  * [count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_distinct.html)
+ * [count_if](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_if.html)
  * [covar_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_pop.html)
  * [covar_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_samp.html)
  * [cume_dist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cume_dist.html)
  * [current_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_date.html)
  * [current_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_timestamp.html)
+ * [current_user](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_user.html)
  * [date_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_add.html)
+ * [dateadd](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dateadd.html)
  * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
+ * [datediff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.datediff.html)
  * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
  * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
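The format-string caveat above matters in practice: the format argument is passed through in Postgres `to_char`/`to_date` syntax rather than Spark's Java-style patterns. Below is a minimal sketch, assuming this page documents the Postgres engine (as the format-string notes suggest) and that a `PostgresSession` is built from a psycopg2 connection as shown earlier in these docs; connection parameters and the `functions as F` import path are illustrative assumptions.

```python
# Sketch only: assumes PostgresSession(conn=...) and the functions module path
# match the setup documented elsewhere on this page.
import psycopg2
from sqlframe.postgres import PostgresSession
from sqlframe.postgres import functions as F

conn = psycopg2.connect(
    dbname="postgres", user="postgres", password="postgres", host="localhost"
)
session = PostgresSession(conn=conn)

df = session.createDataFrame([(1, "2024-01-31")], ["id", "raw_date"])

# Note the Postgres-style format strings ("YYYY-MM-DD", "Mon DD, YYYY"),
# not Spark's "yyyy-MM-dd" patterns.
result = df.select(
    F.to_date(F.col("raw_date"), "YYYY-MM-DD").alias("parsed"),
    F.date_format(
        F.to_date(F.col("raw_date"), "YYYY-MM-DD"), "Mon DD, YYYY"
    ).alias("formatted"),
)
print(result.sql())  # inspect the generated Postgres SQL before calling .show()
```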
@@ -270,6 +306,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * Doesn't support exploding maps
  * [expm1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expm1.html)
  * [expr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expr.html)
+ * [extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.extract.html)
  * [factorial](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.factorial.html)
  * [floor](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.floor.html)
  * [format_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.format_number.html)
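Since `explode` on this engine "Doesn't support exploding maps" (noted above), array columns are the supported path. A short sketch under the same assumptions as the previous example (a `session` already created; whether `createDataFrame` accepts Python lists as array literals on this engine is an assumption worth verifying against your setup):

```python
from sqlframe.postgres import functions as F  # assumed import path, as above

df = session.createDataFrame([(1, ["a", "b", "c"])], ["id", "letters"])

# One output row per array element; a map column would first need to be
# restructured (e.g. into an array of key/value structs), since exploding
# maps is documented as unsupported here.
df.select(F.col("id"), F.explode(F.col("letters")).alias("letter")).show()
```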
@@ -284,11 +321,14 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [isnan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.isnan.html)
  * [isnull](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.isnull.html)
  * [lag](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lag.html)
+ * [lcase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lcase.html)
  * [lead](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lead.html)
  * [least](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.least.html)
+ * [left](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.left.html)
  * [length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.length.html)
  * [levenshtein](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.levenshtein.html)
  * [lit](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lit.html)
+ * [ln](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ln.html)
  * [locate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.locate.html)
  * [log](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log.html)
  * [log10](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log10.html)
@@ -312,16 +352,20 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [overlay](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.overlay.html)
  * [percent_rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percent_rank.html)
  * [percentile](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percentile.html)
+ * [position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.position.html)
  * [pow](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.pow.html)
  * [quarter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.quarter.html)
  * [radians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.radians.html)
  * [rand](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rand.html)
  * [rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rank.html)
+ * [regexp_like](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_like.html)
  * [regexp_replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_replace.html)
  * [repeat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.repeat.html)
  * [reverse](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.reverse.html)
  * Only works on strings (does not work on arrays)
+ * [right](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.right.html)
  * [rint](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rint.html)
+ * [rlike](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rlike.html)
  * [round](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.round.html)
  * [row_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.row_number.html)
  * [rpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rpad.html)
@@ -331,6 +375,7 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [shiftRight](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftRight.html)
  * [shiftleft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftleft.html)
  * [shiftright](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftright.html)
+ * [sign](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sign.html)
  * [signum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.signum.html)
  * [sin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sin.html)
  * [sinh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sinh.html)
@@ -353,11 +398,15 @@ See something that you would like to see supported? [Open an issue](https://gith
  * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
  * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
+ * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
  * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
  * [The format string should be in Postgres syntax](https://www.postgresql.org/docs/current/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIME-TABLE)
  * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
  * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
  * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+ * [try_element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_element_at.html)
+ * Negative index returns null and cannot lookup elements in maps
+ * [ucase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ucase.html)
  * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
  * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
  * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
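To make the `try_element_at` caveat above concrete: array indexing is 1-based, a negative index returns NULL on this engine (Spark itself would count from the end), and map lookups are not available. A sketch under the same assumptions as the earlier examples (an existing `session`, illustrative data):

```python
from sqlframe.postgres import functions as F  # assumed import path, as above

df = session.createDataFrame([(1, ["a", "b", "c"])], ["id", "letters"])
df.select(
    F.try_element_at(F.col("letters"), F.lit(1)).alias("first_element"),    # -> "a"
    F.try_element_at(F.col("letters"), F.lit(-1)).alias("negative_index"),  # -> NULL here; Spark would return "c"
).show()
```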