sqlframe 1.4.0__tar.gz → 1.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (198)
  1. {sqlframe-1.4.0 → sqlframe-1.5.0}/Makefile +1 -0
  2. {sqlframe-1.4.0 → sqlframe-1.5.0}/PKG-INFO +1 -1
  3. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/bigquery.md +0 -1
  4. sqlframe-1.5.0/docs/snowflake.md +492 -0
  5. {sqlframe-1.4.0 → sqlframe-1.5.0}/setup.py +1 -1
  6. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/_version.py +2 -2
  7. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/column.py +7 -0
  8. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/function_alternatives.py +371 -0
  9. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/functions.py +49 -43
  10. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/normalize.py +9 -0
  11. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/session.py +2 -0
  12. sqlframe-1.5.0/sqlframe/snowflake/functions.py +63 -0
  13. sqlframe-1.5.0/sqlframe/snowflake/functions.pyi +192 -0
  14. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/session.py +11 -4
  15. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe.egg-info/PKG-INFO +1 -1
  16. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe.egg-info/SOURCES.txt +2 -0
  17. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe.egg-info/requires.txt +1 -1
  18. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/test_int_functions.py +367 -182
  19. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/fixtures.py +15 -15
  20. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/test_int_dataframe_stats.py +8 -2
  21. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_functions.py +4 -0
  22. sqlframe-1.4.0/sqlframe/snowflake/functions.py +0 -18
  23. {sqlframe-1.4.0 → sqlframe-1.5.0}/.github/CODEOWNERS +0 -0
  24. {sqlframe-1.4.0 → sqlframe-1.5.0}/.github/workflows/main.workflow.yaml +0 -0
  25. {sqlframe-1.4.0 → sqlframe-1.5.0}/.github/workflows/publish.workflow.yaml +0 -0
  26. {sqlframe-1.4.0 → sqlframe-1.5.0}/.gitignore +0 -0
  27. {sqlframe-1.4.0 → sqlframe-1.5.0}/.pre-commit-config.yaml +0 -0
  28. {sqlframe-1.4.0 → sqlframe-1.5.0}/.readthedocs.yaml +0 -0
  29. {sqlframe-1.4.0 → sqlframe-1.5.0}/LICENSE +0 -0
  30. {sqlframe-1.4.0 → sqlframe-1.5.0}/README.md +0 -0
  31. {sqlframe-1.4.0 → sqlframe-1.5.0}/blogs/images/but_wait_theres_more.gif +0 -0
  32. {sqlframe-1.4.0 → sqlframe-1.5.0}/blogs/images/cake.gif +0 -0
  33. {sqlframe-1.4.0 → sqlframe-1.5.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  34. {sqlframe-1.4.0 → sqlframe-1.5.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  35. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/configuration.md +0 -0
  36. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/bigquery.md +0 -0
  37. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/duckdb.md +0 -0
  38. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/images/SF.png +0 -0
  39. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/images/favicon.png +0 -0
  40. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/images/favicon_old.png +0 -0
  41. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  42. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/images/sqlframe_logo.png +0 -0
  43. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/docs/postgres.md +0 -0
  44. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/duckdb.md +0 -0
  45. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/images/SF.png +0 -0
  46. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/images/favicon.png +0 -0
  47. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/images/favicon_old.png +0 -0
  48. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/images/sqlframe_diagram.png +0 -0
  49. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/images/sqlframe_logo.png +0 -0
  50. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/index.md +0 -0
  51. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/postgres.md +0 -0
  52. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/requirements.txt +0 -0
  53. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/standalone.md +0 -0
  54. {sqlframe-1.4.0 → sqlframe-1.5.0}/docs/stylesheets/extra.css +0 -0
  55. {sqlframe-1.4.0 → sqlframe-1.5.0}/mkdocs.yml +0 -0
  56. {sqlframe-1.4.0 → sqlframe-1.5.0}/pytest.ini +0 -0
  57. {sqlframe-1.4.0 → sqlframe-1.5.0}/renovate.json +0 -0
  58. {sqlframe-1.4.0 → sqlframe-1.5.0}/setup.cfg +0 -0
  59. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/LICENSE +0 -0
  60. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/__init__.py +0 -0
  61. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/__init__.py +0 -0
  62. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/_typing.py +0 -0
  63. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/catalog.py +0 -0
  64. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/dataframe.py +0 -0
  65. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/decorators.py +0 -0
  66. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/exceptions.py +0 -0
  67. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/group.py +0 -0
  68. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/mixins/__init__.py +0 -0
  69. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  70. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
  71. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  72. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/operations.py +0 -0
  73. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/readerwriter.py +0 -0
  74. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/transforms.py +0 -0
  75. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/types.py +0 -0
  76. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/util.py +0 -0
  77. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/base/window.py +0 -0
  78. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/__init__.py +0 -0
  79. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/catalog.py +0 -0
  80. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/column.py +0 -0
  81. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/dataframe.py +0 -0
  82. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/functions.py +0 -0
  83. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/functions.pyi +0 -0
  84. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/group.py +0 -0
  85. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/readwriter.py +0 -0
  86. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/session.py +0 -0
  87. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/types.py +0 -0
  88. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/bigquery/window.py +0 -0
  89. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/__init__.py +0 -0
  90. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/catalog.py +0 -0
  91. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/column.py +0 -0
  92. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/dataframe.py +0 -0
  93. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/functions.py +0 -0
  94. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/functions.pyi +0 -0
  95. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/group.py +0 -0
  96. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/readwriter.py +0 -0
  97. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/session.py +0 -0
  98. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/types.py +0 -0
  99. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/duckdb/window.py +0 -0
  100. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/__init__.py +0 -0
  101. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/catalog.py +0 -0
  102. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/column.py +0 -0
  103. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/dataframe.py +0 -0
  104. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/functions.py +0 -0
  105. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/functions.pyi +0 -0
  106. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/group.py +0 -0
  107. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/readwriter.py +0 -0
  108. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/session.py +0 -0
  109. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/types.py +0 -0
  110. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/postgres/window.py +0 -0
  111. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/__init__.py +0 -0
  112. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/catalog.py +0 -0
  113. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/column.py +0 -0
  114. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/dataframe.py +0 -0
  115. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/functions.py +0 -0
  116. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/group.py +0 -0
  117. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/readwriter.py +0 -0
  118. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/session.py +0 -0
  119. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/types.py +0 -0
  120. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/redshift/window.py +0 -0
  121. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/__init__.py +0 -0
  122. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/catalog.py +0 -0
  123. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/column.py +0 -0
  124. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/dataframe.py +0 -0
  125. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/group.py +0 -0
  126. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/readwriter.py +0 -0
  127. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/types.py +0 -0
  128. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/snowflake/window.py +0 -0
  129. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/__init__.py +0 -0
  130. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/catalog.py +0 -0
  131. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/column.py +0 -0
  132. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/dataframe.py +0 -0
  133. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/functions.py +0 -0
  134. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/group.py +0 -0
  135. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/readwriter.py +0 -0
  136. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/session.py +0 -0
  137. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/types.py +0 -0
  138. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/spark/window.py +0 -0
  139. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/__init__.py +0 -0
  140. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/catalog.py +0 -0
  141. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/column.py +0 -0
  142. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/dataframe.py +0 -0
  143. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/functions.py +0 -0
  144. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/group.py +0 -0
  145. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/readwriter.py +0 -0
  146. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/session.py +0 -0
  147. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/types.py +0 -0
  148. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe/standalone/window.py +0 -0
  149. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  150. {sqlframe-1.4.0 → sqlframe-1.5.0}/sqlframe.egg-info/top_level.txt +0 -0
  151. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/__init__.py +0 -0
  152. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/common_fixtures.py +0 -0
  153. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/conftest.py +0 -0
  154. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/fixtures/employee.csv +0 -0
  155. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/fixtures/employee.json +0 -0
  156. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/fixtures/employee.parquet +0 -0
  157. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/fixtures/employee_extra_line.csv +0 -0
  158. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/__init__.py +0 -0
  159. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/__init__.py +0 -0
  160. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  161. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  162. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  163. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/duck/__init__.py +0 -0
  164. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  165. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
  166. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  167. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  168. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/postgres/__init__.py +0 -0
  169. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  170. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
  171. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  172. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/redshift/__init__.py +0 -0
  173. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  174. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  175. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  176. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  177. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  178. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/spark/__init__.py +0 -0
  179. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  180. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  181. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/test_engine_reader.py +0 -0
  182. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/test_engine_session.py +0 -0
  183. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/engines/test_engine_writer.py +0 -0
  184. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/test_int_dataframe.py +0 -0
  185. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/test_int_grouped_data.py +0 -0
  186. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/integration/test_int_session.py +0 -0
  187. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/types.py +0 -0
  188. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/__init__.py +0 -0
  189. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/__init__.py +0 -0
  190. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/fixtures.py +0 -0
  191. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_column.py +0 -0
  192. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_dataframe.py +0 -0
  193. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  194. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_session.py +0 -0
  195. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  196. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_types.py +0 -0
  197. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/standalone/test_window.py +0 -0
  198. {sqlframe-1.4.0 → sqlframe-1.5.0}/tests/unit/test_util.py +0 -0
@@ -29,6 +29,7 @@ stubs:
     stubgen sqlframe/bigquery/functions.py --output ./ --inspect-mode
     stubgen sqlframe/duckdb/functions.py --output ./ --inspect-mode
     stubgen sqlframe/postgres/functions.py --output ./ --inspect-mode
+    stubgen sqlframe/snowflake/functions.py --output ./ --inspect-mode

 package:
     pip3 install wheel && python3 setup.py sdist bdist_wheel
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.4.0
+Version: 1.5.0
 Summary: Taking the Spark out of PySpark by converting to SQL
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
@@ -385,7 +385,6 @@ print(session.catalog.listColumns(table_path))
 * [soundex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.soundex.html)
 * [split](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.split.html)
   * Regular expressions not supported
-* [sqlglot_expression](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sqlglot_expression.html)
 * [sqrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sqrt.html)
 * [stddev](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.stddev.html)
 * [stddev_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.stddev_pop.html)
@@ -0,0 +1,492 @@
+# Snowflake
+
+## Installation
+
+```bash
+pip install "sqlframe[snowflake]"
+```
+
+## Creating a Session
+
+SQLFrame uses the [Snowflake Python Connector](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector) to connect to Snowflake.
+A `SnowflakeSession`, which implements the PySpark Session API, can be created by passing in a `snowflake.connector.connection.SnowflakeConnection` object.
+
+```python
+import os
+
+from snowflake.connector import connect
+from sqlframe.snowflake import SnowflakeSession
+from sqlframe.snowflake import functions as F
+
+connection = connect(
+    account=os.environ["SNOWFLAKE_ACCOUNT"],
+    user=os.environ["SNOWFLAKE_USER"],
+    password=os.environ["SNOWFLAKE_PASSWORD"],
+    warehouse=os.environ["SNOWFLAKE_WAREHOUSE"],
+    database=os.environ["SNOWFLAKE_DATABASE"],
+    schema=os.environ["SNOWFLAKE_SCHEMA"],
+)
+session = SnowflakeSession(conn=connection)
+```
+
+## Imports
+
+If converting a PySpark pipeline, all `pyspark.sql` imports should be replaced with `sqlframe.snowflake`.
+In addition, many classes will have a `Snowflake` prefix.
+For example, `SnowflakeDataFrame` instead of `DataFrame`.
+
+```python
+# PySpark import
+# from pyspark.sql import SparkSession
+# from pyspark.sql import functions as F
+# from pyspark.sql.dataframe import DataFrame
+# SQLFrame import
+from sqlframe.snowflake import SnowflakeSession
+from sqlframe.snowflake import functions as F
+from sqlframe.snowflake import SnowflakeDataFrame
+```
+
+## Example Usage
+
+```python
+import os
+
+from snowflake.connector import connect
+from sqlframe.snowflake import SnowflakeSession
+from sqlframe.snowflake import functions as F
+
+connection = connect(
+    account=os.environ["SQLFRAME_SNOWFLAKE_ACCOUNT"],
+    user=os.environ["SQLFRAME_SNOWFLAKE_USER"],
+    password=os.environ["SQLFRAME_SNOWFLAKE_PASSWORD"],
+    warehouse=os.environ["SQLFRAME_SNOWFLAKE_WAREHOUSE"],
+    # Dataset: https://app.snowflake.com/marketplace/listing/GZ1M6ZVQIAF/insights-global-covid-statistics
+    database="GLOBAL_COVID_STATISTICS",
+    schema=os.environ["SQLFRAME_SNOWFLAKE_SCHEMA"],
+)
+
+session = SnowflakeSession(conn=connection)
+df = (
+    session.table("INSIGHTS.COVID19_METRICS_BY_COUNTRY")
+    .where(F.col("date").between("2021-01-01", "2021-12-31"))
+    .groupby("continent", "country")
+    .agg(
+        F.sum("total_case").alias("total_cases"),
+        F.sum("total_deaths").alias("total_deaths"),
+        (F.sum("total_deaths").cast("float") / F.nullif(F.sum("total_case"), F.lit(0))).alias("death_rate"),
+    )
+    .orderBy(F.col("death_rate").desc_nulls_last())
+)
+df.show(5)
+"""
++---------------+---------+-------------+--------------+---------------+
+| CONTINENT     | COUNTRY | TOTAL_CASES | TOTAL_DEATHS | DEATH_RATE    |
++---------------+---------+-------------+--------------+---------------+
+| Asia          | Yemen   | 2372121     | 475483       | 0.2004463516  |
+| South America | Peru    | 675530177   | 62217492     | 0.09210172116 |
+| North America | Mexico  | 1039445357  | 89674958     | 0.08627193089 |
+| Africa        | Sudan   | 13109385    | 929258       | 0.07088494235 |
+| Asia          | Syria   | 9963515     | 654570       | 0.06569669439 |
++---------------+---------+-------------+--------------+---------------+
+"""
+```
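The `sql` method listed under the DataFrame class below can render this same pipeline as Snowflake SQL instead of executing it. A minimal sketch, assuming `df` is the DataFrame built above and that `sql()` returns the generated query as a string:

```python
# Render the pipeline above to Snowflake SQL rather than executing it.
# Assumes sql() returns the generated SELECT statement as a string.
query = df.sql()
print(query)
```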
+
+## Supported PySpark API Methods
+
+### Catalog Class
+
+* add_table
+  * SQLFrame Specific: Adds a table to the known schemas that SQLFrame tracks
+* [currentCatalog](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.currentCatalog.html)
+* [currentDatabase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.currentDatabase.html)
+* [databaseExists](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.databaseExists.html)
+* [functionExists](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.functionExists.html)
+* [getDatabase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.getDatabase.html)
+* [getFunction](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.getFunction.html)
+* [getTable](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.getTable.html)
+* get_columns
+  * SQLFrame Specific: Similar to `listColumns` but returns SQLGlot expressions instead
+* get_columns_from_schema
+  * SQLFrame Specific: Gets the columns from the schemas known to SQLFrame
+* [listCatalogs](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.listCatalogs.html)
+* [listColumns](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.listColumns.html)
+* [listDatabases](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.listDatabases.html)
+* [listFunctions](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.listFunctions.html)
+* [listTables](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.listTables.html)
+* [setCurrentCatalog](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.setCurrentCatalog.html)
+* [setCurrentDatabase](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.setCurrentDatabase.html)
+* [tableExists](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Catalog.tableExists.html)
+
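A quick sketch exercising a few of the catalog methods above against the `session` from the earlier example (the table name is the marketplace table used there):

```python
# Exercise a few of the listed Catalog methods; names come from the example above.
print(session.catalog.currentDatabase())
print(session.catalog.listTables())
print(session.catalog.listColumns("INSIGHTS.COVID19_METRICS_BY_COUNTRY"))
```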
+### Column Class
+
+* [alias](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.alias.html)
+* [asc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.asc.html)
+* [asc_nulls_first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.asc_nulls_first.html)
+* [asc_nulls_last](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.asc_nulls_last.html)
+* [between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.between.html)
+* [cast](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.cast.html)
+* [desc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.desc.html)
+* [desc_nulls_first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.desc_nulls_first.html)
+* [desc_nulls_last](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.desc_nulls_last.html)
+* [endswith](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.endswith.html)
+* [ilike](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.ilike.html)
+* [isNotNull](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.isNotNull.html)
+* [isNull](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.isNull.html)
+* [isin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.isin.html)
+* [like](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.like.html)
+* [otherwise](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.otherwise.html)
+* [over](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.over.html)
+* [rlike](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.rlike.html)
+* sql
+  * SQLFrame Specific: Get the SQL representation of a given column (see the sketch after this list)
+* [startswith](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.startswith.html)
+* [substr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.substr.html)
+* [when](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Column.when.html)
+
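As referenced in the `sql` entry above, a minimal sketch of composing a column expression from these methods and inspecting it; it assumes the session and columns from the earlier example, and that `sql()` renders the expression as a string:

```python
# Build a Column expression with when/otherwise/alias, all listed above.
status = (
    F.when(F.col("total_deaths") > 0, F.lit("affected"))
    .otherwise(F.lit("unaffected"))
    .alias("status")
)
# SQLFrame-specific: inspect the SQL for this single column expression.
print(status.sql())
```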
+### DataFrame Class
+
+* [agg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.agg.html)
+* [alias](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.alias.html)
+* [approxQuantile](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.approxQuantile.html)
+* [cache](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.cache.html)
+* [coalesce](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.coalesce.html)
+* [collect](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.collect.html)
+* [columns](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.columns.html)
+* [copy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.copy.html)
+* [corr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.corr.html)
+* [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.count.html)
+* [cov](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.cov.html)
+* [createOrReplaceTempView](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.createOrReplaceTempView.html) (see the sketch after this list)
+* [crossJoin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.crossJoin.html)
+* [cube](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.cube.html)
+* [distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.distinct.html)
+* [drop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.drop.html)
+* [dropDuplicates](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.dropDuplicates.html)
+* [drop_duplicates](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.drop_duplicates.html)
+* [dropna](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.dropna.html)
+* [exceptAll](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.exceptAll.html)
+* [explain](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.explain.html)
+* [fillna](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.fillna.html)
+* [filter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.filter.html)
+* [first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.first.html)
+* [groupBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.groupBy.html)
+* [groupby](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.groupby.html)
+* [head](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.head.html)
+* [intersect](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.intersect.html)
+* [intersectAll](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.intersectAll.html)
+* [join](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.join.html)
+* [limit](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.limit.html)
+* [na](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.na.html)
+* [orderBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.orderBy.html)
+* [persist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.persist.html)
+* [replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.replace.html)
+* [select](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.select.html)
+* [show](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.show.html)
+  * The `vertical` argument is not supported
+* [sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.sort.html)
+* sql
+  * SQLFrame Specific: Get the SQL representation of a given DataFrame (demonstrated after the Example Usage section above)
+* [stat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.stat.html)
+* [toDF](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.toDF.html)
+* [toPandas](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.toPandas.html)
+* [union](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.union.html)
+* [unionAll](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.unionAll.html)
+* [unionByName](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.unionByName.html)
+* [where](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.where.html)
+* [withColumn](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.withColumn.html)
+* [withColumnRenamed](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.withColumnRenamed.html)
+* [write](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.write.html)
+
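A short sketch combining a few of the DataFrame methods above, including the temp-view roundtrip referenced in the `createOrReplaceTempView` entry; it assumes `df` is the aggregated DataFrame from the Example Usage section:

```python
# Register the result as a temp view, then read it back via session.table.
per_continent = df.groupBy("continent").agg(F.sum("total_deaths").alias("deaths"))
per_continent.createOrReplaceTempView("deaths_by_continent")
session.table("deaths_by_continent").orderBy(F.col("deaths").desc()).show(5)
```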
203
+ ### Functions
204
+
205
+ * [abs](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.abs.html)
206
+ * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
207
+ * [acosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acosh.html)
208
+ * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
209
+ * [approxCountDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approxCountDistinct.html)
210
+ * [approx_count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approx_count_distinct.html)
211
+ * [array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array.html)
212
+ * [array_contains](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_contains.html)
213
+ * [array_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_distinct.html)
214
+ * [array_except](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_except.html)
215
+ * [array_intersect](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_intersect.html)
216
+ * [array_join](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_join.html)
217
+ * Null values are repsented as nothing instead of NULL. Ex: "a," instead of "a,NULL"
218
+ * [array_max](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_max.html)
219
+ * [array_min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_min.html)
220
+ * [array_position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_position.html)
221
+ * [array_remove](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_remove.html)
222
+ * [array_sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_sort.html)
223
+ * [array_union](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_union.html)
224
+ * [arrays_overlap](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.arrays_overlap.html)
225
+ * [asc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asc.html)
226
+ * [asc_nulls_first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asc_nulls_first.html)
227
+ * [asc_nulls_last](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asc_nulls_last.html)
228
+ * [ascii](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ascii.html)
229
+ * [asin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asin.html)
230
+ * [asinh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asinh.html)
231
+ * [atan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.atan.html)
232
+ * [atan2](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.atan2.html)
233
+ * [atanh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.atanh.html)
234
+ * [avg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.avg.html)
235
+ * [base64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.base64.html)
236
+ * [bit_length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bit_length.html)
237
+ * [bitwiseNOT](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwiseNOT.html)
238
+ * [bitwise_not](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bitwise_not.html)
239
+ * [bround](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.bround.html)
240
+ * [Input must be a fixed-point nnumber](https://docs.snowflake.com/en/sql-reference/data-types-numeric.html#label-data-types-for-fixed-point-numbers)
241
+ * [cbrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cbrt.html)
242
+ * [ceil](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceil.html)
243
+ * [ceiling](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ceiling.html)
244
+ * [coalesce](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.coalesce.html)
245
+ * [col](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.col.html)
246
+ * [collect_list](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.collect_list.html)
247
+ * [collect_set](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.collect_set.html)
248
+ * [concat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.concat.html)
249
+ * Can only concat strings not arrays
250
+ * [concat_ws](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.concat_ws.html)
251
+ * [corr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.corr.html)
252
+ * [cos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cos.html)
253
+ * [cosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cosh.html)
254
+ * [cot](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cot.html)
255
+ * [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count.html)
256
+ * [countDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.countDistinct.html)
257
+ * [count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.count_distinct.html)
258
+ * [covar_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_pop.html)
259
+ * [covar_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.covar_samp.html)
260
+ * [create_map](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.create_map.html)
261
+ * Assumes VARCHAR datatype for key/value unless the column is explicitly casted when calling `create_map`
262
+ * [cume_dist](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.cume_dist.html)
263
+ * [current_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_date.html)
264
+ * [current_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.current_timestamp.html)
265
+ * [date_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_add.html)
266
+ * [date_diff](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_diff.html)
267
+ * [date_format](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_format.html)
268
+ * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
269
+ * [date_sub](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_sub.html)
270
+ * [date_trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.date_trunc.html)
271
+ * [dayofmonth](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofmonth.html)
272
+ * [dayofweek](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofweek.html)
273
+ * [dayofyear](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dayofyear.html)
274
+ * [degrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.degrees.html)
275
+ * [dense_rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.dense_rank.html)
276
+ * [desc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.desc.html)
277
+ * [desc_nulls_first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.desc_nulls_first.html)
278
+ * [desc_nulls_last](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.desc_nulls_last.html)
279
+ * [e](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.e.html)
280
+ * [element_at](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.element_at.html)
281
+ * [exp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.exp.html)
282
+ * [explode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.explode.html)
283
+ * [expm1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expm1.html)
284
+ * [expr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.expr.html)
285
+ * [factorial](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.factorial.html)
286
+ * [flatten](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.flatten.html)
287
+ * [floor](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.floor.html)
288
+ * [format_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.format_number.html)
289
+ * [from_unixtime](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.from_unixtime.html)
290
+ * [greatest](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.greatest.html)
291
+ * [grouping_id](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.grouping_id.html)
292
+ * [hash](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.hash.html)
293
+ * The hash is calculated differently
294
+ * [hex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.hex.html)
295
+ * Hex on int does not produce the same result as Spark. Need to research why.
296
+ * [hour](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.hour.html)
297
+ * [initcap](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.initcap.html)
298
+ * [input_file_name](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.input_file_name.html)
299
+ * [inspect](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.inspect.html)
300
+ * [instr](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.instr.html)
301
+ * [isnan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.isnan.html)
302
+ * [isnull](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.isnull.html)
303
+ * [kurtosis](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.kurtosis.html)
304
+ * [lag](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lag.html)
305
+ * [last_day](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.last_day.html)
306
+ * [lead](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lead.html)
307
+ * [least](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.least.html)
308
+ * [length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.length.html)
309
+ * [levenshtein](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.levenshtein.html)
310
+ * threshold is not supported
311
+ * [lit](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lit.html)
312
+ * [locate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.locate.html)
313
+ * [log](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log.html)
314
+ * [log10](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log10.html)
315
+ * [log1p](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log1p.html)
316
+ * [log2](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.log2.html)
317
+ * [lower](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lower.html)
318
+ * [lpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lpad.html)
319
+ * [ltrim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ltrim.html)
320
+ * [make_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.make_date.html)
321
+ * [map_concat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_concat.html)
322
+ * [map_keys](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_keys.html)
323
+ * [max](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.max.html)
324
+ * [max_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.max_by.html)
325
+ * [md5](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.md5.html)
326
+ * [mean](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.mean.html)
327
+ * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min.html)
328
+ * [min_by](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.min_by.html)
329
+ * [minute](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.minute.html)
330
+ * [module](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.module.html)
331
+ * [month](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.month.html)
332
+ * [months_between](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.months_between.html)
333
+ * [nanvl](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.nanvl.html)
334
+ * [next_day](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.next_day.html)
335
+ * [nth_value](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.nth_value.html)
336
+ * offset > 1 means that previous values within the partition also share the same result. In Spark rows < offset have a Null value
337
+ * [ntile](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ntile.html)
338
+ * [nullif](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.nullif.html)
339
+ * [octet_length](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.octet_length.html)
340
+ * [overlay](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.overlay.html)
341
+ * [percent_rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percent_rank.html)
342
+ * [percentile](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percentile.html)
343
+ * does not accept an array of percentiles
344
+ * [percentile_approx](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.percentile_approx.html)
345
+ * [posexplode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.posexplode.html)
346
+ * Default order of columns are `col`, `pos` instead of `pos`, `col`
347
+ * [pow](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.pow.html)
348
+ * [quarter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.quarter.html)
349
+ * [radians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.radians.html)
350
+ * [rand](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rand.html)
351
+ * [rank](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rank.html)
352
+ * [regexp_extract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_extract.html)
353
+ * [regexp_replace](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.regexp_replace.html)
354
+ * [repeat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.repeat.html)
355
+ * [rint](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rint.html)
356
+ * [round](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.round.html)
357
+ * [row_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.row_number.html)
358
+ * [rpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rpad.html)
359
+ * [rtrim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.rtrim.html)
360
+ * [second](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.second.html)
361
+ * [sequence](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sequence.html)
362
+ * [sha1](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sha1.html)
363
+ * [sha2](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sha2.html)
364
+ * [shiftLeft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftLeft.html)
365
+ * [shiftRight](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftRight.html)
366
+ * [shiftleft](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftleft.html)
367
+ * [shiftright](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.shiftright.html)
368
+ * [signum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.signum.html)
369
+ * [sin](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sin.html)
370
+ * [sinh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sinh.html)
371
+ * [size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.size.html)
372
+ * [skewness](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.skewness.html)
373
+ * Skewness is calculated differently
374
+ * [slice](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.slice.html)
375
+ * [sort_array](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sort_array.html)
376
+ * [soundex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.soundex.html)
377
+ * [split](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.split.html)
378
+ * Regular expressions not supported
379
+ * [sqrt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sqrt.html)
380
+ * [stddev](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.stddev.html)
381
+ * [stddev_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.stddev_pop.html)
382
+ * [stddev_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.stddev_samp.html)
383
+ * [struct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.struct.html)
384
+ * [substring](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.substring.html)
385
+ * [sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum.html)
386
+ * [sumDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sumDistinct.html)
387
+ * [sum_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum_distinct.html)
388
+ * [tan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tan.html)
389
+ * [tanh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tanh.html)
390
+ * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
391
+ * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
392
+ * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
393
+ * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
394
+ * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
395
+ * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
+     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
+ * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
+ * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
+ * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+ * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
+ * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
+ * [unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unix_timestamp.html)
+     * [The format string should be in Snowflake syntax](https://docs.snowflake.com/en/sql-reference/functions-conversion#label-date-time-format-conversion)
+ * [upper](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.upper.html)
+ * [var_pop](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_pop.html)
+ * [var_samp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.var_samp.html)
+ * [variance](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.variance.html)
+ * [weekofyear](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.weekofyear.html)
+ * [when](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.when.html)
+ * [year](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.year.html)
+
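+ Because Snowflake's date/time format strings differ from Spark's, the `to_date`, `to_timestamp`, and `unix_timestamp` caveats above matter in practice. A minimal sketch, assuming a `snowflake-connector-python` connection handed to the session via the `sqlframe.conn` config (account details are placeholders):
+
+ ```python
+ from snowflake.connector import connect
+ from sqlframe.snowflake import SnowflakeSession
+ from sqlframe.snowflake import functions as F
+
+ # Hypothetical credentials; substitute your own account settings.
+ conn = connect(account="...", user="...", password="...")
+ session = SnowflakeSession.builder.config("sqlframe.conn", conn).getOrCreate()
+
+ df = session.createDataFrame([("2024-01-15",)], ["raw"])
+ # Snowflake-style format string ('YYYY-MM-DD'), not Spark's 'yyyy-MM-dd'.
+ df = df.select(F.to_date(F.col("raw"), "YYYY-MM-DD").alias("as_date"))
+ ```
+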
+ ### GroupedData Class
+
+ * [agg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.agg.html)
+ * [avg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.avg.html)
+ * [count](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.count.html)
+ * [max](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.max.html)
+ * [mean](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.mean.html)
+ * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.min.html)
+ * [pivot](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.pivot.html)
+ * [sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.sum.html)
+
+ ### DataFrameReader Class
+
+ * [csv](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameReader.csv.html)
+ * [json](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameReader.json.html)
+ * [load](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameReader.load.html)
+ * [parquet](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameReader.parquet.html)
+ * [table](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameReader.table.html)
+
+ ### DataFrameWriter Class
+
+ * [csv](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.csv.html)
+ * [insertInto](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.insertInto.html)
+ * [json](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.json.html)
+ * [mode](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.mode.html)
+ * [parquet](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.parquet.html)
+ * [saveAsTable](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrameWriter.saveAsTable.html)
+ * sql
+     * SQLFrame Specific: Get the SQL representation of the DataFrame (see the sketch below)
+
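+ The SQLFrame-specific `sql` entry above deserves a quick illustration: instead of executing anything on the warehouse, it returns the Snowflake SQL that SQLFrame generated for the DataFrame. A rough sketch, reusing the hypothetical `session` from the earlier example:
+
+ ```python
+ df = session.createDataFrame([(1, "a"), (2, "b")], ["id", "val"])
+ counts = df.groupBy("val").agg(F.count("id").alias("n"))
+
+ # Prints the generated Snowflake SQL as a string; no query is executed.
+ print(counts.sql())
+ ```
+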
+ ### SparkSession Class
+
+ * [builder](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.builder.html)
+ * [catalog](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.catalog.html)
+ * [createDataFrame](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.createDataFrame.html)
+ * [range](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.range.html)
+ * [read](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.read.html)
+ * [sql](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.sql.html)
+ * [table](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.SparkSession.table.html)
+
+ ### DataTypes
+
+ * [ArrayType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.ArrayType.html)
+ * [BinaryType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.BinaryType.html)
+ * [BooleanType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.BooleanType.html)
+ * [ByteType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.ByteType.html)
+ * [CharType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.CharType.html)
+ * [DataType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.DataType.html)
+ * [DateType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.DateType.html)
+ * [DecimalType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.DecimalType.html)
+ * [DoubleType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.DoubleType.html)
+ * [FloatType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.FloatType.html)
+ * [IntegerType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.IntegerType.html)
+ * [LongType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.LongType.html)
+ * [Row](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/row.html)
+ * [ShortType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.ShortType.html)
+ * [StringType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.StringType.html)
+ * [StructField](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.StructField.html)
+ * [StructType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.StructType.html)
+ * [TimestampNTZType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.TimestampNTZType.html)
+ * [TimestampType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.TimestampType.html)
+ * [VarcharType](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.types.VarcharType.html)
+
+ ### Window Class
+
+ * [currentRow](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.currentRow.html)
+ * [orderBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.orderBy.html)
+ * [partitionBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.partitionBy.html)
+ * [rangeBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.rangeBetween.html)
+ * [rowsBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.rowsBetween.html)
+ * [unboundedFollowing](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.unboundedFollowing.html)
+ * [unboundedPreceding](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.Window.unboundedPreceding.html)
+
+ ### WindowSpec Class
+
+ * [orderBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.orderBy.html)
+ * [partitionBy](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.partitionBy.html)
+ * [rangeBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.rangeBetween.html)
+ * [rowsBetween](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.WindowSpec.rowsBetween.html)
+ * sql
+     * SQLFrame Specific: Get the SQL representation of the WindowSpec (example below)
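+ The window support mirrors PySpark, and `WindowSpec.sql` is again SQLFrame-specific. A brief sketch, assuming the engine-level `Window` export (mirroring `pyspark.sql.Window`) and the `df` from the earlier example:
+
+ ```python
+ from sqlframe.snowflake import Window
+
+ w = (
+     Window.partitionBy("val")
+     .orderBy("id")
+     .rowsBetween(Window.unboundedPreceding, Window.currentRow)
+ )
+ running = df.select("id", "val", F.sum("id").over(w).alias("running_total"))
+
+ # SQLFrame-specific: inspect the SQL for just the window specification.
+ print(w.sql())
+ ```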
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.11.0",
-        "sqlglot>=24.0.0,<24.1",
+        "sqlglot>=24.0.0,<24.2",
     ],
     extras_require={
         "bigquery": [
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '1.4.0'
-__version_tuple__ = version_tuple = (1, 4, 0)
+__version__ = version = '1.5.0'
+__version_tuple__ = version_tuple = (1, 5, 0)
@@ -251,6 +251,13 @@ class Column:
             return lit(value)
         return Column(value)
 
+    @property
+    def dtype(self) -> t.Optional[DataType]:
+        expression = self.expression.unalias()
+        if isinstance(expression, exp.Cast):
+            return expression.args.get("to")
+        return None
+
     def copy(self) -> Column:
         return Column(self.expression.copy())
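
The new `Column.dtype` property above unaliases the column's expression and, when it is a cast, returns the cast's target type (a sqlglot `DataType` node); otherwise it returns `None`. A quick sketch of the intended behavior, using the standalone dialect so no connection is needed:

```python
from sqlframe.standalone import functions as F

casted = F.col("x").cast("int")
print(casted.dtype)   # the cast target, e.g. a sqlglot DataType for INT

plain = F.col("x")
print(plain.dtype)    # None: the underlying expression is not a Cast
```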