sqlframe 1.1.0__tar.gz → 1.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190) hide show
  1. {sqlframe-1.1.0 → sqlframe-1.1.1}/Makefile +1 -1
  2. {sqlframe-1.1.0 → sqlframe-1.1.1}/PKG-INFO +1 -1
  3. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/bigquery.md +19 -0
  4. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/duckdb.md +16 -0
  5. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/postgres.md +18 -0
  6. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/standalone.md +18 -0
  7. {sqlframe-1.1.0 → sqlframe-1.1.1}/setup.py +1 -1
  8. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/_version.py +2 -2
  9. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/catalog.py +1 -1
  10. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/column.py +1 -1
  11. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/dataframe.py +4 -2
  12. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/functions.py +1 -1
  13. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/group.py +1 -1
  14. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/normalize.py +1 -1
  15. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/operations.py +1 -1
  16. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/readerwriter.py +1 -1
  17. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/session.py +1 -1
  18. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/transforms.py +1 -1
  19. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/types.py +1 -1
  20. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/window.py +1 -1
  21. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/group.py +1 -1
  22. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/readwriter.py +1 -1
  23. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/catalog.py +1 -1
  24. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/group.py +1 -1
  25. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/readwriter.py +1 -1
  26. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/catalog.py +1 -1
  27. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/group.py +1 -1
  28. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/readwriter.py +1 -1
  29. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/catalog.py +1 -1
  30. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/group.py +1 -1
  31. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/readwriter.py +1 -1
  32. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/catalog.py +1 -1
  33. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/group.py +1 -1
  34. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/readwriter.py +1 -1
  35. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/catalog.py +1 -1
  36. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/group.py +1 -1
  37. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/readwriter.py +1 -1
  38. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/group.py +1 -1
  39. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/readwriter.py +1 -1
  40. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/session.py +1 -1
  41. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe.egg-info/PKG-INFO +1 -1
  42. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe.egg-info/requires.txt +1 -1
  43. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_dataframe.py +9 -0
  44. {sqlframe-1.1.0 → sqlframe-1.1.1}/.github/CODEOWNERS +0 -0
  45. {sqlframe-1.1.0 → sqlframe-1.1.1}/.github/workflows/main.workflow.yaml +0 -0
  46. {sqlframe-1.1.0 → sqlframe-1.1.1}/.github/workflows/publish.workflow.yaml +0 -0
  47. {sqlframe-1.1.0 → sqlframe-1.1.1}/.gitignore +0 -0
  48. {sqlframe-1.1.0 → sqlframe-1.1.1}/.pre-commit-config.yaml +0 -0
  49. {sqlframe-1.1.0 → sqlframe-1.1.1}/.readthedocs.yaml +0 -0
  50. {sqlframe-1.1.0 → sqlframe-1.1.1}/LICENSE +0 -0
  51. {sqlframe-1.1.0 → sqlframe-1.1.1}/README.md +0 -0
  52. {sqlframe-1.1.0 → sqlframe-1.1.1}/blogs/images/but_wait_theres_more.gif +0 -0
  53. {sqlframe-1.1.0 → sqlframe-1.1.1}/blogs/images/cake.gif +0 -0
  54. {sqlframe-1.1.0 → sqlframe-1.1.1}/blogs/images/you_get_pyspark_api.gif +0 -0
  55. {sqlframe-1.1.0 → sqlframe-1.1.1}/blogs/sqlframe_universal_dataframe_api.md +0 -0
  56. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/bigquery.md +0 -0
  57. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/duckdb.md +0 -0
  58. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/images/SF.png +0 -0
  59. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/images/favicon.png +0 -0
  60. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/images/favicon_old.png +0 -0
  61. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/images/sqlframe_diagram.png +0 -0
  62. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/images/sqlframe_logo.png +0 -0
  63. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/docs/postgres.md +0 -0
  64. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/images/SF.png +0 -0
  65. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/images/favicon.png +0 -0
  66. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/images/favicon_old.png +0 -0
  67. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/images/sqlframe_diagram.png +0 -0
  68. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/images/sqlframe_logo.png +0 -0
  69. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/index.md +0 -0
  70. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/requirements.txt +0 -0
  71. {sqlframe-1.1.0 → sqlframe-1.1.1}/docs/stylesheets/extra.css +0 -0
  72. {sqlframe-1.1.0 → sqlframe-1.1.1}/mkdocs.yml +0 -0
  73. {sqlframe-1.1.0 → sqlframe-1.1.1}/pytest.ini +0 -0
  74. {sqlframe-1.1.0 → sqlframe-1.1.1}/renovate.json +0 -0
  75. {sqlframe-1.1.0 → sqlframe-1.1.1}/setup.cfg +0 -0
  76. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/LICENSE +0 -0
  77. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/__init__.py +0 -0
  78. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/__init__.py +0 -0
  79. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/_typing.py +0 -0
  80. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/decorators.py +0 -0
  81. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/exceptions.py +0 -0
  82. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/function_alternatives.py +0 -0
  83. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/mixins/__init__.py +0 -0
  84. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  85. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  86. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/base/util.py +0 -0
  87. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/__init__.py +0 -0
  88. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/catalog.py +0 -0
  89. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/column.py +0 -0
  90. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/dataframe.py +0 -0
  91. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/functions.py +0 -0
  92. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/functions.pyi +0 -0
  93. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/session.py +0 -0
  94. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/types.py +0 -0
  95. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/bigquery/window.py +0 -0
  96. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/__init__.py +0 -0
  97. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/column.py +0 -0
  98. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/dataframe.py +0 -0
  99. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/functions.py +0 -0
  100. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/functions.pyi +0 -0
  101. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/session.py +0 -0
  102. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/types.py +0 -0
  103. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/duckdb/window.py +0 -0
  104. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/__init__.py +0 -0
  105. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/column.py +0 -0
  106. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/dataframe.py +0 -0
  107. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/functions.py +0 -0
  108. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/functions.pyi +0 -0
  109. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/session.py +0 -0
  110. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/types.py +0 -0
  111. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/postgres/window.py +0 -0
  112. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/__init__.py +0 -0
  113. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/column.py +0 -0
  114. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/dataframe.py +0 -0
  115. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/functions.py +0 -0
  116. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/session.py +0 -0
  117. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/types.py +0 -0
  118. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/redshift/window.py +0 -0
  119. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/__init__.py +0 -0
  120. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/column.py +0 -0
  121. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/dataframe.py +0 -0
  122. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/functions.py +0 -0
  123. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/session.py +0 -0
  124. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/types.py +0 -0
  125. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/snowflake/window.py +0 -0
  126. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/__init__.py +0 -0
  127. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/column.py +0 -0
  128. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/dataframe.py +0 -0
  129. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/functions.py +0 -0
  130. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/session.py +0 -0
  131. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/types.py +0 -0
  132. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/spark/window.py +0 -0
  133. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/__init__.py +0 -0
  134. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/catalog.py +0 -0
  135. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/column.py +0 -0
  136. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/dataframe.py +0 -0
  137. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/functions.py +0 -0
  138. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/types.py +0 -0
  139. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe/standalone/window.py +0 -0
  140. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe.egg-info/SOURCES.txt +0 -0
  141. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe.egg-info/dependency_links.txt +0 -0
  142. {sqlframe-1.1.0 → sqlframe-1.1.1}/sqlframe.egg-info/top_level.txt +0 -0
  143. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/__init__.py +0 -0
  144. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/common_fixtures.py +0 -0
  145. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/conftest.py +0 -0
  146. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/fixtures/employee.csv +0 -0
  147. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/fixtures/employee.json +0 -0
  148. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/fixtures/employee.parquet +0 -0
  149. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/fixtures/employee_extra_line.csv +0 -0
  150. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/__init__.py +0 -0
  151. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/__init__.py +0 -0
  152. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/bigquery/__init__.py +0 -0
  153. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  154. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
  155. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/duck/__init__.py +0 -0
  156. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  157. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
  158. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
  159. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/postgres/__init__.py +0 -0
  160. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  161. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
  162. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/redshift/__init__.py +0 -0
  163. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  164. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
  165. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/snowflake/__init__.py +0 -0
  166. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  167. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  168. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/spark/__init__.py +0 -0
  169. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  170. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/test_engine_dataframe.py +0 -0
  171. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/test_engine_reader.py +0 -0
  172. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/test_engine_session.py +0 -0
  173. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/test_engine_writer.py +0 -0
  174. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/engines/test_int_functions.py +0 -0
  175. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/fixtures.py +0 -0
  176. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/test_int_dataframe.py +0 -0
  177. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/test_int_dataframe_stats.py +0 -0
  178. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/test_int_grouped_data.py +0 -0
  179. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/integration/test_int_session.py +0 -0
  180. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/types.py +0 -0
  181. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/__init__.py +0 -0
  182. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/__init__.py +0 -0
  183. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/fixtures.py +0 -0
  184. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_column.py +0 -0
  185. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  186. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_functions.py +0 -0
  187. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_session.py +0 -0
  188. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  189. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_types.py +0 -0
  190. {sqlframe-1.1.0 → sqlframe-1.1.1}/tests/unit/standalone/test_window.py +0 -0
@@ -1,5 +1,5 @@
1
1
  install-dev:
2
- pip install -e ".[dev,duckdb,postgres,redshift,bigquery,snowflake,spark]"
2
+ pip install -e ".[dev,docs,duckdb,postgres,redshift,bigquery,snowflake,spark]"
3
3
 
4
4
  install-pre-commit:
5
5
  pre-commit install
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 1.1.0
3
+ Version: 1.1.1
4
4
  Summary: Taking the Spark out of PySpark by converting to SQL
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -43,6 +43,25 @@ Regardless of approach, it is recommended to configure `default_dataset` in the
43
43
  session = BigQuerySession(conn=conn, default_dataset="sqlframe.db1")
44
44
  ```
45
45
 
46
+ ## Imports
47
+
48
+ If converting a PySpark pipeline, all `pyspark.sql` imports should be replaced with `sqlframe.bigquery`.
49
+ In addition, many classes will have a `BigQuery` prefix.
50
+ For example, `BigQueryDataFrame` instead of `DataFrame`.
51
+
52
+
53
+ ```python
54
+ # PySpark import
55
+ # from pyspark.sql import SparkSession
56
+ # from pyspark.sql import functions as F
57
+ # from pyspark.sql.dataframe import DataFrame
58
+ # SQLFrame import
59
+ from sqlframe.bigquery import BigQuerySession
60
+ from sqlframe.bigquery import functions as F
61
+ from sqlframe.bigquery import BigQueryDataFrame
62
+ ```
63
+
64
+
46
65
  ## Example Usage
47
66
 
48
67
  ```python
@@ -30,7 +30,23 @@ By default, SQLFrame will create a connection to an in-memory database.
30
30
  conn = duckdb.connect(database=":memory:")
31
31
  session = DuckDBSession(conn=conn)
32
32
  ```
33
+ ## Imports
33
34
 
35
+ If converting a PySpark pipeline, all `pyspark.sql` imports should be replaced with `sqlframe.duckdb`.
36
+ In addition, many classes will have a `DuckDB` prefix.
37
+ For example, `DuckDBDataFrame` instead of `DataFrame`.
38
+
39
+
40
+ ```python
41
+ # PySpark import
42
+ # from pyspark.sql import SparkSession
43
+ # from pyspark.sql import functions as F
44
+ # from pyspark.sql.dataframe import DataFrame
45
+ # SQLFrame import
46
+ from sqlframe.duckdb import DuckDBSession
47
+ from sqlframe.duckdb import functions as F
48
+ from sqlframe.duckdb import DuckDBDataFrame
49
+ ```
34
50
 
35
51
  ## Example Usage
36
52
 
@@ -25,6 +25,24 @@ conn = connect(
25
25
  session = PostgresSession(conn=conn)
26
26
  ```
27
27
 
28
+ ## Imports
29
+
30
+ If converting a PySpark pipeline, all `pyspark.sql` imports should be replaced with `sqlframe.postgres`.
31
+ In addition, many classes will have a `Postgres` prefix.
32
+ For example, `PostgresDataFrame` instead of `DataFrame`.
33
+
34
+
35
+ ```python
36
+ # PySpark import
37
+ # from pyspark.sql import SparkSession
38
+ # from pyspark.sql import functions as F
39
+ # from pyspark.sql.dataframe import DataFrame
40
+ # SQLFrame import
41
+ from sqlframe.postgres import PostgresSession
42
+ from sqlframe.postgres import functions as F
43
+ from sqlframe.postgres import PostgresDataFrame
44
+ ```
45
+
28
46
  ## Example Usage
29
47
 
30
48
  ```python
@@ -24,6 +24,24 @@ from sqlframe.standalone import StandaloneSession
24
24
  session = StandaloneSession.builder.config(map={"sqlframe.input.dialect": 'duckdb', "sqlframe.output.dialect": 'bigquery'}).getOrCreate()
25
25
  ```
26
26
 
27
+ ## Imports
28
+
29
+ If converting a PySpark pipeline, all `pyspark.sql` imports should be replaced with `sqlframe.standalone`.
30
+ In addition, many classes will have a `Standalone` prefix.
31
+ For example, `StandaloneDataFrame` instead of `DataFrame`.
32
+
33
+
34
+ ```python
35
+ # PySpark import
36
+ # from pyspark.sql import SparkSession
37
+ # from pyspark.sql import functions as F
38
+ # from pyspark.sql.dataframe import DataFrame
39
+ # SQLFrame import
40
+ from sqlframe.standalone import StandaloneSession
41
+ from sqlframe.standalone import functions as F
42
+ from sqlframe.standalone import StandaloneDataFrame
43
+ ```
44
+
27
45
  ## Accessing Tables
28
46
 
29
47
  PySpark DataFrame API, and currently SQLFrame, requires that a table can be accessed to get its schema information.
@@ -47,7 +47,7 @@ setup(
47
47
  ],
48
48
  "docs": [
49
49
  "mkdocs==1.4.2",
50
- "mkdocs-include-markdown-plugin==4.0.3",
50
+ "mkdocs-include-markdown-plugin==6.0.6",
51
51
  "mkdocs-material==9.0.5",
52
52
  "mkdocs-material-extensions==1.1.1",
53
53
  "pymdown-extensions",
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '1.1.0'
16
- __version_tuple__ = version_tuple = (1, 1, 0)
15
+ __version__ = version = '1.1.1'
16
+ __version_tuple__ = version_tuple = (1, 1, 1)
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -608,7 +608,9 @@ class _BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
608
608
  @operation(Operation.WHERE)
609
609
  def where(self, column: t.Union[Column, str, bool], **kwargs) -> Self:
610
610
  if isinstance(column, str):
611
- col = sqlglot.parse_one(column, dialect=self.session.input_dialect)
611
+ col = self._ensure_and_normalize_col(
612
+ sqlglot.parse_one(column, dialect=self.session.input_dialect)
613
+ )
612
614
  else:
613
615
  col = self._ensure_and_normalize_col(column)
614
616
  return self.copy(expression=self.expression.where(col.expression))
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  import typing as t
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,4 +1,4 @@
1
- # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'dataframe' folder.
1
+ # This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
2
2
 
3
3
  from __future__ import annotations
4
4
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 1.1.0
3
+ Version: 1.1.1
4
4
  Summary: Taking the Spark out of PySpark by converting to SQL
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -28,7 +28,7 @@ pre-commit>=3.5
28
28
  pre-commit<3.8,>=3.7
29
29
 
30
30
  [docs]
31
- mkdocs-include-markdown-plugin==4.0.3
31
+ mkdocs-include-markdown-plugin==6.0.6
32
32
  mkdocs-material-extensions==1.1.1
33
33
  mkdocs-material==9.0.5
34
34
  mkdocs==1.4.2
@@ -55,3 +55,12 @@ def test_with_column_duplicate_alias(standalone_employee: StandaloneDataFrame):
55
55
  df.sql(pretty=False)
56
56
  == "SELECT `a1`.`employee_id` AS `employee_id`, CAST(`a1`.`age` AS STRING) AS `fname`, CAST(`a1`.`lname` AS STRING) AS `lname`, `a1`.`age` AS `age`, `a1`.`store_id` AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`)"
57
57
  )
58
+
59
+
60
+ def test_where_expr(standalone_employee: StandaloneDataFrame):
61
+ df = standalone_employee.where("fname = 'Jack' AND age = 37")
62
+ assert df.columns == ["employee_id", "fname", "lname", "age", "store_id"]
63
+ assert (
64
+ df.sql(pretty=False)
65
+ == "SELECT `a1`.`employee_id` AS `employee_id`, CAST(`a1`.`fname` AS STRING) AS `fname`, CAST(`a1`.`lname` AS STRING) AS `lname`, `a1`.`age` AS `age`, `a1`.`store_id` AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`) WHERE `a1`.`age` = 37 AND CAST(`a1`.`fname` AS STRING) = 'Jack'"
66
+ )
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes