sqlframe 0.0.3__tar.gz → 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. {sqlframe-0.0.3 → sqlframe-1.0.0}/Makefile +2 -2
  2. {sqlframe-0.0.3 → sqlframe-1.0.0}/PKG-INFO +1 -3
  3. {sqlframe-0.0.3 → sqlframe-1.0.0}/README.md +0 -2
  4. {sqlframe-0.0.3 → sqlframe-1.0.0}/blogs/sqlframe_universal_dataframe_api.md +6 -5
  5. sqlframe-1.0.0/pytest.ini +7 -0
  6. {sqlframe-0.0.3 → sqlframe-1.0.0}/setup.py +1 -1
  7. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/_version.py +2 -2
  8. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe.egg-info/PKG-INFO +1 -3
  9. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe.egg-info/requires.txt +1 -1
  10. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/conftest.py +6 -0
  11. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/bigquery/test_bigquery_session.py +1 -1
  12. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/duck/test_duckdb_session.py +1 -1
  13. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/postgres/test_postgres_session.py +2 -2
  14. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/redshift/test_redshift_session.py +2 -2
  15. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/test_engine_session.py +9 -2
  16. sqlframe-0.0.3/pytest.ini +0 -4
  17. {sqlframe-0.0.3 → sqlframe-1.0.0}/.github/CODEOWNERS +0 -0
  18. {sqlframe-0.0.3 → sqlframe-1.0.0}/.github/workflows/main.workflow.yaml +0 -0
  19. {sqlframe-0.0.3 → sqlframe-1.0.0}/.github/workflows/publish.workflow.yaml +0 -0
  20. {sqlframe-0.0.3 → sqlframe-1.0.0}/.gitignore +0 -0
  21. {sqlframe-0.0.3 → sqlframe-1.0.0}/.pre-commit-config.yaml +0 -0
  22. {sqlframe-0.0.3 → sqlframe-1.0.0}/.readthedocs.yaml +0 -0
  23. {sqlframe-0.0.3 → sqlframe-1.0.0}/LICENSE +0 -0
  24. {sqlframe-0.0.3 → sqlframe-1.0.0}/blogs/images/but_wait_theres_more.gif +0 -0
  25. {sqlframe-0.0.3 → sqlframe-1.0.0}/blogs/images/cake.gif +0 -0
  26. {sqlframe-0.0.3 → sqlframe-1.0.0}/blogs/images/you_get_pyspark_api.gif +0 -0
  27. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/bigquery.md +0 -0
  28. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/bigquery.md +0 -0
  29. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/duckdb.md +0 -0
  30. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/images/SF.png +0 -0
  31. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/images/favicon.png +0 -0
  32. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/images/favicon_old.png +0 -0
  33. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/images/sqlframe_diagram.png +0 -0
  34. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/images/sqlframe_logo.png +0 -0
  35. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/docs/postgres.md +0 -0
  36. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/duckdb.md +0 -0
  37. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/images/SF.png +0 -0
  38. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/images/favicon.png +0 -0
  39. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/images/favicon_old.png +0 -0
  40. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/images/sqlframe_diagram.png +0 -0
  41. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/images/sqlframe_logo.png +0 -0
  42. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/index.md +0 -0
  43. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/postgres.md +0 -0
  44. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/requirements.txt +0 -0
  45. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/standalone.md +0 -0
  46. {sqlframe-0.0.3 → sqlframe-1.0.0}/docs/stylesheets/extra.css +0 -0
  47. {sqlframe-0.0.3 → sqlframe-1.0.0}/mkdocs.yml +0 -0
  48. {sqlframe-0.0.3 → sqlframe-1.0.0}/renovate.json +0 -0
  49. {sqlframe-0.0.3 → sqlframe-1.0.0}/setup.cfg +0 -0
  50. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/LICENSE +0 -0
  51. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/__init__.py +0 -0
  52. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/__init__.py +0 -0
  53. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/_typing.py +0 -0
  54. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/catalog.py +0 -0
  55. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/column.py +0 -0
  56. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/dataframe.py +0 -0
  57. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/decorators.py +0 -0
  58. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/exceptions.py +0 -0
  59. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/function_alternatives.py +0 -0
  60. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/functions.py +0 -0
  61. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/group.py +0 -0
  62. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/mixins/__init__.py +0 -0
  63. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
  64. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
  65. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/normalize.py +0 -0
  66. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/operations.py +0 -0
  67. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/readerwriter.py +0 -0
  68. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/session.py +0 -0
  69. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/transforms.py +0 -0
  70. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/types.py +0 -0
  71. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/util.py +0 -0
  72. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/base/window.py +0 -0
  73. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/__init__.py +0 -0
  74. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/catalog.py +0 -0
  75. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/column.py +0 -0
  76. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/dataframe.py +0 -0
  77. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/functions.py +0 -0
  78. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/functions.pyi +0 -0
  79. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/group.py +0 -0
  80. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/readwriter.py +0 -0
  81. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/session.py +0 -0
  82. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/types.py +0 -0
  83. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/bigquery/window.py +0 -0
  84. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/__init__.py +0 -0
  85. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/catalog.py +0 -0
  86. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/column.py +0 -0
  87. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/dataframe.py +0 -0
  88. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/functions.py +0 -0
  89. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/functions.pyi +0 -0
  90. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/group.py +0 -0
  91. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/readwriter.py +0 -0
  92. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/session.py +0 -0
  93. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/types.py +0 -0
  94. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/duckdb/window.py +0 -0
  95. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/__init__.py +0 -0
  96. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/catalog.py +0 -0
  97. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/column.py +0 -0
  98. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/dataframe.py +0 -0
  99. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/functions.py +0 -0
  100. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/functions.pyi +0 -0
  101. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/group.py +0 -0
  102. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/readwriter.py +0 -0
  103. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/session.py +0 -0
  104. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/types.py +0 -0
  105. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/postgres/window.py +0 -0
  106. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/__init__.py +0 -0
  107. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/catalog.py +0 -0
  108. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/column.py +0 -0
  109. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/dataframe.py +0 -0
  110. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/functions.py +0 -0
  111. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/group.py +0 -0
  112. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/readwriter.py +0 -0
  113. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/session.py +0 -0
  114. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/types.py +0 -0
  115. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/redshift/window.py +0 -0
  116. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/__init__.py +0 -0
  117. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/catalog.py +0 -0
  118. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/column.py +0 -0
  119. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/dataframe.py +0 -0
  120. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/functions.py +0 -0
  121. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/group.py +0 -0
  122. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/readwriter.py +0 -0
  123. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/session.py +0 -0
  124. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/types.py +0 -0
  125. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/snowflake/window.py +0 -0
  126. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/__init__.py +0 -0
  127. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/catalog.py +0 -0
  128. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/column.py +0 -0
  129. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/dataframe.py +0 -0
  130. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/functions.py +0 -0
  131. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/group.py +0 -0
  132. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/readwriter.py +0 -0
  133. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/session.py +0 -0
  134. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/types.py +0 -0
  135. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/spark/window.py +0 -0
  136. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/__init__.py +0 -0
  137. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/catalog.py +0 -0
  138. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/column.py +0 -0
  139. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/dataframe.py +0 -0
  140. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/functions.py +0 -0
  141. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/group.py +0 -0
  142. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/readwriter.py +0 -0
  143. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/session.py +0 -0
  144. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/types.py +0 -0
  145. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe/standalone/window.py +0 -0
  146. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe.egg-info/SOURCES.txt +0 -0
  147. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe.egg-info/dependency_links.txt +0 -0
  148. {sqlframe-0.0.3 → sqlframe-1.0.0}/sqlframe.egg-info/top_level.txt +0 -0
  149. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/__init__.py +0 -0
  150. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/common_fixtures.py +0 -0
  151. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/fixtures/employee.csv +0 -0
  152. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/fixtures/employee.json +0 -0
  153. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/fixtures/employee.parquet +0 -0
  154. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/__init__.py +0 -0
  155. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/__init__.py +0 -0
  156. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/bigquery/__init__.py +0 -0
  157. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
  158. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/duck/__init__.py +0 -0
  159. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
  160. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/postgres/__init__.py +0 -0
  161. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
  162. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/redshift/__init__.py +0 -0
  163. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
  164. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/snowflake/__init__.py +0 -0
  165. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
  166. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
  167. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/spark/__init__.py +0 -0
  168. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
  169. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
  170. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/test_engine_reader.py +0 -0
  171. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/test_engine_writer.py +0 -0
  172. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/engines/test_int_functions.py +0 -0
  173. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/fixtures.py +0 -0
  174. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/test_int_dataframe.py +0 -0
  175. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/test_int_dataframe_stats.py +0 -0
  176. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/test_int_grouped_data.py +0 -0
  177. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/integration/test_int_session.py +0 -0
  178. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/types.py +0 -0
  179. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/__init__.py +0 -0
  180. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/__init__.py +0 -0
  181. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/fixtures.py +0 -0
  182. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_column.py +0 -0
  183. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_dataframe.py +0 -0
  184. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
  185. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_functions.py +0 -0
  186. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_session.py +0 -0
  187. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
  188. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_types.py +0 -0
  189. {sqlframe-0.0.3 → sqlframe-1.0.0}/tests/unit/standalone/test_window.py +0 -0
@@ -8,10 +8,10 @@ slow-test:
8
8
  pytest -n auto tests
9
9
 
10
10
  fast-test:
11
- pytest -n auto tests/unit
11
+ pytest -n auto -m "fast"
12
12
 
13
13
  local-test:
14
- pytest -n auto -m "local"
14
+ pytest -n auto -m "fast or local"
15
15
 
16
16
  bigquery-test:
17
17
  pytest -n auto -m "bigquery"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 0.0.3
3
+ Version: 1.0.0
4
4
  Summary: Taking the Spark out of PySpark by converting to SQL
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -30,8 +30,6 @@ License-File: LICENSE
30
30
  <img src="https://sqlframe.readthedocs.io/en/latest/docs/images/sqlframe_logo.png" alt="SQLFrame Logo" width="400"/>
31
31
  </div>
32
32
 
33
- ![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)
34
-
35
33
  SQLFrame implements the PySpark DataFrame API in order to enable running transformation pipelines directly on database engines - no Spark clusters or dependencies required.
36
34
 
37
35
  SQLFrame currently supports the following engines (many more in development):
@@ -2,8 +2,6 @@
2
2
  <img src="https://sqlframe.readthedocs.io/en/latest/docs/images/sqlframe_logo.png" alt="SQLFrame Logo" width="400"/>
3
3
  </div>
4
4
 
5
- ![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)
6
-
7
5
  SQLFrame implements the PySpark DataFrame API in order to enable running transformation pipelines directly on database engines - no Spark clusters or dependencies required.
8
6
 
9
7
  SQLFrame currently supports the following engines (many more in development):
@@ -9,7 +9,8 @@ SQL is the universal language that unites all data professionals, and it enables
9
9
  Despite its strengths, SQL often seems ill-suited for maintaining data pipelines.
10
10
  The language lacks support for abstracting common operations or unit testing specific segments of code, leading many to use Jinja as a makeshift solution.
11
11
  Jinja SQL is to SQL what Pig Latin is to English - can be fun in small doses but impossible to understand at scale.
12
- Moreover, SQL's repetitive nature, requiring columns to be repeated across operations, leads to fatigue and many data practitioners responding to the siren song of `SELECT *` and later found drowning in the sea of non-determinism.
12
+ Furthermore, the repetitive nature of SQL, where columns must be specified repeatedly across operations, often leads to fatigue among data professionals.
13
+ This results in data professionals responding to the siren song of `SELECT *`, only to be later found drowning in the sea of non-determinism.
13
14
 
14
15
  This has put data professionals in a tough spot: Do you write your pipelines in SQL to favor accessibility or Python to favor maintainability?
15
16
  Well, starting today, you no longer have to choose.
@@ -25,7 +26,7 @@ You can finally have your cake and eat it too.
25
26
  <img src="../docs/images/sqlframe_logo.png" alt="SQLFrame Logo" width="800"/>
26
27
  </div>
27
28
 
28
- SQLFrame revolutionizes how data professionals interact with SQL and PySpark DataFrames.
29
+ [SQLFrame](https://github.com/eakmanrq/sqlframe) revolutionizes how data professionals interact with SQL and PySpark DataFrames.
29
30
  Unlike traditional PySpark, SQLFrame converts DataFrame operations directly into SQL, enabling real-time SQL script generation during development.
30
31
  Here's how it works:
31
32
 
@@ -133,7 +134,7 @@ Therefore not only does SQLFrame make your DataFrame pipeline more accessible, i
133
134
  <img src="images/you_get_pyspark_api.gif" alt="There is more" width="800"/>
134
135
  </div>
135
136
 
136
- SQLFrame currently supports BigQuery, DuckDB, and PostgreSQL, with Redshift, Snowflake, Spark, and Trino in development.
137
- For those interested in experimenting with SQL generation for other engines, the "StandaloneSession" provides a flexible testing ground.
137
+ SQLFrame currently supports [BigQuery](https://sqlframe.readthedocs.io/en/stable/bigquery/), [DuckDB](https://sqlframe.readthedocs.io/en/stable/duckdb/), and [Postgres](https://sqlframe.readthedocs.io/en/stable/postgres/), with Redshift, Snowflake, Spark, and Trino in development.
138
+ For those interested in experimenting with SQL generation for other engines, the [StandaloneSession](https://sqlframe.readthedocs.io/en/stable/standalone/) provides a flexible testing ground.
138
139
 
139
- Follow the simple setup guide to begin integrating SQLFrame into your projects today!
140
+ Checkout the [README](https://github.com/eakmanrq/sqlframe) for more information on how to get started with SQLFrame!
@@ -0,0 +1,7 @@
1
+ [pytest]
2
+ markers =
3
+ bigquery: test for BigQuery
4
+ duckdb: test for DuckDB
5
+ local: test that don't rely on external connections
6
+ postgres: test for Postgres
7
+ addopts = -n 0 --dist=loadgroup
@@ -20,7 +20,7 @@ setup(
20
20
  python_requires=">=3.8",
21
21
  install_requires=[
22
22
  "prettytable<3.11.0",
23
- "sqlglot>=23.14.0,<23.17",
23
+ "sqlglot>=23.14.0,<23.18",
24
24
  ],
25
25
  extras_require={
26
26
  "bigquery": [
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.0.3'
16
- __version_tuple__ = version_tuple = (0, 0, 3)
15
+ __version__ = version = '1.0.0'
16
+ __version_tuple__ = version_tuple = (1, 0, 0)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: sqlframe
3
- Version: 0.0.3
3
+ Version: 1.0.0
4
4
  Summary: Taking the Spark out of PySpark by converting to SQL
5
5
  Home-page: https://github.com/eakmanrq/sqlframe
6
6
  Author: Ryan Eakman
@@ -30,8 +30,6 @@ License-File: LICENSE
30
30
  <img src="https://sqlframe.readthedocs.io/en/latest/docs/images/sqlframe_logo.png" alt="SQLFrame Logo" width="400"/>
31
31
  </div>
32
32
 
33
- ![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)
34
-
35
33
  SQLFrame implements the PySpark DataFrame API in order to enable running transformation pipelines directly on database engines - no Spark clusters or dependencies required.
36
34
 
37
35
  SQLFrame currently supports the following engines (many more in development):
@@ -1,5 +1,5 @@
1
1
  prettytable<3.11.0
2
- sqlglot<23.17,>=23.14.0
2
+ sqlglot<23.18,>=23.14.0
3
3
 
4
4
  [bigquery]
5
5
  google-cloud-bigquery-storage<3,>=2
@@ -5,6 +5,12 @@ import time
5
5
  import pytest
6
6
 
7
7
 
8
+ def pytest_collection_modifyitems(items, *args, **kwargs):
9
+ for item in items:
10
+ if not list(item.iter_markers()):
11
+ item.add_marker("fast")
12
+
13
+
8
14
  @pytest.fixture(scope="session", autouse=True)
9
15
  def set_tz():
10
16
  import os
@@ -17,4 +17,4 @@ def test_session_from_config():
17
17
  conn.cursor().execute("CREATE TABLE IF NOT EXISTS db1.test_table (cola INT, colb STRING)")
18
18
  session = BigQuerySession.builder.config("default_dataset", "sqlframe.db1").getOrCreate()
19
19
  columns = session.catalog.get_columns("test_table")
20
- assert columns == {"cola": exp.DataType.build("BIGINT"), "colb": exp.DataType.build("TEXT")}
20
+ assert columns == {"`cola`": exp.DataType.build("BIGINT"), "`colb`": exp.DataType.build("TEXT")}
@@ -10,4 +10,4 @@ def test_session_from_config():
10
10
  conn.execute("CREATE TABLE test_table (cola INT, colb STRING)")
11
11
  session = DuckDBSession.builder.config("sqlframe.conn", conn).getOrCreate()
12
12
  columns = session.catalog.get_columns("test_table")
13
- assert columns == {"cola": exp.DataType.build("INT"), "colb": exp.DataType.build("VARCHAR")}
13
+ assert columns == {'"cola"': exp.DataType.build("INT"), '"colb"': exp.DataType.build("VARCHAR")}
@@ -14,6 +14,6 @@ def test_session_from_config(function_scoped_postgres):
14
14
  ).getOrCreate()
15
15
  columns = session.catalog.get_columns("test_table")
16
16
  assert columns == {
17
- "cola": exp.DataType.build("INT", dialect=session.output_dialect),
18
- "colb": exp.DataType.build("STRING", dialect=session.output_dialect),
17
+ '"cola"': exp.DataType.build("INT", dialect=session.output_dialect),
18
+ '"colb"': exp.DataType.build("STRING", dialect=session.output_dialect),
19
19
  }
@@ -42,6 +42,6 @@ def test_session_from_config(cleanup_connector: RedshiftConnection):
42
42
  session = RedshiftSession.builder.config("sqlframe.conn", cleanup_connector).getOrCreate()
43
43
  columns = session.catalog.get_columns("dev.db1.test_table")
44
44
  assert columns == {
45
- "cola": exp.DataType.build("INT", dialect=session.output_dialect),
46
- "colb": exp.DataType.build("VARCHAR", dialect=session.output_dialect),
45
+ '"cola"': exp.DataType.build("INT", dialect=session.output_dialect),
46
+ '"colb"': exp.DataType.build("VARCHAR", dialect=session.output_dialect),
47
47
  }
@@ -29,8 +29,15 @@ def test_session(cleanup_session: _BaseSession):
29
29
  cola_type = exp.DataType.build("DECIMAL", dialect=session.output_dialect)
30
30
  else:
31
31
  cola_type = exp.DataType.build("INT", dialect=session.output_dialect)
32
- cola_name = '"COLA"' if session.output_dialect == "snowflake" else '"cola"'
33
- colb_name = '"COLB"' if session.output_dialect == "snowflake" else '"colb"'
32
+ if session.output_dialect == "bigquery":
33
+ cola_name = "`cola`"
34
+ colb_name = "`colb`"
35
+ elif session.output_dialect == "snowflake":
36
+ cola_name = '"COLA"'
37
+ colb_name = '"COLB"'
38
+ else:
39
+ cola_name = '"cola"'
40
+ colb_name = '"colb"'
34
41
  assert columns == {
35
42
  cola_name: cola_type,
36
43
  colb_name: exp.DataType.build("VARCHAR", dialect=session.output_dialect)
sqlframe-0.0.3/pytest.ini DELETED
@@ -1,4 +0,0 @@
1
- [pytest]
2
- markers =
3
- bigquery: test for BigQuery
4
- addopts = -n 0 --dist=loadgroup
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes