sqlframe 3.8.1__tar.gz → 3.9.0__tar.gz
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- {sqlframe-3.8.1 → sqlframe-3.9.0}/Makefile +2 -1
- {sqlframe-3.8.1 → sqlframe-3.9.0}/PKG-INFO +11 -1
- {sqlframe-3.8.1 → sqlframe-3.9.0}/README.md +9 -0
- sqlframe-3.9.0/docs/databricks.md +157 -0
- sqlframe-3.9.0/docs/redshift.md +162 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/mkdocs.yml +2 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/setup.py +7 -4
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/__init__.py +1 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/_version.py +2 -2
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/dataframe.py +2 -2
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/util.py +10 -0
- sqlframe-3.9.0/sqlframe/databricks/__init__.py +32 -0
- sqlframe-3.9.0/sqlframe/databricks/catalog.py +302 -0
- sqlframe-3.9.0/sqlframe/databricks/dataframe.py +69 -0
- sqlframe-3.9.0/sqlframe/databricks/functions.py +22 -0
- sqlframe-3.9.0/sqlframe/databricks/group.py +14 -0
- sqlframe-3.9.0/sqlframe/databricks/readwriter.py +96 -0
- sqlframe-3.9.0/sqlframe/databricks/session.py +59 -0
- sqlframe-3.9.0/sqlframe/databricks/udf.py +11 -0
- sqlframe-3.9.0/sqlframe/spark/functions.pyi +416 -0
- sqlframe-3.9.0/sqlframe/standalone/column.py +1 -0
- sqlframe-3.9.0/sqlframe/standalone/types.py +1 -0
- sqlframe-3.9.0/sqlframe/standalone/window.py +1 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe.egg-info/PKG-INFO +11 -1
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe.egg-info/SOURCES.txt +14 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe.egg-info/requires.txt +7 -4
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.github/CODEOWNERS +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.gitignore +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/.readthedocs.yaml +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/LICENSE +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/cake.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/bigquery.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/configuration.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/docs/postgres.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/duckdb.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/images/SF.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/images/favicon.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/index.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/postgres.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/requirements.txt +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/snowflake.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/spark.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/standalone.md +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/pytest.ini +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/renovate.json +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/setup.cfg +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/LICENSE +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.8.1/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/column.py +0 -0
- {sqlframe-3.8.1/sqlframe/spark → sqlframe-3.9.0/sqlframe/databricks}/functions.pyi +0 -0
- {sqlframe-3.8.1/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/types.py +0 -0
- {sqlframe-3.8.1/sqlframe/duckdb → sqlframe-3.9.0/sqlframe/databricks}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.8.1/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.8.1/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.8.1/sqlframe/postgres → sqlframe-3.9.0/sqlframe/duckdb}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.8.1/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.8.1/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.8.1/sqlframe/redshift → sqlframe-3.9.0/sqlframe/postgres}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.8.1/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.8.1/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.8.1/sqlframe/snowflake → sqlframe-3.9.0/sqlframe/redshift}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.8.1/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.8.1/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.8.1/sqlframe/spark → sqlframe-3.9.0/sqlframe/snowflake}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.8.1/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.8.1/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.8.1/sqlframe/standalone → sqlframe-3.9.0/sqlframe/spark}/window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/common_fixtures.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/conftest.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_int_functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/conftest.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_functions.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.8.1 → sqlframe-3.9.0}/tests/unit/test_util.py +0 -0
{sqlframe-3.8.1 → sqlframe-3.9.0}/Makefile

@@ -1,5 +1,5 @@
 install-dev:
-	pip install -e ".[bigquery,dev,docs,duckdb,pandas,postgres,redshift,snowflake,spark]"
+	pip install -e ".[bigquery,dev,docs,duckdb,pandas,postgres,redshift,snowflake,databricks,spark]"
 
 install-pre-commit:
 	pre-commit install
@@ -33,6 +33,7 @@ stubs:
 	stubgen sqlframe/duckdb/functions.py --output ./ --inspect-mode
 	stubgen sqlframe/postgres/functions.py --output ./ --inspect-mode
 	stubgen sqlframe/snowflake/functions.py --output ./ --inspect-mode
+	stubgen sqlframe/databricks/functions.py --output ./ --inspect-mode
 	stubgen sqlframe/spark/functions.py --output ./ --inspect-mode
 
 package:
{sqlframe-3.8.1 → sqlframe-3.9.0}/PKG-INFO (the same change applies to sqlframe.egg-info/PKG-INFO)

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 3.8.1
+Version: 3.9.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 Provides-Extra: bigquery
+Provides-Extra: databricks
 Provides-Extra: dev
 Provides-Extra: docs
 Provides-Extra: duckdb
@@ -42,6 +43,11 @@ SQLFrame currently supports the following engines (many more in development):
 * [Snowflake](https://sqlframe.readthedocs.io/en/stable/snowflake)
 * [Spark](https://sqlframe.readthedocs.io/en/stable/spark)
 
+There are also two engines in development. These engines lack test coverage and robust documentation, but are available for early testing:
+
+* [Redshift](https://sqlframe.readthedocs.io/en/stable/redshift)
+* [Databricks](https://sqlframe.readthedocs.io/en/stable/databricks)
+
 SQLFrame also has a "Standalone" session that be used to generate SQL without any connection to a database engine.
 
 * [Standalone](https://sqlframe.readthedocs.io/en/stable/standalone)
@@ -66,6 +72,10 @@ pip install "sqlframe[postgres]"
 pip install "sqlframe[snowflake]"
 # Spark
 pip install "sqlframe[spark]"
+# Redshift (in development)
+pip install "sqlframe[redshift]"
+# Databricks (in development)
+pip install "sqlframe[databricks]"
 # Standalone
 pip install sqlframe
 ```
{sqlframe-3.8.1 → sqlframe-3.9.0}/README.md

@@ -12,6 +12,11 @@ SQLFrame currently supports the following engines (many more in development):
 * [Snowflake](https://sqlframe.readthedocs.io/en/stable/snowflake)
 * [Spark](https://sqlframe.readthedocs.io/en/stable/spark)
 
+There are also two engines in development. These engines lack test coverage and robust documentation, but are available for early testing:
+
+* [Redshift](https://sqlframe.readthedocs.io/en/stable/redshift)
+* [Databricks](https://sqlframe.readthedocs.io/en/stable/databricks)
+
 SQLFrame also has a "Standalone" session that be used to generate SQL without any connection to a database engine.
 
 * [Standalone](https://sqlframe.readthedocs.io/en/stable/standalone)
@@ -36,6 +41,10 @@ pip install "sqlframe[postgres]"
 pip install "sqlframe[snowflake]"
 # Spark
 pip install "sqlframe[spark]"
+# Redshift (in development)
+pip install "sqlframe[redshift]"
+# Databricks (in development)
+pip install "sqlframe[databricks]"
 # Standalone
 pip install sqlframe
 ```
sqlframe-3.9.0/docs/databricks.md (new file)

@@ -0,0 +1,157 @@
+from test import auth_type
+
+# Databricks (In Development)
+
+## Installation
+
+```bash
+pip install "sqlframe[databricks]"
+```
+
+## Enabling SQLFrame
+
+SQLFrame can be used in two ways:
+
+* Directly importing the `sqlframe.databricks` package
+* Using the [activate](./configuration.md#activating-sqlframe) function to allow for continuing to use `pyspark.sql` but have it use SQLFrame behind the scenes.
+
+### Import
+
+If converting a PySpark pipeline, all `pyspark.sql` should be replaced with `sqlframe.databricks`.
+In addition, many classes will have a `Databricks` prefix.
+For example, `DatabricksDataFrame` instead of `DataFrame`.
+
+
+```python
+# PySpark import
+# from pyspark.sql import SparkSession
+# from pyspark.sql import functions as F
+# from pyspark.sql.dataframe import DataFrame
+# SQLFrame import
+from sqlframe.databricks import DatabricksSession
+from sqlframe.databricks import functions as F
+from sqlframe.databricks import DatabricksDataFrame
+```
+
+### Activate
+
+If you would like to continue using `pyspark.sql` but have it use SQLFrame behind the scenes, you can use the [activate](./configuration.md#activating-sqlframe) function.
+
+```python
+import os
+
+from databricks.sql import connect
+from sqlframe import activate
+conn = connect(
+    server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+    http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+    access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+    auth_type="access_token",
+    catalog="catalog",
+    schema="schema",
+)
+activate("databricks", conn=conn)
+
+from pyspark.sql import SparkSession
+```
+
+`SparkSession` will now be a SQLFrame `DatabricksSession` object and everything will be run on Databricks directly.
+
+See [activate configuration](./configuration.md#activating-sqlframe) for information on how to pass in a connection and config options.
+
+## Creating a Session
+
+SQLFrame uses [Databricks SQL Connector for Python](https://github.com/databricks/databricks-sql-python) to connect to Databricks.
+A DatabricksSession, which implements the PySpark Session API, is created by passing in a `databricks.sql.client.Connection` object.
+
+=== "Import"
+
+    ```python
+    import os
+
+    from databricks.sql import connect
+    from sqlframe.databricks import DatabricksSession
+
+    conn = connect(
+        server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+        http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+        access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+        auth_type="access_token",
+        catalog="catalog",
+        schema="schema",
+    )
+    session = DatabricksSession(conn=conn)
+    ```
+
+=== "Activate"
+
+    ```python
+    import os
+
+    from databricks.sql import connect
+    from sqlframe import activate
+
+    conn = connect(
+        server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+        http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+        access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+        auth_type="access_token",
+        catalog="catalog",
+        schema="schema",
+    )
+    activate("databricks", conn=conn)
+
+    from pyspark.sql import SparkSession
+    session = SparkSession.builder.getOrCreate()
+    ```
+
+## Example Usage
+
+```python
+import os
+
+from databricks.sql import connect
+from sqlframe import activate
+
+conn = connect(
+    server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+    http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+    access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+    auth_type="access_token",
+    catalog="catalog",
+    schema="schema",
+)
+activate("databricks", conn=conn)
+
+from pyspark.sql import SparkSession
+from pyspark.sql import functions as F
+
+session = SparkSession.builder.getOrCreate()
+table_path = "samples.nyctaxi.trips"
+# Get columns in the table
+print(session.catalog.listColumns(table_path))
+# Get the number of rides per hour
+(
+    session.table(table_path)
+    .where(F.col("tpep_pickup_datetime").between("2016-01-01", "2016-01-16"))
+    .withColumn("dropoff_hour", F.hour(F.col("tpep_dropoff_datetime")))
+    .groupBy("dropoff_hour").count()
+    .select(
+        F.format_string('%02d:00', F.col("dropoff_hour")).alias("dropoff Hour"),
+        F.col("count").alias("number of rides")
+    ).orderBy("dropoff Hour")
+    .limit(5)
+    .show()
+)
+"""
++----------------+-------------------+
+| `dropoff hour` | `number of rides` |
++----------------+-------------------+
+|          00:00 |               205 |
+|          01:00 |               159 |
+|          02:00 |               117 |
+|          03:00 |                88 |
+|          04:00 |                73 |
++----------------+-------------------+
+"""
+```
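Two editorial notes on the new docs/databricks.md. First, its opening line (`+1` above) is a stray `from test import auth_type` statement sitting above the page title, which reads like leftover scratch content rather than intended prose. Second, the Example Usage section only exercises the activate path; the sketch below is a hypothetical smoke test of the import-style flow, reusing the doc's placeholder hostname, HTTP path, and token, and assuming `session.sql(...).collect()` behaves as it does for SQLFrame's other engines.

```python
# Hypothetical smoke test for the documented import-style flow; all
# connection values are the doc's placeholders, not real endpoints.
import os

from databricks.sql import connect
from sqlframe.databricks import DatabricksSession

conn = connect(
    server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
    http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
    access_token=os.environ["ACCESS_TOKEN"],
    auth_type="access_token",
    catalog="catalog",
    schema="schema",
)
session = DatabricksSession(conn=conn)

# Assumed to mirror the other SQLFrame engines: run a trivial query end to end.
print(session.sql("SELECT 1 AS ok").collect())
```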
sqlframe-3.9.0/docs/redshift.md (new file)

@@ -0,0 +1,162 @@
+# Redshift (In Development)
+
+## Installation
+
+```bash
+pip install "sqlframe[redshift]"
+```
+
+## Enabling SQLFrame
+
+SQLFrame can be used in two ways:
+
+* Directly importing the `sqlframe.redshift` package
+* Using the [activate](./configuration.md#activating-sqlframe) function to allow for continuing to use `pyspark.sql` but have it use SQLFrame behind the scenes.
+
+### Import
+
+If converting a PySpark pipeline, all `pyspark.sql` should be replaced with `sqlframe.redshift`.
+In addition, many classes will have a `Redshift` prefix.
+For example, `RedshiftDataFrame` instead of `DataFrame`.
+
+
+```python
+# PySpark import
+# from pyspark.sql import SparkSession
+# from pyspark.sql import functions as F
+# from pyspark.sql.dataframe import DataFrame
+# SQLFrame import
+from sqlframe.redshift import RedshiftSession
+from sqlframe.redshift import functions as F
+from sqlframe.redshift import RedshiftDataFrame
+```
+
+### Activate
+
+If you would like to continue using `pyspark.sql` but have it use SQLFrame behind the scenes, you can use the [activate](./configuration.md#activating-sqlframe) function.
+
+```python
+import os
+
+from redshift_connector import connect
+from sqlframe import activate
+conn = connect(
+    user="user",
+    password=os.environ["PASSWORD"],  # Replace this with how you get your password
+    database="database",
+    host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
+    port=5439,
+)
+activate("redshift", conn=conn)
+
+from pyspark.sql import SparkSession
+```
+
+`SparkSession` will now be a SQLFrame `RedshiftSession` object and everything will be run on Redshift directly.
+
+See [activate configuration](./configuration.md#activating-sqlframe) for information on how to pass in a connection and config options.
+
+## Creating a Session
+
+SQLFrame uses [Redshift DBAPI Python Connector](https://github.com/aws/amazon-redshift-python-driver) to connect to Redshift.
+A RedshiftSession, which implements the PySpark Session API, is created by passing in a `redshift_connector.Connection` object.
+
+=== "Import"
+
+    ```python
+    import os
+
+    from redshift_connector import connect
+    from sqlframe.redshift import RedshiftSession
+
+    conn = connect(
+        user="user",
+        password=os.environ["PASSWORD"],  # Replace this with how you get your password
+        database="database",
+        host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
+        port=5439,
+    )
+    session = RedshiftSession(conn=conn)
+    ```
+
+=== "Activate"
+
+    ```python
+    import os
+
+    from redshift_connector import connect
+    from sqlframe import activate
+
+    conn = connect(
+        user="user",
+        password=os.environ["PASSWORD"],  # Replace this with how you get your password
+        database="database",
+        host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
+        port=5439,
+    )
+    activate("redshift", conn=conn)
+
+    from pyspark.sql import SparkSession
+    session = SparkSession.builder.getOrCreate()
+    ```
+
+## Example Usage
+
+```python
+import os
+
+from redshift_connector import connect
+from sqlframe import activate
+
+conn = connect(
+    user="user",
+    password=os.environ["PASSWORD"],  # Replace this with how you get your password
+    database="database",
+    host="xxxxx.xxxxxx.region.redshift-serverless.amazonaws.com",
+    port=5439,
+)
+activate("redshift", conn=conn)
+
+from pyspark.sql import SparkSession
+from pyspark.sql import functions as F
+
+session = SparkSession.builder.getOrCreate()
+table_path = '"catalog.db.table"'
+# Get columns in the table
+print(session.catalog.listColumns(table_path))
+# Get the top 5 years with the greatest year-over-year % change in new families with a single child
+(
+    session.table(table_path)
+    .where(F.col("ever_born") == 1)
+    .groupBy("year")
+    .agg(F.count("*").alias("num_single_child_families"))
+    .withColumn(
+        "last_year_num_single_child_families",
+        F.lag(F.col("num_single_child_families"), 1).over(Window.orderBy("year"))
+    )
+    .withColumn(
+        "percent_change",
+        (F.col("num_single_child_families") - F.col("last_year_num_single_child_families"))
+        / F.col("last_year_num_single_child_families")
+    )
+    .orderBy(F.abs(F.col("percent_change")).desc())
+    .select(
+        F.col("year").alias("year"),
+        F.format_number("num_single_child_families", 0).alias("new families single child"),
+        F.format_number(F.col("percent_change") * 100, 2).alias("percent change"),
+    )
+    .limit(5)
+    .show()
+)
+"""
++------+---------------------------+----------------+
+| year | new families single child | percent change |
++------+---------------------------+----------------+
+| 1989 |                 1,650,246 |          25.02 |
+| 1974 |                   783,448 |          14.49 |
+| 1977 |                 1,057,379 |          11.38 |
+| 1985 |                 1,308,476 |          11.15 |
+| 1975 |                   868,985 |          10.92 |
++------+---------------------------+----------------+
+"""
+```
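One caveat before copying the new docs/redshift.md example verbatim: its pipeline calls `Window.orderBy("year")` inside `F.lag(...).over(...)`, but `Window` is never imported. Assuming `activate("redshift", conn=conn)` has already run as in the example, the fix is one extra name on the existing import line:

```python
# The example's pyspark imports, plus the Window import it is missing;
# with activate("redshift", ...) in effect these names resolve to
# SQLFrame's Redshift implementations.
from pyspark.sql import SparkSession, Window
from pyspark.sql import functions as F
```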
{sqlframe-3.8.1 → sqlframe-3.9.0}/setup.py

@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.12.1",
-        "sqlglot>=24.0.0,<25.
+        "sqlglot>=24.0.0,<25.33",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
@@ -32,7 +32,7 @@ setup(
             "duckdb>=0.9,<1.2",
             "findspark>=2,<3",
             "mypy>=1.10.0,<1.14",
-            "openai>=1.30,<1.
+            "openai>=1.30,<1.56",
             "pandas>=2,<3",
             "pandas-stubs>=2,<3",
             "psycopg>=3.1,<4",
@@ -43,7 +43,7 @@ setup(
             "pytest-xdist>=3.6,<3.7",
             "pre-commit>=3.5;python_version=='3.8'",
             "pre-commit>=3.7,<4.1;python_version>='3.9'",
-            "ruff>=0.4.4,<0.
+            "ruff>=0.4.4,<0.9",
            "types-psycopg2>=2.9,<3",
         ],
         "docs": [
@@ -58,7 +58,7 @@ setup(
             "pandas>=2,<3",
         ],
         "openai": [
-            "openai>=1.30,<1.
+            "openai>=1.30,<1.56",
         ],
         "pandas": [
             "pandas>=2,<3",
@@ -75,6 +75,9 @@ setup(
         "spark": [
             "pyspark>=2,<3.6",
         ],
+        "databricks": [
+            "databricks-sql-connector>=3.6,<4",
+        ],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
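The setup.py changes are upper-bound adjustments for `sqlglot`, `openai`, and `ruff` (the removed lines above are truncated in the registry's rendering), plus a new `databricks` extra pinned to `databricks-sql-connector>=3.6,<4`. If you want to confirm what an environment actually resolved to after installing that extra, a quick check is:

```python
# Print the installed versions of the pinned dependencies to compare
# against the constraints above; assumes both packages are installed.
from importlib.metadata import version

for pkg in ("sqlglot", "databricks-sql-connector"):
    print(pkg, version(pkg))
```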
{sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/dataframe.py

@@ -629,10 +629,10 @@ class _BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
             # We will drop the "view" if it exists before running the cache table
             output_expressions.append(exp.Drop(this=cache_table, exists=True, kind="VIEW"))
         elif expression_type == exp.Create:
-            expression = df.output_expression_container.copy()
+            expression = df.output_expression_container.copy()  # type: ignore
             expression.set("expression", select_expression)
         elif expression_type == exp.Insert:
-            expression = df.output_expression_container.copy()
+            expression = df.output_expression_container.copy()  # type: ignore
             select_without_ctes = select_expression.copy()
             select_without_ctes.set("with", None)
             expression.set("expression", select_without_ctes)
{sqlframe-3.8.1 → sqlframe-3.9.0}/sqlframe/base/util.py

@@ -6,6 +6,7 @@ import unicodedata
 
 from sqlglot import expressions as exp
 from sqlglot import parse_one, to_table
+from sqlglot.dialects import DuckDB
 from sqlglot.dialects.dialect import Dialect, DialectType
 from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
 from sqlglot.optimizer.qualify_columns import (
@@ -372,6 +373,12 @@ def normalize_string(
 ) -> str:
     from sqlframe.base.session import _BaseSession
 
+    data_type_replacement_mapping = {
+        DuckDB: {
+            "TIMESTAMP_NS": "TIMESTAMP",
+        }
+    }
+
     session: _BaseSession = _BaseSession()
 
     str_to_dialect = {
@@ -397,6 +404,9 @@ def normalize_string(
     elif is_table:
         value_expression = to_table(value_without_star, dialect=from_dialect)
     elif is_datatype:
+        value_without_star = data_type_replacement_mapping.get(from_dialect, {}).get(  # type: ignore
+            value_without_star, value_without_star
+        )
         value_expression = exp.DataType.build(value_without_star, dialect=from_dialect)
     elif is_column:
         value_expression = exp.to_column(value_without_star, dialect=from_dialect)
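The util.py change gives `normalize_string` a per-dialect rename table that is consulted before a data type string is parsed, so DuckDB's `TIMESTAMP_NS` normalizes to plain `TIMESTAMP`. Below is a standalone sketch of just that lookup, with `build_normalized_datatype` as a hypothetical helper rather than SQLFrame's actual function:

```python
# Sketch of the lookup added above: rename the type for known dialects,
# fall back to the original string otherwise, then build the sqlglot type.
from sqlglot import expressions as exp
from sqlglot.dialects import DuckDB

data_type_replacement_mapping = {DuckDB: {"TIMESTAMP_NS": "TIMESTAMP"}}

def build_normalized_datatype(value: str, from_dialect=DuckDB) -> exp.DataType:
    value = data_type_replacement_mapping.get(from_dialect, {}).get(value, value)
    return exp.DataType.build(value, dialect=from_dialect)

print(build_normalized_datatype("TIMESTAMP_NS").sql())  # TIMESTAMP
print(build_normalized_datatype("VARCHAR").sql())       # VARCHAR
```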
sqlframe-3.9.0/sqlframe/databricks/__init__.py (new file)

@@ -0,0 +1,32 @@
+from sqlframe.databricks.catalog import DatabricksCatalog
+from sqlframe.databricks.column import Column
+from sqlframe.databricks.dataframe import (
+    DatabricksDataFrame,
+    DatabricksDataFrameNaFunctions,
+    DatabricksDataFrameStatFunctions,
+)
+from sqlframe.databricks.group import DatabricksGroupedData
+from sqlframe.databricks.readwriter import (
+    DatabricksDataFrameReader,
+    DatabricksDataFrameWriter,
+)
+from sqlframe.databricks.session import DatabricksSession
+from sqlframe.databricks.types import Row
+from sqlframe.databricks.udf import DatabricksUDFRegistration
+from sqlframe.databricks.window import Window, WindowSpec
+
+__all__ = [
+    "Column",
+    "Row",
+    "DatabricksCatalog",
+    "DatabricksDataFrame",
+    "DatabricksDataFrameNaFunctions",
+    "DatabricksGroupedData",
+    "DatabricksDataFrameReader",
+    "DatabricksDataFrameWriter",
+    "DatabricksSession",
+    "DatabricksDataFrameStatFunctions",
+    "DatabricksUDFRegistration",
+    "Window",
+    "WindowSpec",
+]
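This `__init__`, together with the new session, catalog, dataframe, readwriter, group, and udf modules in the file list above, gives `sqlframe.databricks` the same import surface as the established engines. A small sketch of what that symmetry buys (imports only; a live `DatabricksSession` would be built as shown in docs/databricks.md):

```python
# A pipeline written against another engine ports by swapping the import root;
# the names below all come from __all__ above.
from sqlframe.databricks import DatabricksSession, Window
from sqlframe.databricks import functions as F

# Window specs build locally with no live connection, matching the PySpark
# API these classes implement.
rides_by_hour = Window.orderBy("dropoff_hour")
```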