sqlframe 3.35.0__tar.gz → 3.36.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-3.35.0 → sqlframe-3.36.0}/PKG-INFO +1 -1
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/databricks.md +23 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/snowflake.md +1 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/mkdocs.yml +2 -2
- {sqlframe-3.35.0 → sqlframe-3.36.0}/setup.py +3 -3
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/_version.py +2 -2
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/function_alternatives.py +0 -60
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/functions.py +17 -31
- sqlframe-3.36.0/sqlframe/base/group.py +227 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/types.py +4 -4
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/session.py +51 -2
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/requires.txt +3 -3
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_int_functions.py +7 -3
- sqlframe-3.36.0/tests/integration/test_int_grouped_data.py +372 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_types.py +7 -7
- sqlframe-3.35.0/sqlframe/base/group.py +0 -108
- sqlframe-3.35.0/tests/integration/test_int_grouped_data.py +0 -165
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.github/CODEOWNERS +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.gitignore +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/.readthedocs.yaml +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/LICENSE +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/Makefile +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/README.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/cake.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/bigquery.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/configuration.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/docs/postgres.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/duckdb.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/images/SF.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/images/favicon.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/index.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/postgres.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/redshift.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/requirements.txt +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/spark.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/standalone.md +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/pytest.ini +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/renovate.json +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/setup.cfg +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/LICENSE +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/mixins/table_mixins.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/util.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/py.typed +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/common_fixtures.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/conftest.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/issue_219.csv +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_table.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/types.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/conftest.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/databricks/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/duck/test_reader_options.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/spark/test_reader_options.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_functions.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/test_base_reader_options.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/test_catalog.py +0 -0
- {sqlframe-3.35.0 → sqlframe-3.36.0}/tests/unit/test_util.py +0 -0

{sqlframe-3.35.0 → sqlframe-3.36.0}/docs/databricks.md
@@ -103,6 +103,29 @@ A DatabricksSession, which implements the PySpark Session API, is created by pas
 session = SparkSession.builder.getOrCreate()
 ```
 
+### Creating Session with Idle Connections
+
+The Databricks SQL Connector for Python will automatically close connections that have been idle for a while.
+This will cause errors when using SQLFrame since it will try to reuse a closed connection.
+To avoid this, you can have SQLFrame create the connection for you and it will automatically reconnect when needed.
+Note that this will not work with the `activate` function since it requires a `databricks.sql.client.Connection` object.
+
+```python
+import os
+
+from sqlframe.databricks import DatabricksSession
+
+session = DatabricksSession(
+    server_hostname="dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+    http_path="/sql/1.0/warehouses/xxxxxxxxxxxxxxxx",
+    access_token=os.environ["ACCESS_TOKEN"],  # Replace this with how you get your databricks access token
+    auth_type="access_token",
+    catalog="catalog",
+    schema="schema",
+)
+```
+
+
 ## Example Usage
 
 ```python

{sqlframe-3.35.0 → sqlframe-3.36.0}/docs/snowflake.md
@@ -544,6 +544,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [mean](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.mean.html)
 * [min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.min.html)
 * [pivot](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.pivot.html)
+    * Doesn't support multiple aggregate functions on a single pivot
 * [sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.GroupedData.sum.html)
 
 ### DataFrameReader Class

{sqlframe-3.35.0 → sqlframe-3.36.0}/mkdocs.yml
@@ -3,14 +3,14 @@ repo_url: https://github.com/eakmanrq/sqlframe
 repo_name: eakmanrq/sqlframe
 nav:
   - "Overview": index.md
+  - "Configuration": configuration.md
   - "BigQuery": bigquery.md
+  - "Databricks": databricks.md
   - "DuckDB": duckdb.md
   - "Postgres": postgres.md
   - "Spark": spark.md
   - "Standalone": standalone.md
-  - "Configuration": configuration.md
   - "Redshift (In-Development)": redshift.md
-  - "Databricks (In-Development)": databricks.md
 theme:
   name: material
   logo: images/SF.png

{sqlframe-3.35.0 → sqlframe-3.36.0}/setup.py
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.9",
     install_requires=[
         "prettytable<4",
-        "sqlglot>=24.0.0,<26.
+        "sqlglot>=24.0.0,<26.32",
         "typing_extensions",
     ],
     extras_require={

@@ -38,12 +38,12 @@ setup(
         "psycopg>=3.1,<4",
         "pyarrow>=10,<21",
         "pyspark>=2,<3.6",
-        "pytest>=8.2.0,<8.
+        "pytest>=8.2.0,<8.5",
         "pytest-forked",
         "pytest-postgresql>=6,<8",
         "pytest-xdist>=3.6,<3.8",
         "pre-commit>=3.7,<5",
-        "ruff>=0.4.4,<0.
+        "ruff>=0.4.4,<0.13",
         "types-psycopg2>=2.9,<3",
     ],
     "docs": [

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/function_alternatives.py
@@ -999,27 +999,6 @@ def element_at_using_brackets(col: ColumnOrName, value: ColumnOrLiteral) -> Colu
     )
 
 
-def array_remove_using_filter(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
-    lit = get_func_from_session("lit")
-    col_func = get_func_from_session("col")
-
-    value = value if isinstance(value, Column) else lit(value)
-    return Column(
-        expression.Anonymous(
-            this="LIST_FILTER",
-            expressions=[
-                col_func(col).column_expression,
-                expression.Lambda(
-                    this=expression.NEQ(
-                        this=expression.Identifier(this="x"), expression=value.column_expression
-                    ),
-                    expressions=[expression.Identifier(this="x")],
-                ),
-            ],
-        )
-    )
-
-
 def array_union_using_list_concat(col1: ColumnOrName, col2: ColumnOrName) -> Column:
     col_func = get_func_from_session("col")
 

@@ -1321,10 +1300,6 @@ def day_with_try_to_timestamp(col: ColumnOrName) -> Column:
     )
 
 
-def endswith_with_underscore(str: ColumnOrName, suffix: ColumnOrName) -> Column:
-    return Column.invoke_anonymous_function(str, "ENDS_WITH", suffix)
-
-
 def endswith_using_like(str: ColumnOrName, suffix: ColumnOrName) -> Column:
     concat = get_func_from_session("concat")
     lit = get_func_from_session("lit")

@@ -1664,41 +1639,6 @@ def array_position_bgutil(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
     )
 
 
-def array_remove_bgutil(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
-    lit = get_func_from_session("lit")
-
-    value_col = value if isinstance(value, Column) else lit(value)
-
-    filter_subquery = expression.select(
-        "*",
-    ).from_(
-        expression.Unnest(
-            expressions=[Column.ensure_col(col).column_expression],
-            alias=expression.TableAlias(
-                columns=[expression.to_identifier("x")],
-            ),
-        )
-    )
-
-    agg_subquery = (
-        expression.select(
-            expression.Anonymous(
-                this="ARRAY_AGG",
-                expressions=[expression.column("x")],
-            ),
-        )
-        .from_(filter_subquery.subquery("t"))
-        .where(
-            expression.NEQ(
-                this=expression.column("x", "t"),
-                expression=value_col.column_expression,
-            )
-        )
-    )
-
-    return Column(agg_subquery.subquery())
-
-
 def array_distinct_bgutil(col: ColumnOrName) -> Column:
     return Column(
         expression.Anonymous(

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/functions.py
@@ -2268,21 +2268,10 @@ def element_at(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
 
 @meta()
 def array_remove(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
-    from sqlframe.base.function_alternatives import (
-        array_remove_bgutil,
-        array_remove_using_filter,
-    )
-
-    session = _get_session()
-
-    if session._is_bigquery:
-        return array_remove_bgutil(col, value)
-
-    if session._is_duckdb:
-        return array_remove_using_filter(col, value)
-
     value_col = value if isinstance(value, Column) else lit(value)
-    return Column.
+    return Column.invoke_expression_over_column(
+        col, expression.ArrayRemove, expression=value_col.column_expression
+    )
 
 
 @meta(unsupported_engines="postgres")
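
The functions.py hunks in this release share one theme: hand-rolled per-engine fallbacks (`array_remove_bgutil`, `array_remove_using_filter`, and, below, `endswith_with_underscore` and the anonymous `replace` call) are replaced by sqlglot expression nodes (`exp.ArrayRemove`, `exp.ArrayIntersect`, `exp.EndsWith`, `exp.Replace`), moving dialect-specific SQL generation into sqlglot itself. A minimal sketch of the idea, assuming a sqlglot version in the range pinned above that parses `ARRAY_REMOVE` for the Spark dialect (exact output varies by sqlglot version; the table and column names are illustrative):

```python
import sqlglot

# One canonical AST node; sqlglot's dialect layer emits engine-specific SQL,
# so sqlframe no longer needs its own DuckDB/BigQuery rewrites.
ast = sqlglot.parse_one("SELECT ARRAY_REMOVE(arr, 2) FROM t", read="spark")
for dialect in ("spark", "duckdb", "bigquery"):
    print(dialect, "->", ast.sql(dialect=dialect))
```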

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/functions.py
@@ -2299,14 +2288,14 @@ def array_distinct(col: ColumnOrName) -> Column:
 
 @meta(unsupported_engines=["bigquery", "postgres"])
 def array_intersect(col1: ColumnOrName, col2: ColumnOrName) -> Column:
-
-
-
-
-
-
-
-
+    return Column(
+        expression.ArrayIntersect(
+            expressions=[
+                Column.ensure_col(col1).column_expression,
+                Column.ensure_col(col2).column_expression,
+            ]
+        )
+    )
 
 
 @meta(unsupported_engines=["postgres"])

@@ -3237,18 +3226,16 @@ def elt(*inputs: ColumnOrName) -> Column:
 def endswith(str: ColumnOrName, suffix: ColumnOrName) -> Column:
     from sqlframe.base.function_alternatives import (
         endswith_using_like,
-        endswith_with_underscore,
     )
 
     session = _get_session()
 
-    if session._is_bigquery or session._is_duckdb:
-        return endswith_with_underscore(str, suffix)
-
     if session._is_postgres:
         return endswith_using_like(str, suffix)
 
-    return Column.
+    return Column.invoke_expression_over_column(
+        str, expression.EndsWith, expression=Column.ensure_col(suffix).column_expression
+    )
 
 
 @meta(unsupported_engines="*")

@@ -5666,10 +5653,9 @@ def replace(
     ):
         replace = expression.Literal.string("")  # type: ignore
 
-
-
-
-    return Column.invoke_anonymous_function(src, "replace", search)
+    return Column.invoke_expression_over_column(
+        src, expression.Replace, expression=search, replacement=replace
+    )
 
 
 @meta()

sqlframe-3.36.0/sqlframe/base/group.py (new file)
@@ -0,0 +1,227 @@
+# This code is based on code from Apache Spark under the license found in the LICENSE file located in the 'sqlframe' folder.
+
+from __future__ import annotations
+
+import sys
+import typing as t
+
+from sqlframe.base.operations import Operation, group_operation, operation
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
+
+if t.TYPE_CHECKING:
+    from sqlframe.base.column import Column
+    from sqlframe.base.session import DF
+else:
+    DF = t.TypeVar("DF")
+
+
+# https://spark.apache.org/docs/latest/sql-ref-syntax-qry-select-groupby.html
+# https://stackoverflow.com/questions/37975227/what-is-the-difference-between-cube-rollup-and-groupby-operators
+class _BaseGroupedData(t.Generic[DF]):
+    last_op: Operation
+
+    def __init__(
+        self,
+        df: DF,
+        group_by_cols: t.Union[t.List[Column], t.List[t.List[Column]]],
+        last_op: Operation,
+    ):
+        self._df = df.copy()
+        self.session = df.session
+        self.last_op = last_op
+        self.group_by_cols = group_by_cols
+        self.pivot_col: t.Optional[str] = None
+        self.pivot_values: t.Optional[t.List[t.Any]] = None
+
+    def _get_function_applied_columns(
+        self, func_name: str, cols: t.Tuple[str, ...]
+    ) -> t.List[Column]:
+        from sqlframe.base import functions as F
+
+        func_name = func_name.lower()
+        return [
+            getattr(F, func_name)(name).alias(
+                self.session._sanitize_column_name(f"{func_name}({name})")
+            )
+            for name in cols
+        ]
+
+    @group_operation(Operation.SELECT)
+    def agg(self, *exprs: t.Union[Column, t.Dict[str, str]]) -> DF:
+        from sqlframe.base.column import Column
+
+        columns = (
+            [
+                self._get_function_applied_columns(agg_func, (column_name,))[0]
+                for column_name, agg_func in exprs[0].items()
+            ]
+            if isinstance(exprs[0], dict)
+            else exprs
+        )
+        cols = self._df._ensure_and_normalize_cols(columns)
+
+        # Handle pivot transformation
+        if self.pivot_col is not None and self.pivot_values is not None:
+            from sqlglot import exp
+
+            from sqlframe.base import functions as F
+
+            # Build the pivot expression
+            # First, we need to convert the DataFrame to include the pivot logic
+            df = self._df.copy()
+
+            # Create the base query with group by columns, pivot column, and aggregation columns
+            select_cols = []
+            # Add group by columns
+            for col in self.group_by_cols:
+                select_cols.append(col.expression)  # type: ignore
+            # Add pivot column
+            select_cols.append(Column.ensure_col(self.pivot_col).expression)
+            # Add the value columns that will be aggregated
+            for agg_col in cols:
+                # Extract the column being aggregated from the aggregation function
+                # For example, from SUM(earnings), we want to extract 'earnings'
+                if (
+                    isinstance(agg_col.column_expression, exp.AggFunc)
+                    and agg_col.column_expression.this
+                ):
+                    if agg_col.column_expression.this not in select_cols:
+                        select_cols.append(agg_col.column_expression.this)
+
+            # Create the base query
+            base_query = df.expression.select(*select_cols, append=False)
+
+            # Build pivot expression
+            pivot_expressions = []
+            for agg_col in cols:
+                if isinstance(agg_col.column_expression, exp.AggFunc):
+                    # Clone the aggregation function
+                    # Snowflake doesn't support alias in the pivot, so we need to use the column_expression
+                    agg_func = (
+                        agg_col.column_expression.copy()
+                        if self.session._is_snowflake
+                        else agg_col.expression.copy()
+                    )
+                    pivot_expressions.append(agg_func)
+
+            # Create the IN clause with pivot values
+            in_values = []
+            for v in self.pivot_values:
+                if isinstance(v, str):
+                    in_values.append(exp.Literal.string(v))
+                else:
+                    in_values.append(exp.Literal.number(v))
+
+            # Build the pivot node with the fields parameter
+            pivot = exp.Pivot(
+                expressions=pivot_expressions,
+                fields=[
+                    exp.In(
+                        this=Column.ensure_col(self.pivot_col).column_expression,
+                        expressions=in_values,
+                    )
+                ],
+            )
+
+            # Create a subquery with the pivot attached
+            subquery = base_query.subquery()
+            subquery.set("pivots", [pivot])
+
+            # Create the final select from the pivoted subquery
+            expression = exp.select("*").from_(subquery)
+
+            return self._df.copy(expression=expression)
+
+        # Original non-pivot logic
+        if not self.group_by_cols or not isinstance(self.group_by_cols[0], (list, tuple, set)):
+            expression = self._df.expression.group_by(
+                # Use column_expression for group by to avoid alias in group by
+                *[x.column_expression for x in self.group_by_cols]  # type: ignore
+            ).select(*[x.expression for x in self.group_by_cols + cols], append=False)  # type: ignore
+            group_by_cols = self.group_by_cols
+        else:
+            from sqlglot import exp
+
+            expression = self._df.expression
+            all_grouping_sets = []
+            group_by_cols = []
+            for grouping_set in self.group_by_cols:
+                all_grouping_sets.append(
+                    exp.Tuple(expressions=[x.column_expression for x in grouping_set])  # type: ignore
+                )
+                group_by_cols.extend(grouping_set)  # type: ignore
+            group_by_cols = list(dict.fromkeys(group_by_cols))
+            group_by = exp.Group(grouping_sets=[exp.GroupingSets(expressions=all_grouping_sets)])
+            expression.set("group", group_by)
+            for col in cols:
+                # Spark supports having an empty grouping_id which means all of the columns but other dialects
+                # like duckdb don't support this so we expand the grouping_id to include all of the columns
+                if col.column_expression.this == "GROUPING_ID":
+                    col.column_expression.set("expressions", [x.expression for x in group_by_cols])  # type: ignore
+            expression = expression.select(*[x.expression for x in group_by_cols + cols], append=False)  # type: ignore
+        return self._df.copy(expression=expression)
+
+    def count(self) -> DF:
+        from sqlframe.base import functions as F
+
+        return self.agg(F.count("*").alias("count"))
+
+    def mean(self, *cols: str) -> DF:
+        return self.avg(*cols)
+
+    def avg(self, *cols: str) -> DF:
+        return self.agg(*self._get_function_applied_columns("avg", cols))
+
+    def max(self, *cols: str) -> DF:
+        return self.agg(*self._get_function_applied_columns("max", cols))
+
+    def min(self, *cols: str) -> DF:
+        return self.agg(*self._get_function_applied_columns("min", cols))
+
+    def sum(self, *cols: str) -> DF:
+        return self.agg(*self._get_function_applied_columns("sum", cols))
+
+    def pivot(self, pivot_col: str, values: t.Optional[t.List[t.Any]] = None) -> Self:
+        """
+        Pivots a column of the current DataFrame and perform the specified aggregation.
+
+        There are two versions of the pivot function: one that requires the caller
+        to specify the list of distinct values to pivot on, and one that does not.
+        The latter is more concise but less efficient, because Spark needs to first
+        compute the list of distinct values internally.
+
+        Parameters
+        ----------
+        pivot_col : str
+            Name of the column to pivot.
+        values : list, optional
+            List of values that will be translated to columns in the output DataFrame.
+
+        Returns
+        -------
+        GroupedData
+            Returns self to allow chaining with aggregation methods.
+        """
+        if self.session._is_postgres:
+            raise NotImplementedError(
+                "Pivot operation is not supported in Postgres. Please create an issue if you would like a workaround implemented."
+            )
+
+        self.pivot_col = pivot_col
+
+        if values is None:
+            # Eagerly compute distinct values
+            from sqlframe.base.column import Column
+
+            distinct_df = self._df.select(pivot_col).distinct()
+            distinct_rows = distinct_df.collect()
+            # Sort to make the results deterministic
+            self.pivot_values = sorted([row[0] for row in distinct_rows])
+        else:
+            self.pivot_values = values
+
+        return self
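
For orientation, the `pivot` method above is what enables the PySpark-style `groupBy(...).pivot(...).agg(...)` chain across engines (except Postgres, which raises `NotImplementedError` as shown). A usage sketch with illustrative data against DuckDB; per the snowflake.md entry earlier in this diff, only one aggregate function per pivot is supported:

```python
from sqlframe.duckdb import DuckDBSession

session = DuckDBSession()
df = session.createDataFrame(
    [
        ("dotNET", 2012, 10000),
        ("Java", 2012, 20000),
        ("dotNET", 2013, 48000),
        ("Java", 2013, 30000),
    ],
    ["course", "year", "earnings"],
)

# Passing the pivot values explicitly skips the extra SELECT DISTINCT query
# (and sort) that pivot() otherwise runs to discover them.
df.groupBy("year").pivot("course", ["dotNET", "Java"]).sum("earnings").show()
```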

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/base/types.py
@@ -104,22 +104,22 @@ class FloatType(DataType):
 
 
 class ByteType(DataType):
-    def
+    def simpleString(self) -> str:
         return "tinyint"
 
 
 class IntegerType(DataType):
-    def
+    def simpleString(self) -> str:
         return "int"
 
 
 class LongType(DataType):
-    def
+    def simpleString(self) -> str:
         return "bigint"
 
 
 class ShortType(DataType):
-    def
+    def simpleString(self) -> str:
         return "smallint"
 
 
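
The change above has these integer types expose PySpark's `simpleString` names. A quick sanity check (a sketch, assuming the classes take no constructor arguments, as in PySpark):

```python
from sqlframe.base.types import ByteType, IntegerType, LongType, ShortType

# simpleString() yields the Spark SQL simple name for each integer width.
assert ByteType().simpleString() == "tinyint"
assert IntegerType().simpleString() == "int"
assert LongType().simpleString() == "bigint"
assert ShortType().simpleString() == "smallint"
```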

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe/databricks/session.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
+import logging
 import typing as t
-import warnings
 
 from sqlframe.base.session import _BaseSession
 from sqlframe.databricks.catalog import DatabricksCatalog

@@ -19,6 +19,9 @@ else:
     DatabricksConnection = t.Any
 
 
+logger = logging.getLogger(__name__)
+
+
 class DatabricksSession(
     _BaseSession[  # type: ignore
         DatabricksCatalog,

@@ -43,14 +46,60 @@ class DatabricksSession(
         server_hostname: t.Optional[str] = None,
         http_path: t.Optional[str] = None,
         access_token: t.Optional[str] = None,
+        **kwargs: t.Any,
     ):
         from databricks import sql
 
+        self._conn_kwargs = (
+            {}
+            if conn
+            else {
+                "server_hostname": server_hostname,
+                "http_path": http_path,
+                "access_token": access_token,
+                "disable_pandas": True,
+                **kwargs,
+            }
+        )
+
         if not hasattr(self, "_conn"):
             super().__init__(
-                conn or sql.connect(
+                conn or sql.connect(**self._conn_kwargs),
             )
 
+    def _execute(self, sql: str) -> None:
+        from databricks.sql import connect
+        from databricks.sql.exc import DatabaseError, RequestError
+
+        try:
+            super()._execute(sql)
+        except (DatabaseError, RequestError) as e:
+            logger.warning("Failed to execute query")
+            if not self._is_session_expired_error(e):
+                logger.error("Error is not related to session expiration, re-raising")
+                raise e
+            if self._conn_kwargs:
+                logger.info("Attempting to reconnect with provided connection parameters")
+                self._connection = connect(**self._conn_kwargs)
+                # Clear the cached cursor
+                if hasattr(self, "_cur"):
+                    delattr(self, "_cur")
+                super()._execute(sql)
+            else:
+                logger.error("No connection parameters provided so could not reconnect")
+                raise
+
+    def _is_session_expired_error(self, error: Exception) -> bool:
+        error_str = str(error).lower()
+        session_keywords = [
+            "invalid sessionhandle",
+            "session is closed",
+            "session expired",
+            "session not found",
+            "sessionhandle",
+        ]
+        return any(keyword in error_str for keyword in session_keywords)
+
     @classmethod
     def _try_get_map(cls, value: t.Any) -> t.Optional[t.Dict[str, t.Any]]:
         if (
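
The `_execute` override above is a retry-once pattern: classify the driver error by keyword matching, rebuild the connection from the kwargs saved in `__init__`, drop the cached cursor, and replay the statement. A standalone sketch of the same pattern against assumed DB-API-style objects (the class and names here are illustrative, not sqlframe API):

```python
import logging
import typing as t

logger = logging.getLogger(__name__)


class ReconnectingExecutor:
    """Replays a statement once after rebuilding an expired connection."""

    def __init__(self, connect: t.Callable[..., t.Any], **conn_kwargs: t.Any) -> None:
        self._connect = connect          # connection factory, e.g. databricks.sql.connect
        self._conn_kwargs = conn_kwargs  # saved so the connection can be rebuilt later
        self._conn = connect(**conn_kwargs)

    def execute(self, sql: str) -> t.Any:
        try:
            return self._conn.cursor().execute(sql)
        except Exception as e:  # a real implementation catches driver-specific errors
            if "session" not in str(e).lower():
                raise  # not a session-expiry problem; surface it unchanged
            logger.info("Session looks expired; reconnecting once and retrying")
            self._conn = self._connect(**self._conn_kwargs)
            return self._conn.cursor().execute(sql)
```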

{sqlframe-3.35.0 → sqlframe-3.36.0}/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
 prettytable<4
-sqlglot<26.
+sqlglot<26.32,>=24.0.0
 typing_extensions
 
 [bigquery]

@@ -23,8 +23,8 @@ pyspark<3.6,>=2
 pytest-forked
 pytest-postgresql<8,>=6
 pytest-xdist<3.8,>=3.6
-pytest<8.
-ruff<0.
+pytest<8.5,>=8.2.0
+ruff<0.13,>=0.4.4
 types-psycopg2<3,>=2.9
 
 [docs]

{sqlframe-3.35.0 → sqlframe-3.36.0}/tests/integration/engines/test_int_functions.py
@@ -3409,6 +3409,10 @@ def test_bitmap_or_agg(get_session_and_func, get_func):
 
 def test_any_value(get_session_and_func):
     session, any_value = get_session_and_func("any_value")
+    if isinstance(session, PostgresSession):
+        pytest.skip(
+            "any_value is supported in SQLGlot for Postgres but by default assumes Postgres 16+. Tests run against 15. Therefore skipping but should remove this if SQLFrame adds the ability to define Postgres version."
+        )
     df = session.createDataFrame(
         [("c", None), ("a", 2), ("a", 3), ("b", 8), ("b", 2)], ["c1", "c2"]
     )

@@ -3419,9 +3423,9 @@ def test_any_value(get_session_and_func):
         assert non_ignore_nulls == [Row(value="c", value2=2)]
         assert ignore_nulls == [Row(value="c", value2=2)]
     # SQLGlot converts any_value to max
-    elif isinstance(session, PostgresSession):
-        assert non_ignore_nulls == [Row(value="c", value2=8)]
-        assert ignore_nulls == [Row(value="c", value2=8)]
+    # elif isinstance(session, PostgresSession):
+    #     assert non_ignore_nulls == [Row(value="c", value2=8)]
+    #     assert ignore_nulls == [Row(value="c", value2=8)]
     # Always includes nulls
     elif isinstance(session, SnowflakeSession):
         assert non_ignore_nulls == [Row(value="c", value2=None)]