sqlframe 3.9.2__tar.gz → 3.9.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-3.9.2 → sqlframe-3.9.3}/Makefile +3 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/PKG-INFO +1 -1
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/databricks.md +0 -2
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/_version.py +2 -2
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/function_alternatives.py +5 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/functions.py +10 -10
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/catalog.py +12 -2
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/dataframe.py +3 -2
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/functions.py +1 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/session.py +14 -1
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe.egg-info/SOURCES.txt +6 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/common_fixtures.py +27 -0
- sqlframe-3.9.3/tests/integration/engines/databricks/test_databricks_catalog.py +338 -0
- sqlframe-3.9.3/tests/integration/engines/databricks/test_databricks_dataframe.py +169 -0
- sqlframe-3.9.3/tests/integration/engines/databricks/test_databricks_session.py +47 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_engine_session.py +4 -1
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_int_functions.py +202 -64
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/fixtures.py +70 -0
- sqlframe-3.9.3/tests/unit/databricks/test_activate.py +52 -0
- sqlframe-3.9.3/tests/unit/spark/__init__.py +0 -0
- sqlframe-3.9.3/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.github/CODEOWNERS +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.gitignore +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/.readthedocs.yaml +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/LICENSE +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/README.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/cake.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/bigquery.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/configuration.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/docs/postgres.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/duckdb.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/images/SF.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/images/favicon.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/images/favicon_old.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/index.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/postgres.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/redshift.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/requirements.txt +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/snowflake.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/spark.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/standalone.md +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/mkdocs.yml +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/pytest.ini +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/renovate.json +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/setup.cfg +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/setup.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/LICENSE +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/util.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/base/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe.egg-info/requires.txt +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/conftest.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.9.2/tests/integration/engines/duck → sqlframe-3.9.3/tests/integration/engines/databricks}/__init__.py +0 -0
- {sqlframe-3.9.2/tests/integration/engines/postgres → sqlframe-3.9.3/tests/integration/engines/duck}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.9.2/tests/integration/engines/redshift → sqlframe-3.9.3/tests/integration/engines/postgres}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.9.2/tests/integration/engines/snowflake → sqlframe-3.9.3/tests/integration/engines/redshift}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.9.2/tests/integration/engines/spark → sqlframe-3.9.3/tests/integration/engines/snowflake}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.9.2/tests/unit → sqlframe-3.9.3/tests/integration/engines/spark}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/types.py +0 -0
- {sqlframe-3.9.2/tests/unit/bigquery → sqlframe-3.9.3/tests/unit}/__init__.py +0 -0
- {sqlframe-3.9.2/tests/unit/duck → sqlframe-3.9.3/tests/unit/bigquery}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/conftest.py +0 -0
- {sqlframe-3.9.2/tests/unit/postgres → sqlframe-3.9.3/tests/unit/databricks}/__init__.py +0 -0
- {sqlframe-3.9.2/tests/unit/redshift → sqlframe-3.9.3/tests/unit/duck}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.9.2/tests/unit/snowflake → sqlframe-3.9.3/tests/unit/postgres}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.9.2/tests/unit/spark → sqlframe-3.9.3/tests/unit/redshift}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.9.2/tests/unit/standalone → sqlframe-3.9.3/tests/unit/snowflake}/__init__.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_functions.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.9.2 → sqlframe-3.9.3}/tests/unit/test_util.py +0 -0
--- sqlframe-3.9.2/sqlframe/base/function_alternatives.py
+++ sqlframe-3.9.3/sqlframe/base/function_alternatives.py
@@ -1220,6 +1220,11 @@ def get_json_object_cast_object(col: ColumnOrName, path: str) -> Column:
     return get_json_object(col_func(col).cast("variant"), path)
 
 
+def get_json_object_using_function(col: ColumnOrName, path: str) -> Column:
+    lit = get_func_from_session("lit")
+    return Column.invoke_anonymous_function(col, "GET_JSON_OBJECT", lit(path))
+
+
 def create_map_with_cast(*cols: t.Union[ColumnOrName, t.Iterable[ColumnOrName]]) -> Column:
     from sqlframe.base.functions import create_map
 
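The new alternative skips the VARIANT cast used by get_json_object_cast_object and instead invokes GET_JSON_OBJECT as an anonymous function over the column and a literal path. This is not the sqlframe code path itself, only a sqlglot sketch of the SQL shape that anonymous call should produce (the column name "data" and the path "$.name" are made up):

    from sqlglot import exp

    # Rough approximation of the SQL the helper builds: an anonymous
    # GET_JSON_OBJECT(<col>, '<path>') call instead of a VARIANT cast.
    call = exp.func("GET_JSON_OBJECT", exp.column("data"), exp.Literal.string("$.name"))
    print(call.sql())  # GET_JSON_OBJECT(data, '$.name')
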
--- sqlframe-3.9.2/sqlframe/base/functions.py
+++ sqlframe-3.9.3/sqlframe/base/functions.py
@@ -2173,7 +2173,7 @@ def current_database() -> Column:
 current_schema = current_database
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def current_timezone() -> Column:
     return Column.invoke_anonymous_function(None, "current_timezone")
 
@@ -2261,7 +2261,7 @@ def get(col: ColumnOrName, index: t.Union[ColumnOrName, int]) -> Column:
     return Column.invoke_anonymous_function(col, "get", index)
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def get_active_spark_context() -> SparkContext:
     """Raise RuntimeError if SparkContext is not initialized,
     otherwise, returns the active SparkContext."""
@@ -2778,7 +2778,7 @@ def isnotnull(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "isnotnull")
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def java_method(*cols: ColumnOrName) -> Column:
     """
     Calls a method with reflection.
@@ -3050,7 +3050,7 @@ def ln(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.Ln)
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def localtimestamp() -> Column:
     """
     Returns the current timestamp without time zone at the start of query evaluation
@@ -3080,7 +3080,7 @@ def localtimestamp() -> Column:
     return Column.invoke_anonymous_function(None, "localtimestamp")
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_dt_interval(
     days: t.Optional[ColumnOrName] = None,
     hours: t.Optional[ColumnOrName] = None,
@@ -3227,7 +3227,7 @@ def make_timestamp(
     )
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_timestamp_ltz(
     years: ColumnOrName,
     months: ColumnOrName,
@@ -3354,7 +3354,7 @@ def make_timestamp_ntz(
     )
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def make_ym_interval(
     years: t.Optional[ColumnOrName] = None,
     months: t.Optional[ColumnOrName] = None,
@@ -3922,7 +3922,7 @@ def printf(format: ColumnOrName, *cols: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(format, "printf", *cols)
 
 
-@meta(unsupported_engines=["*", "spark"])
+@meta(unsupported_engines=["*", "spark", "databricks"])
 def product(col: ColumnOrName) -> Column:
     """
     Aggregate function: returns the product of the values in a group.
@@ -3961,7 +3961,7 @@ def product(col: ColumnOrName) -> Column:
 reduce = aggregate
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def reflect(*cols: ColumnOrName) -> Column:
     """
     Calls a method with reflection.
@@ -5046,7 +5046,7 @@ def to_str(value: t.Any) -> t.Optional[str]:
     return str(value)
 
 
-@meta(unsupported_engines="*")
+@meta(unsupported_engines=["*", "databricks"])
 def to_timestamp_ltz(
     timestamp: ColumnOrName,
     format: t.Optional[ColumnOrName] = None,
--- sqlframe-3.9.2/sqlframe/databricks/catalog.py
+++ sqlframe-3.9.3/sqlframe/databricks/catalog.py
@@ -26,7 +26,6 @@ if t.TYPE_CHECKING:
 
 
 class DatabricksCatalog(
-    SetCurrentCatalogFromUseMixin["DatabricksSession", "DatabricksDataFrame"],
     GetCurrentCatalogFromFunctionMixin["DatabricksSession", "DatabricksDataFrame"],
     GetCurrentDatabaseFromFunctionMixin["DatabricksSession", "DatabricksDataFrame"],
     ListDatabasesFromInfoSchemaMixin["DatabricksSession", "DatabricksDataFrame"],
@@ -38,6 +37,15 @@ class DatabricksCatalog(
     CURRENT_CATALOG_EXPRESSION: exp.Expression = exp.func("current_catalog")
     UPPERCASE_INFO_SCHEMA = True
 
+    def setCurrentCatalog(self, catalogName: str) -> None:
+        self.session._collect(
+            exp.Use(
+                kind=exp.Var(this=exp.to_identifier("CATALOG")),
+                this=exp.parse_identifier(catalogName, dialect=self.session.input_dialect),
+            ),
+            quote_identifiers=False,
+        )
+
     def listFunctions(
         self, dbName: t.Optional[str] = None, pattern: t.Optional[str] = None
     ) -> t.List[Function]:
@@ -106,7 +114,9 @@ class DatabricksCatalog(
         )
         functions = [
             Function(
-                name=normalize_string(
+                name=normalize_string(
+                    x["function"].split(".")[-1], from_dialect="execution", to_dialect="output"
+                ),
                 catalog=normalize_string(
                     schema.catalog, from_dialect="execution", to_dialect="output"
                 ),
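DatabricksCatalog now overrides setCurrentCatalog itself rather than inheriting SetCurrentCatalogFromUseMixin, collecting a USE statement with an explicit CATALOG kind, and listFunctions keeps only the bare function name by splitting off the qualified prefix. A minimal usage sketch, mirroring the new integration tests further below and assuming an already-open databricks-sql connection object `conn`:

    from sqlframe.databricks.session import DatabricksSession

    session = DatabricksSession(conn)  # conn: an existing databricks.sql connection (assumed)
    session.catalog.setCurrentCatalog("catalog1")  # should issue a USE CATALOG statement
    assert session.catalog.currentCatalog() == "catalog1"
    # listFunctions() now reports bare names, e.g. Function(name="add", catalog="sqlframe", namespace=["db1"], ...)
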
--- sqlframe-3.9.2/sqlframe/databricks/dataframe.py
+++ sqlframe-3.9.3/sqlframe/databricks/dataframe.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import logging
-import sys
 import typing as t
 
 from sqlframe.base.catalog import Column as CatalogColumn
@@ -52,7 +51,9 @@ class DatabricksDataFrame(
         columns.append(
             CatalogColumn(
                 name=normalize_string(
-                    row.col_name,
+                    row.col_name,
+                    from_dialect="execution",
+                    to_dialect="output",
                 ),
                 dataType=normalize_string(
                     row.data_type,
--- sqlframe-3.9.2/sqlframe/databricks/session.py
+++ sqlframe-3.9.3/sqlframe/databricks/session.py
@@ -44,7 +44,20 @@ class DatabricksSession(
         from databricks import sql
 
         if not hasattr(self, "_conn"):
-            super().__init__(
+            super().__init__(
+                conn or sql.connect(server_hostname, http_path, access_token, disable_pandas=True)
+            )
+
+    @classmethod
+    def _try_get_map(cls, value: t.Any) -> t.Optional[t.Dict[str, t.Any]]:
+        if (
+            value
+            and isinstance(value, list)
+            and all(isinstance(item, tuple) for item in value)
+            and all(len(item) == 2 for item in value)
+        ):
+            return dict(value)
+        return None
 
     class Builder(_BaseSession.Builder):
         DEFAULT_EXECUTION_DIALECT = "databricks"
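For reference, the new `_try_get_map` classmethod treats a non-empty list of 2-tuples (presumably how MAP values come back from the Databricks SQL driver once pandas conversion is disabled) as a dict and returns None for anything else. A standalone sketch of that logic, re-implemented here only to illustrate the behavior:

    import typing as t

    def try_get_map(value: t.Any) -> t.Optional[t.Dict[str, t.Any]]:
        # Mirrors DatabricksSession._try_get_map: a non-empty list of
        # key/value tuples becomes a dict; everything else maps to None.
        if (
            value
            and isinstance(value, list)
            and all(isinstance(item, tuple) for item in value)
            and all(len(item) == 2 for item in value)
        ):
            return dict(value)
        return None

    assert try_get_map([("a", 1), ("b", 2)]) == {"a": 1, "b": 2}
    assert try_get_map([("a", 1), ("b",)]) is None  # not all items are 2-tuples
    assert try_get_map({"a": 1}) is None            # already a dict, not a list of tuples
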
--- sqlframe-3.9.2/sqlframe.egg-info/SOURCES.txt
+++ sqlframe-3.9.3/sqlframe.egg-info/SOURCES.txt
@@ -316,6 +316,10 @@ tests/integration/engines/bigquery/__init__.py
 tests/integration/engines/bigquery/test_bigquery_catalog.py
 tests/integration/engines/bigquery/test_bigquery_dataframe.py
 tests/integration/engines/bigquery/test_bigquery_session.py
+tests/integration/engines/databricks/__init__.py
+tests/integration/engines/databricks/test_databricks_catalog.py
+tests/integration/engines/databricks/test_databricks_dataframe.py
+tests/integration/engines/databricks/test_databricks_session.py
 tests/integration/engines/duck/__init__.py
 tests/integration/engines/duck/test_duckdb_activate.py
 tests/integration/engines/duck/test_duckdb_catalog.py
@@ -345,6 +349,8 @@ tests/unit/test_activate.py
 tests/unit/test_util.py
 tests/unit/bigquery/__init__.py
 tests/unit/bigquery/test_activate.py
+tests/unit/databricks/__init__.py
+tests/unit/databricks/test_activate.py
 tests/unit/duck/__init__.py
 tests/unit/duck/test_activate.py
 tests/unit/postgres/__init__.py
--- sqlframe-3.9.2/tests/common_fixtures.py
+++ sqlframe-3.9.3/tests/common_fixtures.py
@@ -12,6 +12,7 @@ from pytest_postgresql.janitor import DatabaseJanitor
 
 from sqlframe.base.session import _BaseSession
 from sqlframe.bigquery.session import BigQuerySession
+from sqlframe.databricks.session import DatabricksSession
 from sqlframe.duckdb.session import DuckDBSession
 from sqlframe.postgres.session import PostgresSession
 from sqlframe.redshift.session import RedshiftSession
@@ -22,6 +23,7 @@ from sqlframe.standalone.dataframe import StandaloneDataFrame
 from sqlframe.standalone.session import StandaloneSession
 
 if t.TYPE_CHECKING:
+    from databricks.sql import Connection as DatabricksConnection
    from google.cloud.bigquery.dbapi.connection import (
        Connection as BigQueryConnection,
    )
@@ -231,6 +233,31 @@ def snowflake_session(snowflake_connection: SnowflakeConnection) -> SnowflakeSes
     return session
 
 
+@pytest.fixture(scope="session")
+def databricks_connection() -> DatabricksConnection:
+    from databricks.sql import connect
+
+    conn = connect(
+        server_hostname=os.environ["SQLFRAME_DATABRICKS_SERVER_HOSTNAME"],
+        http_path=os.environ["SQLFRAME_DATABRICKS_HTTP_PATH"],
+        access_token=os.environ["SQLFRAME_DATABRICKS_ACCESS_TOKEN"],
+        auth_type="access_token",
+        catalog=os.environ["SQLFRAME_DATABRICKS_CATALOG"],
+        schema=os.environ["SQLFRAME_DATABRICKS_SCHEMA"],
+        _disable_pandas=True,
+    )
+    return conn
+
+
+@pytest.fixture
+def databricks_session(databricks_connection: DatabricksConnection) -> DatabricksSession:
+    session = DatabricksSession(databricks_connection)
+    session._execute("CREATE SCHEMA IF NOT EXISTS db1")
+    session._execute("CREATE TABLE IF NOT EXISTS db1.table1 (id INTEGER, name VARCHAR(100))")
+    session._execute("CREATE OR REPLACE FUNCTION db1.add(x INT, y INT) RETURNS INT RETURN x + y")
+    return session
+
+
 @pytest.fixture(scope="module")
 def _employee_data() -> EmployeeData:
     return [
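The new fixtures pull every connection detail from environment variables and then create the `db1` schema, `table1`, and the `add` function that the Databricks integration tests expect. A minimal sketch of the configuration those fixtures assume (all values are placeholders, not real credentials):

    import os

    # Placeholder values -- substitute a real workspace before running the Databricks tests.
    os.environ["SQLFRAME_DATABRICKS_SERVER_HOSTNAME"] = "<workspace>.cloud.databricks.com"
    os.environ["SQLFRAME_DATABRICKS_HTTP_PATH"] = "/sql/1.0/warehouses/<warehouse-id>"
    os.environ["SQLFRAME_DATABRICKS_ACCESS_TOKEN"] = "<personal-access-token>"
    os.environ["SQLFRAME_DATABRICKS_CATALOG"] = "sqlframe"  # catalog the new tests assert against
    os.environ["SQLFRAME_DATABRICKS_SCHEMA"] = "db1"        # schema the fixtures create and use
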
--- /dev/null
+++ sqlframe-3.9.3/tests/integration/engines/databricks/test_databricks_catalog.py
@@ -0,0 +1,338 @@
+import typing as t
+
+import pytest
+
+from sqlframe.base.catalog import CatalogMetadata, Column, Database, Function, Table
+from sqlframe.databricks.session import DatabricksSession
+
+pytest_plugins = ["tests.integration.fixtures"]
+pytestmark = [
+    pytest.mark.databricks,
+    pytest.mark.xdist_group("databricks_tests"),
+]
+
+
+@pytest.fixture
+def reset_catalog(databricks_session: DatabricksSession) -> t.Iterator[None]:
+    yield
+    databricks_session.catalog.setCurrentCatalog("sqlframe")
+    databricks_session.catalog.setCurrentDatabase("db1")
+
+
+@pytest.fixture
+def reset_database(databricks_session: DatabricksSession) -> t.Iterator[None]:
+    yield
+    databricks_session.catalog.setCurrentDatabase("db1")
+
+
+def test_current_catalog(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.currentCatalog() == "sqlframe"
+
+
+def test_set_current_catalog(databricks_session: DatabricksSession, reset_catalog):
+    assert databricks_session.catalog.currentCatalog() == "sqlframe"
+    databricks_session.catalog.setCurrentCatalog("catalog1")
+    assert databricks_session.catalog.currentCatalog() == "catalog1"
+
+
+def test_list_catalogs(databricks_session: DatabricksSession):
+    assert sorted(databricks_session.catalog.listCatalogs(), key=lambda x: x.name) == [
+        CatalogMetadata(name="sqlframe", description=None)
+    ]
+
+
+def test_current_database(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.currentDatabase() == "db1"
+
+
+def test_set_current_database(databricks_session: DatabricksSession, reset_database):
+    assert databricks_session.catalog.currentDatabase() == "db1"
+    databricks_session.catalog.setCurrentDatabase("default")
+    assert databricks_session.catalog.currentDatabase() == "default"
+
+
+def test_list_databases(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listDatabases(), key=lambda x: (x.catalog, x.name)
+    ) == [
+        Database(name="db1", catalog="sqlframe", description=None, locationUri=""),
+        Database(name="default", catalog="sqlframe", description=None, locationUri=""),
+        Database(name="information_schema", catalog="sqlframe", description=None, locationUri=""),
+    ]
+
+
+def test_list_databases_pattern(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listDatabases("db*"), key=lambda x: (x.catalog, x.name)
+    ) == [
+        Database(name="db1", catalog="sqlframe", description=None, locationUri=""),
+    ]
+
+
+def test_get_database_no_match(databricks_session: DatabricksSession):
+    with pytest.raises(ValueError):
+        assert databricks_session.catalog.getDatabase("nonexistent")
+
+
+def test_get_database_name_only(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.getDatabase("db1") == Database(
+        name="db1", catalog="sqlframe", description=None, locationUri=""
+    )
+
+
+def test_get_database_name_and_catalog(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.getDatabase("sqlframe.db1") == Database(
+        name="db1", catalog="sqlframe", description=None, locationUri=""
+    )
+
+
+def test_database_exists_does_exist(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.databaseExists("db1") is True
+
+
+def test_database_exists_does_not_exist(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.databaseExists("nonexistent") is False
+
+
+def test_list_tables_no_args(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listTables(), key=lambda x: (x.catalog, x.database, x.name)
+    ) == [
+        Table(
+            name="table1",
+            catalog="sqlframe",
+            namespace=["db1"],
+            description=None,
+            tableType="MANAGED",
+            isTemporary=False,
+        )
+    ]
+
+
+def test_list_tables_db_no_catalog(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listTables("db1"), key=lambda x: (x.catalog, x.database, x.name)
+    ) == [
+        Table(
+            name="table1",
+            catalog="sqlframe",
+            namespace=["db1"],
+            description=None,
+            tableType="MANAGED",
+            isTemporary=False,
+        )
+    ]
+
+
+def test_list_tables_db_and_catalog(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listTables("sqlframe.db1"),
+        key=lambda x: (x.catalog, x.database, x.name),
+    ) == [
+        Table(
+            name="table1",
+            catalog="sqlframe",
+            namespace=["db1"],
+            description=None,
+            tableType="MANAGED",
+            isTemporary=False,
+        )
+    ]
+
+
+def test_list_tables_pattern(databricks_session: DatabricksSession):
+    assert Table(
+        name="table1",
+        catalog="sqlframe",
+        namespace=["db1"],
+        description=None,
+        tableType="MANAGED",
+        isTemporary=False,
+    ) in databricks_session.catalog.listTables(pattern="tab*")
+
+
+def test_get_table(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.getTable("sqlframe.db1.table1") == Table(
+        name="table1",
+        catalog="sqlframe",
+        namespace=["db1"],
+        description=None,
+        tableType="MANAGED",
+        isTemporary=False,
+    )
+
+
+def test_get_table_not_exists(databricks_session: DatabricksSession):
+    with pytest.raises(ValueError):
+        assert databricks_session.catalog.getTable("dev.db1.nonexistent")
+
+
+def test_list_functions(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.listFunctions() == [
+        Function(
+            name="add",
+            catalog="sqlframe",
+            namespace=["db1"],
+            description=None,
+            className="",
+            isTemporary=False,
+        )
+    ]
+
+
+def test_list_functions_pattern(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.listFunctions(dbName="db1", pattern="ad*") == [
+        Function(
+            name="add",
+            catalog="sqlframe",
+            namespace=["db1"],
+            description=None,
+            className="",
+            isTemporary=False,
+        )
+    ]
+
+
+def test_function_exists_does_exist(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.functionExists("add", dbName="sqlframe.db1") is True
+
+
+def test_function_exists_does_not_exist(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.functionExists("nonexistent") is False
+
+
+def test_get_function_exists(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.getFunction("sqlframe.db1.add") == Function(
+        name="add",
+        catalog="sqlframe",
+        namespace=["db1"],
+        description=None,
+        className="",
+        isTemporary=False,
+    )
+
+
+def test_get_function_not_exists(databricks_session: DatabricksSession):
+    with pytest.raises(ValueError):
+        assert databricks_session.catalog.getFunction("sqlframe.db1.nonexistent")
+
+
+def test_list_columns(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listColumns("sqlframe.db1.table1"), key=lambda x: x.name
+    ) == [
+        Column(
+            name="id",
+            description=None,
+            dataType="INT",
+            nullable=True,
+            isPartition=False,
+            isBucket=False,
+        ),
+        Column(
+            name="name",
+            description=None,
+            dataType="VARCHAR(100)",
+            nullable=True,
+            isPartition=False,
+            isBucket=False,
+        ),
+    ]
+
+
+def test_list_columns_use_db_name(databricks_session: DatabricksSession):
+    assert sorted(
+        databricks_session.catalog.listColumns("table1", dbName="sqlframe.db1"),
+        key=lambda x: x.name,
+    ) == [
+        Column(
+            name="id",
+            description=None,
+            dataType="INT",
+            nullable=True,
+            isPartition=False,
+            isBucket=False,
+        ),
+        Column(
+            name="name",
+            description=None,
+            dataType="VARCHAR(100)",
+            nullable=True,
+            isPartition=False,
+            isBucket=False,
+        ),
+    ]
+
+
+def test_table_exists_table_name_only(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.tableExists("sqlframe.db1.table1") is True
+
+
+def test_table_exists_table_name_and_db_name(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.tableExists("table1", dbName="sqlframe.db1") is True
+
+
+def test_table_not_exists(databricks_session: DatabricksSession):
+    assert databricks_session.catalog.tableExists("nonexistent") is False
+
+
+def test_create_external_table(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.createExternalTable(
+            "table1", "sqlframe.default", "path/to/table"
+        )
+
+
+def test_create_table(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.createTable("table1", "sqlframe.default")
+
+
+def test_drop_temp_view(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.dropTempView("view1")
+
+
+def test_drop_global_temp_view(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.dropGlobalTempView("view1")
+
+
+def test_register_function(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.registerFunction("function1", lambda x: x)
+
+
+def test_is_cached(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.isCached("table1")
+
+
+def test_cache_table(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.cacheTable("table1")
+
+
+def test_uncache_table(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.uncacheTable("table1")
+
+
+def test_clear_cache(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.clearCache()
+
+
+def test_refresh_table(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.refreshTable("table1")
+
+
+def test_recover_partitions(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.recoverPartitions("table1")
+
+
+def test_refresh_by_path(databricks_session: DatabricksSession):
+    with pytest.raises(NotImplementedError):
+        databricks_session.catalog.refreshByPath("path/to/table")