sqlframe 3.11.0.tar.gz → 3.12.0.tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- {sqlframe-3.11.0 → sqlframe-3.12.0}/PKG-INFO +1 -1
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/_version.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/decorators.py +1 -1
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/mixins/dataframe_mixins.py +3 -3
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/operations.py +5 -5
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/session.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/dataframe.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/testing/utils.py +3 -3
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_dataframe.py +4 -4
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_writer.py +15 -17
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_int_functions.py +2 -2
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/fixtures.py +7 -7
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/test_int_dataframe.py +95 -95
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/test_int_dataframe_stats.py +4 -4
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.github/CODEOWNERS +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.gitignore +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/.readthedocs.yaml +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/LICENSE +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/Makefile +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/README.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/cake.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/bigquery.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/configuration.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/databricks.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/docs/postgres.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/duckdb.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/images/SF.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/images/favicon.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/index.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/postgres.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/redshift.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/requirements.txt +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/snowflake.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/spark.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/standalone.md +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/mkdocs.yml +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/pytest.ini +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/renovate.json +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/setup.cfg +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/setup.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/LICENSE +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/util.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe.egg-info/requires.txt +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/common_fixtures.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/conftest.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/issue_219.csv +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/conftest.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/databricks/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_functions.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.11.0 → sqlframe-3.12.0}/tests/unit/test_util.py +0 -0

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/dataframe.py

@@ -80,7 +80,7 @@ JOIN_HINTS = {
 }


-DF = t.TypeVar("DF", bound="_BaseDataFrame")
+DF = t.TypeVar("DF", bound="BaseDataFrame")


 class OpenAIMode(enum.Enum):
@@ -198,7 +198,7 @@ class _BaseDataFrameStatFunctions(t.Generic[DF]):
 STAT = t.TypeVar("STAT", bound=_BaseDataFrameStatFunctions)


-class _BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
+class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
     _na: t.Type[NA]
     _stat: t.Type[STAT]
     _group_data: t.Type[GROUP_DATA]
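
The change above renames the package's base DataFrame class from the private _BaseDataFrame to the public BaseDataFrame. A minimal sketch of how downstream type hints track the rename; the helper column_names is hypothetical, and .columns is assumed to behave like the PySpark property that sqlframe mirrors:

import typing as t

from sqlframe.base.dataframe import BaseDataFrame  # 3.11.0 and earlier exposed this as _BaseDataFrame


def column_names(df: BaseDataFrame) -> t.List[str]:
    # Every engine-specific DataFrame (DuckDB, BigQuery, Spark, ...) subclasses
    # BaseDataFrame, so one annotation covers all of them.
    return df.columns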

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/decorators.py

@@ -43,7 +43,7 @@ def func_metadata(unsupported_engines: t.Optional[t.Union[str, t.List[str]]] = N
                 col_name = col_name.this
             alias_name = f"{func.__name__}__{col_name or ''}__"
             # BigQuery has restrictions on alias names so we constrain it to alphanumeric characters and underscores
-            return result.alias(re.sub("\W", "_", alias_name))
+            return result.alias(re.sub("\W", "_", alias_name))  # type: ignore
         return result

     wrapper.unsupported_engines = (  # type: ignore
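
The only change here is the added # type: ignore; the alias sanitization itself is unchanged. For reference, a small self-contained illustration of what that re.sub call does (the input string is made up):

import re

# \W matches any character outside [A-Za-z0-9_], so the generated alias is
# reduced to letters, digits, and underscores before being used as a BigQuery alias.
assert re.sub(r"\W", "_", "max__col-1 (total)__") == "max__col_1__total___"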

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/mixins/dataframe_mixins.py

@@ -11,7 +11,7 @@ from sqlframe.base.dataframe import (
     SESSION,
     STAT,
     WRITER,
-    _BaseDataFrame,
+    BaseDataFrame,
 )

 if sys.version_info >= (3, 11):
@@ -23,7 +23,7 @@ else:
 logger = logging.getLogger(__name__)


-class NoCachePersistSupportMixin(_BaseDataFrame, t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
+class NoCachePersistSupportMixin(BaseDataFrame, t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
     def cache(self) -> Self:
         logger.warning("This engine does not support caching. Ignoring cache() call.")
         return self
@@ -34,7 +34,7 @@ class NoCachePersistSupportMixin(_BaseDataFrame, t.Generic[SESSION, WRITER, NA,


 class TypedColumnsFromTempViewMixin(
-    _BaseDataFrame, t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]
+    BaseDataFrame, t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]
 ):
     @property
     def _typed_columns(self) -> t.List[Column]:
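
As the mixin above shows, cache() on engines that do not support caching only logs a warning and returns the DataFrame unchanged. A usage sketch; the DuckDBSession import path and its zero-argument, in-memory default are assumptions taken from sqlframe's documentation, not part of this diff:

import logging

from sqlframe.duckdb import DuckDBSession

logging.basicConfig(level=logging.WARNING)

session = DuckDBSession()
df = session.createDataFrame([(1, "Jack")], ["id", "fname"])

# NoCachePersistSupportMixin.cache() warns and returns self, so the call is a no-op.
assert df.cache() is df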

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/operations.py

@@ -7,7 +7,7 @@ import typing as t
 from enum import IntEnum

 if t.TYPE_CHECKING:
-    from sqlframe.base.dataframe import _BaseDataFrame
+    from sqlframe.base.dataframe import BaseDataFrame
     from sqlframe.base.group import _BaseGroupedData


@@ -37,7 +37,7 @@ def operation(op: Operation) -> t.Callable[[t.Callable], t.Callable]:

     def decorator(func: t.Callable) -> t.Callable:
         @functools.wraps(func)
-        def wrapper(self: _BaseDataFrame, *args, **kwargs) -> _BaseDataFrame:
+        def wrapper(self: BaseDataFrame, *args, **kwargs) -> BaseDataFrame:
             if self.last_op == Operation.INIT:
                 self = self._convert_leaf_to_cte()
                 self.last_op = Operation.NO_OP
@@ -45,7 +45,7 @@ def operation(op: Operation) -> t.Callable[[t.Callable], t.Callable]:
             new_op = op if op != Operation.NO_OP else last_op
             if new_op < last_op or (last_op == new_op == Operation.SELECT):
                 self = self._convert_leaf_to_cte()
-            df: t.Union[_BaseDataFrame, _BaseGroupedData] = func(self, *args, **kwargs)
+            df: t.Union[BaseDataFrame, _BaseGroupedData] = func(self, *args, **kwargs)
             df.last_op = new_op  # type: ignore
             return df  # type: ignore

@@ -69,7 +69,7 @@ def group_operation(op: Operation) -> t.Callable[[t.Callable], t.Callable]:

     def decorator(func: t.Callable) -> t.Callable:
         @functools.wraps(func)
-        def wrapper(self: _BaseGroupedData, *args, **kwargs) -> _BaseDataFrame:
+        def wrapper(self: _BaseGroupedData, *args, **kwargs) -> BaseDataFrame:
             if self._df.last_op == Operation.INIT:
                 self._df = self._df._convert_leaf_to_cte()
                 self._df.last_op = Operation.NO_OP
@@ -77,7 +77,7 @@ def group_operation(op: Operation) -> t.Callable[[t.Callable], t.Callable]:
             new_op = op if op != Operation.NO_OP else last_op
             if new_op < last_op or (last_op == new_op == Operation.SELECT):
                 self._df = self._df._convert_leaf_to_cte()
-            df: _BaseDataFrame = func(self, *args, **kwargs)
+            df: BaseDataFrame = func(self, *args, **kwargs)
             df.last_op = new_op  # type: ignore
             return df


{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/base/session.py

@@ -24,7 +24,7 @@ from sqlglot.optimizer.qualify_columns import (
 from sqlglot.schema import MappingSchema

 from sqlframe.base.catalog import _BaseCatalog
-from sqlframe.base.dataframe import _BaseDataFrame
+from sqlframe.base.dataframe import BaseDataFrame
 from sqlframe.base.normalize import normalize_dict
 from sqlframe.base.readerwriter import _BaseDataFrameReader, _BaseDataFrameWriter
 from sqlframe.base.udf import _BaseUDFRegistration
@@ -64,7 +64,7 @@ logger = logging.getLogger(__name__)
 CATALOG = t.TypeVar("CATALOG", bound=_BaseCatalog)
 READER = t.TypeVar("READER", bound=_BaseDataFrameReader)
 WRITER = t.TypeVar("WRITER", bound=_BaseDataFrameWriter)
-DF = t.TypeVar("DF", bound=_BaseDataFrame)
+DF = t.TypeVar("DF", bound=BaseDataFrame)
 UDF_REGISTRATION = t.TypeVar("UDF_REGISTRATION", bound=_BaseUDFRegistration)

 _MISSING = "MISSING"

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/bigquery/dataframe.py

@@ -5,7 +5,7 @@ import typing as t

 from sqlframe.base.catalog import Column as CatalogColumn
 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -30,7 +30,7 @@ class BigQueryDataFrameStatFunctions(_BaseDataFrameStatFunctions["BigQueryDataFr

 class BigQueryDataFrame(
     NoCachePersistSupportMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "BigQuerySession",
         "BigQueryDataFrameWriter",
         "BigQueryDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/databricks/dataframe.py

@@ -5,7 +5,7 @@ import typing as t

 from sqlframe.base.catalog import Column as CatalogColumn
 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -31,7 +31,7 @@ class DatabricksDataFrameStatFunctions(_BaseDataFrameStatFunctions["DatabricksDa

 class DatabricksDataFrame(
     NoCachePersistSupportMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "DatabricksSession",
         "DatabricksDataFrameWriter",
         "DatabricksDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/duckdb/dataframe.py

@@ -4,7 +4,7 @@ import logging
 import typing as t

 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -34,7 +34,7 @@ class DuckDBDataFrameStatFunctions(_BaseDataFrameStatFunctions["DuckDBDataFrame"
 class DuckDBDataFrame(
     NoCachePersistSupportMixin,
     TypedColumnsFromTempViewMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "DuckDBSession",
         "DuckDBDataFrameWriter",
         "DuckDBDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/postgres/dataframe.py

@@ -5,7 +5,7 @@ import sys
 import typing as t

 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -39,7 +39,7 @@ class PostgresDataFrameStatFunctions(_BaseDataFrameStatFunctions["PostgresDataFr
 class PostgresDataFrame(
     NoCachePersistSupportMixin,
     TypedColumnsFromTempViewMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "PostgresSession",
         "PostgresDataFrameWriter",
         "PostgresDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/redshift/dataframe.py

@@ -5,7 +5,7 @@ import sys
 import typing as t

 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -30,7 +30,7 @@ class RedshiftDataFrameStatFunctions(_BaseDataFrameStatFunctions["RedshiftDataFr

 class RedshiftDataFrame(
     NoCachePersistSupportMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "RedshiftSession",
         "RedshiftDataFrameWriter",
         "RedshiftDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/snowflake/dataframe.py

@@ -6,7 +6,7 @@ import typing as t

 from sqlframe.base.catalog import Column as CatalogColumn
 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -32,7 +32,7 @@ class SnowflakeDataFrameStatFunctions(_BaseDataFrameStatFunctions["SnowflakeData

 class SnowflakeDataFrame(
     NoCachePersistSupportMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "SnowflakeSession",
         "SnowflakeDataFrameWriter",
         "SnowflakeDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/spark/dataframe.py

@@ -7,7 +7,7 @@ from sqlglot import exp

 from sqlframe.base.catalog import Column
 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -31,7 +31,7 @@ class SparkDataFrameStatFunctions(_BaseDataFrameStatFunctions["SparkDataFrame"])

 class SparkDataFrame(
     NoCachePersistSupportMixin,
-    _BaseDataFrame[
+    BaseDataFrame[
         "SparkSession",
         "SparkDataFrameWriter",
         "SparkDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/standalone/dataframe.py

@@ -3,7 +3,7 @@ from __future__ import annotations
 import typing as t

 from sqlframe.base.dataframe import (
-    _BaseDataFrame,
+    BaseDataFrame,
     _BaseDataFrameNaFunctions,
     _BaseDataFrameStatFunctions,
 )
@@ -23,7 +23,7 @@ class StandaloneDataFrameStatFunctions(_BaseDataFrameStatFunctions["StandaloneDa


 class StandaloneDataFrame(
-    _BaseDataFrame[
+    BaseDataFrame[
         "StandaloneSession",
         "StandaloneDataFrameWriter",
         "StandaloneDataFrameNaFunctions",

{sqlframe-3.11.0 → sqlframe-3.12.0}/sqlframe/testing/utils.py

@@ -7,7 +7,7 @@ import typing as t
 from itertools import zip_longest

 from sqlframe.base import types
-from sqlframe.base.dataframe import _BaseDataFrame
+from sqlframe.base.dataframe import BaseDataFrame
 from sqlframe.base.exceptions import (
     DataFrameDiffError,
     SchemaDiffError,
@@ -64,8 +64,8 @@ def _context_diff(actual: t.List[str], expected: t.List[str], n: int = 3):

 # Source: https://github.com/apache/spark/blob/master/python/pyspark/testing/utils.py#L519
 def assertDataFrameEqual(
-    actual: t.Union[_BaseDataFrame, pd.DataFrame, t.List[types.Row]],
-    expected: t.Union[_BaseDataFrame, pd.DataFrame, t.List[types.Row]],
+    actual: t.Union[BaseDataFrame, pd.DataFrame, t.List[types.Row]],
+    expected: t.Union[BaseDataFrame, pd.DataFrame, t.List[types.Row]],
     checkRowOrder: bool = False,
     rtol: float = 1e-5,
     atol: float = 1e-8,
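
A usage sketch for the assertDataFrameEqual signature shown above: after the rename, actual/expected are annotated with BaseDataFrame, so any engine's DataFrame, a pandas DataFrame, or a list of Rows can be compared. The DuckDB session setup is illustrative and not part of this diff:

from sqlframe.base.types import Row
from sqlframe.duckdb import DuckDBSession
from sqlframe.testing.utils import assertDataFrameEqual

session = DuckDBSession()
actual = session.createDataFrame([(1, "Jack"), (2, "Kate")], ["id", "fname"])

# Expected side here is a plain list of Rows; row order is ignored unless
# checkRowOrder=True, and rtol/atol control floating-point comparisons.
expected = [Row(id=2, fname="Kate"), Row(id=1, fname="Jack")]
assertDataFrameEqual(actual, expected)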

{sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_dataframe.py

@@ -5,12 +5,12 @@ import typing as t
 from sqlframe.base.types import Row

 if t.TYPE_CHECKING:
-    from sqlframe.base.dataframe import _BaseDataFrame
+    from sqlframe.base.dataframe import BaseDataFrame

 pytest_plugins = ["tests.integration.fixtures"]


-def test_collect(get_engine_df: t.Callable[[str], _BaseDataFrame], get_func):
+def test_collect(get_engine_df: t.Callable[[str], BaseDataFrame], get_func):
     employee = get_engine_df("employee")
     col = get_func("col", employee.session)
     results = employee.select(col("fname"), col("lname")).collect()
@@ -24,7 +24,7 @@ def test_collect(get_engine_df: t.Callable[[str], _BaseDataFrame], get_func):


 def test_show(
-    get_engine_df: t.Callable[[str], _BaseDataFrame],
+    get_engine_df: t.Callable[[str], BaseDataFrame],
     get_func,
     capsys,
     caplog,
@@ -53,7 +53,7 @@ def test_show(


 def test_show_limit(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], capsys, is_snowflake: t.Callable
+    get_engine_df: t.Callable[[str], BaseDataFrame], capsys, is_snowflake: t.Callable
 ):
     employee = get_engine_df("employee")
     employee.show(1)

{sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_engine_writer.py

@@ -10,15 +10,15 @@ from sqlframe.base.types import Row
 from sqlframe.duckdb.session import DuckDBSession

 if t.TYPE_CHECKING:
-    from sqlframe.base.dataframe import _BaseDataFrame
+    from sqlframe.base.dataframe import BaseDataFrame

 pytest_plugins = ["tests.integration.fixtures"]


 @pytest.fixture
 def cleanup_employee_df(
-    get_engine_df: t.Callable[[str], _BaseDataFrame],
-) -> t.Iterator[_BaseDataFrame]:
+    get_engine_df: t.Callable[[str], BaseDataFrame],
+) -> t.Iterator[BaseDataFrame]:
     df = get_engine_df("employee")
     df.session._execute("DROP TABLE IF EXISTS insert_into_employee")
     df.session._execute("DROP TABLE IF EXISTS save_as_table_employee")
@@ -27,7 +27,7 @@ def cleanup_employee_df(
     df.session._execute("DROP TABLE IF EXISTS save_as_table_employee")


-def test_write_json(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path):
+def test_write_json(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_json = str(tmp_path / "employee.json")
     df_employee.write.json(temp_json)
@@ -50,9 +50,7 @@ def test_write_json_append(get_session: t.Callable[[], _BaseSession], tmp_path:
     assert df_result.collect() == [Row(_1=1), Row(_1=2)]


-def test_write_json_ignore(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path
-):
+def test_write_json_ignore(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_json = tmp_path / "employee.json"
     temp_json.touch()
@@ -62,7 +60,7 @@ def test_write_json_ignore(


 def test_write_json_error(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path, caplog
+    get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path, caplog
 ):
     df_employee = get_engine_df("employee")
     temp_json = tmp_path / "employee.json"
@@ -71,7 +69,7 @@ def test_write_json_error(
         df_employee.write.json(temp_json, mode="error")


-def test_write_parquet(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path):
+def test_write_parquet(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_parquet = str(tmp_path / "employee.parquet")
     df_employee.write.parquet(temp_parquet)
@@ -80,7 +78,7 @@ def test_write_parquet(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_pat


 def test_write_parquet_ignore(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path
+    get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path
 ):
     df_employee = get_engine_df("employee")
     temp_parquet = str(tmp_path / "employee.parquet")
@@ -95,7 +93,7 @@ def test_write_parquet_ignore(


 def test_write_parquet_error(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path, caplog
+    get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path, caplog
 ):
     df_employee = get_engine_df("employee")
     temp_parquet = tmp_path / "employee.parquet"
@@ -105,7 +103,7 @@ def test_write_parquet_error(


 def test_write_parquet_unsupported_modes(
-    get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path
+    get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path
 ):
     df_employee = get_engine_df("employee")
     temp_json = tmp_path / "employee.parquet"
@@ -113,7 +111,7 @@ def test_write_parquet_unsupported_modes(
         df_employee.write.parquet(str(temp_json), mode="append")


-def test_write_csv(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path):
+def test_write_csv(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_csv = str(tmp_path / "employee.csv")
     df_employee.write.csv(temp_csv)
@@ -136,7 +134,7 @@ def test_write_csv_append(get_session: t.Callable[[], _BaseSession], tmp_path: p
     assert df_result.collect() == [Row(_1=1), Row(_1=2)]


-def test_write_csv_ignore(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path):
+def test_write_csv_ignore(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_csv = str(tmp_path / "employee.csv")
     df1 = df_employee.session.createDataFrame([(1,)])
@@ -150,7 +148,7 @@ def test_write_csv_ignore(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_
     assert df_result.collect() == df1.collect()


-def test_write_csv_error(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_path: pathlib.Path):
+def test_write_csv_error(get_engine_df: t.Callable[[str], BaseDataFrame], tmp_path: pathlib.Path):
     df_employee = get_engine_df("employee")
     temp_csv = tmp_path / "employee.csv"
     temp_csv.touch()
@@ -158,14 +156,14 @@ def test_write_csv_error(get_engine_df: t.Callable[[str], _BaseDataFrame], tmp_p
         df_employee.write.json(temp_csv, mode="error")


-def test_save_as_table(cleanup_employee_df: _BaseDataFrame, caplog):
+def test_save_as_table(cleanup_employee_df: BaseDataFrame, caplog):
     df_employee = cleanup_employee_df
     df_employee.write.saveAsTable("save_as_table_employee")
     df2 = df_employee.session.read.table("save_as_table_employee")
     assert sorted(df2.collect()) == sorted(df_employee.collect())


-def test_insertInto(cleanup_employee_df: _BaseDataFrame, caplog):
+def test_insertInto(cleanup_employee_df: BaseDataFrame, caplog):
     df_employee = cleanup_employee_df
     df = df_employee.session.createDataFrame(
         [(9, "Sayid", "Jarrah", 40, 1)], ["id", "first_name", "last_name", "age", "store_id"]

{sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/engines/test_int_functions.py

@@ -25,7 +25,7 @@ from sqlframe.snowflake import SnowflakeSession
 from sqlframe.spark.session import SparkSession

 if t.TYPE_CHECKING:
-    from sqlframe.base.dataframe import _BaseDataFrame
+    from sqlframe.base.dataframe import BaseDataFrame

 pytest_plugins = ["tests.integration.fixtures"]

@@ -33,7 +33,7 @@ pytest_plugins = ["tests.integration.fixtures"]
 class GetDfAndFuncCallable(t.Protocol):
     def __call__(
         self, name: str, limit: t.Optional[int] = None
-    ) -> t.Tuple[_BaseDataFrame, t.Callable]: ...
+    ) -> t.Tuple[BaseDataFrame, t.Callable]: ...


 def get_func_from_session(name: str, session: t.Union[PySparkSession, _BaseSession]) -> t.Callable:

{sqlframe-3.11.0 → sqlframe-3.12.0}/tests/integration/fixtures.py

@@ -46,7 +46,7 @@ from sqlframe.standalone.dataframe import StandaloneDataFrame
 from sqlframe.standalone.session import StandaloneSession

 if t.TYPE_CHECKING:
-    from sqlframe.base.dataframe import _BaseDataFrame
+    from sqlframe.base.dataframe import BaseDataFrame
     from sqlframe.base.session import _BaseSession
     from tests.types import DistrictData, EmployeeData, StoreData

@@ -650,14 +650,14 @@ def get_explain_plan() -> t.Callable:


 @pytest.fixture(params=ENGINE_PARAMETERS_NO_PYSPARK_STANDALONE)
-def get_engine_df(request: FixtureRequest) -> t.Callable[[str], _BaseDataFrame]:
+def get_engine_df(request: FixtureRequest) -> t.Callable[[str], BaseDataFrame]:
     mapping = {
         "employee": f"{request.param}_employee",
         "store": f"{request.param}_store",
         "district": f"{request.param}_district",
     }

-    def _get_engine_df(name: str) -> _BaseDataFrame:
+    def _get_engine_df(name: str) -> BaseDataFrame:
         return request.getfixturevalue(mapping[name])

     return _get_engine_df
@@ -672,14 +672,14 @@ def get_session(request: FixtureRequest) -> t.Callable[[], _BaseSession]:


 @pytest.fixture(params=ENGINE_PARAMETERS_NO_PYSPARK)
-def get_df(request: FixtureRequest) -> t.Callable[[str], _BaseDataFrame]:
+def get_df(request: FixtureRequest) -> t.Callable[[str], BaseDataFrame]:
     mapping = {
         "employee": f"{request.param}_employee",
         "store": f"{request.param}_store",
         "district": f"{request.param}_district",
     }

-    def _get_df(name: str) -> _BaseDataFrame:
+    def _get_df(name: str) -> BaseDataFrame:
         return request.getfixturevalue(mapping[name])

     return _get_df
@@ -698,14 +698,14 @@ def get_engine_session_and_spark(
 @pytest.fixture(params=ENGINE_PARAMETERS_NO_STANDALONE)
 def get_engine_df_and_pyspark(
     request: FixtureRequest,
-) -> t.Callable[[str], t.Union[_BaseDataFrame, PySparkDataFrame]]:
+) -> t.Callable[[str], t.Union[BaseDataFrame, PySparkDataFrame]]:
     mapping = {
         "employee": f"{request.param}_employee",
         "store": f"{request.param}_store",
         "district": f"{request.param}_district",
     }

-    def _get_engine_df_and_pyspark(name: str) -> t.Union[_BaseDataFrame, PySparkDataFrame]:
+    def _get_engine_df_and_pyspark(name: str) -> t.Union[BaseDataFrame, PySparkDataFrame]:
         return request.getfixturevalue(mapping[name])

     return _get_engine_df_and_pyspark
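
The fixtures above follow a common pytest pattern: a parametrized fixture returns a resolver that closes over request.getfixturevalue, so each test runs once per engine. A self-contained sketch of the same pattern; the engine names and *_employee fixtures here are illustrative only:

import typing as t

import pytest


@pytest.fixture
def duckdb_employee() -> t.List[str]:
    return ["duckdb", "employee"]


@pytest.fixture
def postgres_employee() -> t.List[str]:
    return ["postgres", "employee"]


@pytest.fixture(params=["duckdb", "postgres"])
def get_engine_df(request: pytest.FixtureRequest) -> t.Callable[[str], t.List[str]]:
    mapping = {"employee": f"{request.param}_employee"}

    def _get(name: str) -> t.List[str]:
        # Resolves the engine-specific fixture lazily, once per parametrization.
        return request.getfixturevalue(mapping[name])

    return _get


def test_employee(get_engine_df: t.Callable[[str], t.List[str]]) -> None:
    assert get_engine_df("employee")[1] == "employee"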