sqlframe 3.31.4.tar.gz → 3.32.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-3.31.4 → sqlframe-3.32.1}/PKG-INFO +1 -1
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/_version.py +2 -2
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/catalog.py +12 -1
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/dataframe.py +3 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/functions.py +7 -7
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/util.py +87 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/SOURCES.txt +1 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_int_functions.py +2 -0
- sqlframe-3.32.1/tests/unit/test_catalog.py +57 -0
- sqlframe-3.32.1/tests/unit/test_util.py +216 -0
- sqlframe-3.31.4/tests/unit/test_util.py +0 -75
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.github/CODEOWNERS +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.gitignore +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/.readthedocs.yaml +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/LICENSE +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/Makefile +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/README.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/cake.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/bigquery.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/configuration.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/databricks.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/docs/postgres.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/duckdb.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/images/SF.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/images/favicon.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/index.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/postgres.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/redshift.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/requirements.txt +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/snowflake.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/spark.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/standalone.md +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/mkdocs.yml +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/pytest.ini +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/renovate.json +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/setup.cfg +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/setup.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/LICENSE +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/mixins/table_mixins.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/py.typed +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/requires.txt +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/common_fixtures.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/conftest.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/issue_219.csv +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_table.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/conftest.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/databricks/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/duck/test_reader_options.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/spark/test_reader_options.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_functions.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.31.4 → sqlframe-3.32.1}/tests/unit/test_base_reader_options.py +0 -0
{sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/catalog.py

@@ -6,9 +6,16 @@ import typing as t
 from collections import defaultdict
 
 from sqlglot import MappingSchema, exp
+from sqlglot.helper import seq_get
 
+from sqlframe.base import types
 from sqlframe.base.exceptions import TableSchemaError
-from sqlframe.base.util import
+from sqlframe.base.util import (
+    ensure_column_mapping,
+    normalize_string,
+    spark_to_sqlglot,
+    to_schema,
+)
 
 if t.TYPE_CHECKING:
     from sqlglot.schema import ColumnMapping

@@ -99,6 +106,10 @@ class _BaseCatalog(t.Generic[SESSION, DF, TABLE]):
                 "This session does not have access to a catalog that can lookup column information. See docs for explicitly defining columns or using a session that can automatically determine this."
             )
         column_mapping = ensure_column_mapping(column_mapping)  # type: ignore
+        if isinstance(column_mapping, dict) and isinstance(
+            seq_get(list(column_mapping.values()), 0), types.DataType
+        ):
+            column_mapping = {k: spark_to_sqlglot(v) for k, v in column_mapping.items()}
         for column_name in column_mapping:
             column = exp.to_column(column_name, dialect=self.session.input_dialect)
             if column.this.quoted:
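Concretely, the new branch means a column mapping may now use Spark DataType values directly; they are converted to SQLGlot types via the new spark_to_sqlglot helper before the schema is registered. A minimal sketch mirroring the new unit test further below (the table name and columns here are made up):

    from sqlframe.base import types
    from sqlframe.standalone.session import StandaloneSession

    session = StandaloneSession()
    # Values may now be Spark DataType instances instead of SQL type strings.
    session.catalog.add_table(
        "events",
        {"id": types.LongType(), "name": types.StringType()},
    )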
{sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/dataframe.py

@@ -260,6 +260,9 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
     def __copy__(self):
         return self.copy()
 
+    def _display_(self) -> str:
+        return self.__repr__()
+
     @property
     def _typed_columns(self) -> t.List[CatalogColumn]:
         raise NotImplementedError
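The new _display_ hook simply delegates to __repr__; it is presumably there so front ends that look for a _display_ method (notebook-style renderers) can show a DataFrame. A hedged sketch of how such a hook is typically consumed; the render helper below is hypothetical and not part of sqlframe:

    def render(obj) -> str:
        # Hypothetical renderer: prefer an object's _display_() hook, fall back to repr().
        hook = getattr(obj, "_display_", None)
        return hook() if callable(hook) else repr(obj)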
{sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/functions.py

@@ -494,21 +494,21 @@ def skewness(col: ColumnOrName) -> Column:
     func_name = "SKEW"
 
     if session._is_duckdb or session._is_snowflake:
+        col = Column.ensure_col(col)
         when_func = get_func_from_session("when")
         count_func = get_func_from_session("count")
-
+        count_col = count_func(col)
         lit_func = get_func_from_session("lit")
         sqrt_func = get_func_from_session("sqrt")
-        col = Column.ensure_col(col)
         full_calc = (
             Column.invoke_anonymous_function(col, func_name)
-            * (
-            / (sqrt_func(
+            * (count_col - lit_func(2))
+            / (sqrt_func(count_col * (count_col - lit_func(1))))
         )
         return (
-            when_func(
-            .when(
-            .when(
+            when_func(count_col == lit_func(0), lit_func(None))
+            .when(count_col == lit_func(1), lit_func(None))
+            .when(count_col == lit_func(2), lit_func(0.0))
             .otherwise(full_calc)
         )
 
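The rewritten fallback computes the row count once (count_col) and scales the engine's SKEW by (n - 2) / sqrt(n * (n - 1)), which looks like the usual adjustment from the bias-corrected (sample) skewness DuckDB/Snowflake return to the population skewness Spark reports; the when chain then pins down the degenerate cases (NULL for 0 or 1 rows, 0.0 for exactly 2 rows, matching the new integration test below). A small sketch of that correction in plain Python, assuming the engine value is the sample statistic:

    import math

    def sample_to_population_skewness(sample_skew: float, n: int) -> float:
        # Mirrors the factor applied in the patched skewness(): * (n - 2) / sqrt(n * (n - 1)).
        return sample_skew * (n - 2) / math.sqrt(n * (n - 1))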
{sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe/base/util.py

@@ -347,6 +347,93 @@ def sqlglot_to_spark(sqlglot_dtype: exp.DataType) -> types.DataType:
     raise NotImplementedError(f"Unsupported data type: {sqlglot_dtype}")
 
 
+def spark_to_sqlglot(spark_dtype: types.DataType) -> exp.DataType:
+    """
+    Convert a Spark data type to a SQLGlot data type.
+
+    This function is the opposite of sqlglot_to_spark.
+
+    Args:
+        spark_dtype: A Spark data type
+
+    Returns:
+        The equivalent SQLGlot data type
+    """
+    from sqlframe.base import types
+
+    # Handle primitive types
+    if isinstance(spark_dtype, types.StringType):
+        return exp.DataType(this=exp.DataType.Type.TEXT)
+    elif isinstance(spark_dtype, types.VarcharType):
+        return exp.DataType(
+            this=exp.DataType.Type.VARCHAR,
+            expressions=[exp.DataTypeParam(this=exp.Literal.number(spark_dtype.length))],
+        )
+    elif isinstance(spark_dtype, types.CharType):
+        return exp.DataType(
+            this=exp.DataType.Type.CHAR,
+            expressions=[exp.DataTypeParam(this=exp.Literal.number(spark_dtype.length))],
+        )
+    elif isinstance(spark_dtype, types.BinaryType):
+        return exp.DataType(this=exp.DataType.Type.BINARY)
+    elif isinstance(spark_dtype, types.BooleanType):
+        return exp.DataType(this=exp.DataType.Type.BOOLEAN)
+    elif isinstance(spark_dtype, types.IntegerType):
+        return exp.DataType(this=exp.DataType.Type.INT)
+    elif isinstance(spark_dtype, types.LongType):
+        return exp.DataType(this=exp.DataType.Type.BIGINT)
+    elif isinstance(spark_dtype, types.ShortType):
+        return exp.DataType(this=exp.DataType.Type.SMALLINT)
+    elif isinstance(spark_dtype, types.ByteType):
+        return exp.DataType(this=exp.DataType.Type.TINYINT)
+    elif isinstance(spark_dtype, types.FloatType):
+        return exp.DataType(this=exp.DataType.Type.FLOAT)
+    elif isinstance(spark_dtype, types.DoubleType):
+        return exp.DataType(this=exp.DataType.Type.DOUBLE)
+    elif isinstance(spark_dtype, types.DecimalType):
+        if spark_dtype.precision is not None and spark_dtype.scale is not None:
+            return exp.DataType(
+                this=exp.DataType.Type.DECIMAL,
+                expressions=[
+                    exp.DataTypeParam(this=exp.Literal.number(spark_dtype.precision)),
+                    exp.DataTypeParam(this=exp.Literal.number(spark_dtype.scale)),
+                ],
+            )
+        return exp.DataType(this=exp.DataType.Type.DECIMAL)
+    elif isinstance(spark_dtype, types.TimestampType):
+        return exp.DataType(this=exp.DataType.Type.TIMESTAMP)
+    elif isinstance(spark_dtype, types.TimestampNTZType):
+        return exp.DataType(this=exp.DataType.Type.TIMESTAMPNTZ)
+    elif isinstance(spark_dtype, types.DateType):
+        return exp.DataType(this=exp.DataType.Type.DATE)
+
+    # Handle complex types
+    elif isinstance(spark_dtype, types.ArrayType):
+        return exp.DataType(
+            this=exp.DataType.Type.ARRAY, expressions=[spark_to_sqlglot(spark_dtype.elementType)]
+        )
+    elif isinstance(spark_dtype, types.MapType):
+        return exp.DataType(
+            this=exp.DataType.Type.MAP,
+            expressions=[
+                spark_to_sqlglot(spark_dtype.keyType),
+                spark_to_sqlglot(spark_dtype.valueType),
+            ],
+        )
+    elif isinstance(spark_dtype, types.StructType):
+        return exp.DataType(
+            this=exp.DataType.Type.STRUCT,
+            expressions=[
+                exp.ColumnDef(
+                    this=exp.to_identifier(field.name), kind=spark_to_sqlglot(field.dataType)
+                )
+                for field in spark_dtype
+            ],
+        )
+
+    raise NotImplementedError(f"Unsupported data type: {spark_dtype}")
+
+
 def normalize_string(
     value: t.Union[str, exp.Expression],
     from_dialect: DialectType = None,
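For orientation, the new helper can be exercised directly; the expected SQL strings below are taken from the new unit tests added in this release:

    from sqlframe.base import types
    from sqlframe.base.util import spark_to_sqlglot

    print(spark_to_sqlglot(types.StringType()).sql())                    # TEXT
    print(spark_to_sqlglot(types.DecimalType(11, 2)).sql())              # DECIMAL(11, 2)
    print(spark_to_sqlglot(types.ArrayType(types.StringType())).sql())   # ARRAY(TEXT)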
{sqlframe-3.31.4 → sqlframe-3.32.1}/sqlframe.egg-info/SOURCES.txt

@@ -356,6 +356,7 @@ tests/unit/__init__.py
 tests/unit/conftest.py
 tests/unit/test_activate.py
 tests/unit/test_base_reader_options.py
+tests/unit/test_catalog.py
 tests/unit/test_util.py
 tests/unit/bigquery/__init__.py
 tests/unit/bigquery/test_activate.py
{sqlframe-3.31.4 → sqlframe-3.32.1}/tests/integration/engines/test_int_functions.py

@@ -694,6 +694,8 @@ def test_skewness(get_session_and_func):
     assert math.isclose(df.select(skewness(df.c)).first()[0], 0.7071067811865475, rel_tol=1e-5)
     df = session.createDataFrame([{"a": 2.0}, {"a": None}])
     assert df.agg(skewness(df.a)).collect() == [Row(value=None)]
+    df = session.createDataFrame([{"a": 1}, {"a": 2}])
+    assert df.agg(skewness(df.a)).collect() == [Row(value=0.0)]
 
 
 def test_kurtosis(get_session_and_func):
sqlframe-3.32.1/tests/unit/test_catalog.py (new file)

@@ -0,0 +1,57 @@
+import pytest
+from sqlglot import exp
+
+from sqlframe.base import types
+from sqlframe.base.util import spark_to_sqlglot
+from sqlframe.standalone.session import StandaloneSession
+
+
+@pytest.fixture(scope="function")
+def standalone_session() -> StandaloneSession:
+    return StandaloneSession()
+
+
+def test_add_table_with_spark_types(standalone_session: StandaloneSession):
+    """Test that add_table properly converts spark types to sqlglot types."""
+    # Create a dictionary with column names as keys and spark types as values
+    column_mapping = {
+        "col_string": types.StringType(),
+        "col_int": types.IntegerType(),
+        "col_long": types.LongType(),
+        "col_float": types.FloatType(),
+        "col_double": types.DoubleType(),
+        "col_boolean": types.BooleanType(),
+        "col_timestamp": types.TimestampType(),
+        "col_date": types.DateType(),
+        "col_decimal": types.DecimalType(10, 2),
+        "col_binary": types.BinaryType(),
+        "col_array": types.ArrayType(types.StringType()),
+        "col_map": types.MapType(types.StringType(), types.IntegerType()),
+        "col_struct": types.StructType(
+            [
+                types.StructField("nested_string", types.StringType()),
+                types.StructField("nested_int", types.IntegerType()),
+            ]
+        ),
+    }
+
+    # Call add_table with the dictionary
+    table_name = "test_table"
+    standalone_session.catalog.add_table(table_name, column_mapping)
+
+    # Get the schema from the catalog
+    table = exp.to_table(table_name, dialect=standalone_session.input_dialect)
+    schema = standalone_session.catalog._schema.find(table)
+
+    # Verify that the schema has been properly updated with the expected sqlglot types
+    assert schema is not None
+
+    # Check each column type
+    for col_name, spark_type in column_mapping.items():
+        expected_sqlglot_type = spark_to_sqlglot(spark_type)
+        actual_sqlglot_type = schema[col_name]
+
+        # Compare the SQL representation of the types
+        assert actual_sqlglot_type.sql() == expected_sqlglot_type.sql(), (
+            f"Column {col_name}: expected {expected_sqlglot_type.sql()}, got {actual_sqlglot_type.sql()}"
+        )
sqlframe-3.32.1/tests/unit/test_util.py (new file)

@@ -0,0 +1,216 @@
+import typing as t
+
+import pytest
+from sqlglot import exp, parse_one
+from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
+
+from sqlframe.base import types
+from sqlframe.base.util import (
+    quote_preserving_alias_or_name,
+    spark_to_sqlglot,
+    sqlglot_to_spark,
+)
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        ("a", "a"),
+        ("a AS b", "b"),
+        ("`a`", "`a`"),
+        ("`a` AS b", "b"),
+        ("`a` AS `b`", "`b`"),
+        ("`aB`", "`aB`"),
+        ("`aB` AS c", "c"),
+        ("`aB` AS `c`", "`c`"),
+        ("`aB` AS `Cd`", "`Cd`"),
+        # We assume inputs have been normalized so `Cd` is returned as is instead of normalized `cd`
+        ("`aB` AS Cd", "Cd"),
+    ],
+)
+def test_quote_preserving_alias_or_name(expression: t.Union[exp.Column, exp.Alias], expected: str):
+    assert quote_preserving_alias_or_name(parse_one(expression, dialect="bigquery")) == expected  # type: ignore
+
+
+@pytest.mark.parametrize(
+    "dtype, expected",
+    [
+        ("STRING", types.StringType()),
+        ("VARCHAR(100)", types.VarcharType(100)),
+        ("CHAR(100)", types.CharType(100)),
+        ("DECIMAL(10, 2)", types.DecimalType(10, 2)),
+        ("STRING", types.StringType()),
+        ("INTEGER", types.IntegerType()),
+        ("BIGINT", types.LongType()),
+        ("SMALLINT", types.ShortType()),
+        ("TINYINT", types.ByteType()),
+        ("FLOAT", types.FloatType()),
+        ("DOUBLE", types.DoubleType()),
+        ("BOOLEAN", types.BooleanType()),
+        ("TIMESTAMP", types.TimestampType()),
+        ("DATE", types.DateType()),
+        ("DECIMAL", types.DecimalType()),
+        ("BINARY", types.BinaryType()),
+        ("ARRAY<STRING>", types.ArrayType(types.StringType())),
+        ("MAP<STRING, INTEGER>", types.MapType(types.StringType(), types.IntegerType())),
+        (
+            "STRUCT<a STRING, b INTEGER>",
+            types.StructType(
+                [
+                    types.StructField("a", types.StringType()),
+                    types.StructField("b", types.IntegerType()),
+                ]
+            ),
+        ),
+        (
+            "ARRAY<STRUCT<a STRING, b INTEGER>>",
+            types.ArrayType(
+                types.StructType(
+                    [
+                        types.StructField("a", types.StringType()),
+                        types.StructField("b", types.IntegerType()),
+                    ]
+                )
+            ),
+        ),
+    ],
+)
+def test_sqlglot_to_spark(dtype: str, expected: types.DataType):
+    assert sqlglot_to_spark(exp.DataType.build(dtype)) == expected
+
+
+@pytest.mark.parametrize(
+    "spark_dtype, expected_str",
+    [
+        (types.StringType(), "TEXT"),
+        (types.VarcharType(100), "VARCHAR(100)"),
+        (types.CharType(100), "CHAR(100)"),
+        (types.DecimalType(11, 2), "DECIMAL(11, 2)"),
+        (types.IntegerType(), "INT"),
+        (types.LongType(), "BIGINT"),
+        (types.ShortType(), "SMALLINT"),
+        (types.ByteType(), "TINYINT"),
+        (types.FloatType(), "FLOAT"),
+        (types.DoubleType(), "DOUBLE"),
+        (types.BooleanType(), "BOOLEAN"),
+        (types.TimestampType(), "TIMESTAMP"),
+        (types.TimestampNTZType(), "TIMESTAMPNTZ"),
+        (types.DateType(), "DATE"),
+        (types.DecimalType(), "DECIMAL(10, 0)"),
+        (types.BinaryType(), "BINARY"),
+        (types.ArrayType(types.StringType()), "ARRAY(TEXT)"),
+        (types.MapType(types.StringType(), types.IntegerType()), "MAP(TEXT, INT)"),
+        (
+            types.StructType(
+                [
+                    types.StructField("a", types.StringType()),
+                    types.StructField("b", types.IntegerType()),
+                ]
+            ),
+            "STRUCT(a TEXT, b INT)",
+        ),
+        (
+            types.ArrayType(
+                types.StructType(
+                    [
+                        types.StructField("a", types.StringType()),
+                        types.StructField("b", types.IntegerType()),
+                    ]
+                )
+            ),
+            "ARRAY(STRUCT(a TEXT, b INT))",
+        ),
+    ],
+)
+def test_spark_to_sqlglot(spark_dtype: types.DataType, expected_str: str):
+    sqlglot_dtype = spark_to_sqlglot(spark_dtype)
+    assert sqlglot_dtype.sql() == expected_str
+
+
+@pytest.mark.parametrize(
+    "dtype, expected_str",
+    [
+        ("STRING", "TEXT"),
+        ("VARCHAR(100)", "VARCHAR(100)"),
+        ("CHAR(100)", "CHAR(100)"),
+        ("DECIMAL(10, 2)", "DECIMAL(10, 2)"),
+        ("INTEGER", "INT"),
+        ("BIGINT", "BIGINT"),
+        ("SMALLINT", "SMALLINT"),
+        ("TINYINT", "TINYINT"),
+        ("FLOAT", "FLOAT"),
+        ("DOUBLE", "DOUBLE"),
+        ("BOOLEAN", "BOOLEAN"),
+        ("TIMESTAMP", "TIMESTAMP"),
+        ("DATE", "DATE"),
+        ("DECIMAL", "DECIMAL(10, 0)"),
+        ("BINARY", "BINARY"),
+        ("ARRAY<STRING>", "ARRAY(TEXT)"),
+        ("MAP<STRING, INTEGER>", "MAP(TEXT, INT)"),
+        (
+            "STRUCT<a STRING, b INTEGER>",
+            "STRUCT(a TEXT, b INT)",
+        ),
+        (
+            "ARRAY<STRUCT<a STRING, b INTEGER>>",
+            "ARRAY(STRUCT(a TEXT, b INT))",
+        ),
+    ],
+)
+def test_sqlglot_to_spark_to_sqlglot(dtype: str, expected_str: str):
+    """Test round trip conversion from SQLGlot to Spark and back to SQLGlot."""
+    spark_dtype = sqlglot_to_spark(exp.DataType.build(dtype))
+    sqlglot_dtype = spark_to_sqlglot(spark_dtype)
+
+    assert sqlglot_dtype.sql() == expected_str
+
+
+@pytest.mark.parametrize(
+    "spark_dtype, expected_str",
+    [
+        (types.StringType(), "TEXT"),
+        (types.VarcharType(100), "VARCHAR(100)"),
+        (types.CharType(100), "CHAR(100)"),
+        (types.DecimalType(11, 2), "DECIMAL(11, 2)"),
+        (types.IntegerType(), "INT"),
+        (types.LongType(), "BIGINT"),
+        (types.ShortType(), "SMALLINT"),
+        (types.ByteType(), "TINYINT"),
+        (types.FloatType(), "FLOAT"),
+        (types.DoubleType(), "DOUBLE"),
+        (types.BooleanType(), "BOOLEAN"),
+        (types.TimestampType(), "TIMESTAMP"),
+        (types.TimestampNTZType(), "TIMESTAMPNTZ"),
+        (types.DateType(), "DATE"),
+        (types.DecimalType(), "DECIMAL(10, 0)"),
+        (types.BinaryType(), "BINARY"),
+        (types.ArrayType(types.StringType()), "ARRAY(TEXT)"),
+        (types.MapType(types.StringType(), types.IntegerType()), "MAP(TEXT, INT)"),
+        (
+            types.StructType(
+                [
+                    types.StructField("a", types.StringType()),
+                    types.StructField("b", types.IntegerType()),
+                ]
+            ),
+            "STRUCT(a TEXT, b INT)",
+        ),
+        (
+            types.ArrayType(
+                types.StructType(
+                    [
+                        types.StructField("a", types.StringType()),
+                        types.StructField("b", types.IntegerType()),
+                    ]
+                )
+            ),
+            "ARRAY(STRUCT(a TEXT, b INT))",
+        ),
+    ],
+)
+def test_spark_to_sqlglot_to_spark(spark_dtype: types.DataType, expected_str: str):
+    """Test round trip conversion from Spark to SQLGlot and back to Spark."""
+    sqlglot_dtype = spark_to_sqlglot(spark_dtype)
+    final_spark_dtype = sqlglot_to_spark(sqlglot_dtype)
+
+    assert str(final_spark_dtype) == str(spark_dtype)
sqlframe-3.31.4/tests/unit/test_util.py (removed; its tests are superseded by the new tests/unit/test_util.py above)

@@ -1,75 +0,0 @@
-import typing as t
-
-import pytest
-from sqlglot import exp, parse_one
-from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
-
-from sqlframe.base import types
-from sqlframe.base.util import quote_preserving_alias_or_name, sqlglot_to_spark
-
-
-@pytest.mark.parametrize(
-    "expression, expected",
-    [
-        ("a", "a"),
-        ("a AS b", "b"),
-        ("`a`", "`a`"),
-        ("`a` AS b", "b"),
-        ("`a` AS `b`", "`b`"),
-        ("`aB`", "`aB`"),
-        ("`aB` AS c", "c"),
-        ("`aB` AS `c`", "`c`"),
-        ("`aB` AS `Cd`", "`Cd`"),
-        # We assume inputs have been normalized so `Cd` is returned as is instead of normalized `cd`
-        ("`aB` AS Cd", "Cd"),
-    ],
-)
-def test_quote_preserving_alias_or_name(expression: t.Union[exp.Column, exp.Alias], expected: str):
-    assert quote_preserving_alias_or_name(parse_one(expression, dialect="bigquery")) == expected  # type: ignore
-
-
-@pytest.mark.parametrize(
-    "dtype, expected",
-    [
-        ("STRING", types.StringType()),
-        ("VARCHAR(100)", types.VarcharType(100)),
-        ("CHAR(100)", types.CharType(100)),
-        ("DECIMAL(10, 2)", types.DecimalType(10, 2)),
-        ("STRING", types.StringType()),
-        ("INTEGER", types.IntegerType()),
-        ("BIGINT", types.LongType()),
-        ("SMALLINT", types.ShortType()),
-        ("TINYINT", types.ByteType()),
-        ("FLOAT", types.FloatType()),
-        ("DOUBLE", types.DoubleType()),
-        ("BOOLEAN", types.BooleanType()),
-        ("TIMESTAMP", types.TimestampType()),
-        ("DATE", types.DateType()),
-        ("DECIMAL", types.DecimalType()),
-        ("BINARY", types.BinaryType()),
-        ("ARRAY<STRING>", types.ArrayType(types.StringType())),
-        ("MAP<STRING, INTEGER>", types.MapType(types.StringType(), types.IntegerType())),
-        (
-            "STRUCT<a STRING, b INTEGER>",
-            types.StructType(
-                [
-                    types.StructField("a", types.StringType()),
-                    types.StructField("b", types.IntegerType()),
-                ]
-            ),
-        ),
-        (
-            "ARRAY<STRUCT<a STRING, b INTEGER>>",
-            types.ArrayType(
-                types.StructType(
-                    [
-                        types.StructField("a", types.StringType()),
-                        types.StructField("b", types.IntegerType()),
-                    ]
-                )
-            ),
-        ),
-    ],
-)
-def test_sqlglot_to_spark(dtype: str, expected: types.DataType):
-    assert sqlglot_to_spark(exp.DataType.build(dtype)) == expected