sqlframe 1.7.1__tar.gz → 1.8.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-1.7.1 → sqlframe-1.8.0}/PKG-INFO +1 -1
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/spark.md +9 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/_version.py +2 -2
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/functions.py +65 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/catalog.py +4 -1
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/functions.pyi +9 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/bigquery/test_bigquery_session.py +1 -1
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_int_functions.py +188 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_functions.py +103 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.github/CODEOWNERS +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.gitignore +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/.readthedocs.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/LICENSE +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/Makefile +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/README.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/cake.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/bigquery.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/configuration.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/docs/postgres.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/duckdb.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/images/SF.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/images/favicon.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/index.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/postgres.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/requirements.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/snowflake.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/standalone.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/mkdocs.yml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/pytest.ini +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/renovate.json +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/setup.cfg +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/setup.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/LICENSE +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/util.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe.egg-info/requires.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/common_fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/conftest.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/test_util.py +0 -0
{sqlframe-1.7.1 → sqlframe-1.8.0}/docs/spark.md

@@ -310,6 +310,7 @@ df.show(5)
 * [lpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lpad.html)
 * [ltrim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ltrim.html)
 * [make_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.make_date.html)
+* [make_interval](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.make_interval.html)
 * [map_concat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_concat.html)
 * [map_entries](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_entries.html)
 * [map_filter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_filter.html)

@@ -408,6 +409,14 @@ df.show(5)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+* [try_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_add.html)
+* [try_avg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_avg.html)
+* [try_divide](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_divide.html)
+* [try_multiply](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_multiply.html)
+* [try_subtract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_subtract.html)
+* [try_sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_sum.html)
+* [try_to_binary](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_binary.html)
+* [try_to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_number.html)
 * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
{sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/base/functions.py

@@ -1682,6 +1682,71 @@ def stack(*cols: ColumnOrName) -> Column:
     )


+@meta(unsupported_engines="*")
+def make_interval(
+    years: t.Optional[ColumnOrName] = None,
+    months: t.Optional[ColumnOrName] = None,
+    weeks: t.Optional[ColumnOrName] = None,
+    days: t.Optional[ColumnOrName] = None,
+    hours: t.Optional[ColumnOrName] = None,
+    mins: t.Optional[ColumnOrName] = None,
+    secs: t.Optional[ColumnOrName] = None,
+) -> Column:
+    values = [years, months, weeks, days, hours, mins, secs]
+    for value in reversed(values.copy()):
+        if value is not None:
+            break
+        values = values[:-1]
+    else:
+        raise ValueError("At least one value must be provided")
+    columns = [Column.ensure_col(x) if x is not None else lit(None) for x in values]
+    return Column.invoke_anonymous_function(columns[0], "MAKE_INTERVAL", *columns[1:])
+
+
+@meta(unsupported_engines="*")
+def try_add(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_ADD", right)
+
+
+@meta(unsupported_engines="*")
+def try_avg(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "TRY_AVG")
+
+
+@meta(unsupported_engines="*")
+def try_divide(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_DIVIDE", right)
+
+
+@meta(unsupported_engines="*")
+def try_multiply(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_MULTIPLY", right)
+
+
+@meta(unsupported_engines="*")
+def try_subtract(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_SUBTRACT", right)
+
+
+@meta(unsupported_engines="*")
+def try_sum(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "TRY_SUM")
+
+
+@meta(unsupported_engines="*")
+def try_to_binary(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TRY_TO_BINARY", format)
+    return Column.invoke_anonymous_function(col, "TRY_TO_BINARY")
+
+
+@meta(unsupported_engines="*")
+def try_to_number(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TRY_TO_NUMBER", format)
+    return Column.invoke_anonymous_function(col, "TRY_TO_NUMBER")
+
+
 @meta()
 def _lambda_quoted(value: str) -> t.Optional[bool]:
     return False if value == "_" else None
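All of the new helpers above compile to plain function calls through Column.invoke_anonymous_function, so the SQL they emit can be inspected without connecting to any engine. A minimal sketch, assuming SF resolves to a sqlframe functions module that exports these helpers (the unit tests at the bottom of this diff only show the SF alias, so the exact import path is an assumption); the expected strings come from those tests:

    # Sketch only: mirrors assertions from tests/unit/standalone/test_functions.py.
    # The import path is an assumption; the tests show only the "SF" alias.
    from sqlframe.standalone import functions as SF

    print(SF.try_add("cola", "colb").sql())  # TRY_ADD(cola, colb)
    print(SF.try_to_number(SF.col("cola"), SF.lit("$99.99")).sql())  # TRY_TO_NUMBER(cola, '$99.99')
    print(SF.make_interval("cola", "colb", None, SF.lit(100)).sql())
    # MAKE_INTERVAL(cola, colb, NULL, 100): the skipped slot is rendered as NULL via lit(None)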
{sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/catalog.py

@@ -519,7 +519,10 @@ class SparkCatalog(
             )
             for col in df.columns
         ]
-        return [
+        return [
+            Column(**{name: x._asdict()[name] for name in Column._fields})
+            for x in self._spark_catalog.listColumns(tableName, dbName)
+        ]

     def listFunctions(
         self, dbName: t.Optional[str] = None, pattern: t.Optional[str] = None
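The listColumns rewrite above builds sqlframe's Column namedtuple field by field from Column._fields, so any extra attributes on the objects PySpark's catalog returns are simply dropped rather than breaking positional construction. A sketch of the idiom with stand-in types (these field names are illustrative, not sqlframe's actual ones):

    from collections import namedtuple

    # Hypothetical narrow target and wider source, to illustrate the idiom.
    Column = namedtuple("Column", ["name", "dataType"])
    SparkColumn = namedtuple("SparkColumn", ["name", "dataType", "isPartition"])

    x = SparkColumn(name="cola", dataType="INT", isPartition=False)
    narrowed = Column(**{name: x._asdict()[name] for name in Column._fields})
    print(narrowed)  # Column(name='cola', dataType='INT'); the extra field is dropped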
{sqlframe-1.7.1 → sqlframe-1.8.0}/sqlframe/spark/functions.pyi

@@ -132,6 +132,7 @@ from sqlframe.base.functions import (
     lpad as lpad,
     ltrim as ltrim,
     make_date as make_date,
+    make_interval as make_interval,
     map_concat as map_concat,
     map_entries as map_entries,
     map_filter as map_filter,

@@ -228,6 +229,14 @@ from sqlframe.base.functions import (
     translate as translate,
     trim as trim,
     trunc as trunc,
+    try_add as try_add,
+    try_avg as try_avg,
+    try_divide as try_divide,
+    try_multiply as try_multiply,
+    try_subtract as try_subtract,
+    try_sum as try_sum,
+    try_to_binary as try_to_binary,
+    try_to_number as try_to_number,
     typeof as typeof,
     unbase64 as unbase64,
     unhex as unhex,
{sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/bigquery/test_bigquery_session.py

@@ -16,5 +16,5 @@ def test_session_from_config():
     conn.cursor().execute("CREATE SCHEMA IF NOT EXISTS db1")
     conn.cursor().execute("CREATE TABLE IF NOT EXISTS db1.test_table (cola INT, colb STRING)")
     session = BigQuerySession.builder.config("default_dataset", "sqlframe.db1").getOrCreate()
-    columns = session.catalog.get_columns("test_table")
+    columns = session.catalog.get_columns("db1.test_table")
     assert columns == {"`cola`": exp.DataType.build("BIGINT"), "`colb`": exp.DataType.build("TEXT")}
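The fix here is only the argument: get_columns is now given the schema-qualified name. A minimal sketch of the call shape (session setup as in the test above):

    # Assumes a BigQuerySession whose default_dataset is "sqlframe.db1".
    columns = session.catalog.get_columns("db1.test_table")  # "schema.table", not bare "test_table"
    assert "`cola`" in columns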
{sqlframe-1.7.1 → sqlframe-1.8.0}/tests/integration/engines/test_int_functions.py

@@ -4,6 +4,7 @@ import datetime
 import math
 import typing as t
 from collections import Counter
+from decimal import Decimal

 import pytest
 from pyspark.sql import SparkSession as PySparkSession

@@ -2883,3 +2884,190 @@ def test_stack(get_session_and_func, get_func):
         Row(key=1, value=2),
         Row(key=3, value=None),
     ]
+
+
+def test_make_interval(get_session_and_func, get_func):
+    session, make_interval = get_session_and_func("make_interval")
+    df = session.createDataFrame(
+        [[100, 11, 1, 1, 12, 30, 01.001001]], ["year", "month", "week", "day", "hour", "min", "sec"]
+    )
+    assert (
+        df.select(
+            make_interval(df.year, df.month, df.week, df.day, df.hour, df.min, df.sec)
+            .cast("string")
+            .alias("r")
+        ).first()[0]
+        == "100 years 11 months 8 days 12 hours 30 minutes 1.001001 seconds"
+    )
+    assert (
+        df.select(
+            make_interval(df.year, df.month, df.week, df.day, df.hour, df.min)
+            .cast("string")
+            .alias("r")
+        ).first()[0]
+        == "100 years 11 months 8 days 12 hours 30 minutes"
+    )
+    assert (
+        df.select(
+            make_interval(df.year, df.month, df.week, df.day, df.hour).cast("string").alias("r")
+        ).first()[0]
+        == "100 years 11 months 8 days 12 hours"
+    )
+    assert (
+        df.select(
+            make_interval(df.year, df.month, df.week, df.day).cast("string").alias("r")
+        ).first()[0]
+        == "100 years 11 months 8 days"
+    )
+    assert (
+        df.select(make_interval(df.year, df.month, df.week).cast("string").alias("r")).first()[0]
+        == "100 years 11 months 7 days"
+    )
+    assert (
+        df.select(make_interval(df.year, df.month).cast("string").alias("r")).first()[0]
+        == "100 years 11 months"
+    )
+    assert df.select(make_interval(df.year).cast("string").alias("r")).first()[0] == "100 years"
+
+
+def test_try_add(get_session_and_func, get_func, get_types):
+    session, try_add = get_session_and_func("try_add")
+    to_date = get_func("to_date", session)
+    make_interval = get_func("make_interval", session)
+    lit = get_func("lit", session)
+    types = get_types(session)
+    df = session.createDataFrame([(1982, 15), (1990, 2)], ["birth", "age"])
+    assert df.select(try_add(df.birth, df.age).alias("r")).collect() == [
+        Row(r=1997),
+        Row(r=1992),
+    ]
+    schema = types.StructType(
+        [
+            types.StructField("i", types.IntegerType(), True),
+            types.StructField("d", types.StringType(), True),
+        ]
+    )
+    df = session.createDataFrame([(1, "2015-09-30")], schema)
+    df = df.select(df.i, to_date(df.d).alias("d"))
+    assert df.select(try_add(df.d, df.i).alias("r")).collect() == [
+        Row(r=datetime.date(2015, 10, 1))
+    ]
+    assert df.select(try_add(df.d, make_interval(df.i)).alias("r")).collect() == [
+        Row(r=datetime.date(2016, 9, 30))
+    ]
+    assert df.select(
+        try_add(df.d, make_interval(lit(0), lit(0), lit(0), df.i)).alias("r")
+    ).collect() == [Row(r=datetime.date(2015, 10, 1))]
+    assert df.select(
+        try_add(make_interval(df.i), make_interval(df.i)).cast("string").alias("r")
+    ).collect() == [Row(r="2 years")]
+
+
+def test_try_avg(get_session_and_func, get_func):
+    session, try_avg = get_session_and_func("try_avg")
+    df = session.createDataFrame([(1982, 15), (1990, 2)], ["birth", "age"])
+    assert df.select(try_avg("age")).first()[0] == 8.5
+
+
+def test_try_divide(get_session_and_func, get_func):
+    session, try_divide = get_session_and_func("try_divide")
+    make_interval = get_func("make_interval", session)
+    lit = get_func("lit", session)
+    df = session.createDataFrame([(6000, 15), (1990, 2)], ["a", "b"])
+    assert df.select(try_divide(df.a, df.b).alias("r")).collect() == [
+        Row(r=400.0),
+        Row(r=995.0),
+    ]
+    df = session.createDataFrame([(1, 2)], ["year", "month"])
+    assert (
+        df.select(try_divide(make_interval(df.year), df.month).cast("string").alias("r")).first()[0]
+        == "6 months"
+    )
+    assert (
+        df.select(
+            try_divide(make_interval(df.year, df.month), lit(2)).cast("string").alias("r")
+        ).first()[0]
+        == "7 months"
+    )
+    assert (
+        df.select(
+            try_divide(make_interval(df.year, df.month), lit(0)).cast("string").alias("r")
+        ).first()[0]
+        is None
+    )
+
+
+def test_try_multiply(get_session_and_func, get_func):
+    session, try_multiply = get_session_and_func("try_multiply")
+    make_interval = get_func("make_interval", session)
+    lit = get_func("lit", session)
+    df = session.createDataFrame([(6000, 15), (1990, 2)], ["a", "b"])
+    assert df.select(try_multiply(df.a, df.b).alias("r")).collect() == [
+        Row(r=90000),
+        Row(r=3980),
+    ]
+    df = session.createDataFrame([(2, 3)], ["a", "b"])
+    assert (
+        df.select(try_multiply(make_interval(df.a), df.b).cast("string").alias("r")).first()[0]
+        == "6 years"
+    )
+
+
+def test_try_subtract(get_session_and_func, get_func, get_types):
+    session, try_subtract = get_session_and_func("try_subtract")
+    make_interval = get_func("make_interval", session)
+    types = get_types(session)
+    lit = get_func("lit", session)
+    to_date = get_func("to_date", session)
+    df = session.createDataFrame([(6000, 15), (1990, 2)], ["a", "b"])
+    assert df.select(try_subtract(df.a, df.b).alias("r")).collect() == [
+        Row(r=5985),
+        Row(r=1988),
+    ]
+    schema = types.StructType(
+        [
+            types.StructField("i", types.IntegerType(), True),
+            types.StructField("d", types.StringType(), True),
+        ]
+    )
+    df = session.createDataFrame([(1, "2015-09-30")], schema)
+    df = df.select(df.i, to_date(df.d).alias("d"))
+    assert df.select(try_subtract(df.d, df.i).alias("r")).first()[0] == datetime.date(2015, 9, 29)
+    assert df.select(try_subtract(df.d, make_interval(df.i)).alias("r")).first()[
+        0
+    ] == datetime.date(2014, 9, 30)
+    assert df.select(
+        try_subtract(df.d, make_interval(lit(0), lit(0), lit(0), df.i)).alias("r")
+    ).first()[0] == datetime.date(2015, 9, 29)
+    assert (
+        df.select(
+            try_subtract(make_interval(df.i), make_interval(df.i)).cast("string").alias("r")
+        ).first()[0]
+        == "0 seconds"
+    )
+
+
+def test_try_sum(get_session_and_func, get_func):
+    session, try_sum = get_session_and_func("try_sum")
+    assert session.range(10).select(try_sum("id")).first()[0] == 45
+
+
+def test_try_to_binary(get_session_and_func, get_func):
+    session, try_to_binary = get_session_and_func("try_to_binary")
+    lit = get_func("lit", session)
+    df = session.createDataFrame([("abc",)], ["e"])
+    assert df.select(try_to_binary(df.e, lit("utf-8")).alias("r")).first()[0] == bytearray(b"abc")
+    df = session.createDataFrame([("414243",)], ["e"])
+    assert df.select(try_to_binary(df.e).alias("r")).first()[0] == bytearray(b"ABC")
+
+
+def test_try_to_number(get_session_and_func, get_func):
+    session, try_to_number = get_session_and_func("try_to_number")
+    lit = get_func("lit", session)
+    df = session.createDataFrame([("$78.12",)], ["e"])
+    actual = df.select(try_to_number(df.e, lit("$99.99")).alias("r")).first()[0]
+    if isinstance(session, SparkSession):
+        expected = 78.12
+    else:
+        expected = Decimal("78.12")
+    assert actual == expected
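Worth noting on test_try_to_number: the Spark path expects a plain float while the other engines' drivers return decimal.Decimal, and cross-type equality fails for inexact floats, hence the isinstance(session, SparkSession) branch. A tiny illustration (values from the assertion above):

    from decimal import Decimal

    assert Decimal("78.12") == Decimal("78.12")
    assert 78.12 != Decimal("78.12")  # the float is not exactly 78.12, so the comparison fails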
{sqlframe-1.7.1 → sqlframe-1.8.0}/tests/unit/standalone/test_functions.py

@@ -2800,3 +2800,106 @@ def test_nullif():
     )
 def test_stack(expression, expected):
     assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (
+            SF.make_interval("cola", "colb", None, SF.lit(100)),
+            "MAKE_INTERVAL(cola, colb, NULL, 100)",
+        ),
+    ],
+)
+def test_make_interval(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_add("cola", "colb"), "TRY_ADD(cola, colb)"),
+        (SF.try_add(SF.col("cola"), SF.col("colb")), "TRY_ADD(cola, colb)"),
+    ],
+)
+def test_try_add(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_avg("cola"), "TRY_AVG(cola)"),
+        (SF.try_avg(SF.col("cola")), "TRY_AVG(cola)"),
+    ],
+)
+def test_try_avg(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_divide("cola", "colb"), "TRY_DIVIDE(cola, colb)"),
+        (SF.try_divide(SF.col("cola"), SF.col("colb")), "TRY_DIVIDE(cola, colb)"),
+    ],
+)
+def test_try_divide(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_multiply("cola", "colb"), "TRY_MULTIPLY(cola, colb)"),
+        (SF.try_multiply(SF.col("cola"), SF.col("colb")), "TRY_MULTIPLY(cola, colb)"),
+    ],
+)
+def test_try_multiply(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_subtract("cola", "colb"), "TRY_SUBTRACT(cola, colb)"),
+        (SF.try_subtract(SF.col("cola"), SF.col("colb")), "TRY_SUBTRACT(cola, colb)"),
+    ],
+)
+def test_try_subtract(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_sum("cola"), "TRY_SUM(cola)"),
+        (SF.try_sum(SF.col("cola")), "TRY_SUM(cola)"),
+    ],
+)
+def test_try_sum(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_to_binary("cola"), "TRY_TO_BINARY(cola)"),
+        (SF.try_to_binary(SF.col("cola")), "TRY_TO_BINARY(cola)"),
+        (SF.try_to_binary("cola", SF.lit("UTF-8")), "TRY_TO_BINARY(cola, 'UTF-8')"),
+    ],
+)
+def test_try_to_binary(expression, expected):
+    assert expression.sql() == expected
+
+
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.try_to_number("cola"), "TRY_TO_NUMBER(cola)"),
+        (SF.try_to_number(SF.col("cola")), "TRY_TO_NUMBER(cola)"),
+        (SF.try_to_number(SF.col("cola"), SF.lit("$99.99")), "TRY_TO_NUMBER(cola, '$99.99')"),
+    ],
+)
+def test_try_to_number(expression, expected):
+    assert expression.sql() == expected