sqlframe 1.7.1.tar.gz → 1.9.0.tar.gz
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in those registries.
- {sqlframe-1.7.1 → sqlframe-1.9.0}/PKG-INFO +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/spark.md +12 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/setup.py +4 -4
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/_version.py +2 -2
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/column.py +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/functions.py +100 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/catalog.py +4 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/functions.pyi +12 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/requires.txt +4 -4
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/conftest.py +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/bigquery/test_bigquery_session.py +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_session.py +1 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/test_int_functions.py +339 -2
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_column.py +5 -1
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_functions.py +172 -4
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.github/CODEOWNERS +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.gitignore +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/.readthedocs.yaml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/LICENSE +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/Makefile +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/README.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/cake.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/bigquery.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/configuration.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/docs/postgres.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/duckdb.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/images/SF.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/images/favicon.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/index.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/postgres.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/requirements.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/snowflake.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/standalone.md +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/mkdocs.yml +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/pytest.ini +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/renovate.json +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/setup.cfg +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/LICENSE +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/util.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/common_fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-1.7.1 → sqlframe-1.9.0}/tests/unit/test_util.py +0 -0
{sqlframe-1.7.1 → sqlframe-1.9.0}/docs/spark.md

```diff
@@ -183,6 +183,8 @@ df.show(5)
 * [acos](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acos.html)
 * [acosh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.acosh.html)
 * [add_months](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.add_months.html)
+* [aes_encrypt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aes_encrypt.html)
+* [aes_decrypt](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aes_decrypt.html)
 * [aggregate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.aggregate.html)
 * [approxCountDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approxCountDistinct.html)
 * [approx_count_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.approx_count_distinct.html)
@@ -310,6 +312,7 @@ df.show(5)
 * [lpad](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.lpad.html)
 * [ltrim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.ltrim.html)
 * [make_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.make_date.html)
+* [make_interval](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.make_interval.html)
 * [map_concat](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_concat.html)
 * [map_entries](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_entries.html)
 * [map_filter](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.map_filter.html)
@@ -397,6 +400,7 @@ df.show(5)
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
+* [to_binary](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_binary.html)
 * [to_csv](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_csv.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_json](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_json.html)
@@ -408,6 +412,14 @@ df.show(5)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
+* [try_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_add.html)
+* [try_avg](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_avg.html)
+* [try_divide](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_divide.html)
+* [try_multiply](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_multiply.html)
+* [try_subtract](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_subtract.html)
+* [try_sum](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_sum.html)
+* [try_to_binary](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_binary.html)
+* [try_to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.try_to_number.html)
 * [typeof](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.typeof.html)
 * [unbase64](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unbase64.html)
 * [unhex](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.unhex.html)
```
{sqlframe-1.7.1 → sqlframe-1.9.0}/setup.py

```diff
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.11.0",
-        "sqlglot>=24.0.0,<25.
+        "sqlglot>=24.0.0,<25.4",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
@@ -31,7 +31,7 @@ setup(
         "dev": [
             "duckdb>=0.9,<1.1",
             "mypy>=1.10.0,<1.11",
-            "openai>=1.30,<1.
+            "openai>=1.30,<1.36",
             "pandas>=2,<3",
             "pandas-stubs>=2,<3",
             "psycopg>=3.1,<4",
@@ -57,7 +57,7 @@ setup(
             "pandas>=2,<3",
         ],
         "openai": [
-            "openai>=1.30,<1.
+            "openai>=1.30,<1.36",
         ],
         "pandas": [
             "pandas>=2,<3",
@@ -69,7 +69,7 @@ setup(
             "redshift_connector>=2.1.1,<2.2.0",
         ],
         "snowflake": [
-            "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.
+            "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.12",
         ],
         "spark": [
             "pyspark>=2,<3.6",
```
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/column.py

```diff
@@ -229,7 +229,7 @@ class Column:
         return Column(op)
 
     def unary_op(self, klass: t.Callable, **kwargs) -> Column:
-        return Column(klass(this=self.column_expression, **kwargs))
+        return Column(klass(this=exp.Paren(this=self.column_expression), **kwargs))
 
     @property
     def is_alias(self):
```
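The `unary_op` change is a precedence fix: wrapping the operand in `exp.Paren` keeps a unary operator from re-associating with part of a compound expression when the tree is rendered back to SQL. A minimal sketch built directly with sqlglot to illustrate the difference (this is not sqlframe's exact call path):

```python
import sqlglot
from sqlglot import exp

# A compound boolean expression used as the operand of a unary operator.
cond = sqlglot.parse_one("a OR b")

# Old behavior: NOT is rendered without grouping, changing the meaning
# to (NOT a) OR b once the SQL is re-parsed.
print(exp.Not(this=cond).sql())                  # NOT a OR b

# New behavior: the operand is parenthesized first, preserving intent.
print(exp.Not(this=exp.Paren(this=cond)).sql())  # NOT (a OR b)
```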
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/functions.py

```diff
@@ -1682,6 +1682,98 @@ def stack(*cols: ColumnOrName) -> Column:
     )
 
 
+@meta(unsupported_engines="*")
+def make_interval(
+    years: t.Optional[ColumnOrName] = None,
+    months: t.Optional[ColumnOrName] = None,
+    weeks: t.Optional[ColumnOrName] = None,
+    days: t.Optional[ColumnOrName] = None,
+    hours: t.Optional[ColumnOrName] = None,
+    mins: t.Optional[ColumnOrName] = None,
+    secs: t.Optional[ColumnOrName] = None,
+) -> Column:
+    columns = _ensure_column_of_optionals([years, months, weeks, days, hours, mins, secs])
+    if not columns:
+        raise ValueError("At least one value must be provided")
+    return Column.invoke_anonymous_function(columns[0], "MAKE_INTERVAL", *columns[1:])
+
+
+@meta(unsupported_engines="*")
+def try_add(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_ADD", right)
+
+
+@meta(unsupported_engines="*")
+def try_avg(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "TRY_AVG")
+
+
+@meta(unsupported_engines="*")
+def try_divide(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_DIVIDE", right)
+
+
+@meta(unsupported_engines="*")
+def try_multiply(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_MULTIPLY", right)
+
+
+@meta(unsupported_engines="*")
+def try_subtract(left: ColumnOrName, right: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(left, "TRY_SUBTRACT", right)
+
+
+@meta(unsupported_engines="*")
+def try_sum(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "TRY_SUM")
+
+
+@meta(unsupported_engines="*")
+def try_to_binary(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TRY_TO_BINARY", format)
+    return Column.invoke_anonymous_function(col, "TRY_TO_BINARY")
+
+
+@meta(unsupported_engines="*")
+def try_to_number(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TRY_TO_NUMBER", format)
+    return Column.invoke_anonymous_function(col, "TRY_TO_NUMBER")
+
+
+@meta(unsupported_engines="*")
+def aes_decrypt(
+    input: ColumnOrName,
+    key: ColumnOrName,
+    mode: t.Optional[ColumnOrName] = None,
+    padding: t.Optional[ColumnOrName] = None,
+    aad: t.Optional[ColumnOrName] = None,
+) -> Column:
+    columns = _ensure_column_of_optionals([key, mode, padding, aad])
+    return Column.invoke_anonymous_function(input, "AES_DECRYPT", *columns)
+
+
+@meta(unsupported_engines="*")
+def aes_encrypt(
+    input: ColumnOrName,
+    key: ColumnOrName,
+    mode: t.Optional[ColumnOrName] = None,
+    padding: t.Optional[ColumnOrName] = None,
+    iv: t.Optional[ColumnOrName] = None,
+    aad: t.Optional[ColumnOrName] = None,
+) -> Column:
+    columns = _ensure_column_of_optionals([key, mode, padding, iv, aad])
+    return Column.invoke_anonymous_function(input, "AES_ENCRYPT", *columns)
+
+
+@meta(unsupported_engines="*")
+def to_binary(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    if format is not None:
+        return Column.invoke_anonymous_function(col, "TO_BINARY", format)
+    return Column.invoke_anonymous_function(col, "TO_BINARY")
+
+
 @meta()
 def _lambda_quoted(value: str) -> t.Optional[bool]:
     return False if value == "_" else None
```
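All of the new functions route through `Column.invoke_anonymous_function`, which emits the function call by name rather than mapping it to a typed sqlglot expression, so the target engine must understand the function itself. A sketch of the tree this plausibly builds, written directly against sqlglot (the construction below is illustrative, not sqlframe's internal code):

```python
from sqlglot import exp

# invoke_anonymous_function(left, "TRY_DIVIDE", right) amounts to an
# exp.Anonymous node: the name is passed through verbatim, so Spark is
# responsible for resolving TRY_DIVIDE at execution time.
node = exp.Anonymous(
    this="TRY_DIVIDE",
    expressions=[exp.column("a"), exp.column("b")],
)
print(node.sql(dialect="spark"))  # TRY_DIVIDE(a, b)
```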
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/base/functions.py (continued)

```diff
@@ -1697,3 +1789,11 @@ def _get_lambda_from_func(lambda_expression: t.Callable):
         this=lambda_expression(*[Column(x) for x in variables]).expression,
         expressions=variables,
     )
+
+
+def _ensure_column_of_optionals(optionals: t.List[t.Optional[ColumnOrName]]) -> t.List[Column]:
+    for value in reversed(optionals.copy()):
+        if value is not None:
+            break
+        optionals = optionals[:-1]
+    return [Column.ensure_col(x) if x is not None else lit(None) for x in optionals]
```
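The new `_ensure_column_of_optionals` helper drops trailing `None` arguments and converts any interior `None` to `lit(None)`, so the optional positional parameters of functions like `make_interval` and `aes_encrypt` stay aligned in the generated call. A pure-Python sketch of just the trimming step, with a hypothetical name:

```python
import typing as t

def trim_trailing_nones(values: t.List[t.Optional[object]]) -> t.List[t.Optional[object]]:
    # Mirror the helper's loop: walk from the end, dropping Nones until the
    # first non-None value; interior Nones are kept (the real helper then
    # replaces them with lit(None) to hold their argument position).
    out = list(values)
    while out and out[-1] is None:
        out.pop()
    return out

assert trim_trailing_nones([1, None, 3, None, None]) == [1, None, 3]
assert trim_trailing_nones([None, None]) == []
```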
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/catalog.py

```diff
@@ -519,7 +519,10 @@ class SparkCatalog(
             )
             for col in df.columns
         ]
-        return [
+        return [
+            Column(**{name: x._asdict()[name] for name in Column._fields})
+            for x in self._spark_catalog.listColumns(tableName, dbName)
+        ]
 
     def listFunctions(
         self, dbName: t.Optional[str] = None, pattern: t.Optional[str] = None
```
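The `listColumns` fix constructs sqlframe's `Column` metadata object from only the field names the two sides share; the likely motivation (an assumption, not stated in the diff) is that the object returned by the underlying Spark catalog can carry extra attributes that a direct `Column(**x._asdict())` would reject. A toy sketch of the pattern with stand-in namedtuples (the real classes live in PySpark and sqlframe's catalog module):

```python
from collections import namedtuple

# Stand-ins: the target type has a fixed field set, while the source
# object may carry additional fields.
Column = namedtuple("Column", ["name", "dataType", "nullable"])
SparkColumn = namedtuple("SparkColumn", ["name", "dataType", "nullable", "isPartition"])

x = SparkColumn(name="cola", dataType="int", nullable=True, isPartition=False)

# Copy only the fields Column knows about; extras are silently ignored.
col = Column(**{name: x._asdict()[name] for name in Column._fields})
print(col)  # Column(name='cola', dataType='int', nullable=True)
```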
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe/spark/functions.pyi

```diff
@@ -7,6 +7,8 @@ from sqlframe.base.functions import (
     abs as abs,
     acos as acos,
     acosh as acosh,
+    aes_encrypt as aes_encrypt,
+    aes_decrypt as aes_decrypt,
     aggregate as aggregate,
     approxCountDistinct as approxCountDistinct,
     approx_count_distinct as approx_count_distinct,
@@ -132,6 +134,7 @@ from sqlframe.base.functions import (
     lpad as lpad,
     ltrim as ltrim,
     make_date as make_date,
+    make_interval as make_interval,
     map_concat as map_concat,
     map_entries as map_entries,
     map_filter as map_filter,
@@ -217,6 +220,7 @@ from sqlframe.base.functions import (
     timestamp_seconds as timestamp_seconds,
     toDegrees as toDegrees,
     toRadians as toRadians,
+    to_binary as to_binary,
     to_csv as to_csv,
     to_date as to_date,
     to_json as to_json,
@@ -228,6 +232,14 @@ from sqlframe.base.functions import (
     translate as translate,
     trim as trim,
     trunc as trunc,
+    try_add as try_add,
+    try_avg as try_avg,
+    try_divide as try_divide,
+    try_multiply as try_multiply,
+    try_subtract as try_subtract,
+    try_sum as try_sum,
+    try_to_binary as try_to_binary,
+    try_to_number as try_to_number,
     typeof as typeof,
     unbase64 as unbase64,
     unhex as unhex,
```
{sqlframe-1.7.1 → sqlframe-1.9.0}/sqlframe.egg-info/requires.txt

```diff
@@ -1,5 +1,5 @@
 prettytable<3.11.0
-sqlglot<25.
+sqlglot<25.4,>=24.0.0
 typing_extensions<5,>=4.8
 
 [bigquery]
@@ -9,7 +9,7 @@ google-cloud-bigquery[pandas]<4,>=3
 [dev]
 duckdb<1.1,>=0.9
 mypy<1.11,>=1.10.0
-openai<1.
+openai<1.36,>=1.30
 pandas-stubs<3,>=2
 pandas<3,>=2
 psycopg<4,>=3.1
@@ -39,7 +39,7 @@ duckdb<1.1,>=0.9
 pandas<3,>=2
 
 [openai]
-openai<1.
+openai<1.36,>=1.30
 
 [pandas]
 pandas<3,>=2
@@ -51,7 +51,7 @@ psycopg2<3,>=2.8
 redshift_connector<2.2.0,>=2.1.1
 
 [snowflake]
-snowflake-connector-python[secure-local-storage]<3.
+snowflake-connector-python[secure-local-storage]<3.12,>=3.10.0
 
 [spark]
 pyspark<3.6,>=2
```
{sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/bigquery/test_bigquery_session.py

```diff
@@ -16,5 +16,5 @@ def test_session_from_config():
     conn.cursor().execute("CREATE SCHEMA IF NOT EXISTS db1")
     conn.cursor().execute("CREATE TABLE IF NOT EXISTS db1.test_table (cola INT, colb STRING)")
     session = BigQuerySession.builder.config("default_dataset", "sqlframe.db1").getOrCreate()
-    columns = session.catalog.get_columns("test_table")
+    columns = session.catalog.get_columns("db1.test_table")
     assert columns == {"`cola`": exp.DataType.build("BIGINT"), "`colb`": exp.DataType.build("TEXT")}
```
{sqlframe-1.7.1 → sqlframe-1.9.0}/tests/integration/engines/duck/test_duckdb_session.py

```diff
@@ -10,4 +10,4 @@ def test_session_from_config():
     conn.execute("CREATE TABLE test_table (cola INT, colb STRING)")
     session = DuckDBSession.builder.config("sqlframe.conn", conn).getOrCreate()
     columns = session.catalog.get_columns("test_table")
-    assert columns == {'"cola"': exp.DataType.build("INT"), '"colb"': exp.DataType.build("
+    assert columns == {'"cola"': exp.DataType.build("INT"), '"colb"': exp.DataType.build("TEXT")}
```