sqlframe 3.36.1__tar.gz → 3.36.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-3.36.1 → sqlframe-3.36.3}/Makefile +1 -1
- {sqlframe-3.36.1 → sqlframe-3.36.3}/PKG-INFO +1 -1
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/bigquery.md +1 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/duckdb.md +1 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/postgres.md +2 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/snowflake.md +1 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/setup.py +4 -3
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/_version.py +2 -2
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/functions.py +33 -3
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/group.py +51 -2
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/requires.txt +4 -3
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_int_functions.py +42 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/test_int_grouped_data.py +61 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_functions.py +18 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.github/CODEOWNERS +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.gitignore +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/.readthedocs.yaml +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/LICENSE +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/README.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/cake.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/configuration.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/databricks.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/docs/postgres.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/images/SF.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/images/favicon.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/index.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/redshift.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/requirements.txt +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/spark.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/standalone.md +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/mkdocs.yml +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/pytest.ini +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/renovate.json +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/setup.cfg +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/LICENSE +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/mixins/table_mixins.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/util.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/py.typed +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/common_fixtures.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/conftest.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/issue_219.csv +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_table.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/conftest.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/databricks/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/duck/test_reader_options.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/spark/test_reader_options.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/test_base_reader_options.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/test_catalog.py +0 -0
- {sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/test_util.py +0 -0
{sqlframe-3.36.1 → sqlframe-3.36.3}/docs/bigquery.md
@@ -507,6 +507,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sum_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum_distinct.html)
 * [tan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tan.html)
 * [tanh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tanh.html)
+* [timestamp_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_add.html)
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)

{sqlframe-3.36.1 → sqlframe-3.36.3}/docs/duckdb.md
@@ -468,6 +468,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sumDistinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sumDistinct.html)
 * [sum_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum_distinct.html)
 * [tan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tan.html)
+* [timestamp_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_add.html)
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)

{sqlframe-3.36.1 → sqlframe-3.36.3}/docs/postgres.md
@@ -455,6 +455,8 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sum_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum_distinct.html)
 * [tan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tan.html)
 * [tanh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tanh.html)
+* [timestamp_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_add.html)
+    * The quantity argument must be literal, not a column
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)

{sqlframe-3.36.1 → sqlframe-3.36.3}/docs/snowflake.md
@@ -507,6 +507,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [sum_distinct](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.sum_distinct.html)
 * [tan](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tan.html)
 * [tanh](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.tanh.html)
+* [timestamp_add](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_add.html)
 * [timestamp_seconds](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.timestamp_seconds.html)
 * [toDegrees](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toDegrees.html)
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
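All four docs entries point at the same new function. As a minimal usage sketch, assuming the `StandaloneSession` and standalone `functions` entry points from the SQLFrame docs (only the `timestamp_add`/`lit` calls and the rendering pattern come from this diff, mirroring the unit test added further down):

```python
from sqlframe.standalone import StandaloneSession
from sqlframe.standalone import functions as F

# A session has to exist so timestamp_add can pick its dialect-specific branch.
session = StandaloneSession()

expr = F.timestamp_add("week", F.lit(5), "ts")
# Per the unit test below, this renders as DATE_ADD(WEEK, 5, ts) for the Spark dialect.
# Note the Postgres entry above: there the quantity argument must be a literal, not a column.
print(expr.column_expression.sql(dialect="spark"))
```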
{sqlframe-3.36.1 → sqlframe-3.36.3}/setup.py
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.9",
     install_requires=[
         "prettytable<4",
-        "sqlglot>=24.0.0,<26.
+        "sqlglot>=24.0.0,<26.34",
         "typing_extensions",
     ],
     extras_require={
@@ -41,7 +41,8 @@ setup(
             "pytest>=8.2.0,<8.5",
             "pytest-forked",
             "pytest-postgresql>=6,<8",
-            "pytest-
+            "pytest-rerunfailures",
+            "pytest-xdist>=3.6,<3.9",
             "pre-commit>=3.7,<5",
             "ruff>=0.4.4,<0.13",
             "types-psycopg2>=2.9,<3",
@@ -70,7 +71,7 @@ setup(
            "redshift_connector>=2.1.1,<2.2.0",
        ],
        "snowflake": [
-           "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.
+           "snowflake-connector-python[secure-local-storage]>=3.10.0,<3.17",
        ],
        "spark": [
            "pyspark>=2,<3.6",
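The release raises the sqlglot upper bound to <26.34, the snowflake-connector-python bound to <3.17, and adds pytest-rerunfailures plus a pytest-xdist>=3.6,<3.9 range to the dev extra. A quick, hedged way to check an existing environment against the new runtime pins, assuming the `packaging` distribution is installed (the pins themselves are copied from the diff above):

```python
from importlib.metadata import PackageNotFoundError, version

from packaging.specifiers import SpecifierSet

# Runtime pins copied from the updated setup.py above.
pins = {
    "sqlglot": SpecifierSet(">=24.0.0,<26.34"),
    "snowflake-connector-python": SpecifierSet(">=3.10.0,<3.17"),
}

for name, spec in pins.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        continue  # optional extra not installed
    status = "ok" if installed in spec else "outside the new pin"
    print(f"{name} {installed}: {status}")
```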
{sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/functions.py
@@ -1475,6 +1475,32 @@ def timestamp_seconds(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.UnixToTime)
 
 
+@meta()
+def timestamp_add(unit: str, quantity: ColumnOrName, ts: ColumnOrName) -> Column:
+    session = _get_session()
+
+    if session._is_duckdb or session._is_postgres:
+        quantity = lit(quantity) if isinstance(quantity, int) else quantity
+        if (
+            isinstance(quantity, Column)
+            and isinstance(quantity.expression, expression.Literal)
+            and quantity.expression.is_number
+            and int(quantity.expression.this) < 0
+        ):
+            # If quantity is a negative literal, we use DateSub
+            expr = expression.DateSub
+            quantity.expression.set("this", str(-int(quantity.expression.this)))
+        else:
+            expr = expression.DateAdd  # type: ignore
+        return Column.invoke_expression_over_column(
+            ts, expr, expression=quantity, unit=expression.Var(this=unit.upper())
+        )
+
+    return Column.invoke_expression_over_column(
+        ts, expression.TimestampAdd, expression=quantity, unit=expression.Var(this=unit.upper())
+    )
+
+
 @meta(unsupported_engines=["*", "spark"])
 def window(
     timeColumn: ColumnOrName,
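A hedged usage sketch of the two branches above on DuckDB. The `DuckDBSession` entry point is taken from the SQLFrame docs, not from this diff; the `timestamp_add`/`lit` calls mirror the integration test added further down:

```python
import datetime

from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame(
    [(datetime.datetime(2016, 3, 11, 9, 0, 7), 2)], ["ts", "quantity"]
)

# Positive literal quantity: the DuckDB/Postgres branch emits a DateAdd expression.
df.select(F.timestamp_add("week", F.lit(5), "ts")).show()

# Negative literal quantity: rewritten to DateSub with the sign flipped, per the branch above.
df.select(F.timestamp_add("day", F.lit(-5), "ts")).show()
```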
@@ -1796,7 +1822,9 @@ def substring_index(str: ColumnOrName, delim: str, count: int) -> Column:
     if session._is_bigquery:
         return substring_index_bgutil(str, delim, count)
 
-    return Column.
+    return Column.invoke_expression_over_column(
+        str, expression.SubstringIndex, delimiter=lit(delim), count=lit(count)
+    )
 
 
 @meta(unsupported_engines="bigquery")
@@ -2205,7 +2233,9 @@ def slice(
 
     start_col = lit(start) if isinstance(start, int) else start
     length_col = lit(length) if isinstance(length, int) else length
-    return Column.
+    return Column.invoke_expression_over_column(
+        x, expression.ArraySlice, start=start_col, end=length_col
+    )
 
 
 @meta()
@@ -2748,7 +2778,7 @@ def typeof(col: ColumnOrName) -> Column:
     if session._is_snowflake:
         return typeof_from_variant(col)
 
-    return Column.
+    return Column.invoke_expression_over_column(col, expression.Typeof)
 
 
 @meta()
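The three changes above all build their result through `Column.invoke_expression_over_column` with a typed sqlglot expression. A hedged sketch of inspecting the generated SQL; the DuckDB session setup is again assumed from the SQLFrame docs, and no particular output is claimed here:

```python
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()

# substring_index now wraps expression.SubstringIndex ...
print(F.substring_index("s", ".", 2).column_expression.sql(dialect="duckdb"))
# ... slice wraps expression.ArraySlice ...
print(F.slice("arr", 2, 2).column_expression.sql(dialect="duckdb"))
# ... and typeof wraps expression.Typeof.
print(F.typeof("s").column_expression.sql(dialect="duckdb"))
```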
{sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe/base/group.py
@@ -70,6 +70,11 @@ class _BaseGroupedData(t.Generic[DF]):
 
         from sqlframe.base import functions as F
 
+        if self.session._is_snowflake and len(cols) > 1:
+            raise ValueError(
+                "Snowflake does not support multiple aggregation functions in a single group by operation."
+            )
+
         # Build the pivot expression
         # First, we need to convert the DataFrame to include the pivot logic
         df = self._df.copy()
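A hedged sketch of what the new guard means for callers: a pivot that requests more than one aggregation against a Snowflake session should now fail fast with this ValueError. The `snowflake_session` fixture and the functions import are illustration-only assumptions, not part of this diff:

```python
import pytest

from sqlframe.snowflake import functions as F


def test_snowflake_pivot_rejects_multiple_aggregations(snowflake_session):
    df = snowflake_session.createDataFrame(
        [("dotNET", 2012, 10000)], ["course", "year", "earnings"]
    )
    # The guard above raises while building the pivot, before any SQL runs.
    with pytest.raises(ValueError, match="multiple aggregation functions"):
        df.groupBy("year").pivot("course").agg(F.sum("earnings"), F.max("earnings"))
```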
@@ -132,11 +137,55 @@ class _BaseGroupedData(t.Generic[DF]):
         subquery.set("pivots", [pivot])
 
         # Create the final select from the pivoted subquery
-
+        final_select_in_values = []
+        for col in in_values:  # type: ignore
+            for agg_col in cols:
+                original_name = col.alias_or_name  # type: ignore
+                if self.session._is_snowflake:
+                    # Snowflake takes the provided values, like 'Java', and creates the column as "'Java'"
+                    # Therefore the user to select the column would need to use "'Java'"
+                    # This does not conform to the PySpark API, nor is it very user-friendly.
+                    # Therefore, we select the column as expected, and tell SQLFrame it is case-sensitive, but then
+                    # alias is to case-insensitive "Java" so that the user can select it without quotes.
+                    # This has a downside that if a user really needed case-sensitive column names then it wouldn't work.
+                    new_col = exp.to_column(
+                        col.alias_or_name,  # type: ignore
+                        quoted=True,
+                        dialect=self.session.execution_dialect,
+                    )
+                    new_col.this.set("this", f"'{new_col.this.this}'")
+                    new_col = exp.alias_(new_col, original_name)
+                    new_col.unalias()._meta = {"case_sensitive": True}
+                elif self.session._is_bigquery:
+                    # BigQuery flips the alias order to <alias>_<value> instead of <value>_<alias>
+                    new_col = exp.to_column(
+                        f"{agg_col.alias_or_name}_{original_name}",
+                        dialect=self.session.execution_dialect,
+                    )
+                    new_col = (
+                        exp.alias_(new_col, original_name)
+                        if len(cols) == 1
+                        else exp.alias_(new_col, f"{original_name}_{agg_col.alias_or_name}")
+                    )
+                elif self.session._is_duckdb:
+                    # DuckDB always respects the alias if if num_cols == 1
+                    new_col = exp.column(f"{original_name}_{agg_col.expression.alias_or_name}")
+                    if len(cols) == 1:
+                        new_col = exp.alias_(new_col, original_name)
+                else:
+                    new_col = (
+                        exp.column(original_name)
+                        if len(cols) == 1
+                        else exp.column(f"{original_name}_{agg_col.expression.alias_or_name}")
+                    )
+                final_select_in_values.append(new_col)
+
+        expression = exp.select(
+            *[x.column_expression for x in self.group_by_cols] + final_select_in_values  # type: ignore
+        ).from_(subquery)
 
         return self._df.copy(expression=expression)
 
-        # Original non-pivot logic
         if not self.group_by_cols or not isinstance(self.group_by_cols[0], (list, tuple, set)):
             expression = self._df.expression.group_by(
                 # User column_expression for group by to avoid alias in group by
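The rewritten final select normalizes the pivoted column names per engine so the plain PySpark-style name is selectable everywhere. A hedged sketch mirroring the integration test added further down; `DuckDBSession` is assumed from the SQLFrame docs:

```python
from sqlframe.duckdb import DuckDBSession

session = DuckDBSession()
df = session.createDataFrame(
    [
        {"course": "dotNET", "year": 2012, "earnings": 10000},
        {"course": "Java", "year": 2012, "earnings": 20000},
        {"course": "dotNET", "year": 2013, "earnings": 48000},
        {"course": "Java", "year": 2013, "earnings": 30000},
    ]
)

# Pivot values are auto-detected; "Java" is selectable without engine-specific
# quoting or alias mangling thanks to the aliasing logic above.
df.groupBy("year").pivot("course").sum("earnings").select("Java").show()
```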
{sqlframe-3.36.1 → sqlframe-3.36.3}/sqlframe.egg-info/requires.txt
@@ -1,5 +1,5 @@
 prettytable<4
-sqlglot<26.
+sqlglot<26.34,>=24.0.0
 typing_extensions
 
 [bigquery]
@@ -22,7 +22,8 @@ pyarrow<21,>=10
 pyspark<3.6,>=2
 pytest-forked
 pytest-postgresql<8,>=6
-pytest-
+pytest-rerunfailures
+pytest-xdist<3.9,>=3.6
 pytest<8.5,>=8.2.0
 ruff<0.13,>=0.4.4
 types-psycopg2<3,>=2.9
@@ -51,7 +52,7 @@ psycopg2<3,>=2.8
 redshift_connector<2.2.0,>=2.1.1
 
 [snowflake]
-snowflake-connector-python[secure-local-storage]<3.
+snowflake-connector-python[secure-local-storage]<3.17,>=3.10.0
 
 [spark]
 pyspark<3.6,>=2
{sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/engines/test_int_functions.py
@@ -1531,6 +1531,48 @@ def test_timestamp_seconds(get_session_and_func):
     )
 
 
+def test_timestamp_add(get_session_and_func, get_func):
+    session, timestamp_add = get_session_and_func("timestamp_add")
+    lit = get_func("lit", session)
+
+    # Test data from PySpark examples
+    df = session.createDataFrame(
+        [(datetime.datetime(2016, 3, 11, 9, 0, 7), 2), (datetime.datetime(2024, 4, 2, 9, 0, 7), 3)],
+        ["ts", "quantity"],
+    )
+
+    # Test adding years
+    if not session._is_postgres:
+        result = df.select(timestamp_add("year", "quantity", "ts")).collect()
+        expected_years = [
+            datetime.datetime(2018, 3, 11, 9, 0, 7),
+            datetime.datetime(2027, 4, 2, 9, 0, 7),
+        ]
+        for i, row in enumerate(result):
+            actual = row[0].replace(tzinfo=None) if row[0] else None
+            assert actual == expected_years[i], f"Year addition failed for row {i}"
+
+    # Test adding weeks
+    result = df.select(timestamp_add("WEEK", lit(5), "ts")).collect()
+    expected_weeks = [
+        datetime.datetime(2016, 4, 15, 9, 0, 7),
+        datetime.datetime(2024, 5, 7, 9, 0, 7),
+    ]
+    for i, row in enumerate(result):
+        actual = row[0].replace(tzinfo=None) if row[0] else None
+        assert actual == expected_weeks[i], f"Week addition failed for row {i}"
+
+    # Test subtracting days
+    result = df.select(timestamp_add("day", lit(-5), "ts")).collect()
+    expected_days = [
+        datetime.datetime(2016, 3, 6, 9, 0, 7),
+        datetime.datetime(2024, 3, 28, 9, 0, 7),
+    ]
+    for i, row in enumerate(result):
+        actual = row[0].replace(tzinfo=None) if row[0] else None
+        assert actual == expected_days[i], f"Day subtraction failed for row {i}"
+
+
 def test_window(get_session_and_func, get_func):
     session, window = get_session_and_func("window")
     sum = get_func("sum", session)
{sqlframe-3.36.1 → sqlframe-3.36.3}/tests/integration/test_int_grouped_data.py
@@ -370,3 +370,64 @@ def test_pivot_multiple_aggregations(
     )
 
     compare_frames(df_pivot, dfs_pivot)
+
+
+def test_pivot_without_values_and_selects(
+    pyspark_employee: DataFrame,
+    compare_frames: t.Callable,
+    get_session: t.Callable,
+    is_postgres: t.Callable,
+):
+    """Test pivot without values (auto-detect)"""
+    sqlf_spark = get_session()
+    if is_postgres():
+        pytest.skip("Pivot operation is not supported in Postgres")
+    spark = pyspark_employee.sparkSession
+
+    # Create test data based on PySpark documentation example
+    df1 = spark.createDataFrame(
+        [
+            Row(course="dotNET", year=2012, earnings=10000),
+            Row(course="Java", year=2012, earnings=20000),
+            Row(course="dotNET", year=2012, earnings=5000),
+            Row(course="dotNET", year=2013, earnings=48000),
+            Row(course="Java", year=2013, earnings=30000),
+        ]
+    )
+
+    # Create the same DataFrame in SQLFrame
+    dfs1 = sqlf_spark.createDataFrame(
+        [
+            {
+                "course": "dotNET",
+                "year": 2012,
+                "earnings": 10000,
+            },
+            {
+                "course": "Java",
+                "year": 2012,
+                "earnings": 20000,
+            },
+            {
+                "course": "dotNET",
+                "year": 2012,
+                "earnings": 5000,
+            },
+            {
+                "course": "dotNET",
+                "year": 2013,
+                "earnings": 48000,
+            },
+            {
+                "course": "Java",
+                "year": 2013,
+                "earnings": 30000,
+            },
+        ]
+    )
+
+    # Test pivot without values (auto-detect)
+    df_pivot = df1.groupBy("year").pivot("course").sum("earnings").select("Java")
+    dfs_pivot = dfs1.groupBy("year").pivot("course").sum("earnings").select("Java")
+
+    compare_frames(df_pivot, dfs_pivot)
{sqlframe-3.36.1 → sqlframe-3.36.3}/tests/unit/standalone/test_functions.py
@@ -1544,6 +1544,24 @@ def test_timestamp_seconds(expression, expected):
     assert expression.column_expression.sql(dialect="spark") == expected
 
 
+@pytest.mark.parametrize(
+    "expression, expected",
+    [
+        (SF.timestamp_add("year", "quantity", "ts"), "DATE_ADD(YEAR, quantity, ts)"),
+        (SF.timestamp_add("WEEK", SF.lit(5), "ts"), "DATE_ADD(WEEK, 5, ts)"),
+        (SF.timestamp_add("day", SF.lit(-5), "ts"), "DATE_ADD(DAY, -5, ts)"),
+        (
+            SF.timestamp_add("hour", SF.col("quantity"), SF.col("ts")),
+            "DATE_ADD(HOUR, quantity, ts)",
+        ),
+        (SF.timestamp_add("second", SF.lit(120), "ts"), "DATE_ADD(SECOND, 120, ts)"),
+        (SF.timestamp_add("month", "quantity", "ts"), "DATE_ADD(MONTH, quantity, ts)"),
+    ],
+)
+def test_timestamp_add(expression, expected):
+    assert expression.column_expression.sql(dialect="spark") == expected
+
+
 @pytest.mark.parametrize(
     "expression, expected",
     [