sqlframe 3.9.3__tar.gz → 3.10.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlframe-3.9.3 → sqlframe-3.10.0}/PKG-INFO +1 -1
- {sqlframe-3.9.3 → sqlframe-3.10.0}/setup.py +4 -3
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/_version.py +2 -2
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/function_alternatives.py +33 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/functions.py +1 -1
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/session.py +2 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/functions.py +1 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.py +1 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/functions.pyi +1 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/functions.py +1 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/requires.txt +4 -3
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/common_fixtures.py +2 -1
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/conftest.py +1 -1
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +10 -10
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_int_functions.py +46 -155
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/databricks/test_activate.py +1 -3
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_functions.py +5 -5
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/CODEOWNERS +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.gitignore +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/.readthedocs.yaml +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/LICENSE +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/Makefile +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/README.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/cake.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/bigquery.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/configuration.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/databricks.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/docs/postgres.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/duckdb.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/SF.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/favicon.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/index.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/postgres.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/redshift.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/requirements.txt +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/snowflake.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/spark.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/standalone.md +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/mkdocs.yml +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/pytest.ini +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/renovate.json +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/setup.cfg +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/LICENSE +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/util.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/base/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/conftest.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.9.3 → sqlframe-3.10.0}/tests/unit/test_util.py +0 -0
@@ -20,7 +20,7 @@ setup(
     python_requires=">=3.8",
     install_requires=[
         "prettytable<3.12.1",
-        "sqlglot>=24.0.0,<
+        "sqlglot>=24.0.0,<26.1",
         "typing_extensions>=4.8,<5",
     ],
     extras_require={
@@ -32,13 +32,14 @@ setup(
            "duckdb>=0.9,<1.2",
            "findspark>=2,<3",
            "mypy>=1.10.0,<1.14",
-           "openai>=1.30,<1.
+           "openai>=1.30,<1.58",
            "pandas>=2,<3",
            "pandas-stubs>=2,<3",
            "psycopg>=3.1,<4",
            "pyarrow>=10,<19",
            "pyspark>=2,<3.6",
            "pytest>=8.2.0,<8.4",
+           "pytest-forked",
            "pytest-postgresql>=6,<7",
            "pytest-xdist>=3.6,<3.7",
            "pre-commit>=3.5;python_version=='3.8'",
@@ -58,7 +59,7 @@ setup(
            "pandas>=2,<3",
        ],
        "openai": [
-           "openai>=1.30,<1.
+           "openai>=1.30,<1.58",
        ],
        "pandas": [
            "pandas>=2,<3",
@@ -64,6 +64,39 @@ def first_always_ignore_nulls(col: ColumnOrName, ignorenulls: t.Optional[bool] =
     return first(col)


+def to_timestamp_with_time_zone(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("timestamp with time zone", dialect="postgres")
+
+
+def to_timestamp_tz(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("timestamptz", dialect="duckdb")
+
+
+def to_timestamp_just_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.session import _BaseSession
+
+    if format is not None:
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=_BaseSession().format_time(format)
+        )
+
+    return Column.ensure_col(col).cast("datetime", dialect="bigquery")
+
+
 def bitwise_not_from_bitnot(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "BITNOT")

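The three helpers above share the format branch of the base `to_timestamp` (a `StrToTime` expression built from the session's `format_time`) and differ only in the type they cast to when no format is supplied. A minimal sketch of how that plays out on one engine, assuming an in-memory DuckDB session and the usual sqlframe DataFrame API (illustration only, not code from the package):

```python
# Illustration only; assumes DuckDBSession() can be created with no arguments.
from sqlframe.duckdb import DuckDBSession
from sqlframe.base.function_alternatives import to_timestamp_tz

session = DuckDBSession()
df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])

# No format: the column is simply cast to a timezone-aware timestamp (TIMESTAMPTZ on DuckDB).
no_format = df.select(to_timestamp_tz(df.t).alias("dt"))
# With a format: same StrToTime path as the base to_timestamp implementation.
with_format = df.select(to_timestamp_tz(df.t, "yyyy-MM-dd HH:mm:ss").alias("dt"))

print(no_format.sql())
print(with_format.sql())
```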
@@ -900,7 +900,7 @@ def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
             col, expression.StrToTime, format=_BaseSession().format_time(format)
         )

-    return Column.ensure_col(col).cast("
+    return Column.ensure_col(col).cast("timestampltz")


 @meta()
@@ -570,6 +570,8 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN, UDF_REGISTRATION
             return cls._to_row(list(value.keys()), list(value.values()))
         elif isinstance(value, (list, set, tuple)) and value:
             return [cls._to_value(x) for x in value]
+        elif isinstance(value, datetime.datetime):
+            return value.replace(tzinfo=None)
         return value

     @classmethod
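The new branch means timezone-aware datetimes coming back from an engine are normalized to naive values before they are packed into result rows. A one-line illustration of what that `replace(tzinfo=None)` does (not from the package, just stdlib behaviour):

```python
import datetime

aware = datetime.datetime(2022, 1, 1, 1, 1, 1, tzinfo=datetime.timezone.utc)
# The tzinfo is dropped; the wall-clock fields are kept unchanged.
assert aware.replace(tzinfo=None) == datetime.datetime(2022, 1, 1, 1, 1, 1)
```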
@@ -29,6 +29,7 @@ from sqlframe.base.function_alternatives import ( # noqa
     try_element_at_zero_based as try_element_at,
     to_unix_timestamp_include_default_format as to_unix_timestamp,
     regexp_replace_global_option as regexp_replace,
+    to_timestamp_tz as to_timestamp,
 )
 from sqlframe.base.functions import (
     abs as abs,
@@ -1,5 +1,5 @@
 prettytable<3.12.1
-sqlglot<
+sqlglot<26.1,>=24.0.0
 typing_extensions<5,>=4.8

 [bigquery]
@@ -13,12 +13,13 @@ databricks-sql-connector<4,>=3.6
 duckdb<1.2,>=0.9
 findspark<3,>=2
 mypy<1.14,>=1.10.0
-openai<1.
+openai<1.58,>=1.30
 pandas-stubs<3,>=2
 pandas<3,>=2
 psycopg<4,>=3.1
 pyarrow<19,>=10
 pyspark<3.6,>=2
+pytest-forked
 pytest-postgresql<7,>=6
 pytest-xdist<3.7,>=3.6
 pytest<8.4,>=8.2.0
@@ -43,7 +44,7 @@ duckdb<1.2,>=0.9
 pandas<3,>=2

 [openai]
-openai<1.
+openai<1.58,>=1.30

 [pandas]
 pandas<3,>=2
@@ -92,7 +92,7 @@ def pyspark_session(tmp_path_factory, gen_tpcds: t.List[Path]) -> PySparkSession
         .config("spark.sql.warehouse.dir", data_dir)
         .config("spark.driver.extraJavaOptions", f"-Dderby.system.home={derby_dir}")
         .config("spark.sql.shuffle.partitions", 1)
-        .config("spark.sql.session.timeZone", "
+        .config("spark.sql.session.timeZone", "UTC")
         .master("local[1]")
         .appName("Unit-tests")
         .getOrCreate()
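Pinning `spark.sql.session.timeZone` to UTC is what makes the timestamp expectations later in the test suite engine-independent. For reference, the same setting outside the fixture, using the standard PySpark API (illustration, not part of the diff):

```python
from pyspark.sql import SparkSession

# Build a local session whose timestamp functions are evaluated in UTC.
spark = (
    SparkSession.builder.config("spark.sql.session.timeZone", "UTC")
    .master("local[1]")
    .appName("tz-demo")
    .getOrCreate()
)
print(spark.conf.get("spark.sql.session.timeZone"))  # "UTC"
```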
@@ -225,6 +225,7 @@ def snowflake_connection() -> SnowflakeConnection:
 @pytest.fixture
 def snowflake_session(snowflake_connection: SnowflakeConnection) -> SnowflakeSession:
     session = SnowflakeSession(snowflake_connection)
+    session._execute("ALTER SESSION SET TIMEZONE = 'UTC'")
     session._execute("CREATE SCHEMA IF NOT EXISTS db1")
     session._execute("CREATE TABLE IF NOT EXISTS db1.table1 (id INTEGER, name VARCHAR(100))")
     session._execute(
{sqlframe-3.9.3 → sqlframe-3.10.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py RENAMED
@@ -54,8 +54,8 @@ def test_print_schema_basic(snowflake_employee: SnowflakeDataFrame, capsys):
         == """
 root
  |-- employee_id: decimal(38, 0) (nullable = true)
- |-- fname:
- |-- lname:
+ |-- fname: varchar(16777216) (nullable = true)
+ |-- lname: varchar(16777216) (nullable = true)
  |-- age: decimal(38, 0) (nullable = true)
  |-- store_id: decimal(38, 0) (nullable = true)""".strip()
     )
@@ -70,9 +70,9 @@ def test_print_schema_nested(snowflake_datatypes: SnowflakeDataFrame, capsys):
 root
  |-- bigint_col: decimal(38, 0) (nullable = true)
  |-- double_col: float (nullable = true)
- |-- string_col:
- |-- map_string_bigint__col: map<
- | |-- key:
+ |-- string_col: varchar(16777216) (nullable = true)
+ |-- map_string_bigint__col: map<varchar(16777216), decimal(38, 0)> (nullable = true)
+ | |-- key: varchar(16777216) (nullable = true)
  | |-- value: decimal(38, 0) (nullable = true)
  |-- array_struct_a_bigint_b_bigint__: array<object<a decimal(38, 0), b decimal(38, 0)>> (nullable = true)
  | |-- element: object<a decimal(38, 0), b decimal(38, 0)> (nullable = true)
@@ -83,7 +83,7 @@ root
  |-- struct_a_bigint__col: object<a decimal(38, 0)> (nullable = true)
  | |-- a: decimal(38, 0) (nullable = true)
  |-- date_col: date (nullable = true)
- |-- timestamp_col:
+ |-- timestamp_col: timestamp (nullable = true)
  |-- timestamptz_col: timestamp (nullable = true)
  |-- boolean_col: boolean (nullable = true)""".strip()
     )
@@ -96,9 +96,9 @@ def test_schema(snowflake_employee: SnowflakeDataFrame):
     assert struct_fields[0].name == "employee_id"
     assert struct_fields[0].dataType == types.DecimalType(38, 0)
     assert struct_fields[1].name == "fname"
-    assert struct_fields[1].dataType == types.
+    assert struct_fields[1].dataType == types.VarcharType(16777216)
     assert struct_fields[2].name == "lname"
-    assert struct_fields[2].dataType == types.
+    assert struct_fields[2].dataType == types.VarcharType(16777216)
     assert struct_fields[3].name == "age"
     assert struct_fields[3].dataType == types.DecimalType(38, 0)
     assert struct_fields[4].name == "store_id"
@@ -114,10 +114,10 @@ def test_schema_nested(snowflake_datatypes: SnowflakeDataFrame):
     assert struct_fields[1].name == "double_col"
     assert struct_fields[1].dataType == types.FloatType()
     assert struct_fields[2].name == "string_col"
-    assert struct_fields[2].dataType == types.
+    assert struct_fields[2].dataType == types.VarcharType(16777216)
     assert struct_fields[3].name == "map_string_bigint__col"
     assert struct_fields[3].dataType == types.MapType(
-        types.
+        types.VarcharType(16777216),
         types.DecimalType(38, 0),
     )
     assert struct_fields[4].name == "array_struct_a_bigint_b_bigint__"
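The updated assertions reflect that Snowflake string columns are now reported as `VARCHAR(16777216)` (Snowflake's maximum length) and surface as `types.VarcharType(16777216)` rather than a plain string type. A small helper one might write against that schema shape (illustration; the exact import path and the existence of a configured session are assumptions):

```python
from sqlframe.base import types  # assumed location of sqlframe's Spark-style type classes


def varchar_columns(df):
    # Column names whose reported type is a VARCHAR of any length.
    return [f.name for f in df.schema.fields if isinstance(f.dataType, types.VarcharType)]
```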
@@ -116,7 +116,7 @@ def get_types() -> t.Callable:
         (datetime.datetime(2022, 1, 1, 1, 1, 1), datetime.datetime(2022, 1, 1, 1, 1, 1)),
         (
             datetime.datetime(2022, 1, 1, 1, 1, 1, tzinfo=datetime.timezone.utc),
-            datetime.datetime(2022, 1, 1, 1, 1, 1
+            datetime.datetime(2022, 1, 1, 1, 1, 1),
         ),
         ({"cola": 1}, {"cola": 1}),
         (Row(**{"cola": 1, "colb": "test"}), Row(**{"cola": 1, "colb": "test"})),
@@ -134,20 +134,12 @@ def test_lit(get_session_and_func, arg, expected):
             pytest.skip("PySpark doesn't literal dict types")
         if isinstance(arg, Row):
             pytest.skip("PySpark doesn't support literal row types")
-        if isinstance(arg, datetime.datetime) and arg.tzinfo is not None:
-            pytest.skip("PySpark doesn't preserve timezone information in datetime literals")
     if isinstance(session, BigQuerySession):
         if isinstance(arg, dict):
             pytest.skip("BigQuery doesn't support map types")
-    if isinstance(session, SparkSession):
-        if isinstance(arg, datetime.datetime) and arg.tzinfo is not None:
-            pytest.skip("Spark doesn't preserve timezone information in datetime literals")
     if isinstance(session, SnowflakeSession):
         if isinstance(arg, Row):
             pytest.skip("Snowflake doesn't support literal row types")
-    if isinstance(session, DatabricksSession):
-        if isinstance(arg, datetime.datetime) and arg.tzinfo is None:
-            expected = expected.replace(tzinfo=datetime.timezone.utc)
     if isinstance(session, DuckDBSession):
         if isinstance(arg, dict):
             expected = Row(**expected)
@@ -181,7 +173,7 @@ def test_col(get_session_and_func, input, output):
         ([1, 2, 3], "array<bigint>"),
         (Row(a=1), "struct<a:bigint>"),
         (datetime.date(2022, 1, 1), "date"),
-        (datetime.datetime(2022, 1, 1, 0, 0, 0), "
+        (datetime.datetime(2022, 1, 1, 0, 0, 0), "timestamptz"),
         (datetime.datetime(2022, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc), "timestamptz"),
         (True, "boolean"),
         (bytes("test", "utf-8"), "binary"),
@@ -211,8 +203,6 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
         expected = expected.split("<")[0]
         if expected == "binary":
             pytest.skip("BigQuery doesn't support binary")
-        if expected == "timestamp":
-            expected = "datetime"
     if isinstance(session, PostgresSession):
         if expected.startswith("map"):
             pytest.skip("Postgres doesn't support map types")
@@ -229,8 +219,6 @@ def test_typeof(get_session_and_func, get_types, arg, expected):
             expected = "object"
         elif expected.startswith("array"):
             pytest.skip("Snowflake doesn't handle arrays properly in values clause")
-        elif expected == "timestamp":
-            expected = "timestampntz"
     result = df.select(typeof("col").alias("test")).first()[0]
     assert exp.DataType.build(result, dialect=dialect) == exp.DataType.build(
         expected, dialect=dialect
@@ -1250,23 +1238,17 @@ def test_current_date(get_session_and_func):
     session, current_date = get_session_and_func("current_date")
     df = session.range(1)
     # The current date can depend on how the connection is configured so we check for dates around today
-    assert df.select(current_date()).first()[0]
-        datetime.date.today() - datetime.timedelta(days=1),
-        datetime.date.today(),
-        datetime.date.today() + datetime.timedelta(days=1),
-    )
+    assert df.select(current_date()).first()[0] == datetime.date.today()


 def test_current_timestamp(get_session_and_func):
     session, current_timestamp = get_session_and_func("current_timestamp")
     df = session.range(1)
-
+    now = datetime.datetime.now(pytz.timezone("UTC")).replace(tzinfo=None)
     result = df.select(current_timestamp()).first()[0]
     assert isinstance(result, datetime.datetime)
-    assert result.
-
-        datetime.date.today(),
-        datetime.date.today() + datetime.timedelta(days=1),
+    assert result >= now - datetime.timedelta(minutes=1) and result <= now + datetime.timedelta(
+        minutes=1
     )


@@ -1441,32 +1423,9 @@ def test_to_timestamp(get_session_and_func):
     session, to_timestamp = get_session_and_func("to_timestamp")
     df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])
     result = df.select(to_timestamp(df.t).alias("dt")).first()[0]
-
-        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-    else:
-        assert result == datetime.datetime(1997, 2, 28, 10, 30)
+    assert result == datetime.datetime(1997, 2, 28, 10, 30)
     result = df.select(to_timestamp(df.t, "yyyy-MM-dd HH:mm:ss").alias("dt")).first()[0]
-
-        assert result == datetime.datetime(
-            1997,
-            2,
-            28,
-            10,
-            30,
-            tzinfo=datetime.timezone.utc if isinstance(session, BigQuerySession) else None,
-        )
-    elif isinstance(session, (PostgresSession, DatabricksSession)):
-        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-    elif isinstance(session, SnowflakeSession):
-        assert result == datetime.datetime(
-            1997,
-            2,
-            28,
-            10,
-            30,
-        )
-    else:
-        assert result == datetime.datetime(1997, 2, 28, 10, 30)
+    assert result == datetime.datetime(1997, 2, 28, 10, 30)


 def test_trunc(get_session_and_func):
@@ -1482,18 +1441,14 @@ def test_trunc(get_session_and_func):
 def test_date_trunc(get_session_and_func):
     session, date_trunc = get_session_and_func("date_trunc")
     df = session.createDataFrame([("1997-02-28 05:02:11",)], ["t"])
-    assert df.select(date_trunc("year", df.t).alias("year")).first()[0].
-        tzinfo=None
-    ) == datetime.datetime(
+    assert df.select(date_trunc("year", df.t).alias("year")).first()[0] == datetime.datetime(
         1997,
         1,
         1,
         0,
         0,
     )
-    assert df.select(date_trunc("month", df.t).alias("month")).first()[0].
-        tzinfo=None
-    ) == datetime.datetime(
+    assert df.select(date_trunc("month", df.t).alias("month")).first()[0] == datetime.datetime(
         1997,
         2,
         1,
@@ -1517,13 +1472,7 @@ def test_last_day(get_session_and_func):
 def test_from_unixtime(get_session_and_func):
     session, from_unixtime = get_session_and_func("from_unixtime")
     df = session.createDataFrame([(1428476400,)], ["unix_time"])
-
-        session,
-        (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-    ):
-        expected = "2015-04-08 07:00:00"
-    else:
-        expected = "2015-04-08 00:00:00"
+    expected = "2015-04-08 07:00:00"
     assert df.select(from_unixtime("unix_time").alias("ts")).first()[0] == expected


@@ -1531,47 +1480,35 @@ def test_unix_timestamp(get_session_and_func):
     session, unix_timestamp = get_session_and_func("unix_timestamp")
     df = session.createDataFrame([("2015-04-08",)], ["dt"])
     result = df.select(unix_timestamp("dt", "yyyy-MM-dd").alias("unix_time")).first()[0]
-
-        session,
-        (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-    ):
-        assert result == 1428451200
-    else:
-        assert result == 1428476400
+    assert result == 1428451200


 def test_from_utc_timestamp(get_session_and_func):
     session, from_utc_timestamp = get_session_and_func("from_utc_timestamp")
     df = session.createDataFrame([("1997-02-28 10:30:00", "JST")], ["ts", "tz"])
-    assert df.select(from_utc_timestamp(df.ts, "PST").alias("local_time")).first()[
-
-
-    assert df.select(from_utc_timestamp(df.ts, df.tz).alias("local_time")).first()[
-
-
+    assert df.select(from_utc_timestamp(df.ts, "PST").alias("local_time")).first()[
+        0
+    ] == datetime.datetime(1997, 2, 28, 2, 30)
+    assert df.select(from_utc_timestamp(df.ts, df.tz).alias("local_time")).first()[
+        0
+    ] == datetime.datetime(1997, 2, 28, 19, 30)


 def test_to_utc_timestamp(get_session_and_func):
     session, to_utc_timestamp = get_session_and_func("to_utc_timestamp")
     df = session.createDataFrame([("1997-02-28 10:30:00", "JST")], ["ts", "tz"])
-    assert df.select(to_utc_timestamp(df.ts, "PST").alias("utc_time")).first()[
-
-
-    assert df.select(to_utc_timestamp(df.ts, df.tz).alias("utc_time")).first()[
-
-
+    assert df.select(to_utc_timestamp(df.ts, "PST").alias("utc_time")).first()[
+        0
+    ] == datetime.datetime(1997, 2, 28, 18, 30)
+    assert df.select(to_utc_timestamp(df.ts, df.tz).alias("utc_time")).first()[
+        0
+    ] == datetime.datetime(1997, 2, 28, 1, 30)


 def test_timestamp_seconds(get_session_and_func):
     session, timestamp_seconds = get_session_and_func("timestamp_seconds")
     df = session.createDataFrame([(1230219000,)], ["unix_time"])
-
-        session,
-        (BigQuerySession, DuckDBSession, PostgresSession, SnowflakeSession, DatabricksSession),
-    ):
-        expected = datetime.datetime(2008, 12, 25, 15, 30, 00)
-    else:
-        expected = datetime.datetime(2008, 12, 25, 7, 30)
+    expected = datetime.datetime(2008, 12, 25, 15, 30, 00)
     assert (
         df.select(timestamp_seconds(df.unix_time).alias("ts")).first()[0].replace(tzinfo=None)
         == expected
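The collapsed expectations above all follow from the UTC-pinned sessions: "2015-04-08" parses to midnight UTC, and the UTC-offset functions shift a naive UTC wall clock by the target zone's offset (PST is UTC-8 and JST is UTC+9 on these dates). A worked check of the constants, as a standalone illustration:

```python
import datetime

# unix_timestamp("2015-04-08", "yyyy-MM-dd") with a UTC session
midnight_utc = datetime.datetime(2015, 4, 8, tzinfo=datetime.timezone.utc)
assert int(midnight_utc.timestamp()) == 1428451200

# from_utc_timestamp shifts a UTC wall clock into the target zone
utc_wall = datetime.datetime(1997, 2, 28, 10, 30)
assert utc_wall + datetime.timedelta(hours=-8) == datetime.datetime(1997, 2, 28, 2, 30)   # PST
assert utc_wall + datetime.timedelta(hours=9) == datetime.datetime(1997, 2, 28, 19, 30)   # JST

# to_utc_timestamp does the reverse: local wall clock -> UTC
assert utc_wall - datetime.timedelta(hours=-8) == datetime.datetime(1997, 2, 28, 18, 30)  # PST
assert utc_wall - datetime.timedelta(hours=9) == datetime.datetime(1997, 2, 28, 1, 30)    # JST
```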
@@ -3571,16 +3508,7 @@ def test_convert_timezone(get_session_and_func, get_func):
     session, convert_timezone = get_session_and_func("convert_timezone")
     lit = get_func("lit", session)
     df = session.createDataFrame([("2015-04-08",)], ["dt"])
-
-        expected = pytz.timezone("US/Pacific").localize(datetime.datetime(2015, 4, 7, 9, 0))
-    elif isinstance(session, PostgresSession):
-        expected = datetime.datetime(2015, 4, 7, 16, 0, tzinfo=datetime.timezone.utc)
-    elif isinstance(session, SnowflakeSession):
-        expected = datetime.datetime(2015, 4, 8, 15, 0, tzinfo=pytz.FixedOffset(480))
-    elif isinstance(session, DatabricksSession):
-        expected = datetime.datetime(2015, 4, 8, 8, 0)
-    else:
-        expected = datetime.datetime(2015, 4, 8, 15, 0)
+    expected = datetime.datetime(2015, 4, 8, 8, 0)
     assert df.select(convert_timezone(None, lit("Asia/Hong_Kong"), "dt").alias("ts")).collect() == [
         Row(ts=expected)
     ]
@@ -3638,7 +3566,7 @@ def test_current_schema(get_session_and_func, get_func):


 def test_current_timezone(get_session_and_func, get_func):
     session, current_timezone = get_session_and_func("current_timezone")
-    assert session.range(1).select(current_timezone()).first()[0] == "
+    assert session.range(1).select(current_timezone()).first()[0] == "UTC"


 def test_date_from_unix_date(get_session_and_func, get_func):
@@ -4097,24 +4025,12 @@ def test_make_timestamp(get_session_and_func, get_func):
         [[2014, 12, 28, 6, 30, 45.887, "CET"]],
         ["year", "month", "day", "hour", "min", "sec", "timezone"],
     )
-
-
-
-
-
-
-        assert df.select(
-            make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
-        ).first()[0].replace(tzinfo=None) == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
-    else:
-        assert df.select(
-            make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone).alias(
-                "r"
-            )
-        ).first()[0] == datetime.datetime(2014, 12, 27, 21, 30, 45, 887000)
-        assert df.select(
-            make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
-        ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
+    assert df.select(
+        make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone).alias("r")
+    ).first()[0] == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
+    assert df.select(
+        make_timestamp(df.year, df.month, df.day, df.hour, df.min, df.sec).alias("r")
+    ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)


 def test_make_timestamp_ltz(get_session_and_func, get_func):
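The new expected value follows from the same UTC normalization: `make_timestamp(..., "CET")` interprets the fields as CET (UTC+1 on 2014-12-28), and the UTC-pinned session reports the corresponding UTC wall clock, 05:30:45.887. A quick check of that arithmetic (illustration only; `zoneinfo` needs Python 3.9+):

```python
import datetime
from zoneinfo import ZoneInfo

cet = datetime.datetime(2014, 12, 28, 6, 30, 45, 887000, tzinfo=ZoneInfo("CET"))
as_utc = cet.astimezone(datetime.timezone.utc).replace(tzinfo=None)
assert as_utc == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
```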
@@ -4125,7 +4041,7 @@ def test_make_timestamp_ltz(get_session_and_func, get_func):
     )
     assert df.select(
         make_timestamp_ltz(df.year, df.month, df.day, df.hour, df.min, df.sec, df.timezone)
-    ).first()[0] == datetime.datetime(2014, 12,
+    ).first()[0] == datetime.datetime(2014, 12, 28, 5, 30, 45, 887000)
     assert df.select(
         make_timestamp_ltz(df.year, df.month, df.day, df.hour, df.min, df.sec)
     ).first()[0] == datetime.datetime(2014, 12, 28, 6, 30, 45, 887000)
@@ -4826,27 +4742,17 @@ def test_substr(get_session_and_func, get_func):
 def test_timestamp_micros(get_session_and_func, get_func):
     session, timestamp_micros = get_session_and_func("timestamp_micros")
     time_df = session.createDataFrame([(1230219000,)], ["unix_time"])
-
-
-
-        ) == datetime.datetime(1970, 1, 1, 0, 20, 30, 219000)
-    else:
-        assert time_df.select(timestamp_micros(time_df.unix_time).alias("ts")).first()[
-            0
-        ] == datetime.datetime(1969, 12, 31, 16, 20, 30, 219000)
+    assert time_df.select(timestamp_micros(time_df.unix_time).alias("ts")).first()[
+        0
+    ] == datetime.datetime(1970, 1, 1, 0, 20, 30, 219000)


 def test_timestamp_millis(get_session_and_func, get_func):
     session, timestamp_millis = get_session_and_func("timestamp_millis")
     time_df = session.createDataFrame([(1230219000,)], ["unix_time"])
-
-
-
-        ) == datetime.datetime(1970, 1, 15, 5, 43, 39)
-    else:
-        assert time_df.select(timestamp_millis(time_df.unix_time).alias("ts")).first()[
-            0
-        ] == datetime.datetime(1970, 1, 14, 21, 43, 39)
+    assert time_df.select(timestamp_millis(time_df.unix_time).alias("ts")).first()[
+        0
+    ] == datetime.datetime(1970, 1, 15, 5, 43, 39)


 def test_to_char(get_session_and_func, get_func):
@@ -4901,7 +4807,7 @@ def test_to_unix_timestamp(get_session_and_func, get_func):
     if isinstance(session, (DuckDBSession, DatabricksSession)):
         assert result == 1460073600.0
     else:
-        assert result ==
+        assert result == 1460073600
     # DuckDB requires the value to match the format which the default format is "yyyy-MM-dd HH:mm:ss".
     # https://spark.apache.org/docs/latest/api/sql/#to_unix_timestamp
     if isinstance(session, DuckDBSession):
@@ -4992,15 +4898,9 @@ def test_try_to_timestamp(get_session_and_func, get_func):
     lit = get_func("lit", session)
     df = session.createDataFrame([("1997-02-28 10:30:00",)], ["t"])
     result = df.select(try_to_timestamp(df.t).alias("dt")).first()[0]
-
-        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-    else:
-        assert result == datetime.datetime(1997, 2, 28, 10, 30)
+    assert result == datetime.datetime(1997, 2, 28, 10, 30)
     result = df.select(try_to_timestamp(df.t, lit("yyyy-MM-dd HH:mm:ss")).alias("dt")).first()[0]
-
-        assert result == datetime.datetime(1997, 2, 28, 10, 30, tzinfo=datetime.timezone.utc)
-    else:
-        assert result == datetime.datetime(1997, 2, 28, 10, 30)
+    assert result == datetime.datetime(1997, 2, 28, 10, 30)


 def test_ucase(get_session_and_func, get_func):
@@ -5020,30 +4920,21 @@ def test_unix_micros(get_session_and_func, get_func):
     session, unix_micros = get_session_and_func("unix_micros")
     to_timestamp = get_func("to_timestamp", session)
     df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-
-        assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000000
-    else:
-        assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400000000
+    assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000000


 def test_unix_millis(get_session_and_func, get_func):
     session, unix_millis = get_session_and_func("unix_millis")
     to_timestamp = get_func("to_timestamp", session)
     df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-
-        assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000
-    else:
-        assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400000
+    assert df.select(unix_millis(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000


 def test_unix_seconds(get_session_and_func, get_func):
     session, unix_seconds = get_session_and_func("unix_seconds")
     to_timestamp = get_func("to_timestamp", session)
     df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
-
-        assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200
-    else:
-        assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437584400
+    assert df.select(unix_seconds(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200


 def test_url_decode(get_session_and_func, get_func):