sqlframe 3.18.0__tar.gz → 3.19.0__tar.gz
- {sqlframe-3.18.0 → sqlframe-3.19.0}/PKG-INFO +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/bigquery.md +2 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/duckdb.md +2 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/postgres.md +2 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/snowflake.md +2 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/setup.py +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/_version.py +2 -2
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/dataframe.py +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/functions.py +52 -9
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/session.py +8 -7
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/session.py +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe.egg-info/PKG-INFO +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe.egg-info/requires.txt +1 -1
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_dataframe.py +66 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_int_functions.py +18 -23
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/test_int_dataframe.py +49 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_dataframe.py +3 -3
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_functions.py +2 -2
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.github/CODEOWNERS +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.github/workflows/main.workflow.yaml +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.github/workflows/publish.workflow.yaml +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.gitignore +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.pre-commit-config.yaml +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/.readthedocs.yaml +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/LICENSE +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/Makefile +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/README.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/add_chatgpt_support.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/adding_ai_to_meal.jpeg +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/hype_train.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/marvin_paranoid_robot.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/nonsense_sql.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/openai_full_rewrite.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/openai_replacing_cte_names.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/sqlglot_optimized_code.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/add_chatgpt_support/sunny_shake_head_no.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/but_wait_theres_more.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/cake.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/images/you_get_pyspark_api.gif +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/blogs/sqlframe_universal_dataframe_api.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/configuration.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/databricks.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/bigquery.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/duckdb.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/images/SF.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/images/favicon.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/images/favicon_old.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/docs/postgres.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/images/SF.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/images/favicon.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/images/favicon_old.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/images/sqlframe_diagram.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/images/sqlframe_logo.png +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/index.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/redshift.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/requirements.txt +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/spark.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/standalone.md +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/docs/stylesheets/extra.css +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/mkdocs.yml +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/pytest.ini +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/renovate.json +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/setup.cfg +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/LICENSE +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/_typing.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/decorators.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/exceptions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/function_alternatives.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/mixins/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/mixins/catalog_mixins.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/mixins/dataframe_mixins.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/mixins/readwriter_mixins.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/mixins/table_mixins.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/normalize.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/operations.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/readerwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/transforms.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/util.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/base/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/bigquery/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/databricks/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/duckdb/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/postgres/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/redshift/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/snowflake/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/functions.pyi +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/spark/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/functions.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/group.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/readwriter.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/standalone/window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/testing/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe/testing/utils.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe.egg-info/SOURCES.txt +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe.egg-info/dependency_links.txt +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/sqlframe.egg-info/top_level.txt +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/common_fixtures.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/conftest.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee.csv +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee.json +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/.part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/_delta_log/.00000000000000000000.json.crc +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/_delta_log/00000000000000000000.json +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00000-e5965c7b-e58f-4d3c-ad56-002876814e3a-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00002-3fed7f18-370f-4b16-b232-504d6194eb52-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00004-143c5da1-d5ab-4706-8e84-0d2a324c6894-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00006-64f07e25-c30e-4075-acc6-b3c69c4ce80b-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00008-89ccad8d-df73-4ad5-8850-82ef3884db60-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_delta/part-00010-812b3382-8c7f-4c4e-9bcd-09ce8664f6e0-c000.snappy.parquet +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/employee_extra_line.csv +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/issue_219.csv +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds1.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds10.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds11.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds12.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds13.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds14.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds15.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds16.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds17.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds18.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds19.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds2.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds20.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds21.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds22.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds23.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds24.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds25.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds26.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds27.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds28.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds29.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds3.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds30.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds31.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds32.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds33.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds34.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds35.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds36.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds37.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds38.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds39.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds4.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds40.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds41.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds42.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds43.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds44.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds45.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds46.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds47.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds48.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds49.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds5.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds50.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds51.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds52.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds53.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds54.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds55.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds56.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds57.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds58.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds59.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds6.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds60.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds61.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds62.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds63.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds64.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds65.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds66.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds67.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds68.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds69.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds7.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds70.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds71.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds72.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds73.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds74.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds75.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds76.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds77.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds78.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds79.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds8.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds80.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds81.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds82.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds83.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds84.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds85.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds86.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds87.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds88.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds89.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds9.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds90.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds91.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds92.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds93.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds94.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds95.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds96.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds97.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds98.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/fixtures/tpcds/tpcds99.sql +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/bigquery/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/bigquery/test_bigquery_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/bigquery/test_bigquery_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/bigquery/test_bigquery_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/databricks/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/databricks/test_databricks_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/databricks/test_databricks_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/databricks/test_databricks_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_reader.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_duckdb_udf.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/duck/test_tpcds.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/postgres/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/postgres/test_postgres_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/postgres/test_postgres_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/postgres/test_postgres_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/postgres/test_postgres_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/redshift/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/redshift/test_redshift_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/redshift/test_redshift_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/snowflake/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/snowflake/test_snowflake_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/snowflake/test_snowflake_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/snowflake/test_snowflake_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/spark/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/spark/test_spark_catalog.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/spark/test_spark_dataframe.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_reader.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_table.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_engine_writer.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/engines/test_int_testing.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/fixtures.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/test_int_dataframe_stats.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/test_int_grouped_data.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/integration/test_int_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/bigquery/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/bigquery/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/conftest.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/databricks/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/databricks/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/duck/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/duck/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/postgres/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/postgres/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/redshift/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/redshift/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/snowflake/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/snowflake/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/spark/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/spark/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/__init__.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/fixtures.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_column.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_dataframe_writer.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_session.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_session_case_sensitivity.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_types.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/standalone/test_window.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/test_activate.py +0 -0
- {sqlframe-3.18.0 → sqlframe-3.19.0}/tests/unit/test_util.py +0 -0
docs/bigquery.md

@@ -312,6 +312,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [array_min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_min.html)
 * [array_position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_position.html)
 * [array_remove](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_remove.html)
+* [array_size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_size.html)
 * [array_sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_sort.html)
 * Arrays are not allowed to have None (NULL) values
 * [array_union](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_union.html)

@@ -505,6 +506,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
+* [to_timestamp_ntz](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp_ntz.html)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
docs/duckdb.md

@@ -278,6 +278,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [array_min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_min.html)
 * [array_position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_position.html)
 * [array_remove](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_remove.html)
+* [array_size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_size.html)
 * [array_sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_sort.html)
 * [array_union](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_union.html)
 * [arrays_overlap](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.arrays_overlap.html)

@@ -467,6 +468,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [toRadians](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.toRadians.html)
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
+* [to_timestamp_ntz](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp_ntz.html)
 * [to_unix_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_unix_timestamp.html)
 * The values must match the format string (null will not be returned if they do not)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
docs/postgres.md

@@ -289,6 +289,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [array_min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_min.html)
 * [array_position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_position.html)
 * [array_remove](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_remove.html)
+* [array_size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_size.html)
 * [arrays_overlap](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.arrays_overlap.html)
 * [asc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asc.html)
 * [asc_nulls_first](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.asc_nulls_first.html)

@@ -458,6 +459,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
+* [to_timestamp_ntz](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp_ntz.html)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
docs/snowflake.md

@@ -311,6 +311,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [array_min](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_min.html)
 * [array_position](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_position.html)
 * [array_remove](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_remove.html)
+* [array_size](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_size.html)
 * [array_sort](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_sort.html)
 * [array_union](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.array_union.html)
 * [arrays_overlap](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.arrays_overlap.html)

@@ -509,6 +510,7 @@ See something that you would like to see supported? [Open an issue](https://gith
 * [to_date](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_date.html)
 * [to_number](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_number.html)
 * [to_timestamp](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html)
+* [to_timestamp_ntz](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp_ntz.html)
 * [translate](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.translate.html)
 * [trim](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trim.html)
 * [trunc](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.trunc.html)
sqlframe/base/dataframe.py

@@ -1066,7 +1066,7 @@ class BaseDataFrame(t.Generic[SESSION, WRITER, NA, STAT, GROUP_DATA]):
                         left_col.sql(dialect=self.session.input_dialect),
                         right_col.sql(dialect=self.session.input_dialect),
                     ).alias(left_col.alias_or_name)
-                    if
+                    if join_type == "full outer"
                     else left_col.alias_or_name
                     for left_col, right_col in join_column_pairs
                 ]
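This change restricts the coalesce-style aliasing of shared join keys to full outer joins, the one case where unmatched rows leave the key NULL on one side (issue 289, exercised by the new integration test further down). A minimal sketch of that scenario, assuming a locally available DuckDB-backed session:

# Minimal sketch (assumes DuckDB is installed locally); mirrors the
# no-matching-keys scenario from issue 289 covered by the new test.
from sqlframe.duckdb import DuckDBSession

session = DuckDBSession()

left = session.createDataFrame([(1, 100), (2, 101)], ["A", "col1"])
right = session.createDataFrame([(3, 102), (4, 103)], ["B", "col1"])

# With no matching keys, every row of a full outer join is unmatched, so the
# shared "col1" column has to be coalesced from both sides instead of being
# taken only from the left input (which previously yielded NULL keys).
joined = left.join(right, on="col1", how="outer")
joined.show()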
sqlframe/base/functions.py

@@ -8,6 +8,7 @@ import typing as t

 from sqlglot import Dialect
 from sqlglot import exp as expression
+from sqlglot.dialects.dialect import time_format
 from sqlglot.helper import ensure_list
 from sqlglot.helper import flatten as _flatten

@@ -2016,9 +2017,12 @@ def array_prepend(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
     return Column.invoke_anonymous_function(col, "ARRAY_PREPEND", value)


-@meta(
+@meta()
 def array_size(col: ColumnOrName) -> Column:
-
+    session = _get_session()
+    if session._is_spark or session._is_databricks:
+        return Column.invoke_anonymous_function(col, "ARRAY_SIZE")
+    return Column.invoke_expression_over_column(col, expression.ArraySize)


 @meta(unsupported_engines="*")
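With the decorator relaxed from an engine restriction to a plain `@meta()`, `array_size` now dispatches per engine: Spark and Databricks get the native `ARRAY_SIZE` call, while every other engine goes through sqlglot's `ArraySize` expression. A usage sketch, assuming a DuckDB-backed session (the data is illustrative):

# Usage sketch (assumes DuckDB locally); array_size should now compile to the
# engine's native array-length function instead of being unsupported.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame([([2, 1, 3],), (None,)], ["data"])

# On DuckDB this goes through sqlglot's ArraySize expression; the same call
# on Spark/Databricks emits ARRAY_SIZE directly.
df.select(F.array_size(df.data).alias("r")).show()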
@@ -6088,7 +6092,7 @@ def to_timestamp_ltz(
     return Column.invoke_anonymous_function(timestamp, "to_timestamp_ltz")


-@meta(
+@meta()
 def to_timestamp_ntz(
     timestamp: ColumnOrName,
     format: t.Optional[ColumnOrName] = None,
@@ -6118,6 +6122,32 @@ def to_timestamp_ntz(
     ... # doctest: +SKIP
     [Row(r=datetime.datetime(2016, 4, 8, 0, 0))]
     """
+    session = _get_session()
+
+    if session._is_duckdb:
+        to_timestamp_func = get_func_from_session("to_timestamp")
+        return to_timestamp_func(timestamp, format)
+
+    if session._is_bigquery:
+        if format is not None:
+            return Column.invoke_anonymous_function(
+                session.format_execution_time(format),  # type: ignore
+                "parse_datetime",
+                timestamp,
+            )
+        else:
+            return Column.ensure_col(timestamp).cast("datetime", dialect="bigquery")
+
+    if session._is_postgres:
+        if format is not None:
+            return Column.invoke_anonymous_function(
+                timestamp,
+                "to_timestamp",
+                session.format_execution_time(format),  # type: ignore
+            )
+        else:
+            return Column.ensure_col(timestamp).cast("timestamp", dialect="postgres")
+
     if format is not None:
         return Column.invoke_anonymous_function(timestamp, "to_timestamp_ntz", format)
     else:
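The new body routes `to_timestamp_ntz` per engine: DuckDB reuses the session's `to_timestamp`, BigQuery uses `parse_datetime` (or a `DATETIME` cast when no format is given), Postgres uses `to_timestamp` (or a `TIMESTAMP` cast), and other engines keep the native `to_timestamp_ntz` call. A usage sketch, assuming a DuckDB-backed session and that the function is exported from `sqlframe.duckdb.functions` as of this release:

# Usage sketch (assumptions noted in the lead-in); the same call routes to
# PARSE_DATETIME on BigQuery, TO_TIMESTAMP on Postgres, and native
# to_timestamp_ntz elsewhere.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame([("2016-04-08 12:00:00",)], ["ts"])
df.select(F.to_timestamp_ntz(df.ts).alias("r")).show()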
@@ -6442,12 +6472,25 @@ def unix_micros(col: ColumnOrName) -> Column:
     """
     from sqlframe.base.function_alternatives import unix_micros_multiply_epoch

-    if (
-
-
-
-
-
+    if _get_session()._is_duckdb:
+        return Column.invoke_anonymous_function(col, "epoch_us")
+
+    if _get_session()._is_bigquery:
+        return Column(
+            expression.Anonymous(
+                this="UNIX_MICROS",
+                expressions=[
+                    expression.Anonymous(
+                        this="TIMESTAMP",
+                        expressions=[
+                            Column.ensure_col(col).column_expression,
+                        ],
+                    )
+                ],
+            )
+        )
+
+    if _get_session()._is_postgres or _get_session()._is_snowflake:
         return unix_micros_multiply_epoch(col)

     return Column.invoke_anonymous_function(col, "unix_micros")
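`unix_micros` similarly gains engine-specific paths: `epoch_us` on DuckDB, `UNIX_MICROS(TIMESTAMP(...))` on BigQuery, the epoch-multiplication fallback on Postgres and Snowflake, and the native `unix_micros` call elsewhere. A usage sketch, assuming a DuckDB-backed session:

# Usage sketch (assumes DuckDB locally, where this compiles to EPOCH_US(...));
# on BigQuery the same call becomes UNIX_MICROS(TIMESTAMP(...)).
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
df.select(F.unix_micros(F.to_timestamp(df.t)).alias("n")).show()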
sqlframe/base/session.py

@@ -267,10 +267,6 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
         else:
             column_mapping = {}

-        column_mapping = {
-            normalize_identifiers(k, self.input_dialect).sql(dialect=self.input_dialect): v
-            for k, v in column_mapping.items()
-        }
         empty_df = not data
         rows = [[None] * len(column_mapping)] if empty_df else list(data)  # type: ignore

@@ -327,7 +323,6 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
                 if isinstance(sample_row, Row):
                     sample_row = sample_row.asDict()
                 if isinstance(sample_row, dict):
-                    sample_row = normalize_dict(self, sample_row)
                     default_data_type = get_default_data_type(sample_row[name])
                     updated_mapping[name] = (
                         exp.DataType.build(default_data_type, dialect="spark")
@@ -387,7 +382,11 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
         sel_expression = exp.Select(**select_kwargs)
         if empty_df:
             sel_expression = sel_expression.where(exp.false())
-
+        df = self._create_df(sel_expression)
+        df._update_display_name_mapping(
+            df._ensure_and_normalize_cols(list(column_mapping.keys())), list(column_mapping.keys())
+        )
+        return df

     def sql(
         self,
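Taken together, dropping the identifier normalization in `createDataFrame` and registering a display-name mapping on the new DataFrame means user-supplied column names keep their original casing and spacing when displayed (issue 294, covered by the new `test_show_from_create_*` tests below). A sketch of the expected behavior, assuming a DuckDB-backed session:

# Behavior sketch (assumes DuckDB locally): column names passed to
# createDataFrame keep their original casing/spacing when shown, instead of
# being normalized to the dialect's canonical form.
from sqlframe.duckdb import DuckDBSession

session = DuckDBSession()
df = session.createDataFrame([(1, 4), (2, 5)], schema=["foo", "BAR"])
df.show()  # header reads "| foo | BAR |" rather than a lower-cased "bar"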
@@ -526,7 +525,9 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, TABLE, CONN, UDF_REGIS
             col_id._meta = {"case_sensitive": True, **(col_id._meta or {})}
             case_sensitive_cols.append(col_id)
         columns = [
-            normalize_string(
+            normalize_string(
+                x, from_dialect="execution", to_dialect="output", to_string_literal=True
+            )
             for x in case_sensitive_cols
         ]
         return [self._to_row(columns, row) for row in result]
sqlframe/spark/session.py

@@ -86,7 +86,7 @@ class SparkSession(
             col_id = exp.parse_identifier(k, dialect=self.execution_dialect)
             col_id._meta = {"case_sensitive": True, **(col_id._meta or {})}
             col_name = normalize_string(
-                col_id, from_dialect="execution", to_dialect="output",
+                col_id, from_dialect="execution", to_dialect="output", to_string_literal=True
             )
             rows_normalized[col_name] = v
         results.append(Row(**rows_normalized))
tests/integration/engines/test_engine_dataframe.py

@@ -2,6 +2,7 @@ from __future__ import annotations

 import typing as t

+from sqlframe.base.session import _BaseSession
 from sqlframe.base.types import Row
 from sqlframe.snowflake import SnowflakeSession
 from sqlframe.spark import SparkSession
@@ -88,3 +89,68 @@ def test_show_limit(
 | 1           | Jack  | Shephard | 37  | 1        |
 +-------------+-------+----------+-----+----------+\n"""
     )
+
+
+# https://github.com/eakmanrq/sqlframe/issues/294
+def test_show_from_create_version_1(get_session: t.Callable[[], _BaseSession], capsys):
+    session = get_session()
+    df = session.createDataFrame([(1, 4), (2, 5), (3, 6)], schema=["foo", "BAR"])
+    df.show()
+    captured = capsys.readouterr()
+    assert (
+        captured.out.strip()
+        == """
++-----+-----+
+| foo | BAR |
++-----+-----+
+| 1   | 4   |
+| 2   | 5   |
+| 3   | 6   |
++-----+-----+
+""".strip()
+    )
+
+
+# https://github.com/eakmanrq/sqlframe/issues/294
+def test_show_from_create_version_2(get_session: t.Callable[[], _BaseSession], capsys):
+    session = get_session()
+    df = session.createDataFrame(
+        [
+            {"a": 1, "BAR": 1},
+            {"a": 1, "BAR": 2},
+        ]
+    )
+    df.show()
+    captured = capsys.readouterr()
+    assert (
+        captured.out.strip()
+        == """
++---+-----+
+| a | BAR |
++---+-----+
+| 1 | 1   |
+| 1 | 2   |
++---+-----+
+""".strip()
+    )
+
+
+def test_show_from_create_with_space(get_session: t.Callable[[], _BaseSession], capsys):
+    session = get_session()
+    df = session.createDataFrame(
+        [
+            {"zor ro": 1},
+        ]
+    )
+    df.show()
+    captured = capsys.readouterr()
+    assert (
+        captured.out.strip()
+        == """
++--------+
+| zor ro |
++--------+
+| 1      |
++--------+
+""".strip()
+    )
tests/integration/engines/test_int_functions.py

@@ -150,13 +150,11 @@ def test_lit(get_session_and_func, arg, expected):
     "input, output",
     [
         ("employee_id", "employee_id"),
-        ("employee id", "
+        ("employee id", "employee id"),
     ],
 )
 def test_col(get_session_and_func, input, output):
     session, col = get_session_and_func("col")
-    if isinstance(session, PySparkSession):
-        output = output.replace("`", "")
     df = session.createDataFrame([(1,)], schema=[input])
     result = df.select(col(input)).first()
     assert result[0] == 1
@@ -230,20 +228,7 @@ def test_alias(get_session_and_func):
     df = session.createDataFrame([(1,)], schema=["employee_id"])
     assert df.select(col("employee_id").alias("test")).first().__fields__[0] == "test"
     space_result = df.select(col("employee_id").alias("A Space In New Name")).first().__fields__[0]
-
-        session,
-        (
-            DuckDBSession,
-            BigQuerySession,
-            PostgresSession,
-            SnowflakeSession,
-            SparkSession,
-            DatabricksSession,
-        ),
-    ):
-        assert space_result == "`A Space In New Name`"
-    else:
-        assert space_result == "A Space In New Name"
+    assert space_result == "A Space In New Name"


 def test_asc(get_session_and_func):
@@ -2072,13 +2057,20 @@ def test_array_prepend(get_session_and_func):
     ]


-def test_array_size(get_session_and_func):
+def test_array_size(get_session_and_func, get_func):
     session, array_size = get_session_and_func("array_size")
-
-
-
-
-
+    # Snowflake doesn't support arrays in VALUES so we need to do it in select
+    if isinstance(session, SnowflakeSession):
+        lit = get_func("lit", session)
+        assert session.range(1).select(
+            array_size(lit(["a", "b", "c"])), array_size(lit(None))
+        ).collect() == [Row(value=3, value2=None)]
+    else:
+        df = session.createDataFrame([([2, 1, 3],), (None,)], ["data"])
+        assert df.select(array_size(df.data).alias("r")).collect() == [
+            Row(r=3),
+            Row(r=None),
+        ]


 def test_create_map(get_session_and_func, get_func):
@@ -4923,6 +4915,9 @@ def test_unix_micros(get_session_and_func, get_func):
     to_timestamp = get_func("to_timestamp", session)
     df = session.createDataFrame([("2015-07-22 10:00:00",)], ["t"])
     assert df.select(unix_micros(to_timestamp(df.t)).alias("n")).first()[0] == 1437559200000000
+    if not isinstance(session, SnowflakeSession):
+        df = session.createDataFrame([(datetime.datetime(2021, 3, 1, 12, 34, 56, 49000),)], ["t"])
+        assert df.select(unix_micros(df.t).alias("n")).first()[0] == 1614602096049000


 def test_unix_millis(get_session_and_func, get_func):
tests/integration/test_int_dataframe.py

@@ -2451,3 +2451,52 @@ def test_create_column_after_join(
     ).withColumn("new_col", SF.lit(1))

     compare_frames(df, dfs, compare_schema=False, sort=True)
+
+
+# https://github.com/eakmanrq/sqlframe/issues/289
+def test_full_outer_nulls_no_match(
+    pyspark_employee: PySparkDataFrame,
+    get_df: t.Callable[[str], BaseDataFrame],
+    compare_frames: t.Callable,
+):
+    spark = pyspark_employee._session
+
+    df_concept_1 = spark.createDataFrame(
+        [
+            (1, 100),
+            (2, 101),
+        ],
+        ["A", "col1"],
+    )
+
+    df_concept_2 = spark.createDataFrame(
+        [
+            (3, 102),
+            (4, 103),
+        ],
+        ["B", "col1"],
+    )
+
+    df = df_concept_1.join(df_concept_2, on="col1", how="outer")
+
+    session = get_df("employee").session
+
+    dfs_concept_1 = session.createDataFrame(
+        [
+            (1, 100),
+            (2, 101),
+        ],
+        ["A", "col1"],
+    )
+
+    dfs_concept_2 = session.createDataFrame(
+        [
+            (3, 102),
+            (4, 103),
+        ],
+        ["B", "col1"],
+    )
+
+    dfs = dfs_concept_1.join(dfs_concept_2, on="col1", how="outer")
+
+    compare_frames(df, dfs, compare_schema=False)
tests/unit/standalone/test_dataframe.py

@@ -128,14 +128,14 @@ def test_missing_method(standalone_employee: StandaloneDataFrame):
 def test_expand_star(standalone_employee: StandaloneDataFrame):
     assert (
         standalone_employee.select("*").sql(pretty=False, optimize=False)
-        == "WITH `t51718876` AS (SELECT CAST(`employee_id` AS INT) AS `employee_id`, CAST(`fname` AS STRING) AS `fname`, CAST(`lname` AS STRING) AS `lname`, CAST(`age` AS INT) AS `age`, CAST(`store_id` AS INT) AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`)) SELECT `employee_id`, `fname`, `lname`, `age`, `store_id` FROM `t51718876`"
+        == "WITH `t51718876` AS (SELECT CAST(`employee_id` AS INT) AS `employee_id`, CAST(`fname` AS STRING) AS `fname`, CAST(`lname` AS STRING) AS `lname`, CAST(`age` AS INT) AS `age`, CAST(`store_id` AS INT) AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`)) SELECT `employee_id` AS `employee_id`, `fname` AS `fname`, `lname` AS `lname`, `age` AS `age`, `store_id` AS `store_id` FROM `t51718876`"
     )


 def test_expand_star_table_alias(standalone_employee: StandaloneDataFrame):
     assert (
         standalone_employee.alias("blah").select("blah.*").sql(pretty=False, optimize=False)
-        == "WITH `t51718876` AS (SELECT CAST(`employee_id` AS INT) AS `employee_id`, CAST(`fname` AS STRING) AS `fname`, CAST(`lname` AS STRING) AS `lname`, CAST(`age` AS INT) AS `age`, CAST(`store_id` AS INT) AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`)), `t37842204` AS (SELECT `employee_id`, `fname`, `lname`, `age`, `store_id` FROM `t51718876`) SELECT `t37842204`.`employee_id`, `t37842204`.`fname`, `t37842204`.`lname`, `t37842204`.`age`, `t37842204`.`store_id` FROM `t37842204`"
+        == "WITH `t51718876` AS (SELECT CAST(`employee_id` AS INT) AS `employee_id`, CAST(`fname` AS STRING) AS `fname`, CAST(`lname` AS STRING) AS `lname`, CAST(`age` AS INT) AS `age`, CAST(`store_id` AS INT) AS `store_id` FROM VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100) AS `a1`(`employee_id`, `fname`, `lname`, `age`, `store_id`)), `t37842204` AS (SELECT `employee_id`, `fname`, `lname`, `age`, `store_id` FROM `t51718876`) SELECT `t37842204`.`employee_id` AS `employee_id`, `t37842204`.`fname` AS `fname`, `t37842204`.`lname` AS `lname`, `t37842204`.`age` AS `age`, `t37842204`.`store_id` AS `store_id` FROM `t37842204`"
     )

@@ -154,5 +154,5 @@ def test_lineage(standalone_employee: StandaloneDataFrame):
 def test_unquoted_identifiers(standalone_employee: StandaloneDataFrame):
     assert (
         standalone_employee.sql(dialect="snowflake", pretty=False, quote_identifiers=False)
-        == "SELECT A1.EMPLOYEE_ID AS
+        == """SELECT A1.EMPLOYEE_ID AS "employee_id", CAST(A1.FNAME AS TEXT) AS "fname", CAST(A1.LNAME AS TEXT) AS "lname", A1.AGE AS "age", A1.STORE_ID AS "store_id" FROM (VALUES (1, 'Jack', 'Shephard', 37, 1), (2, 'John', 'Locke', 65, 1), (3, 'Kate', 'Austen', 37, 2), (4, 'Claire', 'Littleton', 27, 2), (5, 'Hugo', 'Reyes', 29, 100)) AS A1(EMPLOYEE_ID, FNAME, LNAME, AGE, STORE_ID)"""
     )
tests/unit/standalone/test_functions.py

@@ -2152,8 +2152,8 @@ def test_array_prepend(expression, expected):
 @pytest.mark.parametrize(
     "expression, expected",
     [
-        (SF.array_size("cola"), "
-        (SF.array_size(SF.col("cola")), "
+        (SF.array_size("cola"), "SIZE(cola)"),
+        (SF.array_size(SF.col("cola")), "SIZE(cola)"),
     ],
 )
 def test_array_size(expression, expected):