snowpark_connect-0.20.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of snowpark-connect might be problematic.
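Since a wheel is an ordinary zip archive, a file manifest like the one below can be reproduced locally with nothing but the standard library. A minimal sketch, assuming the wheel has already been downloaded to the current directory (the local path is the only assumption; the filename follows standard PEP 427 naming):

```python
import zipfile

# Path to the downloaded wheel; adjust to wherever the file actually lives.
WHEEL_PATH = "snowpark_connect-0.20.2-py3-none-any.whl"

with zipfile.ZipFile(WHEEL_PATH) as whl:
    for info in whl.infolist():
        # Print each archived file with its uncompressed size in bytes.
        print(f"{info.filename} ({info.file_size} bytes)")
```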
- snowflake/snowpark_connect/__init__.py +23 -0
- snowflake/snowpark_connect/analyze_plan/__init__.py +3 -0
- snowflake/snowpark_connect/analyze_plan/map_tree_string.py +38 -0
- snowflake/snowpark_connect/column_name_handler.py +735 -0
- snowflake/snowpark_connect/config.py +576 -0
- snowflake/snowpark_connect/constants.py +47 -0
- snowflake/snowpark_connect/control_server.py +52 -0
- snowflake/snowpark_connect/dataframe_name_handler.py +54 -0
- snowflake/snowpark_connect/date_time_format_mapping.py +399 -0
- snowflake/snowpark_connect/empty_dataframe.py +18 -0
- snowflake/snowpark_connect/error/__init__.py +11 -0
- snowflake/snowpark_connect/error/error_mapping.py +6174 -0
- snowflake/snowpark_connect/error/error_utils.py +321 -0
- snowflake/snowpark_connect/error/exceptions.py +24 -0
- snowflake/snowpark_connect/execute_plan/__init__.py +3 -0
- snowflake/snowpark_connect/execute_plan/map_execution_command.py +204 -0
- snowflake/snowpark_connect/execute_plan/map_execution_root.py +173 -0
- snowflake/snowpark_connect/execute_plan/utils.py +183 -0
- snowflake/snowpark_connect/expression/__init__.py +3 -0
- snowflake/snowpark_connect/expression/literal.py +90 -0
- snowflake/snowpark_connect/expression/map_cast.py +343 -0
- snowflake/snowpark_connect/expression/map_expression.py +293 -0
- snowflake/snowpark_connect/expression/map_extension.py +104 -0
- snowflake/snowpark_connect/expression/map_sql_expression.py +633 -0
- snowflake/snowpark_connect/expression/map_udf.py +142 -0
- snowflake/snowpark_connect/expression/map_unresolved_attribute.py +241 -0
- snowflake/snowpark_connect/expression/map_unresolved_extract_value.py +85 -0
- snowflake/snowpark_connect/expression/map_unresolved_function.py +9450 -0
- snowflake/snowpark_connect/expression/map_unresolved_star.py +218 -0
- snowflake/snowpark_connect/expression/map_update_fields.py +164 -0
- snowflake/snowpark_connect/expression/map_window_function.py +258 -0
- snowflake/snowpark_connect/expression/typer.py +125 -0
- snowflake/snowpark_connect/includes/__init__.py +0 -0
- snowflake/snowpark_connect/includes/jars/antlr4-runtime-4.9.3.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-cli-1.5.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-codec-1.16.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-collections-3.2.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-collections4-4.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-compiler-3.1.9.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-compress-1.26.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-crypto-1.1.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-dbcp-1.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-io-2.16.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-lang-2.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-lang3-3.12.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-logging-1.1.3.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-math3-3.6.1.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-pool-1.5.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/commons-text-1.10.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/hadoop-client-api-3.3.4.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-annotations-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-core-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-core-asl-1.9.13.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-databind-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-dataformat-yaml-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-datatype-jsr310-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-mapper-asl-1.9.13.jar +0 -0
- snowflake/snowpark_connect/includes/jars/jackson-module-scala_2.12-2.15.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-ast_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-core_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-jackson_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/json4s-scalap_2.12-3.7.0-M11.jar +0 -0
- snowflake/snowpark_connect/includes/jars/kryo-shaded-4.0.2.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-1.2-api-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-api-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-core-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/log4j-slf4j2-impl-2.20.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/paranamer-2.8.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-collection-compat_2.12-2.7.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-compiler-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-library-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-parser-combinators_2.12-2.3.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-reflect-2.12.18.jar +0 -0
- snowflake/snowpark_connect/includes/jars/scala-xml_2.12-2.1.0.jar +0 -0
- snowflake/snowpark_connect/includes/jars/slf4j-api-2.0.7.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-catalyst_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-common-utils_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-core_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-graphx_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-hive-thriftserver_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-hive_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-kubernetes_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-kvstore_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-launcher_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mesos_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mllib-local_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-mllib_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-network-common_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-network-shuffle_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-repl_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sketch_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sql-api_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-sql_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-streaming_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-tags_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-unsafe_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/jars/spark-yarn_2.12-3.5.6.jar +0 -0
- snowflake/snowpark_connect/includes/python/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/__init__.py +173 -0
- snowflake/snowpark_connect/includes/python/pyspark/_globals.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/_typing.pyi +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/accumulators.py +341 -0
- snowflake/snowpark_connect/includes/python/pyspark/broadcast.py +383 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/__init__.py +8 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/cloudpickle.py +948 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/cloudpickle_fast.py +844 -0
- snowflake/snowpark_connect/includes/python/pyspark/cloudpickle/compat.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/conf.py +276 -0
- snowflake/snowpark_connect/includes/python/pyspark/context.py +2601 -0
- snowflake/snowpark_connect/includes/python/pyspark/daemon.py +218 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/__init__.py +70 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/error_classes.py +889 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/base.py +228 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/captured.py +307 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/exceptions/connect.py +190 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/tests/test_errors.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/errors/utils.py +116 -0
- snowflake/snowpark_connect/includes/python/pyspark/files.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/find_spark_home.py +95 -0
- snowflake/snowpark_connect/includes/python/pyspark/install.py +203 -0
- snowflake/snowpark_connect/includes/python/pyspark/instrumentation_utils.py +190 -0
- snowflake/snowpark_connect/includes/python/pyspark/java_gateway.py +248 -0
- snowflake/snowpark_connect/includes/python/pyspark/join.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/__init__.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/_typing.pyi +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/base.py +414 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/classification.py +4332 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/clustering.py +2188 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/common.py +146 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/__init__.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/base.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/classification.py +382 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/evaluation.py +291 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/feature.py +258 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/functions.py +77 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/io_utils.py +335 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/pipeline.py +262 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/summarizer.py +120 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/tuning.py +579 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/connect/util.py +173 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/deepspeed_distributor.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/deepspeed/tests/test_deepspeed_distributor.py +306 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/dl_util.py +150 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/evaluation.py +1166 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/feature.py +7474 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/fpm.py +543 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/functions.py +842 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/image.py +271 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/linalg/__init__.py +1382 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/model_cache.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/__init__.py +602 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/_shared_params_code_gen.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/param/shared.py +878 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/pipeline.py +451 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/recommendation.py +748 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/regression.py +3335 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/stat.py +523 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_classification.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_evaluation.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_feature.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_function.py +114 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_pipeline.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_summarizer.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_connect_tuning.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_classification.py +238 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_evaluation.py +194 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_feature.py +156 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_pipeline.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_summarizer.py +78 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_legacy_mode_tuning.py +292 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_data_loader.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/connect/test_parity_torch_distributor.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_algorithms.py +456 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_base.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_dl_util.py +186 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_evaluation.py +77 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_feature.py +401 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_functions.py +528 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_image.py +82 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_linalg.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_model_cache.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_param.py +441 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_persistence.py +546 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_pipeline.py +71 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_stat.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_training_summary.py +494 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_util.py +85 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/test_wrapper.py +138 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_basic.py +151 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_nested.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_cv_io_pipeline.py +143 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tuning.py +551 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_basic.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_nested.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tests/tuning/test_tvs_io_pipeline.py +142 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/data.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/distributor.py +1133 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/log_communication.py +198 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_data_loader.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_distributor.py +561 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/tests/test_log_communication.py +172 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/torch/torch_run_process_wrapper.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tree.py +434 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/tuning.py +1741 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/util.py +749 -0
- snowflake/snowpark_connect/includes/python/pyspark/ml/wrapper.py +465 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/__init__.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/_typing.pyi +33 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/classification.py +989 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/clustering.py +1318 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/common.py +174 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/evaluation.py +691 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/feature.py +1085 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/fpm.py +233 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/linalg/__init__.py +1653 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/linalg/distributed.py +1662 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/random.py +698 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/recommendation.py +389 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/regression.py +1067 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/KernelDensity.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/__init__.py +34 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/_statistics.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/distribution.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/stat/test.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_algorithms.py +353 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_feature.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_linalg.py +680 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_stat.py +206 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_streaming_algorithms.py +471 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tests/test_util.py +108 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/tree.py +888 -0
- snowflake/snowpark_connect/includes/python/pyspark/mllib/util.py +659 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/__init__.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/_typing.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/accessors.py +989 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/base.py +1804 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/categorical.py +822 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/config.py +539 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/correlation.py +262 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/base.py +519 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/binary_ops.py +98 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/boolean_ops.py +426 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/categorical_ops.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/complex_ops.py +145 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/date_ops.py +127 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/datetime_ops.py +171 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/null_ops.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/num_ops.py +588 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/string_ops.py +154 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/timedelta_ops.py +101 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/data_type_ops/udt_ops.py +29 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/datetimes.py +891 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/exceptions.py +150 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/extensions.py +388 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/frame.py +13738 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/generic.py +3560 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/groupby.py +4448 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/base.py +2783 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/category.py +773 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/datetimes.py +843 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/multi.py +1323 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/numeric.py +210 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexes/timedelta.py +197 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/indexing.py +1862 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/internal.py +1680 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/__init__.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/common.py +76 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/frame.py +63 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/general_functions.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/groupby.py +93 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/indexes.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/resample.py +101 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/scalars.py +29 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/series.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/missing/window.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/mlflow.py +238 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/namespace.py +3807 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/numpy_compat.py +260 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/__init__.py +17 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/core.py +1213 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/matplotlib.py +928 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/plot/plotly.py +261 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/resample.py +816 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/series.py +7440 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/sql_formatter.py +308 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/sql_processor.py +394 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/strings.py +2371 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/supported_api_gen.py +378 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_any_all.py +177 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_apply_func.py +575 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_binary_ops.py +235 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_combine.py +653 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_compute.py +463 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_corrwith.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cov.py +151 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_cumulative.py +139 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_describe.py +458 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_eval.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_melt.py +202 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_missing_data.py +520 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/computation/test_pivot.py +361 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_any_all.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_apply_func.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_binary_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_combine.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_compute.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_corrwith.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cov.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_cumulative.py +90 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_describe.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_eval.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_melt.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_missing_data.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/computation/test_parity_pivot.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_base.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_binary_ops.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_boolean_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_categorical_ops.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_complex_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_date_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_datetime_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_null_ops.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_arithmetic.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_num_reverse.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_string_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_timedelta_ops.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/test_parity_udt_ops.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/data_type_ops/testing_utils.py +226 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_align.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_basic_slow.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_cov_corrwith.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_frame.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_dot_series.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_index.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_series.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_frame.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/diff_frames_ops/test_parity_setitem_series.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_attrs.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_constructor.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_conversion.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reindexing.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_reshaping.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_spark.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_take.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_time_series.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/frame/test_parity_truncate.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_aggregate.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_apply_func.py +41 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_cumulative.py +67 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_describe.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_groupby.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_head_tail.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_index.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_missing_data.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_split_apply.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/groupby/test_parity_stat.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_align.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_base.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_category.py +73 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_datetime.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_indexing.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reindex.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_rename.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_reset_index.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/indexes/test_parity_timedelta.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/io/test_parity_io.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_matplotlib.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_frame_plot_plotly.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_matplotlib.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/plot/test_parity_series_plot_plotly.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_all_any.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_arg_ops.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_of.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_as_type.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_compute.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_conversion.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_cumulative.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_index.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_missing_data.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_series.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_sort.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/series/test_parity_stat.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_categorical.py +66 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_config.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_csv.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_conversion.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_dataframe_spark_io.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_default_index.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ewm.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_expanding.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_extension.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_frame_spark.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_generic_functions.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexing.py +49 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_indexops_spark.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_internal.py +41 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_namespace.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_numpy_compat.py +60 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_expanding.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_ops_on_diff_frames_groupby_rolling.py +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_repr.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_resample.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_reshape.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_rolling.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_scalars.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_conversion.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_datetime.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_series_string.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_spark_functions.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_sql.py +43 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_stats.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_typedef.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_utils.py +37 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/connect/test_parity_window.py +39 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_base.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_binary_ops.py +224 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_boolean_ops.py +825 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_categorical_ops.py +562 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_complex_ops.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_date_ops.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_datetime_ops.py +260 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_null_ops.py +178 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_arithmetic.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_ops.py +497 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_num_reverse.py +140 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_string_ops.py +354 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_timedelta_ops.py +219 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/test_udt_ops.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/data_type_ops/testing_utils.py +228 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_align.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_basic_slow.py +198 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_cov_corrwith.py +181 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_frame.py +103 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_dot_series.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_index.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_series.py +136 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_frame.py +125 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/diff_frames_ops/test_setitem_series.py +217 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_attrs.py +384 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_constructor.py +598 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_conversion.py +73 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reindexing.py +869 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_reshaping.py +487 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_spark.py +309 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_take.py +156 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_time_series.py +149 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/frame/test_truncate.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_aggregate.py +311 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_apply_func.py +524 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_cumulative.py +419 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_describe.py +144 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_groupby.py +979 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_head_tail.py +234 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_index.py +206 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_missing_data.py +421 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_split_apply.py +187 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/groupby/test_stat.py +397 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_align.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_base.py +2743 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_category.py +484 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_datetime.py +276 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_indexing.py +432 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reindex.py +310 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_rename.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_reset_index.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/indexes/test_timedelta.py +128 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/io/test_io.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot.py +170 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_matplotlib.py +547 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_frame_plot_plotly.py +285 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot.py +106 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_matplotlib.py +409 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/plot/test_series_plot_plotly.py +247 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_all_any.py +105 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_arg_ops.py +197 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_of.py +137 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_as_type.py +227 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_compute.py +634 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_conversion.py +88 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_cumulative.py +139 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_index.py +475 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_missing_data.py +265 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_series.py +818 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_sort.py +162 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/series/test_stat.py +780 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_categorical.py +741 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_config.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_csv.py +453 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_conversion.py +281 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_dataframe_spark_io.py +487 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_default_index.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ewm.py +434 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_expanding.py +253 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_extension.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_frame_spark.py +162 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_generic_functions.py +234 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexing.py +1339 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_indexops_spark.py +82 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_internal.py +124 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_namespace.py +638 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_numpy_compat.py +200 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames.py +1355 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby.py +655 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_expanding.py +113 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_ops_on_diff_frames_groupby_rolling.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_repr.py +192 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_resample.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_reshape.py +495 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_rolling.py +263 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_scalars.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_conversion.py +85 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_datetime.py +364 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_series_string.py +362 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_spark_functions.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_sql.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_stats.py +581 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_typedef.py +447 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_utils.py +301 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/tests/test_window.py +465 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/typedef/typehints.py +874 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/usage_logging/__init__.py +143 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/usage_logging/usage_logger.py +132 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/utils.py +1063 -0
- snowflake/snowpark_connect/includes/python/pyspark/pandas/window.py +2702 -0
- snowflake/snowpark_connect/includes/python/pyspark/profiler.py +489 -0
- snowflake/snowpark_connect/includes/python/pyspark/py.typed +1 -0
- snowflake/snowpark_connect/includes/python/pyspark/python/pyspark/shell.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/rdd.py +5518 -0
- snowflake/snowpark_connect/includes/python/pyspark/rddsampler.py +115 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/__init__.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/information.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/profile.py +317 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/requests.py +539 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/resource/tests/test_resources.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/resultiterable.py +45 -0
- snowflake/snowpark_connect/includes/python/pyspark/serializers.py +681 -0
- snowflake/snowpark_connect/includes/python/pyspark/shell.py +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/shuffle.py +854 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/__init__.py +75 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/_typing.pyi +80 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/avro/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/avro/functions.py +188 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/catalog.py +1270 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/column.py +1431 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/conf.py +99 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/_typing.py +90 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/avro/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/avro/functions.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/catalog.py +356 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/artifact.py +412 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/core.py +1689 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/client/reattach.py +340 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/column.py +514 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/conf.py +128 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/conversion.py +490 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/dataframe.py +2172 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/expressions.py +1056 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/functions.py +3937 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/group.py +418 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/plan.py +2289 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/__init__.py +25 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2.py +203 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2.pyi +2718 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/base_pb2_grpc.py +423 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/catalog_pb2.py +109 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/catalog_pb2.pyi +1130 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/commands_pb2.py +141 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/commands_pb2.pyi +1766 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/common_pb2.py +47 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/common_pb2.pyi +123 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/example_plugins_pb2.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/example_plugins_pb2.pyi +112 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/expressions_pb2.py +107 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/expressions_pb2.pyi +1507 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/relations_pb2.py +195 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/relations_pb2.pyi +3613 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/types_pb2.py +95 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/types_pb2.pyi +980 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/protobuf/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/protobuf/functions.py +166 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/readwriter.py +861 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/session.py +952 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/query.py +295 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/readwriter.py +618 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/foreach_batch_worker.py +87 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/streaming/worker/listener_worker.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/types.py +301 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/udf.py +296 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/udtf.py +200 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/utils.py +58 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/connect/window.py +266 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/context.py +818 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/dataframe.py +5973 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/functions.py +15889 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/group.py +547 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/observation.py +152 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/__init__.pyi +344 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/__init__.pyi +17 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/frame.pyi +20 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/_typing/protocols/series.pyi +20 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/conversion.py +671 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/functions.py +480 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/functions.pyi +132 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/group_ops.py +523 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/map_ops.py +216 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/serializers.py +1019 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/typehints.py +172 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/types.py +972 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/pandas/utils.py +86 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/protobuf/__init__.py +18 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/protobuf/functions.py +334 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/readwriter.py +2159 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/session.py +2088 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/sql_formatter.py +84 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/listener.py +1050 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/query.py +746 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/readwriter.py +1652 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/streaming/state.py +288 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_artifact.py +420 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/client/test_client.py +358 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_foreach_batch.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_listener.py +116 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/streaming/test_parity_streaming.py +35 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_basic.py +3612 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_column.py +1042 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_function.py +2381 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_connect_plan.py +1060 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_map.py +38 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_arrow_python_udf.py +48 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_catalog.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_column.py +55 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_conf.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_dataframe.py +96 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_datasources.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_errors.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_functions.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_group.py +36 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_cogrouped_map.py +59 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map.py +74 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_grouped_map_with_state.py +62 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_map.py +58 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf.py +70 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_grouped_agg.py +50 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_scalar.py +68 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_pandas_udf_window.py +40 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_readwriter.py +46 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_serde.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_types.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udf.py +100 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_parity_udtf.py +163 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_session.py +181 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/connect/test_utils.py +42 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py +623 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map.py +869 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_grouped_map_with_state.py +342 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_map.py +436 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf.py +363 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_grouped_agg.py +592 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_scalar.py +1503 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints.py +392 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_typehints_with_future_annotations.py +375 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/pandas/test_pandas_udf_window.py +411 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming.py +401 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach.py +295 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_foreach_batch.py +106 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/streaming/test_streaming_listener.py +558 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow.py +1346 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_map.py +182 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_arrow_python_udf.py +202 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_catalog.py +503 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_column.py +225 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_conf.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_context.py +201 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_dataframe.py +1931 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_datasources.py +256 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_errors.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_functions.py +1349 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_group.py +53 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_pandas_sqlmetrics.py +68 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_readwriter.py +283 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_serde.py +155 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_session.py +412 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_types.py +1581 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf.py +961 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udf_profiler.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_udtf.py +1456 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/tests/test_utils.py +1686 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/types.py +2558 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/udf.py +714 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/udtf.py +325 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/utils.py +339 -0
- snowflake/snowpark_connect/includes/python/pyspark/sql/window.py +492 -0
- snowflake/snowpark_connect/includes/python/pyspark/statcounter.py +165 -0
- snowflake/snowpark_connect/includes/python/pyspark/status.py +112 -0
- snowflake/snowpark_connect/includes/python/pyspark/storagelevel.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/__init__.py +22 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/context.py +471 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/dstream.py +933 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/kinesis.py +205 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/listener.py +83 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_context.py +184 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_dstream.py +706 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_kinesis.py +118 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/tests/test_listener.py +160 -0
- snowflake/snowpark_connect/includes/python/pyspark/streaming/util.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/taskcontext.py +502 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/__init__.py +21 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/connectutils.py +199 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/mllibutils.py +30 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/mlutils.py +275 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/objects.py +121 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/pandasutils.py +714 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/sqlutils.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/streamingutils.py +178 -0
- snowflake/snowpark_connect/includes/python/pyspark/testing/utils.py +636 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/__init__.py +16 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_appsubmit.py +306 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_broadcast.py +196 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_conf.py +44 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_context.py +346 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_daemon.py +89 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_install_spark.py +124 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_join.py +69 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_memory_profiler.py +167 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_pin_thread.py +194 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_profiler.py +168 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rdd.py +939 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddbarrier.py +52 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_rddsampler.py +66 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_readwrite.py +368 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_serializers.py +257 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_shuffle.py +267 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_stage_sched.py +153 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_statcounter.py +130 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_taskcontext.py +350 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_util.py +97 -0
- snowflake/snowpark_connect/includes/python/pyspark/tests/test_worker.py +271 -0
- snowflake/snowpark_connect/includes/python/pyspark/traceback_utils.py +81 -0
- snowflake/snowpark_connect/includes/python/pyspark/util.py +416 -0
- snowflake/snowpark_connect/includes/python/pyspark/version.py +19 -0
- snowflake/snowpark_connect/includes/python/pyspark/worker.py +1307 -0
- snowflake/snowpark_connect/includes/python/pyspark/worker_util.py +46 -0
- snowflake/snowpark_connect/proto/__init__.py +10 -0
- snowflake/snowpark_connect/proto/control_pb2.py +35 -0
- snowflake/snowpark_connect/proto/control_pb2.pyi +38 -0
- snowflake/snowpark_connect/proto/control_pb2_grpc.py +183 -0
- snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.py +35 -0
- snowflake/snowpark_connect/proto/snowflake_expression_ext_pb2.pyi +53 -0
- snowflake/snowpark_connect/proto/snowflake_rdd_pb2.pyi +39 -0
- snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.py +47 -0
- snowflake/snowpark_connect/proto/snowflake_relation_ext_pb2.pyi +111 -0
- snowflake/snowpark_connect/relation/__init__.py +3 -0
- snowflake/snowpark_connect/relation/catalogs/__init__.py +12 -0
- snowflake/snowpark_connect/relation/catalogs/abstract_spark_catalog.py +287 -0
- snowflake/snowpark_connect/relation/catalogs/snowflake_catalog.py +467 -0
- snowflake/snowpark_connect/relation/catalogs/utils.py +51 -0
- snowflake/snowpark_connect/relation/io_utils.py +76 -0
- snowflake/snowpark_connect/relation/map_aggregate.py +322 -0
- snowflake/snowpark_connect/relation/map_catalog.py +151 -0
- snowflake/snowpark_connect/relation/map_column_ops.py +1068 -0
- snowflake/snowpark_connect/relation/map_crosstab.py +48 -0
- snowflake/snowpark_connect/relation/map_extension.py +412 -0
- snowflake/snowpark_connect/relation/map_join.py +341 -0
- snowflake/snowpark_connect/relation/map_local_relation.py +326 -0
- snowflake/snowpark_connect/relation/map_map_partitions.py +146 -0
- snowflake/snowpark_connect/relation/map_relation.py +253 -0
- snowflake/snowpark_connect/relation/map_row_ops.py +716 -0
- snowflake/snowpark_connect/relation/map_sample_by.py +35 -0
- snowflake/snowpark_connect/relation/map_show_string.py +50 -0
- snowflake/snowpark_connect/relation/map_sql.py +1874 -0
- snowflake/snowpark_connect/relation/map_stats.py +324 -0
- snowflake/snowpark_connect/relation/map_subquery_alias.py +32 -0
- snowflake/snowpark_connect/relation/map_udtf.py +288 -0
- snowflake/snowpark_connect/relation/read/__init__.py +7 -0
- snowflake/snowpark_connect/relation/read/jdbc_read_dbapi.py +668 -0
- snowflake/snowpark_connect/relation/read/map_read.py +367 -0
- snowflake/snowpark_connect/relation/read/map_read_csv.py +142 -0
- snowflake/snowpark_connect/relation/read/map_read_jdbc.py +108 -0
- snowflake/snowpark_connect/relation/read/map_read_json.py +344 -0
- snowflake/snowpark_connect/relation/read/map_read_parquet.py +194 -0
- snowflake/snowpark_connect/relation/read/map_read_socket.py +59 -0
- snowflake/snowpark_connect/relation/read/map_read_table.py +109 -0
- snowflake/snowpark_connect/relation/read/map_read_text.py +106 -0
- snowflake/snowpark_connect/relation/read/reader_config.py +399 -0
- snowflake/snowpark_connect/relation/read/utils.py +155 -0
- snowflake/snowpark_connect/relation/stage_locator.py +161 -0
- snowflake/snowpark_connect/relation/utils.py +219 -0
- snowflake/snowpark_connect/relation/write/__init__.py +3 -0
- snowflake/snowpark_connect/relation/write/jdbc_write_dbapi.py +339 -0
- snowflake/snowpark_connect/relation/write/map_write.py +436 -0
- snowflake/snowpark_connect/relation/write/map_write_jdbc.py +48 -0
- snowflake/snowpark_connect/resources/java_udfs-1.0-SNAPSHOT.jar +0 -0
- snowflake/snowpark_connect/resources_initializer.py +75 -0
- snowflake/snowpark_connect/server.py +1136 -0
- snowflake/snowpark_connect/start_server.py +32 -0
- snowflake/snowpark_connect/tcm.py +8 -0
- snowflake/snowpark_connect/type_mapping.py +1003 -0
- snowflake/snowpark_connect/typed_column.py +94 -0
- snowflake/snowpark_connect/utils/__init__.py +3 -0
- snowflake/snowpark_connect/utils/artifacts.py +48 -0
- snowflake/snowpark_connect/utils/attribute_handling.py +72 -0
- snowflake/snowpark_connect/utils/cache.py +84 -0
- snowflake/snowpark_connect/utils/concurrent.py +124 -0
- snowflake/snowpark_connect/utils/context.py +390 -0
- snowflake/snowpark_connect/utils/describe_query_cache.py +231 -0
- snowflake/snowpark_connect/utils/interrupt.py +85 -0
- snowflake/snowpark_connect/utils/io_utils.py +35 -0
- snowflake/snowpark_connect/utils/pandas_udtf_utils.py +117 -0
- snowflake/snowpark_connect/utils/profiling.py +47 -0
- snowflake/snowpark_connect/utils/session.py +180 -0
- snowflake/snowpark_connect/utils/snowpark_connect_logging.py +38 -0
- snowflake/snowpark_connect/utils/telemetry.py +513 -0
- snowflake/snowpark_connect/utils/udf_cache.py +392 -0
- snowflake/snowpark_connect/utils/udf_helper.py +328 -0
- snowflake/snowpark_connect/utils/udf_utils.py +310 -0
- snowflake/snowpark_connect/utils/udtf_helper.py +420 -0
- snowflake/snowpark_connect/utils/udtf_utils.py +799 -0
- snowflake/snowpark_connect/utils/xxhash64.py +247 -0
- snowflake/snowpark_connect/version.py +6 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-connect +71 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-session +11 -0
- snowpark_connect-0.20.2.data/scripts/snowpark-submit +354 -0
- snowpark_connect-0.20.2.dist-info/METADATA +37 -0
- snowpark_connect-0.20.2.dist-info/RECORD +879 -0
- snowpark_connect-0.20.2.dist-info/WHEEL +5 -0
- snowpark_connect-0.20.2.dist-info/licenses/LICENSE.txt +202 -0
- snowpark_connect-0.20.2.dist-info/top_level.txt +1 -0
snowflake/snowpark_connect/includes/python/pyspark/sql/connect/proto/relations_pb2.pyi
@@ -0,0 +1,3613 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+@generated by mypy-protobuf. Do not edit manually!
+isort:skip_file
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import builtins
+import collections.abc
+import google.protobuf.any_pb2
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.internal.enum_type_wrapper
+import google.protobuf.message
+import pyspark.sql.connect.proto.catalog_pb2
+import pyspark.sql.connect.proto.expressions_pb2
+import pyspark.sql.connect.proto.types_pb2
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+class Relation(google.protobuf.message.Message):
+    """The main [[Relation]] type. Fundamentally, a relation is a typed container
+    that has exactly one explicit relation type set.
+
+    When adding new relation types, they have to be registered here.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    COMMON_FIELD_NUMBER: builtins.int
+    READ_FIELD_NUMBER: builtins.int
+    PROJECT_FIELD_NUMBER: builtins.int
+    FILTER_FIELD_NUMBER: builtins.int
+    JOIN_FIELD_NUMBER: builtins.int
+    SET_OP_FIELD_NUMBER: builtins.int
+    SORT_FIELD_NUMBER: builtins.int
+    LIMIT_FIELD_NUMBER: builtins.int
+    AGGREGATE_FIELD_NUMBER: builtins.int
+    SQL_FIELD_NUMBER: builtins.int
+    LOCAL_RELATION_FIELD_NUMBER: builtins.int
+    SAMPLE_FIELD_NUMBER: builtins.int
+    OFFSET_FIELD_NUMBER: builtins.int
+    DEDUPLICATE_FIELD_NUMBER: builtins.int
+    RANGE_FIELD_NUMBER: builtins.int
+    SUBQUERY_ALIAS_FIELD_NUMBER: builtins.int
+    REPARTITION_FIELD_NUMBER: builtins.int
+    TO_DF_FIELD_NUMBER: builtins.int
+    WITH_COLUMNS_RENAMED_FIELD_NUMBER: builtins.int
+    SHOW_STRING_FIELD_NUMBER: builtins.int
+    DROP_FIELD_NUMBER: builtins.int
+    TAIL_FIELD_NUMBER: builtins.int
+    WITH_COLUMNS_FIELD_NUMBER: builtins.int
+    HINT_FIELD_NUMBER: builtins.int
+    UNPIVOT_FIELD_NUMBER: builtins.int
+    TO_SCHEMA_FIELD_NUMBER: builtins.int
+    REPARTITION_BY_EXPRESSION_FIELD_NUMBER: builtins.int
+    MAP_PARTITIONS_FIELD_NUMBER: builtins.int
+    COLLECT_METRICS_FIELD_NUMBER: builtins.int
+    PARSE_FIELD_NUMBER: builtins.int
+    GROUP_MAP_FIELD_NUMBER: builtins.int
+    CO_GROUP_MAP_FIELD_NUMBER: builtins.int
+    WITH_WATERMARK_FIELD_NUMBER: builtins.int
+    APPLY_IN_PANDAS_WITH_STATE_FIELD_NUMBER: builtins.int
+    HTML_STRING_FIELD_NUMBER: builtins.int
+    CACHED_LOCAL_RELATION_FIELD_NUMBER: builtins.int
+    CACHED_REMOTE_RELATION_FIELD_NUMBER: builtins.int
+    COMMON_INLINE_USER_DEFINED_TABLE_FUNCTION_FIELD_NUMBER: builtins.int
+    FILL_NA_FIELD_NUMBER: builtins.int
+    DROP_NA_FIELD_NUMBER: builtins.int
+    REPLACE_FIELD_NUMBER: builtins.int
+    SUMMARY_FIELD_NUMBER: builtins.int
+    CROSSTAB_FIELD_NUMBER: builtins.int
+    DESCRIBE_FIELD_NUMBER: builtins.int
+    COV_FIELD_NUMBER: builtins.int
+    CORR_FIELD_NUMBER: builtins.int
+    APPROX_QUANTILE_FIELD_NUMBER: builtins.int
+    FREQ_ITEMS_FIELD_NUMBER: builtins.int
+    SAMPLE_BY_FIELD_NUMBER: builtins.int
+    CATALOG_FIELD_NUMBER: builtins.int
+    EXTENSION_FIELD_NUMBER: builtins.int
+    UNKNOWN_FIELD_NUMBER: builtins.int
+    @property
+    def common(self) -> global___RelationCommon: ...
+    @property
+    def read(self) -> global___Read: ...
+    @property
+    def project(self) -> global___Project: ...
+    @property
+    def filter(self) -> global___Filter: ...
+    @property
+    def join(self) -> global___Join: ...
+    @property
+    def set_op(self) -> global___SetOperation: ...
+    @property
+    def sort(self) -> global___Sort: ...
+    @property
+    def limit(self) -> global___Limit: ...
+    @property
+    def aggregate(self) -> global___Aggregate: ...
+    @property
+    def sql(self) -> global___SQL: ...
+    @property
+    def local_relation(self) -> global___LocalRelation: ...
+    @property
+    def sample(self) -> global___Sample: ...
+    @property
+    def offset(self) -> global___Offset: ...
+    @property
+    def deduplicate(self) -> global___Deduplicate: ...
+    @property
+    def range(self) -> global___Range: ...
+    @property
+    def subquery_alias(self) -> global___SubqueryAlias: ...
+    @property
+    def repartition(self) -> global___Repartition: ...
+    @property
+    def to_df(self) -> global___ToDF: ...
+    @property
+    def with_columns_renamed(self) -> global___WithColumnsRenamed: ...
+    @property
+    def show_string(self) -> global___ShowString: ...
+    @property
+    def drop(self) -> global___Drop: ...
+    @property
+    def tail(self) -> global___Tail: ...
+    @property
+    def with_columns(self) -> global___WithColumns: ...
+    @property
+    def hint(self) -> global___Hint: ...
+    @property
+    def unpivot(self) -> global___Unpivot: ...
+    @property
+    def to_schema(self) -> global___ToSchema: ...
+    @property
+    def repartition_by_expression(self) -> global___RepartitionByExpression: ...
+    @property
+    def map_partitions(self) -> global___MapPartitions: ...
+    @property
+    def collect_metrics(self) -> global___CollectMetrics: ...
+    @property
+    def parse(self) -> global___Parse: ...
+    @property
+    def group_map(self) -> global___GroupMap: ...
+    @property
+    def co_group_map(self) -> global___CoGroupMap: ...
+    @property
+    def with_watermark(self) -> global___WithWatermark: ...
+    @property
+    def apply_in_pandas_with_state(self) -> global___ApplyInPandasWithState: ...
+    @property
+    def html_string(self) -> global___HtmlString: ...
+    @property
+    def cached_local_relation(self) -> global___CachedLocalRelation: ...
+    @property
+    def cached_remote_relation(self) -> global___CachedRemoteRelation: ...
+    @property
+    def common_inline_user_defined_table_function(
+        self,
+    ) -> global___CommonInlineUserDefinedTableFunction: ...
+    @property
+    def fill_na(self) -> global___NAFill:
+        """NA functions"""
+    @property
+    def drop_na(self) -> global___NADrop: ...
+    @property
+    def replace(self) -> global___NAReplace: ...
+    @property
+    def summary(self) -> global___StatSummary:
+        """stat functions"""
+    @property
+    def crosstab(self) -> global___StatCrosstab: ...
+    @property
+    def describe(self) -> global___StatDescribe: ...
+    @property
+    def cov(self) -> global___StatCov: ...
+    @property
+    def corr(self) -> global___StatCorr: ...
+    @property
+    def approx_quantile(self) -> global___StatApproxQuantile: ...
+    @property
+    def freq_items(self) -> global___StatFreqItems: ...
+    @property
+    def sample_by(self) -> global___StatSampleBy: ...
+    @property
+    def catalog(self) -> pyspark.sql.connect.proto.catalog_pb2.Catalog:
+        """Catalog API (experimental / unstable)"""
+    @property
+    def extension(self) -> google.protobuf.any_pb2.Any:
+        """This field is used to mark extensions to the protocol. When plugins generate arbitrary
+        relations they can add them here. During the planning the correct resolution is done.
+        """
+    @property
+    def unknown(self) -> global___Unknown: ...
+    def __init__(
+        self,
+        *,
+        common: global___RelationCommon | None = ...,
+        read: global___Read | None = ...,
+        project: global___Project | None = ...,
+        filter: global___Filter | None = ...,
+        join: global___Join | None = ...,
+        set_op: global___SetOperation | None = ...,
+        sort: global___Sort | None = ...,
+        limit: global___Limit | None = ...,
+        aggregate: global___Aggregate | None = ...,
+        sql: global___SQL | None = ...,
+        local_relation: global___LocalRelation | None = ...,
+        sample: global___Sample | None = ...,
+        offset: global___Offset | None = ...,
+        deduplicate: global___Deduplicate | None = ...,
+        range: global___Range | None = ...,
+        subquery_alias: global___SubqueryAlias | None = ...,
+        repartition: global___Repartition | None = ...,
+        to_df: global___ToDF | None = ...,
+        with_columns_renamed: global___WithColumnsRenamed | None = ...,
+        show_string: global___ShowString | None = ...,
+        drop: global___Drop | None = ...,
+        tail: global___Tail | None = ...,
+        with_columns: global___WithColumns | None = ...,
+        hint: global___Hint | None = ...,
+        unpivot: global___Unpivot | None = ...,
+        to_schema: global___ToSchema | None = ...,
+        repartition_by_expression: global___RepartitionByExpression | None = ...,
+        map_partitions: global___MapPartitions | None = ...,
+        collect_metrics: global___CollectMetrics | None = ...,
+        parse: global___Parse | None = ...,
+        group_map: global___GroupMap | None = ...,
+        co_group_map: global___CoGroupMap | None = ...,
+        with_watermark: global___WithWatermark | None = ...,
+        apply_in_pandas_with_state: global___ApplyInPandasWithState | None = ...,
+        html_string: global___HtmlString | None = ...,
+        cached_local_relation: global___CachedLocalRelation | None = ...,
+        cached_remote_relation: global___CachedRemoteRelation | None = ...,
+        common_inline_user_defined_table_function: global___CommonInlineUserDefinedTableFunction
+        | None = ...,
+        fill_na: global___NAFill | None = ...,
+        drop_na: global___NADrop | None = ...,
+        replace: global___NAReplace | None = ...,
+        summary: global___StatSummary | None = ...,
+        crosstab: global___StatCrosstab | None = ...,
+        describe: global___StatDescribe | None = ...,
+        cov: global___StatCov | None = ...,
+        corr: global___StatCorr | None = ...,
+        approx_quantile: global___StatApproxQuantile | None = ...,
+        freq_items: global___StatFreqItems | None = ...,
+        sample_by: global___StatSampleBy | None = ...,
+        catalog: pyspark.sql.connect.proto.catalog_pb2.Catalog | None = ...,
+        extension: google.protobuf.any_pb2.Any | None = ...,
+        unknown: global___Unknown | None = ...,
+    ) -> None: ...
+    def HasField(
+        self,
+        field_name: typing_extensions.Literal[
+            "aggregate",
+            b"aggregate",
+            "apply_in_pandas_with_state",
+            b"apply_in_pandas_with_state",
+            "approx_quantile",
+            b"approx_quantile",
+            "cached_local_relation",
+            b"cached_local_relation",
+            "cached_remote_relation",
+            b"cached_remote_relation",
+            "catalog",
+            b"catalog",
+            "co_group_map",
+            b"co_group_map",
+            "collect_metrics",
+            b"collect_metrics",
+            "common",
+            b"common",
+            "common_inline_user_defined_table_function",
+            b"common_inline_user_defined_table_function",
+            "corr",
+            b"corr",
+            "cov",
+            b"cov",
+            "crosstab",
+            b"crosstab",
+            "deduplicate",
+            b"deduplicate",
+            "describe",
+            b"describe",
+            "drop",
+            b"drop",
+            "drop_na",
+            b"drop_na",
+            "extension",
+            b"extension",
+            "fill_na",
+            b"fill_na",
+            "filter",
+            b"filter",
+            "freq_items",
+            b"freq_items",
+            "group_map",
+            b"group_map",
+            "hint",
+            b"hint",
+            "html_string",
+            b"html_string",
+            "join",
+            b"join",
+            "limit",
+            b"limit",
+            "local_relation",
+            b"local_relation",
+            "map_partitions",
+            b"map_partitions",
+            "offset",
+            b"offset",
+            "parse",
+            b"parse",
+            "project",
+            b"project",
+            "range",
+            b"range",
+            "read",
+            b"read",
+            "rel_type",
+            b"rel_type",
+            "repartition",
+            b"repartition",
+            "repartition_by_expression",
+            b"repartition_by_expression",
+            "replace",
+            b"replace",
+            "sample",
+            b"sample",
+            "sample_by",
+            b"sample_by",
+            "set_op",
+            b"set_op",
+            "show_string",
+            b"show_string",
+            "sort",
+            b"sort",
+            "sql",
+            b"sql",
+            "subquery_alias",
+            b"subquery_alias",
+            "summary",
+            b"summary",
+            "tail",
+            b"tail",
+            "to_df",
+            b"to_df",
+            "to_schema",
+            b"to_schema",
+            "unknown",
+            b"unknown",
+            "unpivot",
+            b"unpivot",
+            "with_columns",
+            b"with_columns",
+            "with_columns_renamed",
+            b"with_columns_renamed",
+            "with_watermark",
+            b"with_watermark",
+        ],
+    ) -> builtins.bool: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal[
+            "aggregate",
+            b"aggregate",
+            "apply_in_pandas_with_state",
+            b"apply_in_pandas_with_state",
+            "approx_quantile",
+            b"approx_quantile",
+            "cached_local_relation",
+            b"cached_local_relation",
+            "cached_remote_relation",
+            b"cached_remote_relation",
+            "catalog",
+            b"catalog",
+            "co_group_map",
+            b"co_group_map",
+            "collect_metrics",
+            b"collect_metrics",
+            "common",
+            b"common",
+            "common_inline_user_defined_table_function",
+            b"common_inline_user_defined_table_function",
+            "corr",
+            b"corr",
+            "cov",
+            b"cov",
+            "crosstab",
+            b"crosstab",
+            "deduplicate",
+            b"deduplicate",
+            "describe",
+            b"describe",
+            "drop",
+            b"drop",
+            "drop_na",
+            b"drop_na",
+            "extension",
+            b"extension",
+            "fill_na",
+            b"fill_na",
+            "filter",
+            b"filter",
+            "freq_items",
+            b"freq_items",
+            "group_map",
+            b"group_map",
+            "hint",
+            b"hint",
+            "html_string",
+            b"html_string",
+            "join",
+            b"join",
+            "limit",
+            b"limit",
+            "local_relation",
+            b"local_relation",
+            "map_partitions",
+            b"map_partitions",
+            "offset",
+            b"offset",
+            "parse",
+            b"parse",
+            "project",
+            b"project",
+            "range",
+            b"range",
+            "read",
+            b"read",
+            "rel_type",
+            b"rel_type",
+            "repartition",
+            b"repartition",
+            "repartition_by_expression",
+            b"repartition_by_expression",
+            "replace",
+            b"replace",
+            "sample",
+            b"sample",
+            "sample_by",
+            b"sample_by",
+            "set_op",
+            b"set_op",
+            "show_string",
+            b"show_string",
+            "sort",
+            b"sort",
+            "sql",
+            b"sql",
+            "subquery_alias",
+            b"subquery_alias",
+            "summary",
+            b"summary",
+            "tail",
+            b"tail",
+            "to_df",
+            b"to_df",
+            "to_schema",
+            b"to_schema",
+            "unknown",
+            b"unknown",
+            "unpivot",
+            b"unpivot",
+            "with_columns",
+            b"with_columns",
+            "with_columns_renamed",
+            b"with_columns_renamed",
+            "with_watermark",
+            b"with_watermark",
+        ],
+    ) -> None: ...
+    def WhichOneof(
+        self, oneof_group: typing_extensions.Literal["rel_type", b"rel_type"]
+    ) -> typing_extensions.Literal[
+        "read",
+        "project",
+        "filter",
+        "join",
+        "set_op",
+        "sort",
+        "limit",
+        "aggregate",
+        "sql",
+        "local_relation",
+        "sample",
+        "offset",
+        "deduplicate",
+        "range",
+        "subquery_alias",
+        "repartition",
+        "to_df",
+        "with_columns_renamed",
+        "show_string",
+        "drop",
+        "tail",
+        "with_columns",
+        "hint",
+        "unpivot",
+        "to_schema",
+        "repartition_by_expression",
+        "map_partitions",
+        "collect_metrics",
+        "parse",
+        "group_map",
+        "co_group_map",
+        "with_watermark",
+        "apply_in_pandas_with_state",
+        "html_string",
+        "cached_local_relation",
+        "cached_remote_relation",
+        "common_inline_user_defined_table_function",
+        "fill_na",
+        "drop_na",
+        "replace",
+        "summary",
+        "crosstab",
+        "describe",
+        "cov",
+        "corr",
+        "approx_quantile",
+        "freq_items",
+        "sample_by",
+        "catalog",
+        "extension",
+        "unknown",
+    ] | None: ...
+
+global___Relation = Relation
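
Editor's note: the Relation stub above documents a protobuf oneof, so a message carries at most one rel_type at a time and assigning a new one clears the old. A minimal sketch of that behavior, assuming the generated bindings are importable as pyspark.sql.connect.proto:

    from pyspark.sql.connect.proto import relations_pb2

    rel = relations_pb2.Relation()
    rel.read.named_table.unparsed_identifier = "my_table"
    assert rel.WhichOneof("rel_type") == "read"

    # Setting a different relation type replaces the previous one, so the
    # container always holds exactly one explicit rel_type.
    rel.sql.query = "SELECT 1"
    assert rel.WhichOneof("rel_type") == "sql"
    assert not rel.HasField("read")
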
+
+class Unknown(google.protobuf.message.Message):
+    """Used for testing purposes only."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___Unknown = Unknown
+
+class RelationCommon(google.protobuf.message.Message):
+    """Common metadata of all relations."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SOURCE_INFO_FIELD_NUMBER: builtins.int
+    PLAN_ID_FIELD_NUMBER: builtins.int
+    source_info: builtins.str
+    """(Required) Shared relation metadata."""
+    plan_id: builtins.int
+    """(Optional) A per-client globally unique id for a given connect plan."""
+    def __init__(
+        self,
+        *,
+        source_info: builtins.str = ...,
+        plan_id: builtins.int | None = ...,
+    ) -> None: ...
+    def HasField(
+        self, field_name: typing_extensions.Literal["_plan_id", b"_plan_id", "plan_id", b"plan_id"]
+    ) -> builtins.bool: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal[
+            "_plan_id", b"_plan_id", "plan_id", b"plan_id", "source_info", b"source_info"
+        ],
+    ) -> None: ...
+    def WhichOneof(
+        self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
+    ) -> typing_extensions.Literal["plan_id"] | None: ...
+
+global___RelationCommon = RelationCommon
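
Editor's note: plan_id is a proto3 optional scalar, which surfaces through the synthetic _plan_id oneof seen in the stub's HasField/WhichOneof signatures. A minimal sketch, same bindings assumed:

    from pyspark.sql.connect.proto import relations_pb2

    common = relations_pb2.RelationCommon(source_info="demo")
    assert not common.HasField("plan_id")  # optional int starts unset
    common.plan_id = 7
    assert common.WhichOneof("_plan_id") == "plan_id"
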
+
+class SQL(google.protobuf.message.Message):
+    """Relation that uses a SQL query to generate the output."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    class ArgsEntry(google.protobuf.message.Message):
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        KEY_FIELD_NUMBER: builtins.int
+        VALUE_FIELD_NUMBER: builtins.int
+        key: builtins.str
+        @property
+        def value(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal: ...
+        def __init__(
+            self,
+            *,
+            key: builtins.str = ...,
+            value: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ...,
+        ) -> None: ...
+        def HasField(
+            self, field_name: typing_extensions.Literal["value", b"value"]
+        ) -> builtins.bool: ...
+        def ClearField(
+            self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
+        ) -> None: ...
+
+    QUERY_FIELD_NUMBER: builtins.int
+    ARGS_FIELD_NUMBER: builtins.int
+    POS_ARGS_FIELD_NUMBER: builtins.int
+    query: builtins.str
+    """(Required) The SQL query."""
+    @property
+    def args(
+        self,
+    ) -> google.protobuf.internal.containers.MessageMap[
+        builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
+    ]:
+        """(Optional) A map of parameter names to literal expressions."""
+    @property
+    def pos_args(
+        self,
+    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
+        pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
+    ]:
+        """(Optional) A sequence of literal expressions for positional parameters in the SQL query text."""
+    def __init__(
+        self,
+        *,
+        query: builtins.str = ...,
+        args: collections.abc.Mapping[
+            builtins.str, pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
+        ]
+        | None = ...,
+        pos_args: collections.abc.Iterable[
+            pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
+        ]
+        | None = ...,
+    ) -> None: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal[
+            "args", b"args", "pos_args", b"pos_args", "query", b"query"
+        ],
+    ) -> None: ...
+
+global___SQL = SQL
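
Editor's note: the args map and pos_args list above carry query parameters as Expression.Literal values. A minimal sketch of both styles, assuming the same generated bindings (the queries are placeholders, and the Literal's integer field comes from the sibling expressions_pb2 stub):

    from pyspark.sql.connect.proto import relations_pb2, expressions_pb2

    # Named parameter, keyed into the args message map.
    named = relations_pb2.SQL(query="SELECT * FROM t WHERE id = :id")
    named.args["id"].integer = 42

    # Positional parameter, appended to the repeated pos_args field.
    positional = relations_pb2.SQL(query="SELECT * FROM t WHERE id = ?")
    positional.pos_args.append(expressions_pb2.Expression.Literal(integer=42))
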
+
+class Read(google.protobuf.message.Message):
+    """Relation that reads from a file / table or other data source. Does not have additional
+    inputs.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    class NamedTable(google.protobuf.message.Message):
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        class OptionsEntry(google.protobuf.message.Message):
+            DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+            KEY_FIELD_NUMBER: builtins.int
+            VALUE_FIELD_NUMBER: builtins.int
+            key: builtins.str
+            value: builtins.str
+            def __init__(
+                self,
+                *,
+                key: builtins.str = ...,
+                value: builtins.str = ...,
+            ) -> None: ...
+            def ClearField(
+                self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
+            ) -> None: ...
+
+        UNPARSED_IDENTIFIER_FIELD_NUMBER: builtins.int
+        OPTIONS_FIELD_NUMBER: builtins.int
+        unparsed_identifier: builtins.str
+        """(Required) Unparsed identifier for the table."""
+        @property
+        def options(
+            self,
+        ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+            """Options for the named table. The map key is case insensitive."""
+        def __init__(
+            self,
+            *,
+            unparsed_identifier: builtins.str = ...,
+            options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
+        ) -> None: ...
+        def ClearField(
+            self,
+            field_name: typing_extensions.Literal[
+                "options", b"options", "unparsed_identifier", b"unparsed_identifier"
+            ],
+        ) -> None: ...
+
+    class DataSource(google.protobuf.message.Message):
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        class OptionsEntry(google.protobuf.message.Message):
+            DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+            KEY_FIELD_NUMBER: builtins.int
+            VALUE_FIELD_NUMBER: builtins.int
+            key: builtins.str
+            value: builtins.str
+            def __init__(
+                self,
+                *,
+                key: builtins.str = ...,
+                value: builtins.str = ...,
+            ) -> None: ...
+            def ClearField(
+                self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
+            ) -> None: ...
+
+        FORMAT_FIELD_NUMBER: builtins.int
+        SCHEMA_FIELD_NUMBER: builtins.int
+        OPTIONS_FIELD_NUMBER: builtins.int
+        PATHS_FIELD_NUMBER: builtins.int
+        PREDICATES_FIELD_NUMBER: builtins.int
+        format: builtins.str
+        """(Optional) Supported formats include: parquet, orc, text, json, parquet, csv, avro.
+
+        If not set, the value from SQL conf 'spark.sql.sources.default' will be used.
+        """
+        schema: builtins.str
+        """(Optional) If not set, Spark will infer the schema.
+
+        This schema string should be either DDL-formatted or JSON-formatted.
+        """
+        @property
+        def options(
+            self,
+        ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+            """Options for the data source. The context of this map varies based on the
+            data source format. This options could be empty for valid data source format.
+            The map key is case insensitive.
+            """
+        @property
+        def paths(
+            self,
+        ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+            """(Optional) A list of path for file-system backed data sources."""
+        @property
+        def predicates(
+            self,
+        ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+            """(Optional) Condition in the where clause for each partition.
+
+            This is only supported by the JDBC data source.
+            """
+        def __init__(
+            self,
+            *,
+            format: builtins.str | None = ...,
+            schema: builtins.str | None = ...,
+            options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
+            paths: collections.abc.Iterable[builtins.str] | None = ...,
+            predicates: collections.abc.Iterable[builtins.str] | None = ...,
+        ) -> None: ...
+        def HasField(
+            self,
+            field_name: typing_extensions.Literal[
+                "_format",
+                b"_format",
+                "_schema",
+                b"_schema",
+                "format",
+                b"format",
+                "schema",
+                b"schema",
+            ],
+        ) -> builtins.bool: ...
+        def ClearField(
+            self,
+            field_name: typing_extensions.Literal[
+                "_format",
+                b"_format",
+                "_schema",
+                b"_schema",
+                "format",
+                b"format",
+                "options",
+                b"options",
+                "paths",
+                b"paths",
+                "predicates",
+                b"predicates",
+                "schema",
+                b"schema",
+            ],
+        ) -> None: ...
+        @typing.overload
+        def WhichOneof(
+            self, oneof_group: typing_extensions.Literal["_format", b"_format"]
+        ) -> typing_extensions.Literal["format"] | None: ...
+        @typing.overload
+        def WhichOneof(
+            self, oneof_group: typing_extensions.Literal["_schema", b"_schema"]
+        ) -> typing_extensions.Literal["schema"] | None: ...
+
+    NAMED_TABLE_FIELD_NUMBER: builtins.int
+    DATA_SOURCE_FIELD_NUMBER: builtins.int
+    IS_STREAMING_FIELD_NUMBER: builtins.int
+    @property
+    def named_table(self) -> global___Read.NamedTable: ...
+    @property
+    def data_source(self) -> global___Read.DataSource: ...
+    is_streaming: builtins.bool
+    """(Optional) Indicates if this is a streaming read."""
+    def __init__(
+        self,
+        *,
+        named_table: global___Read.NamedTable | None = ...,
+        data_source: global___Read.DataSource | None = ...,
+        is_streaming: builtins.bool = ...,
+    ) -> None: ...
+    def HasField(
+        self,
+        field_name: typing_extensions.Literal[
+            "data_source", b"data_source", "named_table", b"named_table", "read_type", b"read_type"
+        ],
+    ) -> builtins.bool: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal[
+            "data_source",
+            b"data_source",
+            "is_streaming",
+            b"is_streaming",
+            "named_table",
+            b"named_table",
+            "read_type",
+            b"read_type",
+        ],
+    ) -> None: ...
+    def WhichOneof(
+        self, oneof_group: typing_extensions.Literal["read_type", b"read_type"]
+    ) -> typing_extensions.Literal["named_table", "data_source"] | None: ...
+
+global___Read = Read
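
Editor's note: Read's read_type oneof selects between a catalog table and a file-backed data source. A minimal sketch of the data-source branch, same bindings assumed (the path and DDL schema are placeholders):

    from pyspark.sql.connect.proto import relations_pb2

    read = relations_pb2.Read()
    read.data_source.format = "csv"
    read.data_source.schema = "id INT, name STRING"   # DDL-formatted, per the docstring
    read.data_source.options["header"] = "true"       # map keys are case insensitive
    read.data_source.paths.append("/tmp/people.csv")  # hypothetical path
    assert read.WhichOneof("read_type") == "data_source"
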
+
+class Project(google.protobuf.message.Message):
+    """Projection of a bag of expressions for a given input relation.
+
+    The input relation must be specified.
+    The projected expression can be an arbitrary expression.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    INPUT_FIELD_NUMBER: builtins.int
+    EXPRESSIONS_FIELD_NUMBER: builtins.int
+    @property
+    def input(self) -> global___Relation:
+        """(Optional) Input relation is optional for Project.
+
+        For example, `SELECT ABS(-1)` is valid plan without an input plan.
+        """
+    @property
+    def expressions(
+        self,
+    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
+        pyspark.sql.connect.proto.expressions_pb2.Expression
+    ]:
+        """(Required) A Project requires at least one expression."""
+    def __init__(
+        self,
+        *,
+        input: global___Relation | None = ...,
+        expressions: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
+        | None = ...,
+    ) -> None: ...
+    def HasField(
+        self, field_name: typing_extensions.Literal["input", b"input"]
+    ) -> builtins.bool: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal["expressions", b"expressions", "input", b"input"],
+    ) -> None: ...
+
+global___Project = Project
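
Editor's note: Project composes with other relations through its input field, which is how plans nest. A minimal sketch selecting one column from a named table, same bindings assumed (unresolved_attribute comes from the sibling expressions_pb2 stub):

    from pyspark.sql.connect.proto import relations_pb2

    table = relations_pb2.Relation()
    table.read.named_table.unparsed_identifier = "people"

    proj = relations_pb2.Project(input=table)
    col = proj.expressions.add()  # a Project requires at least one expression
    col.unresolved_attribute.unparsed_identifier = "name"
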
+
+class Filter(google.protobuf.message.Message):
+    """Relation that applies a boolean expression `condition` on each row of `input` to produce
+    the output result.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    INPUT_FIELD_NUMBER: builtins.int
+    CONDITION_FIELD_NUMBER: builtins.int
+    @property
+    def input(self) -> global___Relation:
+        """(Required) Input relation for a Filter."""
+    @property
+    def condition(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression:
+        """(Required) A Filter must have a condition expression."""
+    def __init__(
+        self,
+        *,
+        input: global___Relation | None = ...,
+        condition: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ...,
+    ) -> None: ...
+    def HasField(
+        self, field_name: typing_extensions.Literal["condition", b"condition", "input", b"input"]
+    ) -> builtins.bool: ...
+    def ClearField(
+        self, field_name: typing_extensions.Literal["condition", b"condition", "input", b"input"]
+    ) -> None: ...
+
+global___Filter = Filter

class Join(google.protobuf.message.Message):
    """Relation of type [[Join]].

    `left` and `right` must be present.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _JoinType:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _JoinTypeEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Join._JoinType.ValueType],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        JOIN_TYPE_UNSPECIFIED: Join._JoinType.ValueType  # 0
        JOIN_TYPE_INNER: Join._JoinType.ValueType  # 1
        JOIN_TYPE_FULL_OUTER: Join._JoinType.ValueType  # 2
        JOIN_TYPE_LEFT_OUTER: Join._JoinType.ValueType  # 3
        JOIN_TYPE_RIGHT_OUTER: Join._JoinType.ValueType  # 4
        JOIN_TYPE_LEFT_ANTI: Join._JoinType.ValueType  # 5
        JOIN_TYPE_LEFT_SEMI: Join._JoinType.ValueType  # 6
        JOIN_TYPE_CROSS: Join._JoinType.ValueType  # 7

    class JoinType(_JoinType, metaclass=_JoinTypeEnumTypeWrapper): ...
    JOIN_TYPE_UNSPECIFIED: Join.JoinType.ValueType  # 0
    JOIN_TYPE_INNER: Join.JoinType.ValueType  # 1
    JOIN_TYPE_FULL_OUTER: Join.JoinType.ValueType  # 2
    JOIN_TYPE_LEFT_OUTER: Join.JoinType.ValueType  # 3
    JOIN_TYPE_RIGHT_OUTER: Join.JoinType.ValueType  # 4
    JOIN_TYPE_LEFT_ANTI: Join.JoinType.ValueType  # 5
    JOIN_TYPE_LEFT_SEMI: Join.JoinType.ValueType  # 6
    JOIN_TYPE_CROSS: Join.JoinType.ValueType  # 7

    class JoinDataType(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        IS_LEFT_STRUCT_FIELD_NUMBER: builtins.int
        IS_RIGHT_STRUCT_FIELD_NUMBER: builtins.int
        is_left_struct: builtins.bool
        """If the left data type is a struct."""
        is_right_struct: builtins.bool
        """If the right data type is a struct."""
        def __init__(
            self,
            *,
            is_left_struct: builtins.bool = ...,
            is_right_struct: builtins.bool = ...,
        ) -> None: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "is_left_struct", b"is_left_struct", "is_right_struct", b"is_right_struct"
            ],
        ) -> None: ...

    LEFT_FIELD_NUMBER: builtins.int
    RIGHT_FIELD_NUMBER: builtins.int
    JOIN_CONDITION_FIELD_NUMBER: builtins.int
    JOIN_TYPE_FIELD_NUMBER: builtins.int
    USING_COLUMNS_FIELD_NUMBER: builtins.int
    JOIN_DATA_TYPE_FIELD_NUMBER: builtins.int
    @property
    def left(self) -> global___Relation:
        """(Required) Left input relation for a Join."""
    @property
    def right(self) -> global___Relation:
        """(Required) Right input relation for a Join."""
    @property
    def join_condition(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression:
        """(Optional) The join condition. Could be unset when `using_columns` is utilized.

        This field cannot be set together with `using_columns`.
        """
    join_type: global___Join.JoinType.ValueType
    """(Required) The join type."""
    @property
    def using_columns(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) using_columns provides a list of columns that should be present on both sides
        of the join inputs that this Join will join on. For example, A JOIN B USING col_name is
        equivalent to A JOIN B ON A.col_name = B.col_name.

        This field cannot be set together with `join_condition`.
        """
    @property
    def join_data_type(self) -> global___Join.JoinDataType:
        """(Optional) Only used by joinWith. Set the left and right join data types."""
    def __init__(
        self,
        *,
        left: global___Relation | None = ...,
        right: global___Relation | None = ...,
        join_condition: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ...,
        join_type: global___Join.JoinType.ValueType = ...,
        using_columns: collections.abc.Iterable[builtins.str] | None = ...,
        join_data_type: global___Join.JoinDataType | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_join_data_type",
            b"_join_data_type",
            "join_condition",
            b"join_condition",
            "join_data_type",
            b"join_data_type",
            "left",
            b"left",
            "right",
            b"right",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_join_data_type",
            b"_join_data_type",
            "join_condition",
            b"join_condition",
            "join_data_type",
            b"join_data_type",
            "join_type",
            b"join_type",
            "left",
            b"left",
            "right",
            b"right",
            "using_columns",
            b"using_columns",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_join_data_type", b"_join_data_type"]
    ) -> typing_extensions.Literal["join_data_type"] | None: ...

global___Join = Join
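
# Illustrative usage (not part of the generated stub): a sketch of a
# USING-style join, where `using_columns` is set instead of `join_condition`
# and the optional `join_data_type` stays unset. Module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Join, Relation  # assumed

_join = Join(
    left=Relation(),
    right=Relation(),
    join_type=Join.JOIN_TYPE_INNER,
    using_columns=["col_name"],  # like A JOIN B USING col_name
)
assert not _join.HasField("join_condition")
assert _join.WhichOneof("_join_data_type") is None  # optional submessage unset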

class SetOperation(google.protobuf.message.Message):
    """Relation of type [[SetOperation]]"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _SetOpType:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _SetOpTypeEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
            SetOperation._SetOpType.ValueType
        ],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        SET_OP_TYPE_UNSPECIFIED: SetOperation._SetOpType.ValueType  # 0
        SET_OP_TYPE_INTERSECT: SetOperation._SetOpType.ValueType  # 1
        SET_OP_TYPE_UNION: SetOperation._SetOpType.ValueType  # 2
        SET_OP_TYPE_EXCEPT: SetOperation._SetOpType.ValueType  # 3

    class SetOpType(_SetOpType, metaclass=_SetOpTypeEnumTypeWrapper): ...
    SET_OP_TYPE_UNSPECIFIED: SetOperation.SetOpType.ValueType  # 0
    SET_OP_TYPE_INTERSECT: SetOperation.SetOpType.ValueType  # 1
    SET_OP_TYPE_UNION: SetOperation.SetOpType.ValueType  # 2
    SET_OP_TYPE_EXCEPT: SetOperation.SetOpType.ValueType  # 3

    LEFT_INPUT_FIELD_NUMBER: builtins.int
    RIGHT_INPUT_FIELD_NUMBER: builtins.int
    SET_OP_TYPE_FIELD_NUMBER: builtins.int
    IS_ALL_FIELD_NUMBER: builtins.int
    BY_NAME_FIELD_NUMBER: builtins.int
    ALLOW_MISSING_COLUMNS_FIELD_NUMBER: builtins.int
    @property
    def left_input(self) -> global___Relation:
        """(Required) Left input relation for a Set operation."""
    @property
    def right_input(self) -> global___Relation:
        """(Required) Right input relation for a Set operation."""
    set_op_type: global___SetOperation.SetOpType.ValueType
    """(Required) The Set operation type."""
    is_all: builtins.bool
    """(Optional) Whether to keep duplicate rows.

    True to preserve all results.
    False to remove duplicate rows.
    """
    by_name: builtins.bool
    """(Optional) Whether to perform the Set operation based on name resolution.

    Only UNION supports this option.
    """
    allow_missing_columns: builtins.bool
    """(Optional) Whether to perform the Set operation while allowing missing columns.

    Only UNION supports this option.
    """
    def __init__(
        self,
        *,
        left_input: global___Relation | None = ...,
        right_input: global___Relation | None = ...,
        set_op_type: global___SetOperation.SetOpType.ValueType = ...,
        is_all: builtins.bool | None = ...,
        by_name: builtins.bool | None = ...,
        allow_missing_columns: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_allow_missing_columns",
            b"_allow_missing_columns",
            "_by_name",
            b"_by_name",
            "_is_all",
            b"_is_all",
            "allow_missing_columns",
            b"allow_missing_columns",
            "by_name",
            b"by_name",
            "is_all",
            b"is_all",
            "left_input",
            b"left_input",
            "right_input",
            b"right_input",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_allow_missing_columns",
            b"_allow_missing_columns",
            "_by_name",
            b"_by_name",
            "_is_all",
            b"_is_all",
            "allow_missing_columns",
            b"allow_missing_columns",
            "by_name",
            b"by_name",
            "is_all",
            b"is_all",
            "left_input",
            b"left_input",
            "right_input",
            b"right_input",
            "set_op_type",
            b"set_op_type",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self,
        oneof_group: typing_extensions.Literal["_allow_missing_columns", b"_allow_missing_columns"],
    ) -> typing_extensions.Literal["allow_missing_columns"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_by_name", b"_by_name"]
    ) -> typing_extensions.Literal["by_name"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_is_all", b"_is_all"]
    ) -> typing_extensions.Literal["is_all"] | None: ...

global___SetOperation = SetOperation
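
# Illustrative usage (not part of the generated stub): the three optional
# bools carry explicit presence via synthetic oneofs (`_is_all`, `_by_name`,
# `_allow_missing_columns`), so an unset flag is distinguishable from False.
# A sketch; module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Relation, SetOperation  # assumed

_union = SetOperation(
    left_input=Relation(),
    right_input=Relation(),
    set_op_type=SetOperation.SET_OP_TYPE_UNION,
    is_all=True,  # UNION ALL semantics: preserve duplicate rows
)
assert _union.WhichOneof("_is_all") == "is_all"
assert _union.WhichOneof("_by_name") is None  # by_name was never set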

class Limit(google.protobuf.message.Message):
    """Relation of type [[Limit]] that is used to `limit` rows from the input relation."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    LIMIT_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a Limit."""
    limit: builtins.int
    """(Required) the limit."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        limit: builtins.int = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["input", b"input", "limit", b"limit"]
    ) -> None: ...

global___Limit = Limit
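
# Illustrative usage (not part of the generated stub): Limit, and the Offset
# and Tail messages below, share the same shape of an input relation plus one
# integer. A sketch; module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Limit, Relation  # assumed

_first_ten = Limit(input=Relation(), limit=10)
assert _first_ten.limit == 10
assert _first_ten.HasField("input")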

class Offset(google.protobuf.message.Message):
    """Relation of type [[Offset]] that is used to read rows starting from the `offset` on
    the input relation.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    OFFSET_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for an Offset."""
    offset: builtins.int
    """(Required) the offset."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        offset: builtins.int = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["input", b"input", "offset", b"offset"]
    ) -> None: ...

global___Offset = Offset

class Tail(google.protobuf.message.Message):
    """Relation of type [[Tail]] that is used to fetch `limit` rows from the end of the input relation."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    LIMIT_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a Tail."""
    limit: builtins.int
    """(Required) the limit."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        limit: builtins.int = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["input", b"input", "limit", b"limit"]
    ) -> None: ...

global___Tail = Tail

class Aggregate(google.protobuf.message.Message):
    """Relation of type [[Aggregate]]."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _GroupType:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _GroupTypeEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Aggregate._GroupType.ValueType],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        GROUP_TYPE_UNSPECIFIED: Aggregate._GroupType.ValueType  # 0
        GROUP_TYPE_GROUPBY: Aggregate._GroupType.ValueType  # 1
        GROUP_TYPE_ROLLUP: Aggregate._GroupType.ValueType  # 2
        GROUP_TYPE_CUBE: Aggregate._GroupType.ValueType  # 3
        GROUP_TYPE_PIVOT: Aggregate._GroupType.ValueType  # 4

    class GroupType(_GroupType, metaclass=_GroupTypeEnumTypeWrapper): ...
    GROUP_TYPE_UNSPECIFIED: Aggregate.GroupType.ValueType  # 0
    GROUP_TYPE_GROUPBY: Aggregate.GroupType.ValueType  # 1
    GROUP_TYPE_ROLLUP: Aggregate.GroupType.ValueType  # 2
    GROUP_TYPE_CUBE: Aggregate.GroupType.ValueType  # 3
    GROUP_TYPE_PIVOT: Aggregate.GroupType.ValueType  # 4

    class Pivot(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        COL_FIELD_NUMBER: builtins.int
        VALUES_FIELD_NUMBER: builtins.int
        @property
        def col(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression:
            """(Required) The column to pivot"""
        @property
        def values(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
        ]:
            """(Optional) List of values that will be translated to columns in the output DataFrame.

            Note that if it is empty, the server side will immediately trigger a job to collect
            the distinct values of the column.
            """
        def __init__(
            self,
            *,
            col: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ...,
            values: collections.abc.Iterable[
                pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
            ]
            | None = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["col", b"col"]
        ) -> builtins.bool: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["col", b"col", "values", b"values"]
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    GROUP_TYPE_FIELD_NUMBER: builtins.int
    GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    AGGREGATE_EXPRESSIONS_FIELD_NUMBER: builtins.int
    PIVOT_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a RelationalGroupedDataset."""
    group_type: global___Aggregate.GroupType.ValueType
    """(Required) How the RelationalGroupedDataset was built."""
    @property
    def grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) Expressions for grouping keys"""
    @property
    def aggregate_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) Aggregate expressions that will be translated to columns in the output DataFrame."""
    @property
    def pivot(self) -> global___Aggregate.Pivot:
        """(Optional) Pivots a column of the current `DataFrame` and performs the specified aggregation."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        group_type: global___Aggregate.GroupType.ValueType = ...,
        grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        aggregate_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        pivot: global___Aggregate.Pivot | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input", "pivot", b"pivot"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "aggregate_expressions",
            b"aggregate_expressions",
            "group_type",
            b"group_type",
            "grouping_expressions",
            b"grouping_expressions",
            "input",
            b"input",
            "pivot",
            b"pivot",
        ],
    ) -> None: ...

global___Aggregate = Aggregate
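
# Illustrative usage (not part of the generated stub): a sketch of a GROUP BY
# aggregate with one grouping key and one aggregate expression, both left as
# empty Expression() placeholders. Module path assumed as above.
from pyspark.sql.connect.proto.expressions_pb2 import Expression
from pyspark.sql.connect.proto.relations_pb2 import Aggregate, Relation  # assumed

_agg = Aggregate(
    input=Relation(),
    group_type=Aggregate.GROUP_TYPE_GROUPBY,
    grouping_expressions=[Expression()],
    aggregate_expressions=[Expression()],
)
assert _agg.group_type == Aggregate.GROUP_TYPE_GROUPBY
assert not _agg.HasField("pivot")  # pivot is only relevant for GROUP_TYPE_PIVOT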

class Sort(google.protobuf.message.Message):
    """Relation of type [[Sort]]."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    ORDER_FIELD_NUMBER: builtins.int
    IS_GLOBAL_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a Sort."""
    @property
    def order(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression.SortOrder
    ]:
        """(Required) The ordering expressions"""
    is_global: builtins.bool
    """(Optional) Whether this is a global sort."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        order: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression.SortOrder
        ]
        | None = ...,
        is_global: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_is_global", b"_is_global", "input", b"input", "is_global", b"is_global"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_is_global",
            b"_is_global",
            "input",
            b"input",
            "is_global",
            b"is_global",
            "order",
            b"order",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_is_global", b"_is_global"]
    ) -> typing_extensions.Literal["is_global"] | None: ...

global___Sort = Sort

class Drop(google.protobuf.message.Message):
    """Drop specified columns."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLUMNS_FIELD_NUMBER: builtins.int
    COLUMN_NAMES_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def columns(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) columns to drop."""
    @property
    def column_names(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) names of columns to drop."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        columns: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
        | None = ...,
        column_names: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "column_names", b"column_names", "columns", b"columns", "input", b"input"
        ],
    ) -> None: ...

global___Drop = Drop

class Deduplicate(google.protobuf.message.Message):
    """Relation of type [[Deduplicate]] which has duplicate rows removed; it can consider either
    only a subset of the columns or all of the columns.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLUMN_NAMES_FIELD_NUMBER: builtins.int
    ALL_COLUMNS_AS_KEYS_FIELD_NUMBER: builtins.int
    WITHIN_WATERMARK_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a Deduplicate."""
    @property
    def column_names(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Deduplicate based on a list of column names.

        This field cannot be used together with `all_columns_as_keys`.
        """
    all_columns_as_keys: builtins.bool
    """(Optional) Deduplicate based on all the columns of the input relation.

    This field cannot be used together with `column_names`.
    """
    within_watermark: builtins.bool
    """(Optional) Deduplicate within the time range of watermark."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        column_names: collections.abc.Iterable[builtins.str] | None = ...,
        all_columns_as_keys: builtins.bool | None = ...,
        within_watermark: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_all_columns_as_keys",
            b"_all_columns_as_keys",
            "_within_watermark",
            b"_within_watermark",
            "all_columns_as_keys",
            b"all_columns_as_keys",
            "input",
            b"input",
            "within_watermark",
            b"within_watermark",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_all_columns_as_keys",
            b"_all_columns_as_keys",
            "_within_watermark",
            b"_within_watermark",
            "all_columns_as_keys",
            b"all_columns_as_keys",
            "column_names",
            b"column_names",
            "input",
            b"input",
            "within_watermark",
            b"within_watermark",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self,
        oneof_group: typing_extensions.Literal["_all_columns_as_keys", b"_all_columns_as_keys"],
    ) -> typing_extensions.Literal["all_columns_as_keys"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_within_watermark", b"_within_watermark"]
    ) -> typing_extensions.Literal["within_watermark"] | None: ...

global___Deduplicate = Deduplicate
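
# Illustrative usage (not part of the generated stub): `column_names` and
# `all_columns_as_keys` are mutually exclusive ways to choose deduplication
# keys; here only the former is set. A sketch; module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Deduplicate, Relation  # assumed

_dedup = Deduplicate(input=Relation(), column_names=["id"])
assert list(_dedup.column_names) == ["id"]
assert _dedup.WhichOneof("_all_columns_as_keys") is None  # flag left unset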

class LocalRelation(google.protobuf.message.Message):
    """A relation that does not need to be qualified by name."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    DATA_FIELD_NUMBER: builtins.int
    SCHEMA_FIELD_NUMBER: builtins.int
    data: builtins.bytes
    """(Optional) Local collection data serialized into Arrow IPC streaming format which contains
    the schema of the data.
    """
    schema: builtins.str
    """(Optional) The schema of local data.
    It should be either a DDL-formatted type string or a JSON string.

    The server side will update the column names and data types according to this schema.
    If the 'data' is not provided, then this schema will be required.
    """
    def __init__(
        self,
        *,
        data: builtins.bytes | None = ...,
        schema: builtins.str | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_data", b"_data", "_schema", b"_schema", "data", b"data", "schema", b"schema"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_data", b"_data", "_schema", b"_schema", "data", b"data", "schema", b"schema"
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_data", b"_data"]
    ) -> typing_extensions.Literal["data"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_schema", b"_schema"]
    ) -> typing_extensions.Literal["schema"] | None: ...

global___LocalRelation = LocalRelation
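
# Illustrative usage (not part of the generated stub): when no Arrow `data`
# payload is supplied, a DDL schema string alone describes an empty local
# relation. A sketch; the schema text is made up and the module path assumed
# as above.
from pyspark.sql.connect.proto.relations_pb2 import LocalRelation  # assumed

_local = LocalRelation(schema="id INT, name STRING")  # hypothetical schema
assert _local.WhichOneof("_schema") == "schema"
assert not _local.HasField("data")  # no Arrow IPC payload attached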

class CachedLocalRelation(google.protobuf.message.Message):
    """A local relation that has been cached already."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    HASH_FIELD_NUMBER: builtins.int
    hash: builtins.str
    """(Required) A SHA-256 hash of the serialized local relation in proto, see LocalRelation."""
    def __init__(
        self,
        *,
        hash: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["hash", b"hash"]) -> None: ...

global___CachedLocalRelation = CachedLocalRelation

class CachedRemoteRelation(google.protobuf.message.Message):
    """Represents a remote relation that has been cached on the server."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RELATION_ID_FIELD_NUMBER: builtins.int
    relation_id: builtins.str
    """(Required) ID of the remote relation (assigned by the service)."""
    def __init__(
        self,
        *,
        relation_id: builtins.str = ...,
    ) -> None: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["relation_id", b"relation_id"]
    ) -> None: ...

global___CachedRemoteRelation = CachedRemoteRelation

class Sample(google.protobuf.message.Message):
    """Relation of type [[Sample]] that samples a fraction of the dataset."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    LOWER_BOUND_FIELD_NUMBER: builtins.int
    UPPER_BOUND_FIELD_NUMBER: builtins.int
    WITH_REPLACEMENT_FIELD_NUMBER: builtins.int
    SEED_FIELD_NUMBER: builtins.int
    DETERMINISTIC_ORDER_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a Sample."""
    lower_bound: builtins.float
    """(Required) lower bound."""
    upper_bound: builtins.float
    """(Required) upper bound."""
    with_replacement: builtins.bool
    """(Optional) Whether to sample with replacement."""
    seed: builtins.int
    """(Optional) The random seed."""
    deterministic_order: builtins.bool
    """(Required) Explicitly sort the underlying plan to make the ordering deterministic or cache it.
    This flag is true when invoking `dataframe.randomSplit` to randomly split the DataFrame with
    the provided weights. Otherwise, it is false.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        lower_bound: builtins.float = ...,
        upper_bound: builtins.float = ...,
        with_replacement: builtins.bool | None = ...,
        seed: builtins.int | None = ...,
        deterministic_order: builtins.bool = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_seed",
            b"_seed",
            "_with_replacement",
            b"_with_replacement",
            "input",
            b"input",
            "seed",
            b"seed",
            "with_replacement",
            b"with_replacement",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_seed",
            b"_seed",
            "_with_replacement",
            b"_with_replacement",
            "deterministic_order",
            b"deterministic_order",
            "input",
            b"input",
            "lower_bound",
            b"lower_bound",
            "seed",
            b"seed",
            "upper_bound",
            b"upper_bound",
            "with_replacement",
            b"with_replacement",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_seed", b"_seed"]
    ) -> typing_extensions.Literal["seed"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_with_replacement", b"_with_replacement"]
    ) -> typing_extensions.Literal["with_replacement"] | None: ...

global___Sample = Sample
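
# Illustrative usage (not part of the generated stub): a sketch of a 10%
# sample with a fixed seed; clearing the optional `seed` removes its presence
# again. Module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Relation, Sample  # assumed

_sample = Sample(input=Relation(), lower_bound=0.0, upper_bound=0.1, seed=42)
assert _sample.WhichOneof("_seed") == "seed"
_sample.ClearField("seed")
assert _sample.WhichOneof("_seed") is None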

class Range(google.protobuf.message.Message):
    """Relation of type [[Range]] that generates a sequence of integers."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    START_FIELD_NUMBER: builtins.int
    END_FIELD_NUMBER: builtins.int
    STEP_FIELD_NUMBER: builtins.int
    NUM_PARTITIONS_FIELD_NUMBER: builtins.int
    start: builtins.int
    """(Optional) Default value = 0"""
    end: builtins.int
    """(Required)"""
    step: builtins.int
    """(Required)"""
    num_partitions: builtins.int
    """(Optional) The default value is taken from 1) the SQL conf "spark.sql.leafNodeDefaultParallelism"
    if it is set, or 2) the Spark default parallelism.
    """
    def __init__(
        self,
        *,
        start: builtins.int | None = ...,
        end: builtins.int = ...,
        step: builtins.int = ...,
        num_partitions: builtins.int | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_num_partitions",
            b"_num_partitions",
            "_start",
            b"_start",
            "num_partitions",
            b"num_partitions",
            "start",
            b"start",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_num_partitions",
            b"_num_partitions",
            "_start",
            b"_start",
            "end",
            b"end",
            "num_partitions",
            b"num_partitions",
            "start",
            b"start",
            "step",
            b"step",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_num_partitions", b"_num_partitions"]
    ) -> typing_extensions.Literal["num_partitions"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_start", b"_start"]
    ) -> typing_extensions.Literal["start"] | None: ...

global___Range = Range
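
# Illustrative usage (not part of the generated stub): `start` and
# `num_partitions` are optional with server-side defaults (0 and the default
# parallelism), so only `end` and `step` are set in this sketch. Module path
# assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Range  # assumed

_range = Range(end=10, step=1)  # 0, 1, ..., 9 with the default start
assert not _range.HasField("start")
assert not _range.HasField("num_partitions")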

class SubqueryAlias(google.protobuf.message.Message):
    """Relation alias."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    ALIAS_FIELD_NUMBER: builtins.int
    QUALIFIER_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation of SubqueryAlias."""
    alias: builtins.str
    """(Required) The alias."""
    @property
    def qualifier(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Qualifier of the alias."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        alias: builtins.str = ...,
        qualifier: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "alias", b"alias", "input", b"input", "qualifier", b"qualifier"
        ],
    ) -> None: ...

global___SubqueryAlias = SubqueryAlias

class Repartition(google.protobuf.message.Message):
    """Relation repartition."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    NUM_PARTITIONS_FIELD_NUMBER: builtins.int
    SHUFFLE_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation of Repartition."""
    num_partitions: builtins.int
    """(Required) Must be positive."""
    shuffle: builtins.bool
    """(Optional) Default value is false."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        num_partitions: builtins.int = ...,
        shuffle: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_shuffle", b"_shuffle", "input", b"input", "shuffle", b"shuffle"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_shuffle",
            b"_shuffle",
            "input",
            b"input",
            "num_partitions",
            b"num_partitions",
            "shuffle",
            b"shuffle",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_shuffle", b"_shuffle"]
    ) -> typing_extensions.Literal["shuffle"] | None: ...

global___Repartition = Repartition
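
# Illustrative usage (not part of the generated stub): a sketch repartitioning
# to 4 partitions with an explicit shuffle; `shuffle` is optional, so its
# presence is observable. Module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Relation, Repartition  # assumed

_repart = Repartition(input=Relation(), num_partitions=4, shuffle=True)
assert _repart.num_partitions == 4
assert _repart.WhichOneof("_shuffle") == "shuffle"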

class ShowString(google.protobuf.message.Message):
    """Compose the string representing rows for output.
    It will invoke 'Dataset.showString' to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    NUM_ROWS_FIELD_NUMBER: builtins.int
    TRUNCATE_FIELD_NUMBER: builtins.int
    VERTICAL_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    num_rows: builtins.int
    """(Required) Number of rows to show."""
    truncate: builtins.int
    """(Required) If set to more than 0, truncates strings to
    `truncate` characters, and all cells will be right-aligned.
    """
    vertical: builtins.bool
    """(Required) If set to true, prints output rows vertically (one line per column value)."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        num_rows: builtins.int = ...,
        truncate: builtins.int = ...,
        vertical: builtins.bool = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "input",
            b"input",
            "num_rows",
            b"num_rows",
            "truncate",
            b"truncate",
            "vertical",
            b"vertical",
        ],
    ) -> None: ...

global___ShowString = ShowString

class HtmlString(google.protobuf.message.Message):
    """Compose the string representing rows for output.
    It will invoke 'Dataset.htmlString' to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    NUM_ROWS_FIELD_NUMBER: builtins.int
    TRUNCATE_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    num_rows: builtins.int
    """(Required) Number of rows to show."""
    truncate: builtins.int
    """(Required) If set to more than 0, truncates strings to
    `truncate` characters, and all cells will be right-aligned.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        num_rows: builtins.int = ...,
        truncate: builtins.int = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "input", b"input", "num_rows", b"num_rows", "truncate", b"truncate"
        ],
    ) -> None: ...

global___HtmlString = HtmlString

class StatSummary(google.protobuf.message.Message):
    """Computes specified statistics for numeric and string columns.
    It will invoke 'Dataset.summary' (same as 'StatFunctions.summary')
    to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    STATISTICS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def statistics(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Statistics to be computed.

        Available statistics are:
        count
        mean
        stddev
        min
        max
        arbitrary approximate percentiles specified as a percentage (e.g. 75%)
        count_distinct
        approx_count_distinct

        If no statistics are given, this function computes 'count', 'mean', 'stddev', 'min',
        'approximate quartiles' (percentiles at 25%, 50%, and 75%), and 'max'.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        statistics: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["input", b"input", "statistics", b"statistics"]
    ) -> None: ...

global___StatSummary = StatSummary

class StatDescribe(google.protobuf.message.Message):
    """Computes basic statistics for numeric and string columns, including count, mean, stddev, min,
    and max. If no columns are given, this function computes statistics for all numerical or
    string columns.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Columns to compute statistics on."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["cols", b"cols", "input", b"input"]
    ) -> None: ...

global___StatDescribe = StatDescribe

class StatCrosstab(google.protobuf.message.Message):
    """Computes a pair-wise frequency table of the given columns. Also known as a contingency table.
    It will invoke 'Dataset.stat.crosstab' (same as 'StatFunctions.crossTabulate')
    to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COL1_FIELD_NUMBER: builtins.int
    COL2_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    col1: builtins.str
    """(Required) The name of the first column.

    Distinct items will make up the first item of each row.
    """
    col2: builtins.str
    """(Required) The name of the second column.

    Distinct items will make up the column names of the DataFrame.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        col1: builtins.str = ...,
        col2: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal["col1", b"col1", "col2", b"col2", "input", b"input"],
    ) -> None: ...

global___StatCrosstab = StatCrosstab

class StatCov(google.protobuf.message.Message):
    """Calculate the sample covariance of two numerical columns of a DataFrame.
    It will invoke 'Dataset.stat.cov' (same as 'StatFunctions.calculateCov') to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COL1_FIELD_NUMBER: builtins.int
    COL2_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    col1: builtins.str
    """(Required) The name of the first column."""
    col2: builtins.str
    """(Required) The name of the second column."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        col1: builtins.str = ...,
        col2: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal["col1", b"col1", "col2", b"col2", "input", b"input"],
    ) -> None: ...

global___StatCov = StatCov

class StatCorr(google.protobuf.message.Message):
    """Calculates the correlation of two columns of a DataFrame. Currently only supports the Pearson
    Correlation Coefficient. It will invoke 'Dataset.stat.corr' (same as
    'StatFunctions.pearsonCorrelation') to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COL1_FIELD_NUMBER: builtins.int
    COL2_FIELD_NUMBER: builtins.int
    METHOD_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    col1: builtins.str
    """(Required) The name of the first column."""
    col2: builtins.str
    """(Required) The name of the second column."""
    method: builtins.str
    """(Optional) Default value is 'pearson'.

    Currently only supports the Pearson Correlation Coefficient.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        col1: builtins.str = ...,
        col2: builtins.str = ...,
        method: builtins.str | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_method", b"_method", "input", b"input", "method", b"method"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_method",
            b"_method",
            "col1",
            b"col1",
            "col2",
            b"col2",
            "input",
            b"input",
            "method",
            b"method",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_method", b"_method"]
    ) -> typing_extensions.Literal["method"] | None: ...

global___StatCorr = StatCorr
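
# Illustrative usage (not part of the generated stub): `method` is optional
# and the server falls back to 'pearson' when it is unset, so this sketch
# leaves it unset. Column names are made up; module path assumed as above.
from pyspark.sql.connect.proto.relations_pb2 import Relation, StatCorr  # assumed

_corr = StatCorr(input=Relation(), col1="price", col2="quantity")
assert not _corr.HasField("method")  # server default: 'pearson'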

class StatApproxQuantile(google.protobuf.message.Message):
    """Calculates the approximate quantiles of numerical columns of a DataFrame.
    It will invoke 'Dataset.stat.approxQuantile' (same as 'StatFunctions.approxQuantile')
    to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    PROBABILITIES_FIELD_NUMBER: builtins.int
    RELATIVE_ERROR_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Required) The names of the numerical columns."""
    @property
    def probabilities(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
        """(Required) A list of quantile probabilities.

        Each number must belong to [0, 1].
        For example, 0 is the minimum, 0.5 is the median, and 1 is the maximum.
        """
    relative_error: builtins.float
    """(Required) The relative target precision to achieve (greater than or equal to 0).

    If set to zero, the exact quantiles are computed, which could be very expensive.
    Note that values greater than 1 are accepted but give the same result as 1.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
        probabilities: collections.abc.Iterable[builtins.float] | None = ...,
        relative_error: builtins.float = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "cols",
            b"cols",
            "input",
            b"input",
            "probabilities",
            b"probabilities",
            "relative_error",
            b"relative_error",
        ],
    ) -> None: ...

global___StatApproxQuantile = StatApproxQuantile

class StatFreqItems(google.protobuf.message.Message):
    """Finding frequent items for columns, possibly with false positives.
    It will invoke 'Dataset.stat.freqItems' (same as 'StatFunctions.freqItems')
    to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    SUPPORT_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Required) The names of the columns to search frequent items in."""
    support: builtins.float
    """(Optional) The minimum frequency for an item to be considered `frequent`.
    Should be greater than 1e-4.
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
        support: builtins.float | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_support", b"_support", "input", b"input", "support", b"support"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_support", b"_support", "cols", b"cols", "input", b"input", "support", b"support"
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_support", b"_support"]
    ) -> typing_extensions.Literal["support"] | None: ...

global___StatFreqItems = StatFreqItems
|
|
2327
|
+
|
|
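A minimal sketch of the corresponding client call, reusing `spark` from the sketch above; the column names and the 0.01 support threshold are illustrative:

    # Sketch only: DataFrame.freqItems maps onto cols/support above.
    pairs = spark.createDataFrame([(1, 11), (1, 11), (2, 22)], ["a", "b"])
    freq = pairs.freqItems(["a", "b"], support=0.01)
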
class StatSampleBy(google.protobuf.message.Message):
    """Returns a stratified sample without replacement based on the fraction
    given on each stratum.
    It will invoke 'Dataset.stat.sampleBy' to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Fraction(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        STRATUM_FIELD_NUMBER: builtins.int
        FRACTION_FIELD_NUMBER: builtins.int
        @property
        def stratum(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal:
            """(Required) The stratum."""
        fraction: builtins.float
        """(Required) The fraction value. Must be in [0, 1]."""
        def __init__(
            self,
            *,
            stratum: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ...,
            fraction: builtins.float = ...,
        ) -> None: ...
        def HasField(
            self, field_name: typing_extensions.Literal["stratum", b"stratum"]
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal["fraction", b"fraction", "stratum", b"stratum"],
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    COL_FIELD_NUMBER: builtins.int
    FRACTIONS_FIELD_NUMBER: builtins.int
    SEED_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def col(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression:
        """(Required) The column that defines strata."""
    @property
    def fractions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___StatSampleBy.Fraction
    ]:
        """(Required) Sampling fraction for each stratum.

        If a stratum is not specified, we treat its fraction as zero.
        """
    seed: builtins.int
    """(Optional) The random seed."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        col: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ...,
        fractions: collections.abc.Iterable[global___StatSampleBy.Fraction] | None = ...,
        seed: builtins.int | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_seed", b"_seed", "col", b"col", "input", b"input", "seed", b"seed"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_seed",
            b"_seed",
            "col",
            b"col",
            "fractions",
            b"fractions",
            "input",
            b"input",
            "seed",
            b"seed",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_seed", b"_seed"]
    ) -> typing_extensions.Literal["seed"] | None: ...

global___StatSampleBy = StatSampleBy

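A minimal sketch of the client call, reusing `spark` from the first sketch; the strata keys and fractions are illustrative. A stratum missing from the map is sampled with fraction zero, matching the fractions docstring above:

    # Sketch only: DataFrame.sampleBy fills col, fractions, and seed.
    events = spark.createDataFrame([(0, "x"), (1, "y"), (1, "z")], ["key", "v"])
    sampled = events.sampleBy("key", fractions={0: 0.1, 1: 0.2}, seed=42)
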
class NAFill(google.protobuf.message.Message):
    """Replaces null values.
    It will invoke 'Dataset.na.fill' (same as 'DataFrameNaFunctions.fill') to compute the results.
    The following 3 parameter combinations are supported:
    1. 'values' contains only 1 item and 'cols' is empty:
       replaces null values in all type-compatible columns.
    2. 'values' contains only 1 item and 'cols' is not empty:
       replaces null values in the specified columns.
    3. 'values' contains more than 1 item; 'cols' is then required to have the same length:
       replaces each specified column with the corresponding value.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    VALUES_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Optional list of column names to consider."""
    @property
    def values(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
    ]:
        """(Required) Values to replace null values with.

        Should contain at least 1 item.
        Only 4 data types are supported now: bool, long, double, string.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
        values: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression.Literal
        ]
        | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "cols", b"cols", "input", b"input", "values", b"values"
        ],
    ) -> None: ...

global___NAFill = NAFill

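The three parameter combinations in the docstring correspond to the forms of the client call, sketched below with `spark` from the first sketch; the data and fill values are illustrative:

    # Sketch only: each call yields a NAFill with a different cols/values shape.
    people = spark.createDataFrame([(None, "a"), (2, None)], "age int, name string")
    people.na.fill(0)                              # 1 value, no cols: all compatible columns
    people.na.fill(0, ["age"])                     # 1 value, explicit cols
    people.na.fill({"age": 0, "name": "unknown"})  # n values paired with n cols
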
class NADrop(google.protobuf.message.Message):
    """Drop rows containing null values.
    It will invoke 'Dataset.na.drop' (same as 'DataFrameNaFunctions.drop') to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    MIN_NON_NULLS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) Optional list of column names to consider.

        When it is empty, all the columns in the input relation will be considered.
        """
    min_non_nulls: builtins.int
    """(Optional) The minimum number of non-null and non-NaN values required to keep.

    When not set, it is equivalent to the number of considered columns, which means
    a row will be kept only if all columns are non-null.

    'how' options ('all', 'any') can be easily converted to this field:
    - 'all' -> set 'min_non_nulls' to 1;
    - 'any' -> keep 'min_non_nulls' unset;
    """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
        min_non_nulls: builtins.int | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_min_non_nulls",
            b"_min_non_nulls",
            "input",
            b"input",
            "min_non_nulls",
            b"min_non_nulls",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_min_non_nulls",
            b"_min_non_nulls",
            "cols",
            b"cols",
            "input",
            b"input",
            "min_non_nulls",
            b"min_non_nulls",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_min_non_nulls", b"_min_non_nulls"]
    ) -> typing_extensions.Literal["min_non_nulls"] | None: ...

global___NADrop = NADrop

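A minimal sketch of how the client-side 'how' and 'thresh' arguments presumably map onto min_non_nulls, reusing `people` from the NAFill sketch:

    # Sketch only: 'any' leaves min_non_nulls unset; 'all' sets it to 1;
    # thresh sets it directly.
    people.na.drop(how="any")
    people.na.drop(how="all")
    people.na.drop(thresh=1, subset=["age", "name"])
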
class NAReplace(google.protobuf.message.Message):
    """Replaces old values with the corresponding values.
    It will invoke 'Dataset.na.replace' (same as 'DataFrameNaFunctions.replace')
    to compute the results.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Replacement(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        OLD_VALUE_FIELD_NUMBER: builtins.int
        NEW_VALUE_FIELD_NUMBER: builtins.int
        @property
        def old_value(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal:
            """(Required) The old value.

            Only 4 data types are supported now: null, bool, double, string.
            """
        @property
        def new_value(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression.Literal:
            """(Required) The new value.

            Should be of the same data type as the old value.
            """
        def __init__(
            self,
            *,
            old_value: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ...,
            new_value: pyspark.sql.connect.proto.expressions_pb2.Expression.Literal | None = ...,
        ) -> None: ...
        def HasField(
            self,
            field_name: typing_extensions.Literal[
                "new_value", b"new_value", "old_value", b"old_value"
            ],
        ) -> builtins.bool: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "new_value", b"new_value", "old_value", b"old_value"
            ],
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    COLS_FIELD_NUMBER: builtins.int
    REPLACEMENTS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def cols(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Optional) List of column names to consider.

        When it is empty, all the type-compatible columns in the input relation will be considered.
        """
    @property
    def replacements(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___NAReplace.Replacement
    ]:
        """(Optional) The value replacement mapping."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        cols: collections.abc.Iterable[builtins.str] | None = ...,
        replacements: collections.abc.Iterable[global___NAReplace.Replacement] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "cols", b"cols", "input", b"input", "replacements", b"replacements"
        ],
    ) -> None: ...

global___NAReplace = NAReplace

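A minimal sketch of the client call, reusing `people` from the NAFill sketch; the old/new values are illustrative, and each (old, new) pair presumably becomes one Replacement entry:

    # Sketch only: replace -1 with 0 in the "age" column.
    people.na.replace(-1, 0, subset=["age"])
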
class ToDF(google.protobuf.message.Message):
    """Rename the columns of the input relation with a list of names of the same length."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    COLUMN_NAMES_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation of RenameColumnsBySameLengthNames."""
    @property
    def column_names(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """(Required)

        The number of columns of the input relation must be equal to the length
        of this field. If this is not true, an exception will be returned.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        column_names: collections.abc.Iterable[builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal["column_names", b"column_names", "input", b"input"],
    ) -> None: ...

global___ToDF = ToDF

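A minimal sketch, reusing `people` (two columns) from the NAFill sketch; the new names must match the column count, per the docstring above:

    # Sketch only: DataFrame.toDF fills column_names.
    renamed = people.toDF("years", "full_name")
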
class WithColumnsRenamed(google.protobuf.message.Message):
    """Rename columns on the input relation by a map with name to name mapping."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class RenameColumnsMapEntry(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_FIELD_NUMBER: builtins.int
        VALUE_FIELD_NUMBER: builtins.int
        key: builtins.str
        value: builtins.str
        def __init__(
            self,
            *,
            key: builtins.str = ...,
            value: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    RENAME_COLUMNS_MAP_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def rename_columns_map(
        self,
    ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
        """(Required)

        Renaming column names of the input relation from A to B, where A is the map key
        and B is the map value. This is a no-op if the schema doesn't contain any A. It
        does not require all input relation column names to be present as keys.
        Duplicated values of B are not allowed.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        rename_columns_map: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "input", b"input", "rename_columns_map", b"rename_columns_map"
        ],
    ) -> None: ...

global___WithColumnsRenamed = WithColumnsRenamed

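A minimal sketch of the client call (assuming a Spark version where the dict form of withColumnsRenamed is available), reusing `people` from the NAFill sketch; renaming a key absent from the schema is a no-op, per the docstring:

    # Sketch only: the dict becomes rename_columns_map.
    out = people.withColumnsRenamed({"age": "years", "missing": "ignored"})
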
class WithColumns(google.protobuf.message.Message):
    """Adding columns or replacing the existing columns that have the same names."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    ALIASES_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def aliases(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression.Alias
    ]:
        """(Required)

        Given a column name, apply the corresponding expression on the column. If the
        column name exists in the input relation, the column is replaced; if it does
        not, it is added as a new column.

        Only one name part is expected from each Expression.Alias.

        An exception is thrown when duplicated names are present in the mapping.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        aliases: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression.Alias
        ]
        | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["aliases", b"aliases", "input", b"input"]
    ) -> None: ...

global___WithColumns = WithColumns

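A minimal sketch, reusing `people` from the NAFill sketch; each dict entry presumably becomes one single-name Expression.Alias:

    # Sketch only: existing names are replaced, new names are appended.
    from pyspark.sql.functions import col, lit

    out = people.withColumns({"age": col("age") + 1, "source": lit("csv")})
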
class WithWatermark(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    EVENT_TIME_FIELD_NUMBER: builtins.int
    DELAY_THRESHOLD_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation"""
    event_time: builtins.str
    """(Required) Name of the column containing event time."""
    delay_threshold: builtins.str
    """(Required)"""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        event_time: builtins.str = ...,
        delay_threshold: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "delay_threshold", b"delay_threshold", "event_time", b"event_time", "input", b"input"
        ],
    ) -> None: ...

global___WithWatermark = WithWatermark

class Hint(google.protobuf.message.Message):
    """Specify a hint over a relation. Hint should have a name and optional parameters."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    NAME_FIELD_NUMBER: builtins.int
    PARAMETERS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    name: builtins.str
    """(Required) Hint name.

    Supported Join hints include BROADCAST, MERGE, SHUFFLE_HASH, SHUFFLE_REPLICATE_NL.

    Supported partitioning hints include COALESCE, REPARTITION, REPARTITION_BY_RANGE.
    """
    @property
    def parameters(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Hint parameters."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        name: builtins.str = ...,
        parameters: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
        | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "input", b"input", "name", b"name", "parameters", b"parameters"
        ],
    ) -> None: ...

global___Hint = Hint

class Unpivot(google.protobuf.message.Message):
    """Unpivot a DataFrame from wide format to long format, optionally leaving identifier columns set."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class Values(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        VALUES_FIELD_NUMBER: builtins.int
        @property
        def values(
            self,
        ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]: ...
        def __init__(
            self,
            *,
            values: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
            | None = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["values", b"values"]
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    IDS_FIELD_NUMBER: builtins.int
    VALUES_FIELD_NUMBER: builtins.int
    VARIABLE_COLUMN_NAME_FIELD_NUMBER: builtins.int
    VALUE_COLUMN_NAME_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def ids(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) Id columns."""
    @property
    def values(self) -> global___Unpivot.Values:
        """(Optional) Value columns to unpivot."""
    variable_column_name: builtins.str
    """(Required) Name of the variable column."""
    value_column_name: builtins.str
    """(Required) Name of the value column."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        ids: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
        | None = ...,
        values: global___Unpivot.Values | None = ...,
        variable_column_name: builtins.str = ...,
        value_column_name: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_values", b"_values", "input", b"input", "values", b"values"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_values",
            b"_values",
            "ids",
            b"ids",
            "input",
            b"input",
            "value_column_name",
            b"value_column_name",
            "values",
            b"values",
            "variable_column_name",
            b"variable_column_name",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_values", b"_values"]
    ) -> typing_extensions.Literal["values"] | None: ...

global___Unpivot = Unpivot

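A minimal sketch of the client call (assuming a Spark version where DataFrame.unpivot is available), reusing `spark` from the first sketch; the ids/values and the two output column names are illustrative:

    # Sketch only: unpivot fills ids, values, variable_column_name,
    # and value_column_name.
    sales = spark.createDataFrame([(1, 10, 20)], ["id", "q1", "q2"])
    long_df = sales.unpivot(["id"], ["q1", "q2"], "quarter", "amount")
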
class ToSchema(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    SCHEMA_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
        """(Required) The user provided schema.

        The server side will update the dataframe with this schema.
        """
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input", "schema", b"schema"]
    ) -> builtins.bool: ...
    def ClearField(
        self, field_name: typing_extensions.Literal["input", b"input", "schema", b"schema"]
    ) -> None: ...

global___ToSchema = ToSchema

class RepartitionByExpression(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    PARTITION_EXPRS_FIELD_NUMBER: builtins.int
    NUM_PARTITIONS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    @property
    def partition_exprs(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) The partitioning expressions."""
    num_partitions: builtins.int
    """(Optional) number of partitions, must be positive."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        partition_exprs: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        num_partitions: builtins.int | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_num_partitions",
            b"_num_partitions",
            "input",
            b"input",
            "num_partitions",
            b"num_partitions",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_num_partitions",
            b"_num_partitions",
            "input",
            b"input",
            "num_partitions",
            b"num_partitions",
            "partition_exprs",
            b"partition_exprs",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_num_partitions", b"_num_partitions"]
    ) -> typing_extensions.Literal["num_partitions"] | None: ...

global___RepartitionByExpression = RepartitionByExpression

class MapPartitions(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    FUNC_FIELD_NUMBER: builtins.int
    IS_BARRIER_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for a mapPartitions-equivalent API: mapInPandas, mapInArrow."""
    @property
    def func(self) -> pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction:
        """(Required) Input user-defined function."""
    is_barrier: builtins.bool
    """(Optional) Whether to use barrier mode execution or not."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        func: pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction
        | None = ...,
        is_barrier: builtins.bool | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_is_barrier",
            b"_is_barrier",
            "func",
            b"func",
            "input",
            b"input",
            "is_barrier",
            b"is_barrier",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_is_barrier",
            b"_is_barrier",
            "func",
            b"func",
            "input",
            b"input",
            "is_barrier",
            b"is_barrier",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_is_barrier", b"_is_barrier"]
    ) -> typing_extensions.Literal["is_barrier"] | None: ...

global___MapPartitions = MapPartitions

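A minimal sketch of one mapPartitions-equivalent client call, reusing `df` from the first sketch; the function body and output schema are illustrative:

    # Sketch only: mapInPandas wraps the function into func and sets the input.
    def add_one(batches):
        for pdf in batches:  # each batch is a pandas.DataFrame
            yield pdf.assign(x=pdf["x"] + 1)

    bumped = df.mapInPandas(add_one, "x double")
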
class GroupMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    FUNC_FIELD_NUMBER: builtins.int
    SORTING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    INITIAL_INPUT_FIELD_NUMBER: builtins.int
    INITIAL_GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    IS_MAP_GROUPS_WITH_STATE_FIELD_NUMBER: builtins.int
    OUTPUT_MODE_FIELD_NUMBER: builtins.int
    TIMEOUT_CONF_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for Group Map API: apply, applyInPandas."""
    @property
    def grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) Expressions for grouping keys."""
    @property
    def func(self) -> pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction:
        """(Required) Input user-defined function."""
    @property
    def sorting_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Expressions for sorting. Only used by Scala Sorted Group Map API."""
    @property
    def initial_input(self) -> global___Relation:
        """The fields below are only used by (Flat)MapGroupsWithState.
        (Optional) Input relation for the initial state.
        """
    @property
    def initial_grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Expressions for grouping keys of the initial state input relation."""
    is_map_groups_with_state: builtins.bool
    """(Optional) True if MapGroupsWithState, false if FlatMapGroupsWithState."""
    output_mode: builtins.str
    """(Optional) The output mode of the function."""
    timeout_conf: builtins.str
    """(Optional) Timeout configuration for groups that do not receive data for a while."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        func: pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction
        | None = ...,
        sorting_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        initial_input: global___Relation | None = ...,
        initial_grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        is_map_groups_with_state: builtins.bool | None = ...,
        output_mode: builtins.str | None = ...,
        timeout_conf: builtins.str | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_is_map_groups_with_state",
            b"_is_map_groups_with_state",
            "_output_mode",
            b"_output_mode",
            "_timeout_conf",
            b"_timeout_conf",
            "func",
            b"func",
            "initial_input",
            b"initial_input",
            "input",
            b"input",
            "is_map_groups_with_state",
            b"is_map_groups_with_state",
            "output_mode",
            b"output_mode",
            "timeout_conf",
            b"timeout_conf",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_is_map_groups_with_state",
            b"_is_map_groups_with_state",
            "_output_mode",
            b"_output_mode",
            "_timeout_conf",
            b"_timeout_conf",
            "func",
            b"func",
            "grouping_expressions",
            b"grouping_expressions",
            "initial_grouping_expressions",
            b"initial_grouping_expressions",
            "initial_input",
            b"initial_input",
            "input",
            b"input",
            "is_map_groups_with_state",
            b"is_map_groups_with_state",
            "output_mode",
            b"output_mode",
            "sorting_expressions",
            b"sorting_expressions",
            "timeout_conf",
            b"timeout_conf",
        ],
    ) -> None: ...
    @typing.overload
    def WhichOneof(
        self,
        oneof_group: typing_extensions.Literal[
            "_is_map_groups_with_state", b"_is_map_groups_with_state"
        ],
    ) -> typing_extensions.Literal["is_map_groups_with_state"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_output_mode", b"_output_mode"]
    ) -> typing_extensions.Literal["output_mode"] | None: ...
    @typing.overload
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_timeout_conf", b"_timeout_conf"]
    ) -> typing_extensions.Literal["timeout_conf"] | None: ...

global___GroupMap = GroupMap

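A minimal sketch of the plain Group Map path (no initial state), reusing `spark` from the first sketch; the grouping column, function body, and output schema are illustrative:

    # Sketch only: groupBy(...).applyInPandas fills grouping_expressions and func.
    nums = spark.createDataFrame([(0, 1.0), (0, 2.0), (1, 3.0)], ["key", "v"])

    def center(pdf):
        return pdf.assign(v=pdf["v"] - pdf["v"].mean())

    centered = nums.groupBy("key").applyInPandas(center, "key long, v double")
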
class CoGroupMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    INPUT_GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    OTHER_FIELD_NUMBER: builtins.int
    OTHER_GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    FUNC_FIELD_NUMBER: builtins.int
    INPUT_SORTING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    OTHER_SORTING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) One input relation for CoGroup Map API - applyInPandas."""
    @property
    def input_grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """Expressions for grouping keys of the first input relation."""
    @property
    def other(self) -> global___Relation:
        """(Required) The other input relation."""
    @property
    def other_grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """Expressions for grouping keys of the other input relation."""
    @property
    def func(self) -> pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction:
        """(Required) Input user-defined function."""
    @property
    def input_sorting_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Expressions for sorting. Only used by Scala Sorted CoGroup Map API."""
    @property
    def other_sorting_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Expressions for sorting. Only used by Scala Sorted CoGroup Map API."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        input_grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        other: global___Relation | None = ...,
        other_grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        func: pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction
        | None = ...,
        input_sorting_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        other_sorting_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "func", b"func", "input", b"input", "other", b"other"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "func",
            b"func",
            "input",
            b"input",
            "input_grouping_expressions",
            b"input_grouping_expressions",
            "input_sorting_expressions",
            b"input_sorting_expressions",
            "other",
            b"other",
            "other_grouping_expressions",
            b"other_grouping_expressions",
            "other_sorting_expressions",
            b"other_sorting_expressions",
        ],
    ) -> None: ...

global___CoGroupMap = CoGroupMap

class ApplyInPandasWithState(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int
    FUNC_FIELD_NUMBER: builtins.int
    OUTPUT_SCHEMA_FIELD_NUMBER: builtins.int
    STATE_SCHEMA_FIELD_NUMBER: builtins.int
    OUTPUT_MODE_FIELD_NUMBER: builtins.int
    TIMEOUT_CONF_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation for applyInPandasWithState."""
    @property
    def grouping_expressions(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) Expressions for grouping keys."""
    @property
    def func(self) -> pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction:
        """(Required) Input user-defined function."""
    output_schema: builtins.str
    """(Required) Schema for the output DataFrame."""
    state_schema: builtins.str
    """(Required) Schema for the state."""
    output_mode: builtins.str
    """(Required) The output mode of the function."""
    timeout_conf: builtins.str
    """(Required) Timeout configuration for groups that do not receive data for a while."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        grouping_expressions: collections.abc.Iterable[
            pyspark.sql.connect.proto.expressions_pb2.Expression
        ]
        | None = ...,
        func: pyspark.sql.connect.proto.expressions_pb2.CommonInlineUserDefinedFunction
        | None = ...,
        output_schema: builtins.str = ...,
        state_schema: builtins.str = ...,
        output_mode: builtins.str = ...,
        timeout_conf: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["func", b"func", "input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "func",
            b"func",
            "grouping_expressions",
            b"grouping_expressions",
            "input",
            b"input",
            "output_mode",
            b"output_mode",
            "output_schema",
            b"output_schema",
            "state_schema",
            b"state_schema",
            "timeout_conf",
            b"timeout_conf",
        ],
    ) -> None: ...

global___ApplyInPandasWithState = ApplyInPandasWithState

class CommonInlineUserDefinedTableFunction(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    FUNCTION_NAME_FIELD_NUMBER: builtins.int
    DETERMINISTIC_FIELD_NUMBER: builtins.int
    ARGUMENTS_FIELD_NUMBER: builtins.int
    PYTHON_UDTF_FIELD_NUMBER: builtins.int
    function_name: builtins.str
    """(Required) Name of the user-defined table function."""
    deterministic: builtins.bool
    """(Optional) Whether the user-defined table function is deterministic."""
    @property
    def arguments(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Optional) Function input arguments. Empty arguments are allowed."""
    @property
    def python_udtf(self) -> global___PythonUDTF: ...
    def __init__(
        self,
        *,
        function_name: builtins.str = ...,
        deterministic: builtins.bool = ...,
        arguments: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
        | None = ...,
        python_udtf: global___PythonUDTF | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "function", b"function", "python_udtf", b"python_udtf"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "arguments",
            b"arguments",
            "deterministic",
            b"deterministic",
            "function",
            b"function",
            "function_name",
            b"function_name",
            "python_udtf",
            b"python_udtf",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["function", b"function"]
    ) -> typing_extensions.Literal["python_udtf"] | None: ...

global___CommonInlineUserDefinedTableFunction = CommonInlineUserDefinedTableFunction

class PythonUDTF(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RETURN_TYPE_FIELD_NUMBER: builtins.int
    EVAL_TYPE_FIELD_NUMBER: builtins.int
    COMMAND_FIELD_NUMBER: builtins.int
    PYTHON_VER_FIELD_NUMBER: builtins.int
    @property
    def return_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
        """(Optional) Return type of the Python UDTF."""
    eval_type: builtins.int
    """(Required) EvalType of the Python UDTF."""
    command: builtins.bytes
    """(Required) The encoded commands of the Python UDTF."""
    python_ver: builtins.str
    """(Required) Python version being used in the client."""
    def __init__(
        self,
        *,
        return_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        eval_type: builtins.int = ...,
        command: builtins.bytes = ...,
        python_ver: builtins.str = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_return_type", b"_return_type", "return_type", b"return_type"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_return_type",
            b"_return_type",
            "command",
            b"command",
            "eval_type",
            b"eval_type",
            "python_ver",
            b"python_ver",
            "return_type",
            b"return_type",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_return_type", b"_return_type"]
    ) -> typing_extensions.Literal["return_type"] | None: ...

global___PythonUDTF = PythonUDTF

class CollectMetrics(google.protobuf.message.Message):
    """Collect arbitrary (named) metrics from a dataset."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    INPUT_FIELD_NUMBER: builtins.int
    NAME_FIELD_NUMBER: builtins.int
    METRICS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) The input relation."""
    name: builtins.str
    """(Required) Name of the metrics."""
    @property
    def metrics(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.expressions_pb2.Expression
    ]:
        """(Required) The metric sequence."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        name: builtins.str = ...,
        metrics: collections.abc.Iterable[pyspark.sql.connect.proto.expressions_pb2.Expression]
        | None = ...,
    ) -> None: ...
    def HasField(
        self, field_name: typing_extensions.Literal["input", b"input"]
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "input", b"input", "metrics", b"metrics", "name", b"name"
        ],
    ) -> None: ...

global___CollectMetrics = CollectMetrics

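A minimal sketch of the client call, reusing `nums` from the GroupMap sketch; the metric name and expressions are illustrative, and retrieving the observed values (e.g. via a query listener) is out of scope here:

    # Sketch only: DataFrame.observe fills name and the metric expressions.
    from pyspark.sql.functions import count, lit, max as max_

    observed = nums.observe("stats", count(lit(1)).alias("rows"), max_("v").alias("max_v"))
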
class Parse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _ParseFormat:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _ParseFormatEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Parse._ParseFormat.ValueType],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        PARSE_FORMAT_UNSPECIFIED: Parse._ParseFormat.ValueType  # 0
        PARSE_FORMAT_CSV: Parse._ParseFormat.ValueType  # 1
        PARSE_FORMAT_JSON: Parse._ParseFormat.ValueType  # 2

    class ParseFormat(_ParseFormat, metaclass=_ParseFormatEnumTypeWrapper): ...
    PARSE_FORMAT_UNSPECIFIED: Parse.ParseFormat.ValueType  # 0
    PARSE_FORMAT_CSV: Parse.ParseFormat.ValueType  # 1
    PARSE_FORMAT_JSON: Parse.ParseFormat.ValueType  # 2

    class OptionsEntry(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_FIELD_NUMBER: builtins.int
        VALUE_FIELD_NUMBER: builtins.int
        key: builtins.str
        value: builtins.str
        def __init__(
            self,
            *,
            key: builtins.str = ...,
            value: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
        ) -> None: ...

    INPUT_FIELD_NUMBER: builtins.int
    FORMAT_FIELD_NUMBER: builtins.int
    SCHEMA_FIELD_NUMBER: builtins.int
    OPTIONS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> global___Relation:
        """(Required) Input relation to Parse. The input is expected to have a single text column."""
    format: global___Parse.ParseFormat.ValueType
    """(Required) The expected format of the text."""
    @property
    def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
        """(Optional) DataType representing the schema. If not set, Spark will infer the schema."""
    @property
    def options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
        """Options for the csv/json parser. The map key is case insensitive."""
    def __init__(
        self,
        *,
        input: global___Relation | None = ...,
        format: global___Parse.ParseFormat.ValueType = ...,
        schema: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
    ) -> None: ...
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "_schema", b"_schema", "input", b"input", "schema", b"schema"
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "_schema",
            b"_schema",
            "format",
            b"format",
            "input",
            b"input",
            "options",
            b"options",
            "schema",
            b"schema",
        ],
    ) -> None: ...
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["_schema", b"_schema"]
    ) -> typing_extensions.Literal["schema"] | None: ...

global___Parse = Parse